Package madgraph :: Package core :: Module diagram_generation
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
  18  multiparticle definitions. DiagramTag allows to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
import array
import copy
import itertools
import logging

import madgraph.core.base_objects as base_objects
import madgraph.various.misc as misc
from madgraph import InvalidCmd, MadGraph5Error
  30  logger = logging.getLogger('madgraph.diagram_generation') 
class NoDiagramException(InvalidCmd):
    """Raised when diagram generation yields no diagrams for the
    requested process."""
    pass
34
35 #=============================================================================== 36 # DiagramTag mother class 37 #=============================================================================== 38 39 -class DiagramTag(object):
40 """Class to tag diagrams based on objects with some __lt__ measure, e.g. 41 PDG code/interaction id (for comparing diagrams from the same amplitude), 42 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs). 43 Algorithm: Create chains starting from external particles: 44 1 \ / 6 45 2 /\______/\ 7 46 3_ / | \_ 8 47 4 / 5 \_ 9 48 \ 10 49 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867) 50 (((1,2,id12),(3,4,id34)),id1234), 51 5,id91086712345) 52 where idN is the id of the corresponding interaction. The ordering within 53 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has 54 depth 4, 5 has depht 0), and if equal on the ordering of the chain elements. 55 The determination of central vertex is based on minimizing the chain length 56 for the longest subchain. 57 This gives a unique tag which can be used to identify diagrams 58 (instead of symmetry), as well as identify identical matrix elements from 59 different processes.""" 60
    class DiagramTagError(Exception):
        """Exception for any problems in DiagramTags"""
        pass
64
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        diagram:  a base_objects.Diagram whose vertices are walked in order
        model:    passed through to link_from_leg/vertex_id_from_vertex
        ninitial: number of initial-state particles (default 2)
        """

        # wf_dict keeps track of the intermediate particles
        leg_dict = {}
        # Create the chain which will be the diagram tag
        for vertex in diagram.get('vertices'):
            # Only add incoming legs
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # If last vertex, all legs are incoming
                legs = vertex.get('legs')
            # Add links corresponding to the relevant legs; setdefault
            # reuses the link of an intermediate particle created by an
            # earlier vertex (keyed by leg number)
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))
            # Add vertex to leg_dict if not last one
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The resulting link is the hypothetical result
        self.tag = link

        # Now make sure to find the central vertex in the diagram,
        # defined by the longest leg being as short as possible.
        # If all links are external (depth 0), we are already done.
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Identify the longest chain in the tag (links are sorted,
            # so the first link is the longest)
            longest_chain = self.tag.links[0]
            # Create a new link corresponding to moving one step
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))
            # Create a new final vertex in the direction of the longest link
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # Switch to new tag, continue search
                self.tag = other_link
            else:
                # We have found the central vertex
                done = True
120
121 - def get_external_numbers(self):
122 """Get the order of external particles in this tag""" 123 124 return self.tag.get_external_numbers()
125
126 - def diagram_from_tag(self, model):
127 """Output a diagram from a DiagramTag. Note that each daughter 128 class must implement the static functions id_from_vertex_id 129 (if the vertex id is something else than an integer) and 130 leg_from_link (to pass the correct info from an end link to a 131 leg).""" 132 133 # Create the vertices, starting from the final vertex 134 diagram = base_objects.Diagram({'vertices': \ 135 self.vertices_from_link(self.tag, 136 model, 137 True)}) 138 diagram.calculate_orders(model) 139 return diagram
140 141 @classmethod 176 177 @classmethod
178 - def legPDGs_from_vertex_id(cls, vertex_id,model):
179 """Returns the list of external PDGs of the interaction corresponding 180 to this vertex_id.""" 181 182 # In case we have to deal with a regular vertex, we return the list 183 # external PDGs as given by the model information on that integer 184 # vertex id. 185 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]): 186 return vertex_id[2]['PDGs'] 187 else: 188 return [part.get_pdg_code() for part in model.get_interaction( 189 cls.id_from_vertex_id(vertex_id)).get('particles')]
190 191 @classmethod
192 - def leg_from_legs(cls,legs, vertex_id, model):
193 """Return a leg from a leg list and the model info""" 194 195 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model)) 196 197 # Extract the resulting pdg code from the interaction pdgs 198 for pdg in [leg.get('id') for leg in legs]: 199 pdgs.remove(pdg) 200 201 assert len(pdgs) == 1 202 # Prepare the new leg properties 203 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code() 204 number = min([l.get('number') for l in legs]) 205 # State is False for t-channel, True for s-channel 206 state = (len([l for l in legs if l.get('state') == False]) != 1) 207 # Note that this needs to be done before combining decay chains 208 onshell= False 209 210 return base_objects.Leg({'id': pdg, 211 'number': number, 212 'state': state, 213 'onshell': onshell})
214 215 @classmethod 228 229 @staticmethod 242 243 @staticmethod
244 - def id_from_vertex_id(vertex_id):
245 """Return the numerical vertex id from a link.vertex_id""" 246 247 return vertex_id[0][0]
248 249 @staticmethod
250 - def loop_info_from_vertex_id(vertex_id):
251 """Return the loop_info stored in this vertex id. Notice that the 252 IdentifyME tag does not store the loop_info, but should normally never 253 need access to it.""" 254 255 return vertex_id[2]
256 257 @staticmethod
258 - def reorder_permutation(perm, start_perm):
259 """Reorder a permutation with respect to start_perm. Note that 260 both need to start from 1.""" 261 if perm == start_perm: 262 return range(len(perm)) 263 order = [i for (p,i) in \ 264 sorted([(p,i) for (i,p) in enumerate(perm)])] 265 return [start_perm[i]-1 for i in order]
266 267 @staticmethod 278 279 @staticmethod
280 - def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
281 """Returns the default vertex id: just the interaction id 282 Note that in the vertex id, like the leg, only the first entry is 283 taken into account in the tag comparison, while the second is for 284 storing information that is not to be used in comparisons and the 285 third for additional info regarding the shrunk loop vertex.""" 286 287 if isinstance(vertex,base_objects.ContractedVertex): 288 # return (vertex.get('id'),(),{'PDGs':vertex.get('PDGs')}) 289 return ((vertex.get('id'),vertex.get('loop_tag')),(), 290 {'PDGs':vertex.get('PDGs')}) 291 else: 292 return ((vertex.get('id'),()),(),{})
293 294 @staticmethod
295 - def flip_vertex(new_vertex, old_vertex, links):
296 """Returns the default vertex flip: just the new_vertex""" 297 return new_vertex
298
299 - def __eq__(self, other):
300 """Equal if same tag""" 301 if type(self) != type(other): 302 return False 303 return self.tag == other.tag
304
305 - def __ne__(self, other):
306 return not self.__eq__(other)
307
308 - def __str__(self):
309 return str(self.tag)
310
311 - def __lt__(self, other):
312 return self.tag < other.tag
313
314 - def __gt__(self, other):
315 return self.tag > other.tag
    # repr reuses the same rendering as str
    __repr__ = __str__
318 404
405 #=============================================================================== 406 # Amplitude 407 #=============================================================================== 408 -class Amplitude(base_objects.PhysicsObject):
409 """Amplitude: process + list of diagrams (ordered) 410 Initialize with a process, then call generate_diagrams() to 411 generate the diagrams for the amplitude 412 """ 413
    def default_setup(self):
        """Default values for all properties"""

        # The process this amplitude corresponds to
        self['process'] = base_objects.Process()
        # None signals that diagrams have not been generated yet (see get())
        self['diagrams'] = None
        # has_mirror_process is True if the same process but with the
        # two incoming particles interchanged has been generated
        self['has_mirror_process'] = False
422
423 - def __init__(self, argument=None):
424 """Allow initialization with Process""" 425 if isinstance(argument, base_objects.Process): 426 super(Amplitude, self).__init__() 427 self.set('process', argument) 428 self.generate_diagrams() 429 elif argument != None: 430 # call the mother routine 431 super(Amplitude, self).__init__(argument) 432 else: 433 # call the mother routine 434 super(Amplitude, self).__init__()
435
    def filter(self, name, value):
        """Filter for valid amplitude property values.

        Raises PhysicsObjectError when value has the wrong type for the
        given property; returns True otherwise."""

        if name == 'process':
            if not isinstance(value, base_objects.Process):
                raise self.PhysicsObjectError, \
                      "%s is not a valid Process object" % str(value)
        if name == 'diagrams':
            if not isinstance(value, base_objects.DiagramList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid DiagramList object" % str(value)
        if name == 'has_mirror_process':
            if not isinstance(value, bool):
                raise self.PhysicsObjectError, \
                      "%s is not a valid boolean" % str(value)
        return True
452
    def get(self, name):
        """Get the value of the property name.

        Diagrams are generated lazily: the first access to 'diagrams'
        triggers generate_diagrams() if a process is set."""

        if name == 'diagrams' and self[name] == None:
            # Have not yet generated diagrams for this process
            if self['process']:
                self.generate_diagrams()

        return super(Amplitude, self).get(name)
        # return Amplitude.__bases__[0].get(self, name) #return the mother routine
465 - def get_sorted_keys(self):
466 """Return diagram property names as a nicely sorted list.""" 467 468 return ['process', 'diagrams', 'has_mirror_process']
469
470 - def get_number_of_diagrams(self):
471 """Returns number of diagrams for this amplitude""" 472 return len(self.get('diagrams'))
473
474 - def get_amplitudes(self):
475 """Return an AmplitudeList with just this amplitude. 476 Needed for DecayChainAmplitude.""" 477 478 return AmplitudeList([self])
479
480 - def nice_string(self, indent=0):
481 """Returns a nicely formatted string of the amplitude content.""" 482 return self.get('process').nice_string(indent) + "\n" + \ 483 self.get('diagrams').nice_string(indent)
484
485 - def nice_string_processes(self, indent=0):
486 """Returns a nicely formatted string of the amplitude process.""" 487 return self.get('process').nice_string(indent)
488
489 - def get_ninitial(self):
490 """Returns the number of initial state particles in the process.""" 491 return self.get('process').get_ninitial()
492
493 - def has_loop_process(self):
494 """ Returns wether this amplitude has a loop process.""" 495 496 return self.get('process').get('perturbation_couplings')
497
    def generate_diagrams(self, returndiag=False):
        """Generate diagrams. Algorithm:

        1. Define interaction dictionaries:
          * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
          * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1

        2. Set flag from_group=true for all external particles.
           Flip particle/anti particle for incoming particles.

        3. If there is a dictionary n->0 with n=number of external
           particles, create if possible the combination [(1,2,3,4,...)]
           with *at least two* from_group==true. This will give a
           finished (set of) diagram(s) (done by reduce_leglist)

        4. Create all allowed groupings of particles with at least one
           from_group==true (according to dictionaries n->1):
           [(1,2),3,4...],[1,(2,3),4,...],...,
           [(1,2),(3,4),...],...,[(1,2,3),4,...],...
           (done by combine_legs)

        5. Replace each group with a (list of) new particle(s) with number
           n = min(group numbers). Set from_group true for these
           particles and false for all other particles. Store vertex info.
           (done by merge_comb_legs)

        6. Stop algorithm when at most 2 particles remain.
           Return all diagrams (lists of vertices).

        7. Repeat from 3 (recursion done by reduce_leglist)

        8. Replace final p=p vertex

        Be aware that the resulting vertices have all particles outgoing,
        so need to flip for incoming particles when used.

        SPECIAL CASE: For A>BC... processes which are legs in decay
        chains, we need to ensure that BC... combine first, giving A=A
        as a final vertex. This case is defined by the Process
        property is_decay_chain = True.
        This function can also be called by the generate_diagram function
        of LoopAmplitudes, in which case the generated diagrams here must not
        be directly assigned to the 'diagrams' attributed but returned as a
        DiagramList by the function. This is controlled by the argument
        returndiag.
        """

        process = self.get('process')
        model = process.get('model')
        legs = process.get('legs')
        # Make sure orders is the minimum of orders and overall_orders
        for key in process.get('overall_orders').keys():
            try:
                process.get('orders')[key] = \
                                 min(process.get('orders')[key],
                                     process.get('overall_orders')[key])
            except KeyError:
                process.get('orders')[key] = process.get('overall_orders')[key]

        assert model.get('particles'), \
           "particles are missing in model: %s" % model.get('particles')

        assert model.get('interactions'), \
               "interactions are missing in model"

        res = base_objects.DiagramList()
        # First check that the number of fermions is even
        if len(filter(lambda leg: model.get('particle_dict')[\
                        leg.get('id')].is_fermion(), legs)) % 2 == 1:
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd, 'The number of fermion is odd'
            else:
                return False, res

        # Then check same number of incoming and outgoing fermions (if
        # no Majorana particles in model)
        if not model.get('got_majoranas') and \
           len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
           len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
            else:
                return False, res

        # Finally check that charge (conserve by all interactions) of the process
        # is globally conserve for this process.
        for charge in model.get('conserved_charge'):
            total = 0
            for leg in legs:
                part = model.get('particle_dict')[leg.get('id')]
                # Look the charge up first as a particle property, then as
                # a plain attribute; default to 0 when absent
                try:
                    value = part.get(charge)
                except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
                    try:
                        value = getattr(part, charge)
                    except AttributeError:
                        value = 0

                # Outgoing antiparticles and incoming particles count
                # with opposite sign
                if (leg.get('id') != part['pdg_code']) != leg['state']:
                    total -= value
                else:
                    total += value

            if abs(total) > 1e-10:
                if not returndiag:
                    self['diagrams'] = res
                    raise InvalidCmd, 'No %s conservation for this process ' % charge
                    # NOTE(review): this return is unreachable after the raise
                    return res
                else:
                    raise InvalidCmd, 'No %s conservation for this process ' % charge
                    # NOTE(review): this return is unreachable after the raise
                    return res, res

        if not returndiag:
            logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))

        # Give numbers to legs in process
        for i in range(0, len(process.get('legs'))):
            # Make sure legs are unique
            leg = copy.copy(process.get('legs')[i])
            process.get('legs')[i] = leg
            if leg.get('number') == 0:
                leg.set('number', i + 1)

        # Copy leglist from process, so we can flip leg identities
        # without affecting the original process
        leglist = self.copy_leglist(process.get('legs'))

        for leg in leglist:

            # For the first step, ensure the tag from_group
            # is true for all legs
            leg.set('from_group', True)

            # Need to flip part-antipart for incoming particles,
            # so they are all outgoing
            if leg.get('state') == False:
                part = model.get('particle_dict')[leg.get('id')]
                leg.set('id', part.get_anti_pdg_code())

        # Calculate the maximal multiplicity of n-1>1 configurations
        # to restrict possible leg combinations
        max_multi_to1 = max([len(key) for key in \
                             model.get('ref_dict_to1').keys()])

        # Reduce the leg list and return the corresponding
        # list of vertices

        # For decay processes, generate starting from final-state
        # combined only as the last particle. This allows to use these
        # in decay chains later on.
        is_decay_proc = process.get_ninitial() == 1
        if is_decay_proc:
            part = model.get('particle_dict')[leglist[0].get('id')]
            # For decay chain legs, we want everything to combine to
            # the initial leg. This is done by only allowing the
            # initial leg to combine as a final identity.
            ref_dict_to0 = {(part.get_pdg_code(), part.get_anti_pdg_code()):[0],
                            (part.get_anti_pdg_code(), part.get_pdg_code()):[0]}
            # Need to set initial leg from_group to None, to make sure
            # it can only be combined at the end.
            leglist[0].set('from_group', None)
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  process.get('orders'))
        else:
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  model.get('ref_dict_to0'),
                                                  is_decay_proc,
                                                  process.get('orders'))

        # In LoopAmplitude the function below is overloaded such that it
        # converts back all DGLoopLegs to Legs. In the default tree-level
        # diagram generation, this does nothing.
        self.convert_dgleg_to_leg(reduced_leglist)

        if reduced_leglist:
            for vertex_list in reduced_leglist:
                res.append(self.create_diagram(base_objects.VertexList(vertex_list)))

        # Record whether or not we failed generation before required
        # s-channel propagators are taken into account
        failed_crossing = not res

        # Required s-channels is a list of id-lists. Select the
        # diagrams where all required s-channel propagators in any of
        # the lists are present (i.e., the different lists correspond
        # to "or", while the elements of the list correspond to
        # "and").
        if process.get('required_s_channels') and \
               process.get('required_s_channels')[0]:
            # We shouldn't look at the last vertex in each diagram,
            # since that is the n->0 vertex
            lastvx = -1
            # For decay chain processes, there is an "artificial"
            # extra vertex corresponding to particle 1=1, so we need
            # to exclude the two last vertexes.
            if is_decay_proc: lastvx = -2
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))
            # Check required s-channels for each list in required_s_channels
            old_res = res
            res = base_objects.DiagramList()
            for id_list in process.get('required_s_channels'):
                res_diags = filter(lambda diagram: \
                          all([req_s_channel in \
                               [vertex.get_s_channel_id(\
                               process.get('model'), ninitial) \
                               for vertex in diagram.get('vertices')[:lastvx]] \
                               for req_s_channel in \
                               id_list]), old_res)
                # Add diagrams only if not already in res
                res.extend([diag for diag in res_diags if diag not in res])

        # Remove all diagrams with a "double" forbidden s-channel propagator
        # is present.
        # Note that we shouldn't look at the last vertex in each
        # diagram, since that is the n->0 vertex
        if process.get('forbidden_s_channels'):
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))
            if ninitial == 2:
                res = base_objects.DiagramList(\
                    filter(lambda diagram: \
                           not any([vertex.get_s_channel_id(\
                           process.get('model'), ninitial) \
                           in process.get('forbidden_s_channels')
                           for vertex in diagram.get('vertices')[:-1]]),
                           res))
            else:
                # split since we need to avoid that the initial particle is forbidden
                # as well.
                newres = []
                for diagram in res:
                    leg1 = 1
                    # check the latest vertex to see if the leg 1 is inside if it
                    # is we need to inverse the look-up and allow the first s-channel
                    # of the associate particles.
                    vertex = diagram.get('vertices')[-1]
                    if any([l['number'] == 1 for l in vertex.get('legs')]):
                        leg1 = [l['number'] for l in vertex.get('legs') if l['number'] != 1][0]
                    to_loop = range(len(diagram.get('vertices'))-1)
                    if leg1 > 1:
                        to_loop.reverse()
                    for i in to_loop:
                        vertex = diagram.get('vertices')[i]
                        if leg1:
                            if any([l['number'] == leg1 for l in vertex.get('legs')]):
                                leg1 = 0
                                continue
                        if vertex.get_s_channel_id(process.get('model'), ninitial)\
                                        in process.get('forbidden_s_channels'):
                            break
                    else:
                        # Only keep the diagram when the scan completed
                        # without hitting a forbidden s-channel
                        newres.append(diagram)
                res = base_objects.DiagramList(newres)

        # Mark forbidden (onshell) s-channel propagators, to forbid onshell
        # generation.
        if process.get('forbidden_onsh_s_channels'):
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))

            verts = base_objects.VertexList(sum([[vertex for vertex \
                                                  in diagram.get('vertices')[:-1]
                                                  if vertex.get_s_channel_id(\
                                                  process.get('model'), ninitial) \
                                                  in process.get('forbidden_onsh_s_channels')] \
                                                  for diagram in res], []))
            for vert in verts:
                # Use onshell = False to indicate that this s-channel is forbidden
                newleg = copy.copy(vert.get('legs').pop(-1))
                newleg.set('onshell', False)
                vert.get('legs').append(newleg)

        # Set actual coupling orders for each diagram
        for diagram in res:
            diagram.calculate_orders(model)

        # Filter the diagrams according to the squared coupling order
        # constraints and possible the negative one. Remember that OrderName=-n
        # means that the user wants to include everything up to the N^(n+1)LO
        # contribution in that order and at most one order can be restricted
        # in this way. We shall do this only if the diagrams are not asked to
        # be returned, as it is the case for NLO because it this case the
        # interference are not necessarily among the diagrams generated here only.
        if not returndiag and len(res) > 0:
            res = self.apply_squared_order_constraints(res)

        # Replace final id=0 vertex if necessary
        if not process.get('is_decay_chain'):
            for diagram in res:
                vertices = diagram.get('vertices')
                if len(vertices) > 1 and vertices[-1].get('id') == 0:
                    # Need to "glue together" last and next-to-last
                    # vertex, by replacing the (incoming) last leg of the
                    # next-to-last vertex with the (outgoing) leg in the
                    # last vertex
                    vertices = copy.copy(vertices)
                    lastvx = vertices.pop()
                    nexttolastvertex = copy.copy(vertices.pop())
                    legs = copy.copy(nexttolastvertex.get('legs'))
                    ntlnumber = legs[-1].get('number')
                    lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
                                     lastvx.get('legs'))[0]
                    # Reset onshell in case we have forbidden s-channels
                    if lastleg.get('onshell') == False:
                        lastleg.set('onshell', None)
                    # Replace the last leg of nexttolastvertex
                    legs[-1] = lastleg
                    nexttolastvertex.set('legs', legs)
                    vertices.append(nexttolastvertex)
                    diagram.set('vertices', vertices)

        if res and not returndiag:
            logger.info("Process has %d diagrams" % len(res))

        # Trim down number of legs and vertices used to save memory
        self.trim_diagrams(diaglist=res)

        # Sort process legs according to leg number
        pertur = 'QCD'
        if self.get('process')['perturbation_couplings']:
            pertur = sorted(self.get('process')['perturbation_couplings'])[0]
        self.get('process').get('legs').sort(pert=pertur)

        # Set diagrams to res if not asked to be returned
        if not returndiag:
            self['diagrams'] = res
            return not failed_crossing
        else:
            return not failed_crossing, res
837
    def apply_squared_order_constraints(self, diag_list):
        """Applies the user specified squared order constraints on the diagram
        list in argument and returns the filtered DiagramList."""

        res = copy.copy(diag_list)

        # Iterate the filtering since the applying the constraint on one
        # type of coupling order can impact what the filtering on a previous
        # one (relevant for the '==' type of constraint).
        while True:
            new_res = res.apply_positive_sq_orders(res,
                                   self['process'].get('squared_orders'),
                                   self['process']['sqorders_types'])
            # Exit condition: the filter reached a fixed point
            if len(res) == len(new_res):
                break
            elif (len(new_res) > len(res)):
                # Filtering can only ever remove diagrams
                # NOTE(review): MadGraph5Error is not imported in the visible
                # import block of this module -- confirm it is in scope here.
                raise MadGraph5Error(
                    'Inconsistency in function apply_squared_order_constraints().')
            # Actualizing the list of diagram for the next iteration
            res = new_res

        # Now treat the negative squared order constraint (at most one)
        neg_orders = [(order, value) for order, value in \
                          self['process'].get('squared_orders').items() if value < 0]
        if len(neg_orders) == 1:
            neg_order, neg_value = neg_orders[0]
            # Now check any negative order constraint
            res, target_order = res.apply_negative_sq_order(res, neg_order,\
                              neg_value, self['process']['sqorders_types'][neg_order])
            # Substitute the negative value to this positive one so that
            # the resulting computed constraints appears in the print out
            # and at the output stage we no longer have to deal with
            # negative valued target orders
            self['process']['squared_orders'][neg_order] = target_order
        elif len(neg_orders) > 1:
            raise InvalidCmd('At most one negative squared order constraint'+\
                             ' can be specified, not %s.' % str(neg_orders))

        return res
878
879 - def create_diagram(self, vertexlist):
880 """ Return a Diagram created from the vertex list. This function can be 881 overloaded by daughter classes.""" 882 return base_objects.Diagram({'vertices':vertexlist})
883
884 - def convert_dgleg_to_leg(self, vertexdoublelist):
885 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs. 886 In Amplitude, there is nothing to do. """ 887 888 return True
889
890 - def copy_leglist(self, legs):
891 """ Simply returns a copy of the leg list. This function is 892 overloaded in LoopAmplitude so that a DGLoopLeg list is returned. 893 The DGLoopLeg has some additional parameters only useful during 894 loop diagram generation""" 895 896 return base_objects.LegList(\ 897 [ copy.copy(leg) for leg in legs ])
898
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1
        For algorithm, see doc for generate_diagrams.
        """

        # Result variable which is a list of lists of vertices
        # to be added
        res = []

        # Stop condition. If LegList is None, that means that this
        # diagram must be discarded
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined in one single vertex, add this
        # vertex to res and continue.
        # Special treatment for decay chain legs
        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Extract the interaction id associated to the vertex
            vertex_ids = self.get_combined_vertices(curr_leglist,
                       copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                   leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]
            # Check for coupling orders. If orders < 0, skip vertex
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])
        # Stop condition 2: if the leglist contained exactly two particles,
        # return the result, if any, and stop.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid combinations of legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Create a list of leglists/vertices by merging combinations
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider all the pairs
        for leg_vertex_tuple in leg_vertex_list:

            # Remove forbidden particles
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                continue

            # Check for coupling orders. If couplings < 0, skip recursion.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order < 0
                continue

            # This is where recursion happens
            # First, reduce again the leg part
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)
            # If there is a reduced diagram
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
987
988 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
989 """Return False if the coupling orders for any coupling is < 990 0, otherwise return the new coupling orders with the vertex 991 orders subtracted. If coupling_orders is not given, return 992 None (which counts as success). 993 WEIGHTED is a special order, which corresponds to the sum of 994 order hierarchies for the couplings. 995 We ignore negative constraints as these cannot be taken into 996 account on the fly but only after generation.""" 997 998 if not coupling_orders: 999 return None 1000 1001 present_couplings = copy.copy(coupling_orders) 1002 for id in vertex_id_list: 1003 # Don't check for identity vertex (id = 0) 1004 if not id: 1005 continue 1006 inter = model.get("interaction_dict")[id] 1007 for coupling in inter.get('orders').keys(): 1008 # Note that we don't consider a missing coupling as a 1009 # constraint 1010 if coupling in present_couplings and \ 1011 present_couplings[coupling]>=0: 1012 # Reduce the number of couplings that are left 1013 present_couplings[coupling] -= \ 1014 inter.get('orders')[coupling] 1015 if present_couplings[coupling] < 0: 1016 # We have too many couplings of this type 1017 return False 1018 # Now check for WEIGHTED, i.e. the sum of coupling hierarchy values 1019 if 'WEIGHTED' in present_couplings and \ 1020 present_couplings['WEIGHTED']>=0: 1021 weight = sum([model.get('order_hierarchy')[c]*n for \ 1022 (c,n) in inter.get('orders').items()]) 1023 present_couplings['WEIGHTED'] -= weight 1024 if present_couplings['WEIGHTED'] < 0: 1025 # Total coupling weight too large 1026 return False 1027 1028 return present_couplings
1029
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
           If so:

           a. Append [12[34]56] to result array

           b. Split [123456] at index(first element in combination+1),
              i.e. [12],[456] and subtract combination from second half,
              i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
           (first half in step b - combination) + combination + (result
           from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
           resulting array. Example, if [13] and [45] are valid
           combinations:
           [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        res = []

        # loop over possible combination lengths (+1 is for range convention!)
        for comb_length in range(2, max_multi_to1 + 1):

            # Check the considered length is not longer than the list length;
            # combinations of longer lengths would be empty anyway, so we
            # can return what has been collected so far
            if comb_length > len(list_legs):
                return res

            # itertools.combinations returns all possible combinations
            # of comb_length elements from list_legs
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination is valid (i.e. the model has an
                # n->1 vertex merging exactly these legs)
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Identify the rest, create a list [comb,rest] and
                    # add it to res; the tuple comb is inserted at the
                    # position of its first element
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Now, deal with cases with more than 1 combination

                    # First, split the list into two, according to the
                    # position of the first element in comb, and remove
                    # all elements from comb
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Create a list of type [comb,rest1,rest2(combined)]
                    # (note: res_list1 is extended in place here)
                    res_list = res_list1
                    res_list.append(comb)
                    # This is where recursion actually happens,
                    # on the second part
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1103 1104
    def merge_comb_legs(self, comb_lists, ref_dict_to1):
        """Takes a list of allowed leg combinations as an input and returns
        a set of lists where combinations have been properly replaced
        (one list per element in the ref_dict, so that all possible intermediate
        particles are included). For each list, give the list of vertices
        corresponding to the executed merging, group the two as a tuple.
        """

        res = []

        for comb_list in comb_lists:

            # reduced_list collects legs / lists of candidate merged legs,
            # vertex_list collects the matching candidate vertices
            reduced_list = []
            vertex_list = []

            for entry in comb_list:

                # Act on all leg combinations (tuples mark combinations,
                # plain Legs are untouched remainders)
                if isinstance(entry, tuple):

                    # Build the leg object which will replace the combination:
                    # 1) leg ids is as given in the ref_dict
                    leg_vert_ids = copy.copy(ref_dict_to1[\
                        tuple(sorted([leg.get('id') for leg in entry]))])
                    # 2) number is the minimum of leg numbers involved in the
                    # combination
                    number = min([leg.get('number') for leg in entry])
                    # 3) state is final, unless there is exactly one initial
                    # state particle involved in the combination -> t-channel
                    # (Python 2: filter returns a list, so len() is legal)
                    if len(filter(lambda leg: leg.get('state') == False,
                                  entry)) == 1:
                        state = False
                    else:
                        state = True
                    # 4) from_group is True, by definition

                    # Create and add the object. This is done by a
                    # separate routine, to allow overloading by
                    # daughter classes
                    new_leg_vert_ids = []
                    if leg_vert_ids:
                        new_leg_vert_ids = self.get_combined_legs(entry,
                                                                  leg_vert_ids,
                                                                  number,
                                                                  state)

                    reduced_list.append([l[0] for l in new_leg_vert_ids])


                    # Create and add the corresponding vertex
                    # Extract vertex ids corresponding to the various legs
                    # in mylegs
                    vlist = base_objects.VertexList()
                    for (myleg, vert_id) in new_leg_vert_ids:
                        # Start with the considered combination...
                        myleglist = base_objects.LegList(list(entry))
                        # ... and complete with legs after reducing
                        myleglist.append(myleg)
                        # ... and consider the correct vertex id
                        vlist.append(base_objects.Vertex(
                                         {'legs':myleglist,
                                          'id':vert_id}))

                    vertex_list.append(vlist)

                # If entry is not a combination, switch the from_group flag
                # and add it
                else:
                    cp_entry = copy.copy(entry)
                    # Need special case for from_group == None; this
                    # is for initial state leg of decay chain process
                    # (see Leg.can_combine_to_0)
                    if cp_entry.get('from_group') != None:
                        cp_entry.set('from_group', False)
                    reduced_list.append(cp_entry)

            # Flatten the obtained leg and vertex lists: one entry per
            # choice of intermediate particle for every combination
            flat_red_lists = expand_list(reduced_list)
            flat_vx_lists = expand_list(vertex_list)

            # Combine the two lists in a list of tuple
            for i in range(0, len(flat_vx_lists)):
                res.append((base_objects.LegList(flat_red_lists[i]), \
                            base_objects.VertexList(flat_vx_lists[i])))

        return res
1191
1192 - def get_combined_legs(self, legs, leg_vert_ids, number, state):
1193 """Create a set of new legs from the info given. This can be 1194 overloaded by daughter classes.""" 1195 1196 mylegs = [(base_objects.Leg({'id':leg_id, 1197 'number':number, 1198 'state':state, 1199 'from_group':True}), 1200 vert_id)\ 1201 for leg_id, vert_id in leg_vert_ids] 1202 1203 return mylegs
1204
1205 - def get_combined_vertices(self, legs, vert_ids):
1206 """Allow for selection of vertex ids. This can be 1207 overloaded by daughter classes.""" 1208 1209 return vert_ids
1210
1211 - def trim_diagrams(self, decay_ids=[], diaglist=None):
1212 """Reduce the number of legs and vertices used in memory. 1213 When called by a diagram generation initiated by LoopAmplitude, 1214 this function should not trim the diagrams in the attribute 'diagrams' 1215 but rather a given list in the 'diaglist' argument.""" 1216 1217 legs = [] 1218 vertices = [] 1219 1220 if diaglist is None: 1221 diaglist=self.get('diagrams') 1222 1223 # Flag decaying legs in the core process by onshell = True 1224 process = self.get('process') 1225 for leg in process.get('legs'): 1226 if leg.get('state') and leg.get('id') in decay_ids: 1227 leg.set('onshell', True) 1228 1229 for diagram in diaglist: 1230 # Keep track of external legs (leg numbers already used) 1231 leg_external = set() 1232 for ivx, vertex in enumerate(diagram.get('vertices')): 1233 for ileg, leg in enumerate(vertex.get('legs')): 1234 # Ensure that only external legs get decay flag 1235 if leg.get('state') and leg.get('id') in decay_ids and \ 1236 leg.get('number') not in leg_external: 1237 # Use onshell to indicate decaying legs, 1238 # i.e. legs that have decay chains 1239 leg = copy.copy(leg) 1240 leg.set('onshell', True) 1241 try: 1242 index = legs.index(leg) 1243 except ValueError: 1244 vertex.get('legs')[ileg] = leg 1245 legs.append(leg) 1246 else: # Found a leg 1247 vertex.get('legs')[ileg] = legs[index] 1248 leg_external.add(leg.get('number')) 1249 try: 1250 index = vertices.index(vertex) 1251 diagram.get('vertices')[ivx] = vertices[index] 1252 except ValueError: 1253 vertices.append(vertex)
1254
#===============================================================================
# AmplitudeList
#===============================================================================
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects"""

    def has_any_loop_process(self):
        """Return True if any amplitude in this list has a process which
        defines perturbation couplings, False otherwise."""

        for amp in self:
            if amp.has_loop_process():
                return True
        # Explicit False instead of the former implicit None fall-through,
        # so the method always returns a proper boolean
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1274
#===============================================================================
# DecayChainAmplitude
#===============================================================================
class DecayChainAmplitude(Amplitude):
    """A list of amplitudes + a list of decay chain amplitude lists;
    corresponding to a ProcessDefinition with a list of decay chains
    """

    def default_setup(self):
        """Default values for all properties"""

        # Amplitudes of the core process
        self['amplitudes'] = AmplitudeList()
        # One DecayChainAmplitude per decay chain attached to the core
        self['decay_chains'] = DecayChainAmplitudeList()

    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False):
        """Allow initialization with Process and with ProcessDefinition.

        When argument is a Process/ProcessDefinition, its decay chains
        are recursively turned into nested DecayChainAmplitude objects
        and the decaying legs of the core diagrams are flagged."""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Deferred import to avoid a circular dependency with the
            # loop diagram generation module
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                    collect_mirror_procs,
                                                    ignore_six_quark_processes))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    # NOTE(review): MadGraph5Error is not among this
                    # module's visible imports -- verify it is in scope
                    raise MadGraph5Error,\
                          "Decay processes can not be perturbed"
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd,\
                          "Decay chain process must have exactly one" + \
                          " incoming particle"
                # Recursively build the amplitudes of this decay chain
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes))

            # Flag decaying legs in the core diagrams by onshell = True;
            # the decaying ids are the initial-state ids of every decay
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least some process;
            # ids found in some core process are removed from the set
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                # 'amp' deliberately refers to the last amplitude of the
                # loop above; all amplitudes share the same model
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
  "$RED Decay without corresponding particle in core process found.\n" + \
  "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
  "Please check your process definition carefully. \n" + \
  "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
  "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

            # Remove unused decays from the process list (iterate in
            # reverse so removal does not disturb the iteration)
            for dc in reversed(self['decay_chains']):
                for a in reversed(dc.get('amplitudes')):
                    # Remove the amplitudes from this decay chain
                    if a.get('process').get('legs')[0].get('id') in decay_ids:
                        dc.get('amplitudes').remove(a)
                if not dc.get('amplitudes'):
                    # If no amplitudes left, remove the decay chain
                    self['decay_chains'].remove(dc)

            # Finally, write a fat warning if any decay process has
            # the decaying particle (or its antiparticle) in the final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
  "$RED Decay(s) with particle decaying to itself:\n" + \
  '\n'.join([p.nice_string() for p in bad_procs]) + \
  "\nPlease check your process definition carefully. \n")


        elif argument != None:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__()

    def filter(self, name, value):
        """Filter for valid amplitude property values."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid AmplitudeList" % str(value)
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid DecayChainAmplitudeList object" % \
                      str(value)
        return True

    def get_sorted_keys(self):
        """Return diagram property names as a nicely sorted list."""

        return ['amplitudes', 'decay_chains']

    # Helper functions

    def get_number_of_diagrams(self):
        """Returns number of diagrams for this amplitude, including the
        diagrams of all nested decay chains"""
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))

    def nice_string(self, indent = 0):
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Drop the trailing newline
        return mystr[:-1]

    def nice_string_processes(self, indent = 0):
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Drop the trailing newline
        return mystr[:-1]

    def get_ninitial(self):
        """Returns the number of initial state particles in the process."""
        return self.get('amplitudes')[0].get('process').get_ninitial()

    def get_decay_ids(self):
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # Get all amplitudes for the decay processes
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            # For each amplitude, find the initial state leg
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Return a list with unique ids
        return list(set(decay_ids))

    def has_loop_process(self):
        """ Returns whether this amplitude has a loop process."""
        return self['amplitudes'].has_any_loop_process()

    def get_amplitudes(self):
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        amplitudes.extend(self.get('amplitudes'))
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
1476
#===============================================================================
# DecayChainAmplitudeList
#===============================================================================
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects"""

    def is_valid_element(self, obj):
        """A valid element is any DecayChainAmplitude instance."""

        return isinstance(obj, DecayChainAmplitude)
1489
1490 1491 #=============================================================================== 1492 # MultiProcess 1493 #=============================================================================== 1494 -class MultiProcess(base_objects.PhysicsObject):
1495 """MultiProcess: list of process definitions 1496 list of processes (after cleaning) 1497 list of amplitudes (after generation) 1498 """ 1499
    def default_setup(self):
        """Default values for all properties"""

        self['process_definitions'] = base_objects.ProcessDefinitionList()
        # self['amplitudes'] can be an AmplitudeList or a
        # DecayChainAmplitudeList, depending on whether there are
        # decay chains in the process definitions or not.
        self['amplitudes'] = AmplitudeList()
        # Flag for whether to combine IS mirror processes together
        self['collect_mirror_procs'] = False
        # List of quark flavors where we ignore processes with at
        # least 6 quarks (three quark lines)
        self['ignore_six_quark_processes'] = []
        # Allow to use the model parameter numerical value for optimization.
        # This is currently used for 1->N generation (mass check).
        self['use_numerical'] = False
1516
1517 - def __init__(self, argument=None, collect_mirror_procs = False, 1518 ignore_six_quark_processes = [], optimize=False):
1519 """Allow initialization with ProcessDefinition or 1520 ProcessDefinitionList 1521 optimize allows to use param_card information. (usefull for 1-.N)""" 1522 1523 if isinstance(argument, base_objects.ProcessDefinition): 1524 super(MultiProcess, self).__init__() 1525 self['process_definitions'].append(argument) 1526 elif isinstance(argument, base_objects.ProcessDefinitionList): 1527 super(MultiProcess, self).__init__() 1528 self['process_definitions'] = argument 1529 elif argument != None: 1530 # call the mother routine 1531 super(MultiProcess, self).__init__(argument) 1532 else: 1533 # call the mother routine 1534 super(MultiProcess, self).__init__() 1535 1536 self['collect_mirror_procs'] = collect_mirror_procs 1537 self['ignore_six_quark_processes'] = ignore_six_quark_processes 1538 self['use_numerical'] = optimize 1539 1540 if isinstance(argument, base_objects.ProcessDefinition) or \ 1541 isinstance(argument, base_objects.ProcessDefinitionList): 1542 # Generate the diagrams 1543 self.get('amplitudes')
1544 1545
1546 - def filter(self, name, value):
1547 """Filter for valid process property values.""" 1548 1549 if name == 'process_definitions': 1550 if not isinstance(value, base_objects.ProcessDefinitionList): 1551 raise self.PhysicsObjectError, \ 1552 "%s is not a valid ProcessDefinitionList object" % str(value) 1553 1554 if name == 'amplitudes': 1555 if not isinstance(value, diagram_generation.AmplitudeList): 1556 raise self.PhysicsObjectError, \ 1557 "%s is not a valid AmplitudeList object" % str(value) 1558 1559 if name in ['collect_mirror_procs']: 1560 if not isinstance(value, bool): 1561 raise self.PhysicsObjectError, \ 1562 "%s is not a valid boolean" % str(value) 1563 1564 if name == 'ignore_six_quark_processes': 1565 if not isinstance(value, list): 1566 raise self.PhysicsObjectError, \ 1567 "%s is not a valid list" % str(value) 1568 1569 return True
1570
    def get(self, name):
        """Get the value of the property name.

        The 'amplitudes' property is generated lazily: the first time it
        is requested while still empty, diagram generation is run for
        every process definition -- as a DecayChainAmplitude when the
        definition carries decay chains, as plain amplitudes otherwise."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process
                    # Store amplitude(s) as DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                            self.get('collect_mirror_procs'),
                                            self.get('ignore_six_quark_processes')))
                else:
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                           self.get('collect_mirror_procs'),
                                           self.get('ignore_six_quark_processes'),
                                           self['use_numerical']))

        # Delegate the actual lookup to the first base class (PhysicsObject)
        return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1591
1592 - def get_sorted_keys(self):
1593 """Return process property names as a nicely sorted list.""" 1594 1595 return ['process_definitions', 'amplitudes']
1596 1597 @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation.  Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
               "%s not valid ProcessDefinition object" % \
               repr(process_definition)

        # Set automatic coupling orders
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        # NOTE(review): 'processes' is never used below
        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs and success_procs are sorted processes that have
        # already failed/succeeded based on crossing symmetry
        failed_procs = []
        success_procs = []
        # Complete processes, for identification of mirror processes
        non_permuted_procs = []
        # permutations keeps the permutations of the crossed processes
        permutations = []

        # Store the diagram tags for processes, to allow for
        # identifying identical matrix elements already at this stage.
        model = process_definition['model']

        # Lists of allowed particle ids, one entry per initial/final leg
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]

        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False}) \
                    for id in prod]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = []

            for prod in itertools.product(*fsids):

                # Remove double counting between final states
                if tuple(sorted(prod)) in red_fsidlist:
                    continue

                red_fsidlist.append(tuple(sorted(prod)));

                # Generate leg list for process
                leg_list = [copy.copy(leg) for leg in islegs]

                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                legs = base_objects.LegList(leg_list)

                # Check for crossed processes: sort the outgoing ids and
                # remember which permutation achieves the sorted order
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]
                # Compact integer array used as the crossing lookup key
                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Check for six-quark processes
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # Check if crossed process has already failed,
                # in that case don't check process
                if sorted_legs in failed_procs:
                    continue

                # If allowed check mass validity [assume 1->N]
                if use_numerical:
                    # check that final state has lower mass than initial state
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup process
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                       process_definition.get_ninitial() == 2:
                    # Check if mirrored process is already generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                                   amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Check for successful crossings, unless we have specified
                # properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain'):
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # The relabeling of legs for loop amplitudes is cumbersome
                        # and does not save so much time. It is disable here and
                        # we use the key 'loop_diagrams' to decide whether
                        # it is an instance of LoopAmplitude.
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing found, just continue
                        pass
                    else:
                        # Found crossing - reuse amplitude
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create new amplitude
                amplitude = cls.get_amplitude_from_proc(process)

                try:
                    result = amplitude.generate_diagrams()
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                # Re-raise the single recorded InvalidCmd unchanged
                raise error
            else:
                raise NoDiagramException, \
                  "No amplitudes generated from process %s. Please enter a valid process" % \
                  process_definition.nice_string()


        # Return the produced amplitudes
        return amplitudes
1783 1784 @classmethod
1785 - def get_amplitude_from_proc(cls,proc):
1786 """ Return the correct amplitude type according to the characteristics of 1787 the process proc """ 1788 return Amplitude({"process": proc})
1789 1790 1791 @staticmethod
1792 - def find_optimal_process_orders(process_definition):
1793 """Find the minimal WEIGHTED order for this set of processes. 1794 1795 The algorithm: 1796 1797 1) Check the coupling hierarchy of the model. Assign all 1798 particles to the different coupling hierarchies so that a 1799 particle is considered to be in the highest hierarchy (i.e., 1800 with lowest value) where it has an interaction. 1801 1802 2) Pick out the legs in the multiprocess according to the 1803 highest hierarchy represented (so don't mix particles from 1804 different hierarchy classes in the same multiparticles!) 1805 1806 3) Find the starting maximum WEIGHTED order as the sum of the 1807 highest n-2 weighted orders 1808 1809 4) Pick out required s-channel particle hierarchies, and use 1810 the highest of the maximum WEIGHTED order from the legs and 1811 the minimum WEIGHTED order extracted from 2*s-channel 1812 hierarchys plus the n-2-2*(number of s-channels) lowest 1813 leg weighted orders. 1814 1815 5) Run process generation with the WEIGHTED order determined 1816 in 3)-4) - # final state gluons, with all gluons removed from 1817 the final state 1818 1819 6) If no process is found, increase WEIGHTED order by 1 and go 1820 back to 5), until we find a process which passes. Return that 1821 order. 1822 1823 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1. 
1824 If still no process has passed, return 1825 WEIGHTED = (n-2)*(highest hierarchy) 1826 """ 1827 1828 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1829 "%s not valid ProcessDefinition object" % \ 1830 repr(process_definition) 1831 1832 processes = base_objects.ProcessList() 1833 amplitudes = AmplitudeList() 1834 1835 # If there are already couplings defined, return 1836 if process_definition.get('orders') or \ 1837 process_definition.get('overall_orders') or \ 1838 process_definition.get('NLO_mode')=='virt': 1839 return process_definition.get('orders') 1840 1841 # If this is a decay process (and not a decay chain), return 1842 if process_definition.get_ninitial() == 1 and not \ 1843 process_definition.get('is_decay_chain'): 1844 return process_definition.get('orders') 1845 1846 logger.info("Checking for minimal orders which gives processes.") 1847 logger.info("Please specify coupling orders to bypass this step.") 1848 1849 # Calculate minimum starting guess for WEIGHTED order 1850 max_order_now, particles, hierarchy = \ 1851 process_definition.get_minimum_WEIGHTED() 1852 coupling = 'WEIGHTED' 1853 1854 model = process_definition.get('model') 1855 1856 # Extract the initial and final leg ids 1857 isids = [leg['ids'] for leg in \ 1858 filter(lambda leg: leg['state'] == False, process_definition['legs'])] 1859 fsids = [leg['ids'] for leg in \ 1860 filter(lambda leg: leg['state'] == True, process_definition['legs'])] 1861 1862 max_WEIGHTED_order = \ 1863 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED()) 1864 1865 # Run diagram generation with increasing max_order_now until 1866 # we manage to get diagrams 1867 while max_order_now < max_WEIGHTED_order: 1868 1869 logger.info("Trying coupling order WEIGHTED=%d" % max_order_now) 1870 1871 oldloglevel = logger.level 1872 logger.setLevel(logging.WARNING) 1873 1874 # failed_procs are processes that have already failed 1875 # based on crossing symmetry 1876 failed_procs = [] 1877 1878 # 
Generate all combinations for the initial state 1879 for prod in apply(itertools.product, isids): 1880 islegs = [ base_objects.Leg({'id':id, 'state': False}) \ 1881 for id in prod] 1882 1883 # Generate all combinations for the final state, and make 1884 # sure to remove double counting 1885 1886 red_fsidlist = [] 1887 1888 for prod in apply(itertools.product, fsids): 1889 1890 # Remove double counting between final states 1891 if tuple(sorted(prod)) in red_fsidlist: 1892 continue 1893 1894 red_fsidlist.append(tuple(sorted(prod))); 1895 1896 # Remove gluons from final state if QCD is among 1897 # the highest coupling hierarchy 1898 nglue = 0 1899 if 21 in particles[0]: 1900 nglue = len([id for id in prod if id == 21]) 1901 prod = [id for id in prod if id != 21] 1902 1903 # Generate leg list for process 1904 leg_list = [copy.copy(leg) for leg in islegs] 1905 1906 leg_list.extend([\ 1907 base_objects.Leg({'id':id, 'state': True}) \ 1908 for id in prod]) 1909 1910 legs = base_objects.LegList(leg_list) 1911 1912 # Set summed coupling order according to max_order_now 1913 # subtracting the removed gluons 1914 coupling_orders_now = {coupling: max_order_now - \ 1915 nglue * model['order_hierarchy']['QCD']} 1916 1917 # Setup process 1918 process = base_objects.Process({\ 1919 'legs':legs, 1920 'model':model, 1921 'id': process_definition.get('id'), 1922 'orders': coupling_orders_now, 1923 'required_s_channels': \ 1924 process_definition.get('required_s_channels'), 1925 'forbidden_onsh_s_channels': \ 1926 process_definition.get('forbidden_onsh_s_channels'), 1927 'sqorders_types': \ 1928 process_definition.get('sqorders_types'), 1929 'squared_orders': \ 1930 process_definition.get('squared_orders'), 1931 'split_orders': \ 1932 process_definition.get('split_orders'), 1933 'forbidden_s_channels': \ 1934 process_definition.get('forbidden_s_channels'), 1935 'forbidden_particles': \ 1936 process_definition.get('forbidden_particles'), 1937 'is_decay_chain': \ 1938 
process_definition.get('is_decay_chain'), 1939 'overall_orders': \ 1940 process_definition.get('overall_orders'), 1941 'split_orders': \ 1942 process_definition.get('split_orders')}) 1943 1944 # Check for couplings with given expansion orders 1945 process.check_expansion_orders() 1946 1947 # Check for crossed processes 1948 sorted_legs = sorted(legs.get_outgoing_id_list(model)) 1949 # Check if crossed process has already failed 1950 # In that case don't check process 1951 if tuple(sorted_legs) in failed_procs: 1952 continue 1953 1954 amplitude = Amplitude({'process': process}) 1955 try: 1956 amplitude.generate_diagrams() 1957 except InvalidCmd: 1958 failed_procs.append(tuple(sorted_legs)) 1959 else: 1960 if amplitude.get('diagrams'): 1961 # We found a valid amplitude. Return this order number 1962 logger.setLevel(oldloglevel) 1963 return {coupling: max_order_now} 1964 else: 1965 failed_procs.append(tuple(sorted_legs)) 1966 1967 # No processes found, increase max_order_now 1968 max_order_now += 1 1969 logger.setLevel(oldloglevel) 1970 1971 # If no valid processes found with nfinal-1 couplings, return maximal 1972 return {coupling: max_order_now}
1973 1974 @staticmethod
1975 - def cross_amplitude(amplitude, process, org_perm, new_perm):
1976 """Return the amplitude crossed with the permutation new_perm""" 1977 # Create dict from original leg numbers to new leg numbers 1978 perm_map = dict(zip(org_perm, new_perm)) 1979 # Initiate new amplitude 1980 new_amp = copy.copy(amplitude) 1981 # Number legs 1982 for i, leg in enumerate(process.get('legs')): 1983 leg.set('number', i+1) 1984 # Set process 1985 new_amp.set('process', process) 1986 # Now replace the leg numbers in the diagrams 1987 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 1988 process.get('legs'),) for \ 1989 d in new_amp.get('diagrams')]) 1990 new_amp.set('diagrams', diagrams) 1991 new_amp.trim_diagrams() 1992 1993 # Make sure to reset mirror process 1994 new_amp.set('has_mirror_process', False) 1995 1996 return new_amp
1997
#===============================================================================
# Global helper methods
#===============================================================================

def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Check that argument is a list
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap bare elements in singleton lists so that itertools.product
    # can combine them uniformly with the genuine sublists.
    tmplist = [item if isinstance(item, list) else [item] for item in mylist]

    # apply() was removed in Python 3; star-unpacking the argument list
    # into itertools.product is the portable, behaviorally identical form.
    return [list(item) for item in itertools.product(*tmplist)]
2023
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # An empty argument, or a single empty sublist, expands to one empty list
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    # The first element must at least be a list
    assert isinstance(mylist[0], list), \
        "Expand_list_list needs a list of lists and lists of lists"

    head, tail = mylist[0], mylist[1:]

    # Recursion stop condition: a single element is already in its
    # final list-of-lists form (either directly or via its wrapper)
    if not tail:
        return head if isinstance(head[0], list) else mylist

    # Alternatives contributed by the head element: a list of lists
    # offers each of its inner lists, a flat list offers only itself
    choices = head if isinstance(head[0], list) else [head]

    # Prepend every alternative to every expansion of the remainder
    return [prefix + rest
            for prefix in choices
            for rest in expand_list_list(tail)]
2063