Package madgraph :: Package core :: Module diagram_generation
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
  18  multiparticle definitions. DiagramTag allows to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
  22  from __future__ import absolute_import 
  23  from six.moves import filter 
  24  #force filter to be a generator # like in py3 
  25   
  26   
  27  import array 
  28  import copy 
  29  import itertools 
  30  import logging 
  31   
  32  import madgraph.core.base_objects as base_objects 
  33  import madgraph.various.misc as misc 
  34  from madgraph import InvalidCmd, MadGraph5Error 
  35  from six.moves import range 
  36  from six.moves import zip 
  37   
  38  logger = logging.getLogger('madgraph.diagram_generation') 
class NoDiagramException(InvalidCmd):
    """Raised when diagram generation yields no valid diagrams.
    Subclasses InvalidCmd so it is reported as a user-level error.
    NOTE(review): purpose inferred from the name; it is raised outside
    the visible part of this module -- confirm against callers."""
    pass
42
43 #=============================================================================== 44 # DiagramTag mother class 45 #=============================================================================== 46 47 -class DiagramTag(object):
48 """Class to tag diagrams based on objects with some __lt__ measure, e.g. 49 PDG code/interaction id (for comparing diagrams from the same amplitude), 50 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs). 51 Algorithm: Create chains starting from external particles: 52 1 \ / 6 53 2 /\______/\ 7 54 3_ / | \_ 8 55 4 / 5 \_ 9 56 \ 10 57 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867) 58 (((1,2,id12),(3,4,id34)),id1234), 59 5,id91086712345) 60 where idN is the id of the corresponding interaction. The ordering within 61 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has 62 depth 4, 5 has depht 0), and if equal on the ordering of the chain elements. 63 The determination of central vertex is based on minimizing the chain length 64 for the longest subchain. 65 This gives a unique tag which can be used to identify diagrams 66 (instead of symmetry), as well as identify identical matrix elements from 67 different processes.""" 68
class DiagramTagError(Exception):
    """Exception for any problems in DiagramTags (raised by daughter
    classes / helpers when a tag cannot be built or interpreted)."""
    pass
72
def __init__(self, diagram, model=None, ninitial=2):
    """Initialize with a diagram. Create DiagramTagChainLinks according to
    the diagram, and figure out if we need to shift the central vertex.

    diagram: a base_objects.Diagram whose vertices are walked in order.
    model: passed through to link_from_leg / vertex_id_from_vertex.
    ninitial: number of initial-state legs (default 2).

    NOTE(review): assumes the diagram has at least one vertex -- with an
    empty vertex list, `link` below would be unbound."""

    # wf_dict keeps track of the intermediate particles
    leg_dict = {}
    # Create the chain which will be the diagram tag
    for vertex in diagram.get('vertices'):
        # Only add incoming legs (the last leg of a vertex is its output)
        legs = vertex.get('legs')[:-1]
        lastvx = vertex == diagram.get('vertices')[-1]
        if lastvx:
            # If last vertex, all legs are incoming
            legs = vertex.get('legs')
        # Add links corresponding to the relevant legs; setdefault reuses
        # the link already built for an intermediate particle number
        link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                    DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                    for leg in legs],
                                    self.vertex_id_from_vertex(vertex,
                                                               lastvx,
                                                               model,
                                                               ninitial))
        # Add vertex to leg_dict if not last one
        if not lastvx:
            leg_dict[vertex.get('legs')[-1].get('number')] = link

    # The resulting link is the hypothetical result
    self.tag = link

    # Now make sure to find the central vertex in the diagram,
    # defined by the longest leg being as short as possible.
    # If all links are external (depth 0) there is nothing to shift.
    done = max([l.depth for l in self.tag.links]) == 0
    while not done:
        # Identify the longest chain in the tag
        # (presumably links are kept ordered so links[0] is the deepest
        # chain -- TODO confirm against DiagramTagChainLink ordering)
        longest_chain = self.tag.links[0]
        # Create a new link corresponding to moving one step
        new_link = DiagramTagChainLink(self.tag.links[1:],
                                       self.flip_vertex(\
                                           self.tag.vertex_id,
                                           longest_chain.vertex_id,
                                           self.tag.links[1:]))
        # Create a new final vertex in the direction of the longest link
        other_links = list(longest_chain.links) + [new_link]
        other_link = DiagramTagChainLink(other_links,
                                         self.flip_vertex(\
                                             longest_chain.vertex_id,
                                             self.tag.vertex_id,
                                             other_links))

        if other_link.links[0] < self.tag.links[0]:
            # Switch to new tag, continue search
            self.tag = other_link
        else:
            # We have found the central vertex
            done = True
128
def get_external_numbers(self):
    """Return the ordering of external particles encoded in this tag."""

    numbers = self.tag.get_external_numbers()
    return numbers
133
def diagram_from_tag(self, model):
    """Reconstruct a diagram from this DiagramTag.

    Daughter classes must implement the static functions
    id_from_vertex_id (when the vertex id is not a plain integer) and
    leg_from_link (to pass the correct info from an end link to a leg).
    """

    # Build the vertex list starting from the final (central) vertex,
    # then wrap it into a Diagram and compute its coupling orders.
    vertices = self.vertices_from_link(self.tag, model, True)
    diagram = base_objects.Diagram({'vertices': vertices})
    diagram.calculate_orders(model)
    return diagram
148 149 @classmethod 184 185 @classmethod
@classmethod
def legPDGs_from_vertex_id(cls, vertex_id, model):
    """Return the list of external PDG codes of the interaction
    corresponding to this vertex_id."""

    # Shrunk loop vertices carry their PDG list directly in the
    # auxiliary info dictionary (third entry of the vertex id).
    if len(vertex_id) >= 3 and 'PDGs' in vertex_id[2]:
        return vertex_id[2]['PDGs']

    # Regular vertex: look the interaction up in the model using the
    # numerical id stored in the tag.
    interaction = model.get_interaction(cls.id_from_vertex_id(vertex_id))
    return [part.get_pdg_code() for part in interaction.get('particles')]
198 199 @classmethod
@classmethod
def leg_from_legs(cls, legs, vertex_id, model):
    """Return the leg produced when `legs` are combined through the
    interaction identified by `vertex_id`, using the model info."""

    remaining = list(cls.legPDGs_from_vertex_id(vertex_id, model))

    # Strike the pdg of every incoming leg from the interaction's pdg
    # list; exactly one pdg must be left over.
    for leg in legs:
        remaining.remove(leg.get('id'))
    assert len(remaining) == 1

    # The produced particle is the anti-particle of the leftover pdg
    new_pdg = model.get_particle(remaining[0]).get_anti_pdg_code()
    new_number = min([l.get('number') for l in legs])
    # State is False for t-channel (exactly one initial-state leg among
    # the combined ones), True for s-channel
    n_initial = len([l for l in legs if l.get('state') == False])
    # Note that onshell needs to be set before combining decay chains
    return base_objects.Leg({'id': new_pdg,
                             'number': new_number,
                             'state': n_initial != 1,
                             'onshell': False})
222 223 @classmethod 236 237 @staticmethod 250 251 @staticmethod
@staticmethod
def id_from_vertex_id(vertex_id):
    """Return the numerical vertex id from a link.vertex_id."""

    # Only the first entry of the vertex id participates in tag
    # comparisons; its first element is the numerical interaction id.
    comparison_part = vertex_id[0]
    return comparison_part[0]
256 257 @staticmethod
@staticmethod
def loop_info_from_vertex_id(vertex_id):
    """Return the loop_info stored in this vertex id. The IdentifyME
    tag does not store the loop_info, but should normally never need
    access to it."""

    loop_info = vertex_id[2]
    return loop_info
264 265 @staticmethod
@staticmethod
def reorder_permutation(perm, start_perm):
    """Reorder a permutation with respect to start_perm. Note that
    both need to start from 1."""

    # Identical permutations map to the identity ordering
    if perm == start_perm:
        return list(range(len(perm)))

    # Indices of perm sorted by their value (inverse permutation)
    order = sorted(range(len(perm)), key=lambda idx: perm[idx])
    # Map through start_perm, shifting to 0-based numbering
    return [start_perm[idx] - 1 for idx in order]
274 275 @staticmethod 286 287 @staticmethod
@staticmethod
def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
    """Return the default vertex id: just the interaction id.
    As for legs, only the first entry of the returned tuple is used in
    tag comparisons; the second stores information excluded from
    comparisons and the third additional info on a shrunk loop vertex."""

    if not isinstance(vertex, base_objects.ContractedVertex):
        # Regular vertex: empty loop tag and no extra info
        return ((vertex.get('id'), ()), (), {})

    # Contracted (shrunk loop) vertex also carries its loop tag for
    # comparison and its external PDGs as auxiliary info
    return ((vertex.get('id'), vertex.get('loop_tag')), (),
            {'PDGs': vertex.get('PDGs')})
301 302 @staticmethod
@staticmethod
def flip_vertex(new_vertex, old_vertex, links):
    """Return the vertex id to use after flipping the tag direction:
    by default simply the new vertex id."""
    return new_vertex
306
def __eq__(self, other):
    """Two DiagramTags are equal iff they have the same concrete type
    and identical tags."""
    same_type = type(self) == type(other)
    return same_type and self.tag == other.tag
312
def __ne__(self, other):
    """Negation of __eq__."""
    if self.__eq__(other):
        return False
    return True
315
def __str__(self):
    """String representation: delegate to the underlying tag."""
    return "%s" % (self.tag,)
318
def __lt__(self, other):
    """Order DiagramTags by their underlying tags."""
    mine, theirs = self.tag, other.tag
    return mine < theirs
321
def __gt__(self, other):
    """Reverse ordering on the underlying tags."""
    mine, theirs = self.tag, other.tag
    return mine > theirs
324 325 __repr__ = __str__
326 430
431 #=============================================================================== 432 # Amplitude 433 #=============================================================================== 434 -class Amplitude(base_objects.PhysicsObject):
435 """Amplitude: process + list of diagrams (ordered) 436 Initialize with a process, then call generate_diagrams() to 437 generate the diagrams for the amplitude 438 """ 439
def default_setup(self):
    """Default values for all properties."""

    # True if the same process with the two incoming particles
    # interchanged has been generated as well
    self['has_mirror_process'] = False
    # Diagrams are generated lazily (see get); None means not yet done
    self['diagrams'] = None
    # The process this amplitude describes
    self['process'] = base_objects.Process()
448
def __init__(self, argument=None):
    """Allow initialization with Process.

    argument: a base_objects.Process (diagrams are then generated
    immediately), any initializer accepted by the mother class, or
    None for an empty amplitude.
    """
    if isinstance(argument, base_objects.Process):
        super(Amplitude, self).__init__()
        self.set('process', argument)
        self.generate_diagrams()
    # Identity test instead of equality: PEP 8, and avoids invoking a
    # custom __ne__ on the argument (original used `argument != None`)
    elif argument is not None:
        # call the mother routine
        super(Amplitude, self).__init__(argument)
    else:
        # call the mother routine
        super(Amplitude, self).__init__()
461
def filter(self, name, value):
    """Filter for valid amplitude property values.

    Raises self.PhysicsObjectError when value has the wrong type for
    the property name; returns True otherwise."""

    if name == 'process' and not isinstance(value, base_objects.Process):
        raise self.PhysicsObjectError(
            "%s is not a valid Process object" % str(value))
    if name == 'diagrams' and not isinstance(value, base_objects.DiagramList):
        raise self.PhysicsObjectError(
            "%s is not a valid DiagramList object" % str(value))
    if name == 'has_mirror_process' and not isinstance(value, bool):
        raise self.PhysicsObjectError(
            "%s is not a valid boolean" % str(value))
    return True
475
def get(self, name):
    """Get the value of the property name, generating the diagrams
    lazily on first access."""

    # Identity test instead of `== None` (PEP 8); diagrams are only
    # generated once a process has been defined
    if name == 'diagrams' and self[name] is None:
        # Have not yet generated diagrams for this process
        if self['process']:
            self.generate_diagrams()

    return super(Amplitude, self).get(name)
def get_sorted_keys(self):
    """Return diagram property names as a nicely sorted list."""

    return list(('process', 'diagrams', 'has_mirror_process'))
492
def get_number_of_diagrams(self):
    """Return the number of diagrams in this amplitude."""
    diagrams = self.get('diagrams')
    return len(diagrams)
496
def get_amplitudes(self):
    """Wrap this single amplitude in an AmplitudeList
    (needed for DecayChainAmplitude)."""

    return AmplitudeList([self])
502
def nice_string(self, indent=0):
    """Return a nicely formatted string of the amplitude content:
    the process followed by its diagrams."""
    parts = [self.get('process').nice_string(indent),
             self.get('diagrams').nice_string(indent)]
    return "\n".join(parts)
507
def nice_string_processes(self, indent=0):
    """Return a nicely formatted string of the amplitude process only."""
    process = self.get('process')
    return process.nice_string(indent)
511
def get_ninitial(self):
    """Return the number of initial-state particles in the process."""
    process = self.get('process')
    return process.get_ninitial()
515
def has_loop_process(self):
    """Return whether this amplitude has a loop process (truthy when
    the process carries perturbation couplings)."""

    process = self.get('process')
    return process.get('perturbation_couplings')
520
def generate_diagrams(self, returndiag=False, diagram_filter=False):
    """Generate diagrams. Algorithm:

    1. Define interaction dictionaries:
      * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
      * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1

    2. Set flag from_group=true for all external particles.
       Flip particle/anti particle for incoming particles.

    3. If there is a dictionary n->0 with n=number of external
       particles, create if possible the combination [(1,2,3,4,...)]
       with *at least two* from_group==true. This will give a
       finished (set of) diagram(s) (done by reduce_leglist)

    4. Create all allowed groupings of particles with at least one
       from_group==true (according to dictionaries n->1):
       [(1,2),3,4...],[1,(2,3),4,...],...,
       [(1,2),(3,4),...],...,[(1,2,3),4,...],...
       (done by combine_legs)

    5. Replace each group with a (list of) new particle(s) with number
       n = min(group numbers). Set from_group true for these
       particles and false for all other particles. Store vertex info.
       (done by merge_comb_legs)

    6. Stop algorithm when at most 2 particles remain.
       Return all diagrams (lists of vertices).

    7. Repeat from 3 (recursion done by reduce_leglist)

    8. Replace final p=p vertex

    Be aware that the resulting vertices have all particles outgoing,
    so need to flip for incoming particles when used.

    SPECIAL CASE: For A>BC... processes which are legs in decay
    chains, we need to ensure that BC... combine first, giving A=A
    as a final vertex. This case is defined by the Process
    property is_decay_chain = True.
    This function can also be called by the generate_diagram function
    of LoopAmplitudes, in which case the generated diagrams here must not
    be directly assigned to the 'diagrams' attributed but returned as a
    DiagramList by the function. This is controlled by the argument
    returndiag.

    Returns: when returndiag is False, a boolean (success of the
    crossing); when returndiag is True, the tuple
    (success, DiagramList). diagram_filter enables the user-defined
    plugin filter (see apply_user_filter).
    """

    process = self.get('process')
    model = process.get('model')
    legs = process.get('legs')
    # Make sure orders is the minimum of orders and overall_orders
    for key in process.get('overall_orders').keys():
        try:
            process.get('orders')[key] = \
                              min(process.get('orders')[key],
                                  process.get('overall_orders')[key])
        except KeyError:
            process.get('orders')[key] = process.get('overall_orders')[key]

    assert model.get('particles'), \
       "particles are missing in model: %s" % model.get('particles')

    assert model.get('interactions'), \
           "interactions are missing in model"

    res = base_objects.DiagramList()
    # First check that the number of fermions is even
    if len([leg for leg in legs if model.get('particle_dict')[\
                    leg.get('id')].is_fermion()]) % 2 == 1:
        if not returndiag:
            self['diagrams'] = res
            raise InvalidCmd('The number of fermion is odd')
        else:
            return False, res

    # Then check same number of incoming and outgoing fermions (if
    # no Majorana particles in model)
    if not model.get('got_majoranas') and \
       len([leg for leg in legs if leg.is_incoming_fermion(model)]) != \
       len([leg for leg in legs if leg.is_outgoing_fermion(model)]):
        if not returndiag:
            self['diagrams'] = res
            raise InvalidCmd('The number of of incoming/outcoming fermions are different')
        else:
            return False, res

    # Finally check that any charge conserved by all interactions is
    # globally conserved by this process.
    for charge in model.get('conserved_charge'):
        total = 0
        for leg in legs:
            part = model.get('particle_dict')[leg.get('id')]
            try:
                value = part.get(charge)
            except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
                # Fall back to a plain attribute; missing charge counts as 0
                try:
                    value = getattr(part, charge)
                except AttributeError:
                    value = 0

            # Subtract for incoming particles / outgoing antiparticles,
            # add otherwise (XOR of antiparticle-ness and final state)
            if (leg.get('id') != part['pdg_code']) != leg['state']:
                total -= value
            else:
                total += value

        if abs(total) > 1e-10:
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd('No %s conservation for this process ' % charge)
                # NOTE(review): unreachable -- the raise above exits first
                return res
            else:
                raise InvalidCmd('No %s conservation for this process ' % charge)
                # NOTE(review): unreachable -- the raise above exits first
                return res, res

    if not returndiag:
        logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))

    # Give numbers to legs in process
    for i in range(0, len(process.get('legs'))):
        # Make sure legs are unique
        leg = copy.copy(process.get('legs')[i])
        process.get('legs')[i] = leg
        if leg.get('number') == 0:
            leg.set('number', i + 1)

    # Copy leglist from process, so we can flip leg identities
    # without affecting the original process
    leglist = self.copy_leglist(process.get('legs'))

    for leg in leglist:
        # For the first step, ensure the tag from_group
        # is true for all legs
        leg.set('from_group', True)

        # Need to flip part-antipart for incoming particles,
        # so they are all outgoing
        if leg.get('state') == False:
            part = model.get('particle_dict')[leg.get('id')]
            leg.set('id', part.get_anti_pdg_code())

    # Calculate the maximal multiplicity of n-1>1 configurations
    # to restrict possible leg combinations
    max_multi_to1 = max([len(key) for key in \
                         model.get('ref_dict_to1').keys()])

    # Reduce the leg list and return the corresponding
    # list of vertices

    # For decay processes, the initial particle is forced to combine
    # only as the last step. This allows to use these amplitudes
    # in decay chains later on.
    is_decay_proc = process.get_ninitial() == 1
    if is_decay_proc:
        part = model.get('particle_dict')[leglist[0].get('id')]
        # For decay chain legs, we want everything to combine to
        # the initial leg. This is done by only allowing the
        # initial leg to combine as a final identity.
        ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
                        (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
        # Need to set initial leg from_group to None, to make sure
        # it can only be combined at the end.
        leglist[0].set('from_group', None)
        reduced_leglist = self.reduce_leglist(leglist,
                                              max_multi_to1,
                                              ref_dict_to0,
                                              is_decay_proc,
                                              process.get('orders'))
    else:
        reduced_leglist = self.reduce_leglist(leglist,
                                              max_multi_to1,
                                              model.get('ref_dict_to0'),
                                              is_decay_proc,
                                              process.get('orders'))

    # In LoopAmplitude the function below is overloaded such that it
    # converts back all DGLoopLegs to Legs. In the default tree-level
    # diagram generation, this does nothing.
    self.convert_dgleg_to_leg(reduced_leglist)

    if reduced_leglist:
        for vertex_list in reduced_leglist:
            res.append(self.create_diagram(base_objects.VertexList(vertex_list)))

    # Record whether or not we failed generation before required
    # s-channel propagators are taken into account
    failed_crossing = not res

    # Required s-channels is a list of id-lists. Select the
    # diagrams where all required s-channel propagators in any of
    # the lists are present (i.e., the different lists correspond
    # to "or", while the elements of the list correspond to
    # "and").
    if process.get('required_s_channels') and \
           process.get('required_s_channels')[0]:
        # We shouldn't look at the last vertex in each diagram,
        # since that is the n->0 vertex
        lastvx = -1
        # For decay chain processes, there is an "artificial"
        # extra vertex corresponding to particle 1=1, so we need
        # to exclude the two last vertexes.
        if is_decay_proc: lastvx = -2
        ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
        # Check required s-channels for each list in required_s_channels
        old_res = res
        res = base_objects.DiagramList()
        for id_list in process.get('required_s_channels'):
            res_diags = [diagram for diagram in old_res if all([req_s_channel in \
              [vertex.get_s_channel_id(\
               process.get('model'), ninitial) \
               for vertex in diagram.get('vertices')[:lastvx]] \
              for req_s_channel in \
              id_list])]
            # Add diagrams only if not already in res
            res.extend([diag for diag in res_diags if diag not in res])

    # Remove all diagrams in which a forbidden s-channel propagator
    # is present.
    # Note that we shouldn't look at the last vertex in each
    # diagram, since that is the n->0 vertex
    if process.get('forbidden_s_channels'):
        ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
        if ninitial == 2:
            res = base_objects.DiagramList(\
                [diagram for diagram in res if not any([vertex.get_s_channel_id(\
                 process.get('model'), ninitial) \
                 in process.get('forbidden_s_channels')
                 for vertex in diagram.get('vertices')[:-1]])])
        else:
            # Split the treatment since we need to avoid forbidding the
            # initial particle itself.
            newres= []
            for diagram in res:
                leg1 = 1
                # Check the latest vertex to see if leg 1 is inside; if it
                # is we need to inverse the look-up and allow the first
                # s-channel of the associated particles.
                vertex = diagram.get('vertices')[-1]
                if any([l['number'] ==1 for l in vertex.get('legs')]):
                    leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
                to_loop = list(range(len(diagram.get('vertices'))-1))
                if leg1 >1:
                    to_loop.reverse()
                for i in to_loop:
                    vertex = diagram.get('vertices')[i]
                    if leg1:
                        if any([l['number'] ==leg1 for l in vertex.get('legs')]):
                            leg1 = 0
                            continue
                    if vertex.get_s_channel_id(process.get('model'), ninitial)\
                                    in process.get('forbidden_s_channels'):
                        break
                else:
                    # No forbidden s-channel hit: keep the diagram
                    newres.append(diagram)
            res = base_objects.DiagramList(newres)

    # Mark forbidden (onshell) s-channel propagators, to forbid onshell
    # generation.
    if process.get('forbidden_onsh_s_channels'):
        ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])

        verts = base_objects.VertexList(sum([[vertex for vertex \
                                          in diagram.get('vertices')[:-1]
                                          if vertex.get_s_channel_id(\
                                          process.get('model'), ninitial) \
                                          in process.get('forbidden_onsh_s_channels')] \
                                          for diagram in res], []))
        for vert in verts:
            # Use onshell = False to indicate that this s-channel is forbidden
            newleg = copy.copy(vert.get('legs').pop(-1))
            newleg.set('onshell', False)
            vert.get('legs').append(newleg)

    # Set actual coupling orders for each diagram
    for diagram in res:
        diagram.calculate_orders(model)

    # Filter the diagrams according to the squared coupling order
    # constraints and possible the negative one. Remember that OrderName=-n
    # means that the user wants to include everything up to the N^(n+1)LO
    # contribution in that order and at most one order can be restricted
    # in this way. We shall do this only if the diagrams are not asked to
    # be returned, as it is the case for NLO because in this case the
    # interferences are not necessarily among the diagrams generated here only.
    if not returndiag and len(res)>0:
        res = self.apply_squared_order_constraints(res)

    if diagram_filter:
        res = self.apply_user_filter(res)

    # Replace final id=0 vertex if necessary
    if not process.get('is_decay_chain'):
        for diagram in res:
            vertices = diagram.get('vertices')
            if len(vertices) > 1 and vertices[-1].get('id') == 0:
                # Need to "glue together" last and next-to-last
                # vertex, by replacing the (incoming) last leg of the
                # next-to-last vertex with the (outgoing) leg in the
                # last vertex
                vertices = copy.copy(vertices)
                lastvx = vertices.pop()
                nexttolastvertex = copy.copy(vertices.pop())
                legs = copy.copy(nexttolastvertex.get('legs'))
                ntlnumber = legs[-1].get('number')
                lastleg = [leg for leg in lastvx.get('legs') if leg.get('number') != ntlnumber][0]
                # Reset onshell in case we have forbidden s-channels
                if lastleg.get('onshell') == False:
                    lastleg.set('onshell', None)
                # Replace the last leg of nexttolastvertex
                legs[-1] = lastleg
                nexttolastvertex.set('legs', legs)
                vertices.append(nexttolastvertex)
                diagram.set('vertices', vertices)

    if res and not returndiag:
        logger.info("Process has %d diagrams" % len(res))

    # Trim down number of legs and vertices used to save memory
    self.trim_diagrams(diaglist=res)

    # Sort process legs according to leg number
    pertur = 'QCD'
    if self.get('process')['perturbation_couplings']:
        pertur = sorted(self.get('process')['perturbation_couplings'])[0]
    self.get('process').get('legs').sort(pert=pertur)

    # Set diagrams to res if not asked to be returned
    if not returndiag:
        self['diagrams'] = res
        return not failed_crossing
    else:
        return not failed_crossing, res
855
def apply_squared_order_constraints(self, diag_list):
    """Applies the user specified squared order constraints on the diagram
    list in argument and returns the filtered DiagramList.
    Side effect: a negative squared order constraint is rewritten in
    self['process']['squared_orders'] as the computed positive target."""

    res = copy.copy(diag_list)

    # Apply the filtering on constrained amplitude (== and >)
    # No need to iterate on this one
    for name, (value, operator) in self['process'].get('constrained_orders').items():
        res.filter_constrained_orders(name, value, operator)

    # Iterate the filtering, since applying the constraint on one
    # type of coupling order can impact the filtering on a previous
    # one (relevant for the '==' type of constraint). Fixed point is
    # reached when the list size no longer shrinks.
    while True:
        new_res = res.apply_positive_sq_orders(res,
                             self['process'].get('squared_orders'),
                             self['process']['sqorders_types'])
        # Exit condition
        if len(res)==len(new_res):
            break
        elif (len(new_res)>len(res)):
            # Filtering must never add diagrams
            raise MadGraph5Error(
                'Inconsistency in function apply_squared_order_constraints().')
        # Actualizing the list of diagram for the next iteration
        res = new_res

    # Now treat the negative squared order constraint (at most one)
    neg_orders = [(order, value) for order, value in \
                  self['process'].get('squared_orders').items() if value<0]
    if len(neg_orders)==1:
        neg_order, neg_value = neg_orders[0]
        # Now check any negative order constraint
        res, target_order = res.apply_negative_sq_order(res, neg_order,\
                      neg_value, self['process']['sqorders_types'][neg_order])
        # Substitute the negative value with this positive one so that
        # the resulting computed constraint appears in the print out
        # and at the output stage we no longer have to deal with
        # negative valued target orders
        self['process']['squared_orders'][neg_order]=target_order
    elif len(neg_orders)>1:
        raise InvalidCmd('At most one negative squared order constraint'+\
                         ' can be specified, not %s.'%str(neg_orders))

    return res
903
def apply_user_filter(self, diag_list):
    """Applies the user-defined diagram filter (remove_diag function
    loaded from PLUGIN/user_filter.py) on the diagram list in argument
    and returns the filtered DiagramList."""

    # Developer toggle: flip to False to use the inline example filter
    # below instead of the plugin for quick tests
    if True:
        remove_diag = misc.plugin_import('user_filter',
                    'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed',
                    fcts=['remove_diag'])
    else:
        #example and simple tests
        def remove_diag(diag, model=None):
            """Example filter: drop diagrams with a light t-channel."""
            for vertex in diag['vertices']: #last
                if vertex['id'] == 0: #special final vertex
                    continue
                if vertex['legs'][-1]['number'] < 3: #this means T-channel
                    if abs(vertex['legs'][-1]['id']) <6:
                        return True
            return False

    res = diag_list.__class__()
    nb_removed = 0
    model = self['process']['model']
    for diag in diag_list:
        if remove_diag(diag, model):
            nb_removed +=1
        else:
            res.append(diag)

    if nb_removed:
        logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)

    return res
936 937 938
def create_diagram(self, vertexlist):
    """Build a Diagram from the given vertex list. Daughter classes
    can overload this to return specialized diagram types."""
    return base_objects.Diagram({'vertices': vertexlist})
943
def convert_dgleg_to_leg(self, vertexdoublelist):
    """Hook overloaded in LoopAmplitude to convert DGLoopLegs back
    into Legs; at tree level there is nothing to do."""
    return True
949
def copy_leglist(self, legs):
    """Return a shallow copy of the leg list. Overloaded in
    LoopAmplitude to return DGLoopLegs, which carry additional
    parameters used only during loop diagram generation."""

    copied_legs = [copy.copy(leg) for leg in legs]
    return base_objects.LegList(copied_legs)
958
def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                   is_decay_proc = False, coupling_orders = None):
    """Recursive function to reduce N LegList to N-1.
    For the algorithm, see doc for generate_diagrams.

    curr_leglist: LegList still to be combined (None means discard).
    max_multi_to1: largest multiplicity of the model's n-1>1 vertices.
    ref_dict_to0: n->0 combination dictionary (special for decays).
    is_decay_proc: True for 1 -> N decay processes.
    coupling_orders: remaining coupling order budget, or None.

    Returns a list of vertex lists (one per generated diagram), or
    None when no diagram can be built from this leg list."""

    # Result variable which is a list of lists of vertices
    # to be added
    res = []

    # Stop condition. If LegList is None, that means that this
    # diagram must be discarded
    if curr_leglist is None:
        return None

    # Extract ref dict information
    model = self.get('process').get('model')
    ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

    # If all legs can be combined in one single vertex, add this
    # vertex to res and continue.
    # Special treatment for decay chain legs

    if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
        # Extract the interaction id associated to the vertex

        vertex_ids = self.get_combined_vertices(curr_leglist,
                       copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                  leg in curr_leglist]))]))

        final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                               'id':vertex_id}) for \
                          vertex_id in vertex_ids]
        # Check for coupling orders. If orders < 0, skip vertex
        # (reduce_orders returns False on violation, hence `!= False`)
        for final_vertex in final_vertices:
            if self.reduce_orders(coupling_orders, model,
                                  [final_vertex.get('id')]) != False:
                res.append([final_vertex])
    # Stop condition 2: if the leglist contained exactly two particles,
    # return the result, if any, and stop.
    if len(curr_leglist) == 2:
        if res:
            return res
        else:
            return None

    # Create a list of all valid combinations of legs
    comb_lists = self.combine_legs(curr_leglist,
                                   ref_dict_to1, max_multi_to1)

    # Create a list of leglists/vertices by merging combinations
    leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

    # Consider all the pairs
    for leg_vertex_tuple in leg_vertex_list:

        # Remove forbidden particles
        if self.get('process').get('forbidden_particles') and \
            any([abs(vertex.get('legs')[-1].get('id')) in \
            self.get('process').get('forbidden_particles') \
            for vertex in leg_vertex_tuple[1]]):
                continue

        # Check for coupling orders. If couplings < 0, skip recursion.
        new_coupling_orders = self.reduce_orders(coupling_orders,
                                                 model,
                                                 [vertex.get('id') for vertex in \
                                                  leg_vertex_tuple[1]])
        if new_coupling_orders == False:
            # Some coupling order < 0
            continue

        # This is where recursion happens
        # First, reduce again the leg part
        reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                              max_multi_to1,
                                              ref_dict_to0,
                                              is_decay_proc,
                                              new_coupling_orders)
        # If there is a reduced diagram
        if reduced_diagram:
            # Combine the vertices of this step with every reduced
            # diagram (Cartesian expansion via expand_list_list)
            vertex_list_list = [list(leg_vertex_tuple[1])]
            vertex_list_list.append(reduced_diagram)
            expanded_list = expand_list_list(vertex_list_list)
            res.extend(expanded_list)

    return res
1047
def reduce_orders(self, coupling_orders, model, vertex_id_list):
    """Subtract the coupling orders of the listed vertices from
    coupling_orders and return the remainder.

    Returns False as soon as any tracked (non-negative) order would drop
    below zero, and None when no coupling_orders constraint was given
    (which counts as success). The special order WEIGHTED is the
    hierarchy-weighted sum of all couplings of an interaction. Negative
    constraints are ignored here, since they can only be applied after
    generation."""

    if not coupling_orders:
        return None

    remaining = copy.copy(coupling_orders)
    for vertex_id in vertex_id_list:
        # Identity vertices (id 0) carry no couplings
        if not vertex_id:
            continue
        orders = model.get("interaction_dict")[vertex_id].get('orders')
        for name, cost in orders.items():
            # A missing coupling is not treated as a constraint, and a
            # negative entry encodes "no constraint" as well.
            if name in remaining and remaining[name] >= 0:
                remaining[name] -= cost
                if remaining[name] < 0:
                    # Too many couplings of this type
                    return False
        # WEIGHTED constraint: sum of hierarchy value * multiplicity
        # over all couplings of this interaction
        if 'WEIGHTED' in remaining and remaining['WEIGHTED'] >= 0:
            hierarchy = model.get('order_hierarchy')
            remaining['WEIGHTED'] -= sum(hierarchy[c] * n
                                         for c, n in orders.items())
            if remaining['WEIGHTED'] < 0:
                # Total coupling weight too large
                return False

    return remaining
1089
def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
    """Recursive function. Take a list of legs as an input, with
    the reference dictionary n-1->1, and output a list of list of
    tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

    1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

    2. For each combination, say [34]. Check if combination is valid.
       If so:

       a. Append [12[34]56] to result array

       b. Split [123456] at index(first element in combination+1),
          i.e. [12],[456] and subtract combination from second half,
          i.e.: [456]-[34]=[56]. Repeat from 1. with this array

    3. Take result array from call to 1. (here, [[56]]) and append
       (first half in step b - combination) + combination + (result
       from 1.) = [12[34][56]] to result array

    4. After appending results from all n-combinations, return
       resulting array. Example, if [13] and [45] are valid
       combinations:
       [[[13]2456],[[13]2[45]6],[123[45]6]]
    """

    res = []

    # loop over possible combination lengths (+1 is for range convention!)
    for comb_length in range(2, max_multi_to1 + 1):

        # Check the considered length is not longer than the list length
        if comb_length > len(list_legs):
            return res

        # itertools.combinations returns all possible combinations
        # of comb_length elements from list_legs
        for comb in itertools.combinations(list_legs, comb_length):

            # Check if the combination is valid (i.e. the sorted id
            # tuple appears in the n-1 -> 1 reference dictionary)
            if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                # Identify the rest, create a list [comb,rest] and
                # add it to res.
                # NOTE: remove()/index() rely on Leg equality; with
                # duplicate (equal) legs the first matching element is
                # affected, which is the intended behavior here.
                res_list = copy.copy(list_legs)
                for leg in comb:
                    res_list.remove(leg)
                res_list.insert(list_legs.index(comb[0]), comb)
                res.append(res_list)

                # Now, deal with cases with more than 1 combination

                # First, split the list into two, according to the
                # position of the first element in comb, and remove
                # all elements form comb
                res_list1 = list_legs[0:list_legs.index(comb[0])]
                res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                for leg in comb[1:]:
                    res_list2.remove(leg)

                # Create a list of type [comb,rest1,rest2(combined)]
                # (res_list1 is a fresh slice, so appending is safe)
                res_list = res_list1
                res_list.append(comb)
                # This is where recursion actually happens,
                # on the second part
                for item in self.combine_legs(res_list2,
                                              ref_dict_to1,
                                              max_multi_to1):
                    final_res_list = copy.copy(res_list)
                    final_res_list.extend(item)
                    res.append(final_res_list)

    return res
1163 1164
def merge_comb_legs(self, comb_lists, ref_dict_to1):
    """Takes a list of allowed leg combinations as an input and returns
    a set of lists where combinations have been properly replaced
    (one list per element in the ref_dict, so that all possible intermediate
    particles are included). For each list, give the list of vertices
    corresponding to the executed merging, group the two as a tuple.
    """

    res = []

    for comb_list in comb_lists:

        # reduced_list: legs (or per-combination lists of candidate
        # legs) after merging; vertex_list: the matching vertex choices
        reduced_list = []
        vertex_list = []

        for entry in comb_list:

            # Act on all leg combinations (tuples mark combinations,
            # plain legs are passed through in the else branch below)
            if isinstance(entry, tuple):

                # Build the leg object which will replace the combination:
                # 1) leg ids is as given in the ref_dict
                leg_vert_ids = copy.copy(ref_dict_to1[\
                    tuple(sorted([leg.get('id') for leg in entry]))])
                # 2) number is the minimum of leg numbers involved in the
                # combination
                number = min([leg.get('number') for leg in entry])
                # 3) state is final, unless there is exactly one initial
                # state particle involved in the combination -> t-channel
                if len([leg for leg in entry if leg.get('state') == False]) == 1:
                    state = False
                else:
                    state = True
                # 4) from_group is True, by definition

                # Create and add the object. This is done by a
                # separate routine, to allow overloading by
                # daughter classes
                new_leg_vert_ids = []
                if leg_vert_ids:
                    new_leg_vert_ids = self.get_combined_legs(entry,
                                                              leg_vert_ids,
                                                              number,
                                                              state)

                reduced_list.append([l[0] for l in new_leg_vert_ids])


                # Create and add the corresponding vertex
                # Extract vertex ids corresponding to the various legs
                # in mylegs
                vlist = base_objects.VertexList()
                for (myleg, vert_id) in new_leg_vert_ids:
                    # Start with the considered combination...
                    myleglist = base_objects.LegList(list(entry))
                    # ... and complete with legs after reducing
                    myleglist.append(myleg)
                    # ... and consider the correct vertex id
                    vlist.append(base_objects.Vertex(
                                     {'legs':myleglist,
                                      'id':vert_id}))

                vertex_list.append(vlist)

            # If entry is not a combination, switch the from_group flag
            # and add it
            else:
                cp_entry = copy.copy(entry)
                # Need special case for from_group == None; this
                # is for initial state leg of decay chain process
                # (see Leg.can_combine_to_0)
                if cp_entry.get('from_group') != None:
                    cp_entry.set('from_group', False)
                reduced_list.append(cp_entry)

        # Flatten the obtained leg and vertex lists: expand_list
        # produces one flat list per choice of intermediate particle
        flat_red_lists = expand_list(reduced_list)
        flat_vx_lists = expand_list(vertex_list)

        # Combine the two lists in a list of tuple; the two expansions
        # are index-aligned by construction
        for i in range(0, len(flat_vx_lists)):
            res.append((base_objects.LegList(flat_red_lists[i]), \
                        base_objects.VertexList(flat_vx_lists[i])))

    return res
1250
def get_combined_legs(self, legs, leg_vert_ids, number, state):
    """Build one (Leg, vertex id) pair for each (leg id, vertex id)
    entry in leg_vert_ids. Daughter classes can overload this to
    customize how combined legs are created."""

    pairs = []
    for leg_id, vert_id in leg_vert_ids:
        new_leg = base_objects.Leg({'id': leg_id,
                                    'number': number,
                                    'state': state,
                                    'from_group': True})
        pairs.append((new_leg, vert_id))

    return pairs
1263
def get_combined_vertices(self, legs, vert_ids):
    """Hook for selecting vertex ids; the base implementation keeps
    them all. Daughter classes can overload this to filter vert_ids."""

    return vert_ids
1269
def trim_diagrams(self, decay_ids=None, diaglist=None):
    """Reduce the number of legs and vertices used in memory.

    Identical legs and vertices across the diagrams are replaced by
    references to a single shared instance. Final-state legs whose id
    is in decay_ids are flagged with onshell = True (they have decay
    chains attached); only external legs receive this flag.

    When called by a diagram generation initiated by LoopAmplitude,
    this function should not trim the diagrams in the attribute
    'diagrams' but rather a given list in the 'diaglist' argument.

    decay_ids: container of decaying particle ids (default: none).
               The previous mutable default argument ([]) is replaced
               by a None sentinel to avoid the shared-default pitfall.
    diaglist:  list of diagrams to trim (default: self's 'diagrams').
    """

    if decay_ids is None:
        decay_ids = []

    # Caches of unique legs/vertices seen so far; later duplicates are
    # replaced by these shared instances.
    legs = []
    vertices = []

    if diaglist is None:
        diaglist = self.get('diagrams')

    # Flag decaying legs in the core process by onshell = True
    process = self.get('process')
    for leg in process.get('legs'):
        if leg.get('state') and leg.get('id') in decay_ids:
            leg.set('onshell', True)

    for diagram in diaglist:
        # Keep track of external legs (leg numbers already used)
        leg_external = set()
        for ivx, vertex in enumerate(diagram.get('vertices')):
            for ileg, leg in enumerate(vertex.get('legs')):
                # Ensure that only external legs get decay flag
                if leg.get('state') and leg.get('id') in decay_ids and \
                   leg.get('number') not in leg_external:
                    # Use onshell to indicate decaying legs, i.e. legs
                    # that have decay chains; copy first so a cached,
                    # shared leg is never mutated
                    leg = copy.copy(leg)
                    leg.set('onshell', True)
                try:
                    index = legs.index(leg)
                except ValueError:
                    # First occurrence of this leg: cache it
                    vertex.get('legs')[ileg] = leg
                    legs.append(leg)
                else:  # Found a leg: reuse the cached instance
                    vertex.get('legs')[ileg] = legs[index]
                leg_external.add(leg.get('number'))
            try:
                index = vertices.index(vertex)
                diagram.get('vertices')[ivx] = vertices[index]
            except ValueError:
                vertices.append(vertex)
1313
#===============================================================================
# AmplitudeList
#===============================================================================
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this
        list to see if there is any which defines perturbation couplings.

        Fix: the original implementation fell off the end and returned
        None when no loop process was present; it now always returns a
        boolean. Both None and False are falsy, so callers relying on
        truthiness are unaffected."""

        return any(amp.has_loop_process() for amp in self)

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1333
#===============================================================================
# DecayChainAmplitude
#===============================================================================
class DecayChainAmplitude(Amplitude):
    """A list of amplitudes + a list of decay chain amplitude lists;
    corresponding to a ProcessDefinition with a list of decay chains
    """

    def default_setup(self):
        """Default values for all properties"""

        # Core amplitudes and their attached decay chains
        self['amplitudes'] = AmplitudeList()
        self['decay_chains'] = DecayChainAmplitudeList()

    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition"""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Local import to avoid a circular dependency with the loop
            # diagram generation module
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            # ProcessDefinition is handled by multi-amplitude generation;
            # a plain Process yields a single amplitude
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                   collect_mirror_procs,
                                                   ignore_six_quark_processes,
                                                   loop_filter=loop_filter,
                                                   diagram_filter=diagram_filter))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                    loop_filter=loop_filter,
                                                    diagram_filter=diagram_filter))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build one DecayChainAmplitude per decay chain
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error("Decay processes can not be perturbed")
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd("Decay chain process must have exactly one" + \
                                     " incoming particle")
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Flag decaying legs in the core diagrams by onshell = True
            # (decay_ids = initial-state ids of every decay amplitude)
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least some process;
            # ids found in a core process are removed, so only orphaned
            # decay ids remain in decay_ids afterwards
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Remove unused decays from the process list; iterate in
                # reverse so removal does not disturb the iteration
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        # Remove the amplitudes from this decay chain
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # If no amplitudes left, remove the decay chain
                        self['decay_chains'].remove(dc)

            # Finally, write a fat warning if any decay process has
            # the decaying particle (or its antiparticle) in the final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")


        elif argument != None:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__()

    def filter(self, name, value):
        """Filter for valid amplitude property values."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError("%s is not a valid AmplitudeList" % str(value))
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError("%s is not a valid DecayChainAmplitudeList object" % \
                                              str(value))
        return True

    def get_sorted_keys(self):
        """Return diagram property names as a nicely sorted list."""

        return ['amplitudes', 'decay_chains']

    # Helper functions

    def get_number_of_diagrams(self):
        """Returns number of diagrams for this amplitude
        (core diagrams plus, recursively, all decay-chain diagrams)"""
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))

    def nice_string(self, indent = 0):
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]

    def nice_string_processes(self, indent = 0):
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]

    def get_ninitial(self):
        """Returns the number of initial state particles in the process."""
        return self.get('amplitudes')[0].get('process').get_ninitial()

    def get_decay_ids(self):
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # Get all amplitudes for the decay processes
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            # For each amplitude, find the initial state leg
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Return a list with unique ids
        return list(set(decay_ids))

    def has_loop_process(self):
        """ Returns whether this amplitude has a loop process."""
        return self['amplitudes'].has_any_loop_process()

    def get_amplitudes(self):
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        amplitudes.extend(self.get('amplitudes'))
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
#===============================================================================
# DecayChainAmplitudeList
#===============================================================================
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """A PhysicsObjectList holding DecayChainAmplitude objects."""

    def is_valid_element(self, obj):
        """Accept obj only if it is a DecayChainAmplitude instance."""

        return isinstance(obj, DecayChainAmplitude)
1550 1551 #=============================================================================== 1552 # MultiProcess 1553 #=============================================================================== 1554 -class MultiProcess(base_objects.PhysicsObject):
1555 """MultiProcess: list of process definitions 1556 list of processes (after cleaning) 1557 list of amplitudes (after generation) 1558 """ 1559
def default_setup(self):
    """Initialize every property to its default value."""

    self['process_definitions'] = base_objects.ProcessDefinitionList()
    # 'amplitudes' may hold either an AmplitudeList or a
    # DecayChainAmplitudeList, depending on whether the process
    # definitions contain decay chains.
    self['amplitudes'] = AmplitudeList()
    # Whether initial-state mirror processes are combined together
    self['collect_mirror_procs'] = False
    # Quark flavors for which processes with at least six such quarks
    # (three quark lines) are ignored
    self['ignore_six_quark_processes'] = []
    # Allow use of the model's numerical parameter values for
    # optimization; currently used for 1->N generation (mass check).
    self['use_numerical'] = False
1576
def __init__(self, argument=None, collect_mirror_procs = False,
             ignore_six_quark_processes = None, optimize=False,
             loop_filter=None, diagram_filter=None):
    """Allow initialization with ProcessDefinition or
    ProcessDefinitionList.
    optimize allows to use param_card information. (useful for 1->N)

    Fix: ignore_six_quark_processes previously used a mutable default
    argument ([]) which was stored on the instance; a None sentinel is
    used instead so instances never share the default list."""

    if ignore_six_quark_processes is None:
        ignore_six_quark_processes = []

    if isinstance(argument, base_objects.ProcessDefinition):
        super(MultiProcess, self).__init__()
        self['process_definitions'].append(argument)
    elif isinstance(argument, base_objects.ProcessDefinitionList):
        super(MultiProcess, self).__init__()
        self['process_definitions'] = argument
    elif argument != None:
        # call the mother routine
        super(MultiProcess, self).__init__(argument)
    else:
        # call the mother routine
        super(MultiProcess, self).__init__()

    self['collect_mirror_procs'] = collect_mirror_procs
    self['ignore_six_quark_processes'] = ignore_six_quark_processes
    self['use_numerical'] = optimize
    self['loop_filter'] = loop_filter
    self['diagram_filter'] = diagram_filter # only True/False so far

    # For (lists of) process definitions, trigger diagram generation
    # right away: self.get('amplitudes') generates lazily on first access
    if isinstance(argument, base_objects.ProcessDefinition) or \
       isinstance(argument, base_objects.ProcessDefinitionList):
        # Generate the diagrams
        self.get('amplitudes')
1607 1608
def filter(self, name, value):
    """Validate a candidate value for the given process property name."""

    # Expected type and error-message template for each checked property
    checks = {
        'process_definitions': (base_objects.ProcessDefinitionList,
                                "%s is not a valid ProcessDefinitionList object"),
        'amplitudes': (AmplitudeList,
                       "%s is not a valid AmplitudeList object"),
        'collect_mirror_procs': (bool,
                                 "%s is not a valid boolean"),
        'ignore_six_quark_processes': (list,
                                       "%s is not a valid list"),
    }

    if name in checks:
        expected_type, message = checks[name]
        if not isinstance(value, expected_type):
            raise self.PhysicsObjectError(message % str(value))

    return True
1629
def get(self, name):
    """Get the value of the property name.

    Accessing 'amplitudes' triggers lazy diagram generation: if the
    stored amplitude list is still empty, amplitudes are generated
    first, one entry (or set of entries) per process definition."""

    if (name == 'amplitudes') and not self[name]:
        for process_def in self.get('process_definitions'):
            if process_def.get('decay_chains'):
                # This is a decay chain process
                # Store amplitude(s) as DecayChainAmplitude
                self['amplitudes'].append(\
                    DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        diagram_filter=self['diagram_filter']))
            else:
                self['amplitudes'].extend(\
                    self.generate_multi_amplitudes(process_def,
                                       self.get('collect_mirror_procs'),
                                       self.get('ignore_six_quark_processes'),
                                       self['use_numerical'],
                                       loop_filter=self['loop_filter'],
                                       diagram_filter=self['diagram_filter']))

    # Delegate the actual lookup to the PhysicsObject base class
    return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1653
def get_sorted_keys(self):
    """Give the process property names in their canonical order."""

    keys = ['process_definitions', 'amplitudes']
    return keys
1658
def get_model(self):
    """Return the model of the first process definition."""

    return self['process_definitions'][0]['model']
@classmethod
def generate_multi_amplitudes(cls,process_definition,
                              collect_mirror_procs = False,
                              ignore_six_quark_processes = [],
                              use_numerical=False,
                              loop_filter=None,
                              diagram_filter=False):
    """Generate amplitudes in a semi-efficient way.
    Make use of crossing symmetry for processes that fail diagram
    generation, but not for processes that succeed diagram
    generation. Doing so will risk making it impossible to
    identify processes with identical amplitudes.

    Fix: the single-failure re-raise at the end used
    "'error' in locals()", which never succeeds in Python 3 because
    the name bound by "except ... as error" is deleted when the
    except clause ends; the exception is now captured explicitly.
    """
    assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

    # Set automatic coupling orders
    process_definition.set('orders', MultiProcess.\
                           find_optimal_process_orders(process_definition,
                                                       diagram_filter))
    # Check for maximum orders from the model
    process_definition.check_expansion_orders()

    amplitudes = AmplitudeList()

    # failed_procs and success_procs are sorted processes that have
    # already failed/succeeded based on crossing symmetry
    failed_procs = []
    success_procs = []
    # Complete processes, for identification of mirror processes
    non_permuted_procs = []
    # permutations keeps the permutations of the crossed processes
    permutations = []
    # Last diagram-generation failure, kept so a lone failure can be
    # re-raised with its own message at the end
    error = None

    # Store the diagram tags for processes, to allow for
    # identifying identical matrix elements already at this stage.
    model = process_definition['model']

    islegs = [leg for leg in process_definition['legs'] \
              if leg['state'] == False]
    fslegs = [leg for leg in process_definition['legs'] \
              if leg['state'] == True]

    isids = [leg['ids'] for leg in process_definition['legs'] \
             if leg['state'] == False]
    fsids = [leg['ids'] for leg in process_definition['legs'] \
             if leg['state'] == True]
    polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
              if leg['state'] == True]
    # Generate all combinations for the initial state
    for prod in itertools.product(*isids):
        islegs = [\
                base_objects.Leg({'id':id, 'state': False,
                                  'polarization': islegs[i]['polarization']})
                for i,id in enumerate(prod)]

        # Generate all combinations for the final state, and make
        # sure to remove double counting

        red_fsidlist = set()

        for prod in itertools.product(*fsids):
            tag = zip(prod, polids)
            tag = sorted(tag)
            # Remove double counting between final states
            if tuple(tag) in red_fsidlist:
                continue

            red_fsidlist.add(tuple(tag))
            # Generate leg list for process
            leg_list = [copy.copy(leg) for leg in islegs]
            leg_list.extend([\
                    base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                    for i,id in enumerate(prod)])

            legs = base_objects.LegList(leg_list)

            # Check for crossed processes
            sorted_legs = sorted([(l,i+1) for (i,l) in \
                                 enumerate(legs.get_outgoing_id_list(model))])
            permutation = [l[1] for l in sorted_legs]

            sorted_legs = array.array('i', [l[0] for l in sorted_legs])

            # Check for six-quark processes
            if ignore_six_quark_processes and \
                   len([i for i in sorted_legs if abs(i) in \
                        ignore_six_quark_processes]) >= 6:
                continue

            # Check if crossed process has already failed,
            # in that case don't check process
            if sorted_legs in failed_procs:
                continue

            # If allowed check mass validity [assume 1->N]
            if use_numerical:
                # check that final state has lower mass than initial state
                initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                if initial_mass == 0:
                    continue
                for leg in legs[1:]:
                    m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                    initial_mass -= abs(m)
                if initial_mass.real <= 0:
                    continue

            # Setup process
            process = process_definition.get_process_with_legs(legs)

            fast_proc = \
                      array.array('i',[leg.get('id') for leg in legs])
            if collect_mirror_procs and \
                   process_definition.get_ninitial() == 2:
                # Check if mirrored process is already generated
                mirror_proc = \
                            array.array('i', [fast_proc[1], fast_proc[0]] + \
                                        list(fast_proc[2:]))
                try:
                    mirror_amp = \
                               amplitudes[non_permuted_procs.index(mirror_proc)]
                except Exception:
                    # Didn't find any mirror process
                    pass
                else:
                    # Mirror process found
                    mirror_amp.set('has_mirror_process', True)
                    logger.info("Process %s added to mirror process %s" % \
                                (process.base_string(),
                                 mirror_amp.get('process').base_string()))
                    continue

            # Check for successful crossings, unless we have specified
            # properties that break crossing symmetry
            if not process.get('required_s_channels') and \
               not process.get('forbidden_onsh_s_channels') and \
               not process.get('forbidden_s_channels') and \
               not process.get('is_decay_chain') and not diagram_filter:
                try:
                    crossed_index = success_procs.index(sorted_legs)
                    # The relabeling of legs for loop amplitudes is cumbersome
                    # and does not save so much time. It is disable here and
                    # we use the key 'loop_diagrams' to decide whether
                    # it is an instance of LoopAmplitude.
                    if 'loop_diagrams' in amplitudes[crossed_index]:
                        raise ValueError
                except ValueError:
                    # No crossing found, just continue
                    pass
                else:
                    # Found crossing - reuse amplitude
                    amplitude = MultiProcess.cross_amplitude(\
                        amplitudes[crossed_index],
                        process,
                        permutations[crossed_index],
                        permutation)
                    amplitudes.append(amplitude)
                    success_procs.append(sorted_legs)
                    permutations.append(permutation)
                    non_permuted_procs.append(fast_proc)
                    logger.info("Crossed process found for %s, reuse diagrams." % \
                                process.base_string())
                    continue

            # Create new amplitude
            amplitude = cls.get_amplitude_from_proc(process,
                                                    loop_filter=loop_filter)

            try:
                result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
            except InvalidCmd as err:
                # Keep a reference to the exception: in Python 3 the
                # name bound by "as" is unbound once the clause ends
                error = err
                failed_procs.append(sorted_legs)
            else:
                # Succeeded in generating diagrams
                if amplitude.get('diagrams'):
                    amplitudes.append(amplitude)
                    success_procs.append(sorted_legs)
                    permutations.append(permutation)
                    non_permuted_procs.append(fast_proc)
                elif not result:
                    # Diagram generation failed for all crossings
                    failed_procs.append(sorted_legs)

    # Raise exception if there are no amplitudes for this process
    if not amplitudes:
        if len(failed_procs) == 1 and error is not None:
            # A single process failed: re-raise its own error
            raise error
        else:
            raise NoDiagramException("No amplitudes generated from process %s. Please enter a valid process" % \
                                     process_definition.nice_string())


    # Return the produced amplitudes
    return amplitudes
@classmethod
def get_amplitude_from_proc(cls,proc,**opts):
    """Build the amplitude object matching the characteristics of the
    process proc. The only option that could be specified here is
    loop_filter, which is of course not relevant for a tree amplitude
    and is therefore ignored."""

    amplitude = Amplitude({"process": proc})
    return amplitude
@staticmethod
def find_optimal_process_orders(process_definition, diagram_filter=False):
    """Find the minimal WEIGHTED order for this set of processes.

    The algorithm:

    1) Check the coupling hierarchy of the model. Assign all
    particles to the different coupling hierarchies so that a
    particle is considered to be in the highest hierarchy (i.e.,
    with lowest value) where it has an interaction.

    2) Pick out the legs in the multiprocess according to the
    highest hierarchy represented (so don't mix particles from
    different hierarchy classes in the same multiparticles!)

    3) Find the starting maximum WEIGHTED order as the sum of the
    highest n-2 weighted orders

    4) Pick out required s-channel particle hierarchies, and use
    the highest of the maximum WEIGHTED order from the legs and
    the minimum WEIGHTED order extracted from 2*s-channel
    hierarchys plus the n-2-2*(number of s-channels) lowest
    leg weighted orders.

    5) Run process generation with the WEIGHTED order determined
    in 3)-4) - # final state gluons, with all gluons removed from
    the final state

    6) If no process is found, increase WEIGHTED order by 1 and go
    back to 5), until we find a process which passes. Return that
    order.

    7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
    If still no process has passed, return
    WEIGHTED = (n-2)*(highest hierarchy)
    """

    assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

    processes = base_objects.ProcessList()
    amplitudes = AmplitudeList()

    # If there are already couplings defined, return
    if process_definition.get('orders') or \
           process_definition.get('overall_orders') or \
           process_definition.get('NLO_mode')=='virt':
        return process_definition.get('orders')

    # If this is a decay process (and not a decay chain), return
    if process_definition.get_ninitial() == 1 and not \
           process_definition.get('is_decay_chain'):
        return process_definition.get('orders')

    logger.info("Checking for minimal orders which gives processes.")
    logger.info("Please specify coupling orders to bypass this step.")

    # Calculate minimum starting guess for WEIGHTED order
    max_order_now, particles, hierarchy = \
                   process_definition.get_minimum_WEIGHTED()
    coupling = 'WEIGHTED'

    model = process_definition.get('model')

    # Extract the initial and final leg ids
    isids = [leg['ids'] for leg in \
             [leg for leg in process_definition['legs'] if leg['state'] == False]]
    fsids = [leg['ids'] for leg in \
             [leg for leg in process_definition['legs'] if leg['state'] == True]]

    # Hard upper bound for the scan: (n-2) * highest hierarchy value
    max_WEIGHTED_order = \
                    (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())
    # get the definition of the WEIGHTED (human-readable string used
    # only in the log message below)
    hierarchydef = process_definition['model'].get('order_hierarchy')
    tmp = []
    hierarchy = list(hierarchydef.items())
    hierarchy.sort()
    for key, value in hierarchydef.items():
        if value>1:
            tmp.append('%s*%s' % (value,key))
        else:
            tmp.append('%s' % key)
    wgtdef = '+'.join(tmp)
    # Run diagram generation with increasing max_order_now until
    # we manage to get diagrams
    while max_order_now < max_WEIGHTED_order:
        logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef))

        # Silence info-level output during the trial generations;
        # restored before every exit point below
        oldloglevel = logger.level
        logger.setLevel(logging.WARNING)

        # failed_procs are processes that have already failed
        # based on crossing symmetry
        failed_procs = []
        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [ base_objects.Leg({'id':id, 'state': False}) \
                       for id in prod]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = []

            for prod in itertools.product(*fsids):

                # Remove double counting between final states
                if tuple(sorted(prod)) in red_fsidlist:
                    continue

                red_fsidlist.append(tuple(sorted(prod)));

                # Remove gluons from final state if QCD is among
                # the highest coupling hierarchy
                nglue = 0
                if 21 in particles[0]:
                    nglue = len([id for id in prod if id == 21])
                    prod = [id for id in prod if id != 21]

                # Generate leg list for process
                leg_list = [copy.copy(leg) for leg in islegs]

                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                legs = base_objects.LegList(leg_list)

                # Set summed coupling order according to max_order_now
                # subtracting the removed gluons
                coupling_orders_now = {coupling: max_order_now - \
                                       nglue * model['order_hierarchy']['QCD']}

                # Setup process
                # NOTE(review): 'split_orders' appears twice in this
                # dict literal with the same value; the later entry
                # wins, so behavior is unaffected.
                process = base_objects.Process({\
                                'legs':legs,
                                'model':model,
                                'id': process_definition.get('id'),
                                'orders': coupling_orders_now,
                                'required_s_channels': \
                                   process_definition.get('required_s_channels'),
                                'forbidden_onsh_s_channels': \
                                   process_definition.get('forbidden_onsh_s_channels'),
                                'sqorders_types': \
                                   process_definition.get('sqorders_types'),
                                'squared_orders': \
                                   process_definition.get('squared_orders'),
                                'split_orders': \
                                   process_definition.get('split_orders'),
                                'forbidden_s_channels': \
                                   process_definition.get('forbidden_s_channels'),
                                'forbidden_particles': \
                                   process_definition.get('forbidden_particles'),
                                'is_decay_chain': \
                                   process_definition.get('is_decay_chain'),
                                'overall_orders': \
                                   process_definition.get('overall_orders'),
                                'split_orders': \
                                   process_definition.get('split_orders')})

                # Check for couplings with given expansion orders
                process.check_expansion_orders()

                # Check for crossed processes
                sorted_legs = sorted(legs.get_outgoing_id_list(model))
                # Check if crossed process has already failed
                # In that case don't check process
                if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'):
                    continue

                amplitude = Amplitude({'process': process})
                try:
                    amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(tuple(sorted_legs))
                else:
                    if amplitude.get('diagrams'):
                        # We found a valid amplitude. Return this order number
                        logger.setLevel(oldloglevel)
                        return {coupling: max_order_now}
                    else:
                        failed_procs.append(tuple(sorted_legs))
        # No processes found, increase max_order_now
        max_order_now += 1
        logger.setLevel(oldloglevel)

    # If no valid processes found with nfinal-1 couplings, return maximal
    return {coupling: max_order_now}
2058 2059 @staticmethod
2060 - def cross_amplitude(amplitude, process, org_perm, new_perm):
2061 """Return the amplitude crossed with the permutation new_perm""" 2062 # Create dict from original leg numbers to new leg numbers 2063 perm_map = dict(list(zip(org_perm, new_perm))) 2064 # Initiate new amplitude 2065 new_amp = copy.copy(amplitude) 2066 # Number legs 2067 for i, leg in enumerate(process.get('legs')): 2068 leg.set('number', i+1) 2069 # Set process 2070 new_amp.set('process', process) 2071 # Now replace the leg numbers in the diagrams 2072 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 2073 process.get('legs'),) for \ 2074 d in new_amp.get('diagrams')]) 2075 new_amp.set('diagrams', diagrams) 2076 new_amp.trim_diagrams() 2077 2078 # Make sure to reset mirror process 2079 new_amp.set('has_mirror_process', False) 2080 2081 return new_amp
2082
2083 #=============================================================================== 2084 # Global helper methods 2085 #=============================================================================== 2086 2087 -def expand_list(mylist):
2088 """Takes a list of lists and elements and returns a list of flat lists. 2089 Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]] 2090 """ 2091 2092 # Check that argument is a list 2093 assert isinstance(mylist, list), "Expand_list argument must be a list" 2094 2095 res = [] 2096 2097 tmplist = [] 2098 for item in mylist: 2099 if isinstance(item, list): 2100 tmplist.append(item) 2101 else: 2102 tmplist.append([item]) 2103 2104 for item in itertools.product(*tmplist): 2105 res.append(list(item)) 2106 2107 return res
2108
2109 -def expand_list_list(mylist):
2110 """Recursive function. Takes a list of lists and lists of lists 2111 and returns a list of flat lists. 2112 Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]] 2113 """ 2114 2115 res = [] 2116 2117 if not mylist or len(mylist) == 1 and not mylist[0]: 2118 return [[]] 2119 2120 # Check the first element is at least a list 2121 assert isinstance(mylist[0], list), \ 2122 "Expand_list_list needs a list of lists and lists of lists" 2123 2124 # Recursion stop condition, one single element 2125 if len(mylist) == 1: 2126 if isinstance(mylist[0][0], list): 2127 return mylist[0] 2128 else: 2129 return mylist 2130 2131 if isinstance(mylist[0][0], list): 2132 for item in mylist[0]: 2133 # Here the recursion happens, create lists starting with 2134 # each element of the first item and completed with 2135 # the rest expanded 2136 for rest in expand_list_list(mylist[1:]): 2137 reslist = copy.copy(item) 2138 reslist.extend(rest) 2139 res.append(reslist) 2140 else: 2141 for rest in expand_list_list(mylist[1:]): 2142 reslist = copy.copy(mylist[0]) 2143 reslist.extend(rest) 2144 res.append(reslist) 2145 2146 2147 return res
2148