Package madgraph :: Package core :: Module diagram_generation
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
  18  multiparticle definitions. DiagramTag allows to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
  22  from __future__ import absolute_import 
  23  from six.moves import filter 
   24  # force filter to be lazy (a generator), as in Python 3 
  25   
  26   
  27  import array 
  28  import copy 
  29  import itertools 
  30  import logging 
  31   
  32  import madgraph.core.base_objects as base_objects 
  33  import madgraph.various.misc as misc 
  34  from madgraph import InvalidCmd, MadGraph5Error 
  35  from six.moves import range 
  36  from six.moves import zip 
  37   
  38  logger = logging.getLogger('madgraph.diagram_generation') 
class NoDiagramException(InvalidCmd):
    """Raised when a requested process yields no valid diagrams."""
    pass
42
43 #=============================================================================== 44 # DiagramTag mother class 45 #=============================================================================== 46 47 -class DiagramTag(object):
48 """Class to tag diagrams based on objects with some __lt__ measure, e.g. 49 PDG code/interaction id (for comparing diagrams from the same amplitude), 50 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs). 51 Algorithm: Create chains starting from external particles: 52 1 \ / 6 53 2 /\______/\ 7 54 3_ / | \_ 8 55 4 / 5 \_ 9 56 \ 10 57 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867) 58 (((1,2,id12),(3,4,id34)),id1234), 59 5,id91086712345) 60 where idN is the id of the corresponding interaction. The ordering within 61 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has 62 depth 4, 5 has depht 0), and if equal on the ordering of the chain elements. 63 The determination of central vertex is based on minimizing the chain length 64 for the longest subchain. 65 This gives a unique tag which can be used to identify diagrams 66 (instead of symmetry), as well as identify identical matrix elements from 67 different processes.""" 68
69 - class DiagramTagError(Exception):
70 """Exception for any problems in DiagramTags""" 71 pass
72
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        diagram: a base_objects.Diagram to be tagged
        model: model used by link_from_leg/vertex_id_from_vertex (may be None
               for daughter classes that do not need it)
        ninitial: number of initial-state particles (default 2)
        """

        # wf_dict keeps track of the intermediate particles
        # (maps leg number -> the chain link that produced that leg)
        leg_dict = {}
        # Create the chain which will be the diagram tag
        for vertex in diagram.get('vertices'):
            # Only add incoming legs (the last leg is the outgoing one)
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # If last vertex, all legs are incoming
                legs = vertex.get('legs')
            # Add links corresponding to the relevant legs: reuse the link
            # already built for an internal leg, or create an end link for
            # an external one.
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                  DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))
            # Add vertex to leg_dict if not last one
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The resulting link is the hypothetical result
        self.tag = link

        # Now make sure to find the central vertex in the diagram,
        # defined by the longest leg being as short as possible.
        # (links are kept sorted, so links[0] is the deepest chain)
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Identify the longest chain in the tag
            longest_chain = self.tag.links[0]
            # Create a new link corresponding to moving one step
            # towards the longest chain
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))
            # Create a new final vertex in the direction of the longest link
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # Switch to new tag, continue search
                self.tag = other_link
            else:
                # We have found the central vertex
                done = True
128
129 - def get_external_numbers(self):
130 """Get the order of external particles in this tag""" 131 return self.tag.get_external_numbers()
132
133 - def diagram_from_tag(self, model):
134 """Output a diagram from a DiagramTag. Note that each daughter 135 class must implement the static functions id_from_vertex_id 136 (if the vertex id is something else than an integer) and 137 leg_from_link (to pass the correct info from an end link to a 138 leg).""" 139 140 # Create the vertices, starting from the final vertex 141 diagram = base_objects.Diagram({'vertices': \ 142 self.vertices_from_link(self.tag, 143 model, 144 True)}) 145 diagram.calculate_orders(model) 146 return diagram
147 148 @classmethod 183 184 @classmethod
185 - def legPDGs_from_vertex_id(cls, vertex_id,model):
186 """Returns the list of external PDGs of the interaction corresponding 187 to this vertex_id.""" 188 189 # In case we have to deal with a regular vertex, we return the list 190 # external PDGs as given by the model information on that integer 191 # vertex id. 192 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]): 193 return vertex_id[2]['PDGs'] 194 else: 195 return [part.get_pdg_code() for part in model.get_interaction( 196 cls.id_from_vertex_id(vertex_id)).get('particles')]
197 198 @classmethod
199 - def leg_from_legs(cls,legs, vertex_id, model):
200 """Return a leg from a leg list and the model info""" 201 202 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model)) 203 204 # Extract the resulting pdg code from the interaction pdgs 205 for pdg in [leg.get('id') for leg in legs]: 206 pdgs.remove(pdg) 207 208 assert len(pdgs) == 1 209 # Prepare the new leg properties 210 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code() 211 number = min([l.get('number') for l in legs]) 212 # State is False for t-channel, True for s-channel 213 state = (len([l for l in legs if l.get('state') == False]) != 1) 214 # Note that this needs to be done before combining decay chains 215 onshell= False 216 217 return base_objects.Leg({'id': pdg, 218 'number': number, 219 'state': state, 220 'onshell': onshell})
221 222 @classmethod 235 236 @staticmethod 249 250 @staticmethod
251 - def id_from_vertex_id(vertex_id):
252 """Return the numerical vertex id from a link.vertex_id""" 253 254 return vertex_id[0][0]
255 256 @staticmethod
257 - def loop_info_from_vertex_id(vertex_id):
258 """Return the loop_info stored in this vertex id. Notice that the 259 IdentifyME tag does not store the loop_info, but should normally never 260 need access to it.""" 261 262 return vertex_id[2]
263 264 @staticmethod
265 - def reorder_permutation(perm, start_perm):
266 """Reorder a permutation with respect to start_perm. Note that 267 both need to start from 1.""" 268 if perm == start_perm: 269 return list(range(len(perm))) 270 order = [i for (p,i) in \ 271 sorted([(p,i) for (i,p) in enumerate(perm)])] 272 return [start_perm[i]-1 for i in order]
273 274 @staticmethod 285 286 @staticmethod
287 - def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
288 """Returns the default vertex id: just the interaction id 289 Note that in the vertex id, like the leg, only the first entry is 290 taken into account in the tag comparison, while the second is for 291 storing information that is not to be used in comparisons and the 292 third for additional info regarding the shrunk loop vertex.""" 293 294 if isinstance(vertex,base_objects.ContractedVertex): 295 # return (vertex.get('id'),(),{'PDGs':vertex.get('PDGs')}) 296 return ((vertex.get('id'),vertex.get('loop_tag')),(), 297 {'PDGs':vertex.get('PDGs')}) 298 else: 299 return ((vertex.get('id'),()),(),{})
300 301 @staticmethod
302 - def flip_vertex(new_vertex, old_vertex, links):
303 """Returns the default vertex flip: just the new_vertex""" 304 return new_vertex
305
306 - def __eq__(self, other):
307 """Equal if same tag""" 308 if type(self) != type(other): 309 return False 310 return self.tag == other.tag
311
312 - def __ne__(self, other):
313 return not self.__eq__(other)
314
315 - def __str__(self):
316 return str(self.tag)
317
318 - def __lt__(self, other):
319 return self.tag < other.tag
320
321 - def __gt__(self, other):
322 return self.tag > other.tag
323 324 __repr__ = __str__
325 430
431 #=============================================================================== 432 # Amplitude 433 #=============================================================================== 434 -class Amplitude(base_objects.PhysicsObject):
435 """Amplitude: process + list of diagrams (ordered) 436 Initialize with a process, then call generate_diagrams() to 437 generate the diagrams for the amplitude 438 """ 439
440 - def default_setup(self):
441 """Default values for all properties""" 442 443 self['process'] = base_objects.Process() 444 self['diagrams'] = None 445 # has_mirror_process is True if the same process but with the 446 # two incoming particles interchanged has been generated 447 self['has_mirror_process'] = False
448
449 - def __init__(self, argument=None):
450 """Allow initialization with Process""" 451 if isinstance(argument, base_objects.Process): 452 super(Amplitude, self).__init__() 453 self.set('process', argument) 454 self.generate_diagrams() 455 elif argument != None: 456 # call the mother routine 457 super(Amplitude, self).__init__(argument) 458 else: 459 # call the mother routine 460 super(Amplitude, self).__init__()
461
462 - def filter(self, name, value):
463 """Filter for valid amplitude property values.""" 464 465 if name == 'process': 466 if not isinstance(value, base_objects.Process): 467 raise self.PhysicsObjectError("%s is not a valid Process object" % str(value)) 468 if name == 'diagrams': 469 if not isinstance(value, base_objects.DiagramList): 470 raise self.PhysicsObjectError("%s is not a valid DiagramList object" % str(value)) 471 if name == 'has_mirror_process': 472 if not isinstance(value, bool): 473 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value)) 474 return True
475
476 - def get(self, name):
477 """Get the value of the property name.""" 478 479 if name == 'diagrams' and self[name] == None: 480 # Have not yet generated diagrams for this process 481 if self['process']: 482 self.generate_diagrams() 483 484 return super(Amplitude, self).get(name)
485 # return Amplitude.__bases__[0].get(self, name) #return the mother routine 486 487
488 - def get_sorted_keys(self):
489 """Return diagram property names as a nicely sorted list.""" 490 491 return ['process', 'diagrams', 'has_mirror_process']
492
493 - def get_number_of_diagrams(self):
494 """Returns number of diagrams for this amplitude""" 495 return len(self.get('diagrams'))
496
497 - def get_amplitudes(self):
498 """Return an AmplitudeList with just this amplitude. 499 Needed for DecayChainAmplitude.""" 500 501 return AmplitudeList([self])
502
503 - def nice_string(self, indent=0):
504 """Returns a nicely formatted string of the amplitude content.""" 505 return self.get('process').nice_string(indent) + "\n" + \ 506 self.get('diagrams').nice_string(indent)
507
508 - def nice_string_processes(self, indent=0):
509 """Returns a nicely formatted string of the amplitude process.""" 510 return self.get('process').nice_string(indent)
511
512 - def get_ninitial(self):
513 """Returns the number of initial state particles in the process.""" 514 return self.get('process').get_ninitial()
515
516 - def has_loop_process(self):
517 """ Returns wether this amplitude has a loop process.""" 518 519 return self.get('process').get('perturbation_couplings')
520
    def generate_diagrams(self, returndiag=False, diagram_filter=False):
        """Generate diagrams. Algorithm:

        1. Define interaction dictionaries:
          * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
          * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1

        2. Set flag from_group=true for all external particles.
           Flip particle/anti particle for incoming particles.

        3. If there is a dictionary n->0 with n=number of external
           particles, create if possible the combination [(1,2,3,4,...)]
           with *at least two* from_group==true. This will give a
           finished (set of) diagram(s) (done by reduce_leglist)

        4. Create all allowed groupings of particles with at least one
           from_group==true (according to dictionaries n->1):
           [(1,2),3,4...],[1,(2,3),4,...],...,
           [(1,2),(3,4),...],...,[(1,2,3),4,...],...
           (done by combine_legs)

        5. Replace each group with a (list of) new particle(s) with number
           n = min(group numbers). Set from_group true for these
           particles and false for all other particles. Store vertex info.
           (done by merge_comb_legs)

        6. Stop algorithm when at most 2 particles remain.
           Return all diagrams (lists of vertices).

        7. Repeat from 3 (recursion done by reduce_leglist)

        8. Replace final p=p vertex

        Be aware that the resulting vertices have all particles outgoing,
        so need to flip for incoming particles when used.

        SPECIAL CASE: For A>BC... processes which are legs in decay
        chains, we need to ensure that BC... combine first, giving A=A
        as a final vertex. This case is defined by the Process
        property is_decay_chain = True.
        This function can also be called by the generate_diagram function
        of LoopAmplitudes, in which case the generated diagrams here must not
        be directly assigned to the 'diagrams' attribute but returned as a
        DiagramList by the function. This is controlled by the argument
        returndiag.
        """

        process = self.get('process')
        model = process.get('model')
        legs = process.get('legs')
        # Make sure orders is the minimum of orders and overall_orders
        for key in process.get('overall_orders').keys():
            try:
                process.get('orders')[key] = \
                                 min(process.get('orders')[key],
                                     process.get('overall_orders')[key])
            except KeyError:
                # Order only given as overall_orders: copy it over
                process.get('orders')[key] = process.get('overall_orders')[key]

        assert model.get('particles'), \
           "particles are missing in model: %s" % model.get('particles')

        assert model.get('interactions'), \
               "interactions are missing in model"

        res = base_objects.DiagramList()
        # First check that the number of fermions is even
        if len([leg for leg in legs if model.get('particle_dict')[\
                        leg.get('id')].is_fermion()]) % 2 == 1:
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd('The number of fermion is odd')
            else:
                return False, res

        # Then check same number of incoming and outgoing fermions (if
        # no Majorana particles in model)
        if not model.get('got_majoranas') and \
           len([leg for leg in legs if leg.is_incoming_fermion(model)]) != \
           len([leg for leg in legs if leg.is_outgoing_fermion(model)]):
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd('The number of of incoming/outcoming fermions are different')
            else:
                return False, res

        # Finally check that each charge conserved by all interactions is
        # globally conserved in this process.
        for charge in model.get('conserved_charge'):
            total = 0
            for leg in legs:
                part = model.get('particle_dict')[leg.get('id')]
                try:
                    value = part.get(charge)
                except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
                    try:
                        value = getattr(part, charge)
                    except AttributeError:
                        # Particle does not carry this charge at all
                        value = 0

                # Subtract for antiparticles/incoming, add otherwise
                if (leg.get('id') != part['pdg_code']) != leg['state']:
                    total -= value
                else:
                    total += value

            # Tolerance for non-integer (float-valued) charges
            if abs(total) > 1e-10:
                if not returndiag:
                    self['diagrams'] = res
                    raise InvalidCmd('No %s conservation for this process ' % charge)
                    return res  # NOTE: unreachable after the raise above
                else:
                    raise InvalidCmd('No %s conservation for this process ' % charge)
                    return res, res  # NOTE: unreachable after the raise above

        if not returndiag:
            logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))

        # Give numbers to legs in process
        for i in range(0, len(process.get('legs'))):
            # Make sure legs are unique
            leg = copy.copy(process.get('legs')[i])
            process.get('legs')[i] = leg
            if leg.get('number') == 0:
                leg.set('number', i + 1)

        # Copy leglist from process, so we can flip leg identities
        # without affecting the original process
        leglist = self.copy_leglist(process.get('legs'))

        for leg in leglist:
            # For the first step, ensure the tag from_group
            # is true for all legs
            leg.set('from_group', True)

            # Need to flip part-antipart for incoming particles,
            # so they are all outgoing
            if leg.get('state') == False:
                part = model.get('particle_dict')[leg.get('id')]
                leg.set('id', part.get_anti_pdg_code())

        # Calculate the maximal multiplicity of n-1>1 configurations
        # to restrict possible leg combinations
        max_multi_to1 = max([len(key) for key in \
                             model.get('ref_dict_to1').keys()])


        # Reduce the leg list and return the corresponding
        # list of vertices

        # For decay processes, generate starting from final-state
        # combined only as the last particle. This allows to use these
        # in decay chains later on.
        is_decay_proc = process.get_ninitial() == 1
        if is_decay_proc:
            part = model.get('particle_dict')[leglist[0].get('id')]
            # For decay chain legs, we want everything to combine to
            # the initial leg. This is done by only allowing the
            # initial leg to combine as a final identity.
            ref_dict_to0 = {(part.get_pdg_code(), part.get_anti_pdg_code()):[0],
                            (part.get_anti_pdg_code(), part.get_pdg_code()):[0]}
            # Need to set initial leg from_group to None, to make sure
            # it can only be combined at the end.
            leglist[0].set('from_group', None)
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  process.get('orders'))
        else:
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  model.get('ref_dict_to0'),
                                                  is_decay_proc,
                                                  process.get('orders'))

        # In LoopAmplitude the function below is overloaded such that it
        # converts back all DGLoopLegs to Legs. In the default tree-level
        # diagram generation, this does nothing.
        self.convert_dgleg_to_leg(reduced_leglist)

        if reduced_leglist:
            for vertex_list in reduced_leglist:
                res.append(self.create_diagram(base_objects.VertexList(vertex_list)))

        # Record whether or not we failed generation before required
        # s-channel propagators are taken into account
        failed_crossing = not res

        # Required s-channels is a list of id-lists. Select the
        # diagrams where all required s-channel propagators in any of
        # the lists are present (i.e., the different lists correspond
        # to "or", while the elements of the list correspond to
        # "and").
        if process.get('required_s_channels') and \
               process.get('required_s_channels')[0]:
            # We shouldn't look at the last vertex in each diagram,
            # since that is the n->0 vertex
            lastvx = -1
            # For decay chain processes, there is an "artificial"
            # extra vertex corresponding to particle 1=1, so we need
            # to exclude the two last vertexes.
            if is_decay_proc: lastvx = -2
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
            # Check required s-channels for each list in required_s_channels
            old_res = res
            res = base_objects.DiagramList()
            for id_list in process.get('required_s_channels'):
                res_diags = [diagram for diagram in old_res if all([req_s_channel in \
                             [vertex.get_s_channel_id(\
                                 process.get('model'), ninitial) \
                              for vertex in diagram.get('vertices')[:lastvx]] \
                             for req_s_channel in \
                             id_list])]
                # Add diagrams only if not already in res
                res.extend([diag for diag in res_diags if diag not in res])

        # Remove all diagrams with a "double" forbidden s-channel propagator
        # is present.
        # Note that we shouldn't look at the last vertex in each
        # diagram, since that is the n->0 vertex
        if process.get('forbidden_s_channels'):
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
            if ninitial == 2:
                res = base_objects.DiagramList(\
                    [diagram for diagram in res if not any([vertex.get_s_channel_id(\
                        process.get('model'), ninitial) \
                        in process.get('forbidden_s_channels')
                        for vertex in diagram.get('vertices')[:-1]])])
            else:
                # split since we need to avoid that the initial particle is forbidden
                # as well.
                newres= []
                for diagram in res:
                    leg1 = 1
                    # check the latest vertex to see if the leg 1 is inside; if it
                    # is we need to inverse the look-up and allow the first s-channel
                    # of the associated particles.
                    vertex = diagram.get('vertices')[-1]
                    if any([l['number'] ==1 for l in vertex.get('legs')]):
                        leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
                    to_loop = list(range(len(diagram.get('vertices'))-1))
                    if leg1 >1:
                        to_loop.reverse()
                    for i in to_loop:
                        vertex = diagram.get('vertices')[i]
                        if leg1:
                            # Skip the first s-channel of the initial particle
                            if any([l['number'] ==leg1 for l in vertex.get('legs')]):
                                leg1 = 0
                                continue
                        if vertex.get_s_channel_id(process.get('model'), ninitial)\
                                        in process.get('forbidden_s_channels'):
                            break
                    else:
                        # No forbidden s-channel found: keep the diagram
                        newres.append(diagram)
                res = base_objects.DiagramList(newres)


        # Mark forbidden (onshell) s-channel propagators, to forbid onshell
        # generation.
        if process.get('forbidden_onsh_s_channels'):
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])

            verts = base_objects.VertexList(sum([[vertex for vertex \
                                        in diagram.get('vertices')[:-1]
                                        if vertex.get_s_channel_id(\
                                        process.get('model'), ninitial) \
                                        in process.get('forbidden_onsh_s_channels')] \
                                        for diagram in res], []))
            for vert in verts:
                # Use onshell = False to indicate that this s-channel is forbidden
                newleg = copy.copy(vert.get('legs').pop(-1))
                newleg.set('onshell', False)
                vert.get('legs').append(newleg)

        # Set actual coupling orders for each diagram
        for diagram in res:
            diagram.calculate_orders(model)

        # Filter the diagrams according to the squared coupling order
        # constraints and possible the negative one. Remember that OrderName=-n
        # means that the user wants to include everything up to the N^(n+1)LO
        # contribution in that order and at most one order can be restricted
        # in this way. We shall do this only if the diagrams are not asked to
        # be returned, as it is the case for NLO because it this case the
        # interference are not necessarily among the diagrams generated here only.
        if not returndiag and len(res)>0:
            res = self.apply_squared_order_constraints(res)

        if diagram_filter:
            res = self.apply_user_filter(res)

        # Replace final id=0 vertex if necessary
        if not process.get('is_decay_chain'):
            for diagram in res:
                vertices = diagram.get('vertices')
                if len(vertices) > 1 and vertices[-1].get('id') == 0:
                    # Need to "glue together" last and next-to-last
                    # vertex, by replacing the (incoming) last leg of the
                    # next-to-last vertex with the (outgoing) leg in the
                    # last vertex
                    vertices = copy.copy(vertices)
                    lastvx = vertices.pop()
                    nexttolastvertex = copy.copy(vertices.pop())
                    legs = copy.copy(nexttolastvertex.get('legs'))
                    ntlnumber = legs[-1].get('number')
                    lastleg = [leg for leg in lastvx.get('legs') if leg.get('number') != ntlnumber][0]
                    # Reset onshell in case we have forbidden s-channels
                    if lastleg.get('onshell') == False:
                        lastleg.set('onshell', None)
                    # Replace the last leg of nexttolastvertex
                    legs[-1] = lastleg
                    nexttolastvertex.set('legs', legs)
                    vertices.append(nexttolastvertex)
                    diagram.set('vertices', vertices)

        if res and not returndiag:
            logger.info("Process has %d diagrams" % len(res))

        # Trim down number of legs and vertices used to save memory
        self.trim_diagrams(diaglist=res)

        # Sort process legs according to leg number
        pertur = 'QCD'
        if self.get('process')['perturbation_couplings']:
            pertur = sorted(self.get('process')['perturbation_couplings'])[0]
        self.get('process').get('legs').sort(pert=pertur)

        # Set diagrams to res if not asked to be returned
        if not returndiag:
            self['diagrams'] = res
            return not failed_crossing
        else:
            return not failed_crossing, res
856
857 - def apply_squared_order_constraints(self, diag_list):
858 """Applies the user specified squared order constraints on the diagram 859 list in argument.""" 860 861 res = copy.copy(diag_list) 862 863 # Apply the filtering on constrained amplitude (== and >) 864 # No need to iterate on this one 865 for name, (value, operator) in self['process'].get('constrained_orders').items(): 866 res.filter_constrained_orders(name, value, operator) 867 868 # Iterate the filtering since the applying the constraint on one 869 # type of coupling order can impact what the filtering on a previous 870 # one (relevant for the '==' type of constraint). 871 while True: 872 new_res = res.apply_positive_sq_orders(res, 873 self['process'].get('squared_orders'), 874 self['process']['sqorders_types']) 875 # Exit condition 876 if len(res)==len(new_res): 877 break 878 elif (len(new_res)>len(res)): 879 raise MadGraph5Error( 880 'Inconsistency in function apply_squared_order_constraints().') 881 # Actualizing the list of diagram for the next iteration 882 res = new_res 883 884 885 886 # Now treat the negative squared order constraint (at most one) 887 neg_orders = [(order, value) for order, value in \ 888 self['process'].get('squared_orders').items() if value<0] 889 if len(neg_orders)==1: 890 neg_order, neg_value = neg_orders[0] 891 # Now check any negative order constraint 892 res, target_order = res.apply_negative_sq_order(res, neg_order,\ 893 neg_value, self['process']['sqorders_types'][neg_order]) 894 # Substitute the negative value to this positive one so that 895 # the resulting computed constraints appears in the print out 896 # and at the output stage we no longer have to deal with 897 # negative valued target orders 898 self['process']['squared_orders'][neg_order]=target_order 899 elif len(neg_orders)>1: 900 raise InvalidCmd('At most one negative squared order constraint'+\ 901 ' can be specified, not %s.'%str(neg_orders)) 902 903 return res
904
    def apply_user_filter(self, diag_list):
        """Applies the user-defined diagram filter (remove_diag from
        PLUGIN/user_filter.py) on the diagram list in argument and returns
        the filtered DiagramList."""

        # The dead 'else' branch below is kept on purpose as an example
        # implementation of remove_diag for testing/documentation.
        if True:
            remove_diag = misc.plugin_import('user_filter',
                                    'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed',
                                    fcts=['remove_diag'])
        else:
            #example and simple tests
            def remove_diag(diag, model=None):
                for vertex in diag['vertices']: #last
                    if vertex['id'] == 0: #special final vertex
                        continue
                    if vertex['legs'][-1]['number'] < 3: #this means T-channel
                        if abs(vertex['legs'][-1]['id']) <6:
                            return True
                return False

        # Keep only the diagrams for which the user filter returns False
        res = diag_list.__class__()
        nb_removed = 0
        model = self['process']['model']
        for diag in diag_list:
            if remove_diag(diag, model):
                nb_removed +=1
            else:
                res.append(diag)

        if nb_removed:
            logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)

        return res
937 938 939
940 - def create_diagram(self, vertexlist):
941 """ Return a Diagram created from the vertex list. This function can be 942 overloaded by daughter classes.""" 943 return base_objects.Diagram({'vertices':vertexlist})
944
945 - def convert_dgleg_to_leg(self, vertexdoublelist):
946 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs. 947 In Amplitude, there is nothing to do. """ 948 949 return True
950
951 - def copy_leglist(self, legs):
952 """ Simply returns a copy of the leg list. This function is 953 overloaded in LoopAmplitude so that a DGLoopLeg list is returned. 954 The DGLoopLeg has some additional parameters only useful during 955 loop diagram generation""" 956 957 return base_objects.LegList(\ 958 [ copy.copy(leg) for leg in legs ])
959
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1
        For algorithm, see doc for generate_diagrams.

        Returns a list of vertex lists (one per generated diagram), or
        None if this branch must be discarded.
        """

        # Result variable which is a list of lists of vertices
        # to be added
        res = []

        # Stop condition. If LegList is None, that means that this
        # diagram must be discarded
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')


        # If all legs can be combined in one single vertex, add this
        # vertex to res and continue.
        # Special treatment for decay chain legs

        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Extract the interaction id associated to the vertex
            vertex_ids = self.get_combined_vertices(curr_leglist,
                       copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                  leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]
            # Check for coupling orders. If orders < 0, skip vertex
            # (reduce_orders returns False precisely on violation)
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])
        # Stop condition 2: if the leglist contained exactly two particles,
        # return the result, if any, and stop.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid combinations of legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Create a list of leglists/vertices by merging combinations
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider all the pairs
        for leg_vertex_tuple in leg_vertex_list:

            # Remove forbidden particles
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                    continue

            # Check for coupling orders. If couplings < 0, skip recursion.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order < 0
                continue

            # This is where recursion happens
            # First, reduce again the leg part
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)
            # If there is a reduced diagram, combine the vertices of this
            # step with every way of completing the rest of the diagram
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
1048
1049 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
1050 """Return False if the coupling orders for any coupling is < 1051 0, otherwise return the new coupling orders with the vertex 1052 orders subtracted. If coupling_orders is not given, return 1053 None (which counts as success). 1054 WEIGHTED is a special order, which corresponds to the sum of 1055 order hierarchies for the couplings. 1056 We ignore negative constraints as these cannot be taken into 1057 account on the fly but only after generation.""" 1058 1059 if not coupling_orders: 1060 return None 1061 1062 present_couplings = copy.copy(coupling_orders) 1063 for id in vertex_id_list: 1064 # Don't check for identity vertex (id = 0) 1065 if not id: 1066 continue 1067 inter = model.get("interaction_dict")[id] 1068 for coupling in inter.get('orders').keys(): 1069 # Note that we don't consider a missing coupling as a 1070 # constraint 1071 if coupling in present_couplings and \ 1072 present_couplings[coupling]>=0: 1073 # Reduce the number of couplings that are left 1074 present_couplings[coupling] -= \ 1075 inter.get('orders')[coupling] 1076 if present_couplings[coupling] < 0: 1077 # We have too many couplings of this type 1078 return False 1079 # Now check for WEIGHTED, i.e. the sum of coupling hierarchy values 1080 if 'WEIGHTED' in present_couplings and \ 1081 present_couplings['WEIGHTED']>=0: 1082 weight = sum([model.get('order_hierarchy')[c]*n for \ 1083 (c,n) in inter.get('orders').items()]) 1084 present_couplings['WEIGHTED'] -= weight 1085 if present_couplings['WEIGHTED'] < 0: 1086 # Total coupling weight too large 1087 return False 1088 1089 return present_couplings
1090
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
           If so:

           a. Append [12[34]56] to result array

           b. Split [123456] at index(first element in combination+1),
              i.e. [12],[456] and subtract combination from second half,
              i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
           (first half in step b - combination) + combination + (result
           from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
           resulting array. Example, if [13] and [45] are valid
           combinations:
           [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        res = []

        # loop over possible combination lengths (+1 is for range convention!)
        for comb_length in range(2, max_multi_to1 + 1):

            # Stop early: no combination longer than the leg list exists
            if comb_length > len(list_legs):
                return res

            # itertools.combinations returns all possible combinations
            # of comb_length elements from list_legs
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination is valid, i.e. can be merged
                # into a single leg according to ref_dict_to1
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Identify the rest, create a list [comb,rest] and
                    # add it to res: the combination (as a tuple) replaces
                    # its members at the position of its first element
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Now, deal with cases with more than 1 combination

                    # First, split the list into two, according to the
                    # position of the first element in comb, and remove
                    # all elements from comb
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Create a list of type [comb,rest1,rest2(combined)]
                    res_list = res_list1
                    res_list.append(comb)
                    # This is where recursion actually happens,
                    # on the second part: each sub-result is appended to
                    # the fixed prefix [rest1, comb]
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1164 1165
    def merge_comb_legs(self, comb_lists, ref_dict_to1):
        """Takes a list of allowed leg combinations as an input and returns
        a set of lists where combinations have been properly replaced
        (one list per element in the ref_dict, so that all possible intermediate
        particles are included). For each list, give the list of vertices
        corresponding to the executed merging, group the two as a tuple.
        """

        res = []

        for comb_list in comb_lists:

            # reduced_list mirrors comb_list with each tuple of combined
            # legs replaced by the candidate merged legs; vertex_list
            # holds the matching merging vertices, entry by entry.
            reduced_list = []
            vertex_list = []

            for entry in comb_list:

                # Act on all leg combinations (tuples); single legs are
                # handled in the else branch below
                if isinstance(entry, tuple):

                    # Build the leg object which will replace the combination:
                    # 1) leg ids is as given in the ref_dict
                    leg_vert_ids = copy.copy(ref_dict_to1[\
                        tuple(sorted([leg.get('id') for leg in entry]))])
                    # 2) number is the minimum of leg numbers involved in the
                    # combination
                    number = min([leg.get('number') for leg in entry])
                    # 3) state is final, unless there is exactly one initial
                    # state particle involved in the combination -> t-channel
                    if len([leg for leg in entry if leg.get('state') == False]) == 1:
                        state = False
                    else:
                        state = True
                    # 4) from_group is True, by definition

                    # Create and add the object. This is done by a
                    # separate routine, to allow overloading by
                    # daughter classes
                    new_leg_vert_ids = []
                    if leg_vert_ids:
                        new_leg_vert_ids = self.get_combined_legs(entry,
                                                                  leg_vert_ids,
                                                                  number,
                                                                  state)

                    reduced_list.append([l[0] for l in new_leg_vert_ids])

                    # Create and add the corresponding vertex
                    # Extract vertex ids corresponding to the various legs
                    # in mylegs
                    vlist = base_objects.VertexList()
                    for (myleg, vert_id) in new_leg_vert_ids:
                        # Start with the considered combination...
                        myleglist = base_objects.LegList(list(entry))
                        # ... and complete with legs after reducing
                        myleglist.append(myleg)
                        # ... and consider the correct vertex id
                        vlist.append(base_objects.Vertex(
                                         {'legs':myleglist,
                                          'id':vert_id}))

                    vertex_list.append(vlist)

                # If entry is not a combination, switch the from_group flag
                # and add it
                else:
                    cp_entry = copy.copy(entry)
                    # Need special case for from_group == None; this
                    # is for initial state leg of decay chain process
                    # (see Leg.can_combine_to_0)
                    if cp_entry.get('from_group') != None:
                        cp_entry.set('from_group', False)
                    reduced_list.append(cp_entry)

            # Flatten the obtained leg and vertex lists: expand_list
            # produces one flat list per choice of intermediate particle
            flat_red_lists = expand_list(reduced_list)
            flat_vx_lists = expand_list(vertex_list)

            # Combine the two lists in a list of tuples; the expansions
            # run in lockstep, so index i pairs matching legs/vertices
            for i in range(0, len(flat_vx_lists)):
                res.append((base_objects.LegList(flat_red_lists[i]), \
                            base_objects.VertexList(flat_vx_lists[i])))

        return res
1251
1252 - def get_combined_legs(self, legs, leg_vert_ids, number, state):
1253 """Create a set of new legs from the info given. This can be 1254 overloaded by daughter classes.""" 1255 1256 mylegs = [(base_objects.Leg({'id':leg_id, 1257 'number':number, 1258 'state':state, 1259 'from_group':True}), 1260 vert_id)\ 1261 for leg_id, vert_id in leg_vert_ids] 1262 1263 return mylegs
1264
    def get_combined_vertices(self, legs, vert_ids):
        """Allow for selection of vertex ids. This can be
        overloaded by daughter classes.

        The base implementation accepts every candidate vertex id
        unchanged; 'legs' is unused here but is part of the hook
        signature for subclasses."""

        return vert_ids
1270
    def trim_diagrams(self, decay_ids=[], diaglist=None):
        """Reduce the number of legs and vertices used in memory.

        Equal Leg and Vertex objects across the diagrams are replaced by
        a single shared instance, and external legs whose id appears in
        decay_ids are flagged with onshell = True to mark an attached
        decay chain.

        When called by a diagram generation initiated by LoopAmplitude,
        this function should not trim the diagrams in the attribute 'diagrams'
        but rather a given list in the 'diaglist' argument.

        Note: the mutable default decay_ids=[] is only ever read (never
        mutated), so it is safe here."""

        # Pools of unique legs/vertices used for interning (sharing)
        legs = []
        vertices = []

        if diaglist is None:
            diaglist=self.get('diagrams')

        # Flag decaying legs in the core process by onshell = True
        process = self.get('process')
        for leg in process.get('legs'):
            if leg.get('state') and leg.get('id') in decay_ids:
                leg.set('onshell', True)

        for diagram in diaglist:
            # Keep track of external legs (leg numbers already used)
            leg_external = set()
            for ivx, vertex in enumerate(diagram.get('vertices')):
                for ileg, leg in enumerate(vertex.get('legs')):
                    # Ensure that only external legs get decay flag
                    if leg.get('state') and leg.get('id') in decay_ids and \
                       leg.get('number') not in leg_external:
                        # Use onshell to indicate decaying legs,
                        # i.e. legs that have decay chains; copy before
                        # mutating so shared instances stay untouched
                        leg = copy.copy(leg)
                        leg.set('onshell', True)
                    try:
                        index = legs.index(leg)
                    except ValueError:
                        # First occurrence: keep this object and pool it
                        vertex.get('legs')[ileg] = leg
                        legs.append(leg)
                    else: # Found a leg: reuse the pooled instance
                        vertex.get('legs')[ileg] = legs[index]
                    leg_external.add(leg.get('number'))
                try:
                    # Reuse an equal vertex if one was already pooled
                    index = vertices.index(vertex)
                    diagram.get('vertices')[ivx] = vertices[index]
                except ValueError:
                    vertices.append(vertex)
1314
#===============================================================================
# AmplitudeList
#===============================================================================
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this list to
        see if there is any which defines perturbation couplings.

        Returns True if at least one amplitude has a loop process,
        False otherwise."""

        for amp in self:
            if amp.has_loop_process():
                return True
        # Fix: the original fell off the end and implicitly returned
        # None; an explicit False is equivalent in boolean context but
        # makes the contract unambiguous for callers.
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1334
#===============================================================================
# DecayChainAmplitude
#===============================================================================
class DecayChainAmplitude(Amplitude):
    """A list of amplitudes + a list of decay chain amplitude lists;
    corresponding to a ProcessDefinition with a list of decay chains
    """

    def default_setup(self):
        """Default values for all properties"""

        # Amplitudes for the core process(es)
        self['amplitudes'] = AmplitudeList()
        # One DecayChainAmplitude per decay attached to the core process
        self['decay_chains'] = DecayChainAmplitudeList()

    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition"""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Imported here (not at module level) to avoid a circular
            # import with the loop generation module
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error("Decay processes can not be perturbed")
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd("Decay chain process must have exactly one" + \
                                     " incoming particle")
                # Decay chains are built recursively: each decay is
                # itself a DecayChainAmplitude
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Flag decaying legs in the core diagrams by onshell = True
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least some process
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            # Any id left over has a decay defined but never appears in
            # a core process: warn and discard those decays below
            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

            # Remove unused decays from the process list; iterate in
            # reverse so removal does not disturb the iteration
            for dc in reversed(self['decay_chains']):
                for a in reversed(dc.get('amplitudes')):
                    # Remove the amplitudes from this decay chain
                    if a.get('process').get('legs')[0].get('id') in decay_ids:
                        dc.get('amplitudes').remove(a)
                if not dc.get('amplitudes'):
                    # If no amplitudes left, remove the decay chain
                    self['decay_chains'].remove(dc)

            # Finally, write a fat warning if any decay process has
            # the decaying particle (or its antiparticle) in the final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__()

    def filter(self, name, value):
        """Filter for valid amplitude property values."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError("%s is not a valid AmplitudeList" % str(value))
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError("%s is not a valid DecayChainAmplitudeList object" % \
                                              str(value))
        return True

    def get_sorted_keys(self):
        """Return diagram property names as a nicely sorted list."""

        return ['amplitudes', 'decay_chains']

    # Helper functions

    def get_number_of_diagrams(self):
        """Returns number of diagrams for this amplitude, including all
        diagrams of the attached decay chains."""
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))

    def nice_string(self, indent = 0):
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]

    def nice_string_processes(self, indent = 0):
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]

    def get_ninitial(self):
        """Returns the number of initial state particles in the process."""
        return self.get('amplitudes')[0].get('process').get_ninitial()

    def get_decay_ids(self):
        """Returns a list of unique particle ids for which a decay is
        defined."""

        decay_ids = []

        # Get all amplitudes for the decay processes
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            # For each amplitude, find the initial state leg
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Return a list with unique ids
        return list(set(decay_ids))

    def has_loop_process(self):
        """ Returns whether this amplitude has a loop process."""
        return self['amplitudes'].has_any_loop_process()

    def get_amplitudes(self):
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        amplitudes.extend(self.get('amplitudes'))
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
1537
#===============================================================================
# DecayChainAmplitudeList
#===============================================================================
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects
    """

    def is_valid_element(self, obj):
        """Test if object obj is a valid DecayChainAmplitude for the list."""

        return isinstance(obj, DecayChainAmplitude)
1550
1551 1552 #=============================================================================== 1553 # MultiProcess 1554 #=============================================================================== 1555 -class MultiProcess(base_objects.PhysicsObject):
1556 """MultiProcess: list of process definitions 1557 list of processes (after cleaning) 1558 list of amplitudes (after generation) 1559 """ 1560
    def default_setup(self):
        """Default values for all properties"""

        # Input process definitions (one per generate/add process command)
        self['process_definitions'] = base_objects.ProcessDefinitionList()
        # self['amplitudes'] can be an AmplitudeList or a
        # DecayChainAmplitudeList, depending on whether there are
        # decay chains in the process definitions or not.
        self['amplitudes'] = AmplitudeList()
        # Flag for whether to combine IS mirror processes together
        self['collect_mirror_procs'] = False
        # List of quark flavors where we ignore processes with at
        # least 6 quarks (three quark lines)
        self['ignore_six_quark_processes'] = []
        # Allow to use the model parameter numerical value for optimization.
        # This is currently used for 1->N generation (mass check).
        self['use_numerical'] = False
1577
1578 - def __init__(self, argument=None, collect_mirror_procs = False, 1579 ignore_six_quark_processes = [], optimize=False, 1580 loop_filter=None, diagram_filter=None):
1581 """Allow initialization with ProcessDefinition or 1582 ProcessDefinitionList 1583 optimize allows to use param_card information. (usefull for 1-.N)""" 1584 1585 if isinstance(argument, base_objects.ProcessDefinition): 1586 super(MultiProcess, self).__init__() 1587 self['process_definitions'].append(argument) 1588 elif isinstance(argument, base_objects.ProcessDefinitionList): 1589 super(MultiProcess, self).__init__() 1590 self['process_definitions'] = argument 1591 elif argument != None: 1592 # call the mother routine 1593 super(MultiProcess, self).__init__(argument) 1594 else: 1595 # call the mother routine 1596 super(MultiProcess, self).__init__() 1597 1598 self['collect_mirror_procs'] = collect_mirror_procs 1599 self['ignore_six_quark_processes'] = ignore_six_quark_processes 1600 self['use_numerical'] = optimize 1601 self['loop_filter'] = loop_filter 1602 self['diagram_filter'] = diagram_filter # only True/False so far 1603 1604 if isinstance(argument, base_objects.ProcessDefinition) or \ 1605 isinstance(argument, base_objects.ProcessDefinitionList): 1606 # Generate the diagrams 1607 self.get('amplitudes')
1608 1609
1610 - def filter(self, name, value):
1611 """Filter for valid process property values.""" 1612 1613 if name == 'process_definitions': 1614 if not isinstance(value, base_objects.ProcessDefinitionList): 1615 raise self.PhysicsObjectError("%s is not a valid ProcessDefinitionList object" % str(value)) 1616 1617 if name == 'amplitudes': 1618 if not isinstance(value, AmplitudeList): 1619 raise self.PhysicsObjectError("%s is not a valid AmplitudeList object" % str(value)) 1620 1621 if name in ['collect_mirror_procs']: 1622 if not isinstance(value, bool): 1623 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value)) 1624 1625 if name == 'ignore_six_quark_processes': 1626 if not isinstance(value, list): 1627 raise self.PhysicsObjectError("%s is not a valid list" % str(value)) 1628 1629 return True
1630
    def get(self, name):
        """Get the value of the property name.

        Overloaded so that the first access to 'amplitudes' lazily
        generates the amplitudes from the stored process definitions."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process
                    # Store amplitude(s) as DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        diagram_filter=self['diagram_filter']))
                else:
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                       self.get('collect_mirror_procs'),
                                       self.get('ignore_six_quark_processes'),
                                       self['use_numerical'],
                                       loop_filter=self['loop_filter'],
                                       diagram_filter=self['diagram_filter']))

        return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1654
    def get_sorted_keys(self):
        """Return process property names as a nicely sorted list."""

        return ['process_definitions', 'amplitudes']
1659
    def get_model(self):
        """Return the model shared by the process definitions (taken
        from the first one)."""

        return self['process_definitions'][0]['model']
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.

        Returns an AmplitudeList with one entry per distinct subprocess
        of the multiprocess definition; raises NoDiagramException (or
        re-raises the single InvalidCmd) when nothing can be generated.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Set automatic coupling orders
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition,
                                                           diagram_filter))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs and success_procs are sorted processes that have
        # already failed/succeeded based on crossing symmetry
        failed_procs = []
        success_procs = []
        # Complete processes, for identification of mirror processes
        non_permuted_procs = []
        # permutations keeps the permutations of the crossed processes
        permutations = []

        # Store the diagram tags for processes, to allow for
        # identifying identical matrix elements already at this stage.
        model = process_definition['model']

        islegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == False]
        fslegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == True]

        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]
        polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            # NOTE: islegs is rebound here from the multileg list to the
            # concrete Leg list of this initial-state combination
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False,
                                      'polarization': islegs[i]['polarization']})
                    for i,id in enumerate(prod)]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = set()

            for prod in itertools.product(*fsids):
                # Tag by (id, polarization) pairs so that states that
                # differ only by ordering are counted once
                tag = zip(prod, polids)
                tag = sorted(tag)
                # Remove double counting between final states
                if tuple(tag) in red_fsidlist:
                    continue

                red_fsidlist.add(tuple(tag))

                # Generate leg list for process
                leg_list = [copy.copy(leg) for leg in islegs]
                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                        for i,id in enumerate(prod)])

                legs = base_objects.LegList(leg_list)

                # Check for crossed processes: sort the outgoing ids and
                # remember the permutation that restores the original order
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Check for six-quark processes
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # Check if crossed process has already failed,
                # in that case don't check process
                if sorted_legs in failed_procs:
                    continue

                # If allowed check mass validity [assume 1->N]
                if use_numerical:
                    # check that final state has lower mass than initial state
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup process
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                       process_definition.get_ninitial() == 2:
                    # Check if mirrored process is already generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                                   amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Check for successful crossings, unless we have specified
                # properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain') and not diagram_filter:
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # The relabeling of legs for loop amplitudes is cumbersome
                        # and does not save so much time. It is disable here and
                        # we use the key 'loop_diagrams' to decide whether
                        # it is an instance of LoopAmplitude.
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing found, just continue
                        pass
                    else:
                        # Found crossing - reuse amplitude
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create new amplitude
                amplitude = cls.get_amplitude_from_proc(process,
                                                        loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException(\
                "No amplitudes generated from process %s. Please enter a valid process" % \
                      process_definition.nice_string())

        # Return the produced amplitudes
        return amplitudes
    @classmethod
    def get_amplitude_from_proc(cls,proc,**opts):
        """ Return the correct amplitude type according to the characteristics of
        the process proc. The only option that could be specified here is
        loop_filter and it is of course not relevant for a tree amplitude."""

        # Tree-level base implementation: extra options (e.g. loop_filter)
        # are accepted for interface compatibility but ignored
        return Amplitude({"process": proc})
1868 1869 1870 @staticmethod
1871 - def find_optimal_process_orders(process_definition, diagram_filter=False):
1872 """Find the minimal WEIGHTED order for this set of processes. 1873 1874 The algorithm: 1875 1876 1) Check the coupling hierarchy of the model. Assign all 1877 particles to the different coupling hierarchies so that a 1878 particle is considered to be in the highest hierarchy (i.e., 1879 with lowest value) where it has an interaction. 1880 1881 2) Pick out the legs in the multiprocess according to the 1882 highest hierarchy represented (so don't mix particles from 1883 different hierarchy classes in the same multiparticles!) 1884 1885 3) Find the starting maximum WEIGHTED order as the sum of the 1886 highest n-2 weighted orders 1887 1888 4) Pick out required s-channel particle hierarchies, and use 1889 the highest of the maximum WEIGHTED order from the legs and 1890 the minimum WEIGHTED order extracted from 2*s-channel 1891 hierarchys plus the n-2-2*(number of s-channels) lowest 1892 leg weighted orders. 1893 1894 5) Run process generation with the WEIGHTED order determined 1895 in 3)-4) - # final state gluons, with all gluons removed from 1896 the final state 1897 1898 6) If no process is found, increase WEIGHTED order by 1 and go 1899 back to 5), until we find a process which passes. Return that 1900 order. 1901 1902 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1. 
1903 If still no process has passed, return 1904 WEIGHTED = (n-2)*(highest hierarchy) 1905 """ 1906 1907 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1908 "%s not valid ProcessDefinition object" % \ 1909 repr(process_definition) 1910 1911 processes = base_objects.ProcessList() 1912 amplitudes = AmplitudeList() 1913 1914 # If there are already couplings defined, return 1915 if process_definition.get('orders') or \ 1916 process_definition.get('overall_orders') or \ 1917 process_definition.get('NLO_mode')=='virt': 1918 return process_definition.get('orders') 1919 1920 # If this is a decay process (and not a decay chain), return 1921 if process_definition.get_ninitial() == 1 and not \ 1922 process_definition.get('is_decay_chain'): 1923 return process_definition.get('orders') 1924 1925 logger.info("Checking for minimal orders which gives processes.") 1926 logger.info("Please specify coupling orders to bypass this step.") 1927 1928 # Calculate minimum starting guess for WEIGHTED order 1929 max_order_now, particles, hierarchy = \ 1930 process_definition.get_minimum_WEIGHTED() 1931 coupling = 'WEIGHTED' 1932 1933 model = process_definition.get('model') 1934 1935 # Extract the initial and final leg ids 1936 isids = [leg['ids'] for leg in \ 1937 [leg for leg in process_definition['legs'] if leg['state'] == False]] 1938 fsids = [leg['ids'] for leg in \ 1939 [leg for leg in process_definition['legs'] if leg['state'] == True]] 1940 1941 max_WEIGHTED_order = \ 1942 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED()) 1943 # get the definition of the WEIGHTED 1944 hierarchydef = process_definition['model'].get('order_hierarchy') 1945 tmp = [] 1946 hierarchy = list(hierarchydef.items()) 1947 hierarchy.sort() 1948 for key, value in hierarchydef.items(): 1949 if value>1: 1950 tmp.append('%s*%s' % (value,key)) 1951 else: 1952 tmp.append('%s' % key) 1953 wgtdef = '+'.join(tmp) 1954 # Run diagram generation with increasing max_order_now until 1955 # we 
manage to get diagrams 1956 while max_order_now < max_WEIGHTED_order: 1957 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef)) 1958 1959 oldloglevel = logger.level 1960 logger.setLevel(logging.WARNING) 1961 1962 # failed_procs are processes that have already failed 1963 # based on crossing symmetry 1964 failed_procs = [] 1965 # Generate all combinations for the initial state 1966 for prod in itertools.product(*isids): 1967 islegs = [ base_objects.Leg({'id':id, 'state': False}) \ 1968 for id in prod] 1969 1970 # Generate all combinations for the final state, and make 1971 # sure to remove double counting 1972 1973 red_fsidlist = [] 1974 1975 for prod in itertools.product(*fsids): 1976 1977 # Remove double counting between final states 1978 if tuple(sorted(prod)) in red_fsidlist: 1979 continue 1980 1981 red_fsidlist.append(tuple(sorted(prod))); 1982 1983 # Remove gluons from final state if QCD is among 1984 # the highest coupling hierarchy 1985 nglue = 0 1986 if 21 in particles[0]: 1987 nglue = len([id for id in prod if id == 21]) 1988 prod = [id for id in prod if id != 21] 1989 1990 # Generate leg list for process 1991 leg_list = [copy.copy(leg) for leg in islegs] 1992 1993 leg_list.extend([\ 1994 base_objects.Leg({'id':id, 'state': True}) \ 1995 for id in prod]) 1996 1997 legs = base_objects.LegList(leg_list) 1998 1999 # Set summed coupling order according to max_order_now 2000 # subtracting the removed gluons 2001 coupling_orders_now = {coupling: max_order_now - \ 2002 nglue * model['order_hierarchy']['QCD']} 2003 2004 # Setup process 2005 process = base_objects.Process({\ 2006 'legs':legs, 2007 'model':model, 2008 'id': process_definition.get('id'), 2009 'orders': coupling_orders_now, 2010 'required_s_channels': \ 2011 process_definition.get('required_s_channels'), 2012 'forbidden_onsh_s_channels': \ 2013 process_definition.get('forbidden_onsh_s_channels'), 2014 'sqorders_types': \ 2015 
process_definition.get('sqorders_types'), 2016 'squared_orders': \ 2017 process_definition.get('squared_orders'), 2018 'split_orders': \ 2019 process_definition.get('split_orders'), 2020 'forbidden_s_channels': \ 2021 process_definition.get('forbidden_s_channels'), 2022 'forbidden_particles': \ 2023 process_definition.get('forbidden_particles'), 2024 'is_decay_chain': \ 2025 process_definition.get('is_decay_chain'), 2026 'overall_orders': \ 2027 process_definition.get('overall_orders'), 2028 'split_orders': \ 2029 process_definition.get('split_orders')}) 2030 2031 # Check for couplings with given expansion orders 2032 process.check_expansion_orders() 2033 2034 # Check for crossed processes 2035 sorted_legs = sorted(legs.get_outgoing_id_list(model)) 2036 # Check if crossed process has already failed 2037 # In that case don't check process 2038 if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'): 2039 continue 2040 2041 amplitude = Amplitude({'process': process}) 2042 try: 2043 amplitude.generate_diagrams(diagram_filter=diagram_filter) 2044 except InvalidCmd as error: 2045 failed_procs.append(tuple(sorted_legs)) 2046 else: 2047 if amplitude.get('diagrams'): 2048 # We found a valid amplitude. Return this order number 2049 logger.setLevel(oldloglevel) 2050 return {coupling: max_order_now} 2051 else: 2052 failed_procs.append(tuple(sorted_legs)) 2053 # No processes found, increase max_order_now 2054 max_order_now += 1 2055 logger.setLevel(oldloglevel) 2056 2057 # If no valid processes found with nfinal-1 couplings, return maximal 2058 return {coupling: max_order_now}
2059 2060 @staticmethod
2061 - def cross_amplitude(amplitude, process, org_perm, new_perm):
2062 """Return the amplitude crossed with the permutation new_perm""" 2063 # Create dict from original leg numbers to new leg numbers 2064 perm_map = dict(list(zip(org_perm, new_perm))) 2065 # Initiate new amplitude 2066 new_amp = copy.copy(amplitude) 2067 # Number legs 2068 for i, leg in enumerate(process.get('legs')): 2069 leg.set('number', i+1) 2070 # Set process 2071 new_amp.set('process', process) 2072 # Now replace the leg numbers in the diagrams 2073 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 2074 process.get('legs'),) for \ 2075 d in new_amp.get('diagrams')]) 2076 new_amp.set('diagrams', diagrams) 2077 new_amp.trim_diagrams() 2078 2079 # Make sure to reset mirror process 2080 new_amp.set('has_mirror_process', False) 2081 2082 return new_amp
2083
#===============================================================================
# Global helper methods
#===============================================================================

def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Check that argument is a list
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap bare elements so that every entry is a list of alternatives,
    # then take the cartesian product across all entries.
    choices = [item if isinstance(item, list) else [item] for item in mylist]

    return [list(combination) for combination in itertools.product(*choices)]
2109
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # An empty argument (or a single empty entry) expands to one empty
    # combination
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    # Check the first element is at least a list
    assert isinstance(mylist[0], list), \
        "Expand_list_list needs a list of lists and lists of lists"

    head = mylist[0]

    # Recursion stop condition: a single entry needs no combination
    if len(mylist) == 1:
        return head if isinstance(head[0], list) else mylist

    # A plain list offers exactly one alternative; a list of lists
    # offers one alternative per sublist.
    prefixes = head if isinstance(head[0], list) else [head]

    # Expand everything after the first entry, then prefix each
    # expansion with each alternative of the first entry (same
    # left-to-right ordering as a nested loop).
    tails = expand_list_list(mylist[1:])

    return [prefix + tail for prefix in prefixes for tail in tails]
2149