Package madgraph :: Package core :: Module diagram_generation
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
  18  multiparticle definitions. DiagramTag allows to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
  22  import array 
  23  import copy 
  24  import itertools 
  25  import logging 
  26   
  27  import madgraph.core.base_objects as base_objects 
  28  import madgraph.various.misc as misc 
  29  from madgraph import InvalidCmd, MadGraph5Error 
  30   
  31  logger = logging.getLogger('madgraph.diagram_generation') 
class NoDiagramException(InvalidCmd):
    """Raised when diagram generation yields no valid diagram."""
35
36 #=============================================================================== 37 # DiagramTag mother class 38 #=============================================================================== 39 40 -class DiagramTag(object):
41 """Class to tag diagrams based on objects with some __lt__ measure, e.g. 42 PDG code/interaction id (for comparing diagrams from the same amplitude), 43 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs). 44 Algorithm: Create chains starting from external particles: 45 1 \ / 6 46 2 /\______/\ 7 47 3_ / | \_ 8 48 4 / 5 \_ 9 49 \ 10 50 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867) 51 (((1,2,id12),(3,4,id34)),id1234), 52 5,id91086712345) 53 where idN is the id of the corresponding interaction. The ordering within 54 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has 55 depth 4, 5 has depht 0), and if equal on the ordering of the chain elements. 56 The determination of central vertex is based on minimizing the chain length 57 for the longest subchain. 58 This gives a unique tag which can be used to identify diagrams 59 (instead of symmetry), as well as identify identical matrix elements from 60 different processes.""" 61
    class DiagramTagError(Exception):
        """Exception raised for any problem encountered while building
        or manipulating DiagramTags."""
        pass
65
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        diagram: the base_objects.Diagram to tag
        model: model used to translate legs/vertices into link content
        ninitial: number of initial-state particles of the process
        """

        # wf_dict keeps track of the intermediate particles
        leg_dict = {}
        # Create the chain which will be the diagram tag
        for vertex in diagram.get('vertices'):
            # Only add incoming legs
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # If last vertex, all legs are incoming
                legs = vertex.get('legs')
            # Add links corresponding to the relevant legs; external legs are
            # cached in leg_dict by number so an intermediate particle created
            # by an earlier vertex is reused rather than re-created.
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))
            # Add vertex to leg_dict if not last one
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The resulting link is the hypothetical result
        self.tag = link

        # Now make sure to find the central vertex in the diagram,
        # defined by the longest leg being as short as possible
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Identify the longest chain in the tag (links are kept sorted,
            # so the first link is the longest)
            longest_chain = self.tag.links[0]
            # Create a new link corresponding to moving one step
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))
            # Create a new final vertex in the direction of the longest link
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # Switch to new tag, continue search
                self.tag = other_link
            else:
                # We have found the central vertex
                done = True
121
122 - def get_external_numbers(self):
123 """Get the order of external particles in this tag""" 124 125 return self.tag.get_external_numbers()
126
    def diagram_from_tag(self, model):
        """Output a diagram reconstructed from this DiagramTag.

        Note that each daughter class must implement the static functions
        id_from_vertex_id (if the vertex id is something else than an
        integer) and leg_from_link (to pass the correct info from an end
        link to a leg)."""

        # Create the vertices, starting from the final (central) vertex
        diagram = base_objects.Diagram({'vertices': \
                                        self.vertices_from_link(self.tag,
                                                                model,
                                                                True)})
        # Recompute the coupling orders of the rebuilt diagram
        diagram.calculate_orders(model)
        return diagram
141 142 @classmethod 177 178 @classmethod
179 - def legPDGs_from_vertex_id(cls, vertex_id,model):
180 """Returns the list of external PDGs of the interaction corresponding 181 to this vertex_id.""" 182 183 # In case we have to deal with a regular vertex, we return the list 184 # external PDGs as given by the model information on that integer 185 # vertex id. 186 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]): 187 return vertex_id[2]['PDGs'] 188 else: 189 return [part.get_pdg_code() for part in model.get_interaction( 190 cls.id_from_vertex_id(vertex_id)).get('particles')]
191 192 @classmethod
193 - def leg_from_legs(cls,legs, vertex_id, model):
194 """Return a leg from a leg list and the model info""" 195 196 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model)) 197 198 # Extract the resulting pdg code from the interaction pdgs 199 for pdg in [leg.get('id') for leg in legs]: 200 pdgs.remove(pdg) 201 202 assert len(pdgs) == 1 203 # Prepare the new leg properties 204 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code() 205 number = min([l.get('number') for l in legs]) 206 # State is False for t-channel, True for s-channel 207 state = (len([l for l in legs if l.get('state') == False]) != 1) 208 # Note that this needs to be done before combining decay chains 209 onshell= False 210 211 return base_objects.Leg({'id': pdg, 212 'number': number, 213 'state': state, 214 'onshell': onshell})
215 216 @classmethod 229 230 @staticmethod 243 244 @staticmethod
245 - def id_from_vertex_id(vertex_id):
246 """Return the numerical vertex id from a link.vertex_id""" 247 248 return vertex_id[0][0]
249 250 @staticmethod
251 - def loop_info_from_vertex_id(vertex_id):
252 """Return the loop_info stored in this vertex id. Notice that the 253 IdentifyME tag does not store the loop_info, but should normally never 254 need access to it.""" 255 256 return vertex_id[2]
257 258 @staticmethod
259 - def reorder_permutation(perm, start_perm):
260 """Reorder a permutation with respect to start_perm. Note that 261 both need to start from 1.""" 262 if perm == start_perm: 263 return range(len(perm)) 264 order = [i for (p,i) in \ 265 sorted([(p,i) for (i,p) in enumerate(perm)])] 266 return [start_perm[i]-1 for i in order]
267 268 @staticmethod 279 280 @staticmethod
281 - def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
282 """Returns the default vertex id: just the interaction id 283 Note that in the vertex id, like the leg, only the first entry is 284 taken into account in the tag comparison, while the second is for 285 storing information that is not to be used in comparisons and the 286 third for additional info regarding the shrunk loop vertex.""" 287 288 if isinstance(vertex,base_objects.ContractedVertex): 289 # return (vertex.get('id'),(),{'PDGs':vertex.get('PDGs')}) 290 return ((vertex.get('id'),vertex.get('loop_tag')),(), 291 {'PDGs':vertex.get('PDGs')}) 292 else: 293 return ((vertex.get('id'),()),(),{})
294 295 @staticmethod
296 - def flip_vertex(new_vertex, old_vertex, links):
297 """Returns the default vertex flip: just the new_vertex""" 298 return new_vertex
299
300 - def __eq__(self, other):
301 """Equal if same tag""" 302 if type(self) != type(other): 303 return False 304 return self.tag == other.tag
305
306 - def __ne__(self, other):
307 return not self.__eq__(other)
308
309 - def __str__(self):
310 return str(self.tag)
311
312 - def __lt__(self, other):
313 return self.tag < other.tag
314
315 - def __gt__(self, other):
316 return self.tag > other.tag
317 318 __repr__ = __str__
319 405
406 #=============================================================================== 407 # Amplitude 408 #=============================================================================== 409 -class Amplitude(base_objects.PhysicsObject):
410 """Amplitude: process + list of diagrams (ordered) 411 Initialize with a process, then call generate_diagrams() to 412 generate the diagrams for the amplitude 413 """ 414
415 - def default_setup(self):
416 """Default values for all properties""" 417 418 self['process'] = base_objects.Process() 419 self['diagrams'] = None 420 # has_mirror_process is True if the same process but with the 421 # two incoming particles interchanged has been generated 422 self['has_mirror_process'] = False
423
    def __init__(self, argument=None):
        """Allow initialization with a Process; in that case the diagrams
        are generated immediately. Any other argument is forwarded to the
        PhysicsObject constructor."""
        if isinstance(argument, base_objects.Process):
            super(Amplitude, self).__init__()
            self.set('process', argument)
            # Generating here makes 'diagrams' available right away
            self.generate_diagrams()
        elif argument != None:
            # call the mother routine
            super(Amplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(Amplitude, self).__init__()
436
    def filter(self, name, value):
        """Filter for valid amplitude property values.

        Raises PhysicsObjectError when value has the wrong type for the
        given property name; returns True otherwise."""

        if name == 'process':
            if not isinstance(value, base_objects.Process):
                raise self.PhysicsObjectError, \
                                        "%s is not a valid Process object" % str(value)
        if name == 'diagrams':
            if not isinstance(value, base_objects.DiagramList):
                raise self.PhysicsObjectError, \
                                        "%s is not a valid DiagramList object" % str(value)
        if name == 'has_mirror_process':
            if not isinstance(value, bool):
                raise self.PhysicsObjectError, \
                                        "%s is not a valid boolean" % str(value)
        return True
453
    def get(self, name):
        """Get the value of the property name.

        'diagrams' is special: it is generated lazily, so the first access
        triggers diagram generation when a process has been set."""

        if name == 'diagrams' and self[name] == None:
            # Have not yet generated diagrams for this process
            if self['process']:
                self.generate_diagrams()

        return super(Amplitude, self).get(name)
466 - def get_sorted_keys(self):
467 """Return diagram property names as a nicely sorted list.""" 468 469 return ['process', 'diagrams', 'has_mirror_process']
470
471 - def get_number_of_diagrams(self):
472 """Returns number of diagrams for this amplitude""" 473 return len(self.get('diagrams'))
474
475 - def get_amplitudes(self):
476 """Return an AmplitudeList with just this amplitude. 477 Needed for DecayChainAmplitude.""" 478 479 return AmplitudeList([self])
480
481 - def nice_string(self, indent=0):
482 """Returns a nicely formatted string of the amplitude content.""" 483 return self.get('process').nice_string(indent) + "\n" + \ 484 self.get('diagrams').nice_string(indent)
485
486 - def nice_string_processes(self, indent=0):
487 """Returns a nicely formatted string of the amplitude process.""" 488 return self.get('process').nice_string(indent)
489
490 - def get_ninitial(self):
491 """Returns the number of initial state particles in the process.""" 492 return self.get('process').get_ninitial()
493
494 - def has_loop_process(self):
495 """ Returns wether this amplitude has a loop process.""" 496 497 return self.get('process').get('perturbation_couplings')
498
    def generate_diagrams(self, returndiag=False, diagram_filter=False):
        """Generate diagrams. Algorithm:

        1. Define interaction dictionaries:
          * 2->0 (identity), 3->0, 4->0, ..., maxlegs->0
          * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1

        2. Set flag from_group=true for all external particles.
           Flip particle/anti particle for incoming particles.

        3. If there is a dictionary n->0 with n=number of external
           particles, create if possible the combination [(1,2,3,4,...)]
           with *at least two* from_group==true. This will give a
           finished (set of) diagram(s) (done by reduce_leglist)

        4. Create all allowed groupings of particles with at least one
           from_group==true (according to dictionaries n->1):
           [(1,2),3,4...],[1,(2,3),4,...],...,
           [(1,2),(3,4),...],...,[(1,2,3),4,...],...
           (done by combine_legs)

        5. Replace each group with a (list of) new particle(s) with number
           n = min(group numbers). Set from_group true for these
           particles and false for all other particles. Store vertex info.
           (done by merge_comb_legs)

        6. Stop algorithm when at most 2 particles remain.
           Return all diagrams (lists of vertices).

        7. Repeat from 3 (recursion done by reduce_leglist)

        8. Replace final p=p vertex

        Be aware that the resulting vertices have all particles outgoing,
        so need to flip for incoming particles when used.

        SPECIAL CASE: For A>BC... processes which are legs in decay
        chains, we need to ensure that BC... combine first, giving A=A
        as a final vertex. This case is defined by the Process
        property is_decay_chain = True.
        This function can also be called by the generate_diagram function
        of LoopAmplitudes, in which case the generated diagrams here must not
        be directly assigned to the 'diagrams' attribute but returned as a
        DiagramList by the function. This is controlled by the argument
        returndiag.
        """

        process = self.get('process')
        model = process.get('model')
        legs = process.get('legs')
        # Make sure orders is the minimum of orders and overall_orders
        for key in process.get('overall_orders').keys():
            try:
                process.get('orders')[key] = \
                                min(process.get('orders')[key],
                                    process.get('overall_orders')[key])
            except KeyError:
                process.get('orders')[key] = process.get('overall_orders')[key]

        assert model.get('particles'), \
           "particles are missing in model: %s" % model.get('particles')

        assert model.get('interactions'), \
               "interactions are missing in model"

        res = base_objects.DiagramList()
        # First check that the number of fermions is even
        if len(filter(lambda leg: model.get('particle_dict')[\
                        leg.get('id')].is_fermion(), legs)) % 2 == 1:
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd, 'The number of fermion is odd'
            else:
                return False, res

        # Then check same number of incoming and outgoing fermions (if
        # no Majorana particles in model)
        if not model.get('got_majoranas') and \
           len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
           len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
            else:
                return False, res

        # Finally check that charge (conserved by all interactions) of the
        # process is globally conserved for this process.
        for charge in model.get('conserved_charge'):
            total = 0
            for leg in legs:
                part = model.get('particle_dict')[leg.get('id')]
                try:
                    value = part.get(charge)
                except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
                    try:
                        value = getattr(part, charge)
                    except AttributeError:
                        value = 0

                # Incoming particles / outgoing antiparticles contribute
                # with opposite sign
                if (leg.get('id') != part['pdg_code']) != leg['state']:
                    total -= value
                else:
                    total += value

            if abs(total) > 1e-10:
                if not returndiag:
                    self['diagrams'] = res
                    raise InvalidCmd, 'No %s conservation for this process ' % charge
                    return res
                else:
                    raise InvalidCmd, 'No %s conservation for this process ' % charge
                    return res, res

        if not returndiag:
            logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))

        # Give numbers to legs in process
        for i in range(0, len(process.get('legs'))):
            # Make sure legs are unique
            leg = copy.copy(process.get('legs')[i])
            process.get('legs')[i] = leg
            if leg.get('number') == 0:
                leg.set('number', i + 1)

        # Copy leglist from process, so we can flip leg identities
        # without affecting the original process
        leglist = self.copy_leglist(process.get('legs'))

        for leg in leglist:
            # For the first step, ensure the tag from_group
            # is true for all legs
            leg.set('from_group', True)

            # Need to flip part-antipart for incoming particles,
            # so they are all outgoing
            if leg.get('state') == False:
                part = model.get('particle_dict')[leg.get('id')]
                leg.set('id', part.get_anti_pdg_code())

        # Calculate the maximal multiplicity of n-1>1 configurations
        # to restrict possible leg combinations
        max_multi_to1 = max([len(key) for key in \
                             model.get('ref_dict_to1').keys()])

        # Reduce the leg list and return the corresponding
        # list of vertices

        # For decay processes, generate starting from final-state
        # combined only as the last particle. This allows to use these
        # in decay chains later on.
        is_decay_proc = process.get_ninitial() == 1
        if is_decay_proc:
            part = model.get('particle_dict')[leglist[0].get('id')]
            # For decay chain legs, we want everything to combine to
            # the initial leg. This is done by only allowing the
            # initial leg to combine as a final identity.
            ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
                            (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
            # Need to set initial leg from_group to None, to make sure
            # it can only be combined at the end.
            leglist[0].set('from_group', None)
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  process.get('orders'))
        else:
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  model.get('ref_dict_to0'),
                                                  is_decay_proc,
                                                  process.get('orders'))

        # In LoopAmplitude the function below is overloaded such that it
        # converts back all DGLoopLegs to Legs. In the default tree-level
        # diagram generation, this does nothing.
        self.convert_dgleg_to_leg(reduced_leglist)

        if reduced_leglist:
            for vertex_list in reduced_leglist:
                res.append(self.create_diagram(base_objects.VertexList(vertex_list)))

        # Record whether or not we failed generation before required
        # s-channel propagators are taken into account
        failed_crossing = not res

        # Required s-channels is a list of id-lists. Select the
        # diagrams where all required s-channel propagators in any of
        # the lists are present (i.e., the different lists correspond
        # to "or", while the elements of the list correspond to
        # "and").
        if process.get('required_s_channels') and \
               process.get('required_s_channels')[0]:
            # We shouldn't look at the last vertex in each diagram,
            # since that is the n->0 vertex
            lastvx = -1
            # For decay chain processes, there is an "artificial"
            # extra vertex corresponding to particle 1=1, so we need
            # to exclude the two last vertexes.
            if is_decay_proc: lastvx = -2
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))
            # Check required s-channels for each list in required_s_channels
            old_res = res
            res = base_objects.DiagramList()
            for id_list in process.get('required_s_channels'):
                res_diags = filter(lambda diagram: \
                          all([req_s_channel in \
                               [vertex.get_s_channel_id(\
                               process.get('model'), ninitial) \
                               for vertex in diagram.get('vertices')[:lastvx]] \
                               for req_s_channel in \
                               id_list]), old_res)
                # Add diagrams only if not already in res
                res.extend([diag for diag in res_diags if diag not in res])

        # Remove all diagrams where a forbidden s-channel propagator
        # is present.
        # Note that we shouldn't look at the last vertex in each
        # diagram, since that is the n->0 vertex
        if process.get('forbidden_s_channels'):
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))
            if ninitial == 2:
                res = base_objects.DiagramList(\
                filter(lambda diagram: \
                       not any([vertex.get_s_channel_id(\
                       process.get('model'), ninitial) \
                       in process.get('forbidden_s_channels')
                       for vertex in diagram.get('vertices')[:-1]]),
                       res))
            else:
                # split since we need to avoid that the initial particle is
                # forbidden as well.
                newres = []
                for diagram in res:
                    leg1 = 1
                    # check the latest vertex to see if the leg 1 is inside; if
                    # it is we need to inverse the look-up and allow the first
                    # s-channel of the associated particles.
                    vertex = diagram.get('vertices')[-1]
                    if any([l['number'] ==1 for l in vertex.get('legs')]):
                        leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
                    to_loop = range(len(diagram.get('vertices'))-1)
                    if leg1 >1:
                        to_loop.reverse()
                    for i in to_loop:
                        vertex = diagram.get('vertices')[i]
                        if leg1:
                            # Skip (once) the vertex carrying the initial leg
                            if any([l['number'] ==leg1 for l in vertex.get('legs')]):
                                leg1 = 0
                                continue
                        if vertex.get_s_channel_id(process.get('model'), ninitial)\
                                           in process.get('forbidden_s_channels'):
                            break
                    else:
                        # No forbidden s-channel found: keep the diagram
                        newres.append(diagram)
                res = base_objects.DiagramList(newres)

        # Mark forbidden (onshell) s-channel propagators, to forbid onshell
        # generation.
        if process.get('forbidden_onsh_s_channels'):
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                              process.get('legs')))

            verts = base_objects.VertexList(sum([[vertex for vertex \
                                                  in diagram.get('vertices')[:-1]
                                     if vertex.get_s_channel_id(\
                                         process.get('model'), ninitial) \
                                     in process.get('forbidden_onsh_s_channels')] \
                                     for diagram in res], []))
            for vert in verts:
                # Use onshell = False to indicate that this s-channel is
                # forbidden
                newleg = copy.copy(vert.get('legs').pop(-1))
                newleg.set('onshell', False)
                vert.get('legs').append(newleg)

        # Set actual coupling orders for each diagram
        for diagram in res:
            diagram.calculate_orders(model)

        # Filter the diagrams according to the squared coupling order
        # constraints and possibly the negative one. Remember that
        # OrderName=-n means that the user wants to include everything up to
        # the N^(n+1)LO contribution in that order and at most one order can
        # be restricted in this way. We shall do this only if the diagrams are
        # not asked to be returned, as is the case for NLO, because in this
        # case the interferences are not necessarily among the diagrams
        # generated here only.
        if not returndiag and len(res)>0:
            res = self.apply_squared_order_constraints(res)

        if diagram_filter:
            res = self.apply_user_filter(res)

        # Replace final id=0 vertex if necessary
        if not process.get('is_decay_chain'):
            for diagram in res:
                vertices = diagram.get('vertices')
                if len(vertices) > 1 and vertices[-1].get('id') == 0:
                    # Need to "glue together" last and next-to-last
                    # vertex, by replacing the (incoming) last leg of the
                    # next-to-last vertex with the (outgoing) leg in the
                    # last vertex
                    vertices = copy.copy(vertices)
                    lastvx = vertices.pop()
                    nexttolastvertex = copy.copy(vertices.pop())
                    legs = copy.copy(nexttolastvertex.get('legs'))
                    ntlnumber = legs[-1].get('number')
                    lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
                                     lastvx.get('legs'))[0]
                    # Reset onshell in case we have forbidden s-channels
                    if lastleg.get('onshell') == False:
                        lastleg.set('onshell', None)
                    # Replace the last leg of nexttolastvertex
                    legs[-1] = lastleg
                    nexttolastvertex.set('legs', legs)
                    vertices.append(nexttolastvertex)
                    diagram.set('vertices', vertices)

        if res and not returndiag:
            logger.info("Process has %d diagrams" % len(res))

        # Trim down number of legs and vertices used to save memory
        self.trim_diagrams(diaglist=res)

        # Sort process legs according to leg number
        pertur = 'QCD'
        if self.get('process')['perturbation_couplings']:
            pertur = sorted(self.get('process')['perturbation_couplings'])[0]
        self.get('process').get('legs').sort(pert=pertur)

        # Set diagrams to res if not asked to be returned
        if not returndiag:
            self['diagrams'] = res
            return not failed_crossing
        else:
            return not failed_crossing, res
840
    def apply_squared_order_constraints(self, diag_list):
        """Applies the user specified squared order constraints on the diagram
        list in argument and return the filtered list."""

        res = copy.copy(diag_list)

        # Apply the filtering on constrained amplitude (== and >)
        # No need to iterate on this one
        for name, (value, operator) in self['process'].get('constrained_orders').items():
            res.filter_constrained_orders(name, value, operator)

        # Iterate the filtering since applying the constraint on one
        # type of coupling order can impact the filtering on a previous
        # one (relevant for the '==' type of constraint).
        while True:
            new_res = res.apply_positive_sq_orders(res,
                                 self['process'].get('squared_orders'),
                                 self['process']['sqorders_types'])
            # Exit condition: fix point reached, nothing more was filtered out
            if len(res)==len(new_res):
                break
            elif (len(new_res)>len(res)):
                # Filtering can only ever shrink the list
                raise MadGraph5Error(
                    'Inconsistency in function apply_squared_order_constraints().')
            # Actualizing the list of diagrams for the next iteration
            res = new_res

        # Now treat the negative squared order constraint (at most one)
        neg_orders = [(order, value) for order, value in \
                      self['process'].get('squared_orders').items() if value<0]
        if len(neg_orders)==1:
            neg_order, neg_value = neg_orders[0]
            # Now check any negative order constraint
            res, target_order = res.apply_negative_sq_order(res, neg_order,\
                          neg_value, self['process']['sqorders_types'][neg_order])
            # Substitute the negative value with this positive one so that
            # the resulting computed constraints appear in the print out
            # and at the output stage we no longer have to deal with
            # negative valued target orders
            self['process']['squared_orders'][neg_order]=target_order
        elif len(neg_orders)>1:
            raise InvalidCmd('At most one negative squared order constraint'+\
                             ' can be specified, not %s.'%str(neg_orders))

        return res
888
889 - def apply_user_filter(self, diag_list):
890 """Applies the user specified squared order constraints on the diagram 891 list in argument.""" 892 893 if True: 894 remove_diag = misc.plugin_import('user_filter', 895 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed', 896 fcts=['remove_diag']) 897 else: 898 #example and simple tests 899 def remove_diag(diag): 900 for vertex in diag['vertices']: #last 901 if vertex['id'] == 0: #special final vertex 902 continue 903 if vertex['legs'][-1]['number'] < 3: #this means T-channel 904 if abs(vertex['legs'][-1]['id']) <6: 905 return True 906 return False
907 908 res = diag_list.__class__() 909 nb_removed = 0 910 for diag in diag_list: 911 if remove_diag(diag): 912 nb_removed +=1 913 else: 914 res.append(diag) 915 916 if nb_removed: 917 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed) 918 919 return res
920 921 922
923 - def create_diagram(self, vertexlist):
924 """ Return a Diagram created from the vertex list. This function can be 925 overloaded by daughter classes.""" 926 return base_objects.Diagram({'vertices':vertexlist})
927
928 - def convert_dgleg_to_leg(self, vertexdoublelist):
929 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs. 930 In Amplitude, there is nothing to do. """ 931 932 return True
933
934 - def copy_leglist(self, legs):
935 """ Simply returns a copy of the leg list. This function is 936 overloaded in LoopAmplitude so that a DGLoopLeg list is returned. 937 The DGLoopLeg has some additional parameters only useful during 938 loop diagram generation""" 939 940 return base_objects.LegList(\ 941 [ copy.copy(leg) for leg in legs ])
942
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1.
        For the algorithm, see the docstring of generate_diagrams.

        Returns a list of lists of vertices (one list per generated
        diagram), or None if the branch must be discarded."""

        # Result variable which is a list of lists of vertices
        # to be added
        res = []

        # Stop condition. If LegList is None, that means that this
        # diagram must be discarded
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined in one single vertex, add this
        # vertex to res and continue.
        # Special treatment for decay chain legs
        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Extract the interaction id associated to the vertex
            vertex_ids = self.get_combined_vertices(curr_leglist,
                       copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                    leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]
            # Check for coupling orders. If orders < 0, skip vertex
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])
        # Stop condition 2: if the leglist contained exactly two particles,
        # return the result, if any, and stop.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid combinations of legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Create a list of leglists/vertices by merging combinations
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider all the pairs
        for leg_vertex_tuple in leg_vertex_list:

            # Remove forbidden particles
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                    continue

            # Check for coupling orders. If couplings < 0, skip recursion.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order < 0
                continue

            # This is where recursion happens
            # First, reduce again the leg part
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)
            # If there is a reduced diagram, combine its vertices with the
            # ones created at this level
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
1031
1032 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
1033 """Return False if the coupling orders for any coupling is < 1034 0, otherwise return the new coupling orders with the vertex 1035 orders subtracted. If coupling_orders is not given, return 1036 None (which counts as success). 1037 WEIGHTED is a special order, which corresponds to the sum of 1038 order hierarchies for the couplings. 1039 We ignore negative constraints as these cannot be taken into 1040 account on the fly but only after generation.""" 1041 1042 if not coupling_orders: 1043 return None 1044 1045 present_couplings = copy.copy(coupling_orders) 1046 for id in vertex_id_list: 1047 # Don't check for identity vertex (id = 0) 1048 if not id: 1049 continue 1050 inter = model.get("interaction_dict")[id] 1051 for coupling in inter.get('orders').keys(): 1052 # Note that we don't consider a missing coupling as a 1053 # constraint 1054 if coupling in present_couplings and \ 1055 present_couplings[coupling]>=0: 1056 # Reduce the number of couplings that are left 1057 present_couplings[coupling] -= \ 1058 inter.get('orders')[coupling] 1059 if present_couplings[coupling] < 0: 1060 # We have too many couplings of this type 1061 return False 1062 # Now check for WEIGHTED, i.e. the sum of coupling hierarchy values 1063 if 'WEIGHTED' in present_couplings and \ 1064 present_couplings['WEIGHTED']>=0: 1065 weight = sum([model.get('order_hierarchy')[c]*n for \ 1066 (c,n) in inter.get('orders').items()]) 1067 present_couplings['WEIGHTED'] -= weight 1068 if present_couplings['WEIGHTED'] < 0: 1069 # Total coupling weight too large 1070 return False 1071 1072 return present_couplings
1073
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
           If so:

           a. Append [12[34]56] to result array

           b. Split [123456] at index(first element in combination+1),
              i.e. [12],[456] and subtract combination from second half,
              i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
           (first half in step b - combination) + combination + (result
           from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
           resulting array. Example, if [13] and [45] are valid
           combinations:
           [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        # Accumulates lists mixing plain Legs and tuples of combined Legs
        res = []

        # loop over possible combination lengths (+1 is for range convention!)
        for comb_length in range(2, max_multi_to1 + 1):

            # Check the considered length is not longer than the list length
            # NOTE: longer lengths would also be too long, so we can return
            # the accumulated result directly instead of just breaking
            if comb_length > len(list_legs):
                return res

            # itertools.combinations returns all possible combinations
            # of comb_length elements from list_legs
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination is valid
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Identify the rest, create a list [comb,rest] and
                    # add it to res
                    # (shallow copy: the Leg objects themselves are shared)
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    # The combination tuple takes the place of its first leg
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Now, deal with cases with more than 1 combination

                    # First, split the list into two, according to the
                    # position of the first element in comb, and remove
                    # all elements form comb
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Create a list of type [comb,rest1,rest2(combined)]
                    res_list = res_list1
                    res_list.append(comb)
                    # This is where recursion actually happens,
                    # on the second part
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1147 1148
    def merge_comb_legs(self, comb_lists, ref_dict_to1):
        """Takes a list of allowed leg combinations as an input and returns
        a set of lists where combinations have been properly replaced
        (one list per element in the ref_dict, so that all possible intermediate
        particles are included). For each list, give the list of vertices
        corresponding to the executed merging, group the two as a tuple.

        Returns a list of (LegList, VertexList) tuples."""

        res = []

        for comb_list in comb_lists:

            # reduced_list collects legs (or lists of alternative legs),
            # vertex_list the corresponding merging vertices
            reduced_list = []
            vertex_list = []

            for entry in comb_list:

                # Act on all leg combinations
                if isinstance(entry, tuple):

                    # Build the leg object which will replace the combination:
                    # 1) leg ids is as given in the ref_dict
                    leg_vert_ids = copy.copy(ref_dict_to1[\
                        tuple(sorted([leg.get('id') for leg in entry]))])
                    # 2) number is the minimum of leg numbers involved in the
                    # combination
                    number = min([leg.get('number') for leg in entry])
                    # 3) state is final, unless there is exactly one initial
                    # state particle involved in the combination -> t-channel
                    # NOTE: len(filter(...)) relies on Python 2 filter
                    # returning a list
                    if len(filter(lambda leg: leg.get('state') == False,
                                  entry)) == 1:
                        state = False
                    else:
                        state = True
                    # 4) from_group is True, by definition

                    # Create and add the object. This is done by a
                    # separate routine, to allow overloading by
                    # daughter classes
                    new_leg_vert_ids = []
                    if leg_vert_ids:
                        new_leg_vert_ids = self.get_combined_legs(entry,
                                                                  leg_vert_ids,
                                                                  number,
                                                                  state)

                    reduced_list.append([l[0] for l in new_leg_vert_ids])


                    # Create and add the corresponding vertex
                    # Extract vertex ids corresponding to the various legs
                    # in mylegs
                    vlist = base_objects.VertexList()
                    for (myleg, vert_id) in new_leg_vert_ids:
                        # Start with the considered combination...
                        myleglist = base_objects.LegList(list(entry))
                        # ... and complete with legs after reducing
                        myleglist.append(myleg)
                        # ... and consider the correct vertex id
                        vlist.append(base_objects.Vertex(
                                         {'legs':myleglist,
                                          'id':vert_id}))

                    vertex_list.append(vlist)

                # If entry is not a combination, switch the from_group flag
                # and add it
                else:
                    cp_entry = copy.copy(entry)
                    # Need special case for from_group == None; this
                    # is for initial state leg of decay chain process
                    # (see Leg.can_combine_to_0)
                    if cp_entry.get('from_group') != None:
                        cp_entry.set('from_group', False)
                    reduced_list.append(cp_entry)

            # Flatten the obtained leg and vertex lists
            # (expand_list is a module-level helper defined elsewhere
            # in this file)
            flat_red_lists = expand_list(reduced_list)
            flat_vx_lists = expand_list(vertex_list)

            # Combine the two lists in a list of tuple
            for i in range(0, len(flat_vx_lists)):
                res.append((base_objects.LegList(flat_red_lists[i]), \
                            base_objects.VertexList(flat_vx_lists[i])))

        return res
1235
1236 - def get_combined_legs(self, legs, leg_vert_ids, number, state):
1237 """Create a set of new legs from the info given. This can be 1238 overloaded by daughter classes.""" 1239 1240 mylegs = [(base_objects.Leg({'id':leg_id, 1241 'number':number, 1242 'state':state, 1243 'from_group':True}), 1244 vert_id)\ 1245 for leg_id, vert_id in leg_vert_ids] 1246 1247 return mylegs
1248
    def get_combined_vertices(self, legs, vert_ids):
        """Allow for selection of vertex ids. This can be
        overloaded by daughter classes.

        The default implementation performs no selection and returns
        vert_ids unchanged; the legs argument is only inspected by
        daughter classes."""

        return vert_ids
1254
1255 - def trim_diagrams(self, decay_ids=[], diaglist=None):
1256 """Reduce the number of legs and vertices used in memory. 1257 When called by a diagram generation initiated by LoopAmplitude, 1258 this function should not trim the diagrams in the attribute 'diagrams' 1259 but rather a given list in the 'diaglist' argument.""" 1260 1261 legs = [] 1262 vertices = [] 1263 1264 if diaglist is None: 1265 diaglist=self.get('diagrams') 1266 1267 # Flag decaying legs in the core process by onshell = True 1268 process = self.get('process') 1269 for leg in process.get('legs'): 1270 if leg.get('state') and leg.get('id') in decay_ids: 1271 leg.set('onshell', True) 1272 1273 for diagram in diaglist: 1274 # Keep track of external legs (leg numbers already used) 1275 leg_external = set() 1276 for ivx, vertex in enumerate(diagram.get('vertices')): 1277 for ileg, leg in enumerate(vertex.get('legs')): 1278 # Ensure that only external legs get decay flag 1279 if leg.get('state') and leg.get('id') in decay_ids and \ 1280 leg.get('number') not in leg_external: 1281 # Use onshell to indicate decaying legs, 1282 # i.e. legs that have decay chains 1283 leg = copy.copy(leg) 1284 leg.set('onshell', True) 1285 try: 1286 index = legs.index(leg) 1287 except ValueError: 1288 vertex.get('legs')[ileg] = leg 1289 legs.append(leg) 1290 else: # Found a leg 1291 vertex.get('legs')[ileg] = legs[index] 1292 leg_external.add(leg.get('number')) 1293 try: 1294 index = vertices.index(vertex) 1295 diagram.get('vertices')[ivx] = vertices[index] 1296 except ValueError: 1297 vertices.append(vertex)
1298
#===============================================================================
# AmplitudeList
#===============================================================================
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """ Check the content of all processes of the amplitudes in this list to
        see if there is any which defines perturbation couplings.

        Fix: now returns False explicitly instead of implicitly
        returning None when no loop process is found, so the result is
        always a proper bool."""

        for amp in self:
            if amp.has_loop_process():
                return True
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1318
1319 #=============================================================================== 1320 # DecayChainAmplitude 1321 #=============================================================================== 1322 -class DecayChainAmplitude(Amplitude):
1323 """A list of amplitudes + a list of decay chain amplitude lists; 1324 corresponding to a ProcessDefinition with a list of decay chains 1325 """ 1326
    def default_setup(self):
        """Default values for all properties"""

        # Amplitudes of the core process (decay chains stripped)
        self['amplitudes'] = AmplitudeList()
        # One DecayChainAmplitude per decay chain attached to the core
        self['decay_chains'] = DecayChainAmplitudeList()
1332
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition

        When argument is a (possibly multi-) Process, diagram
        generation is triggered for the core process and, recursively,
        for every decay chain attached to it. Otherwise the mother
        PhysicsObject initialization is used."""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Local import to avoid a circular dependency with the loop module
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively generate the decay chain amplitudes
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error,\
                          "Decay processes can not be perturbed"
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd,\
                          "Decay chain process must have exactly one" + \
                          " incoming particle"
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Flag decaying legs in the core diagrams by onshell = True
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least some process
            # (remaining ids correspond to decays with no decaying particle
            # in the core process)
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                 "$RED Decay without corresponding particle in core process found.\n" + \
                 "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                 "Please check your process definition carefully. \n" + \
                 "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                 "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Remove unused decays from the process list
                # (iterate in reverse so removal is safe while looping)
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        # Remove the amplitudes from this decay chain
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # If no amplitudes left, remove the decay chain
                        self['decay_chains'].remove(dc)

            # Finally, write a fat warning if any decay process has
            # the decaying particle (or its antiparticle) in the final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                 "$RED Decay(s) with particle decaying to itself:\n" + \
                 '\n'.join([p.nice_string() for p in bad_procs]) + \
                 "\nPlease check your process definition carefully. \n")


        elif argument != None:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__()
1439
    def filter(self, name, value):
        """Filter for valid amplitude property values.

        Raises PhysicsObjectError if value has the wrong type for the
        given property; returns True otherwise (names not listed here
        are accepted unchanged)."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid AmplitudeList" % str(value)
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid DecayChainAmplitudeList object" % \
                      str(value)
        return True
1453
1454 - def get_sorted_keys(self):
1455 """Return diagram property names as a nicely sorted list.""" 1456 1457 return ['amplitudes', 'decay_chains']
1458 1459 # Helper functions 1460
1461 - def get_number_of_diagrams(self):
1462 """Returns number of diagrams for this amplitude""" 1463 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \ 1464 + sum(d.get_number_of_diagrams() for d in \ 1465 self.get('decay_chains'))
1466
1467 - def nice_string(self, indent = 0):
1468 """Returns a nicely formatted string of the amplitude content.""" 1469 mystr = "" 1470 for amplitude in self.get('amplitudes'): 1471 mystr = mystr + amplitude.nice_string(indent) + "\n" 1472 1473 if self.get('decay_chains'): 1474 mystr = mystr + " " * indent + "Decays:\n" 1475 for dec in self.get('decay_chains'): 1476 mystr = mystr + dec.nice_string(indent + 2) + "\n" 1477 1478 return mystr[:-1]
1479
1480 - def nice_string_processes(self, indent = 0):
1481 """Returns a nicely formatted string of the amplitude processes.""" 1482 mystr = "" 1483 for amplitude in self.get('amplitudes'): 1484 mystr = mystr + amplitude.nice_string_processes(indent) + "\n" 1485 1486 if self.get('decay_chains'): 1487 mystr = mystr + " " * indent + "Decays:\n" 1488 for dec in self.get('decay_chains'): 1489 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n" 1490 1491 return mystr[:-1]
1492
1493 - def get_ninitial(self):
1494 """Returns the number of initial state particles in the process.""" 1495 return self.get('amplitudes')[0].get('process').get_ninitial()
1496
1497 - def get_decay_ids(self):
1498 """Returns a set of all particle ids for which a decay is defined""" 1499 1500 decay_ids = [] 1501 1502 # Get all amplitudes for the decay processes 1503 for amp in sum([dc.get('amplitudes') for dc \ 1504 in self['decay_chains']], []): 1505 # For each amplitude, find the initial state leg 1506 decay_ids.append(amp.get('process').get_initial_ids()[0]) 1507 1508 # Return a list with unique ids 1509 return list(set(decay_ids))
1510
    def has_loop_process(self):
        """ Returns whether this amplitude has a loop process.

        Delegates to AmplitudeList.has_any_loop_process on the core
        amplitudes."""
        return self['amplitudes'].has_any_loop_process()
1514
1515 - def get_amplitudes(self):
1516 """Recursive function to extract all amplitudes for this process""" 1517 1518 amplitudes = AmplitudeList() 1519 1520 amplitudes.extend(self.get('amplitudes')) 1521 for decay in self.get('decay_chains'): 1522 amplitudes.extend(decay.get_amplitudes()) 1523 1524 return amplitudes
1525
#===============================================================================
# DecayChainAmplitudeList
#===============================================================================
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects
    """

    def is_valid_element(self, obj):
        """Test if object obj is a valid DecayChainAmplitude for the list."""

        return isinstance(obj, DecayChainAmplitude)
1538
1539 1540 #=============================================================================== 1541 # MultiProcess 1542 #=============================================================================== 1543 -class MultiProcess(base_objects.PhysicsObject):
1544 """MultiProcess: list of process definitions 1545 list of processes (after cleaning) 1546 list of amplitudes (after generation) 1547 """ 1548
    def default_setup(self):
        """Default values for all properties

        NOTE(review): 'loop_filter' and 'diagram_filter' are assigned
        directly in __init__ without appearing here — presumably
        deliberate, since filter() performs no validation for them."""

        self['process_definitions'] = base_objects.ProcessDefinitionList()
        # self['amplitudes'] can be an AmplitudeList or a
        # DecayChainAmplitudeList, depending on whether there are
        # decay chains in the process definitions or not.
        self['amplitudes'] = AmplitudeList()
        # Flag for whether to combine IS mirror processes together
        self['collect_mirror_procs'] = False
        # List of quark flavors where we ignore processes with at
        # least 6 quarks (three quark lines)
        self['ignore_six_quark_processes'] = []
        # Allow to use the model parameter numerical value for optimization.
        # This is currently used for 1->N generation (check mass).
        self['use_numerical'] = False
1565
1566 - def __init__(self, argument=None, collect_mirror_procs = False, 1567 ignore_six_quark_processes = [], optimize=False, 1568 loop_filter=None, diagram_filter=None):
1569 """Allow initialization with ProcessDefinition or 1570 ProcessDefinitionList 1571 optimize allows to use param_card information. (usefull for 1-.N)""" 1572 1573 if isinstance(argument, base_objects.ProcessDefinition): 1574 super(MultiProcess, self).__init__() 1575 self['process_definitions'].append(argument) 1576 elif isinstance(argument, base_objects.ProcessDefinitionList): 1577 super(MultiProcess, self).__init__() 1578 self['process_definitions'] = argument 1579 elif argument != None: 1580 # call the mother routine 1581 super(MultiProcess, self).__init__(argument) 1582 else: 1583 # call the mother routine 1584 super(MultiProcess, self).__init__() 1585 1586 self['collect_mirror_procs'] = collect_mirror_procs 1587 self['ignore_six_quark_processes'] = ignore_six_quark_processes 1588 self['use_numerical'] = optimize 1589 self['loop_filter'] = loop_filter 1590 self['diagram_filter'] = diagram_filter # only True/False so far 1591 1592 if isinstance(argument, base_objects.ProcessDefinition) or \ 1593 isinstance(argument, base_objects.ProcessDefinitionList): 1594 # Generate the diagrams 1595 self.get('amplitudes')
1596 1597
    def filter(self, name, value):
        """Filter for valid process property values.

        Raises PhysicsObjectError if value has the wrong type for the
        given property; returns True otherwise (names not listed here
        are accepted unchanged)."""

        if name == 'process_definitions':
            if not isinstance(value, base_objects.ProcessDefinitionList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid ProcessDefinitionList object" % str(value)

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid AmplitudeList object" % str(value)

        if name in ['collect_mirror_procs']:
            if not isinstance(value, bool):
                raise self.PhysicsObjectError, \
                      "%s is not a valid boolean" % str(value)

        if name == 'ignore_six_quark_processes':
            if not isinstance(value, list):
                raise self.PhysicsObjectError, \
                      "%s is not a valid list" % str(value)

        return True
1622
    def get(self, name):
        """Get the value of the property name.

        The 'amplitudes' property is generated lazily on first access:
        process definitions with decay chains are wrapped in a
        DecayChainAmplitude each, all others are expanded through
        generate_multi_amplitudes."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process
                    # Store amplitude(s) as DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        diagram_filter=self['diagram_filter']))
                else:
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        self['use_numerical'],
                                        loop_filter=self['loop_filter'],
                                        diagram_filter=self['diagram_filter']))

        return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1646
1647 - def get_sorted_keys(self):
1648 """Return process property names as a nicely sorted list.""" 1649 1650 return ['process_definitions', 'amplitudes']
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.

        Returns an AmplitudeList; raises NoDiagramException (or the
        last InvalidCmd) if no process yields any diagram."""
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Set automatic coupling orders
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition,
                                                           diagram_filter))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs and success_procs are sorted processes that have
        # already failed/succeeded based on crossing symmetry
        failed_procs = []
        success_procs = []
        # Complete processes, for identification of mirror processes
        non_permuted_procs = []
        # permutations keeps the permutations of the crossed processes
        permutations = []

        # Store the diagram tags for processes, to allow for
        # identifying identical matrix elements already at this stage.
        model = process_definition['model']

        islegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == False]
        fslegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Multi-leg id lists for initial/final state, and polarizations
        # of the final state legs (paired with the ids below)
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]
        polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
                  if leg['state'] == True]
        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [\
                base_objects.Leg({'id':id, 'state': False,
                                  'polarization': islegs[i]['polarization']})
                for i,id in enumerate(prod)]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = set()

            for prod in itertools.product(*fsids):
                # (id, polarization) pairs identify a final state
                tag = zip(prod, polids)
                tag = sorted(tag)
                # Remove double counting between final states
                if tuple(tag) in red_fsidlist:
                    continue

                red_fsidlist.add(tuple(tag))
                # Generate leg list for process
                leg_list = [copy.copy(leg) for leg in islegs]
                leg_list.extend([\
                    base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                    for i,id in enumerate(prod)])

                legs = base_objects.LegList(leg_list)

                # Check for crossed processes
                # sorted_legs: outgoing ids in sorted order (crossing key),
                # permutation: the leg positions realizing that order
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Check for six-quark processes
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # Check if crossed process has already failed,
                # in that case don't check process
                if sorted_legs in failed_procs:
                    continue

                # If allowed check mass validity [assume 1->N]
                if use_numerical:
                    # check that final state has lower mass than initial state
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup process
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                   process_definition.get_ninitial() == 2:
                    # Check if mirrored process is already generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                                amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Check for successful crossings, unless we have specified
                # properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain') and not diagram_filter:
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # The relabeling of legs for loop amplitudes is cumbersome
                        # and does not save so much time. It is disable here and
                        # we use the key 'loop_diagrams' to decide whether
                        # it is an instance of LoopAmplitude.
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing found, just continue
                        pass
                    else:
                        # Found crossing - reuse amplitude
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create new amplitude
                amplitude = cls.get_amplitude_from_proc(process,
                                                     loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException, \
                   "No amplitudes generated from process %s. Please enter a valid process" % \
                      process_definition.nice_string()


        # Return the produced amplitudes
        return amplitudes
    @classmethod
    def get_amplitude_from_proc(cls,proc,**opts):
        """ Return the correct amplitude type according to the characteristics of
        the process proc. The only option that could be specified here is
        loop_filter and it is of course not relevant for a tree amplitude."""

        # opts (e.g. loop_filter) are deliberately ignored at tree level;
        # loop-capable daughter classes override this hook and use them
        return Amplitude({"process": proc})
1857 1858 1859 @staticmethod
1860 - def find_optimal_process_orders(process_definition, diagram_filter=False):
1861 """Find the minimal WEIGHTED order for this set of processes. 1862 1863 The algorithm: 1864 1865 1) Check the coupling hierarchy of the model. Assign all 1866 particles to the different coupling hierarchies so that a 1867 particle is considered to be in the highest hierarchy (i.e., 1868 with lowest value) where it has an interaction. 1869 1870 2) Pick out the legs in the multiprocess according to the 1871 highest hierarchy represented (so don't mix particles from 1872 different hierarchy classes in the same multiparticles!) 1873 1874 3) Find the starting maximum WEIGHTED order as the sum of the 1875 highest n-2 weighted orders 1876 1877 4) Pick out required s-channel particle hierarchies, and use 1878 the highest of the maximum WEIGHTED order from the legs and 1879 the minimum WEIGHTED order extracted from 2*s-channel 1880 hierarchys plus the n-2-2*(number of s-channels) lowest 1881 leg weighted orders. 1882 1883 5) Run process generation with the WEIGHTED order determined 1884 in 3)-4) - # final state gluons, with all gluons removed from 1885 the final state 1886 1887 6) If no process is found, increase WEIGHTED order by 1 and go 1888 back to 5), until we find a process which passes. Return that 1889 order. 1890 1891 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1. 
1892 If still no process has passed, return 1893 WEIGHTED = (n-2)*(highest hierarchy) 1894 """ 1895 1896 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1897 "%s not valid ProcessDefinition object" % \ 1898 repr(process_definition) 1899 1900 processes = base_objects.ProcessList() 1901 amplitudes = AmplitudeList() 1902 1903 # If there are already couplings defined, return 1904 if process_definition.get('orders') or \ 1905 process_definition.get('overall_orders') or \ 1906 process_definition.get('NLO_mode')=='virt': 1907 return process_definition.get('orders') 1908 1909 # If this is a decay process (and not a decay chain), return 1910 if process_definition.get_ninitial() == 1 and not \ 1911 process_definition.get('is_decay_chain'): 1912 return process_definition.get('orders') 1913 1914 logger.info("Checking for minimal orders which gives processes.") 1915 logger.info("Please specify coupling orders to bypass this step.") 1916 1917 # Calculate minimum starting guess for WEIGHTED order 1918 max_order_now, particles, hierarchy = \ 1919 process_definition.get_minimum_WEIGHTED() 1920 coupling = 'WEIGHTED' 1921 1922 model = process_definition.get('model') 1923 1924 # Extract the initial and final leg ids 1925 isids = [leg['ids'] for leg in \ 1926 filter(lambda leg: leg['state'] == False, process_definition['legs'])] 1927 fsids = [leg['ids'] for leg in \ 1928 filter(lambda leg: leg['state'] == True, process_definition['legs'])] 1929 1930 max_WEIGHTED_order = \ 1931 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED()) 1932 # get the definition of the WEIGHTED 1933 hierarchydef = process_definition['model'].get('order_hierarchy') 1934 tmp = [] 1935 hierarchy = hierarchydef.items() 1936 hierarchy.sort() 1937 for key, value in hierarchydef.items(): 1938 if value>1: 1939 tmp.append('%s*%s' % (value,key)) 1940 else: 1941 tmp.append('%s' % key) 1942 wgtdef = '+'.join(tmp) 1943 # Run diagram generation with increasing max_order_now until 1944 # we manage 
to get diagrams 1945 while max_order_now < max_WEIGHTED_order: 1946 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef)) 1947 1948 oldloglevel = logger.level 1949 logger.setLevel(logging.WARNING) 1950 1951 # failed_procs are processes that have already failed 1952 # based on crossing symmetry 1953 failed_procs = [] 1954 # Generate all combinations for the initial state 1955 for prod in apply(itertools.product, isids): 1956 islegs = [ base_objects.Leg({'id':id, 'state': False}) \ 1957 for id in prod] 1958 1959 # Generate all combinations for the final state, and make 1960 # sure to remove double counting 1961 1962 red_fsidlist = [] 1963 1964 for prod in apply(itertools.product, fsids): 1965 1966 # Remove double counting between final states 1967 if tuple(sorted(prod)) in red_fsidlist: 1968 continue 1969 1970 red_fsidlist.append(tuple(sorted(prod))); 1971 1972 # Remove gluons from final state if QCD is among 1973 # the highest coupling hierarchy 1974 nglue = 0 1975 if 21 in particles[0]: 1976 nglue = len([id for id in prod if id == 21]) 1977 prod = [id for id in prod if id != 21] 1978 1979 # Generate leg list for process 1980 leg_list = [copy.copy(leg) for leg in islegs] 1981 1982 leg_list.extend([\ 1983 base_objects.Leg({'id':id, 'state': True}) \ 1984 for id in prod]) 1985 1986 legs = base_objects.LegList(leg_list) 1987 1988 # Set summed coupling order according to max_order_now 1989 # subtracting the removed gluons 1990 coupling_orders_now = {coupling: max_order_now - \ 1991 nglue * model['order_hierarchy']['QCD']} 1992 1993 # Setup process 1994 process = base_objects.Process({\ 1995 'legs':legs, 1996 'model':model, 1997 'id': process_definition.get('id'), 1998 'orders': coupling_orders_now, 1999 'required_s_channels': \ 2000 process_definition.get('required_s_channels'), 2001 'forbidden_onsh_s_channels': \ 2002 process_definition.get('forbidden_onsh_s_channels'), 2003 'sqorders_types': \ 2004 
process_definition.get('sqorders_types'), 2005 'squared_orders': \ 2006 process_definition.get('squared_orders'), 2007 'split_orders': \ 2008 process_definition.get('split_orders'), 2009 'forbidden_s_channels': \ 2010 process_definition.get('forbidden_s_channels'), 2011 'forbidden_particles': \ 2012 process_definition.get('forbidden_particles'), 2013 'is_decay_chain': \ 2014 process_definition.get('is_decay_chain'), 2015 'overall_orders': \ 2016 process_definition.get('overall_orders'), 2017 'split_orders': \ 2018 process_definition.get('split_orders')}) 2019 2020 # Check for couplings with given expansion orders 2021 process.check_expansion_orders() 2022 2023 # Check for crossed processes 2024 sorted_legs = sorted(legs.get_outgoing_id_list(model)) 2025 # Check if crossed process has already failed 2026 # In that case don't check process 2027 if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'): 2028 continue 2029 2030 amplitude = Amplitude({'process': process}) 2031 try: 2032 amplitude.generate_diagrams(diagram_filter=diagram_filter) 2033 except InvalidCmd, error: 2034 failed_procs.append(tuple(sorted_legs)) 2035 else: 2036 if amplitude.get('diagrams'): 2037 # We found a valid amplitude. Return this order number 2038 logger.setLevel(oldloglevel) 2039 return {coupling: max_order_now} 2040 else: 2041 failed_procs.append(tuple(sorted_legs)) 2042 # No processes found, increase max_order_now 2043 max_order_now += 1 2044 logger.setLevel(oldloglevel) 2045 2046 # If no valid processes found with nfinal-1 couplings, return maximal 2047 return {coupling: max_order_now}
2048 2049 @staticmethod
2050 - def cross_amplitude(amplitude, process, org_perm, new_perm):
2051 """Return the amplitude crossed with the permutation new_perm""" 2052 # Create dict from original leg numbers to new leg numbers 2053 perm_map = dict(zip(org_perm, new_perm)) 2054 # Initiate new amplitude 2055 new_amp = copy.copy(amplitude) 2056 # Number legs 2057 for i, leg in enumerate(process.get('legs')): 2058 leg.set('number', i+1) 2059 # Set process 2060 new_amp.set('process', process) 2061 # Now replace the leg numbers in the diagrams 2062 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 2063 process.get('legs'),) for \ 2064 d in new_amp.get('diagrams')]) 2065 new_amp.set('diagrams', diagrams) 2066 new_amp.trim_diagrams() 2067 2068 # Make sure to reset mirror process 2069 new_amp.set('has_mirror_process', False) 2070 2071 return new_amp
2072
2073 #=============================================================================== 2074 # Global helper methods 2075 #=============================================================================== 2076 2077 -def expand_list(mylist):
2078 """Takes a list of lists and elements and returns a list of flat lists. 2079 Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]] 2080 """ 2081 2082 # Check that argument is a list 2083 assert isinstance(mylist, list), "Expand_list argument must be a list" 2084 2085 res = [] 2086 2087 tmplist = [] 2088 for item in mylist: 2089 if isinstance(item, list): 2090 tmplist.append(item) 2091 else: 2092 tmplist.append([item]) 2093 2094 for item in apply(itertools.product, tmplist): 2095 res.append(list(item)) 2096 2097 return res
2098
2099 -def expand_list_list(mylist):
2100 """Recursive function. Takes a list of lists and lists of lists 2101 and returns a list of flat lists. 2102 Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]] 2103 """ 2104 2105 res = [] 2106 2107 if not mylist or len(mylist) == 1 and not mylist[0]: 2108 return [[]] 2109 2110 # Check the first element is at least a list 2111 assert isinstance(mylist[0], list), \ 2112 "Expand_list_list needs a list of lists and lists of lists" 2113 2114 # Recursion stop condition, one single element 2115 if len(mylist) == 1: 2116 if isinstance(mylist[0][0], list): 2117 return mylist[0] 2118 else: 2119 return mylist 2120 2121 if isinstance(mylist[0][0], list): 2122 for item in mylist[0]: 2123 # Here the recursion happens, create lists starting with 2124 # each element of the first item and completed with 2125 # the rest expanded 2126 for rest in expand_list_list(mylist[1:]): 2127 reslist = copy.copy(item) 2128 reslist.extend(rest) 2129 res.append(reslist) 2130 else: 2131 for rest in expand_list_list(mylist[1:]): 2132 reslist = copy.copy(mylist[0]) 2133 reslist.extend(rest) 2134 res.append(reslist) 2135 2136 2137 return res
2138