Package madgraph :: Package core :: Module diagram_generation
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
  18  multiparticle definitions. DiagramTag allows to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
  22  import array 
  23  import copy 
  24  import itertools 
  25  import logging 
  26   
  27  import madgraph.core.base_objects as base_objects 
  28  import madgraph.various.misc as misc 
  29  from madgraph import InvalidCmd, MadGraph5Error 
  30   
  31  logger = logging.getLogger('madgraph.diagram_generation') 
# Raised when diagram generation produces no diagrams for the requested process.
class NoDiagramException(InvalidCmd): pass
35
#===============================================================================
# DiagramTag mother class
#===============================================================================

class DiagramTag(object):
    """Class to tag diagrams based on objects with some __lt__ measure, e.g.
    PDG code/interaction id (for comparing diagrams from the same amplitude),
    or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
    Algorithm: Create chains starting from external particles:
    1 \ / 6
    2 /\______/\ 7
    3_ / | \_ 8
    4 / 5 \_ 9
    \ 10
    gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
    (((1,2,id12),(3,4,id34)),id1234),
    5,id91086712345)
    where idN is the id of the corresponding interaction. The ordering within
    chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
    depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
    The determination of central vertex is based on minimizing the chain length
    for the longest subchain.
    This gives a unique tag which can be used to identify diagrams
    (instead of symmetry), as well as identify identical matrix elements from
    different processes."""
    # Local exception type so callers can distinguish tagging failures
    # from other errors.
    class DiagramTagError(Exception):
        """Exception for any problems in DiagramTags"""
        pass
65
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex."""

        # leg_dict keeps track of the intermediate particles: maps a
        # propagator leg number to the chain link built for it so far
        leg_dict = {}
        # Create the chain which will be the diagram tag
        for vertex in diagram.get('vertices'):
            # Only add incoming legs (the last leg of a vertex is outgoing)
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # If last vertex, all legs are incoming
                legs = vertex.get('legs')
            # Add links corresponding to the relevant legs; external legs
            # get fresh end links, propagators reuse their stored link
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                           DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))
            # Add vertex to leg_dict if not last one
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The resulting link is the hypothetical result
        self.tag = link

        # Now make sure to find the central vertex in the diagram,
        # defined by the longest leg being as short as possible
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Identify the longest chain in the tag (links are kept
            # ordered, so the first link is the deepest)
            longest_chain = self.tag.links[0]
            # Create a new link corresponding to moving one step
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))
            # Create a new final vertex in the direction of the longest link
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # Switch to new tag, continue search
                self.tag = other_link
            else:
                # We have found the central vertex
                done = True
121
122 - def get_external_numbers(self):
123 """Get the order of external particles in this tag""" 124 125 return self.tag.get_external_numbers()
126
127 - def diagram_from_tag(self, model):
128 """Output a diagram from a DiagramTag. Note that each daughter 129 class must implement the static functions id_from_vertex_id 130 (if the vertex id is something else than an integer) and 131 leg_from_link (to pass the correct info from an end link to a 132 leg).""" 133 134 # Create the vertices, starting from the final vertex 135 diagram = base_objects.Diagram({'vertices': \ 136 self.vertices_from_link(self.tag, 137 model, 138 True)}) 139 diagram.calculate_orders(model) 140 return diagram
141 142 @classmethod 177 178 @classmethod
179 - def legPDGs_from_vertex_id(cls, vertex_id,model):
180 """Returns the list of external PDGs of the interaction corresponding 181 to this vertex_id.""" 182 183 # In case we have to deal with a regular vertex, we return the list 184 # external PDGs as given by the model information on that integer 185 # vertex id. 186 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]): 187 return vertex_id[2]['PDGs'] 188 else: 189 return [part.get_pdg_code() for part in model.get_interaction( 190 cls.id_from_vertex_id(vertex_id)).get('particles')]
191 192 @classmethod
193 - def leg_from_legs(cls,legs, vertex_id, model):
194 """Return a leg from a leg list and the model info""" 195 196 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model)) 197 198 # Extract the resulting pdg code from the interaction pdgs 199 for pdg in [leg.get('id') for leg in legs]: 200 pdgs.remove(pdg) 201 202 assert len(pdgs) == 1 203 # Prepare the new leg properties 204 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code() 205 number = min([l.get('number') for l in legs]) 206 # State is False for t-channel, True for s-channel 207 state = (len([l for l in legs if l.get('state') == False]) != 1) 208 # Note that this needs to be done before combining decay chains 209 onshell= False 210 211 return base_objects.Leg({'id': pdg, 212 'number': number, 213 'state': state, 214 'onshell': onshell})
215 216 @classmethod 229 230 @staticmethod 243 244 @staticmethod
245 - def id_from_vertex_id(vertex_id):
246 """Return the numerical vertex id from a link.vertex_id""" 247 248 return vertex_id[0][0]
249 250 @staticmethod
    @staticmethod
    def loop_info_from_vertex_id(vertex_id):
        """Return the loop_info stored in this vertex id. Notice that the
        IdentifyME tag does not store the loop_info, but should normally never
        need access to it."""

        # Third entry of the vertex id tuple (see vertex_id_from_vertex)
        return vertex_id[2]
257 258 @staticmethod
259 - def reorder_permutation(perm, start_perm):
260 """Reorder a permutation with respect to start_perm. Note that 261 both need to start from 1.""" 262 if perm == start_perm: 263 return range(len(perm)) 264 order = [i for (p,i) in \ 265 sorted([(p,i) for (i,p) in enumerate(perm)])] 266 return [start_perm[i]-1 for i in order]
267 268 @staticmethod 279 280 @staticmethod
281 - def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
282 """Returns the default vertex id: just the interaction id 283 Note that in the vertex id, like the leg, only the first entry is 284 taken into account in the tag comparison, while the second is for 285 storing information that is not to be used in comparisons and the 286 third for additional info regarding the shrunk loop vertex.""" 287 288 if isinstance(vertex,base_objects.ContractedVertex): 289 # return (vertex.get('id'),(),{'PDGs':vertex.get('PDGs')}) 290 return ((vertex.get('id'),vertex.get('loop_tag')),(), 291 {'PDGs':vertex.get('PDGs')}) 292 else: 293 return ((vertex.get('id'),()),(),{})
294 295 @staticmethod
    @staticmethod
    def flip_vertex(new_vertex, old_vertex, links):
        """Returns the default vertex flip (used when shifting the central
        vertex in __init__): just the new_vertex, unchanged."""
        return new_vertex
299
300 - def __eq__(self, other):
301 """Equal if same tag""" 302 if type(self) != type(other): 303 return False 304 return self.tag == other.tag
305
    def __ne__(self, other):
        """Inverse of __eq__ (must be defined explicitly in Python 2)."""
        return not self.__eq__(other)
308
    def __str__(self):
        """String form is the nested-tuple representation of the tag chain."""
        return str(self.tag)
311
    def __lt__(self, other):
        """Ordering delegates to the underlying tag chains."""
        return self.tag < other.tag
314
    def __gt__(self, other):
        """Ordering delegates to the underlying tag chains."""
        return self.tag > other.tag
    # Debug representation mirrors str(): show the tag chain.
    __repr__ = __str__
319 405
#===============================================================================
# Amplitude
#===============================================================================
class Amplitude(base_objects.PhysicsObject):
    """Amplitude: process + list of diagrams (ordered)
    Initialize with a process, then call generate_diagrams() to
    generate the diagrams for the amplitude
    """
415 - def default_setup(self):
416 """Default values for all properties""" 417 418 self['process'] = base_objects.Process() 419 self['diagrams'] = None 420 # has_mirror_process is True if the same process but with the 421 # two incoming particles interchanged has been generated 422 self['has_mirror_process'] = False
423
424 - def __init__(self, argument=None):
425 """Allow initialization with Process""" 426 if isinstance(argument, base_objects.Process): 427 super(Amplitude, self).__init__() 428 self.set('process', argument) 429 self.generate_diagrams() 430 elif argument != None: 431 # call the mother routine 432 super(Amplitude, self).__init__(argument) 433 else: 434 # call the mother routine 435 super(Amplitude, self).__init__()
436
437 - def filter(self, name, value):
438 """Filter for valid amplitude property values.""" 439 440 if name == 'process': 441 if not isinstance(value, base_objects.Process): 442 raise self.PhysicsObjectError, \ 443 "%s is not a valid Process object" % str(value) 444 if name == 'diagrams': 445 if not isinstance(value, base_objects.DiagramList): 446 raise self.PhysicsObjectError, \ 447 "%s is not a valid DiagramList object" % str(value) 448 if name == 'has_mirror_process': 449 if not isinstance(value, bool): 450 raise self.PhysicsObjectError, \ 451 "%s is not a valid boolean" % str(value) 452 return True
453
454 - def get(self, name):
455 """Get the value of the property name.""" 456 457 if name == 'diagrams' and self[name] == None: 458 # Have not yet generated diagrams for this process 459 if self['process']: 460 self.generate_diagrams() 461 462 return super(Amplitude, self).get(name)
463 # return Amplitude.__bases__[0].get(self, name) #return the mother routine 464 465
466 - def get_sorted_keys(self):
467 """Return diagram property names as a nicely sorted list.""" 468 469 return ['process', 'diagrams', 'has_mirror_process']
470
    def get_number_of_diagrams(self):
        """Returns number of diagrams for this amplitude (get() generates
        them first when needed)."""
        return len(self.get('diagrams'))
474
    def get_amplitudes(self):
        """Return an AmplitudeList with just this amplitude.
        Needed for DecayChainAmplitude."""

        return AmplitudeList([self])
480
    def nice_string(self, indent=0):
        """Returns a nicely formatted string of the amplitude content:
        the process description followed by the diagram listing."""
        return self.get('process').nice_string(indent) + "\n" + \
               self.get('diagrams').nice_string(indent)
485
    def nice_string_processes(self, indent=0):
        """Returns a nicely formatted string of the amplitude process."""
        return self.get('process').nice_string(indent)
489
    def get_ninitial(self):
        """Returns the number of initial state particles in the process."""
        return self.get('process').get_ninitial()
493
    def has_loop_process(self):
        """Returns whether this amplitude has a loop process (truthy when
        the process carries perturbation couplings)."""

        return self.get('process').get('perturbation_couplings')
498
    def generate_diagrams(self, returndiag=False, diagram_filter=False):
        """Generate diagrams. Algorithm:

        1. Define interaction dictionaries:
          * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
          * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1

        2. Set flag from_group=true for all external particles.
           Flip particle/anti particle for incoming particles.

        3. If there is a dictionary n->0 with n=number of external
           particles, create if possible the combination [(1,2,3,4,...)]
           with *at least two* from_group==true. This will give a
           finished (set of) diagram(s) (done by reduce_leglist)

        4. Create all allowed groupings of particles with at least one
           from_group==true (according to dictionaries n->1):
           [(1,2),3,4...],[1,(2,3),4,...],...,
           [(1,2),(3,4),...],...,[(1,2,3),4,...],...
           (done by combine_legs)

        5. Replace each group with a (list of) new particle(s) with number
           n = min(group numbers). Set from_group true for these
           particles and false for all other particles. Store vertex info.
           (done by merge_comb_legs)

        6. Stop algorithm when at most 2 particles remain.
           Return all diagrams (lists of vertices).

        7. Repeat from 3 (recursion done by reduce_leglist)

        8. Replace final p=p vertex

        Be aware that the resulting vertices have all particles outgoing,
        so need to flip for incoming particles when used.

        SPECIAL CASE: For A>BC... processes which are legs in decay
        chains, we need to ensure that BC... combine first, giving A=A
        as a final vertex. This case is defined by the Process
        property is_decay_chain = True.
        This function can also be called by the generate_diagram function
        of LoopAmplitudes, in which case the generated diagrams here must not
        be directly assigned to the 'diagrams' attribute but returned as a
        DiagramList by the function. This is controlled by the argument
        returndiag.
        """

        process = self.get('process')
        model = process.get('model')
        legs = process.get('legs')

        # Make sure orders is the minimum of orders and overall_orders
        for key in process.get('overall_orders').keys():
            try:
                process.get('orders')[key] = \
                                min(process.get('orders')[key],
                                    process.get('overall_orders')[key])
            except KeyError:
                process.get('orders')[key] = process.get('overall_orders')[key]

        assert model.get('particles'), \
               "particles are missing in model: %s" % model.get('particles')

        assert model.get('interactions'), \
               "interactions are missing in model"

        res = base_objects.DiagramList()

        # First check that the number of fermions is even
        if len(filter(lambda leg: model.get('particle_dict')[\
                      leg.get('id')].is_fermion(), legs)) % 2 == 1:
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd, 'The number of fermion is odd'
            else:
                return False, res

        # Then check same number of incoming and outgoing fermions (if
        # no Majorana particles in model)
        if not model.get('got_majoranas') and \
           len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
           len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
            else:
                return False, res

        # Finally check that every charge conserved by all interactions is
        # globally conserved for this process.
        for charge in model.get('conserved_charge'):
            total = 0
            for leg in legs:
                part = model.get('particle_dict')[leg.get('id')]
                try:
                    value = part.get(charge)
                except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
                    try:
                        value = getattr(part, charge)
                    except AttributeError:
                        # Particle does not carry this charge: counts as zero
                        value = 0

                # Sign flips for antiparticles crossed to the other side
                if (leg.get('id') != part['pdg_code']) != leg['state']:
                    total -= value
                else:
                    total += value

            if abs(total) > 1e-10:
                if not returndiag:
                    self['diagrams'] = res
                    raise InvalidCmd, 'No %s conservation for this process ' % charge
                    # NOTE(review): unreachable after the raise above
                    return res
                else:
                    raise InvalidCmd, 'No %s conservation for this process ' % charge
                    # NOTE(review): unreachable after the raise above
                    return res, res

        if not returndiag:
            logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))

        # Give numbers to legs in process
        for i in range(0, len(process.get('legs'))):
            # Make sure legs are unique
            leg = copy.copy(process.get('legs')[i])
            process.get('legs')[i] = leg
            if leg.get('number') == 0:
                leg.set('number', i + 1)

        # Copy leglist from process, so we can flip leg identities
        # without affecting the original process
        leglist = self.copy_leglist(process.get('legs'))

        for leg in leglist:

            # For the first step, ensure the tag from_group
            # is true for all legs
            leg.set('from_group', True)

            # Need to flip part-antipart for incoming particles,
            # so they are all outgoing
            if leg.get('state') == False:
                part = model.get('particle_dict')[leg.get('id')]
                leg.set('id', part.get_anti_pdg_code())

        # Calculate the maximal multiplicity of n-1>1 configurations
        # to restrict possible leg combinations
        max_multi_to1 = max([len(key) for key in \
                             model.get('ref_dict_to1').keys()])

        # Reduce the leg list and return the corresponding
        # list of vertices

        # For decay processes, generate starting from final-state
        # combined only as the last particle. This allows to use these
        # in decay chains later on.
        is_decay_proc = process.get_ninitial() == 1
        if is_decay_proc:
            part = model.get('particle_dict')[leglist[0].get('id')]
            # For decay chain legs, we want everything to combine to
            # the initial leg. This is done by only allowing the
            # initial leg to combine as a final identity.
            ref_dict_to0 = {(part.get_pdg_code(), part.get_anti_pdg_code()):[0],
                            (part.get_anti_pdg_code(), part.get_pdg_code()):[0]}
            # Need to set initial leg from_group to None, to make sure
            # it can only be combined at the end.
            leglist[0].set('from_group', None)
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  process.get('orders'))
        else:
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  model.get('ref_dict_to0'),
                                                  is_decay_proc,
                                                  process.get('orders'))

        # In LoopAmplitude the function below is overloaded such that it
        # converts back all DGLoopLegs to Legs. In the default tree-level
        # diagram generation, this does nothing.
        self.convert_dgleg_to_leg(reduced_leglist)

        if reduced_leglist:
            for vertex_list in reduced_leglist:
                res.append(self.create_diagram(base_objects.VertexList(vertex_list)))

        # Record whether or not we failed generation before required
        # s-channel propagators are taken into account
        failed_crossing = not res

        # Required s-channels is a list of id-lists. Select the
        # diagrams where all required s-channel propagators in any of
        # the lists are present (i.e., the different lists correspond
        # to "or", while the elements of the list correspond to
        # "and").
        if process.get('required_s_channels') and \
               process.get('required_s_channels')[0]:
            # We shouldn't look at the last vertex in each diagram,
            # since that is the n->0 vertex
            lastvx = -1
            # For decay chain processes, there is an "artificial"
            # extra vertex corresponding to particle 1=1, so we need
            # to exclude the two last vertexes.
            if is_decay_proc: lastvx = -2
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))
            # Check required s-channels for each list in required_s_channels
            old_res = res
            res = base_objects.DiagramList()
            for id_list in process.get('required_s_channels'):
                res_diags = filter(lambda diagram: \
                          all([req_s_channel in \
                               [vertex.get_s_channel_id(\
                               process.get('model'), ninitial) \
                               for vertex in diagram.get('vertices')[:lastvx]] \
                               for req_s_channel in \
                               id_list]), old_res)
                # Add diagrams only if not already in res
                res.extend([diag for diag in res_diags if diag not in res])

        # Remove all diagrams where a forbidden s-channel propagator
        # is present.
        # Note that we shouldn't look at the last vertex in each
        # diagram, since that is the n->0 vertex
        if process.get('forbidden_s_channels'):
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))
            if ninitial == 2:
                res = base_objects.DiagramList(\
                    filter(lambda diagram: \
                           not any([vertex.get_s_channel_id(\
                           process.get('model'), ninitial) \
                           in process.get('forbidden_s_channels')
                           for vertex in diagram.get('vertices')[:-1]]),
                           res))
            else:
                # split since we need to avoid that the initial particle is
                # forbidden as well.
                newres = []
                for diagram in res:
                    leg1 = 1
                    # check the latest vertex to see if the leg 1 is inside; if
                    # it is we need to inverse the look-up and allow the first
                    # s-channel of the associate particles.
                    vertex = diagram.get('vertices')[-1]
                    if any([l['number'] == 1 for l in vertex.get('legs')]):
                        leg1 = [l['number'] for l in vertex.get('legs') if l['number'] != 1][0]
                    to_loop = range(len(diagram.get('vertices'))-1)
                    if leg1 > 1:
                        to_loop.reverse()
                    for i in to_loop:
                        vertex = diagram.get('vertices')[i]
                        if leg1:
                            if any([l['number'] == leg1 for l in vertex.get('legs')]):
                                # Found the propagator attached to leg 1:
                                # allow this first s-channel, then test the rest
                                leg1 = 0
                                continue
                        if vertex.get_s_channel_id(process.get('model'), ninitial)\
                           in process.get('forbidden_s_channels'):
                            break
                    else:
                        # for-else: no forbidden s-channel found, keep diagram
                        newres.append(diagram)
                res = base_objects.DiagramList(newres)

        # Mark forbidden (onshell) s-channel propagators, to forbid onshell
        # generation.
        if process.get('forbidden_onsh_s_channels'):
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))

            verts = base_objects.VertexList(sum([[vertex for vertex \
                   in diagram.get('vertices')[:-1]
                   if vertex.get_s_channel_id(\
                   process.get('model'), ninitial) \
                   in process.get('forbidden_onsh_s_channels')] \
                   for diagram in res], []))
            for vert in verts:
                # Use onshell = False to indicate that this s-channel is forbidden
                newleg = copy.copy(vert.get('legs').pop(-1))
                newleg.set('onshell', False)
                vert.get('legs').append(newleg)

        # Set actual coupling orders for each diagram
        for diagram in res:
            diagram.calculate_orders(model)

        # Filter the diagrams according to the squared coupling order
        # constraints and possibly the negative one. Remember that OrderName=-n
        # means that the user wants to include everything up to the N^(n+1)LO
        # contribution in that order and at most one order can be restricted
        # in this way. We shall do this only if the diagrams are not asked to
        # be returned, as is the case for NLO, because in this case the
        # interferences are not necessarily among the diagrams generated here
        # only.
        if not returndiag and len(res) > 0:
            res = self.apply_squared_order_constraints(res)

        if diagram_filter:
            res = self.apply_user_filter(res)

        # Replace final id=0 vertex if necessary
        if not process.get('is_decay_chain'):
            for diagram in res:
                vertices = diagram.get('vertices')
                if len(vertices) > 1 and vertices[-1].get('id') == 0:
                    # Need to "glue together" last and next-to-last
                    # vertex, by replacing the (incoming) last leg of the
                    # next-to-last vertex with the (outgoing) leg in the
                    # last vertex
                    vertices = copy.copy(vertices)
                    lastvx = vertices.pop()
                    nexttolastvertex = copy.copy(vertices.pop())
                    legs = copy.copy(nexttolastvertex.get('legs'))
                    ntlnumber = legs[-1].get('number')
                    lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
                                     lastvx.get('legs'))[0]
                    # Reset onshell in case we have forbidden s-channels
                    if lastleg.get('onshell') == False:
                        lastleg.set('onshell', None)
                    # Replace the last leg of nexttolastvertex
                    legs[-1] = lastleg
                    nexttolastvertex.set('legs', legs)
                    vertices.append(nexttolastvertex)
                    diagram.set('vertices', vertices)

        if res and not returndiag:
            logger.info("Process has %d diagrams" % len(res))

        # Trim down number of legs and vertices used to save memory
        self.trim_diagrams(diaglist=res)

        # Sort process legs according to leg number
        pertur = 'QCD'
        if self.get('process')['perturbation_couplings']:
            pertur = sorted(self.get('process')['perturbation_couplings'])[0]
        self.get('process').get('legs').sort(pert=pertur)

        # Set diagrams to res if not asked to be returned
        if not returndiag:
            self['diagrams'] = res
            return not failed_crossing
        else:
            return not failed_crossing, res
841
842 - def apply_squared_order_constraints(self, diag_list):
843 """Applies the user specified squared order constraints on the diagram 844 list in argument.""" 845 846 res = copy.copy(diag_list) 847 848 # Apply the filtering on constrained amplitude (== and >) 849 # No need to iterate on this one 850 for name, (value, operator) in self['process'].get('constrained_orders').items(): 851 res.filter_constrained_orders(name, value, operator) 852 853 # Iterate the filtering since the applying the constraint on one 854 # type of coupling order can impact what the filtering on a previous 855 # one (relevant for the '==' type of constraint). 856 while True: 857 new_res = res.apply_positive_sq_orders(res, 858 self['process'].get('squared_orders'), 859 self['process']['sqorders_types']) 860 # Exit condition 861 if len(res)==len(new_res): 862 break 863 elif (len(new_res)>len(res)): 864 raise MadGraph5Error( 865 'Inconsistency in function apply_squared_order_constraints().') 866 # Actualizing the list of diagram for the next iteration 867 res = new_res 868 869 870 871 # Now treat the negative squared order constraint (at most one) 872 neg_orders = [(order, value) for order, value in \ 873 self['process'].get('squared_orders').items() if value<0] 874 if len(neg_orders)==1: 875 neg_order, neg_value = neg_orders[0] 876 # Now check any negative order constraint 877 res, target_order = res.apply_negative_sq_order(res, neg_order,\ 878 neg_value, self['process']['sqorders_types'][neg_order]) 879 # Substitute the negative value to this positive one so that 880 # the resulting computed constraints appears in the print out 881 # and at the output stage we no longer have to deal with 882 # negative valued target orders 883 self['process']['squared_orders'][neg_order]=target_order 884 elif len(neg_orders)>1: 885 raise InvalidCmd('At most one negative squared order constraint'+\ 886 ' can be specified, not %s.'%str(neg_orders)) 887 888 return res
889
890 - def apply_user_filter(self, diag_list):
891 """Applies the user specified squared order constraints on the diagram 892 list in argument.""" 893 894 if True: 895 try: 896 from PLUGIN.user_filter import remove_diag 897 except ImportError: 898 raise MadGraph5Error, 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the daigram has to be removed' 899 else: 900 #example and simple tests 901 def remove_diag(diag): 902 for vertex in diag['vertices']: #last 903 if vertex['id'] == 0: #special final vertex 904 continue 905 if vertex['legs'][-1]['number'] < 3: #this means T-channel 906 if abs(vertex['legs'][-1]['id']) <6: 907 return True 908 return False
909 910 res = diag_list.__class__() 911 nb_removed = 0 912 for diag in diag_list: 913 if remove_diag(diag): 914 nb_removed +=1 915 else: 916 res.append(diag) 917 918 if nb_removed: 919 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed) 920 921 return res
922 923 924
    def create_diagram(self, vertexlist):
        """Return a Diagram created from the vertex list. This function can be
        overloaded by daughter classes."""
        return base_objects.Diagram({'vertices': vertexlist})
929
    def convert_dgleg_to_leg(self, vertexdoublelist):
        """In LoopAmplitude, this converts back all DGLoopLegs into Legs.
        In Amplitude, there is nothing to do; it is a no-op hook."""

        return True
935
    def copy_leglist(self, legs):
        """Simply returns a (shallow-element) copy of the leg list. This
        function is overloaded in LoopAmplitude so that a DGLoopLeg list is
        returned; the DGLoopLeg has some additional parameters only useful
        during loop diagram generation."""

        return base_objects.LegList(\
                                [ copy.copy(leg) for leg in legs ])
944
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1.
        For algorithm, see doc for generate_diagrams.
        Returns a list of vertex lists (one per generated diagram), or
        None when the current branch must be discarded."""

        # Result variable which is a list of lists of vertices
        # to be added
        res = []

        # Stop condition. If LegList is None, that means that this
        # diagram must be discarded
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined in one single vertex, add this
        # vertex to res and continue.
        # Special treatment for decay chain legs
        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Extract the interaction id associated to the vertex
            vertex_ids = self.get_combined_vertices(curr_leglist,
                  copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                              leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs': curr_leglist,
                                                   'id': vertex_id}) for \
                              vertex_id in vertex_ids]
            # Check for coupling orders. If orders < 0, skip vertex
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])
        # Stop condition 2: if the leglist contained exactly two particles,
        # return the result, if any, and stop.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid combinations of legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Create a list of leglists/vertices by merging combinations
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider all the pairs
        for leg_vertex_tuple in leg_vertex_list:

            # Remove forbidden particles
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                continue

            # Check for coupling orders. If couplings < 0, skip recursion.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order < 0
                continue

            # This is where recursion happens
            # First, reduce again the leg part
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)
            # If there is a reduced diagram, combine its vertex lists
            # with the vertices produced at this level
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
1033
1034 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
1035 """Return False if the coupling orders for any coupling is < 1036 0, otherwise return the new coupling orders with the vertex 1037 orders subtracted. If coupling_orders is not given, return 1038 None (which counts as success). 1039 WEIGHTED is a special order, which corresponds to the sum of 1040 order hierarchies for the couplings. 1041 We ignore negative constraints as these cannot be taken into 1042 account on the fly but only after generation.""" 1043 1044 if not coupling_orders: 1045 return None 1046 1047 present_couplings = copy.copy(coupling_orders) 1048 for id in vertex_id_list: 1049 # Don't check for identity vertex (id = 0) 1050 if not id: 1051 continue 1052 inter = model.get("interaction_dict")[id] 1053 for coupling in inter.get('orders').keys(): 1054 # Note that we don't consider a missing coupling as a 1055 # constraint 1056 if coupling in present_couplings and \ 1057 present_couplings[coupling]>=0: 1058 # Reduce the number of couplings that are left 1059 present_couplings[coupling] -= \ 1060 inter.get('orders')[coupling] 1061 if present_couplings[coupling] < 0: 1062 # We have too many couplings of this type 1063 return False 1064 # Now check for WEIGHTED, i.e. the sum of coupling hierarchy values 1065 if 'WEIGHTED' in present_couplings and \ 1066 present_couplings['WEIGHTED']>=0: 1067 weight = sum([model.get('order_hierarchy')[c]*n for \ 1068 (c,n) in inter.get('orders').items()]) 1069 present_couplings['WEIGHTED'] -= weight 1070 if present_couplings['WEIGHTED'] < 0: 1071 # Total coupling weight too large 1072 return False 1073 1074 return present_couplings
1075
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
           If so:

           a. Append [12[34]56] to result array

           b. Split [123456] at index(first element in combination+1),
              i.e. [12],[456] and subtract combination from second half,
              i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
           (first half in step b - combination) + combination + (result
           from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
           resulting array. Example, if [13] and [45] are valid
           combinations:
           [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        res = []

        # loop over possible combination lengths (+1 is for range convention!)
        for comb_length in range(2, max_multi_to1 + 1):

            # Check the considered length is not longer than the list length;
            # longer lengths can only be longer still, so we can return here
            if comb_length > len(list_legs):
                return res

            # itertools.combinations returns all possible combinations
            # of comb_length elements from list_legs
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination is valid
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Identify the rest, create a list [comb,rest] and
                    # add it to res. Note that the combination (a tuple)
                    # is inserted where its first leg used to sit.
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Now, deal with cases with more than 1 combination

                    # First, split the list into two, according to the
                    # position of the first element in comb, and remove
                    # all elements form comb
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Create a list of type [comb,rest1,rest2(combined)].
                    # res_list1 is a fresh slice copy, so appending to it
                    # (via the res_list alias) cannot affect list_legs.
                    res_list = res_list1
                    res_list.append(comb)
                    # This is where recursion actually happens,
                    # on the second part
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1149 1150
1151 - def merge_comb_legs(self, comb_lists, ref_dict_to1):
1152 """Takes a list of allowed leg combinations as an input and returns 1153 a set of lists where combinations have been properly replaced 1154 (one list per element in the ref_dict, so that all possible intermediate 1155 particles are included). For each list, give the list of vertices 1156 corresponding to the executed merging, group the two as a tuple. 1157 """ 1158 1159 res = [] 1160 1161 for comb_list in comb_lists: 1162 1163 reduced_list = [] 1164 vertex_list = [] 1165 1166 for entry in comb_list: 1167 1168 # Act on all leg combinations 1169 if isinstance(entry, tuple): 1170 1171 # Build the leg object which will replace the combination: 1172 # 1) leg ids is as given in the ref_dict 1173 leg_vert_ids = copy.copy(ref_dict_to1[\ 1174 tuple(sorted([leg.get('id') for leg in entry]))]) 1175 # 2) number is the minimum of leg numbers involved in the 1176 # combination 1177 number = min([leg.get('number') for leg in entry]) 1178 # 3) state is final, unless there is exactly one initial 1179 # state particle involved in the combination -> t-channel 1180 if len(filter(lambda leg: leg.get('state') == False, 1181 entry)) == 1: 1182 state = False 1183 else: 1184 state = True 1185 # 4) from_group is True, by definition 1186 1187 # Create and add the object. This is done by a 1188 # separate routine, to allow overloading by 1189 # daughter classes 1190 new_leg_vert_ids = [] 1191 if leg_vert_ids: 1192 new_leg_vert_ids = self.get_combined_legs(entry, 1193 leg_vert_ids, 1194 number, 1195 state) 1196 1197 reduced_list.append([l[0] for l in new_leg_vert_ids]) 1198 1199 1200 # Create and add the corresponding vertex 1201 # Extract vertex ids corresponding to the various legs 1202 # in mylegs 1203 vlist = base_objects.VertexList() 1204 for (myleg, vert_id) in new_leg_vert_ids: 1205 # Start with the considered combination... 1206 myleglist = base_objects.LegList(list(entry)) 1207 # ... and complete with legs after reducing 1208 myleglist.append(myleg) 1209 # ... 
and consider the correct vertex id 1210 vlist.append(base_objects.Vertex( 1211 {'legs':myleglist, 1212 'id':vert_id})) 1213 1214 vertex_list.append(vlist) 1215 1216 # If entry is not a combination, switch the from_group flag 1217 # and add it 1218 else: 1219 cp_entry = copy.copy(entry) 1220 # Need special case for from_group == None; this 1221 # is for initial state leg of decay chain process 1222 # (see Leg.can_combine_to_0) 1223 if cp_entry.get('from_group') != None: 1224 cp_entry.set('from_group', False) 1225 reduced_list.append(cp_entry) 1226 1227 # Flatten the obtained leg and vertex lists 1228 flat_red_lists = expand_list(reduced_list) 1229 flat_vx_lists = expand_list(vertex_list) 1230 1231 # Combine the two lists in a list of tuple 1232 for i in range(0, len(flat_vx_lists)): 1233 res.append((base_objects.LegList(flat_red_lists[i]), \ 1234 base_objects.VertexList(flat_vx_lists[i]))) 1235 1236 return res
1237
1238 - def get_combined_legs(self, legs, leg_vert_ids, number, state):
1239 """Create a set of new legs from the info given. This can be 1240 overloaded by daughter classes.""" 1241 1242 mylegs = [(base_objects.Leg({'id':leg_id, 1243 'number':number, 1244 'state':state, 1245 'from_group':True}), 1246 vert_id)\ 1247 for leg_id, vert_id in leg_vert_ids] 1248 1249 return mylegs
1250
    def get_combined_vertices(self, legs, vert_ids):
        """Allow for selection of vertex ids. This can be
        overloaded by daughter classes.

        The base implementation keeps all candidate vertex ids; 'legs'
        is accepted (and ignored here) so that daughter classes can use
        the leg content to filter the ids."""

        return vert_ids
1256
    def trim_diagrams(self, decay_ids=[], diaglist=None):
        """Reduce the number of legs and vertices used in memory.
        When called by a diagram generation initiated by LoopAmplitude,
        this function should not trim the diagrams in the attribute 'diagrams'
        but rather a given list in the 'diaglist' argument.

        decay_ids: particle ids that have decay chains attached; external
        final-state legs with these ids are flagged with onshell = True.
        (decay_ids is only read here, never mutated, so the mutable
        default list is harmless.)"""

        # Pools of unique legs/vertices: identical objects are shared
        # between diagrams instead of duplicated, to save memory
        legs = []
        vertices = []

        if diaglist is None:
            diaglist=self.get('diagrams')

        # Flag decaying legs in the core process by onshell = True
        process = self.get('process')
        for leg in process.get('legs'):
            if leg.get('state') and leg.get('id') in decay_ids:
                leg.set('onshell', True)

        for diagram in diaglist:
            # Keep track of external legs (leg numbers already used)
            leg_external = set()
            for ivx, vertex in enumerate(diagram.get('vertices')):
                for ileg, leg in enumerate(vertex.get('legs')):
                    # Ensure that only external legs get decay flag
                    if leg.get('state') and leg.get('id') in decay_ids and \
                           leg.get('number') not in leg_external:
                        # Use onshell to indicate decaying legs,
                        # i.e. legs that have decay chains
                        leg = copy.copy(leg)
                        leg.set('onshell', True)
                    try:
                        index = legs.index(leg)
                    except ValueError:
                        # First time we see this leg: keep it and pool it
                        vertex.get('legs')[ileg] = leg
                        legs.append(leg)
                    else: # Found a leg
                        # Reuse the pooled equal leg object
                        vertex.get('legs')[ileg] = legs[index]
                    leg_external.add(leg.get('number'))
                # Same sharing scheme for whole vertices
                try:
                    index = vertices.index(vertex)
                    diagram.get('vertices')[ivx] = vertices[index]
                except ValueError:
                    vertices.append(vertex)
1300
#===============================================================================
# AmplitudeList
#===============================================================================
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this list to
        see if there is any which defines perturbation couplings."""

        for amp in self:
            if amp.has_loop_process():
                return True
        # Explicitly return False: the original implementation fell
        # through and implicitly returned None, which is falsy but not a
        # proper boolean.
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1320
#===============================================================================
# DecayChainAmplitude
#===============================================================================
class DecayChainAmplitude(Amplitude):
    """A list of amplitudes + a list of decay chain amplitude lists;
    corresponding to a ProcessDefinition with a list of decay chains
    """

    def default_setup(self):
        """Default values for all properties"""

        self['amplitudes'] = AmplitudeList()
        self['decay_chains'] = DecayChainAmplitudeList()

    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None):
        """Allow initialization with Process and with ProcessDefinition"""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Local import to avoid a circular dependency with the loop
            # diagram generation module
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                  MultiProcessClass.generate_multi_amplitudes(argument,
                                              collect_mirror_procs,
                                              ignore_six_quark_processes,
                                              loop_filter=loop_filter))
            else:
                self['amplitudes'].append(\
                  MultiProcessClass.get_amplitude_from_proc(argument,
                                                   loop_filter=loop_filter))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively generate the amplitudes for each decay chain
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error,\
                          "Decay processes can not be perturbed"
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd,\
                          "Decay chain process must have exactly one" + \
                          " incoming particle"
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes))

            # Flag decaying legs in the core diagrams by onshell = True
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least some process;
            # ids found in a core process are removed from decay_ids, so
            # whatever remains afterwards has no matching core leg
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

            # Remove unused decays from the process list
            # (iterate in reverse so removal does not disturb iteration)
            for dc in reversed(self['decay_chains']):
                for a in reversed(dc.get('amplitudes')):
                    # Remove the amplitudes from this decay chain
                    if a.get('process').get('legs')[0].get('id') in decay_ids:
                        dc.get('amplitudes').remove(a)
                if not dc.get('amplitudes'):
                    # If no amplitudes left, remove the decay chain
                    self['decay_chains'].remove(dc)

            # Finally, write a fat warning if any decay process has
            # the decaying particle (or its antiparticle) in the final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")


        elif argument != None:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__()

    def filter(self, name, value):
        """Filter for valid amplitude property values."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid AmplitudeList" % str(value)
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid DecayChainAmplitudeList object" % \
                      str(value)
        return True

    def get_sorted_keys(self):
        """Return diagram property names as a nicely sorted list."""

        return ['amplitudes', 'decay_chains']

    # Helper functions

    def get_number_of_diagrams(self):
        """Returns number of diagrams for this amplitude"""
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))

    def nice_string(self, indent = 0):
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string(indent + 2) + "\n"

        return mystr[:-1]

    def nice_string_processes(self, indent = 0):
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        return mystr[:-1]

    def get_ninitial(self):
        """Returns the number of initial state particles in the process."""
        return self.get('amplitudes')[0].get('process').get_ninitial()

    def get_decay_ids(self):
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # Get all amplitudes for the decay processes
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            # For each amplitude, find the initial state leg
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Return a list with unique ids
        return list(set(decay_ids))

    def has_loop_process(self):
        """ Returns whether this amplitude has a loop process."""
        return self['amplitudes'].has_any_loop_process()

    def get_amplitudes(self):
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        amplitudes.extend(self.get('amplitudes'))
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
1524

#===============================================================================
# DecayChainAmplitudeList
#===============================================================================
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects
    """

    def is_valid_element(self, obj):
        """Test if object obj is a valid DecayChainAmplitude for the list."""

        return isinstance(obj, DecayChainAmplitude)
1537
1538 1539 #=============================================================================== 1540 # MultiProcess 1541 #=============================================================================== 1542 -class MultiProcess(base_objects.PhysicsObject):
1543 """MultiProcess: list of process definitions 1544 list of processes (after cleaning) 1545 list of amplitudes (after generation) 1546 """ 1547
    def default_setup(self):
        """Default values for all properties"""

        self['process_definitions'] = base_objects.ProcessDefinitionList()
        # self['amplitudes'] can be an AmplitudeList or a
        # DecayChainAmplitudeList, depending on whether there are
        # decay chains in the process definitions or not.
        self['amplitudes'] = AmplitudeList()
        # Flag for whether to combine IS mirror processes together
        self['collect_mirror_procs'] = False
        # List of quark flavors where we ignore processes with at
        # least 6 quarks (three quark lines)
        self['ignore_six_quark_processes'] = []
        # Allow to use the model parameter numerical value for optimization.
        # This is currently used for 1->N generation (check mass).
        self['use_numerical'] = False
1564
    def __init__(self, argument=None, collect_mirror_procs = False,
                 ignore_six_quark_processes = [], optimize=False,
                 loop_filter=None, diagram_filter=None):
        """Allow initialization with ProcessDefinition or
        ProcessDefinitionList.
        optimize allows to use param_card information (useful for 1->N).
        Note: the default [] for ignore_six_quark_processes is only read,
        never mutated, so the mutable default is harmless here."""

        if isinstance(argument, base_objects.ProcessDefinition):
            super(MultiProcess, self).__init__()
            self['process_definitions'].append(argument)
        elif isinstance(argument, base_objects.ProcessDefinitionList):
            super(MultiProcess, self).__init__()
            self['process_definitions'] = argument
        elif argument != None:
            # call the mother routine
            super(MultiProcess, self).__init__(argument)
        else:
            # call the mother routine
            super(MultiProcess, self).__init__()

        self['collect_mirror_procs'] = collect_mirror_procs
        self['ignore_six_quark_processes'] = ignore_six_quark_processes
        self['use_numerical'] = optimize
        self['loop_filter'] = loop_filter
        self['diagram_filter'] = diagram_filter # only True/False so far

        if isinstance(argument, base_objects.ProcessDefinition) or \
               isinstance(argument, base_objects.ProcessDefinitionList):
            # Generate the diagrams (lazily triggered through get())
            self.get('amplitudes')
1595 1596
    def filter(self, name, value):
        """Filter for valid process property values.

        Raises PhysicsObjectError for a value of the wrong type;
        returns True when the value is acceptable."""

        if name == 'process_definitions':
            if not isinstance(value, base_objects.ProcessDefinitionList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid ProcessDefinitionList object" % str(value)

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid AmplitudeList object" % str(value)

        if name in ['collect_mirror_procs']:
            if not isinstance(value, bool):
                raise self.PhysicsObjectError, \
                      "%s is not a valid boolean" % str(value)

        if name == 'ignore_six_quark_processes':
            if not isinstance(value, list):
                raise self.PhysicsObjectError, \
                      "%s is not a valid list" % str(value)

        return True
1621
    def get(self, name):
        """Get the value of the property name.

        'amplitudes' is generated lazily: on first access, diagrams are
        generated for every process definition, either as a
        DecayChainAmplitude (for definitions with decay chains) or via
        generate_multi_amplitudes otherwise."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process
                    # Store amplitude(s) as DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes')))
                else:
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                       self.get('collect_mirror_procs'),
                                       self.get('ignore_six_quark_processes'),
                                       self['use_numerical'],
                                       loop_filter=self['loop_filter'],
                                       diagram_filter=self['diagram_filter']))

        return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1644
1645 - def get_sorted_keys(self):
1646 """Return process property names as a nicely sorted list.""" 1647 1648 return ['process_definitions', 'amplitudes']

    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
               "%s not valid ProcessDefinition object" % \
               repr(process_definition)

        # Set automatic coupling orders
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs and success_procs are sorted processes that have
        # already failed/succeeded based on crossing symmetry
        failed_procs = []
        success_procs = []
        # Complete processes, for identification of mirror processes
        non_permuted_procs = []
        # permutations keeps the permutations of the crossed processes
        permutations = []

        # Store the diagram tags for processes, to allow for
        # identifying identical matrix elements already at this stage.
        model = process_definition['model']

        # Lists of allowed particle ids for each initial/final leg
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]

        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False}) \
                    for id in prod]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = []

            for prod in itertools.product(*fsids):

                # Remove double counting between final states
                if tuple(sorted(prod)) in red_fsidlist:
                    continue

                red_fsidlist.append(tuple(sorted(prod)));

                # Generate leg list for process
                leg_list = [copy.copy(leg) for leg in islegs]

                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                legs = base_objects.LegList(leg_list)

                # Check for crossed processes: keep both the sorted
                # outgoing ids and the permutation that sorts them
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                 enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Check for six-quark processes
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # Check if crossed process has already failed,
                # in that case don't check process
                if sorted_legs in failed_procs:
                    continue

                # If allowed check mass validity [assume 1->N]
                if use_numerical:
                    # check that final state has lower mass than initial state
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup process
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                       process_definition.get_ninitial() == 2:
                    # Check if mirrored process is already generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                               amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Check for successful crossings, unless we have specified
                # properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain'):
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # The relabeling of legs for loop amplitudes is cumbersome
                        # and does not save so much time. It is disable here and
                        # we use the key 'loop_diagrams' to decide whether
                        # it is an instance of LoopAmplitude.
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing found, just continue
                        pass
                    else:
                        # Found crossing - reuse amplitude
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create new amplitude
                amplitude = cls.get_amplitude_from_proc(process,
                                                     loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException, \
                      "No amplitudes generated from process %s. Please enter a valid process" % \
                      process_definition.nice_string()


        # Return the produced amplitudes
        return amplitudes
1840 1841 @classmethod
1842 - def get_amplitude_from_proc(cls,proc,**opts):
1843 """ Return the correct amplitude type according to the characteristics of 1844 the process proc. The only option that could be specified here is 1845 loop_filter and it is of course not relevant for a tree amplitude.""" 1846 1847 return Amplitude({"process": proc})
1848 1849 1850 @staticmethod
1851 - def find_optimal_process_orders(process_definition):
1852 """Find the minimal WEIGHTED order for this set of processes. 1853 1854 The algorithm: 1855 1856 1) Check the coupling hierarchy of the model. Assign all 1857 particles to the different coupling hierarchies so that a 1858 particle is considered to be in the highest hierarchy (i.e., 1859 with lowest value) where it has an interaction. 1860 1861 2) Pick out the legs in the multiprocess according to the 1862 highest hierarchy represented (so don't mix particles from 1863 different hierarchy classes in the same multiparticles!) 1864 1865 3) Find the starting maximum WEIGHTED order as the sum of the 1866 highest n-2 weighted orders 1867 1868 4) Pick out required s-channel particle hierarchies, and use 1869 the highest of the maximum WEIGHTED order from the legs and 1870 the minimum WEIGHTED order extracted from 2*s-channel 1871 hierarchys plus the n-2-2*(number of s-channels) lowest 1872 leg weighted orders. 1873 1874 5) Run process generation with the WEIGHTED order determined 1875 in 3)-4) - # final state gluons, with all gluons removed from 1876 the final state 1877 1878 6) If no process is found, increase WEIGHTED order by 1 and go 1879 back to 5), until we find a process which passes. Return that 1880 order. 1881 1882 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1. 
1883 If still no process has passed, return 1884 WEIGHTED = (n-2)*(highest hierarchy) 1885 """ 1886 1887 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1888 "%s not valid ProcessDefinition object" % \ 1889 repr(process_definition) 1890 1891 processes = base_objects.ProcessList() 1892 amplitudes = AmplitudeList() 1893 1894 # If there are already couplings defined, return 1895 if process_definition.get('orders') or \ 1896 process_definition.get('overall_orders') or \ 1897 process_definition.get('NLO_mode')=='virt': 1898 return process_definition.get('orders') 1899 1900 # If this is a decay process (and not a decay chain), return 1901 if process_definition.get_ninitial() == 1 and not \ 1902 process_definition.get('is_decay_chain'): 1903 return process_definition.get('orders') 1904 1905 logger.info("Checking for minimal orders which gives processes.") 1906 logger.info("Please specify coupling orders to bypass this step.") 1907 1908 # Calculate minimum starting guess for WEIGHTED order 1909 max_order_now, particles, hierarchy = \ 1910 process_definition.get_minimum_WEIGHTED() 1911 coupling = 'WEIGHTED' 1912 1913 model = process_definition.get('model') 1914 1915 # Extract the initial and final leg ids 1916 isids = [leg['ids'] for leg in \ 1917 filter(lambda leg: leg['state'] == False, process_definition['legs'])] 1918 fsids = [leg['ids'] for leg in \ 1919 filter(lambda leg: leg['state'] == True, process_definition['legs'])] 1920 1921 max_WEIGHTED_order = \ 1922 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED()) 1923 1924 # get the definition of the WEIGHTED 1925 hierarchydef = process_definition['model'].get('order_hierarchy') 1926 tmp = [] 1927 hierarchy = hierarchydef.items() 1928 hierarchy.sort() 1929 for key, value in hierarchydef.items(): 1930 if value>1: 1931 tmp.append('%s*%s' % (value,key)) 1932 else: 1933 tmp.append('%s' % key) 1934 wgtdef = '+'.join(tmp) 1935 # Run diagram generation with increasing max_order_now until 1936 # we 
manage to get diagrams 1937 while max_order_now < max_WEIGHTED_order: 1938 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef)) 1939 1940 oldloglevel = logger.level 1941 logger.setLevel(logging.WARNING) 1942 1943 # failed_procs are processes that have already failed 1944 # based on crossing symmetry 1945 failed_procs = [] 1946 1947 # Generate all combinations for the initial state 1948 for prod in apply(itertools.product, isids): 1949 islegs = [ base_objects.Leg({'id':id, 'state': False}) \ 1950 for id in prod] 1951 1952 # Generate all combinations for the final state, and make 1953 # sure to remove double counting 1954 1955 red_fsidlist = [] 1956 1957 for prod in apply(itertools.product, fsids): 1958 1959 # Remove double counting between final states 1960 if tuple(sorted(prod)) in red_fsidlist: 1961 continue 1962 1963 red_fsidlist.append(tuple(sorted(prod))); 1964 1965 # Remove gluons from final state if QCD is among 1966 # the highest coupling hierarchy 1967 nglue = 0 1968 if 21 in particles[0]: 1969 nglue = len([id for id in prod if id == 21]) 1970 prod = [id for id in prod if id != 21] 1971 1972 # Generate leg list for process 1973 leg_list = [copy.copy(leg) for leg in islegs] 1974 1975 leg_list.extend([\ 1976 base_objects.Leg({'id':id, 'state': True}) \ 1977 for id in prod]) 1978 1979 legs = base_objects.LegList(leg_list) 1980 1981 # Set summed coupling order according to max_order_now 1982 # subtracting the removed gluons 1983 coupling_orders_now = {coupling: max_order_now - \ 1984 nglue * model['order_hierarchy']['QCD']} 1985 1986 # Setup process 1987 process = base_objects.Process({\ 1988 'legs':legs, 1989 'model':model, 1990 'id': process_definition.get('id'), 1991 'orders': coupling_orders_now, 1992 'required_s_channels': \ 1993 process_definition.get('required_s_channels'), 1994 'forbidden_onsh_s_channels': \ 1995 process_definition.get('forbidden_onsh_s_channels'), 1996 'sqorders_types': \ 1997 
process_definition.get('sqorders_types'), 1998 'squared_orders': \ 1999 process_definition.get('squared_orders'), 2000 'split_orders': \ 2001 process_definition.get('split_orders'), 2002 'forbidden_s_channels': \ 2003 process_definition.get('forbidden_s_channels'), 2004 'forbidden_particles': \ 2005 process_definition.get('forbidden_particles'), 2006 'is_decay_chain': \ 2007 process_definition.get('is_decay_chain'), 2008 'overall_orders': \ 2009 process_definition.get('overall_orders'), 2010 'split_orders': \ 2011 process_definition.get('split_orders')}) 2012 2013 # Check for couplings with given expansion orders 2014 process.check_expansion_orders() 2015 2016 # Check for crossed processes 2017 sorted_legs = sorted(legs.get_outgoing_id_list(model)) 2018 # Check if crossed process has already failed 2019 # In that case don't check process 2020 if tuple(sorted_legs) in failed_procs: 2021 continue 2022 2023 amplitude = Amplitude({'process': process}) 2024 try: 2025 amplitude.generate_diagrams() 2026 except InvalidCmd: 2027 failed_procs.append(tuple(sorted_legs)) 2028 else: 2029 if amplitude.get('diagrams'): 2030 # We found a valid amplitude. Return this order number 2031 logger.setLevel(oldloglevel) 2032 return {coupling: max_order_now} 2033 else: 2034 failed_procs.append(tuple(sorted_legs)) 2035 2036 # No processes found, increase max_order_now 2037 max_order_now += 1 2038 logger.setLevel(oldloglevel) 2039 2040 # If no valid processes found with nfinal-1 couplings, return maximal 2041 return {coupling: max_order_now}
2042 2043 @staticmethod
2044 - def cross_amplitude(amplitude, process, org_perm, new_perm):
2045 """Return the amplitude crossed with the permutation new_perm""" 2046 # Create dict from original leg numbers to new leg numbers 2047 perm_map = dict(zip(org_perm, new_perm)) 2048 # Initiate new amplitude 2049 new_amp = copy.copy(amplitude) 2050 # Number legs 2051 for i, leg in enumerate(process.get('legs')): 2052 leg.set('number', i+1) 2053 # Set process 2054 new_amp.set('process', process) 2055 # Now replace the leg numbers in the diagrams 2056 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 2057 process.get('legs'),) for \ 2058 d in new_amp.get('diagrams')]) 2059 new_amp.set('diagrams', diagrams) 2060 new_amp.trim_diagrams() 2061 2062 # Make sure to reset mirror process 2063 new_amp.set('has_mirror_process', False) 2064 2065 return new_amp
2066
#===============================================================================
# Global helper methods
#===============================================================================

def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Check that argument is a list
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap bare elements in singleton lists so itertools.product can
    # treat every entry uniformly
    tmplist = []
    for item in mylist:
        if isinstance(item, list):
            tmplist.append(item)
        else:
            tmplist.append([item])

    # The builtin apply() is deprecated (and removed in Python 3);
    # argument unpacking is the equivalent, portable spelling.
    # Note: an empty argument yields [[]], as itertools.product() with
    # no iterables produces a single empty tuple.
    return [list(item) for item in itertools.product(*tmplist)]
2092
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # An empty argument (or a single empty head) expands to one empty list
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    # Check the first element is at least a list
    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    head, tail = mylist[0], mylist[1:]

    # Recursion stop condition: a single remaining item needs no
    # combination with a tail
    if not tail:
        if isinstance(head[0], list):
            return head
        return mylist

    # The head contributes either its alternatives (when it is a list of
    # lists) or just itself (when it is a flat list)
    if isinstance(head[0], list):
        alternatives = head
    else:
        alternatives = [head]

    # Prepend each head alternative to every expansion of the tail;
    # list concatenation builds a fresh (shallow) list each time, matching
    # the copy-and-extend of the original implementation.
    return [prefix + rest \
            for prefix in alternatives \
            for rest in expand_list_list(tail)]
2132