Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import, division 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  from fractions import Fraction 
  20  """Methods and classes to export matrix elements to v4 format.""" 
  21   
  22  import copy 
  23  from six import StringIO 
  24  import itertools 
  25  import fractions 
  26  import glob 
  27  import logging 
  28  import math 
  29  import os 
  30  import io 
  31  import re 
  32  import shutil 
  33  import subprocess 
  34  import sys 
  35  import time 
  36  import traceback 
  37  import  collections 
  38   
  39  import aloha 
  40   
  41  import madgraph.core.base_objects as base_objects 
  42  import madgraph.core.color_algebra as color 
  43  import madgraph.core.helas_objects as helas_objects 
  44  import madgraph.iolibs.drawing_eps as draw 
  45  import madgraph.iolibs.files as files 
  46  import madgraph.iolibs.group_subprocs as group_subprocs 
  47  import madgraph.iolibs.file_writers as writers 
  48  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  49  import madgraph.iolibs.template_files as template_files 
  50  import madgraph.iolibs.ufo_expression_parsers as parsers 
  51  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  52  import madgraph.interface.common_run_interface as common_run_interface 
  53  import madgraph.various.diagram_symmetry as diagram_symmetry 
  54  import madgraph.various.misc as misc 
  55  import madgraph.various.banner as banner_mod 
  56  import madgraph.various.process_checks as process_checks 
  57  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  58  import madgraph 
  59  import aloha.create_aloha as create_aloha 
  60  import models.import_ufo as import_ufo 
  61  import models.write_param_card as param_writer 
  62  import models.check_param_card as check_param_card 
  63  from models import UFOError 
  64   
  65   
  66  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  67  from madgraph.iolibs.files import cp, ln, mv 
  68   
  69  from madgraph import InvalidCmd 
  70   
  71  pjoin = os.path.join 
  72   
  73  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  74  logger = logging.getLogger('madgraph.export_v4') 
  75   
  76  default_compiler= {'fortran': 'gfortran', 
  77                         'f2py': 'f2py', 
  78                         'cpp':'g++'} 
class VirtualExporter(object):
    """Base class defining the interface madgraph expects from an exporter.

    Concrete exporters (Fortran, C++, ...) override the hooks below; the
    class-level flags tell the madgraph interface how to drive the exporter.
    """

    # Exporter variables that modify the way madgraph interacts with this class.

    grouped_mode = 'madevent'
    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    # False to avoid grouping (only identical matrix elements are merged)
    # 'madevent' groups the massless quarks and massless leptons
    # 'madweight' groups the gluon with the massless quarks
    sa_symmetry = False
    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Setting sa_symmetry generates only one of the two matrix elements.
    check = True
    # Ask madgraph to check if the directory already exists and propose to
    # the user to remove it first if this is the case.
    output = 'Template'
    # [Template, None, dir]
    # - Template, madgraph will call copy_template
    # - dir, madgraph will just create an empty directory for initialisation
    # - None, madgraph does nothing for initialisation
    exporter = 'v4'
    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.

    def __init__(self, dir_path="", opt=None):
        """Install the helas-call customisation hook.

        opt is a dictionary with all the optional arguments passed at
        output time (kept for subclasses; unused here)."""

        # Activate some monkey patching for the helas call writer.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    # Helper function to customise the helas writer.
    @staticmethod
    def custom_helas_call(call, arg):
        """Static method to customise the way aloha function calls are written.

        call is the default template for the call; arg is the dictionary
        used for the call. Default behaviour: return both unchanged."""
        return call, arg

    def helas_call_writer_custom(self, call, arg):
        """Bound hook handed to HelasCallWriter (was a lambda); simply
        delegates to custom_helas_call so subclasses override one place."""
        return self.custom_helas_call(call, arg)

    def copy_template(self, model):
        """Initialise the output directory; nothing to do by default."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        """Create one subprocess directory.

        In ungrouped mode the signature is
        generate_subprocess_directory(self, matrix_element, helicity_model, me_number).
        Returns an integer stating the number of calls to helicity routines."""
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Write out the model files; nothing to do by default.
        (The list defaults are never mutated, so sharing them is safe.)"""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Finalize the output directory; nothing to do by default."""
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Hook to reorganise the grouping; default keeps it unchanged."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        """v4 models are not supported by the generic exporter."""
        # Fix: removed the unreachable 'return' that followed the raise.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        """v4 HELAS routines are not supported by the generic exporter."""
        # Fix: removed the unreachable 'return' that followed the raise.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
158 #=============================================================================== 159 # ProcessExporterFortran 160 #=============================================================================== 161 -class ProcessExporterFortran(VirtualExporter):
162 """Class to take care of exporting a set of matrix elements to 163 Fortran (v4) format.""" 164 165 default_opt = {'clean': False, 'complex_mass':False, 166 'export_format':'madevent', 'mp': False, 167 'v5_model': True, 168 'output_options':{} 169 } 170 grouped_mode = False 171 jamp_optim = False 172
173 - def __init__(self, dir_path = "", opt=None):
174 """Initiate the ProcessExporterFortran with directory information""" 175 self.mgme_dir = MG5DIR 176 self.dir_path = dir_path 177 self.model = None 178 179 self.opt = dict(self.default_opt) 180 if opt: 181 self.opt.update(opt) 182 self.cmd_options = self.opt['output_options'] 183 184 #place holder to pass information to the run_interface 185 self.proc_characteristic = banner_mod.ProcCharacteristic() 186 # call mother class 187 super(ProcessExporterFortran,self).__init__(dir_path, opt)
188 189 190 #=========================================================================== 191 # process exporter fortran switch between group and not grouped 192 #===========================================================================
193 - def export_processes(self, matrix_elements, fortran_model):
194 """Make the switch between grouped and not grouped output""" 195 196 calls = 0 197 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 198 for (group_number, me_group) in enumerate(matrix_elements): 199 calls = calls + self.generate_subprocess_directory(\ 200 me_group, fortran_model, group_number) 201 else: 202 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 203 calls = calls + self.generate_subprocess_directory(\ 204 me, fortran_model, me_number) 205 206 return calls
207 208 209 #=========================================================================== 210 # create the run_card 211 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write Cards/run_card_default.dat tailored to the generated
        process(es) and copy it to Cards/run_card.dat."""

        # bypass this for the loop-check: a bare LoopHelasMatrixElement is
        # not a real process output, so fall back to a generic run_card.
        import madgraph.loop.loop_helas_objects as loop_helas_objects
        if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
            matrix_elements = None

        run_card = banner_mod.RunCard()

        # Collect the process lists so the run_card defaults can be tuned;
        # when none are available, write the card with generic defaults.
        default = True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            default = False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                        pjoin(self.dir_path, 'Cards', 'run_card.dat'))
242 243 244 245 #=========================================================================== 246 # copy the Template in a new directory. 247 #===========================================================================
    def copy_template(self, model):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory

        Three cases: a fresh directory (full copy), running inside the
        target directory itself, or an already-initialised directory
        (only the version file is refreshed)."""

        #First copy the full template tree if dir_path doesn't exit
        if not os.path.isdir(self.dir_path):
            assert self.mgme_dir, \
                     "No valid MG_ME path given for MG4 run directory creation."
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(self.dir_path))
            misc.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                          self.dir_path, True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                          self.dir_path)
            # copy plot_card: keep a pristine *_default.dat next to the
            # editable card.
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif os.getcwd() == os.path.realpath(self.dir_path):
            # We are sitting inside the target directory itself.
            logger.info('working in local directory: %s' % \
                        os.path.realpath(self.dir_path))
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                          self.dir_path)
#            for name in misc.glob('Template/LO/*', self.mgme_dir):
#                name = os.path.basename(name)
#                filname = pjoin(self.mgme_dir, 'Template','LO',name)
#                if os.path.isfile(filename):
#                    files.cp(filename, pjoin(self.dir_path,name))
#                elif os.path.isdir(filename):
#                    misc.copytree(filename, pjoin(self.dir_path,name), True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                          self.dir_path)
            # Copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
            # Existing directory without version info: just record it.
            assert self.mgme_dir, \
                      "No valid MG_ME path given for MG4 run directory creation."
            try:
                shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
            except IOError:
                MG5_version = misc.get_pkg_info()
                open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version'])

        #Ensure that the Template is clean
        if self.opt['clean']:
            logger.info('remove old information in %s' % \
                        os.path.basename(self.dir_path))
            if 'MADGRAPH_BASE' in os.environ:
                # Web-server installation uses the stricter cleaning mode.
                misc.call([pjoin('bin', 'internal', 'clean_template'),
                           '--web'], cwd=self.dir_path)
            else:
                try:
                    misc.call([pjoin('bin', 'internal', 'clean_template')], \
                              cwd=self.dir_path)
                except Exception as why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(self.dir_path), why))

        #Write version info
        MG_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
                                                          MG_version['version'])

        # add the makefile in Source directory
        filename = pjoin(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FileWriter(filename))

        # add the DiscreteSampler information
        files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
                 pjoin(self.dir_path, 'Source'))
        files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'StringCast.f'),
                 pjoin(self.dir_path, 'Source'))

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
337 338 339 #=========================================================================== 340 # Call MadAnalysis5 to generate the default cards for this process 341 #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels = ['parton','hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process.

        Any failure is logged and silently skipped: MA5 cards are a
        convenience, not a requirement of the output.
        NOTE: the mutable default 'levels' is never modified here."""

        if len(levels) == 0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        try:
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                              get_MadAnalysis5_interpreter(MG5DIR, ma5_path, loglevel=100)
        except (Exception, SystemExit) as e:
            # Best effort: without an MA5 instance we simply skip the cards.
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton','hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir, 'madanalysis5_%s_card_default.dat' % lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes, lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only); full traceback goes
                    # to the debug log for post-mortem inspection.
                    logger.warning('MadAnalysis5 failed to write a %s-level' % lvl +
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.' % lvl)
                    error = StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-'*60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-'*60)
                else:
                    open(card_to_generate, 'w').write(text)
        stop = time.time()
        # Only report the timing when it is noticeable.
        if stop - start > 1:
            logger.info('Cards created in %.2fs' % (stop - start))
382 383 #=========================================================================== 384 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 385 #===========================================================================
386 - def write_procdef_mg5(self, file_pos, modelname, process_str):
387 """ write an equivalent of the MG4 proc_card in order that all the Madevent 388 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 389 390 proc_card_template = template_files.mg4_proc_card.mg4_template 391 process_template = template_files.mg4_proc_card.process_template 392 process_text = '' 393 coupling = '' 394 new_process_content = [] 395 396 397 # First find the coupling and suppress the coupling from process_str 398 #But first ensure that coupling are define whithout spaces: 399 process_str = process_str.replace(' =', '=') 400 process_str = process_str.replace('= ', '=') 401 process_str = process_str.replace(',',' , ') 402 #now loop on the element and treat all the coupling 403 for info in process_str.split(): 404 if '=' in info: 405 coupling += info + '\n' 406 else: 407 new_process_content.append(info) 408 # Recombine the process_str (which is the input process_str without coupling 409 #info) 410 process_str = ' '.join(new_process_content) 411 412 #format the SubProcess 413 replace_dict = {'process': process_str, 414 'coupling': coupling} 415 process_text += process_template.substitute(replace_dict) 416 417 replace_dict = {'process': process_text, 418 'model': modelname, 419 'multiparticle':''} 420 text = proc_card_template.substitute(replace_dict) 421 422 if file_pos: 423 ff = open(file_pos, 'w') 424 ff.write(text) 425 ff.close() 426 else: 427 return replace_dict
428 429
    def pass_information_from_cmd(self, cmd):
        """Pass information for MA5: record the current process definitions
        from the command interface (used later by create_MA5_cards)."""

        self.proc_defs = cmd._curr_proc_defs
434 435 #=========================================================================== 436 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 437 #===========================================================================
    def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
        """Function to finalize v4 directory, for inheritance.

        Writes the default run_card and the MadAnalysis5 cards.
        NOTE: the mutable defaults are never modified here."""

        self.create_run_card(matrix_elements, history)
        self.create_MA5_cards(matrix_elements, history)
443
444 - def create_MA5_cards(self,matrix_elements,history):
445 """ A wrapper around the creation of the MA5 cards so that it can be 446 bypassed by daughter classes (i.e. in standalone).""" 447 if 'madanalysis5_path' in self.opt and not \ 448 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 449 processes = None 450 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 451 processes = [me.get('processes') for megroup in matrix_elements 452 for me in megroup['matrix_elements']] 453 elif matrix_elements: 454 processes = [me.get('processes') 455 for me in matrix_elements['matrix_elements']] 456 457 self.create_default_madanalysis5_cards( 458 history, self.proc_defs, processes, 459 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 460 levels = ['hadron','parton']) 461 462 for level in ['hadron','parton']: 463 # Copying these cards turn on the use of MadAnalysis5 by default. 464 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 465 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 466 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
467 468 #=========================================================================== 469 # Create the proc_characteristic file passing information to the run_interface 470 #===========================================================================
    def create_proc_charac(self, matrix_elements=None, history="", **opts):
        """Dump the process characteristics collected during export into
        SubProcesses/proc_characteristics for use by the run interface.
        (The arguments are accepted for interface compatibility only.)"""

        self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
474 475 #=========================================================================== 476 # write_matrix_element_v4 477 #===========================================================================
    def write_matrix_element_v4(self):
        """Function to write a matrix.f file, for inheritance.
        Concrete exporters override this; the base does nothing.
        """
        pass
482 483 #=========================================================================== 484 # write_pdf_opendata 485 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure

        Fills the 'pdf_systemwide' / 'cluster_specific_path' slots of the
        two Fortran templates with lookup code for the configured
        'cluster_local_path' option (empty when not configured)."""

        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            # Fortran snippet trying a cascade of candidate locations on the
            # cluster node before falling back to the template default.
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
""" % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"), "r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            to_add = """
              LHAPath='%(path)s/PDFsets'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='./PDFsets'
""" % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"), "r").read()
        ff.writelines(template % changer)

        return
543 544 545 546 #=========================================================================== 547 # write_maxparticles_file 548 #===========================================================================
549 - def write_maxparticles_file(self, writer, matrix_elements):
550 """Write the maxparticles.inc file for MadEvent""" 551 552 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 553 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 554 matrix_elements.get('matrix_elements')]) 555 else: 556 maxparticles = max([me.get_nexternal_ninitial()[0] \ 557 for me in matrix_elements]) 558 559 lines = "integer max_particles\n" 560 lines += "parameter(max_particles=%d)" % maxparticles 561 562 # Write the file 563 writer.writelines(lines) 564 565 return True
566 567 568 #=========================================================================== 569 # export the model 570 #===========================================================================
571 - def export_model_files(self, model_path):
572 """Configure the files/link of the process according to the model""" 573 574 # Import the model 575 for file in os.listdir(model_path): 576 if os.path.isfile(pjoin(model_path, file)): 577 shutil.copy2(pjoin(model_path, file), \ 578 pjoin(self.dir_path, 'Source', 'MODEL'))
579 580 594 602 603 604 #=========================================================================== 605 # export the helas routine 606 #===========================================================================
607 - def export_helas(self, helas_path):
608 """Configure the files/link of the process according to the model""" 609 610 # Import helas routine 611 for filename in os.listdir(helas_path): 612 filepos = pjoin(helas_path, filename) 613 if os.path.isfile(filepos): 614 if filepos.endswith('Makefile.template'): 615 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 616 elif filepos.endswith('Makefile'): 617 pass 618 else: 619 cp(filepos, self.dir_path + '/Source/DHELAS')
620 # following lines do the same but whithout symbolic link 621 # 622 #def export_helas(mgme_dir, dir_path): 623 # 624 # # Copy the HELAS directory 625 # helas_dir = pjoin(mgme_dir, 'HELAS') 626 # for filename in os.listdir(helas_dir): 627 # if os.path.isfile(pjoin(helas_dir, filename)): 628 # shutil.copy2(pjoin(helas_dir, filename), 629 # pjoin(dir_path, 'Source', 'DHELAS')) 630 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 631 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 632 # 633 634 #=========================================================================== 635 # generate_subprocess_directory 636 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Routine to generate a subprocess directory (for inheritance)

        Subclasses create the P* directory and return the number of calls
        to helicity routines; the base class does nothing."""

        pass
643 644 #=========================================================================== 645 # get_source_libraries_list 646 #===========================================================================
647 - def get_source_libraries_list(self):
648 """ Returns the list of libraries to be compiling when compiling the 649 SOURCE directory. It is different for loop_induced processes and 650 also depends on the value of the 'output_dependencies' option""" 651 652 return ['$(LIBDIR)libdhelas.$(libext)', 653 '$(LIBDIR)libpdf.$(libext)', 654 '$(LIBDIR)libmodel.$(libext)', 655 '$(LIBDIR)libcernlib.$(libext)', 656 '$(LIBDIR)libbias.$(libext)']
657 658 #=========================================================================== 659 # write_source_makefile 660 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source directory makefile for MG4.
        (The previous docstring wrongly said 'nexternal.inc'.)

        Returns the replacement dictionary used to fill the makefile
        template; when a writer is given the filled template is written."""

        path = pjoin(_file_path, 'iolibs', 'template_files', 'madevent_makefile_source')
        set_of_lib = ' '.join(['$(LIBRARIES)'] + self.get_source_libraries_list())
        # mssm models need an extra treatcards step translating the
        # param_card into MG5_param.dat before libmodel can be built.
        if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
        else:
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''

        replace_dict= {'libraries': set_of_lib,
                       'model':model_line,
                       'additional_dsample': '',
                       'additional_dependencies':''}

        if writer:
            text = open(path).read() % replace_dict
            writer.write(text)

        return replace_dict
684 685 #=========================================================================== 686 # write_nexternal_madspin 687 #===========================================================================
688 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
689 """Write the nexternal_prod.inc file for madspin""" 690 691 replace_dict = {} 692 693 replace_dict['nexternal'] = nexternal 694 replace_dict['ninitial'] = ninitial 695 696 file = """ \ 697 integer nexternal_prod 698 parameter (nexternal_prod=%(nexternal)d) 699 integer nincoming_prod 700 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 701 702 # Write the file 703 if writer: 704 writer.writelines(file) 705 return True 706 else: 707 return replace_dict
708 709 #=========================================================================== 710 # write_helamp_madspin 711 #===========================================================================
712 - def write_helamp_madspin(self, writer, ncomb):
713 """Write the helamp.inc file for madspin""" 714 715 replace_dict = {} 716 717 replace_dict['ncomb'] = ncomb 718 719 file = """ \ 720 integer ncomb1 721 parameter (ncomb1=%(ncomb)d) 722 double precision helamp(ncomb1) 723 common /to_helamp/helamp """ % replace_dict 724 725 # Write the file 726 if writer: 727 writer.writelines(file) 728 return True 729 else: 730 return replace_dict
731 732 733 734 #=========================================================================== 735 # write_nexternal_file 736 #===========================================================================
737 - def write_nexternal_file(self, writer, nexternal, ninitial):
738 """Write the nexternal.inc file for MG4""" 739 740 replace_dict = {} 741 742 replace_dict['nexternal'] = nexternal 743 replace_dict['ninitial'] = ninitial 744 745 file = """ \ 746 integer nexternal 747 parameter (nexternal=%(nexternal)d) 748 integer nincoming 749 parameter (nincoming=%(ninitial)d)""" % replace_dict 750 751 # Write the file 752 if writer: 753 writer.writelines(file) 754 return True 755 else: 756 return replace_dict
757 #=========================================================================== 758 # write_pmass_file 759 #===========================================================================
760 - def write_pmass_file(self, writer, matrix_element):
761 """Write the pmass.inc file for MG4""" 762 763 model = matrix_element.get('processes')[0].get('model') 764 765 lines = [] 766 for wf in matrix_element.get_external_wavefunctions(): 767 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 768 if mass.lower() != "zero": 769 mass = "abs(%s)" % mass 770 771 lines.append("pmass(%d)=%s" % \ 772 (wf.get('number_external'), mass)) 773 774 # Write the file 775 writer.writelines(lines) 776 777 return True
778 779 #=========================================================================== 780 # write_ngraphs_file 781 #===========================================================================
782 - def write_ngraphs_file(self, writer, nconfigs):
783 """Write the ngraphs.inc file for MG4. Needs input from 784 write_configs_file.""" 785 786 file = " integer n_max_cg\n" 787 file = file + "parameter (n_max_cg=%d)" % nconfigs 788 789 # Write the file 790 writer.writelines(file) 791 792 return True
793 794 #=========================================================================== 795 # write_leshouche_file 796 #===========================================================================
    def write_leshouche_file(self, writer, matrix_element):
        """Write the leshouche.inc file for MG4
        (thin wrapper over get_leshouche_lines for a single subprocess)."""

        # Write the file
        writer.writelines(self.get_leshouche_lines(matrix_element, 0))

        return True
804 805 #=========================================================================== 806 # get_leshouche_lines 807 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Write the leshouche.inc file for MG4

        Builds the Fortran DATA statements for the Les Houches IDUP,
        MOTHUP and ICOLUP arrays of subprocess 'numproc'.
        Note: "%3r" on a small int gives its decimal repr padded to width 3,
        matching the fixed-width Fortran DATA layout."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG ids of the external legs for this process.
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mothers: initial-state legs have none (0), final-state
                # legs point at the initial-state pair.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1, nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    # (sign flips the representation for initial state).
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
857 858 859 860 861 #=========================================================================== 862 # write_maxamps_file 863 #===========================================================================
864 - def write_maxamps_file(self, writer, maxamps, maxflows, 865 maxproc,maxsproc):
866 """Write the maxamps.inc file for MG4.""" 867 868 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 869 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 870 (maxamps, maxflows) 871 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 872 (maxproc, maxsproc) 873 874 # Write the file 875 writer.writelines(file) 876 877 return True
878 879 880 #=========================================================================== 881 # Routines to output UFO models in MG4 format 882 #=========================================================================== 883
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO).

        Writes Source/MODEL via UFO_model_to_mg4 and Source/DHELAS via the
        aloha model, then links the model into the process directory.

        model: the MG5/UFO model object to convert.
        wanted_lorentz: Lorentz structures to restrict the aloha subroutine
            generation to (all are generated if empty).
        wanted_couplings: couplings passed through to the model builder.

        NOTE(review): both list defaults are mutable; they are only read
        here, but callers should not rely on them being fresh objects.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']
        self.model = model
        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        self.opt['exporter'] = self.__class__
        # NOTE(review): self.opt + self.proc_characteristic relies on these
        # option containers supporting '+' (they are not plain dicts) — confirm.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            try:
                # Silence the madgraph.models logger while loading.
                with misc.MuteLogger(['madgraph.models'], [60]):
                    aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
            except (ImportError, UFOError):
                # Fall back to the full model path if the basename lookup fails.
                aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # Use the loop-capable helas functions if any wanted Lorentz structure
        # carries an 'L' tag (loop structure).
        if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
946 947 948 #=========================================================================== 949 # Helper functions 950 #===========================================================================
951 - def modify_grouping(self, matrix_element):
952 """allow to modify the grouping (if grouping is in place) 953 return two value: 954 - True/False if the matrix_element was modified 955 - the new(or old) matrix element""" 956 957 return False, matrix_element
958 959 #=========================================================================== 960 # Helper functions 961 #===========================================================================
962 - def get_mg5_info_lines(self):
963 """Return info lines for MG5, suitable to place at beginning of 964 Fortran files""" 965 966 info = misc.get_pkg_info() 967 info_lines = "" 968 if info and 'version' in info and 'date' in info: 969 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 970 (info['version'], info['date']) 971 info_lines = info_lines + \ 972 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 973 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 974 else: 975 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 976 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 977 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 978 979 return info_lines
980
981 - def get_process_info_lines(self, matrix_element):
982 """Return info lines describing the processes for this matrix element""" 983 984 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 985 for process in matrix_element.get('processes')])
986 987
988 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
989 """Return the Helicity matrix definition lines for this matrix element""" 990 991 helicity_line_list = [] 992 i = 0 993 for helicities in matrix_element.get_helicity_matrix(): 994 i = i + 1 995 int_list = [i, len(helicities)] 996 int_list.extend(helicities) 997 helicity_line_list.append(\ 998 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 999 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 1000 1001 return "\n".join(helicity_line_list)
1002
1003 - def get_ic_line(self, matrix_element):
1004 """Return the IC definition line coming after helicities, required by 1005 switchmom in madevent""" 1006 1007 nexternal = matrix_element.get_nexternal_ninitial()[0] 1008 int_list = list(range(1, nexternal + 1)) 1009 1010 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 1011 ",".join([str(i) for \ 1012 i in int_list]))
1013
1014 - def set_chosen_SO_index(self, process, squared_orders):
1015 """ From the squared order constraints set by the user, this function 1016 finds what indices of the squared_orders list the user intends to pick. 1017 It returns this as a string of comma-separated successive '.true.' or 1018 '.false.' for each index.""" 1019 1020 user_squared_orders = process.get('squared_orders') 1021 split_orders = process.get('split_orders') 1022 1023 if len(user_squared_orders)==0: 1024 return ','.join(['.true.']*len(squared_orders)) 1025 1026 res = [] 1027 for sqsos in squared_orders: 1028 is_a_match = True 1029 for user_sqso, value in user_squared_orders.items(): 1030 if (process.get_squared_order_type(user_sqso) =='==' and \ 1031 value!=sqsos[split_orders.index(user_sqso)]) or \ 1032 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1033 value<sqsos[split_orders.index(user_sqso)]) or \ 1034 (process.get_squared_order_type(user_sqso) == '>' and \ 1035 value>=sqsos[split_orders.index(user_sqso)]): 1036 is_a_match = False 1037 break 1038 res.append('.true.' if is_a_match else '.false.') 1039 1040 return ','.join(res)
1041
1042 - def get_split_orders_lines(self, orders, array_name, n=5):
1043 """ Return the split orders definition as defined in the list orders and 1044 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1045 1046 ret_list = [] 1047 for index, order in enumerate(orders): 1048 for k in range(0, len(order), n): 1049 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1050 (array_name,index + 1, k + 1, min(k + n, len(order)), 1051 ','.join(["%5r" % i for i in order[k:k + n]]))) 1052 return ret_list
1053
1054 - def format_integer_list(self, list, name, n=5):
1055 """ Return an initialization of the python list in argument following 1056 the fortran syntax using the data keyword assignment, filling an array 1057 of name 'name'. It splits rows in chunks of size n.""" 1058 1059 ret_list = [] 1060 for k in range(0, len(list), n): 1061 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1062 (name, k + 1, min(k + n, len(list)), 1063 ','.join(["%5r" % i for i in list[k:k + n]]))) 1064 return ret_list
1065
    def get_color_data_lines(self, matrix_element, n=6):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Each row of the color matrix CF is written as DATA statements holding
        numerator/denominator as a double-precision value, followed by a
        comment showing the color-basis element the row corresponds to.
        """

        if not matrix_element.get('color_matrix'):
            # Colorless process: trivial 1x1 color matrix.
            return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
        else:
            ret_list = []
            my_cs = color.ColorString()
            for index, denominator in \
                enumerate(matrix_element.get('color_matrix').\
                                                 get_line_denominators()):
                # First write the common denominator for this color matrix line
                #ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                            get_line_numerators(index, denominator)

                # Numerators must be exact integers before the float division.
                assert all([int(i)==i for i in num_list])

                for k in range(0, len(num_list), n):
                    # num/denom is emitted as a Fortran double ('d' exponent).
                    ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                    (index + 1, k + 1, min(k + n, len(num_list)),
                                     ','.join([("%.15e" % (int(i)/denominator)).replace('e','d') for i in num_list[k:k + n]])))

                # Comment documenting which color structure this row matches.
                my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
                ret_list.append("C %s" % repr(my_cs))
            return ret_list
1094 1095
1096 - def get_den_factor_line(self, matrix_element):
1097 """Return the denominator factor line for this matrix element""" 1098 1099 return "DATA IDEN/%2r/" % \ 1100 matrix_element.get_denominator_factor()
1101
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        mapconfigs: list of diagram numbers, one per config (0 = skipped).
        num_matrix_element: 1-based index written into the DATA statements.
        Returns a list of Fortran DATA statement strings.  As a side effect,
        sets self.proc_characteristic['single_color'] to False whenever a
        subleading-color flow is encountered.
        """

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list


        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Crate dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]
                else:
                    # A subleading-color flow exists for this process.
                    self.proc_characteristic['single_color'] = False

        # NOTE: ijamp intentionally leaks from the loop above; the color
        # basis is guaranteed non-empty on this branch so it is defined.
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                            (iconfig+1, num_matrix_element, colamps,
                             ','.join(["%s" % booldict[b] for b in \
                                       bool_list])))

        return ret_list
1163
1164 - def get_amp2_lines(self, matrix_element, config_map = [], replace_dict=None):
1165 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1166 1167 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1168 # Get minimum legs in a vertex 1169 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1170 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1171 minvert = min(vert_list) if vert_list!=[] else 0 1172 1173 ret_lines = [] 1174 if config_map: 1175 # In this case, we need to sum up all amplitudes that have 1176 # identical topologies, as given by the config_map (which 1177 # gives the topology/config for each of the diagrams 1178 diagrams = matrix_element.get('diagrams') 1179 # Combine the diagrams with identical topologies 1180 config_to_diag_dict = {} 1181 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1182 if config_map[idiag] == 0: 1183 continue 1184 try: 1185 config_to_diag_dict[config_map[idiag]].append(idiag) 1186 except KeyError: 1187 config_to_diag_dict[config_map[idiag]] = [idiag] 1188 # Write out the AMP2s summing squares of amplitudes belonging 1189 # to eiher the same diagram or different diagrams with 1190 # identical propagator properties. Note that we need to use 1191 # AMP2 number corresponding to the first diagram number used 1192 # for that AMP2. 1193 for config in sorted(config_to_diag_dict.keys()): 1194 1195 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1196 {"num": (config_to_diag_dict[config][0] + 1)} 1197 1198 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1199 sum([diagrams[idiag].get('amplitudes') for \ 1200 idiag in config_to_diag_dict[config]], [])]) 1201 1202 # Not using \sum |M|^2 anymore since this creates troubles 1203 # when ckm is not diagonal due to the JIM mechanism. 
1204 if '+' in amp: 1205 amp = "(%s)*dconjg(%s)" % (amp, amp) 1206 else: 1207 amp = "%s*dconjg(%s)" % (amp, amp) 1208 1209 line = line + "%s" % (amp) 1210 #line += " * get_channel_cut(p, %s) " % (config) 1211 ret_lines.append(line) 1212 else: 1213 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1214 # Ignore any diagrams with 4-particle vertices. 1215 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1216 continue 1217 # Now write out the expression for AMP2, meaning the sum of 1218 # squared amplitudes belonging to the same diagram 1219 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1220 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1221 {"num": a.get('number')} for a in \ 1222 diag.get('amplitudes')]) 1223 ret_lines.append(line) 1224 1225 return ret_lines
1226 1227 #=========================================================================== 1228 # Returns the data statements initializing the coeffictients for the JAMP 1229 # decomposition. It is used when the JAMP initialization is decided to be 1230 # done through big arrays containing the projection coefficients. 1231 #===========================================================================
1232 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1233 n=50, Nc_value=3):
1234 """This functions return the lines defining the DATA statement setting 1235 the coefficients building the JAMPS out of the AMPS. Split rows in 1236 bunches of size n. 1237 One can specify the color_basis from which the color amplitudes originates 1238 so that there are commentaries telling what color structure each JAMP 1239 corresponds to.""" 1240 1241 if(not isinstance(color_amplitudes,list) or 1242 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1243 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs") 1244 1245 res_list = [] 1246 my_cs = color.ColorString() 1247 for index, coeff_list in enumerate(color_amplitudes): 1248 # Create the list of the complete numerical coefficient. 1249 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1250 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1251 coefficient in coeff_list] 1252 # Create the list of the numbers of the contributing amplitudes. 1253 # Mutliply by -1 for those which have an imaginary coefficient. 1254 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1255 for coefficient in coeff_list] 1256 # Find the common denominator. 
1257 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1258 num_list=[(coefficient*commondenom).numerator \ 1259 for coefficient in coefs_list] 1260 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1261 index+1,len(num_list))) 1262 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1263 index+1,commondenom)) 1264 if color_basis: 1265 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1266 res_list.append("C %s" % repr(my_cs)) 1267 for k in range(0, len(num_list), n): 1268 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1269 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1270 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1271 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1272 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1273 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1274 pass 1275 return res_list
1276 1277
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                                   split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code.

        Returns (res_list, max_tmp): the generated lines and the maximum
        number of TMP_JAMP contractions introduced by get_JAMP_lines.
        """

        # Let the user call get_JAMP_lines_split_order directly from a
        # NOTE: the inner '%s' is consumed here by str(split_order_amps);
        # the remaining leading '%s' is filled at each raise site below.
        error_msg="Malformed '%s' argument passed to the "+\
            "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error(error_msg%'col_amps')
        else:
            raise MadGraph5Error(error_msg%'col_amps')

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error(error_msg%'split_order_amps')
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error(error_msg%'split_order_amps')
        else:
            raise MadGraph5Error(error_msg%'split_order_amps')

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error(error_msg%'split_order_names')
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error(error_msg%'split_order_names')
            else:
                raise MadGraph5Error(error_msg%'split_order_names')

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        max_tmp = 0
        for i, amp_order in enumerate(split_order_amps):
            # Keep only the amplitude contributions belonging to this order.
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]])
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                    ['%s=%i'%order for order in zip(split_order_names,
                                                    amp_order[0])]))
            # MatchBox additionally needs the leading-color JAMPs (LNJAMP).
            if self.opt['export_format'] in ['madloop_matchbox']:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))[0])
            else:
                toadd, nb_tmp = self.get_JAMP_lines(col_amps_order,
                                JAMP_format="JAMP(%s,{0})".format(str(i+1)))
                res_list.extend(toadd)
                max_tmp = max(max_tmp, nb_tmp)

        return res_list, max_tmp
1354 1355
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split argument defines how the JAMP lines should be split in order
        not to be too long.

        Returns (res_list, n_tmp): the generated Fortran lines and the number
        of TMP_JAMP contractions introduced (0 when jamp_optim is disabled).
        """

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")
        else:
            raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")

        # (jamp_index, amp_number) -> accumulated complex coefficient; used by
        # the optional color-flow optimization pass below.
        all_element = {}
        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation chunks add the JAMP to itself.
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    # Numerical value of this contribution (Nc fixed to 3).
                    value = (1j if coefficient[2] else 1)* coefficient[0] * coefficient[1] * fractions.Fraction(3)**coefficient[3]
                    if (i+1, amp_number) not in all_element:
                        all_element[(i+1, amp_number)] = value
                    else:
                        all_element[(i+1, amp_number)] += value
                    if common_factor:
                        # Only the sign remains per term; magnitude was factored out.
                        res = (res + "%s" + AMP_format) % \
                                                   (self.coeff(coefficient[0],
                                                   coefficient[1] / abs(coefficient[1]),
                                                   coefficient[2],
                                                   coefficient[3]),
                                                   str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                           coefficient[1],
                                                           coefficient[2],
                                                           coefficient[3]),
                                                           str(amp_number))

                if common_factor:
                    res = res + ')'
                res_list.append(res)

        # Decide whether to run the color-flow optimization: command-line
        # option wins over the class default.
        if 'jamp_optim' in self.cmd_options:
            jamp_optim = banner_mod.ConfigFile.format_variable(self.cmd_options['jamp_optim'], bool, 'jamp_optim')
        else:
            # class default
            jamp_optim = self.jamp_optim

        if not jamp_optim:
            return res_list, 0
        else:
            saved = list(res_list)

        if len(all_element) > 1000:
            logger.info("Computing Color-Flow optimization [%s term]", len(all_element))
            start_time = time.time()
        else:
            start_time = 0

        res_list = []

        self.myjamp_count = 0
        # The optimizer works on plain complex values.
        for key in all_element:
            all_element[key] = complex(all_element[key])
        new_mat, defs = self.optimise_jamp(all_element)
        if start_time:
            logger.info("Color-Flow passed to %s term in %ss. Introduce %i contraction", len(new_mat), int(time.time()-start_time), len(defs))

        def format(frac):
            # Render a coefficient as a Fortran double-precision literal.
            # (Local helper; intentionally shadows the builtin only here.)
            if isinstance(frac, Fraction):
                if frac.denominator == 1:
                    return str(frac.numerator)
                else:
                    return "%id0/%id0" % (frac.numerator, frac.denominator)
            elif frac.real == frac:
                return ('%.15e' % frac.real).replace('e','d')
            else:
                return ('(%.15e,%.15e)' % (frac.real, frac.imag)).replace('e','d')

        # Emit the TMP_JAMP contractions found by the optimizer.  Positive
        # indices refer to AMPs, negative ones to previously defined TMP_JAMPs.
        for i, amp1, amp2, frac, nb in defs:
            if amp1 > 0:
                amp1 = AMP_format % amp1
            else:
                amp1 = "TMP_JAMP(%d)" % -amp1
            if amp2 > 0:
                amp2 = AMP_format % amp2
            else:
                amp2 = "TMP_JAMP(%d)" % -amp2

            if frac not in [1., -1]:
                res_list.append(' TMP_JAMP(%d) = %s + (%s) * %s ! used %d times' % (i,amp1, format(frac), amp2, nb))
            elif frac == 1.:
                res_list.append(' TMP_JAMP(%d) = %s + %s ! used %d times' % (i,amp1, amp2, nb))
            else:
                res_list.append(' TMP_JAMP(%d) = %s - %s ! used %d times' % (i,amp1, amp2, nb))

        # Re-assemble each JAMP from the remaining matrix entries.
        jamp_res = collections.defaultdict(list)
        max_jamp=0
        for (jamp, var), factor in new_mat.items():
            if var > 0:
                name = AMP_format % var
            else:
                name = "TMP_JAMP(%d)" % -var
            if factor not in [1.]:
                jamp_res[jamp].append("(%s)*%s" % (format(factor), name))
            elif factor ==1:
                jamp_res[jamp].append("%s" % (name))
            max_jamp = max(max_jamp, jamp)

        for i in range(1,max_jamp+1):
            name = JAMP_format % i
            if not jamp_res[i]:
                res_list.append(" %s = 0d0" %(name))
            else:
                res_list.append(" %s = %s" %(name, '+'.join(jamp_res[i])))

        return res_list, len(defs)
1521
    def optimise_jamp(self, all_element, nb_line=0, nb_col=0, added=0):
        """ optimise problem of type Y = A X
        A is a matrix (all_element)
        X is the fortran name of the input.
        The code iteratively add sub-expression jtemp[sub_add]
        and recall itself (this is add to the X size)

        all_element: {(row, col): coefficient} sparse matrix; MUTATED in
            place (entries are deleted / moved onto new negative columns).
        Negative column indices denote previously introduced TMP_JAMP
        contractions.
        Returns (new_matrix, definitions) where each definition is a tuple
        (tmp_index, col1, col2, ratio, use_count).
        """
        self.myjamp_count +=1

        # Determine matrix dimensions on the first (non-recursive) call.
        if not nb_line:
            for i,j in all_element:
                if i+1 > nb_line:
                    nb_line = i+1
                if j+1> nb_col:
                    nb_col = j+1

        # Count, for every column pair (j1, j2), how often the ratio
        # A[i,j2]/A[i,j1] takes each value R; the most frequent (pair, R)
        # is the best candidate for factoring out a common sub-expression.
        max_count = 0
        all_index = []
        operation = collections.defaultdict(lambda: collections.defaultdict(int))
        for i in range(nb_line):
            for j1 in range(-added, nb_col):
                v1 = all_element.get((i,j1), 0)
                if not v1:
                    continue
                for j2 in range(j1+1, nb_col):
                    R = all_element.get((i,j2), 0)/v1
                    if not R:
                        continue

                    operation[(j1,j2)][R] +=1
                    if operation[(j1,j2)][R] > max_count:
                        max_count = operation[(j1,j2)][R]
                        all_index = [(j1,j2, R)]
                    elif operation[(j1,j2)][R] == max_count:
                        all_index.append((j1,j2, R))
        # No pair occurs twice: nothing left to contract.
        if max_count <= 1:
            return all_element, []

        # Replace each best (j1, j2, R) pair by a new column -added holding
        # v1, standing for the contraction x_j1 + R * x_j2.
        to_add = []
        for index in all_index:
            j1,j2,R = index
            first = True
            for i in range(nb_line):
                v1 = all_element.get((i,j1), 0)
                v2 = all_element.get((i,j2), 0)
                if not v1 or not v2:
                    continue
                if v2/v1 == R:
                    if first:
                        # Allocate the TMP index only when the pair actually
                        # matches at least one row.
                        first = False
                        added +=1
                        to_add.append((added,j1,j2,R, max_count))

                    all_element[(i,-added)] = v1
                    del all_element[(i,j1)]
                    del all_element[(i,j2)]

        logger.log(5,"Define %d new shortcut reused %d times", len(to_add), max_count)
        # Recurse on the reduced matrix; prepend this level's definitions so
        # they appear before any definitions that use them.
        new_element, new_def = self.optimise_jamp(all_element, nb_line=nb_line, nb_col=nb_col, added=added)
        for one_def in to_add:
            new_def.insert(0, one_def)
        return new_element, new_def
1586 1587 1588 1589 1590
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        matrix_element: matrix element providing the processes and model.
        ninitial: number of initial-state particles (1 = decay, no PDFs).
        subproc_group: when True, beam indices are wrapped in IB(...) as
            required by grouped subprocess output.
        Returns a 3-tuple of strings (each without the trailing newline):
        (PDF variable declarations, PDF DATA initializations, PDF call lines).
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDF convolution, every IPROC gets weight 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            # Sanitize names into valid Fortran identifiers.
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in \
                                                 initial_states[i]]) + \
                                                 "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                            "/%d*1D0/" % len(initial_states[i]) + \
                                            "\n"

            # Get PDF lines for all different initial states: one PDG2PDF
            # call per (beam, particle), guarded on the beam type LPP.
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in list(pdf_codes.keys()):
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1,
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in list(pdf_codes.keys()):
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        # No PDF for this beam particle: weight 1.
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1697 1698 #=========================================================================== 1699 # write_props_file 1700 #===========================================================================
1701 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1702 """Write the props.inc file for MadEvent. Needs input from 1703 write_configs_file.""" 1704 1705 lines = [] 1706 1707 particle_dict = matrix_element.get('processes')[0].get('model').\ 1708 get('particle_dict') 1709 1710 for iconf, configs in enumerate(s_and_t_channels): 1711 for vertex in configs[0] + configs[1][:-1]: 1712 leg = vertex.get('legs')[-1] 1713 if leg.get('id') not in particle_dict: 1714 # Fake propagator used in multiparticle vertices 1715 mass = 'zero' 1716 width = 'zero' 1717 pow_part = 0 1718 else: 1719 particle = particle_dict[leg.get('id')] 1720 # Get mass 1721 if particle.get('mass').lower() == 'zero': 1722 mass = particle.get('mass') 1723 else: 1724 mass = "abs(%s)" % particle.get('mass') 1725 # Get width 1726 if particle.get('width').lower() == 'zero': 1727 width = particle.get('width') 1728 else: 1729 width = "abs(%s)" % particle.get('width') 1730 1731 pow_part = 1 + int(particle.is_boson()) 1732 1733 lines.append("prmass(%d,%d) = %s" % \ 1734 (leg.get('number'), iconf + 1, mass)) 1735 lines.append("prwidth(%d,%d) = %s" % \ 1736 (leg.get('number'), iconf + 1, width)) 1737 lines.append("pow(%d,%d) = %d" % \ 1738 (leg.get('number'), iconf + 1, pow_part)) 1739 1740 # Write the file 1741 writer.writelines(lines) 1742 1743 return True
1744 1745 #=========================================================================== 1746 # write_configs_file 1747 #===========================================================================
1748 - def write_configs_file(self, writer, matrix_element):
1749 """Write the configs.inc file for MadEvent""" 1750 1751 # Extract number of external particles 1752 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1753 1754 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1755 mapconfigs = [c[0] for c in configs] 1756 model = matrix_element.get('processes')[0].get('model') 1757 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1758 [[c[1]] for c in configs], 1759 mapconfigs, 1760 nexternal, ninitial, 1761 model)
1762 1763 #=========================================================================== 1764 # write_configs_file_from_diagrams 1765 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses."""

        lines = []

        s_and_t_channels = []

        # Smallest maximal vertex size over all configs; configs containing
        # any larger vertex are skipped below (multi-leg vertices only enter
        # via shrunk loops).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Placeholder PDG code guaranteed not to clash with any model particle
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config.
            # NOTE: the SAME empty_verts list object is stored for every
            # missing diagram; resizing it in place below pads all those
            # entries at once (deliberate aliasing).
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are per-subprocess tuples (possibly with
                # None gaps); t-channel entries are plain vertices
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess; 0 where the
                    # subprocess has no corresponding diagram
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1884 1885 #=========================================================================== 1886 # Global helper methods 1887 #=========================================================================== 1888
1889 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1890 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1891 1892 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1893 1894 if total_coeff == 1: 1895 if is_imaginary: 1896 return '+imag1*' 1897 else: 1898 return '+' 1899 elif total_coeff == -1: 1900 if is_imaginary: 1901 return '-imag1*' 1902 else: 1903 return '-' 1904 1905 res_str = '%+iD0' % total_coeff.numerator 1906 1907 if total_coeff.denominator != 1: 1908 # Check if total_coeff is an integer 1909 res_str = res_str + '/%iD0' % total_coeff.denominator 1910 1911 if is_imaginary: 1912 res_str = res_str + '*imag1' 1913 1914 return res_str + '*'
1915 1916
    def set_fortran_compiler(self, default_compiler, force=False):
        """Set compiler based on what's available on the system

        Probes, in priority order, the user-requested compiler, then
        gfortran/g77/f77, writes the result into Source/make_opts (and the
        Template copy when writable), and returns the chosen Fortran compiler.
        """

        # Check for compiler — priority order matters: requested, then
        # gfortran, g77, f77, then fall back to the (unfound) request.
        if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
            f77_compiler = default_compiler['fortran']
        elif misc.which('gfortran'):
            f77_compiler = 'gfortran'
        elif misc.which('g77'):
            f77_compiler = 'g77'
        elif misc.which('f77'):
            f77_compiler = 'f77'
        elif default_compiler['fortran']:
            logger.warning('No Fortran Compiler detected! Please install one')
            f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it
        else:
            raise MadGraph5Error('No Fortran Compiler detected! Please install one')
        logger.info('Use Fortran compiler ' + f77_compiler)


        # Check for compiler. 1. set default.
        if default_compiler['f2py']:
            f2py_compiler = default_compiler['f2py']
        else:
            f2py_compiler = ''
        # Try to find the correct one.
        # NOTE(review): the version_info[1] tests only look at the MINOR
        # version (Python-2 era f2py naming: f2py-2.6/f2py2.7); on e.g.
        # Python 3.6/3.7 they probe those same legacy names — confirm intent.
        if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
            f2py_compiler = default_compiler['f2py']
        elif misc.which('f2py'):
            f2py_compiler = 'f2py'
        elif sys.version_info[1] == 6:
            if misc.which('f2py-2.6'):
                f2py_compiler = 'f2py-2.6'
            elif misc.which('f2py2.6'):
                f2py_compiler = 'f2py2.6'
        elif sys.version_info[1] == 7:
            if misc.which('f2py-2.7'):
                f2py_compiler = 'f2py-2.7'
            elif misc.which('f2py2.7'):
                f2py_compiler = 'f2py2.7'

        to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}


        # Patch this output directory's make_opts
        self.replace_make_opt_f_compiler(to_replace)
        # Replace also for Template but not for cluster
        if 'MADGRAPH_DATA' not in os.environ and ReadWrite:
            self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

        return f77_compiler
1967 1968 # an alias for backward compatibility 1969 set_compiler = set_fortran_compiler 1970 1971
1972 - def set_cpp_compiler(self, default_compiler, force=False):
1973 """Set compiler based on what's available on the system""" 1974 1975 # Check for compiler 1976 if default_compiler and misc.which(default_compiler): 1977 compiler = default_compiler 1978 elif misc.which('g++'): 1979 #check if clang version 1980 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1981 stderr=subprocess.PIPE) 1982 out, _ = p.communicate() 1983 out = out.decode(errors='ignore') 1984 if 'clang' in str(out) and misc.which('clang'): 1985 compiler = 'clang' 1986 else: 1987 compiler = 'g++' 1988 elif misc.which('c++'): 1989 compiler = 'c++' 1990 elif misc.which('clang'): 1991 compiler = 'clang' 1992 elif default_compiler: 1993 logger.warning('No c++ Compiler detected! Please install one') 1994 compiler = default_compiler # maybe misc fail so try with it 1995 else: 1996 raise MadGraph5Error('No c++ Compiler detected! Please install one') 1997 logger.info('Use c++ compiler ' + compiler) 1998 self.replace_make_opt_c_compiler(compiler) 1999 # Replace also for Template but not for cluster 2000 if 'MADGRAPH_DATA' not in os.environ and ReadWrite and \ 2001 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 2002 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 2003 2004 return compiler
2005 2006
2007 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
2008 """Set FC=compiler in Source/make_opts""" 2009 2010 assert isinstance(compilers, dict) 2011 2012 mod = False #avoid to rewrite the file if not needed 2013 if not root_dir: 2014 root_dir = self.dir_path 2015 2016 compiler= compilers['fortran'] 2017 f2py_compiler = compilers['f2py'] 2018 if not f2py_compiler: 2019 f2py_compiler = 'f2py' 2020 for_update= {'DEFAULT_F_COMPILER':compiler, 2021 'DEFAULT_F2PY_COMPILER':f2py_compiler} 2022 make_opts = pjoin(root_dir, 'Source', 'make_opts') 2023 2024 try: 2025 common_run_interface.CommonRunCmd.update_make_opts_full( 2026 make_opts, for_update) 2027 except IOError: 2028 if root_dir == self.dir_path: 2029 logger.info('Fail to set compiler. Trying to continue anyway.')
2030
2031 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
2032 """Set CXX=compiler in Source/make_opts. 2033 The version is also checked, in order to set some extra flags 2034 if the compiler is clang (on MACOS)""" 2035 2036 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 2037 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 2038 2039 2040 # list of the variable to set in the make_opts file 2041 for_update= {'DEFAULT_CPP_COMPILER':compiler, 2042 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 2043 'STDLIB': '-lc++' if is_lc else '-lstdc++', 2044 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 2045 } 2046 2047 # for MOJAVE remove the MACFLAG: 2048 if is_clang: 2049 import platform 2050 version, _, _ = platform.mac_ver() 2051 if not version:# not linux 2052 version = 14 # set version to remove MACFLAG 2053 else: 2054 majversion, version = [int(x) for x in version.split('.',3)[:2]] 2055 2056 if majversion >= 11 or (majversion ==10 and version >= 14): 2057 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 2058 2059 if not root_dir: 2060 root_dir = self.dir_path 2061 make_opts = pjoin(root_dir, 'Source', 'make_opts') 2062 2063 try: 2064 common_run_interface.CommonRunCmd.update_make_opts_full( 2065 make_opts, for_update) 2066 except IOError: 2067 if root_dir == self.dir_path: 2068 logger.info('Fail to set compiler. Trying to continue anyway.') 2069 2070 return
2071
2072 #=============================================================================== 2073 # ProcessExporterFortranSA 2074 #=============================================================================== 2075 -class ProcessExporterFortranSA(ProcessExporterFortran):
2076 """Class to take care of exporting a set of matrix elements to 2077 MadGraph v4 StandAlone format.""" 2078 2079 matrix_template = "matrix_standalone_v4.inc" 2080
2081 - def __init__(self, *args,**opts):
2082 """add the format information compare to standard init""" 2083 2084 if 'format' in opts: 2085 self.format = opts['format'] 2086 del opts['format'] 2087 else: 2088 self.format = 'standalone' 2089 2090 self.prefix_info = {} 2091 ProcessExporterFortran.__init__(self, *args, **opts)
2092
    def copy_template(self, model):
        """Additional actions needed for setup of Template

        Builds the standalone output tree (Source/, SubProcesses/, bin/, lib/,
        Cards/), copies version info, the SubProcesses makefile (and check_sa.f
        for the plain 'standalone' format), make_opts, and writes the Source
        makefile. No-op if the directory already exists.
        """

        #First copy the full template tree if dir_path doesn't exit
        if os.path.isdir(self.dir_path):
            return

        logger.info('initialize a new standalone directory: %s' % \
                        os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')

        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))

        # Information at top-level
        #Write version info
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # No MG4 version file available: synthesize one from the MG5 info
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])


        # Add file in SubProcesses
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefileP'))

        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                        pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FileWriter(filename))
2141 2142 #=========================================================================== 2143 # export model files 2144 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model

        After the generic export done by the parent class, appends the
        update_as_param() helper to couplings.f and patches check_sa.f so that
        its setpara call uses the two-argument form.
        """

        super(ProcessExporterFortranSA,self).export_model_files(model_path)

        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
        call setpara('param_card.dat',.false.)
        return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Switch check_sa.f to the two-argument setpara call
        text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
2168 2169 #=========================================================================== 2170 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 2171 #===========================================================================
2172 - def write_procdef_mg5(self, file_pos, modelname, process_str):
2173 """ write an equivalent of the MG4 proc_card in order that all the Madevent 2174 Perl script of MadEvent4 are still working properly for pure MG5 run. 2175 Not needed for StandAlone so just return 2176 """ 2177 2178 return
2179 2180 2181 #=========================================================================== 2182 # Make the Helas and Model directories for Standalone directory 2183 #===========================================================================
2184 - def make(self):
2185 """Run make in the DHELAS and MODEL directories, to set up 2186 everything for running standalone 2187 """ 2188 2189 source_dir = pjoin(self.dir_path, "Source") 2190 logger.info("Running make for Helas") 2191 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2192 logger.info("Running make for Model") 2193 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2194 2195 #=========================================================================== 2196 # Create proc_card_mg5.dat for Standalone directory 2197 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
           generation proc_card_mg5.dat
           generate a global makefile

        Also compiles DHELAS/MODEL, and either writes the f2py glue
        (when prefixed matrix elements were generated) or extends the
        SubProcesses makefile with per-P-directory f2py targets.
        """

        compiler =  {'fortran': mg5options['fortran_compiler'],
                     'cpp': mg5options['cpp_compiler'],
                     'f2py': mg5options['f2py_compiler']}

        # Pick/record compilers, then build the static libraries
        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                             history, mg5options, flaglist)
        # Make the output importable as a python package
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        # Dead branch kept for reference (NLO reweighting support)
        if False:#'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                          pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # Prefixed mode: emit the dispatcher and f2py build files
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                            pjoin(self.dir_path,'SubProcesses','check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text+='all: %s\n\techo \'done\'' % deppython

            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
            ff.write(text)
            ff.close()
2247
    def write_f2py_splitter(self):
        """write a function to call the correct matrix element

        Generates SubProcesses/all_matrix.f containing smatrixhel (a
        dispatcher that routes a PDG list / process id to the matching
        prefixed smatrixhel), INITIALISE, CHANGE_PARA, update_all_coup,
        get_pdg_order and get_prefix, from self.prefix_info.
        """

        # Fortran template; %(...)s slots are filled from `formatting` below.
        template = """
%(python_information)s
      subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE
C ALPHAS is given at scale2 (SHOULD be different of 0 for loop induced, ignore for LO)

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in):: procid
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel, procid
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
c     if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END


      subroutine CHANGE_PARA(name, value)
      implicit none
CF2PY intent(in) :: name
CF2PY intent(in) :: value

      character*512 name
      double precision value

%(helreset_def)s

      include '../Source/MODEL/input.inc'
      include '../Source/MODEL/coupl.inc'

%(helreset_setup)s

      SELECT CASE (name)
%(parameter_setup)s
      CASE DEFAULT
         write(*,*) 'no parameter matching', name, value
      END SELECT

      return
      end

      subroutine update_all_coup()
      implicit none
      call coup()
      return
      end


      subroutine get_pdg_order(PDG, ALLPROC)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i)
      DATA PDGS/ %(pdgs)s /
      DATA PIDS/ %(pids)s /
      PDG = PDGS
      ALLPROC = PIDS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END


"""

        # prefix_info maps (pdg-tuple, procid) -> [fortran prefix, tag]
        allids = list(self.prefix_info.keys())
        allprefix = [self.prefix_info[key][0] for key in allids]
        min_nexternal = min([len(ids[0]) for ids in allids])
        max_nexternal = max([len(ids[0]) for ids in allids])

        # Machine-readable mapping comment placed at the top of the file
        info = []
        for (key, pid), (prefix, tag) in self.prefix_info.items():
            info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid))


        # Build the dispatch chain: outer branch on npdg (only when the
        # multiplicity varies), inner chain on the exact PDG list + procid
        text = []
        for n_ext in range(min_nexternal, max_nexternal+1):
            current_id = [ids[0] for ids in allids if len(ids[0])==n_ext]
            current_pid = [ids[1] for ids in allids if len(ids[0])==n_ext]
            if not current_id:
                continue
            if min_nexternal != max_nexternal:
                if n_ext == min_nexternal:
                    text.append(' if (npdg.eq.%i)then' % n_ext)
                else:
                    text.append(' else if (npdg.eq.%i)then' % n_ext)
            for ii,pdgs in enumerate(current_id):
                pid = current_pid[ii]
                condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)])
                if ii==0:
                    text.append( ' if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
                else:
                    text.append( ' else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition,pid,ii))
                text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs,pid)][0])
            text.append(' endif')
        #close the function
        if min_nexternal != max_nexternal:
            text.append('endif')

        # One CASE per model parameter for CHANGE_PARA
        params = self.get_model_parameter(self.model)
        parameter_setup =[]
        for key, var in params.items():
            parameter_setup.append('      CASE ("%s")\n          %s = value'
                                   % (key, var))

        # part for the resetting of the helicity
        helreset_def = []
        helreset_setup = []
        for prefix in set(allprefix):
            helreset_setup.append('      %shelreset = .true. ' % prefix)
            helreset_def.append('      logical %shelreset \n      common /%shelreset/ %shelreset' % (prefix, prefix, prefix))


        formatting = {'python_information':'\n'.join(info),
                          'smatrixhel': '\n'.join(text),
                          'maxpart': max_nexternal,
                          'nb_me': len(allids),
                          'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0'
                                           for i in range(max_nexternal) for (pdg,pid) in allids),
                          'prefix':'\',\''.join(allprefix),
                          'pids': ','.join(str(pid) for (pdg,pid) in allids),
                          'parameter_setup': '\n'.join(parameter_setup),
                          'helreset_def' : '\n'.join(helreset_def),
                          'helreset_setup' : '\n'.join(helreset_setup),
                          }
        formatting['lenprefix'] = len(formatting['prefix'])
        text = template % formatting
        # FortranWriter re-wraps/cases the code, so template layout is loose
        fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w')
        fsock.writelines(text)
        fsock.close()
2411
2412 - def get_model_parameter(self, model):
2413 """ returns all the model parameter 2414 """ 2415 params = {} 2416 for p in model.get('parameters')[('external',)]: 2417 name = p.name 2418 nopref = name[4:] if name.startswith('mdl_') else name 2419 params[nopref] = name 2420 2421 block = p.lhablock 2422 lha = '_'.join([str(i) for i in p.lhacode]) 2423 params['%s_%s' % (block.upper(), lha)] = name 2424 2425 return params
2426 2427 2428 2429 2430
2431 - def write_f2py_check_sa(self, matrix_element, writer):
2432 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2433 # To be implemented. It is just an example file, i.e. not crucial. 2434 return
2435
2436 - def write_f2py_makefile(self):
2437 """ """ 2438 # Add file in SubProcesses 2439 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2440 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2441
2442 - def create_MA5_cards(self,*args,**opts):
2443 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2444 pass
2445
    def compiler_choice(self, compiler):
        """ Different daughter classes might want different compilers.
        So this function is meant to be overloaded if desired.

        compiler: dict with 'fortran', 'cpp' and 'f2py' entries.
        """

        # Default behaviour: delegate to set_compiler (alias of
        # set_fortran_compiler), which probes the system and patches make_opts.
        self.set_compiler(compiler)
2451 2452 #=========================================================================== 2453 # generate_subprocess_directory 2454 #===========================================================================
    def generate_subprocess_directory(self, matrix_element, fortran_model,
                                      number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of helas calls written (0 when nothing was done,
        e.g. when an equivalent symmetric directory already exists).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if a directory equivalent under
            # initial-state swap + final-state permutation already exists,
            # bail out. The legs list is mutated in place to probe each
            # candidate name and restored afterwards.
            for i,proc in enumerate(matrix_element.get('processes')):

                tag = proc.get_tag()
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                    flegs = proc.get('legs')[2:]
                    for perm in itertools.permutations(flegs):
                        for i,p in enumerate(perm):
                            proc.get('legs')[i+2] = p
                        dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                                   "P%s" % proc.shell_string())
                        #restore original order
                        proc.get('legs')[2:] = legs[2:]
                        if os.path.exists(dirpath2):
                            proc.get('legs')[:] = legs
                            return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional per-process prefix for the fortran routine names
        # ('int' -> M<number>_, 'proc' -> derived from the process string)
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1]
            else:
                raise Exception('--prefix options supports only \'int\' and \'proc\'')
            # Record the mapping pdg-ids/procid -> prefix for the f2py splitter
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[(tuple(ids), proc.get('id'))] = \
                    [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        # Extra include files needed by the MadSpin production formats
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                             matrix_element,
                             s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                             nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb = matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                             ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(matrix_element.get_all_amplitudes()))

        # Generate diagrams (unless suppressed via the 'noeps' output option)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # With a prefix, check_sa.f must call the prefixed smatrix, so a
        # patched copy replaces the symlink
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            pat = re.compile('smatrix', re.I)
            new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
            with open(pjoin(dirpath, 'check_sa.f'),'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2594 2595 2596 #=========================================================================== 2597 # write_source_makefile 2598 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source-directory makefile for the standalone output.

        Fills the 'madevent_makefile_source' template with the list of
        libraries to build (DHELAS and MODEL only, for standalone) and the
        rule to build libmodel.  If ``writer`` is None nothing is written;
        the replacement dictionary is returned either way so subclasses can
        extend it.  (Previous docstring wrongly said "nexternal.inc".)
        """

        path = pjoin(_file_path, 'iolibs', 'template_files', 'madevent_makefile_source')
        # Standalone only needs the helicity-amplitude and model libraries.
        set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)'
        model_line = '''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n'''

        replace_dict = {'libraries': set_of_lib,
                        'model': model_line,
                        'additional_dsample': '',
                        'additional_dependencies': ''}

        text = open(path).read() % replace_dict

        if writer:
            writer.write(text)

        return replace_dict
2617 2618 #=========================================================================== 2619 # write_matrix_element_v4 2620 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format.

        If ``write`` is False, just return the replace_dict and do not write
        anything.  Returns 0 when the matrix element is empty, the number of
        (non-comment) helas calls when the file is written, and the
        replacement dictionary otherwise (for subclass update).
        """

        # Nothing to export for an empty matrix element.
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False

        if 'sa_symmetry' not in self.opt:
            self.opt['sa_symmetry'] = False

        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable': '', 'amp2_lines': '',
                        'proc_prefix': proc_prefix, 'proc_id': ''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)
        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] = \
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format'] == 'standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [])
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders = matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders) == 0:
            replace_dict['nSplitOrders'] = ''
            # Extract JAMP lines
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,), tuple(range(1, ngraphs + 1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders'] = 1
            replace_dict['split_order_str_list'] = ''
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp
        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders'] = len(amp_orders)
            replace_dict['nSqAmpSplitOrders'] = len(squared_orders)
            replace_dict['nSplitOrders'] = len(split_orders)
            replace_dict['split_order_str_list'] = str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders], 'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders, 'SQSPLITORDERS')
            replace_dict['ampsplitorders'] = '\n'.join(amp_so)
            replace_dict['sqsplitorders'] = '\n'.join(sqamp_so)
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines_split_order(\
                       matrix_element, amp_orders, split_order_names=split_orders)
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp
            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs'] = self.set_chosen_SO_index(
                              matrix_element.get('processes')[0], squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            # NOTE(review): this writer is opened with a relative path, so it
            # relies on the current working directory being the target
            # subprocess directory — confirm against callers.
            check_sa_writer = writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders, split_orders,
                nexternal, ninitial, proc_prefix, check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)""" % replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Pick the matrix.f template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format'] == 'standalone_msP':
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format'] == 'standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format'] == 'matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders) > 0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                # MadSpin formats cannot evaluate individual coupling orders.
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                 'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders) > 0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read() % replace_dict
                writer.writelines(content)
            # Count only real helas calls, not '#'-prefixed comment lines.
            return len([call for call in helas_calls if call.find('#') != 0])
        else:
            replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0])
            return replace_dict # for subclass update
2793
2794 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2795 nincoming, proc_prefix, writer):
2796 """ Write out a more advanced version of the check_sa drivers that 2797 individually returns the matrix element for each contributing squared 2798 order.""" 2799 2800 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2801 'template_files', 'check_sa_splitOrders.f')).read() 2802 printout_sq_orders=[] 2803 for i, squared_order in enumerate(squared_orders): 2804 sq_orders=[] 2805 for j, sqo in enumerate(squared_order): 2806 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2807 printout_sq_orders.append(\ 2808 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2809 %(i+1,' '.join(sq_orders),i+1)) 2810 printout_sq_orders='\n'.join(printout_sq_orders) 2811 replace_dict = {'printout_sqorders':printout_sq_orders, 2812 'nSplitOrders':len(squared_orders), 2813 'nexternal':nexternal, 2814 'nincoming':nincoming, 2815 'proc_prefix':proc_prefix} 2816 2817 if writer: 2818 writer.writelines(check_sa_content % replace_dict) 2819 else: 2820 return replace_dict
2821
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    default_opt = {'clean': False, 'complex_mass': False,
                   'export_format': 'matchbox', 'mp': False,
                   'sa_symmetry': True}

    # specific template of the born
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return Fortran lines mapping (row index, argument position) to the
        color-tensor indices of each color-basis entry, as an
        if/elseif chain on (in1, in2) setting ``out``.

        Returns "out = 1" when there is no color matrix, and a fallback
        "out = - 1" branch for indices outside the table.
        """

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        # start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in range(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str = repr(my_cs)
            # NOTE(review): compiled anew on every iteration; could be hoisted.
            t_match = re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg = []
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne']:
                    # Trivial color factor: contributes no indices.
                    continue
                if ctype not in ['T', 'Tr']:
                    raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype)
                # '0' acts as a separator between successive T/Tr factors.
                tmparg += ['0']
                arg += tmparg
            for j, v in enumerate(arg):
                output[(i_color, j)] = v

            for key in output:
                if matrix_strings == []:
                    # first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    # not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self, *args, **opts):
        # Matchbox output is compiled externally; nothing to build here.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Adding leading color part of the colorflow.

        Emits the standard JAMP definitions followed by leading-color
        ("LN"-prefixed) JAMPs built from the coefficients with no 1/Nc
        suppression (coefficient[3] == 0).  Returns (lines, 0).
        """

        if not JAMP_formatLC:
            JAMP_formatLC = "LN%s" % JAMP_format

        error_msg = "Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps, helas_objects.HelasMatrixElement)):
            col_amps = col_amps.get_color_amplitudes()
        elif(isinstance(col_amps, list)):
            if(col_amps and isinstance(col_amps[0], list)):
                col_amps = col_amps
            else:
                raise MadGraph5Error(error_msg % 'col_amps')
        else:
            raise MadGraph5Error(error_msg % 'col_amps')

        text, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                    JAMP_format=JAMP_format,
                                    AMP_format=AMP_format,
                                    split=-1)

        # Filter the col_ampls to generate only those without any 1/NC terms
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3] == 0:
                    to_add.append((coefficient, amp_number))
            LC_col_amps.append(to_add)

        text2, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                    JAMP_format=JAMP_formatLC,
                                    AMP_format=AMP_format,
                                    split=-1)
        text += text2

        # NOTE(review): the temporary-jamp count is deliberately reported as 0
        # here rather than nb — confirm downstream users do not need it.
        return text, 0
2933
2934 2935 2936 2937 #=============================================================================== 2938 # ProcessExporterFortranMW 2939 #=============================================================================== 2940 -class ProcessExporterFortranMW(ProcessExporterFortran):
2941 """Class to take care of exporting a set of matrix elements to 2942 MadGraph v4 - MadWeight format.""" 2943 2944 matrix_file="matrix_standalone_v4.inc" 2945 jamp_optim = False 2946
    def copy_template(self, model):
        """Additional actions needed for setup of Template:
        copy the MadWeight-specific template and python tools on top of the
        generic template created by the parent class, then generate the
        run-config, cuts.f and Source makefile.
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        misc.copytree(pjoin(MG5DIR, 'Template', 'MadWeight'),
                      pjoin(self.dir_path, 'Source', 'MadWeight'), True)
        misc.copytree(pjoin(MG5DIR, 'madgraph', 'madweight'),
                      pjoin(self.dir_path, 'bin', 'internal', 'madweight'), True)
        # setrun.f and run.inc live in Source/ for MadWeight, not in the
        # MadWeight subtree they are shipped in.
        files.mv(pjoin(self.dir_path, 'Source', 'MadWeight', 'src', 'setrun.f'),
                 pjoin(self.dir_path, 'Source', 'setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source', 'MadWeight', 'src', 'run.inc'),
                 pjoin(self.dir_path, 'Source', 'run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path, 'Source', 'run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        try:
            # NOTE(review): the os.open file descriptors are never closed —
            # a small fd leak per call; consider subprocess.DEVNULL.
            subprocess.call([os.path.join(self.dir_path, 'Source', 'MadWeight', 'bin', 'internal', 'pass_to_madweight')],
                            stdout = os.open(os.devnull, os.O_RDWR),
                            stderr = os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2983 2984 2985 2986 2987 #=========================================================================== 2988 # convert_model 2989 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert the UFO model as the parent class does, then additionally
        install a copy of the UFO model under bin/internal/ufomodel (with
        caches and data files filtered out) together with the restriction
        card, so MadWeight can re-read the model at run time.

        NOTE(review): the mutable default arguments are only forwarded, never
        mutated, so they are harmless here.
        """

        super(ProcessExporterFortranMW, self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc', '*.dat', '*.py~')
        try:
            # Drop any stale copy from a previous export.
            shutil.rmtree(pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'))
        except OSError as error:
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        misc.copytree(model_path,
                      pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'),
                      ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal', 'ufomodel',
                             'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
3013 3014 #=========================================================================== 3015 # generate_subprocess_directory 3016 #===========================================================================
3017 - def copy_python_file(self):
3018 """copy the python file require for the Template""" 3019 3020 # madevent interface 3021 cp(_file_path+'/interface/madweight_interface.py', 3022 self.dir_path+'/bin/internal/madweight_interface.py') 3023 cp(_file_path+'/interface/extended_cmd.py', 3024 self.dir_path+'/bin/internal/extended_cmd.py') 3025 cp(_file_path+'/interface/common_run_interface.py', 3026 self.dir_path+'/bin/internal/common_run_interface.py') 3027 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3028 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3029 cp(_file_path+'/iolibs/save_load_object.py', 3030 self.dir_path+'/bin/internal/save_load_object.py') 3031 cp(_file_path+'/madevent/gen_crossxhtml.py', 3032 self.dir_path+'/bin/internal/gen_crossxhtml.py') 3033 cp(_file_path+'/madevent/sum_html.py', 3034 self.dir_path+'/bin/internal/sum_html.py') 3035 cp(_file_path+'/various/FO_analyse_card.py', 3036 self.dir_path+'/bin/internal/FO_analyse_card.py') 3037 cp(_file_path+'/iolibs/file_writers.py', 3038 self.dir_path+'/bin/internal/file_writers.py') 3039 #model file 3040 cp(_file_path+'../models/check_param_card.py', 3041 self.dir_path+'/bin/internal/check_param_card.py') 3042 3043 #madevent file 3044 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3045 cp(_file_path+'/various/lhe_parser.py', 3046 self.dir_path+'/bin/internal/lhe_parser.py') 3047 3048 cp(_file_path+'/various/banner.py', 3049 self.dir_path+'/bin/internal/banner.py') 3050 cp(_file_path+'/various/shower_card.py', 3051 self.dir_path+'/bin/internal/shower_card.py') 3052 cp(_file_path+'/various/cluster.py', 3053 self.dir_path+'/bin/internal/cluster.py') 3054 3055 # logging configuration 3056 cp(_file_path+'/interface/.mg5_logging.conf', 3057 self.dir_path+'/bin/internal/me5_logging.conf') 3058 cp(_file_path+'/interface/coloring_logging.py', 3059 self.dir_path+'/bin/internal/coloring_logging.py')
3060 3061 3062 #=========================================================================== 3063 # Change the version of cuts.f to the one compatible with MW 3064 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        ``outpath`` may be None (write to SubProcesses/cuts.f), a path
        string, or an already-open file-like object.
        """

        template = open(pjoin(MG5DIR, 'Template', 'LO', 'SubProcesses', 'cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # Skip the whole Fortran if-block starting at the line containing
        # 'if(xqcut.gt.0d0' by tracking nested if/endif depth.
        # NOTE(review): this assumes the trigger line itself does not also
        # match the if(...)then regex (e.g. continuation lines in the
        # template) — confirm against Template/LO/SubProcesses/cuts.f.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
          cut_bw = .true.
      else
          stop 1
      endif
      return
      end
""")

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)
3114 3115 3116 3117 #=========================================================================== 3118 # Make the Helas and Model directories for Standalone directory 3119 #===========================================================================
3120 - def make(self):
3121 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 3122 everything for running madweight 3123 """ 3124 3125 source_dir = os.path.join(self.dir_path, "Source") 3126 logger.info("Running make for Helas") 3127 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 3128 logger.info("Running make for Model") 3129 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 3130 logger.info("Running make for PDF") 3131 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 3132 logger.info("Running make for CERNLIB") 3133 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 3134 logger.info("Running make for GENERIC") 3135 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 3136 logger.info("Running make for blocks") 3137 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 3138 logger.info("Running make for tools") 3139 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
3140 3141 #=========================================================================== 3142 # Create proc_card_mg5.dat for MadWeight directory 3143 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat,
        writing maxparticles.inc (also linked into the MadWeight blocks/tools
        directories), compiling all libraries, and delegating the common
        finalization to the parent class."""

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        # proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path, 'Source', 'maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # The MadWeight blocks/tools code also includes maxparticles.inc.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source', 'MadWeight', 'blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source', 'MadWeight', 'tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
3175 3176 3177 3178 #=========================================================================== 3179 # create the run_card for MW 3180 #===========================================================================
3181 - def create_run_card(self, matrix_elements, history):
3182 """ """ 3183 3184 run_card = banner_mod.RunCard() 3185 3186 # pass to default for MW 3187 run_card["run_tag"] = "\'not_use\'" 3188 run_card["fixed_ren_scale"] = "T" 3189 run_card["fixed_fac_scale"] = "T" 3190 run_card.remove_all_cut() 3191 3192 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 3193 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3194 python_template=True) 3195 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 3196 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3197 python_template=True)
3198 3199 #=========================================================================== 3200 # export model files 3201 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model

        Beyond the parent-class export this appends the UFO-style
        update_as_param routine to MODEL/couplings.f and patches setrun.f
        and initialization.f to call setpara with the explicit .true. flag.
        """

        super(ProcessExporterFortranMW, self).export_model_files(model_path)

        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text = """
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a')
        ff.write(text)
        ff.close()

        # Modify setrun.f
        text = open(os.path.join(self.dir_path, 'Source', 'setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(os.path.join(self.dir_path, 'Source', 'setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        # Modify initialization.f
        text = open(os.path.join(self.dir_path, 'SubProcesses', 'initialization.f')).read()
        text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)')
        fsock = open(os.path.join(self.dir_path, 'SubProcesses', 'initialization.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
3234 3235 #=========================================================================== 3236 # generate_subprocess_directory 3237 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files.

        Returns the number of helas calls written (0 for an empty matrix
        element).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; keep going and overwrite files.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath, 'matrix.f')
        calls, ncolor = self.write_matrix_element_v4(
                                    writers.FortranWriter(filename),
                                    matrix_element,
                                    fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): uses starting_dir=cwd (the original working directory)
        # rather than cwd=dirpath as the standalone exporter does — confirm
        # the intended semantics of ln() here.
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3345 3346 #=========================================================================== 3347 # write_matrix_element_v4 3348 #===========================================================================
3349 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3350 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3351 3352 if not matrix_element.get('processes') or \ 3353 not matrix_element.get('diagrams'): 3354 return 0 3355 3356 if writer: 3357 if not isinstance(writer, writers.FortranWriter): 3358 raise writers.FortranWriter.FortranWriterError(\ 3359 "writer not FortranWriter") 3360 3361 # Set lowercase/uppercase Fortran code 3362 writers.FortranWriter.downcase = False 3363 3364 replace_dict = {} 3365 3366 # Extract version number and date from VERSION file 3367 info_lines = self.get_mg5_info_lines() 3368 replace_dict['info_lines'] = info_lines 3369 3370 # Extract process info lines 3371 process_lines = self.get_process_info_lines(matrix_element) 3372 replace_dict['process_lines'] = process_lines 3373 3374 # Set proc_id 3375 replace_dict['proc_id'] = proc_id 3376 3377 # Extract number of external particles 3378 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3379 replace_dict['nexternal'] = nexternal 3380 3381 # Extract ncomb 3382 ncomb = matrix_element.get_helicity_combinations() 3383 replace_dict['ncomb'] = ncomb 3384 3385 # Extract helicity lines 3386 helicity_lines = self.get_helicity_lines(matrix_element) 3387 replace_dict['helicity_lines'] = helicity_lines 3388 3389 # Extract overall denominator 3390 # Averaging initial state color, spin, and identical FS particles 3391 den_factor_line = self.get_den_factor_line(matrix_element) 3392 replace_dict['den_factor_line'] = den_factor_line 3393 3394 # Extract ngraphs 3395 ngraphs = matrix_element.get_number_of_amplitudes() 3396 replace_dict['ngraphs'] = ngraphs 3397 3398 # Extract nwavefuncs 3399 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3400 replace_dict['nwavefuncs'] = nwavefuncs 3401 3402 # Extract ncolor 3403 ncolor = max(1, len(matrix_element.get('color_basis'))) 3404 replace_dict['ncolor'] = ncolor 3405 3406 # Extract color data lines 3407 color_data_lines = self.get_color_data_lines(matrix_element) 3408 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3409 3410 # Extract helas calls 3411 helas_calls = fortran_model.get_matrix_element_calls(\ 3412 matrix_element) 3413 3414 replace_dict['helas_calls'] = "\n".join(helas_calls) 3415 3416 # Extract JAMP lines 3417 jamp_lines, nb = self.get_JAMP_lines(matrix_element) 3418 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3419 3420 replace_dict['template_file'] = os.path.join(_file_path, \ 3421 'iolibs/template_files/%s' % self.matrix_file) 3422 replace_dict['template_file2'] = '' 3423 3424 if writer: 3425 file = open(replace_dict['template_file']).read() 3426 file = file % replace_dict 3427 # Write the file 3428 writer.writelines(file) 3429 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3430 else: 3431 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3432 3433 #=========================================================================== 3434 # write_source_makefile 3435 #===========================================================================
def write_source_makefile(self, writer):
    """Write the Source-level makefile for MadWeight.

    Fills the 'madweight_makefile_source' template with the list of
    libraries that the Source make must build and link, and writes the
    result through *writer*.

    NOTE(review): the previous docstring incorrectly claimed this wrote
    nexternal.inc; it writes the makefile.

    :param writer: file-writer object exposing write()
    :return: True on success
    """

    path = os.path.join(_file_path, 'iolibs', 'template_files',
                        'madweight_makefile_source')
    set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)'
    # Use a context manager so the template file handle is closed promptly
    # (the original left it to the garbage collector).
    with open(path) as template:
        text = template.read() % {'libraries': set_of_lib}
    writer.write(text)

    return True
3446
def write_phasespace_file(self, writer, nb_diag):
    """Write the phase-space include file: declares max_branches /
    max_configs (max_configs set to *nb_diag*) and the common block
    holding the current channel position."""

    tpl = """ include 'maxparticles.inc'
       integer max_branches
       parameter (max_branches=max_particles-1)
       integer max_configs
       parameter (max_configs=%(nb_diag)s)

c      channel position
       integer config_pos,perm_pos
       common /to_config/config_pos,perm_pos

"""

    writer.write(tpl % dict(nb_diag=nb_diag))
3463 3464 3465 #=========================================================================== 3466 # write_auto_dsig_file 3467 #===========================================================================
def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
    """Write the auto_dsig.f file for the differential cross section
    calculation, includes pdf call information (MadWeight format).

    Fills the 'auto_dsig_mw.inc' template with process/helicity/pdf
    information extracted from *matrix_element*.

    :param writer: FortranWriter-like object, or a false value to get the
        raw substitution dict back instead of writing the file
    :param matrix_element: helas matrix element to export
    :param proc_id: non-empty string when part of a subprocess group
    :return: 0 if there is nothing to export; the substitution dict when
        no writer is given; None otherwise (the file is written)
    """

    # Nothing to do for an empty matrix element.
    if not matrix_element.get('processes') or \
       not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    # Only decay (1 -> N) and scattering (2 -> N) topologies are supported.
    if ninitial < 1 or ninitial > 2:
        raise writers.FortranWriter.FortranWriterError(\
             """Need ninitial = 1 or 2 to write auto_dsig file""")

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id; numproc defaults to 1 and is overridden below for groups.
    replace_dict['proc_id'] = proc_id
    replace_dict['numproc'] = 1

    # Set dsig_line
    if ninitial == 1:
        # No conversion, since result of decay should be given in GeV
        dsig_line = "pd(0)*dsiguu"
    else:
        # Convert result (in GeV) to pb
        dsig_line = "pd(0)*conv*dsiguu"

    replace_dict['dsig_line'] = dsig_line

    # Extract pdf lines (the last flag tells whether this belongs to a
    # subprocess group).
    pdf_vars, pdf_data, pdf_lines = \
             self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # Lines that differ between subprocess group and regular output.
    if proc_id:
        replace_dict['numproc'] = int(proc_id)
        replace_dict['passcuts_begin'] = ""
        replace_dict['passcuts_end'] = ""
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_subdiag_lines'] = \
             """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
             COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
    else:
        # Regular output: wrap the computation in a PASSCUTS(PP) guard.
        replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
        replace_dict['passcuts_end'] = "ENDIF"
        replace_dict['define_subdiag_lines'] = ""

    if writer:
        # NOTE(review): template file handle is not explicitly closed here.
        file = open(os.path.join(_file_path, \
                         'iolibs/template_files/auto_dsig_mw.inc')).read()
        file = file % replace_dict
        # Write the file
        writer.writelines(file)
    else:
        # No writer: hand the substitution dict back to the caller.
        return replace_dict
3536 #=========================================================================== 3537 # write_configs_file 3538 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Numbers every diagram 1..N as a config, wraps each diagram in a
    one-element list (single subprocess) and delegates the actual
    writing to write_configs_file_from_diagrams.
    """

    # Number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    diagrams = matrix_element.get('diagrams')
    # One config per diagram, numbered from 1.
    mapconfigs = [idx + 1 for idx in range(len(diagrams))]
    # Single-subprocess case: each config holds exactly one diagram.
    wrapped_diagrams = [[diag] for diag in diagrams]

    model = matrix_element.get('processes')[0].get('model')
    return mapconfigs, self.write_configs_file_from_diagrams(
        writer,
        wrapped_diagrams,
        mapconfigs,
        nexternal, ninitial, matrix_element, model)
3552 3553 #=========================================================================== 3554 # write_run_configs_file 3555 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadWeight.

    Fills the 'madweight_run_config.inc' template (currently with a
    hard-coded 5 channels per job) and writes it through *writer*.

    :param writer: file-writer object exposing write()
    :return: True on success
    """

    path = os.path.join(_file_path, 'iolibs', 'template_files',
                        'madweight_run_config.inc')
    # Use a context manager so the template file handle is closed promptly
    # (the original left it to the garbage collector).
    with open(path) as template:
        text = template.read() % {'chanperjob': '5'}
    writer.write(text)
    return True
3563 3564 #=========================================================================== 3565 # write_configs_file_from_diagrams 3566 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, matrix_element, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    :return: list of [s_channels, t_channels] pairs, one per written
        config (taken from the first contributing subprocess).
    """

    lines = []

    # PDG -> particle object map, used to look up propagator mass/width.
    particle_dict = matrix_element.get('processes')[0].get('model').\
                    get('particle_dict')

    s_and_t_channels = []

    # Largest vertex size (number of legs) per config, taken from the
    # first non-None diagram of each config.
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]

    minvert = min(vert_list) if vert_list!=[] else 0
    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # Fresh fake PDG code for propagators that have no real PDG.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model, new_pdg))
            else:
                # Placeholder shared by all missing subprocesses; it is
                # resized in place (empty_verts[:] below) once the real
                # vertex count is known.
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s, t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s, t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s, t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s, t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = list(zip(*[s for s, t in stchannels]))
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        # Correspondance between the config and the diagram = amp2
        lines.append("* %d %d " % (nconfigs,
                                   mapconfigs[iconfig]))

        for verts in allchannels:
            if verts in schannels:
                # s-channel entry: tuple of per-subprocess vertices; use
                # the first non-None one as representative.
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            # Output line: propagator number, the two daughters ...
            line = str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])

            if last_leg.get('id') == 21 and 21 not in particle_dict:
                # Fake propagator used in multiparticle vertices
                mass = 'zero'
                width = 'zero'
                pow_part = 0
            else:
                if (last_leg.get('id') != 7):
                    particle = particle_dict[last_leg.get('id')]
                    # Get mass
                    mass = particle.get('mass')
                    # Get width
                    width = particle.get('width')
                else:  # fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'

            # ... then the propagator mass and width names.
            line = line+" "+mass+" "+width+" "

            if verts in schannels:
                # Collect the propagator PDG per subprocess (0 when the
                # subprocess does not contribute).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                # Tag the line as s-channel with the representative PDG.
                lines.append(line+" S "+str(last_leg.get('id')))
            elif verts in tchannels:
                # Tag the line as t-channel with the representative PDG.
                lines.append(line+" T "+str(last_leg.get('id')))

    # Write out number of configs
    lines.append(" * ")  # a line with just a star indicates this is the end of file
    # Write the file
    writer.writelines(lines)

    return s_and_t_channels
3709
3710 3711 3712 #=============================================================================== 3713 # ProcessExporterFortranME 3714 #=============================================================================== 3715 -class ProcessExporterFortranME(ProcessExporterFortran):
"""Class to take care of exporting a set of matrix elements to
MadEvent format."""

# Template used when writing each matrix<i>.f file
# (interpolated as 'iolibs/template_files/<matrix_file>').
matrix_file = "matrix_madevent_v4.inc"
# Class-level flag -- presumably set once a t-channel related warning
# has been emitted so it is not repeated; shared across instances.
done_warning_tchannel = False

# Default exporter options; entries such as 'hel_recycling' can be
# overridden through opt['output_options'] in __init__.
default_opt = {'clean': False, 'complex_mass':False,
               'export_format':'madevent', 'mp': False,
               'v5_model': True,
               'output_options':{},
               'hel_recycling': False
               }
# NOTE(review): name suggests optimised JAMP (color amplitude) summation
# is enabled -- confirm against get_JAMP_lines.
jamp_optim = True
def __new__(cls, *args, **opts):
    """wrapper needed for some plugin

    Accepts and ignores any constructor arguments so plugin subclasses
    with richer __init__ signatures can still be instantiated; the
    parent __new__ is called with the class only.
    """

    return super(ProcessExporterFortranME, cls).__new__(cls)
3735 3736
def __init__(self, dir_path = "", opt=None):
    """Initialise the exporter, then normalise the optional
    'hel_recycling' (bool) and 't_strategy' (int) entries of
    opt['output_options'] into self.opt."""

    super(ProcessExporterFortranME, self).__init__(dir_path, opt)

    # check and format the supported output_options entries if provided
    for option_name, option_type in (('hel_recycling', bool),
                                     ('t_strategy', int)):
        if opt and isinstance(opt['output_options'], dict) and \
                option_name in opt['output_options']:
            self.opt[option_name] = banner_mod.ConfigFile.format_variable(
                opt['output_options'][option_name], option_type, option_name)
3751 3752 # helper function for customise helas writter 3753 @staticmethod
def custom_helas_call(call, arg):
    """Customise a helas call for the fake-width treatment.

    If the 'mass' entry of *arg* is exactly the standard '%(M)s,%(W)s,'
    pair, replace the width placeholder by its fake-width counterpart
    'fk_%(W)s'.  Any other mass format still containing a width
    placeholder is unsupported and raises.

    Returns the (call, arg) pair, with arg modified in place.
    """
    standard_pair = '%(M)s,%(W)s,'
    if arg['mass'] == standard_pair:
        # swap in the fake width symbol
        arg['mass'] = '%(M)s, fk_%(W)s,'
        return call, arg
    if '%(W)s' in arg['mass']:
        # width appears in an unexpected format -- refuse to guess
        raise Exception
    return call, arg
3760
def copy_template(self, model):
    """Additional actions needed for setup of Template
    """

    super(ProcessExporterFortranME, self).copy_template(model)

    # File created from Template (Different in some child class)
    self.write_run_config_file(writers.FortranWriter(
        pjoin(self.dir_path, 'Source', 'run_config.inc')))

    # The next files are model dependent (due to SLAH convention)
    self.model_name = model.get('name')
    # Add the symmetry.f
    self.write_symmetry(writers.FortranWriter(
        pjoin(self.dir_path, 'SubProcesses', 'symmetry.f')))
    # Add the addmothers.f
    self.write_addmothers(writers.FortranWriter(
        pjoin(self.dir_path, 'SubProcesses', 'addmothers.f')))
    # Copy the different python files in the Template
    self.copy_python_file()
3781 3782 3783 3784 3785 3786 3787 #=========================================================================== 3788 # generate_subprocess_directory 3789 #===========================================================================
def copy_python_file(self):
    """copy the python file require for the Template"""

    # (source relative to the MG5 root, destination relative to the
    # output directory) -- copied in this exact order.
    early_copies = [
        # madevent interface
        ('/interface/madevent_interface.py', '/bin/internal/madevent_interface.py'),
        ('/interface/extended_cmd.py', '/bin/internal/extended_cmd.py'),
        ('/interface/common_run_interface.py', '/bin/internal/common_run_interface.py'),
        ('/various/misc.py', '/bin/internal/misc.py'),
        ('/iolibs/files.py', '/bin/internal/files.py'),
        ('/iolibs/save_load_object.py', '/bin/internal/save_load_object.py'),
        ('/iolibs/file_writers.py', '/bin/internal/file_writers.py'),
        # model file
        ('../models/check_param_card.py', '/bin/internal/check_param_card.py'),
    ]
    for src, dest in early_copies:
        cp(_file_path + src, self.dir_path + dest)

    # copy every python module present in the madevent directory
    for name in os.listdir(pjoin(_file_path, 'madevent')):
        if name not in ['__init__.py'] and name.endswith('.py'):
            cp(_file_path + '/madevent/' + name, self.dir_path + '/bin/internal/')

    late_copies = [
        # madevent file
        ('/__init__.py', '/bin/internal/__init__.py'),
        ('/various/lhe_parser.py', '/bin/internal/lhe_parser.py'),
        ('/various/banner.py', '/bin/internal/banner.py'),
        ('/various/histograms.py', '/bin/internal/histograms.py'),
        ('/various/plot_djrs.py', '/bin/internal/plot_djrs.py'),
        ('/various/systematics.py', '/bin/internal/systematics.py'),
        ('/various/cluster.py', '/bin/internal/cluster.py'),
        ('/madevent/combine_runs.py', '/bin/internal/combine_runs.py'),
        # logging configuration
        ('/interface/.mg5_logging.conf', '/bin/internal/me5_logging.conf'),
        ('/interface/coloring_logging.py', '/bin/internal/coloring_logging.py'),
        # shower card and FO_analyse_card.
        # Although not needed, it is imported by banner.py
        ('/various/shower_card.py', '/bin/internal/shower_card.py'),
        ('/various/FO_analyse_card.py', '/bin/internal/FO_analyse_card.py'),
    ]
    for src, dest in late_copies:
        cp(_file_path + src, self.dir_path + dest)
3842 3843
def convert_model(self, model, wanted_lorentz = [],
                  wanted_couplings = []):
    """Convert the UFO model and install a copy of it under
    bin/internal/ufomodel, together with the default restriction card
    when the model carries one."""

    super(ProcessExporterFortranME, self).convert_model(
        model, wanted_lorentz, wanted_couplings)

    ufomodel_dir = pjoin(self.dir_path, 'bin', 'internal', 'ufomodel')
    # Drop any stale copy before re-copying; a missing directory is fine.
    try:
        shutil.rmtree(ufomodel_dir)
    except OSError:
        pass

    # This is not safe if there is a '##' or '-' in the path.
    misc.copytree(model.get('modelpath'),
                  ufomodel_dir,
                  ignore=shutil.ignore_patterns('*.pyc', '*.dat', '*.py~'))

    if hasattr(model, 'restrict_card'):
        out_path = pjoin(ufomodel_dir, 'restrict_default.dat')
        if isinstance(model.restrict_card, check_param_card.ParamCard):
            model.restrict_card.write(out_path)
        else:
            files.cp(model.restrict_card, out_path)
3867 3868 #=========================================================================== 3869 # export model files 3870 #===========================================================================
def export_model_files(self, model_path):
    """export the model dependent files"""

    super(ProcessExporterFortranME, self).export_model_files(model_path)

    # Add the routine update_as_param in v4 model
    # This is a function created in the UFO
    text = """
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
    with open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a') as ff:
        ff.write(text)

    # Add the symmetry.f
    self.write_symmetry(writers.FortranWriter(
        pjoin(self.dir_path, 'SubProcesses', 'symmetry.f')), v5=False)

    # Modify setrun.f so setpara is called with the read-only flag
    setrun_path = pjoin(self.dir_path, 'Source', 'setrun.f')
    with open(setrun_path) as fsock:
        text = fsock.read()
    text = text.replace('call setpara(param_card_name)',
                        'call setpara(param_card_name, .true.)')
    with open(setrun_path, 'w') as fsock:
        fsock.write(text)

    self.make_model_symbolic_link()
3900 3901 #=========================================================================== 3902 # generate_subprocess_directory 3903 #===========================================================================
def generate_subprocess_directory(self, matrix_element,
                                  fortran_model,
                                  me_number):
    """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
    including the necessary matrix.f and various helper files.

    :param matrix_element: helas matrix element for this subprocess
    :param fortran_model: helas call writer used to produce the calls
    :param me_number: ordinal of this matrix element (used for iproc.dat
        and as subproc_number of the matrix file)
    :return: number of non-comment helas calls written (0 when none)
    """

    cwd = os.getcwd()  # NOTE(review): kept but no longer used (chdir removed)
    path = pjoin(self.dir_path, 'SubProcesses')

    # Cache the model on first use.
    if not self.model:
        self.model = matrix_element.get('processes')[0].get('model')

    # Create the directory PN_xx_xxxxx in the specified path
    subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
    try:
        os.mkdir(pjoin(path, subprocdir))
    except os.error as error:
        # Directory may already exist; warn and keep writing into it.
        logger.warning(error.strerror + " " + subprocdir)

    logger.info('Creating files in directory %s' % subprocdir)
    Ppath = pjoin(path, subprocdir)

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Add the driver.f
    ncomb = matrix_element.get_helicity_combinations()
    filename = pjoin(Ppath, 'driver.f')
    self.write_driver(writers.FortranWriter(filename), ncomb, n_grouped_proc=1,
                      v5=self.opt['v5_model'])

    # Create the matrix.f file, auto_dsig.f file and all inc files.
    # With helicity recycling the raw file is post-processed later, so it
    # is written under a different name.
    if self.opt['hel_recycling']:
        filename = pjoin(Ppath, 'matrix_orig.f')
    else:
        filename = pjoin(Ppath, 'matrix.f')
    calls, ncolor = \
        self.write_matrix_element_v4(writers.FortranWriter(filename),
                                     matrix_element, fortran_model,
                                     subproc_number = me_number)

    filename = pjoin(Ppath, 'auto_dsig.f')
    self.write_auto_dsig_file(writers.FortranWriter(filename),
                              matrix_element)

    filename = pjoin(Ppath, 'configs.inc')
    mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
        writers.FortranWriter(filename),
        matrix_element)

    filename = pjoin(Ppath, 'config_nqcd.inc')
    self.write_config_nqcd_file(writers.FortranWriter(filename),
                                nqcd_list)

    filename = pjoin(Ppath, 'config_subproc_map.inc')
    self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                       s_and_t_channels)

    filename = pjoin(Ppath, 'coloramps.inc')
    self.write_coloramps_file(writers.FortranWriter(filename),
                              mapconfigs,
                              matrix_element)

    filename = pjoin(Ppath, 'get_color.f')
    self.write_colors_file(writers.FortranWriter(filename),
                           matrix_element)

    filename = pjoin(Ppath, 'decayBW.inc')
    self.write_decayBW_file(writers.FortranWriter(filename),
                            s_and_t_channels)

    filename = pjoin(Ppath, 'dname.mg')
    self.write_dname_file(writers.FileWriter(filename),
                          "P"+matrix_element.get('processes')[0].shell_string())

    filename = pjoin(Ppath, 'iproc.dat')
    self.write_iproc_file(writers.FortranWriter(filename),
                          me_number)

    filename = pjoin(Ppath, 'leshouche.inc')
    self.write_leshouche_file(writers.FortranWriter(filename),
                              matrix_element)

    filename = pjoin(Ppath, 'maxamps.inc')
    self.write_maxamps_file(writers.FortranWriter(filename),
                            len(matrix_element.get('diagrams')),
                            ncolor,
                            len(matrix_element.get('processes')),
                            1)

    filename = pjoin(Ppath, 'mg.sym')
    self.write_mg_sym_file(writers.FortranWriter(filename),
                           matrix_element)

    filename = pjoin(Ppath, 'ncombs.inc')
    self.write_ncombs_file(writers.FortranWriter(filename),
                           nexternal)

    filename = pjoin(Ppath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = pjoin(Ppath, 'ngraphs.inc')
    self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(mapconfigs))

    filename = pjoin(Ppath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = pjoin(Ppath, 'props.inc')
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    # Find config symmetries and permutations
    symmetry, perms, ident_perms = \
              diagram_symmetry.find_symmetry(matrix_element)

    filename = pjoin(Ppath, 'symswap.inc')
    self.write_symswap_file(writers.FortranWriter(filename),
                            ident_perms)

    filename = pjoin(Ppath, 'symfact_orig.dat')
    self.write_symfact_file(open(filename, 'w'), symmetry)

    # Generate diagrams (unless eps output is disabled via output_options)
    if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
        filename = pjoin(Ppath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

    self.link_files_in_SubProcess(Ppath)

    # import nexternal/leshouche in Source
    ln(pjoin(Ppath, 'nexternal.inc'), pjoin(self.dir_path, 'Source'), log=False)
    ln(pjoin(Ppath, 'leshouche.inc'), pjoin(self.dir_path, 'Source'), log=False)
    ln(pjoin(Ppath, 'maxamps.inc'), pjoin(self.dir_path, 'Source'), log=False)

    # Add subprocess to subproc.mg
    filename = pjoin(path, 'subproc.mg')
    files.append_to_file(filename,
                         self.write_subproc,
                         subprocdir)

    # Generate info page
    gen_infohtml.make_info_html(self.dir_path)

    # Normalise the return value: a falsy call count becomes 0.
    if not calls:
        calls = 0
    return calls
# Files living in SubProcesses/ that each P* directory references via a
# link (see link_files_in_SubProcess) instead of holding a private copy.
link_Sub_files = ['addmothers.f',
                  'cluster.f',
                  'cluster.inc',
                  'coupl.inc',
                  'cuts.f',
                  'cuts.inc',
                  'genps.f',
                  'genps.inc',
                  'idenparts.f',
                  'initcluster.f',
                  'makefile',
                  'message.inc',
                  'myamp.f',
                  'reweight.f',
                  'run.inc',
                  'maxconfigs.inc',
                  'maxparticles.inc',
                  'run_config.inc',
                  'lhe_event_infos.inc',
                  'setcuts.f',
                  'setscales.f',
                  'sudakov.inc',
                  'symmetry.f',
                  'unwgt.f',
                  'dummy_fct.f'
                  ]
def finalize(self, matrix_elements, history, mg5options, flaglist):
    """Finalize ME v4 directory by creating jpeg diagrams, html
    pages, proc_card_mg5.dat and madevent.tar.gz.

    :param matrix_elements: grouped or plain matrix-element container
        (both layouts are handled, see the try/except blocks below)
    :param history: command history, written out as proc_card_mg5.dat
    :param mg5options: MG5 option dict (compilers, complex_mass_scheme, ...)
    :param flaglist: iterable of flags; 'nojpeg' and 'online' are honoured
    """

    if 'nojpeg' in flaglist:
        makejpg = False
    else:
        makejpg = True
    if 'online' in flaglist:
        online = True
    else:
        online = False

    compiler = {'fortran': mg5options['fortran_compiler'],
                'cpp': mg5options['cpp_compiler'],
                'f2py': mg5options['f2py_compiler']}

    # indicate that the output type is not grouped
    if not isinstance(self, ProcessExporterFortranMEGroup):
        self.proc_characteristic['grouped_matrix'] = False

    self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

    # indicate the PDG of all initial particle.  The first form handles
    # grouped output (a list of groups); the AttributeError fallback
    # handles a single container exposing get('matrix_elements').
    try:
        pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
        pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
    except AttributeError:
        pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
        pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
    self.proc_characteristic['pdg_initial1'] = pdgs1
    self.proc_characteristic['pdg_initial2'] = pdgs2

    # mssm-family models need their param_card converted to MG5 format.
    modelname = self.opt['model']
    if modelname == 'mssm' or modelname.startswith('mssm-'):
        param_card = pjoin(self.dir_path, 'Cards', 'param_card.dat')
        mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
        check_param_card.convert_to_mg5card(param_card, mg5_param)
        check_param_card.check_valid_param_card(mg5_param)

    # Add the combine_events.f modify param_card path/number of @X
    filename = pjoin(self.dir_path, 'Source', 'combine_events.f')
    # Same grouped/plain duality as for the PDG extraction above.
    try:
        nb_proc = [p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
    except AttributeError:
        nb_proc = [p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
    nb_proc = len(set(nb_proc))
    self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted

    # Write maxconfigs.inc based on max of ME's/subprocess groups
    filename = pjoin(self.dir_path, 'Source', 'maxconfigs.inc')
    self.write_maxconfigs_file(writers.FortranWriter(filename),
                               matrix_elements)

    # Write maxparticles.inc based on max of ME's/subprocess groups
    filename = pjoin(self.dir_path, 'Source', 'maxparticles.inc')
    self.write_maxparticles_file(writers.FortranWriter(filename),
                                 matrix_elements)

    # Touch "done" file
    os.system('touch %s/done' % pjoin(self.dir_path, 'SubProcesses'))

    # Check for compiler
    self.set_compiler(compiler)
    self.set_cpp_compiler(compiler['cpp'])

    old_pos = os.getcwd()
    subpath = pjoin(self.dir_path, 'SubProcesses')

    P_dir_list = [proc for proc in os.listdir(subpath)
                  if os.path.isdir(pjoin(subpath, proc)) and proc[0] == 'P']

    # NOTE(review): devnull fd is opened but never closed in this method.
    devnull = os.open(os.devnull, os.O_RDWR)
    # Convert the poscript in jpg files (if authorize)
    if makejpg:
        # Drop any stale placeholder; absence is not an error.
        try:
            os.remove(pjoin(self.dir_path, 'HTML', 'card.jpg'))
        except Exception as error:
            pass

        # jpeg conversion needs ghostscript to be available.
        if misc.which('gs'):
            logger.info("Generate jpeg diagrams")
            for Pdir in P_dir_list:
                misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                          stdout = devnull, cwd=pjoin(subpath, Pdir))

    logger.info("Generate web pages")
    # Create the WebPage using perl script

    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
              stdout = devnull, cwd=pjoin(self.dir_path))

    obj = gen_infohtml.make_info_html(self.dir_path)

    if online:
        nb_channel = obj.rep_rule['nb_gen_diag']
        open(pjoin(self.dir_path, 'Online'), 'w').write(str(nb_channel))
    # add the information to proc_charac
    self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

    # Write command history as proc_card_mg5
    if os.path.isdir(pjoin(self.dir_path, 'Cards')):
        output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
        history.write(output_file)

    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
              stdout = devnull)

    # create the proc_characteristic file
    self.create_proc_charac(matrix_elements, history)

    # create the run_card
    ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

    # Run "make" to generate madevent.tar.gz file
    if os.path.exists(pjoin(self.dir_path, 'SubProcesses', 'subproc.mg')):
        if os.path.exists(pjoin(self.dir_path, 'madevent.tar.gz')):
            os.remove(pjoin(self.dir_path, 'madevent.tar.gz'))
        misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                  stdout = devnull, cwd=self.dir_path)

    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
              stdout = devnull, cwd=self.dir_path)
4249 4250 4251 4252 4253 4254 4255 #return to the initial dir 4256 #os.chdir(old_pos) 4257 4258 #=========================================================================== 4259 # write_matrix_element_v4 4260 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                proc_id = "", config_map = [], subproc_number = ""):
        """Export a matrix element to a matrix.f file in MG4 madevent format.

        Parameters:
            writer: a writers.FortranWriter, or a false value to get the
                substitution dictionary back instead of writing the file.
            matrix_element: helas matrix element providing processes, diagrams,
                color basis, helicities, ...
            fortran_model: helas call writer used to generate the HELAS calls.
            proc_id: subprocess-group id as a string; empty for standard running.
            config_map: forwarded to get_amp2_lines.
                NOTE(review): mutable default argument -- harmless only if
                get_amp2_lines never mutates it; confirm.
            subproc_number: accepted for interface compatibility; not used here.

        Returns:
            (number of non-comment helas calls, ncolor) when a writer is given,
            otherwise replace_dict with that pair stored under 'return_value'.
        """

        # Nothing to export for an empty matrix element.
        if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter")
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False

        # check if MLM/.../ is supported for this matrix-element and update associate flag
        if self.model and 'MLM' in self.model["limitations"]:
            if 'MLM' not in self.proc_characteristic["limitations"]:
                used_couplings = matrix_element.get_used_couplings(output="set")
                for vertex in self.model.get('interactions'):
                    particles = [p for p in vertex.get('particles')]
                    # only gluon-attached, non-singlet, non-QCD vertices can
                    # spoil MLM; flag the limitation if one of their couplings
                    # is actually used by this matrix element
                    if 21 in [p.get('pdg_code') for p in particles]:
                        colors = [par.get('color') for par in particles]
                        if 1 in colors:
                            continue
                        elif 'QCD' not in vertex.get('orders'):
                            for bad_coup in vertex.get('couplings').values():
                                if bad_coup in used_couplings:
                                    self.proc_characteristic["limitations"].append('MLM')
                                    break

        # The proc prefix is not used for MadEvent output so it can safely be set
        # to an empty string.
        replace_dict = {'proc_prefix':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)
        # warn once (class-level flag) about t-channel widths forced to zero
        if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel:
            logger.info("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False", '$MG:BOLD')
            ProcessExporterFortranME.done_warning_tchannel = True

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        #adding the support for the fake width (forbidding too small width)
        mass_width = matrix_element.get_all_mass_widths()
        mass_width = sorted(list(mass_width))
        width_list = set([e[1] for e in mass_width])

        replace_dict['fake_width_declaration'] = \
            (' double precision fk_%s \n' * len(width_list)) % tuple(width_list)
        replace_dict['fake_width_declaration'] += \
            (' save fk_%s \n' * len(width_list)) % tuple(width_list)
        fk_w_defs = []
        # fk_W keeps the sign of W but enforces |W| >= |M*small_width_treatment|
        one_def = ' IF(%(w)s.ne.0d0) fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)'
        for m, w in mass_width:
            if w == 'zero':
                if ' fk_zero = 0d0' not in fk_w_defs:
                    fk_w_defs.append(' fk_zero = 0d0')
                continue
            fk_w_defs.append(one_def %{'m':m, 'w':w})
        replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract IC line
        ic_line = self.get_ic_line(matrix_element)
        replace_dict['ic_line'] = ic_line

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        den_factor_line = self.get_den_factor_line(matrix_element)
        replace_dict['den_factor_line'] = den_factor_line

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract ndiags
        ndiags = len(matrix_element.get('diagrams'))
        replace_dict['ndiags'] = ndiags

        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] = \
            """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
            COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

        if proc_id:
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_iconfigs_lines'] += \
                """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                            proc_id
        else:
            # Standard running
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for an empty color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        # Set the size of Wavefunction: 18 components if the model contains
        # spin-3/2 or spin-2 particles (or is unknown), 6 otherwise.
        if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
            replace_dict['wavefunctionsize'] = 18
        else:
            replace_dict['wavefunctionsize'] = 6

        # Extract amp2 lines
        amp2_lines = self.get_amp2_lines(matrix_element, config_map, replace_dict)
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

        # The JAMP definition depends on the splitting order
        split_orders=matrix_element.get('processes')[0].get('split_orders')
        if len(split_orders)>0:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                matrix_element.get('processes')[0],squared_orders)
            replace_dict['select_configs_if'] = ' IF (CHOSEN_SO_CONFIGS(SQSOINDEX%(proc_id)s(M,N))) THEN' % replace_dict
            replace_dict['select_configs_endif'] = ' endif'
        else:
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            # addtionally set the function to NOT be called
            replace_dict['select_configs_if'] = ''
            replace_dict['select_configs_endif'] = ''

        replace_dict['nAmpSplitOrders']=len(amp_orders)
        replace_dict['nSqAmpSplitOrders']=len(squared_orders)
        replace_dict['split_order_str_list']=str(split_orders)
        replace_dict['nSplitOrders']=max(len(split_orders),1)
        amp_so = self.get_split_orders_lines(
            [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
        sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
        replace_dict['ampsplitorders']='\n'.join(amp_so)
        replace_dict['sqsplitorders']='\n'.join(sqamp_so)

        # Extract JAMP lines
        # If no split_orders then artificiall add one entry called 'ALL_ORDERS'
        jamp_lines, nb_temp = self.get_JAMP_lines_split_order(\
            matrix_element,amp_orders,split_order_names=
            split_orders if len(split_orders)>0 else ['ALL_ORDERS'])
        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
        replace_dict['nb_temp_jamp'] = nb_temp

        replace_dict['template_file'] = pjoin(_file_path, \
                         'iolibs/template_files/%s' % self.matrix_file)
        replace_dict['template_file2'] = pjoin(_file_path, \
                         'iolibs/template_files/split_orders_helping_functions.inc')

        # Spin states of the two initial legs (used for polarized beams)
        s1,s2 = matrix_element.get_spin_state_initial()
        replace_dict['nb_spin_state1'] = s1
        replace_dict['nb_spin_state2'] = s2

        if writer:
            file = open(replace_dict['template_file']).read()
            file = file % replace_dict
            # Add the split orders helper functions.
            file = file + '\n' + open(replace_dict['template_file2'])\
                                 .read()%replace_dict
            # Write the file
            writer.writelines(file)
            return len([call for call in helas_calls if call.find('#') != 0]), ncolor
        else:
            replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor)
            return replace_dict
4463 4464 #=========================================================================== 4465 # write_auto_dsig_file 4466 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information.

        Also updates self.proc_characteristic (ninitial, nexternal,
        max_n_matched_jets, colored_pdgs) as a side effect.

        Parameters:
            writer: a FortranWriter, or a false value to get back
                (replace_dict, context) instead of writing the file.
            matrix_element: helas matrix element to export.
            proc_id: subprocess-group id as string; empty for standard running.

        Raises:
            writers.FortranWriter.FortranWriterError: if ninitial is not 1 or 2.
        """

        # Nothing to do for an empty matrix element.
        if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        self.proc_characteristic['ninitial'] = ninitial
        self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

        # Add information relevant for MLM matching:
        # Maximum QCD power in all the contributions
        max_qcd_order = 0
        for diag in matrix_element.get('diagrams'):
            orders = diag.calculate_orders()
            if 'QCD' in orders:
                max_qcd_order = max(max_qcd_order,orders['QCD'])
        # count massless colored final-state particles per process, keep the max
        max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
            if proc.get('model').get_particle(id).get('mass')=='ZERO' and
            proc.get('model').get_particle(id).get('color')>1])
                                    for proc in matrix_element.get('processes'))
        # Maximum number of final state light jets to be matched
        self.proc_characteristic['max_n_matched_jets'] = max(
            self.proc_characteristic['max_n_matched_jets'],
            min(max_qcd_order,max_n_light_final_partons))

        # List of default pdgs to be considered for the CKKWl merging cut
        self.proc_characteristic['colored_pdgs'] = \
            sorted(list(set([abs(p.get('pdg_code')) for p in
                matrix_element.get('processes')[0].get('model').get('particles') if
                p.get('color')>1])))

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
            self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            replace_dict['cutsdone'] = ""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""
            replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

        # read/write of good helicities is inlined only for ungrouped output
        if not isinstance(self, ProcessExporterFortranMEGroup):
            ncomb=matrix_element.get_helicity_combinations()
            replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
        else:
            replace_dict['read_write_good_hel'] = ""

        context = {'read_write_good_hel':True}

        if writer:
            file = open(pjoin(_file_path, \
                        'iolibs/template_files/auto_dsig_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file, context=context)
        else:
            return replace_dict, context
4569 #=========================================================================== 4570 # write_coloramps_file 4571 #===========================================================================
4572 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4573 """Write the coloramps.inc file for MadEvent""" 4574 4575 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4576 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4577 (max(len(list(matrix_element.get('color_basis').keys())), 1), 4578 len(mapconfigs))) 4579 4580 4581 # Write the file 4582 writer.writelines(lines) 4583 4584 return True
4585 4586 #=========================================================================== 4587 # write_colors_file 4588 #===========================================================================
    def write_colors_file(self, writer, matrix_elements):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        Generates a Fortran function get_color(ipdg) built as one big
        if/else-if chain over every pdg code appearing either as an internal
        wavefunction or as an external leg, plus a dummy entry for the
        multiparticle-vertex placeholder particle and an error fallback.

        Parameters:
            writer: FortranWriter receiving the generated code.
            matrix_elements: a single HelasMatrixElement or a list of them.

        Returns True on success.
        """

        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        # (nested sums flatten: matrix elements -> diagrams -> wavefunctions)
        wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                              for wf in d.get('wavefunctions')],[]) \
                        for d in me.get('diagrams')], []) \
                    for me in matrix_elements], []))

        # same flattening for external legs (including decay legs)
        leg_ids = set(sum([sum([sum([[l.get('id'),
                        model.get_particle(l.get('id')).get_anti_pdg_code()] \
                            for l in p.get_legs_with_decays()], []) \
                        for p in me.get('processes')], []) \
                    for me in matrix_elements], []))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c       This is dummy particle used in multiparticle vertices
        get_color=2
        return
        """ % model.get_first_non_pdg()
        lines += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

        # Write the file
        writer.writelines(lines)

        return True
4645 4646 #=========================================================================== 4647 # write_config_nqcd_file 4648 #===========================================================================
4649 - def write_config_nqcd_file(self, writer, nqcd_list):
4650 """Write the config_nqcd.inc with the number of QCD couplings 4651 for each config""" 4652 4653 lines = [] 4654 for iconf, n in enumerate(nqcd_list): 4655 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4656 4657 # Write the file 4658 writer.writelines(lines) 4659 4660 return True
4661 4662 #=========================================================================== 4663 # write_maxconfigs_file 4664 #===========================================================================
4665 - def write_maxconfigs_file(self, writer, matrix_elements):
4666 """Write the maxconfigs.inc file for MadEvent""" 4667 4668 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4669 maxconfigs = max([me.get_num_configs() for me in \ 4670 matrix_elements.get('matrix_elements')]) 4671 else: 4672 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4673 4674 lines = "integer lmaxconfigs\n" 4675 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4676 4677 # Write the file 4678 writer.writelines(lines) 4679 4680 return True
4681 4682 #=========================================================================== 4683 # read_write_good_hel 4684 #===========================================================================
4685 - def read_write_good_hel(self, ncomb):
4686 """return the code to read/write the good_hel common_block""" 4687 4688 convert = {'ncomb' : ncomb} 4689 output = """ 4690 subroutine write_good_hel(stream_id) 4691 implicit none 4692 integer stream_id 4693 INTEGER NCOMB 4694 PARAMETER ( NCOMB=%(ncomb)d) 4695 LOGICAL GOODHEL(NCOMB) 4696 INTEGER NTRY 4697 common/BLOCK_GOODHEL/NTRY,GOODHEL 4698 write(stream_id,*) GOODHEL 4699 return 4700 end 4701 4702 4703 subroutine read_good_hel(stream_id) 4704 implicit none 4705 include 'genps.inc' 4706 integer stream_id 4707 INTEGER NCOMB 4708 PARAMETER ( NCOMB=%(ncomb)d) 4709 LOGICAL GOODHEL(NCOMB) 4710 INTEGER NTRY 4711 common/BLOCK_GOODHEL/NTRY,GOODHEL 4712 read(stream_id,*) GOODHEL 4713 NTRY = MAXTRIES + 1 4714 return 4715 end 4716 4717 subroutine init_good_hel() 4718 implicit none 4719 INTEGER NCOMB 4720 PARAMETER ( NCOMB=%(ncomb)d) 4721 LOGICAL GOODHEL(NCOMB) 4722 INTEGER NTRY 4723 INTEGER I 4724 4725 do i=1,NCOMB 4726 GOODHEL(I) = .false. 4727 enddo 4728 NTRY = 0 4729 end 4730 4731 integer function get_maxsproc() 4732 implicit none 4733 get_maxsproc = 1 4734 return 4735 end 4736 4737 """ % convert 4738 4739 return output
4740 4741 #=========================================================================== 4742 # write_config_subproc_map_file 4743 #===========================================================================
4744 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4745 """Write a dummy config_subproc.inc file for MadEvent""" 4746 4747 lines = [] 4748 4749 for iconfig in range(len(s_and_t_channels)): 4750 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4751 (iconfig + 1)) 4752 4753 # Write the file 4754 writer.writelines(lines) 4755 4756 return True
4757 4758 #=========================================================================== 4759 # write_configs_file 4760 #===========================================================================
4761 - def write_configs_file(self, writer, matrix_element):
4762 """Write the configs.inc file for MadEvent""" 4763 4764 # Extract number of external particles 4765 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4766 4767 model = matrix_element.get('processes')[0].get('model') 4768 configs = [(i+1, d) for (i, d) in \ 4769 enumerate(matrix_element.get('diagrams'))] 4770 mapconfigs = [c[0] for c in configs] 4771 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4772 [[c[1]] for c in configs], 4773 mapconfigs, 4774 nexternal, ninitial, 4775 model)
4776 4777 #=========================================================================== 4778 # write_run_configs_file 4779 #===========================================================================
4780 - def write_run_config_file(self, writer):
4781 """Write the run_configs.inc file for MadEvent""" 4782 4783 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4784 4785 if self.proc_characteristic['loop_induced']: 4786 job_per_chan = 1 4787 else: 4788 job_per_chan = 5 4789 4790 if writer: 4791 text = open(path).read() % {'chanperjob': job_per_chan} 4792 writer.write(text) 4793 return True 4794 else: 4795 return {'chanperjob': job_per_chan}
4796 4797 #=========================================================================== 4798 # write_configs_file_from_diagrams 4799 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns:
            (s_and_t_channels, nqcd_list): per written config, the
            [s_channels, t_channels, tchannels_strategy] triple and the
            number of QCD couplings.
        """

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # smallest maximal vertex size over configs; configs containing any
        # larger (>3-point) vertex are skipped below
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # pdg code used as placeholder for multiparticle-vertex propagators
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # pass to ping-pong strategy for t-channel for 3 ore more T-channel
            # this is directly related to change in genps.f
            tstrat = self.opt.get('t_strategy', 0)
            if isinstance(self, madgraph.loop.loop_exporters.LoopInducedExporterMEGroup):
                tstrat = 2
            tchannels, tchannels_strategy = ProcessExporterFortranME.reorder_tchannels(tchannels, tstrat, self.model)

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels, tchannels_strategy])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            lines.append("data tstrategy(%d)/%d/" % (nconfigs, tchannels_strategy))
            # Number of QCD couplings in this diagram
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # one propagator pdg per subprocess (0 if absent)
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list
4942 4943 4944 4945 #=========================================================================== 4946 # reoder t-channels 4947 #=========================================================================== 4948 4949 #ordering = 0 4950 @staticmethod
    def reorder_tchannels(tchannels, tstrat, model):
        """Choose and apply a t-channel ordering strategy for one config.

        Returns (tchannels, strategy_code), where strategy_code is the value
        written into configs.inc as 'data tstrategy(...)':
            2 : keep the original ordering
            1 : flipped ordering (reorder_tchannels_flipside)
           -2 : ping-pong ordering (reorder_tchannels_pingpong, default id)
           -1 : ping-pong ordering with id=1

        tstrat is the requested strategy (0 means automatic). In automatic
        mode the choice is a heuristic based on whether the outermost (and
        next-to-outermost) t-channel propagators are massless in *model*.
        """
        # no need to modified anything if 1 or less T-Channel
        #Note that this counts the number of vertex (one more vertex compare to T)
        #ProcessExporterFortranME.ordering +=1

        if len(tchannels) < 3 or tstrat == 2 or not model:
            return tchannels, 2
        elif tstrat == 1:
            return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
        elif tstrat == -2:
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
        elif tstrat == -1:
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels, 1), -1
        elif len(tchannels) < 4:
            # exactly two t-channel propagators: decide from the masslessness
            # of the first (t1) and last (t2) propagator
            first = tchannels[0]['legs'][1]['number']
            t1 = tchannels[0]['legs'][-1]['id']
            last = tchannels[-1]['legs'][1]['number']
            t2 = tchannels[-1]['legs'][0]['id']
            m1 = model.get_particle(t1).get('mass') == 'ZERO'
            m2 = model.get_particle(t2).get('mass') == 'ZERO'
            if m2 and not m1:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            elif m1 and not m2:
                return tchannels, 2
            elif first < last:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            else:
                return tchannels, 2
        else:
            # three or more t-channel propagators: also look one step inwards
            # (t12 from the top, t22 from the bottom)
            first = tchannels[0]['legs'][1]['number']
            t1 = tchannels[0]['legs'][-1]['id']
            last = tchannels[-1]['legs'][1]['number']
            t2 = tchannels[-1]['legs'][0]['id']
            m1 = model.get_particle(t1).get('mass') == 'ZERO'
            m2 = model.get_particle(t2).get('mass') == 'ZERO'

            t12 = tchannels[1]['legs'][-1]['id']
            m12 = model.get_particle(t12).get('mass') == 'ZERO'
            t22 = tchannels[-2]['legs'][0]['id']
            m22 = model.get_particle(t22).get('mass') == 'ZERO'
            if m2 and not m1:
                if m22:
                    return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
                else:
                    return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            elif m1 and not m2:
                if m12:
                    return tchannels, 2
                else:
                    return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            elif m1 and m2 and len(tchannels) == 4 and not m12: # 3 T propa
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            # this case seems quite sensitive we tested method 2 specifically and this was not helping in general
            elif not m1 and not m2 and len(tchannels) == 4 and m12:
                if first < last:
                    return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
                return tchannels, 2
            else:
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
5011 5012 5013 5014 5015 @staticmethod
    def reorder_tchannels_flipside(tchannels):
        """Reverse the t-channel chain so that it is walked starting from the
        second incoming beam instead of the first. Assumes ninitial == 2.

        The incoming chain is coded as
            (1 -2 > -X) (-X 4 > -X-1) ... ((-n+1) P > -N)
        and is returned recoded as
            (2 P > -X) (-X L > -X-1) ... (-X-L -2 > -N)
        i.e. the vertex list is reversed, each vertex is flipped so the
        previous propagator comes first, and propagator/beam numbers are
        relabelled through the oldid2new map. Input vertices are copied, but
        note that *tchannels* itself is consumed (emptied) by the pops.
        """

        # no need to modified anything if 1 or less T-Channel
        #Note that this counts the number of vertex (one more vertex compare to T)
        if len(tchannels) < 2:
            return tchannels

        out = []
        oldid2new = {}

        # initialisation
        # id of the first T-channel (-X)
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Setup the last vertex to refenence the second id beam
        # -N (need to setup it to 2.
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = 2
        oldid2new[1] = initialid

        i = 0
        while tchannels:
            old_vert = tchannels.pop()

            #copy the vertex /leglist to avoid side effects
            new_vert = copy.copy(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # vertex taken from the bottom we have
            # (-N+1 X > -N) we need to flip to pass to
            # -N X > -N+1 (and then relabel -N and -N+1
            legs = new_vert['legs'] # shorcut
            id1 = legs[0]['number']
            id2 = legs[1]['number']
            id3 = legs[2]['number']
            # to be secure we also support (X -N+1 > -N)
            if id3 == id2 -1 and id1 !=1:
                legs[0], legs[1] = legs[1], legs[0]
            #flipping side
            legs[0], legs[2] = legs[2], legs[0]

            # the only new relabelling is the last element of the list
            # always thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            #pass to new convention for leg numbering:
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # prepare next iteration
            propa_id -=1
            i +=1

        return out
    @staticmethod
    def reorder_tchannels_pingpong(tchannels, id=2):
        """change the tchannel ordering to pass to a ping-pong strategy.
        assume ninitial == 2

        We assume that we receive something like this

        1 ----- X ------- -2
                |
                | (-X)
                |
                X -------- 4
                |
                | (-X-1)
                |
                X --------- -1

                X---------- 3
                |
                | (-N+2)
                |
                X --------- L
                |
                | (-N+1)
                |
        -N ----- X ------- P

        coded as
        (1 -2 > -X) (-X 4 > -X-1) (-X-1 -1 > -X-2) ...
        ((-N+3) 3 > (-N+2)) ((-n+2) L > (-n+1)) ((-n+1) P > -N)

        we want to convert this as:
        1 ----- X ------- -2
                |
                | (-X)
                |
                X -------- 4
                |
                | (-X-2)
                |
                X --------- -1

                X---------- 3
                |
                | (-X-3)
                |
                X --------- L
                |
                | (-X-1)
                |
        2 ----- X ------- P

        coded as
        (1 -2 > -X) (2 P > -X-1) (-X 4 > -X-2) (-X-1 L > -X-3) ...

        NOTE(review): this consumes `tchannels` (pops it empty) — callers
        must not rely on the input list afterwards.  `id` selects which
        incoming beam sits at the bottom of the diagram (2 by default).
        """

        # no need to modified anything if 1 or less T-Channel
        # Note that this counts the number of vertex (one more vertex compare to T)
        if len(tchannels) < 2:
            return tchannels

        out = []            # reordered vertices, built front-to-back
        oldid2new = {}      # map: old propagator number -> new propagator number

        # initialisation
        # id of the first T-channel (-X)
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Setup the last vertex to refenence the second id beam
        # -N (need to setup it to 2.
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = id

        i = 0
        while tchannels:
            # ping pong by taking first/last element in aternance
            if id == 2:
                if i % 2 == 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()
            else:
                if i % 2 != 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()

            # copy the vertex /leglist to avoid side effects
            new_vert = base_objects.Vertex(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # if vertex taken from the bottom we have
            # (-N+1 X > -N) we need to flip to pass to
            # -N X > -N+1 (and then relabel -N and -N+1
            # to be secure we also support (X -N+1 > -N)
            if (i % 2 == 1 and id == 2) or (i % 2 == 0 and id == 1):
                legs = new_vert['legs']  # shorcut
                id1 = legs[0]['number']
                id2 = legs[1]['number']
                if id1 > id2:
                    legs[0], legs[1] = legs[1], legs[0]
                else:
                    legs[0], legs[2] = legs[2], legs[0]

            # the only new relabelling is the last element of the list
            # always thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            # NOTE(review): `legs` is only bound inside the flip branch above;
            # this is safe because for i==0 and id==1 that branch always ran.
            if i == 0 and id == 1:
                legs[0]['number'] = 2

            # pass to new convention for leg numbering:
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # prepare next iteration
            propa_id -= 1
            i += 1

        return out
5253 5254 5255 5256 5257 5258 #=========================================================================== 5259 # write_decayBW_file 5260 #===========================================================================
5261 - def write_decayBW_file(self, writer, s_and_t_channels):
5262 """Write the decayBW.inc file for MadEvent""" 5263 5264 lines = [] 5265 5266 booldict = {None: "0", True: "1", False: "2"} 5267 5268 for iconf, config in enumerate(s_and_t_channels): 5269 schannels = config[0] 5270 for vertex in schannels: 5271 # For the resulting leg, pick out whether it comes from 5272 # decay or not, as given by the onshell flag 5273 leg = vertex.get('legs')[-1] 5274 lines.append("data gForceBW(%d,%d)/%s/" % \ 5275 (leg.get('number'), iconf + 1, 5276 booldict[leg.get('onshell')])) 5277 5278 # Write the file 5279 writer.writelines(lines) 5280 5281 return True
5282 5283 #=========================================================================== 5284 # write_dname_file 5285 #===========================================================================
5286 - def write_dname_file(self, writer, dir_name):
5287 """Write the dname.mg file for MG4""" 5288 5289 line = "DIRNAME=%s" % dir_name 5290 5291 # Write the file 5292 writer.write(line + "\n") 5293 5294 return True
5295 5296 #=========================================================================== 5297 # write_driver 5298 #===========================================================================
5299 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
5300 """Write the SubProcess/driver.f file for MG4""" 5301 5302 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 5303 5304 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5305 card = 'Source/MODEL/MG5_param.dat' 5306 else: 5307 card = 'param_card.dat' 5308 # Requiring each helicity configuration to be probed by 10 points for 5309 # matrix element before using the resulting grid for MC over helicity 5310 # sampling. 5311 # We multiply this by 2 because each grouped subprocess is called at most 5312 # twice for each IMIRROR. 5313 replace_dict = {'param_card_name':card, 5314 'ncomb':ncomb, 5315 'hel_init_points':n_grouped_proc*10*2} 5316 if not v5: 5317 replace_dict['secondparam']=',.true.' 5318 else: 5319 replace_dict['secondparam']='' 5320 5321 if writer: 5322 text = open(path).read() % replace_dict 5323 writer.write(text) 5324 return True 5325 else: 5326 return replace_dict
5327 5328 #=========================================================================== 5329 # write_addmothers 5330 #===========================================================================
5331 - def write_addmothers(self, writer):
5332 """Write the SubProcess/addmothers.f""" 5333 5334 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5335 5336 text = open(path).read() % {'iconfig': 'diag_number'} 5337 writer.write(text) 5338 5339 return True
5340 5341 5342 #=========================================================================== 5343 # write_combine_events 5344 #===========================================================================
5345 - def write_combine_events(self, writer, nb_proc=100):
5346 """Write the SubProcess/driver.f file for MG4""" 5347 5348 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 5349 5350 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5351 card = 'Source/MODEL/MG5_param.dat' 5352 else: 5353 card = 'param_card.dat' 5354 5355 #set maxpup (number of @X in the process card) 5356 5357 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 5358 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 5359 writer.write(text) 5360 5361 return True
5362 5363 5364 #=========================================================================== 5365 # write_symmetry 5366 #===========================================================================
5367 - def write_symmetry(self, writer, v5=True):
5368 """Write the SubProcess/driver.f file for ME""" 5369 5370 5371 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 5372 5373 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5374 card = 'Source/MODEL/MG5_param.dat' 5375 else: 5376 card = 'param_card.dat' 5377 5378 if v5: 5379 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 5380 else: 5381 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 5382 5383 if writer: 5384 text = open(path).read() 5385 text = text % replace_dict 5386 writer.write(text) 5387 return True 5388 else: 5389 return replace_dict
5390 5391 5392 5393 #=========================================================================== 5394 # write_iproc_file 5395 #===========================================================================
5396 - def write_iproc_file(self, writer, me_number):
5397 """Write the iproc.dat file for MG4""" 5398 line = "%d" % (me_number + 1) 5399 5400 # Write the file 5401 for line_to_write in writer.write_line(line): 5402 writer.write(line_to_write) 5403 return True
5404 5405 #=========================================================================== 5406 # write_mg_sym_file 5407 #===========================================================================
5408 - def write_mg_sym_file(self, writer, matrix_element):
5409 """Write the mg.sym file for MadEvent.""" 5410 5411 lines = [] 5412 5413 # Extract process with all decays included 5414 final_legs = [leg for leg in matrix_element.get('processes')[0].get_legs_with_decays() if leg.get('state') == True] 5415 5416 ninitial = len([leg for leg in matrix_element.get('processes')[0].get('legs') if leg.get('state') == False]) 5417 5418 identical_indices = {} 5419 5420 # Extract identical particle info 5421 for i, leg in enumerate(final_legs): 5422 if leg.get('id') in identical_indices: 5423 identical_indices[leg.get('id')].append(\ 5424 i + ninitial + 1) 5425 else: 5426 identical_indices[leg.get('id')] = [i + ninitial + 1] 5427 5428 # Remove keys which have only one particle 5429 for key in list(identical_indices.keys()): 5430 if len(identical_indices[key]) < 2: 5431 del identical_indices[key] 5432 5433 # Write mg.sym file 5434 lines.append(str(len(list(identical_indices.keys())))) 5435 for key in identical_indices.keys(): 5436 lines.append(str(len(identical_indices[key]))) 5437 for number in identical_indices[key]: 5438 lines.append(str(number)) 5439 5440 # Write the file 5441 writer.writelines(lines) 5442 5443 return True
5444 5445 #=========================================================================== 5446 # write_mg_sym_file 5447 #===========================================================================
5448 - def write_default_mg_sym_file(self, writer):
5449 """Write the mg.sym file for MadEvent.""" 5450 5451 lines = "0" 5452 5453 # Write the file 5454 writer.writelines(lines) 5455 5456 return True
5457 5458 #=========================================================================== 5459 # write_ncombs_file 5460 #===========================================================================
5461 - def write_ncombs_file(self, writer, nexternal):
5462 """Write the ncombs.inc file for MadEvent.""" 5463 5464 # ncomb (used for clustering) is 2^nexternal 5465 file = " integer n_max_cl\n" 5466 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 5467 5468 # Write the file 5469 writer.writelines(file) 5470 5471 return True
5472 5473 #=========================================================================== 5474 # write_processes_file 5475 #===========================================================================
5476 - def write_processes_file(self, writer, subproc_group):
5477 """Write the processes.dat file with info about the subprocesses 5478 in this group.""" 5479 5480 lines = [] 5481 5482 for ime, me in \ 5483 enumerate(subproc_group.get('matrix_elements')): 5484 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 5485 ",".join(p.base_string() for p in \ 5486 me.get('processes')))) 5487 if me.get('has_mirror_process'): 5488 mirror_procs = [copy.copy(p) for p in me.get('processes')] 5489 for proc in mirror_procs: 5490 legs = copy.copy(proc.get('legs_with_decays')) 5491 legs.insert(0, legs.pop(1)) 5492 proc.set("legs_with_decays", legs) 5493 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 5494 mirror_procs)) 5495 else: 5496 lines.append("mirror none") 5497 5498 # Write the file 5499 writer.write("\n".join(lines)) 5500 5501 return True
5502 5503 #=========================================================================== 5504 # write_symswap_file 5505 #===========================================================================
5506 - def write_symswap_file(self, writer, ident_perms):
5507 """Write the file symswap.inc for MG4 by comparing diagrams using 5508 the internal matrix element value functionality.""" 5509 5510 lines = [] 5511 5512 # Write out lines for symswap.inc file (used to permute the 5513 # external leg momenta 5514 for iperm, perm in enumerate(ident_perms): 5515 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 5516 (iperm+1, ",".join([str(i+1) for i in perm]))) 5517 lines.append("data nsym/%d/" % len(ident_perms)) 5518 5519 # Write the file 5520 writer.writelines(lines) 5521 5522 return True
5523 5524 #=========================================================================== 5525 # write_symfact_file 5526 #===========================================================================
5527 - def write_symfact_file(self, writer, symmetry):
5528 """Write the files symfact.dat for MG4 by comparing diagrams using 5529 the internal matrix element value functionality.""" 5530 5531 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 5532 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 5533 # Write out lines for symswap.inc file (used to permute the 5534 # external leg momenta 5535 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 5536 # Write the file 5537 writer.write('\n'.join(lines)) 5538 writer.write('\n') 5539 5540 return True
5541 5542 #=========================================================================== 5543 # write_symperms_file 5544 #===========================================================================
5545 - def write_symperms_file(self, writer, perms):
5546 """Write the symperms.inc file for subprocess group, used for 5547 symmetric configurations""" 5548 5549 lines = [] 5550 for iperm, perm in enumerate(perms): 5551 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 5552 (iperm+1, ",".join([str(i+1) for i in perm]))) 5553 5554 # Write the file 5555 writer.writelines(lines) 5556 5557 return True
5558 5559 #=========================================================================== 5560 # write_subproc 5561 #===========================================================================
5562 - def write_subproc(self, writer, subprocdir):
5563 """Append this subprocess to the subproc.mg file for MG4""" 5564 5565 # Write line to file 5566 writer.write(subprocdir + "\n") 5567 5568 return True
5569
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used to generate the matrix_N.f files of the group.
    matrix_file = "matrix_madevent_group_v4.inc"
    # Output-mode tag used by the grouped-subprocess machinery.
    grouped_mode = 'madevent'
    # Default exporter options (may be overridden at construction time).
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{},
                   'hel_recycling': True
                   }

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
5591 - def generate_subprocess_directory(self, subproc_group, 5592 fortran_model, 5593 group_number):
5594 """Generate the Pn directory for a subprocess group in MadEvent, 5595 including the necessary matrix_N.f files, configs.inc and various 5596 other helper files.""" 5597 5598 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 5599 "subproc_group object not SubProcessGroup" 5600 5601 if not self.model: 5602 self.model = subproc_group.get('matrix_elements')[0].\ 5603 get('processes')[0].get('model') 5604 5605 cwd = os.getcwd() 5606 path = pjoin(self.dir_path, 'SubProcesses') 5607 5608 os.chdir(path) 5609 pathdir = os.getcwd() 5610 5611 # Create the directory PN in the specified path 5612 subprocdir = "P%d_%s" % (subproc_group.get('number'), 5613 subproc_group.get('name')) 5614 try: 5615 os.mkdir(subprocdir) 5616 except os.error as error: 5617 logger.warning(error.strerror + " " + subprocdir) 5618 5619 try: 5620 os.chdir(subprocdir) 5621 except os.error: 5622 logger.error('Could not cd to directory %s' % subprocdir) 5623 return 0 5624 5625 logger.info('Creating files in directory %s' % subprocdir) 5626 5627 # Create the matrix.f files, auto_dsig.f files and all inc files 5628 # for all subprocesses in the group 5629 5630 maxamps = 0 5631 maxflows = 0 5632 tot_calls = 0 5633 5634 matrix_elements = subproc_group.get('matrix_elements') 5635 5636 # Add the driver.f, all grouped ME's must share the same number of 5637 # helicity configuration 5638 ncomb = matrix_elements[0].get_helicity_combinations() 5639 for me in matrix_elements[1:]: 5640 if ncomb!=me.get_helicity_combinations(): 5641 raise MadGraph5Error("All grouped processes must share the "+\ 5642 "same number of helicity configurations.") 5643 5644 filename = 'driver.f' 5645 self.write_driver(writers.FortranWriter(filename),ncomb, 5646 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 5647 5648 try: 5649 self.proc_characteristic['hel_recycling'] = self.opt['hel_recycling'] 5650 except KeyError: 5651 self.proc_characteristic['hel_recycling'] = False 5652 self.opt['hel_recycling'] = 
False 5653 for ime, matrix_element in \ 5654 enumerate(matrix_elements): 5655 if self.opt['hel_recycling']: 5656 filename = 'matrix%d_orig.f' % (ime+1) 5657 replace_dict = self.write_matrix_element_v4(None, 5658 matrix_element, 5659 fortran_model, 5660 proc_id=str(ime+1), 5661 config_map=subproc_group.get('diagram_maps')[ime], 5662 subproc_number=group_number) 5663 calls,ncolor = replace_dict['return_value'] 5664 tfile = open(replace_dict['template_file']).read() 5665 file = tfile % replace_dict 5666 # Add the split orders helper functions. 5667 file = file + '\n' + open(replace_dict['template_file2'])\ 5668 .read()%replace_dict 5669 # Write the file 5670 writer = writers.FortranWriter(filename) 5671 writer.writelines(file) 5672 5673 # 5674 # write the dedicated template for helicity recycling 5675 # 5676 tfile = open(replace_dict['template_file'].replace('.inc',"_hel.inc")).read() 5677 file = tfile % replace_dict 5678 # Add the split orders helper functions. 5679 file = file + '\n' + open(replace_dict['template_file2'])\ 5680 .read()%replace_dict 5681 # Write the file 5682 writer = writers.FortranWriter('template_matrix%d.f' % (ime+1)) 5683 writer.uniformcase = False 5684 writer.writelines(file) 5685 5686 5687 5688 5689 else: 5690 filename = 'matrix%d.f' % (ime+1) 5691 calls, ncolor = \ 5692 self.write_matrix_element_v4(writers.FortranWriter(filename), 5693 matrix_element, 5694 fortran_model, 5695 proc_id=str(ime+1), 5696 config_map=subproc_group.get('diagram_maps')[ime], 5697 subproc_number=group_number) 5698 5699 5700 5701 filename = 'auto_dsig%d.f' % (ime+1) 5702 self.write_auto_dsig_file(writers.FortranWriter(filename), 5703 matrix_element, 5704 str(ime+1)) 5705 5706 # Keep track of needed quantities 5707 tot_calls += int(calls) 5708 maxflows = max(maxflows, ncolor) 5709 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 5710 5711 # Draw diagrams 5712 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 5713 
filename = "matrix%d.ps" % (ime+1) 5714 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 5715 get('diagrams'), 5716 filename, 5717 model = \ 5718 matrix_element.get('processes')[0].\ 5719 get('model'), 5720 amplitude=True) 5721 logger.info("Generating Feynman diagrams for " + \ 5722 matrix_element.get('processes')[0].nice_string()) 5723 plot.draw() 5724 5725 # Extract number of external particles 5726 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 5727 5728 # Generate a list of diagrams corresponding to each configuration 5729 # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number 5730 # If a subprocess has no diagrams for this config, the number is 0 5731 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 5732 5733 filename = 'auto_dsig.f' 5734 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 5735 subproc_group) 5736 5737 filename = 'coloramps.inc' 5738 self.write_coloramps_file(writers.FortranWriter(filename), 5739 subproc_diagrams_for_config, 5740 maxflows, 5741 matrix_elements) 5742 5743 filename = 'get_color.f' 5744 self.write_colors_file(writers.FortranWriter(filename), 5745 matrix_elements) 5746 5747 filename = 'config_subproc_map.inc' 5748 self.write_config_subproc_map_file(writers.FortranWriter(filename), 5749 subproc_diagrams_for_config) 5750 5751 filename = 'configs.inc' 5752 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 5753 writers.FortranWriter(filename), 5754 subproc_group, 5755 subproc_diagrams_for_config) 5756 5757 filename = 'config_nqcd.inc' 5758 self.write_config_nqcd_file(writers.FortranWriter(filename), 5759 nqcd_list) 5760 5761 filename = 'decayBW.inc' 5762 self.write_decayBW_file(writers.FortranWriter(filename), 5763 s_and_t_channels) 5764 5765 filename = 'dname.mg' 5766 self.write_dname_file(writers.FortranWriter(filename), 5767 subprocdir) 5768 5769 filename = 'iproc.dat' 5770 self.write_iproc_file(writers.FortranWriter(filename), 
5771 group_number) 5772 5773 filename = 'leshouche.inc' 5774 self.write_leshouche_file(writers.FortranWriter(filename), 5775 subproc_group) 5776 5777 filename = 'maxamps.inc' 5778 self.write_maxamps_file(writers.FortranWriter(filename), 5779 maxamps, 5780 maxflows, 5781 max([len(me.get('processes')) for me in \ 5782 matrix_elements]), 5783 len(matrix_elements)) 5784 5785 # Note that mg.sym is not relevant for this case 5786 filename = 'mg.sym' 5787 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 5788 5789 filename = 'mirrorprocs.inc' 5790 self.write_mirrorprocs(writers.FortranWriter(filename), 5791 subproc_group) 5792 5793 filename = 'ncombs.inc' 5794 self.write_ncombs_file(writers.FortranWriter(filename), 5795 nexternal) 5796 5797 filename = 'nexternal.inc' 5798 self.write_nexternal_file(writers.FortranWriter(filename), 5799 nexternal, ninitial) 5800 5801 filename = 'ngraphs.inc' 5802 self.write_ngraphs_file(writers.FortranWriter(filename), 5803 nconfigs) 5804 5805 filename = 'pmass.inc' 5806 self.write_pmass_file(writers.FortranWriter(filename), 5807 matrix_element) 5808 5809 filename = 'props.inc' 5810 self.write_props_file(writers.FortranWriter(filename), 5811 matrix_element, 5812 s_and_t_channels) 5813 5814 filename = 'processes.dat' 5815 files.write_to_file(filename, 5816 self.write_processes_file, 5817 subproc_group) 5818 5819 # Find config symmetries and permutations 5820 symmetry, perms, ident_perms = \ 5821 diagram_symmetry.find_symmetry(subproc_group) 5822 5823 filename = 'symswap.inc' 5824 self.write_symswap_file(writers.FortranWriter(filename), 5825 ident_perms) 5826 5827 filename = 'symfact_orig.dat' 5828 self.write_symfact_file(open(filename, 'w'), symmetry) 5829 5830 # check consistency 5831 for i, sym_fact in enumerate(symmetry): 5832 5833 if sym_fact >= 0: 5834 continue 5835 if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]: 5836 misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)]) 5837 raise Exception("identical diagram 
with different QCD powwer") 5838 5839 5840 filename = 'symperms.inc' 5841 self.write_symperms_file(writers.FortranWriter(filename), 5842 perms) 5843 5844 # Generate jpgs -> pass in make_html 5845 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 5846 5847 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 5848 5849 #import nexternal/leshouch in Source 5850 ln('nexternal.inc', '../../Source', log=False) 5851 ln('leshouche.inc', '../../Source', log=False) 5852 ln('maxamps.inc', '../../Source', log=False) 5853 5854 # Return to SubProcesses dir) 5855 os.chdir(pathdir) 5856 5857 # Add subprocess to subproc.mg 5858 filename = 'subproc.mg' 5859 files.append_to_file(filename, 5860 self.write_subproc, 5861 subprocdir) 5862 5863 # Return to original dir 5864 os.chdir(cwd) 5865 5866 if not tot_calls: 5867 tot_calls = 0 5868 return tot_calls
5869 5870 #=========================================================================== 5871 # write_super_auto_dsig_file 5872 #===========================================================================
    def write_super_auto_dsig_file(self, writer, subproc_group):
        """Write the auto_dsig.f file selecting between the subprocesses
        in subprocess group mode.

        Fills the super_auto_dsig_group_v4.inc template; if no writer is
        given, the substitution dictionary is returned instead.
        """

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        matrix_elements = subproc_group.get('matrix_elements')

        # Extract process info lines
        process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                                   matrix_elements])
        replace_dict['process_lines'] = process_lines

        nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal

        # Two "processes" per matrix element: one for each IMIRROR
        replace_dict['nsprocs'] = 2*len(matrix_elements)

        # Generate dsig definition line
        dsig_def_line = "DOUBLE PRECISION " + \
                        ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                                  range(len(matrix_elements))])
        replace_dict["dsig_def_line"] = dsig_def_line

        # Generate dsig process lines: dispatch on IPROC to the right DSIGn
        call_dsig_proc_lines = []
        for iproc in range(len(matrix_elements)):
            call_dsig_proc_lines.append(\
                "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
                {"num": iproc + 1,
                 "proc": matrix_elements[iproc].get('processes')[0].base_string()})
        replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

        # Good-helicity bookkeeping routines (shared NCOMB across the group)
        ncomb = matrix_elements[0].get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

        s1, s2 = matrix_elements[0].get_spin_state_initial()
        replace_dict['nb_spin_state1'] = s1
        replace_dict['nb_spin_state2'] = s2

        printzeroamp = []
        for iproc in range(len(matrix_elements)):
            printzeroamp.append(\
                " call print_zero_amp_%i()" % ( iproc + 1))
        replace_dict['print_zero_amp'] = "\n".join(printzeroamp)

        if writer:
            file = open(pjoin(_file_path, \
                       'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
5933 5934 #=========================================================================== 5935 # write_mirrorprocs 5936 #===========================================================================
5937 - def write_mirrorprocs(self, writer, subproc_group):
5938 """Write the mirrorprocs.inc file determining which processes have 5939 IS mirror process in subprocess group mode.""" 5940 5941 lines = [] 5942 bool_dict = {True: '.true.', False: '.false.'} 5943 matrix_elements = subproc_group.get('matrix_elements') 5944 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5945 (len(matrix_elements), 5946 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5947 me in matrix_elements]))) 5948 # Write the file 5949 writer.writelines(lines)
5950 5951 #=========================================================================== 5952 # write_addmothers 5953 #===========================================================================
5954 - def write_addmothers(self, writer):
5955 """Write the SubProcess/addmothers.f""" 5956 5957 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5958 5959 text = open(path).read() % {'iconfig': 'lconfig'} 5960 writer.write(text) 5961 5962 return True
5963 5964 5965 #=========================================================================== 5966 # write_coloramps_file 5967 #===========================================================================
5968 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5969 matrix_elements):
5970 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5971 5972 # Create a map from subprocess (matrix element) to a list of 5973 # the diagrams corresponding to each config 5974 5975 lines = [] 5976 5977 subproc_to_confdiag = {} 5978 for config in diagrams_for_config: 5979 for subproc, diag in enumerate(config): 5980 try: 5981 subproc_to_confdiag[subproc].append(diag) 5982 except KeyError: 5983 subproc_to_confdiag[subproc] = [diag] 5984 5985 for subproc in sorted(subproc_to_confdiag.keys()): 5986 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5987 matrix_elements[subproc], 5988 subproc + 1)) 5989 5990 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5991 (maxflows, 5992 len(diagrams_for_config), 5993 len(matrix_elements))) 5994 5995 # Write the file 5996 writer.writelines(lines) 5997 5998 return True
5999 6000 #=========================================================================== 6001 # write_config_subproc_map_file 6002 #===========================================================================
6003 - def write_config_subproc_map_file(self, writer, config_subproc_map):
6004 """Write the config_subproc_map.inc file for subprocess groups""" 6005 6006 lines = [] 6007 # Output only configs that have some corresponding diagrams 6008 iconfig = 0 6009 for config in config_subproc_map: 6010 if set(config) == set([0]): 6011 continue 6012 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 6013 (iconfig + 1, len(config), 6014 ",".join([str(i) for i in config]))) 6015 iconfig += 1 6016 # Write the file 6017 writer.writelines(lines) 6018 6019 return True
6020 6021 #=========================================================================== 6022 # read_write_good_hel 6023 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """return the code to read/write the good_hel common_block

        The returned text defines four Fortran routines sharing the
        BLOCK_GOODHEL common block: write_good_hel / read_good_hel
        (stream (de)serialisation), init_good_hel (reset) and
        get_maxsproc.  NCOMB is fixed to the given value.
        """

        convert = {'ncomb' : ncomb}

        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
            GOODHEL(I,1) = .false.
            GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
6085 6086 6087 6088 #=========================================================================== 6089 # write_configs_file 6090 #===========================================================================
6091 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6092 """Write the configs.inc file with topology information for a 6093 subprocess group. Use the first subprocess with a diagram for each 6094 configuration.""" 6095 6096 matrix_elements = subproc_group.get('matrix_elements') 6097 model = matrix_elements[0].get('processes')[0].get('model') 6098 6099 diagrams = [] 6100 config_numbers = [] 6101 for iconfig, config in enumerate(diagrams_for_config): 6102 # Check if any diagrams correspond to this config 6103 if set(config) == set([0]): 6104 continue 6105 subproc_diags = [] 6106 for s,d in enumerate(config): 6107 if d: 6108 subproc_diags.append(matrix_elements[s].\ 6109 get('diagrams')[d-1]) 6110 else: 6111 subproc_diags.append(None) 6112 diagrams.append(subproc_diags) 6113 config_numbers.append(iconfig + 1) 6114 6115 # Extract number of external particles 6116 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6117 6118 return len(diagrams), \ 6119 self.write_configs_file_from_diagrams(writer, diagrams, 6120 config_numbers, 6121 nexternal, ninitial, 6122 model)
6123 6124 #=========================================================================== 6125 # write_run_configs_file 6126 #===========================================================================
6127 - def write_run_config_file(self, writer):
6128 """Write the run_configs.inc file for MadEvent""" 6129 6130 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 6131 if self.proc_characteristic['loop_induced']: 6132 job_per_chan = 1 6133 else: 6134 job_per_chan = 2 6135 text = open(path).read() % {'chanperjob':job_per_chan} 6136 writer.write(text) 6137 return True
6138 6139 6140 #=========================================================================== 6141 # write_leshouche_file 6142 #===========================================================================
6143 - def write_leshouche_file(self, writer, subproc_group):
6144 """Write the leshouche.inc file for MG4""" 6145 6146 all_lines = [] 6147 6148 for iproc, matrix_element in \ 6149 enumerate(subproc_group.get('matrix_elements')): 6150 all_lines.extend(self.get_leshouche_lines(matrix_element, 6151 iproc)) 6152 # Write the file 6153 writer.writelines(all_lines) 6154 return True
6155 6156
6157 - def finalize(self,*args, **opts):
6158 6159 super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts) 6160 #ensure that the grouping information is on the correct value 6161 self.proc_characteristic['grouped_matrix'] = True
#===============================================================================
# UFO_model_to_mg4
#===============================================================================

def python_to_fortran(x):
    """Translate a python (UFO) expression string into Fortran syntax.

    Replaces the former lambda assignment (PEP 8 E731): a proper def gives
    the callable a real name in tracebacks while keeping the interface.
    """
    return parsers.UFOExpressionParserFortran().parse(x)
6169 6170 -class UFO_model_to_mg4(object):
6171 """ A converter of the UFO-MG5 Model to the MG4 format """ 6172 6173 # The list below shows the only variables the user is allowed to change by 6174 # himself for each PS point. If he changes any other, then calling 6175 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 6176 # correctly account for the change. 6177 PS_dependent_key = ['aS','MU_R'] 6178 mp_complex_format = 'complex*32' 6179 mp_real_format = 'real*16' 6180 # Warning, it is crucial none of the couplings/parameters of the model 6181 # starts with this prefix. I should add a check for this. 6182 # You can change it as the global variable to check_param_card.ParamCard 6183 mp_prefix = check_param_card.ParamCard.mp_prefix 6184
6185 - def __init__(self, model, output_path, opt=None):
6186 """ initialization of the objects """ 6187 6188 self.model = model 6189 self.model_name = model['name'] 6190 self.dir_path = output_path 6191 6192 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 6193 'loop_induced': False} 6194 if opt: 6195 self.opt.update(opt) 6196 6197 self.coups_dep = [] # (name, expression, type) 6198 self.coups_indep = [] # (name, expression, type) 6199 self.params_dep = [] # (name, expression, type) 6200 self.params_indep = [] # (name, expression, type) 6201 self.params_ext = [] # external parameter 6202 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 6203 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
6204 6205
6207 """modify the parameter if some of them are identical up to the case""" 6208 6209 lower_dict={} 6210 duplicate = set() 6211 keys = list(self.model['parameters'].keys()) 6212 keys.sort() 6213 for key in keys: 6214 for param in self.model['parameters'][key]: 6215 lower_name = param.name.lower() 6216 if not lower_name: 6217 continue 6218 try: 6219 lower_dict[lower_name].append(param) 6220 except KeyError as error: 6221 lower_dict[lower_name] = [param] 6222 else: 6223 duplicate.add(lower_name) 6224 logger.debug('%s is define both as lower case and upper case.' 6225 % lower_name) 6226 if not duplicate: 6227 return 6228 6229 re_expr = r'''\b(%s)\b''' 6230 to_change = [] 6231 change={} 6232 for value in duplicate: 6233 for i, var in enumerate(lower_dict[value]): 6234 to_change.append(var.name) 6235 new_name = '%s%s' % (var.name.lower(), 6236 ('__%d'%(i+1) if i>0 else '')) 6237 change[var.name] = new_name 6238 var.name = new_name 6239 6240 # Apply the modification to the map_CTcoup_CTparam of the model 6241 # if it has one (giving for each coupling the CT parameters whcih 6242 # are necessary and which should be exported to the model. 
6243 if hasattr(self.model,'map_CTcoup_CTparam'): 6244 for coup, ctparams in self.model.map_CTcoup_CTparam: 6245 for i, ctparam in enumerate(ctparams): 6246 try: 6247 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 6248 except KeyError: 6249 pass 6250 6251 replace = lambda match_pattern: change[match_pattern.groups()[0]] 6252 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 6253 6254 # change parameters 6255 for key in keys: 6256 if key == ('external',): 6257 continue 6258 for param in self.model['parameters'][key]: 6259 param.expr = rep_pattern.sub(replace, param.expr) 6260 6261 # change couplings 6262 for key in self.model['couplings'].keys(): 6263 for coup in self.model['couplings'][key]: 6264 coup.expr = rep_pattern.sub(replace, coup.expr) 6265 6266 # change mass/width 6267 for part in self.model['particles']: 6268 if str(part.get('mass')) in to_change: 6269 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 6270 if str(part.get('width')) in to_change: 6271 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
6272
    def refactorize(self, wanted_couplings = []):
        """modify the couplings to fit with MG4 convention

        Splits the model parameters into external / alphaS(PS)-dependent /
        independent lists, and the couplings into dependent / independent
        lists (optionally restricted to wanted_couplings).  The mutable
        default is harmless here: wanted_couplings is only read.
        """

        # Keep only separation in alphaS: a parameter is PS-point dependent
        # when its dependency key contains one of PS_dependent_key
        # ('aS', 'MU_R').
        keys = list(self.model['parameters'].keys())
        keys.sort(key=len)
        for key in keys:
            # drop unnamed entries
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        # NOTE(review): this sorted key list is not used below -- the loop
        # iterates the couplings dict directly; presumably harmless, confirm.
        keys = list(self.model['couplings'].keys())
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related computation
        # Pass G in the independant list
        # (the membership test compares parameter objects against the plain
        # string 'G' -- presumably relies on ModelVariable equality with
        # strings; TODO confirm)
        if 'G' in self.params_dep:
            # the pop() removes G from the dependent parameters: this side
            # effect is intentional even though G itself is then unused here
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
        #     G.expr = '2*cmath.sqrt(as*pi)'
        #     self.params_indep.insert(0, self.params_dep.pop(index))
        # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append(base_objects.ModelVariable('aS', '0.138', 'real'))
            self.params_indep.append(base_objects.ModelVariable('G', '4.1643', 'real'))
6315 - def build(self, wanted_couplings = [], full=True):
6316 """modify the couplings to fit with MG4 convention and creates all the 6317 different files""" 6318 6319 self.pass_parameter_to_case_insensitive() 6320 self.refactorize(wanted_couplings) 6321 6322 # write the files 6323 if full: 6324 if wanted_couplings: 6325 # extract the wanted ct parameters 6326 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 6327 self.write_all()
6328 6329
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        format='fortran' returns a FortranWriter; anything else returns a
        plain text file handle.  In both cases a banner of comment
        characters plus a 'written by the UFO converter' line is written
        first.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
            # Call io.FileIO.writelines *unbound* on the writer to bypass
            # FortranWriter's own line formatting for the banner
            # (NOTE(review): presumably FortranWriter derives from
            # io.FileIO -- confirm before changing this hack).
            write_class = io.FileIO

            write_class.writelines(fsock, comment * 77 + '\n')
            write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \
                                   {'comment': comment + (6 - len(comment)) * ' '})
            write_class.writelines(fsock, comment * 77 + '\n\n')
        else:
            fsock = open(file_path, 'w')
            fsock.writelines(comment * 77 + '\n')
            fsock.writelines('%(comment)s written by the UFO converter\n' % \
                             {'comment': comment + (6 - len(comment)) * ' '})
            fsock.writelines(comment * 77 + '\n\n')
        return fsock
6351 6352
6353 - def write_all(self):
6354 """ write all the files """ 6355 #write the part related to the external parameter 6356 self.create_ident_card() 6357 self.create_param_read() 6358 6359 #write the definition of the parameter 6360 self.create_input() 6361 self.create_intparam_def(dp=True,mp=False) 6362 if self.opt['mp']: 6363 self.create_intparam_def(dp=False,mp=True) 6364 6365 # definition of the coupling. 6366 self.create_actualize_mp_ext_param_inc() 6367 self.create_coupl_inc() 6368 self.create_write_couplings() 6369 self.create_couplings() 6370 6371 # the makefile 6372 self.create_makeinc() 6373 self.create_param_write() 6374 6375 # The model functions 6376 self.create_model_functions_inc() 6377 self.create_model_functions_def() 6378 6379 # The param_card.dat 6380 self.create_param_card() 6381 6382 6383 # All the standard files 6384 self.copy_standard_file()
6385 6386 ############################################################################ 6387 ## ROUTINE CREATING THE FILES ############################################ 6388 ############################################################################ 6389
6390 - def copy_standard_file(self):
6391 """Copy the standard files for the fortran model.""" 6392 6393 #copy the library files 6394 file_to_link = ['formats.inc','printout.f', \ 6395 'rw_para.f', 'testprog.f'] 6396 6397 for filename in file_to_link: 6398 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 6399 self.dir_path) 6400 6401 file = open(os.path.join(MG5DIR,\ 6402 'models/template_files/fortran/rw_para.f')).read() 6403 6404 includes=["include \'coupl.inc\'","include \'input.inc\'", 6405 "include \'model_functions.inc\'"] 6406 if self.opt['mp']: 6407 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 6408 # In standalone and madloop we do no use the compiled param card but 6409 # still parse the .dat one so we must load it. 6410 if self.opt['loop_induced']: 6411 #loop induced follow MadEvent way to handle the card. 6412 load_card = '' 6413 lha_read_filename='lha_read.f' 6414 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 6415 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 6416 lha_read_filename='lha_read_mp.f' 6417 elif self.opt['export_format'].startswith('standalone') \ 6418 or self.opt['export_format'] in ['madweight', 'plugin']\ 6419 or self.opt['export_format'].startswith('matchbox'): 6420 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 6421 lha_read_filename='lha_read.f' 6422 else: 6423 load_card = '' 6424 lha_read_filename='lha_read.f' 6425 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 6426 os.path.join(self.dir_path,'lha_read.f')) 6427 6428 file=file%{'includes':'\n '.join(includes), 6429 'load_card':load_card} 6430 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 6431 writer.writelines(file) 6432 writer.close() 6433 6434 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6435 or self.opt['loop_induced']: 6436 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 6437 self.dir_path + '/makefile') 6438 if 
self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 6439 path = pjoin(self.dir_path, 'makefile') 6440 text = open(path).read() 6441 text = text.replace('madevent','aMCatNLO') 6442 open(path, 'w').writelines(text) 6443 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 6444 'madloop','madloop_optimized', 'standalone_rw', 6445 'madweight','matchbox','madloop_matchbox', 'plugin']: 6446 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 6447 self.dir_path + '/makefile') 6448 #elif self.opt['export_format'] in []: 6449 #pass 6450 else: 6451 raise MadGraph5Error('Unknown format')
6452
    def create_coupl_inc(self):
        """ write coupling.inc

        Declares G, gal, MU_R, Nf, all non-zero masses and widths, all
        (wanted) couplings and, in the complex-mass scheme, the CMASS_*
        variables, each with its common block.  When the 'mp' option is
        active the multiple-precision twins (mp_coupl.inc with the
        MP_ prefix and mp_coupl_same_name.inc without it) are written too.
        NOTE: the fsock handles are FortranWriter objects, which reformat
        the emitted lines, so the indentation inside the template strings
        below is cosmetic only.
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                           format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header % {'real_mp_format': self.mp_real_format,
                                          'complex_mp_format': self.mp_complex_format,
                                          'mp_prefix': self.mp_prefix})
            mp_fsock_same_name.writelines(header % {'real_mp_format': self.mp_real_format,
                                          'complex_mp_format': self.mp_complex_format,
                                          'mp_prefix': ''})

        # Write the Mass definition/ common block
        # (sets: the emitted order is therefore not deterministic across runs)
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            # find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
            if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                                    ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                                    ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                                ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                                    ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
6572
6573 - def create_write_couplings(self):
6574 """ write the file coupl_write.inc """ 6575 6576 fsock = self.open('coupl_write.inc', format='fortran') 6577 6578 fsock.writelines("""write(*,*) ' Couplings of %s' 6579 write(*,*) ' ---------------------------------' 6580 write(*,*) ' '""" % self.model_name) 6581 def format(coupl): 6582 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
6583 6584 # Write the Couplings 6585 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 6586 fsock.writelines('\n'.join(lines)) 6587 6588
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares the real and complex external/internal parameters (minus
        the ones already defined elsewhere: G, MU_R, ZERO, masses, widths
        and -- in the complex-mass scheme -- the CMASS_* variables) and
        their common blocks.  With the 'mp' option the MP_-prefixed twins
        go to mp_input.inc.
        NOTE(review): only the *real* external parameters are written;
        complex external parameters are not included here -- presumably
        they are handled elsewhere, confirm.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        # find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # a parameter must be declared unless it is G/MU_R/ZERO or already
        # defined as a mass/width above
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real' and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n\n')
6645
6646 - def check_needed_param(self, param):
6647 """ Returns whether the parameter in argument is needed for this 6648 specific computation or not.""" 6649 6650 # If this is a leading order model or if there was no CT parameter 6651 # employed in this NLO model, one can directly return that the 6652 # parameter is needed since only CTParameters are filtered. 6653 if not hasattr(self, 'allCTparameters') or \ 6654 self.allCTparameters is None or self.usedCTparameters is None or \ 6655 len(self.allCTparameters)==0: 6656 return True 6657 6658 # We must allow the conjugate shorthand for the complex parameter as 6659 # well so we check wether either the parameter name or its name with 6660 # 'conjg__' substituted with '' is present in the list. 6661 # This is acceptable even if some parameter had an original name 6662 # including 'conjg__' in it, because at worst we export a parameter 6663 # was not needed. 6664 param = param.lower() 6665 cjg_param = param.replace('conjg__','',1) 6666 6667 # First make sure it is a CTparameter 6668 if param not in self.allCTparameters and \ 6669 cjg_param not in self.allCTparameters: 6670 return True 6671 6672 # Now check if it is in the list of CTparameters actually used 6673 return (param in self.usedCTparameters or \ 6674 cjg_param in self.usedCTparameters)
6675
6676 - def extract_needed_CTparam(self,wanted_couplings=[]):
6677 """ Extract what are the needed CT parameters given the wanted_couplings""" 6678 6679 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6680 # Setting these lists to none wil disable the filtering in 6681 # check_needed_param 6682 self.allCTparameters = None 6683 self.usedCTparameters = None 6684 return 6685 6686 # All CTparameters appearin in all CT couplings 6687 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6688 # Define in this class the list of all CT parameters 6689 self.allCTparameters=list(\ 6690 set(itertools.chain.from_iterable(allCTparameters))) 6691 6692 # All used CT couplings 6693 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6694 allUsedCTCouplings = [coupl for coupl in 6695 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6696 6697 # Now define the list of all CT parameters that are actually used 6698 self.usedCTparameters=list(\ 6699 set(itertools.chain.from_iterable([ 6700 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6701 ]))) 6702 6703 # Now at last, make these list case insensitive 6704 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6705 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6706
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                          ('mp_' if mp and not dp else ''), format='fortran')

        # Parameters evaluated once, when the param_card is (re)read
        fsock.write_comments(\
            "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix, param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        # Parameters depending on aS/MU_R: recomputed for every PS point
        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            # NOTE(review): 'elif' here (vs 'if mp' above) means the mp line
            # is skipped when dp and mp are both True; as called from
            # write_all the two flags are never both set, so this is
            # presumably equivalent -- confirm before reusing differently.
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix, param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        # EW coupling used when writing out aqed; depends on which external
        # parameter the model provides (aEWM1, or Gf in the Gmu scheme)
        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
 %(mp_prefix)sgal(2) = 1d0
""" % {'mp_prefix': self.mp_prefix})
                pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
 %(mp_prefix)sgal(2) = 1d0
""" % {'mp_prefix': self.mp_prefix})
                pass
        else:
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
 %(mp_prefix)sgal(2) = 1e0_16
""" % {'mp_prefix': self.mp_prefix})
6790 6791
6792 - def create_couplings(self):
6793 """ create couplings.f and all couplingsX.f """ 6794 6795 nb_def_by_file = 25 6796 6797 self.create_couplings_main(nb_def_by_file) 6798 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6799 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6800 6801 for i in range(nb_coup_indep): 6802 # For the independent couplings, we compute the double and multiple 6803 # precision ones together 6804 data = self.coups_indep[nb_def_by_file * i: 6805 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6806 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6807 6808 for i in range(nb_coup_dep): 6809 # For the dependent couplings, we compute the double and multiple 6810 # precision ones in separate subroutines. 6811 data = self.coups_dep[nb_def_by_file * i: 6812 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6813 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6814 dp=True,mp=False) 6815 if self.opt['mp']: 6816 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6817 dp=False,mp=True)
6818 6819
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Emits four Fortran subroutines:
        - coup():               full initialization, reads the LHA card
        - update_as_param():    recompute only the aS-dependent couplings
        - update_as_param2():   same, for an externally supplied (mu_r, aS)
        - mp_update_as_param(): multiple-precision variant (mp option only)
        The calls are dispatched to the numbered coupN()/mp_coupN()
        subroutines produced by create_couplings_part; fsock is a
        FortranWriter, so in-string indentation is cosmetic.
        """

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
        """ % self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .true.
          include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.writelines('\n'.join(\
            ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
            ['call coup%s()' % (nb_coup_indep + i + 1) \
             for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                 for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        fsock.writelines("""subroutine update_as_param()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .false.""")
        fsock.writelines("""
          include \'intparam_definition.inc\'\n
        """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
            ['call coup%s()' % (nb_coup_indep + i + 1) \
             for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

          implicit none
          double precision PI
          parameter  (PI=3.141592653589793d0)
          double precision mu_r2, as2
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'""")
        fsock.writelines("""
          if (mu_r2.gt.0d0) MU_R = mu_r2
          G = SQRT(4.0d0*PI*AS2)
          AS = as2

          CALL UPDATE_AS_PARAM()
          """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            fsock.writelines("""subroutine mp_update_as_param()

              implicit none
              logical READLHA
              include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
            """ % self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
              include \'coupl.inc\'
              include \'actualize_mp_ext_params.inc\'
              READLHA = .false.
              include \'mp_intparam_definition.inc\'\n
            """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                 for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6935
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.

        'data' is the list of coupling objects (with .name and .expr) that
        this numbered file must define; fsock is a FortranWriter, so the
        indentation inside the template strings is cosmetic.
        """

        fsock = self.open('%scouplings%s.f' % ('mp_' if mp and not dp else '',
                                               nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

          implicit none
          include \'model_functions.inc\'""" % ('mp_' if mp and not dp else '', nb_file))
        if dp:
            fsock.writelines("""
          double precision PI, ZERO
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include 'input.inc'
          include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                        parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                        parameter (MP__ZERO=0e0_16)
                        include \'mp_input.inc\'
                        include \'mp_coupl.inc\'
        """ % self.mp_real_format)

        # One assignment per coupling, parsed from UFO syntax to Fortran
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                              self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix, coupling.name,
                                                self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
6973
    def create_model_functions_inc(self):
        """ Create model_functions.inc which contains the various declarations
        of auxiliary functions which might be used in the couplings expressions

        The built-in helpers (cond, condif, reglog*, recms, arg, grreglog,
        regsqrt, ...) are always declared; any extra function defined in the
        UFO model itself is appended, in double and (with the mp option)
        multiple precision.
        """

        additional_fct = []
        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            for fct in ufo_fct:
                # already handle by default
                if str(fct.name) not in ["complexconjugate", "re", "im", "sec",
                       "csc", "asec", "acsc", "theta_function", "cond",
                       "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot",
                       "grreglog", "regsqrt"]:
                    additional_fct.append(fct.name)

        fsock = self.open('model_functions.inc', format='fortran')
        fsock.writelines("""double complex cond
          double complex condif
          double complex reglog
          double complex reglogp
          double complex reglogm
          double complex recms
          double complex arg
          double complex grreglog
          double complex regsqrt
          %s
          """ % "\n".join(["          double complex %s" % i for i in additional_fct]))

        if self.opt['mp']:
            fsock.writelines("""%(complex_mp_format)s mp_cond
          %(complex_mp_format)s mp_condif
          %(complex_mp_format)s mp_reglog
          %(complex_mp_format)s mp_reglogp
          %(complex_mp_format)s mp_reglogm
          %(complex_mp_format)s mp_recms
          %(complex_mp_format)s mp_arg
          %(complex_mp_format)s mp_grreglog
          %(complex_mp_format)s mp_regsqrt
          %(additional)s
          """ %\
          {"additional": "\n".join(["          %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]),
           'complex_mp_format': self.mp_complex_format
          })
7020
def create_model_functions_def(self):
    """ Create model_functions.f which contains the various definitions 
    of auxiliary functions which might be used in the couplings expressions
    Add the functions.f functions for formfactors support
    """

    fsock = self.open('model_functions.f', format='fortran')
    # Double-precision definitions of the default auxiliary functions.
    # NOTE(review): TWOPII is declared in reglog but never used there;
    # it is only needed by reglogp/reglogm/grreglog.
    fsock.writelines("""double complex function cond(condition,truecase,falsecase)
          implicit none
          double complex condition,truecase,falsecase
          if(condition.eq.(0.0d0,0.0d0)) then
             cond=truecase
          else
             cond=falsecase
          endif
          end

          double complex function condif(condition,truecase,falsecase)
          implicit none
          logical condition
          double complex truecase,falsecase
          if(condition) then
             condif=truecase
          else
             condif=falsecase
          endif
          end

          double complex function recms(condition,expr)
          implicit none
          logical condition
          double complex expr
          if(condition)then
             recms=expr
          else
             recms=dcmplx(dble(expr))
          endif
          end

          double complex function reglog(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0)) then
             reglog=(0.0d0,0.0d0)
          else
             reglog=log(arg)
          endif
          end

          double complex function reglogp(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogp=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
                reglogp=log(arg) + TWOPII
             else
                reglogp=log(arg)
             endif
          endif
          end

          double complex function reglogm(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogm=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
                reglogm=log(arg) - TWOPII
             else
                reglogm=log(arg)
             endif
          endif
          end

          double complex function regsqrt(arg_in)
          implicit none
          double complex arg_in
          double complex arg
          arg=arg_in
          if(dabs(dimag(arg)).eq.0.0d0)then
             arg=dcmplx(dble(arg),0.0d0)
          endif
          if(dabs(dble(arg)).eq.0.0d0)then
             arg=dcmplx(0.0d0,dimag(arg))
          endif
          regsqrt=sqrt(arg)
          end

          double complex function grreglog(logsw,expr1_in,expr2_in)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex expr1_in,expr2_in
          double complex expr1,expr2
          double precision logsw
          double precision imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(dabs(dimag(expr1)).eq.0.0d0)then
             expr1=dcmplx(dble(expr1),0.0d0)
          endif
          if(dabs(dble(expr1)).eq.0.0d0)then
             expr1=dcmplx(0.0d0,dimag(expr1))
          endif
          if(dabs(dimag(expr2)).eq.0.0d0)then
             expr2=dcmplx(dble(expr2),0.0d0)
          endif
          if(dabs(dble(expr2)).eq.0.0d0)then
             expr2=dcmplx(0.0d0,dimag(expr2))
          endif
          if(expr1.eq.(0.0d0,0.0d0))then
             grreglog=(0.0d0,0.0d0)
          else
             imagexpr=dimag(expr1)*dimag(expr2)
             firstsheet=imagexpr.ge.0.0d0
             firstsheet=firstsheet.or.dble(expr1).ge.0.0d0
             firstsheet=firstsheet.or.dble(expr2).ge.0.0d0
             if(firstsheet)then
                grreglog=log(expr1)
             else
                if(dimag(expr1).gt.0.0d0)then
                   grreglog=log(expr1) - logsw*TWOPII
                else
                   grreglog=log(expr1) + logsw*TWOPII
                endif
             endif
          endif
          end

          double complex function arg(comnum)
          implicit none
          double complex comnum
          double complex iim
          iim = (0.0d0,1.0d0)
          if(comnum.eq.(0.0d0,0.0d0)) then
             arg=(0.0d0,0.0d0)
          else
             arg=log(comnum/abs(comnum))/iim
          endif
          end""")
    if self.opt['mp']:
        # Quadruple-precision (mp_) versions of the same functions.
        # NOTE(review): the TWOPII parameter below uses
        # 3.14169258478796... which differs from pi=3.14159265... in the
        # fifth decimal -- looks like transposed digits; confirm upstream
        # before relying on mp_reglogp/mp_reglogm/mp_grreglog branch cuts.
        fsock.writelines("""

          %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
          implicit none
          %(complex_mp_format)s condition,truecase,falsecase
          if(condition.eq.(0.0e0_16,0.0e0_16)) then
             mp_cond=truecase
          else
             mp_cond=falsecase
          endif
          end

          %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
          implicit none
          logical condition
          %(complex_mp_format)s truecase,falsecase
          if(condition) then
             mp_condif=truecase
          else
             mp_condif=falsecase
          endif
          end

          %(complex_mp_format)s function mp_recms(condition,expr)
          implicit none
          logical condition
          %(complex_mp_format)s expr
          if(condition)then
             mp_recms=expr
          else
             mp_recms=cmplx(real(expr),kind=16)
          endif
          end

          %(complex_mp_format)s function mp_reglog(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16)) then
             mp_reglog=(0.0e0_16,0.0e0_16)
          else
             mp_reglog=log(arg)
          endif
          end

          %(complex_mp_format)s function mp_reglogp(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogp=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
                mp_reglogp=log(arg) + TWOPII
             else
                mp_reglogp=log(arg)
             endif
          endif
          end

          %(complex_mp_format)s function mp_reglogm(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogm=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
                mp_reglogm=log(arg) - TWOPII
             else
                mp_reglogm=log(arg)
             endif
          endif
          end

          %(complex_mp_format)s function mp_regsqrt(arg_in)
          implicit none
          %(complex_mp_format)s arg_in
          %(complex_mp_format)s arg
          arg=arg_in
          if(abs(imagpart(arg)).eq.0.0e0_16)then
             arg=cmplx(real(arg,kind=16),0.0e0_16)
          endif
          if(abs(real(arg,kind=16)).eq.0.0e0_16)then
             arg=cmplx(0.0e0_16,imagpart(arg))
          endif
          mp_regsqrt=sqrt(arg)
          end


          %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s expr1_in,expr2_in
          %(complex_mp_format)s expr1,expr2
          %(real_mp_format)s logsw
          %(real_mp_format)s imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(abs(imagpart(expr1)).eq.0.0e0_16)then
             expr1=cmplx(real(expr1,kind=16),0.0e0_16)
          endif
          if(abs(real(expr1,kind=16)).eq.0.0e0_16)then
             expr1=cmplx(0.0e0_16,imagpart(expr1))
          endif
          if(abs(imagpart(expr2)).eq.0.0e0_16)then
             expr2=cmplx(real(expr2,kind=16),0.0e0_16)
          endif
          if(abs(real(expr2,kind=16)).eq.0.0e0_16)then
             expr2=cmplx(0.0e0_16,imagpart(expr2))
          endif
          if(expr1.eq.(0.0e0_16,0.0e0_16))then
             mp_grreglog=(0.0e0_16,0.0e0_16)
          else
             imagexpr=imagpart(expr1)*imagpart(expr2)
             firstsheet=imagexpr.ge.0.0e0_16
             firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16
             firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16
             if(firstsheet)then
                mp_grreglog=log(expr1)
             else
                if(imagpart(expr1).gt.0.0e0_16)then
                   mp_grreglog=log(expr1) - logsw*TWOPII
                else
                   mp_grreglog=log(expr1) + logsw*TWOPII
                endif
             endif
          endif
          end

          %(complex_mp_format)s function mp_arg(comnum)
          implicit none
          %(complex_mp_format)s comnum
          %(complex_mp_format)s imm
          imm = (0.0e0_16,1.0e0_16)
          if(comnum.eq.(0.0e0_16,0.0e0_16)) then
             mp_arg=(0.0e0_16,0.0e0_16)
          else
             mp_arg=log(comnum/abs(comnum))/imm
          endif
          end""" % {'complex_mp_format': self.mp_complex_format, 'real_mp_format': self.mp_real_format})


    #check for the file functions.f
    # Append any hand-written Fortran helpers (form-factor support)
    # shipped with the model.
    model_path = self.model.get('modelpath')
    if os.path.exists(pjoin(model_path, 'Fortran', 'functions.f')):
        fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
        input = pjoin(model_path, 'Fortran', 'functions.f')
        fsock.writelines(open(input).read())
        fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

    # check for functions define in the UFO model
    # NOTE(review): this exclusion list differs from the one used in
    # create_model_functions_inc (no "cot" here), and the mp loop below
    # neither lowercases fct.name nor de-duplicates via 'done' -- confirm
    # whether that asymmetry is intended.
    ufo_fct = self.model.get('functions')
    if ufo_fct:
        fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
        done = []
        for fct in ufo_fct:
            # already handle by default
            if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                    "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms", "arg",
                    "grreglog", "regsqrt"] + done:
                done.append(str(fct.name.lower()))
                ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s

          return
          end
          """
                str_fct = self.p_to_f.parse(fct.expr)
                # p_to_f records in .to_define any symbol encountered in
                # the expression that still needs a Fortran declaration.
                if not self.p_to_f.to_define:
                    definitions = []
                else:
                    definitions = []
                    for d in self.p_to_f.to_define:
                        if d == 'pi':
                            definitions.append(' double precision pi')
                            definitions.append(' data pi /3.1415926535897932d0/')
                        else:
                            definitions.append(' double complex %s' % d)

                text = ufo_fct_template % {
                    'name': fct.name,
                    'args': ", ".join(fct.arguments),
                    'fct': str_fct,
                    'definitions': '\n'.join(definitions)
                }

                fsock.writelines(text)
        if self.opt['mp']:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
            for fct in ufo_fct:
                # already handle by default
                if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                        "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms", "arg",
                        "grreglog", "regsqrt"]:
                    ufo_fct_template = """
          %(complex_mp_format)s function mp_%(name)s(mp__%(args)s)
          implicit none
          %(complex_mp_format)s mp__%(args)s
          %(definitions)s
          mp_%(name)s = %(fct)s

          return
          end
          """
                    str_fct = self.mp_p_to_f.parse(fct.expr)
                    if not self.mp_p_to_f.to_define:
                        definitions = []
                    else:
                        definitions = []
                        for d in self.mp_p_to_f.to_define:
                            if d == 'pi':
                                definitions.append(' %s mp__pi' % self.mp_real_format)
                                definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/')
                            else:
                                definitions.append(' %s mp_%s' % (self.mp_complex_format, d))
                    text = ufo_fct_template % {
                        'name': fct.name,
                        'args': ", mp__".join(fct.arguments),
                        'fct': str_fct,
                        'definitions': '\n'.join(definitions),
                        'complex_mp_format': self.mp_complex_format
                    }
                    fsock.writelines(text)

        fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
def create_makeinc(self):
    """Create makeinc.inc listing the object files to compile for the model.

    One couplings<N>.f file is generated per batch of 25 couplings, first
    the scale-independent ones, then the scale-dependent ones; the
    multiple-precision (mp_) files exist only for the scale-dependent
    batches, i.e. file numbers nb_indep_files+1 .. total.

    FIX: the original bound the two batch counts to swapped names
    (nb_coup_indep from coups_dep and vice versa); the emitted file list
    was nevertheless correct only because the swap cancelled out in the
    range arithmetic.  Names are now semantically correct; output is
    byte-identical.
    """

    fsock = self.open('makeinc.inc', comment='#')
    text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o'
    text += ' model_functions.o '

    # 25 coupling definitions per generated couplings<N>.f file.
    nb_indep_files = 1 + len(self.coups_indep) // 25
    nb_dep_files = 1 + len(self.coups_dep) // 25
    total = nb_indep_files + nb_dep_files
    couplings_files = ['couplings%s.o' % (i + 1) for i in range(total)]
    if self.opt['mp']:
        # mp versions only for the scale-dependent coupling files.
        couplings_files += ['mp_couplings%s.o' % (i + 1)
                            for i in range(nb_indep_files, total)]
    text += ' '.join(couplings_files)
    fsock.writelines(text)
7428
def create_param_write(self):
    """Create param_write.inc: Fortran write statements echoing every model
    parameter (external, internal, and point-by-point internal ones).

    FIX: the inner helper was named ``format``, shadowing the Python
    builtin; renamed to ``_write_line``.  Output is unchanged.
    """

    fsock = self.open('param_write.inc', format='fortran')

    fsock.writelines("""write(*,*) ' External Params'
                        write(*,*) ' ---------------------------------'
                        write(*,*) ' '""")

    def _write_line(name):
        """One Fortran statement printing a single parameter value."""
        return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}

    # Write the external parameter
    lines = [_write_line(param.name) for param in self.params_ext]
    fsock.writelines('\n'.join(lines))

    fsock.writelines("""write(*,*) ' Internal Params'
                        write(*,*) ' ---------------------------------'
                        write(*,*) ' '""")
    # ZERO is a Fortran parameter, not a user-settable quantity; skip it,
    # and skip parameters the generated code never uses.
    lines = [_write_line(data.name) for data in self.params_indep
             if data.name != 'ZERO' and self.check_needed_param(data.name)]
    fsock.writelines('\n'.join(lines))

    fsock.writelines("""write(*,*) ' Internal Params evaluated point by point'
                        write(*,*) ' ----------------------------------------'
                        write(*,*) ' '""")
    lines = [_write_line(data.name) for data in self.params_dep
             if self.check_needed_param(data.name)]
    fsock.writelines('\n'.join(lines))
def create_ident_card(self):
    """Create ident_card.dat mapping each external parameter to its
    LHA block and code: one line '<block> <code...> <name>' per parameter.

    FIX: the inner helper was named ``format`` (shadowing the builtin) and
    built the column list before checking for a missing name; renamed and
    guard moved first.  Output is byte-identical, including the original
    behaviour that each entry already carries a trailing newline and the
    entries are additionally joined with '\\n'.
    """

    def _format_entry(parameter):
        """Return the ident_card line corresponding to this parameter."""
        # Nameless parameters produce no line at all.
        if not parameter.name:
            return ''
        colum = [parameter.lhablock.lower()] + \
                [str(value) for value in parameter.lhacode] + \
                [parameter.name]
        return ' '.join(colum) + '\n'

    fsock = self.open('ident_card.dat')

    external_param = [_format_entry(param) for param in self.params_ext]
    fsock.writelines('\n'.join(external_param))
def create_actualize_mp_ext_param_inc(self):
    """Create actualize_mp_ext_params.inc: Fortran assignments copying the
    double-precision external parameters into their multiple-precision
    counterparts."""

    # In principle one should actualize all external parameters, but for
    # now it is hardcoded that only AS and MU_R can be dynamically changed
    # by the user, so only the phase-space-dependent ones are updated.
    # (To be on the safe side one could update all of them.)
    to_update = [param for param in self.params_ext
                 if param.name in self.PS_dependent_key]

    assignments = []
    for param in to_update:
        assignments.append('%(mp_prefix)s%(name)s=%(name)s'
                           % {'mp_prefix': self.mp_prefix,
                              'name': param.name})

    # When read_lha is false, the input quantity is G rather than AS, so
    # G must be refreshed here as well whenever aS is updated.
    updated_names = [param.name for param in to_update]
    if 'aS' in updated_names:
        assignments.append('%(mp_prefix)sG=G'
                           % {'mp_prefix': self.mp_prefix})

    fsock = self.open('actualize_mp_ext_params.inc', format='fortran')
    fsock.writelines('\n'.join(assignments))
7498
def create_param_read(self):
    """create param_read

    Writes param_read.inc, the Fortran code that loads the external
    parameters.  For madevent/FKS-style outputs (or loop-induced ones)
    the values come from the pre-generated param_card.inc; otherwise one
    LHA_get_real call is emitted per external parameter.
    """

    # madevent / aMC@NLO outputs read the card through param_card.inc.
    if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
       or self.opt['loop_induced']:
        fsock = self.open('param_read.inc', format='fortran')
        fsock.writelines(' include \'../param_card.inc\'')
        return

    def format_line(parameter):
        """return the line for the ident_card corresponding to this
        parameter"""
        # Default value (third argument) is the parameter's real part,
        # converted to Fortran syntax by the python-to-fortran parser.
        template = \
            """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
            % {'name': parameter.name,
               'value': self.p_to_f.parse(str(parameter.value.real))}
        # Mirror read into the multiple-precision variable when enabled.
        if self.opt['mp']:
            template = template + \
                ("\n call MP_LHA_get_real(npara,param,value,'%(name)s'," +
                 "%(mp_prefix)s%(name)s,%(value)s)") \
                % {'name': parameter.name, 'mp_prefix': self.mp_prefix,
                   'value': self.mp_p_to_f.parse(str(parameter.value.real))}

        # Parameters of the 'loop' block may legitimately be absent from
        # the card; use the silent variant to avoid spurious warnings.
        if parameter.lhablock.lower() == 'loop':
            template = template.replace('LHA_get_real', 'LHA_get_real_silent')

        return template

    fsock = self.open('param_read.inc', format='fortran')
    res_strings = [format_line(param)
                   for param in self.params_ext]

    # Correct width sign for Majorana particles (where the width
    # and mass need to have the same sign)
    for particle in self.model.get('particles'):
        if particle.is_fermion() and particle.get('self_antipart') and \
           particle.get('width').lower() != 'zero':

            res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                {'width': particle.get('width'), 'mass': particle.get('mass')})
            if self.opt['mp']:
                res_strings.append(\
                    ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,' +\
                     '%(mp_pref)s%(mass)s)') % {'width': particle.get('width'),\
                    'mass': particle.get('mass'), 'mp_pref': self.mp_prefix})

    fsock.writelines('\n'.join(res_strings))


# Decorator of the following method (create_param_card_static).
@staticmethod
def create_param_card_static(model, output_path, rule_card_path=False,
                             mssm_convert=True, write_special=True):
    """ create the param_card.dat for a givent model --static method--

    If the model ships a default card matching its restriction, copy it;
    otherwise write a fresh card.  Optionally dumps the restriction rule
    card and, for MSSM models, converts the card to the SLHA1 convention.
    """

    # 1. Try to reuse a default card shipped with a restricted model:
    # restrict_<X>.dat  ->  paramcard_<X>.dat
    copied_default = False
    if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str):
        # strip the 'restrict_' prefix and the '.dat' suffix
        tag = os.path.basename(model.restrict_card)[9:-4]
        default_card = pjoin(model.get('modelpath'), 'paramcard_%s.dat' % tag)
        if os.path.exists(default_card):
            files.cp(default_card, output_path)
            copied_default = True

    if not copied_default:
        # 2. No pre-made card: generate one from the model content.
        param_writer.ParamCardWriter(model, output_path,
                                     write_special=write_special)

    if rule_card_path and hasattr(model, 'rule_card'):
        model.rule_card.write_file(rule_card_path)

    if mssm_convert:
        name = model.get('name')
        # IF MSSM convert the card to SLAH1
        if name == 'mssm' or name.startswith('mssm-'):
            import models.check_param_card as translator
            # Check the format of the param_card for Pythia and make it correct
            if rule_card_path:
                translator.make_valid_param_card(output_path, rule_card_path)
            translator.convert_to_slha1(output_path)
7577
def create_param_card(self, write_special=True):
    """ create the param_card.dat

    Delegates to create_param_card_static after deciding whether the
    'special' parameters must be written, based on which exporter class
    is driving the output.
    """

    # Only dump the rule card when the model actually carries one.
    rule_card = pjoin(self.dir_path, 'param_card_rule.dat')
    if not hasattr(self.model, 'rule_card'):
        rule_card = False

    # NOTE(review): this unconditionally overrides the write_special
    # argument, making the parameter dead on entry -- confirm whether
    # callers rely on passing write_special=False here.
    write_special = True
    if 'exporter' in self.opt:
        import madgraph.loop.loop_exporters as loop_exporters
        import madgraph.iolibs.export_fks as export_fks
        write_special = False
        # Standalone loop output wants the special parameters...
        if issubclass(self.opt['exporter'], loop_exporters.LoopProcessExporterFortranSA):
            write_special = True
        # ...unless it is loop-induced MadEvent or FKS output (checked
        # second on purpose: these subclasses override the rule above).
        if issubclass(self.opt['exporter'], (loop_exporters.LoopInducedExporterME, export_fks.ProcessExporterFortranFKS)):
            write_special = False

    self.create_param_card_static(self.model,
        output_path=pjoin(self.dir_path, 'param_card.dat'),
        rule_card_path=rule_card,
        mssm_convert=True,
        write_special=write_special)
7599
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options=None):
    """ Determine which Export_v4 class is required. cmd is the command 
    interface containing all potential usefull information.
    The output_type argument specifies from which context the output
    is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
    and 'default' for tree-level outputs.

    FIXES vs. previous revision:
      * cmd_options defaulted to a mutable ``{}`` shared across calls;
        it now defaults to None and is normalised inside (same semantics
        for every existing caller).
      * the final error message used '%s' without supplying output_type;
        the placeholder is now actually substituted.
    """

    if cmd_options is None:
        cmd_options = {}

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Fetch a representative process (first amplitude, or first FKS
    # process definition) to probe its perturbation settings.
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
            len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
        (curr_proc is not None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass': cmd.options['complex_mass_scheme'],
      'export_format': 'madloop',
      'mp': True,
      'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir': cmd._iregi_dir,
      'golem_dir': cmd.options['golem'],
      'samurai_dir': cmd.options['samurai'],
      'ninja_dir': cmd.options['ninja'],
      'collier_dir': cmd.options['collier'],
      'fortran_compiler': cmd.options['fortran_compiler'],
      'f2py_compiler': cmd.options['f2py_compiler'],
      'output_dependencies': cmd.options['output_dependencies'],
      'SubProc_prefix': 'P',
      'compute_color_flows': cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path'],
      'output_options': cmd_options
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception("output_type not recognize %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory' +\
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # mp support is only needed when virtual amplitudes are present.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format  # shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception('Wrong export_v4 format')
    else:
        raise MadGraph5Error('Output type %s not reckognized in ExportV4Factory.'
                             % output_type)
7770
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used when writing the per-subprocess matrix<N>.f files.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Tag identifying this exporter's grouped-output flavour.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
def generate_subprocess_directory(self, subproc_group,
                                  fortran_model,
                                  group_number):
    """Generate the Pn directory for a subprocess group in MadEvent,
    including the necessary matrix_N.f files, configs.inc and various
    other helper files.

    Returns the total number of helas calls over all matrix elements of
    the group.  NOTE(review): the group_number argument is never used in
    this body -- presumably kept for interface compatibility; confirm.
    """

    if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
        raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup")

    # Lazily bind the model from the first process of the group.
    if not self.model:
        self.model = subproc_group.get('matrix_elements')[0].\
                     get('processes')[0].get('model')

    pathdir = os.path.join(self.dir_path, 'SubProcesses')

    # Create the directory PN in the specified path
    subprocdir = "P%d_%s" % (subproc_group.get('number'),
                             subproc_group.get('name'))
    try:
        os.mkdir(pjoin(pathdir, subprocdir))
    except os.error as error:
        # Directory may already exist; warn and keep going.
        logger.warning(error.strerror + " " + subprocdir)

    logger.info('Creating files in directory %s' % subprocdir)
    Ppath = pjoin(pathdir, subprocdir)

    # Create the matrix.f files, auto_dsig.f files and all inc files
    # for all subprocesses in the group

    maxamps = 0   # largest diagram count over the group
    maxflows = 0  # largest color-flow count over the group
    tot_calls = 0 # accumulated number of helas calls

    matrix_elements = subproc_group.get('matrix_elements')

    for ime, matrix_element in \
            enumerate(matrix_elements):
        filename = pjoin(Ppath, 'matrix%d.f' % (ime + 1))
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                                         matrix_element,
                                         fortran_model,
                                         str(ime + 1),
                                         subproc_group.get('diagram_maps')[\
                                                                        ime])

        filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime + 1))
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  str(ime + 1))

        # Keep track of needed quantities
        tot_calls += int(calls)
        maxflows = max(maxflows, ncolor)
        maxamps = max(maxamps, len(matrix_element.get('diagrams')))

        # Draw diagrams
        filename = pjoin(Ppath, "matrix%d.ps" % (ime + 1))
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model = \
                                            matrix_element.get('processes')[0].\
                                                                   get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

    # Extract number of external particles
    # (taken from the last matrix element of the loop above; all members
    # of a group share the same external legs)
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Generate a list of diagrams corresponding to each configuration
    # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
    # If a subprocess has no diagrams for this config, the number is 0

    subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

    filename = pjoin(Ppath, 'auto_dsig.f')
    self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                    subproc_group)

    filename = pjoin(Ppath, 'configs.inc')
    nconfigs, s_and_t_channels = self.write_configs_file(\
        writers.FortranWriter(filename),
        subproc_group,
        subproc_diagrams_for_config)

    filename = pjoin(Ppath, 'leshouche.inc')
    self.write_leshouche_file(writers.FortranWriter(filename),
                              subproc_group)

    filename = pjoin(Ppath, 'phasespace.inc')
    self.write_phasespace_file(writers.FortranWriter(filename),
                               nconfigs)

    filename = pjoin(Ppath, 'maxamps.inc')
    self.write_maxamps_file(writers.FortranWriter(filename),
                            maxamps,
                            maxflows,
                            max([len(me.get('processes')) for me in \
                                 matrix_elements]),
                            len(matrix_elements))

    filename = pjoin(Ppath, 'mirrorprocs.inc')
    self.write_mirrorprocs(writers.FortranWriter(filename),
                           subproc_group)

    filename = pjoin(Ppath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = pjoin(Ppath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = pjoin(Ppath, 'props.inc')
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    # filename = pjoin(Ppath, 'processes.dat')
    # files.write_to_file(filename,
    #                     self.write_processes_file,
    #                     subproc_group)

    # Generate jpgs -> pass in make_html
    #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

    # Symlink the shared driver/utility files into the P directory.
    # NOTE(review): the loop variable 'file' shadows a builtin name.
    linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f']

    for file in linkfiles:
        ln('../%s' % file, cwd=Ppath)

    ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
    ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
    ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
    ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
    ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
    ln('phasespace.inc', '../', log=True, cwd=Ppath)
    if not tot_calls:
        tot_calls = 0
    return tot_calls
7932 7933 7934 #=========================================================================== 7935 # Helper functions 7936 #===========================================================================
7937 - def modify_grouping(self, matrix_element):
7938 """allow to modify the grouping (if grouping is in place) 7939 return two value: 7940 - True/False if the matrix_element was modified 7941 - the new(or old) matrix element""" 7942 7943 return True, matrix_element.split_lepton_grouping()
7944 7945 #=========================================================================== 7946 # write_super_auto_dsig_file 7947 #===========================================================================
7948 - def write_super_auto_dsig_file(self, writer, subproc_group):
7949 """Write the auto_dsig.f file selecting between the subprocesses 7950 in subprocess group mode""" 7951 7952 replace_dict = {} 7953 7954 # Extract version number and date from VERSION file 7955 info_lines = self.get_mg5_info_lines() 7956 replace_dict['info_lines'] = info_lines 7957 7958 matrix_elements = subproc_group.get('matrix_elements') 7959 7960 # Extract process info lines 7961 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7962 matrix_elements]) 7963 replace_dict['process_lines'] = process_lines 7964 7965 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7966 replace_dict['nexternal'] = nexternal 7967 7968 replace_dict['nsprocs'] = 2*len(matrix_elements) 7969 7970 # Generate dsig definition line 7971 dsig_def_line = "DOUBLE PRECISION " + \ 7972 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7973 range(len(matrix_elements))]) 7974 replace_dict["dsig_def_line"] = dsig_def_line 7975 7976 # Generate dsig process lines 7977 call_dsig_proc_lines = [] 7978 for iproc in range(len(matrix_elements)): 7979 call_dsig_proc_lines.append(\ 7980 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7981 {"num": iproc + 1, 7982 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7983 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7984 7985 if writer: 7986 file = open(os.path.join(_file_path, \ 7987 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7988 file = file % replace_dict 7989 # Write the file 7990 writer.writelines(file) 7991 else: 7992 return replace_dict
7993 7994 #=========================================================================== 7995 # write_mirrorprocs 7996 #===========================================================================
7997 - def write_mirrorprocs(self, writer, subproc_group):
7998 """Write the mirrorprocs.inc file determining which processes have 7999 IS mirror process in subprocess group mode.""" 8000 8001 lines = [] 8002 bool_dict = {True: '.true.', False: '.false.'} 8003 matrix_elements = subproc_group.get('matrix_elements') 8004 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 8005 (len(matrix_elements), 8006 ",".join([bool_dict[me.get('has_mirror_process')] for \ 8007 me in matrix_elements]))) 8008 # Write the file 8009 writer.writelines(lines)
8010 8011 #=========================================================================== 8012 # write_configs_file 8013 #===========================================================================
8014 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
8015 """Write the configs.inc file with topology information for a 8016 subprocess group. Use the first subprocess with a diagram for each 8017 configuration.""" 8018 8019 matrix_elements = subproc_group.get('matrix_elements') 8020 model = matrix_elements[0].get('processes')[0].get('model') 8021 8022 diagrams = [] 8023 config_numbers = [] 8024 for iconfig, config in enumerate(diagrams_for_config): 8025 # Check if any diagrams correspond to this config 8026 if set(config) == set([0]): 8027 continue 8028 subproc_diags = [] 8029 for s,d in enumerate(config): 8030 if d: 8031 subproc_diags.append(matrix_elements[s].\ 8032 get('diagrams')[d-1]) 8033 else: 8034 subproc_diags.append(None) 8035 diagrams.append(subproc_diags) 8036 config_numbers.append(iconfig + 1) 8037 8038 # Extract number of external particles 8039 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 8040 8041 return len(diagrams), \ 8042 self.write_configs_file_from_diagrams(writer, diagrams, 8043 config_numbers, 8044 nexternal, ninitial, 8045 matrix_elements[0],model)
8046 8047 #=========================================================================== 8048 # write_run_configs_file 8049 #===========================================================================
8050 - def write_run_config_file(self, writer):
8051 """Write the run_configs.inc file for MadEvent""" 8052 8053 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 8054 text = open(path).read() % {'chanperjob':'2'} 8055 writer.write(text) 8056 return True
8057 8058 8059 #=========================================================================== 8060 # write_leshouche_file 8061 #===========================================================================
8062 - def write_leshouche_file(self, writer, subproc_group):
8063 """Write the leshouche.inc file for MG4""" 8064 8065 all_lines = [] 8066 8067 for iproc, matrix_element in \ 8068 enumerate(subproc_group.get('matrix_elements')): 8069 all_lines.extend(self.get_leshouche_lines(matrix_element, 8070 iproc)) 8071 8072 # Write the file 8073 writer.writelines(all_lines) 8074 8075 return True
8076