
Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30   
  31   
  32  import aloha 
  33   
  34  import madgraph.core.base_objects as base_objects 
  35  import madgraph.core.color_algebra as color 
  36  import madgraph.core.helas_objects as helas_objects 
  37  import madgraph.iolibs.drawing_eps as draw 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.iolibs.group_subprocs as group_subprocs 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  42  import madgraph.iolibs.template_files as template_files 
  43  import madgraph.iolibs.ufo_expression_parsers as parsers 
  44  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  45  import madgraph.various.diagram_symmetry as diagram_symmetry 
  46  import madgraph.various.misc as misc 
  47  import madgraph.various.banner as banner_mod 
  48  import madgraph.various.process_checks as process_checks 
  49  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  50  import aloha.create_aloha as create_aloha 
  51  import models.import_ufo as import_ufo 
  52  import models.write_param_card as param_writer 
  53  import models.check_param_card as check_param_card 
  54   
  55   
  56  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  57  from madgraph.iolibs.files import cp, ln, mv 
  58   
  59  pjoin = os.path.join 
  60   
  61  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  62  logger = logging.getLogger('madgraph.export_v4') 
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(object):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    default_opt = {'clean': False, 'complex_mass': False,
                   'export_format': 'madevent', 'mp': False
                   }

    def __init__(self, mgme_dir="", dir_path="", opt=None):
        """Initiate the ProcessExporterFortran with directory information"""
        self.mgme_dir = mgme_dir
        self.dir_path = dir_path
        self.model = None

        self.opt = dict(self.default_opt)
        if opt:
            self.opt.update(opt)

        # place holder to pass information to the run_interface
        self.proc_characteristic = banner_mod.ProcCharacteristic()
    #===========================================================================
    # process exporter fortran switch between group and not grouped
    #===========================================================================
    def export_processes(self, matrix_elements, fortran_model):
        """Make the switch between grouped and not grouped output"""

        calls = 0
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            for (group_number, me_group) in enumerate(matrix_elements):
                calls = calls + self.generate_subprocess_directory_v4(\
                                    me_group, fortran_model, group_number)
        else:
            for me_number, me in enumerate(matrix_elements.get_matrix_elements()):
                calls = calls + self.generate_subprocess_directory_v4(\
                                    me, fortran_model, me_number)

        return calls
    #===========================================================================
    # create the run_card
    #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Create the run_card (and its default version) for this process."""

        run_card = banner_mod.RunCard()

        default = True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            default = False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
    #===========================================================================
    # copy the Template in a new directory.
    #===========================================================================
    def copy_v4template(self, modelname):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory
        """

        # First copy the full template tree if dir_path doesn't exist
        if not os.path.isdir(self.dir_path):
            assert self.mgme_dir, \
                "No valid MG_ME path given for MG4 run directory creation."
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(self.dir_path))
            shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                            self.dir_path, True)
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'),
                               self.dir_path)
            # Duplicate run_card and plot_card
            for card in ['plot_card']:
                try:
                    shutil.copy(pjoin(self.dir_path, 'Cards',
                                      card + '.dat'),
                                pjoin(self.dir_path, 'Cards',
                                      card + '_default.dat'))
                except IOError:
                    logger.warning("Failed to copy " + card + ".dat to default")
        elif os.getcwd() == os.path.realpath(self.dir_path):
            logger.info('working in local directory: %s' % \
                        os.path.realpath(self.dir_path))
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'),
                               self.dir_path)
            # for name in glob.glob(pjoin(self.mgme_dir, 'Template/LO/*')):
            #     name = os.path.basename(name)
            #     filname = pjoin(self.mgme_dir, 'Template', 'LO', name)
            #     if os.path.isfile(filename):
            #         files.cp(filename, pjoin(self.dir_path, name))
            #     elif os.path.isdir(filename):
            #         shutil.copytree(filename, pjoin(self.dir_path, name), True)
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'),
                               self.dir_path)
            # Duplicate run_card and plot_card
            for card in ['plot_card']:
                try:
                    shutil.copy(pjoin(self.dir_path, 'Cards',
                                      card + '.dat'),
                                pjoin(self.dir_path, 'Cards',
                                      card + '_default.dat'))
                except IOError:
                    logger.warning("Failed to copy " + card + ".dat to default")
        elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
            assert self.mgme_dir, \
                "No valid MG_ME path given for MG4 run directory creation."
            try:
                shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
            except IOError:
                MG5_version = misc.get_pkg_info()
                open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                    "5." + MG5_version['version'])

        # Ensure that the Template is clean
        if self.opt['clean']:
            logger.info('remove old information in %s' % \
                        os.path.basename(self.dir_path))
            if os.environ.has_key('MADGRAPH_BASE'):
                misc.call([pjoin('bin', 'internal', 'clean_template'),
                           '--web'], cwd=self.dir_path)
            else:
                try:
                    misc.call([pjoin('bin', 'internal', 'clean_template')], \
                              cwd=self.dir_path)
                except Exception, why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(self.dir_path), why))

        # Write version info
        MG_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
            MG_version['version'])

        # add the makefile in Source directory
        filename = pjoin(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FileWriter(filename))

        # add the DiscreteSampler information
        files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
                 pjoin(self.dir_path, 'Source'))
        files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'StringCast.f'),
                 pjoin(self.dir_path, 'Source'))

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
    #===========================================================================
    # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat)
    #===========================================================================
    def write_procdef_mg5(self, file_pos, modelname, process_str):
        """Write an equivalent of the MG4 proc_card so that all the MadEvent4
        Perl scripts keep working properly for a pure MG5 run."""

        proc_card_template = template_files.mg4_proc_card.mg4_template
        process_template = template_files.mg4_proc_card.process_template
        process_text = ''
        coupling = ''
        new_process_content = []

        # First find the couplings and suppress them from process_str,
        # but make sure that couplings are defined without spaces:
        process_str = process_str.replace(' =', '=')
        process_str = process_str.replace('= ', '=')
        process_str = process_str.replace(',', ' , ')
        # now loop over the elements and treat all the couplings
        for info in process_str.split():
            if '=' in info:
                coupling += info + '\n'
            else:
                new_process_content.append(info)
        # Recombine process_str (the input process_str without coupling info)
        process_str = ' '.join(new_process_content)

        # format the SubProcess
        process_text += process_template.substitute({'process': process_str, \
                                                     'coupling': coupling})

        text = proc_card_template.substitute({'process': process_text,
                                              'model': modelname,
                                              'multiparticle': ''})
        ff = open(file_pos, 'w')
        ff.write(text)
        ff.close()
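
    # Illustrative sketch (not part of the original module): the coupling
    # stripping performed above, in isolation. The helper name is hypothetical
    # and it only mirrors the token handling, not the template substitution.
    def _example_split_process_and_couplings(process_str):
        """Return (process string without couplings, newline-joined couplings)."""
        process_str = process_str.replace(' =', '=').replace('= ', '=')
        process_str = process_str.replace(',', ' , ')
        couplings = []
        process = []
        for token in process_str.split():
            (couplings if '=' in token else process).append(token)
        return ' '.join(process), '\n'.join(couplings)
    # _example_split_process_and_couplings('p p > t t~ QED=0, (t > w+ b)')
    # returns ('p p > t t~ , (t > w+ b)', 'QED=0').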
    #===========================================================================
    # Create jpeg diagrams, html pages, proc_card_mg5.dat and madevent.tar.gz
    #===========================================================================
    def finalize_v4_directory(self, matrix_elements, history="", makejpg=False,
                              online=False, compiler='g77'):
        """Function to finalize v4 directory, for inheritance."""

        self.create_run_card(matrix_elements, history)

        pass
    #===========================================================================
    # Create the proc_characteristic file passing information to the run_interface
    #===========================================================================
    def create_proc_charac(self, matrix_elements=None, history="", **opts):
        """Write the proc_characteristics file passing process information
        to the run_interface."""
        self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses',
                                             'proc_characteristics'))
    #===========================================================================
    # write_matrix_element_v4
    #===========================================================================
    def write_matrix_element_v4(self):
        """Function to write a matrix.f file, for inheritance."""
        pass
304 305 #=========================================================================== 306 # write_pdf_opendata 307 #===========================================================================
308 - def write_pdf_opendata(self):
309 """ modify the pdf opendata file, to allow direct access to cluster node 310 repository if configure""" 311 312 if not self.opt["cluster_local_path"]: 313 changer = {"pdf_systemwide": ""} 314 else: 315 to_add = """ 316 tempname='%(path)s'//Tablefile 317 open(IU,file=tempname,status='old',ERR=1) 318 return 319 1 tempname='%(path)s/Pdfdata/'//Tablefile 320 open(IU,file=tempname,status='old',ERR=2) 321 return 322 2 tempname='%(path)s/lhapdf'//Tablefile 323 open(IU,file=tempname,status='old',ERR=3) 324 return 325 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 326 open(IU,file=tempname,status='old',ERR=4) 327 return 328 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 329 open(IU,file=tempname,status='old',ERR=5) 330 return 331 """ % {"path" : self.opt["cluster_local_path"]} 332 333 changer = {"pdf_systemwide": to_add} 334 335 ff = open(pjoin(self.dir_path, "Source", "PDF", "opendata.f"),"w") 336 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 337 ff.write(template % changer) 338 339 # Do the same for lhapdf set 340 if not self.opt["cluster_local_path"]: 341 changer = {"cluster_specific_path": ""} 342 else: 343 to_add=""" 344 LHAPath='%(path)s/PDFsets' 345 Inquire(File=LHAPath, exist=exists) 346 if(exists)return 347 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 348 Inquire(File=LHAPath, exist=exists) 349 if(exists)return 350 LHAPath='%(path)s/../lhapdf/pdfsets/' 351 Inquire(File=LHAPath, exist=exists) 352 if(exists)return 353 LHAPath='./PDFsets' 354 """ % {"path" : self.opt["cluster_local_path"]} 355 changer = {"cluster_specific_path": to_add} 356 357 ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 358 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 359 ff.write(template % changer) 360 361 362 return
    #===========================================================================
    # write_maxparticles_file
    #===========================================================================
    def write_maxparticles_file(self, writer, matrix_elements):
        """Write the maxparticles.inc file for MadEvent"""

        if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
            maxparticles = max([me.get_nexternal_ninitial()[0] for me in \
                                matrix_elements.get('matrix_elements')])
        else:
            maxparticles = max([me.get_nexternal_ninitial()[0] \
                                for me in matrix_elements])

        lines = "integer max_particles\n"
        lines += "parameter(max_particles=%d)" % maxparticles

        # Write the file
        writer.writelines(lines)

        return True
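
    # Illustrative sketch (not part of the original module): the value written
    # above is simply the largest nexternal over all matrix elements, shown
    # here on plain (nexternal, ninitial) tuples.
    def _example_maxparticles(nexternal_ninitial_list):
        return max(nexternal for nexternal, ninitial in nexternal_ninitial_list)
    # _example_maxparticles([(4, 2), (5, 2), (3, 2)]) == 5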
    #===========================================================================
    # export the model
    #===========================================================================
    def export_model_files(self, model_path):
        """Configure the files/links of the process according to the model"""

        # Import the model
        for file in os.listdir(model_path):
            if os.path.isfile(pjoin(model_path, file)):
                shutil.copy2(pjoin(model_path, file), \
                             pjoin(self.dir_path, 'Source', 'MODEL'))
    #===========================================================================
    # export the helas routine
    #===========================================================================
    def export_helas(self, helas_path):
        """Configure the files/links of the process according to the model"""

        # Import helas routine
        for filename in os.listdir(helas_path):
            filepos = pjoin(helas_path, filename)
            if os.path.isfile(filepos):
                if filepos.endswith('Makefile.template'):
                    cp(filepos, self.dir_path + '/Source/DHELAS/Makefile')
                elif filepos.endswith('Makefile'):
                    pass
                else:
                    cp(filepos, self.dir_path + '/Source/DHELAS')
    # The following lines do the same but without symbolic links
    #
    # def export_helas(mgme_dir, dir_path):
    #
    #     # Copy the HELAS directory
    #     helas_dir = pjoin(mgme_dir, 'HELAS')
    #     for filename in os.listdir(helas_dir):
    #         if os.path.isfile(pjoin(helas_dir, filename)):
    #             shutil.copy2(pjoin(helas_dir, filename),
    #                          pjoin(dir_path, 'Source', 'DHELAS'))
    #     shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'),
    #                 pjoin(dir_path, 'Source', 'DHELAS', 'Makefile'))
    #

    #===========================================================================
    # generate_subprocess_directory_v4
    #===========================================================================
    def generate_subprocess_directory_v4(self, matrix_element,
                                         fortran_model,
                                         me_number):
        """Routine to generate a subprocess directory (for inheritance)"""

        pass
    #===========================================================================
    # get_source_libraries_list
    #===========================================================================
    def get_source_libraries_list(self):
        """Returns the list of libraries to be compiled when compiling the
        SOURCE directory. It is different for loop_induced processes and
        also depends on the value of the 'output_dependencies' option."""

        return ['$(LIBDIR)libdhelas.$(libext)',
                '$(LIBDIR)libpdf.$(libext)',
                '$(LIBDIR)libmodel.$(libext)',
                '$(LIBDIR)libcernlib.$(libext)']
    #===========================================================================
    # write_source_makefile
    #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source makefile for MG4/MadEvent"""

        path = pjoin(_file_path, 'iolibs', 'template_files', 'madevent_makefile_source')
        set_of_lib = ' '.join(['$(LIBRARIES)'] + self.get_source_libraries_list())
        if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
            model_line = '''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
        else:
            model_line = '''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''
        text = open(path).read() % {'libraries': set_of_lib, 'model': model_line}
        writer.write(text)

        return True
494 495 #=========================================================================== 496 # write_nexternal_madspin 497 #===========================================================================
498 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
499 """Write the nexternal_prod.inc file for madspin""" 500 501 replace_dict = {} 502 503 replace_dict['nexternal'] = nexternal 504 replace_dict['ninitial'] = ninitial 505 506 file = """ \ 507 integer nexternal_prod 508 parameter (nexternal_prod=%(nexternal)d) 509 integer nincoming_prod 510 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 511 512 # Write the file 513 writer.writelines(file) 514 515 return True
516 517 #=========================================================================== 518 # write_helamp_madspin 519 #===========================================================================
520 - def write_helamp_madspin(self, writer, ncomb):
521 """Write the helamp.inc file for madspin""" 522 523 replace_dict = {} 524 525 replace_dict['ncomb'] = ncomb 526 527 file = """ \ 528 integer ncomb1 529 parameter (ncomb1=%(ncomb)d) 530 double precision helamp(ncomb1) 531 common /to_helamp/helamp """ % replace_dict 532 533 # Write the file 534 writer.writelines(file) 535 536 return True
    #===========================================================================
    # write_nexternal_file
    #===========================================================================
    def write_nexternal_file(self, writer, nexternal, ninitial):
        """Write the nexternal.inc file for MG4"""

        replace_dict = {}

        replace_dict['nexternal'] = nexternal
        replace_dict['ninitial'] = ninitial

        file = """ \
      integer nexternal
      parameter (nexternal=%(nexternal)d)
      integer nincoming
      parameter (nincoming=%(ninitial)d)""" % replace_dict

        # Write the file
        writer.writelines(file)

        return True
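
    # Illustrative sketch (not part of the original module): the text produced
    # by the '%(...)d' substitution above, with the Fortran indentation kept
    # schematic.
    def _example_nexternal_inc(nexternal, ninitial):
        replace_dict = {'nexternal': nexternal, 'ninitial': ninitial}
        return ("      integer nexternal\n"
                "      parameter (nexternal=%(nexternal)d)\n"
                "      integer nincoming\n"
                "      parameter (nincoming=%(ninitial)d)" % replace_dict)
    # _example_nexternal_inc(4, 2) contains 'parameter (nexternal=4)' and
    # 'parameter (nincoming=2)'.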
    #===========================================================================
    # write_pmass_file
    #===========================================================================
    def write_pmass_file(self, writer, matrix_element):
        """Write the pmass.inc file for MG4"""

        model = matrix_element.get('processes')[0].get('model')

        lines = []
        for wf in matrix_element.get_external_wavefunctions():
            mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass')
            if mass.lower() != "zero":
                mass = "abs(%s)" % mass

            lines.append("pmass(%d)=%s" % \
                         (wf.get('number_external'), mass))

        # Write the file
        writer.writelines(lines)

        return True
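
    # Illustrative sketch (not part of the original module): the abs()
    # wrapping used above, shown on a toy mapping from external leg number
    # to mass parameter name ('ZERO' is kept literal).
    def _example_pmass_lines(masses_by_leg):
        lines = []
        for number_external, mass in sorted(masses_by_leg.items()):
            if mass.lower() != "zero":
                mass = "abs(%s)" % mass
            lines.append("pmass(%d)=%s" % (number_external, mass))
        return lines
    # _example_pmass_lines({1: 'ZERO', 2: 'ZERO', 3: 'MT', 4: 'MT'}) gives
    # ['pmass(1)=ZERO', 'pmass(2)=ZERO', 'pmass(3)=abs(MT)', 'pmass(4)=abs(MT)']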
    #===========================================================================
    # write_ngraphs_file
    #===========================================================================
    def write_ngraphs_file(self, writer, nconfigs):
        """Write the ngraphs.inc file for MG4. Needs input from
        write_configs_file."""

        file = " integer n_max_cg\n"
        file = file + "parameter (n_max_cg=%d)" % nconfigs

        # Write the file
        writer.writelines(file)

        return True
    #===========================================================================
    # write_leshouche_file
    #===========================================================================
    def write_leshouche_file(self, writer, matrix_element):
        """Write the leshouche.inc file for MG4"""

        # Write the file
        writer.writelines(self.get_leshouche_lines(matrix_element, 0))

        return True
608 609 #=========================================================================== 610 # get_leshouche_lines 611 #===========================================================================
612 - def get_leshouche_lines(self, matrix_element, numproc):
613 """Write the leshouche.inc file for MG4""" 614 615 # Extract number of external particles 616 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 617 618 lines = [] 619 for iproc, proc in enumerate(matrix_element.get('processes')): 620 legs = proc.get_legs_with_decays() 621 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 622 (iproc + 1, numproc+1, nexternal, 623 ",".join([str(l.get('id')) for l in legs]))) 624 if iproc == 0 and numproc == 0: 625 for i in [1, 2]: 626 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 627 (i, nexternal, 628 ",".join([ "%3r" % 0 ] * ninitial + \ 629 [ "%3r" % i ] * (nexternal - ninitial)))) 630 631 # Here goes the color connections corresponding to the JAMPs 632 # Only one output, for the first subproc! 633 if iproc == 0: 634 # If no color basis, just output trivial color flow 635 if not matrix_element.get('color_basis'): 636 for i in [1, 2]: 637 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 638 (i, numproc+1,nexternal, 639 ",".join([ "%3r" % 0 ] * nexternal))) 640 641 else: 642 # First build a color representation dictionnary 643 repr_dict = {} 644 for l in legs: 645 repr_dict[l.get('number')] = \ 646 proc.get('model').get_particle(l.get('id')).get_color()\ 647 * (-1)**(1+l.get('state')) 648 # Get the list of color flows 649 color_flow_list = \ 650 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 651 ninitial) 652 # And output them properly 653 for cf_i, color_flow_dict in enumerate(color_flow_list): 654 for i in [0, 1]: 655 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 656 (i + 1, cf_i + 1, numproc+1, nexternal, 657 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 658 for l in legs]))) 659 660 return lines
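
    # Illustrative sketch (not part of the original module): the shape of the
    # IDUP/MOTHUP DATA statements built above, for a single 2 -> 2 process.
    # The real method also emits the ICOLUP colour-flow lines.
    def _example_leshouche_lines(pdg_ids, ninitial):
        nexternal = len(pdg_ids)
        lines = ["DATA (IDUP(i,1,1),i=1,%d)/%s/" %
                 (nexternal, ",".join(str(pdg) for pdg in pdg_ids))]
        for i in [1, 2]:
            lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" %
                         (i, nexternal,
                          ",".join(["%3r" % 0] * ninitial +
                                   ["%3r" % i] * (nexternal - ninitial))))
        return lines
    # _example_leshouche_lines([-11, 11, -13, 13], 2) gives
    # ['DATA (IDUP(i,1,1),i=1,4)/-11,11,-13,13/',
    #  'DATA (MOTHUP(1,i),i=1, 4)/  0,  0,  1,  1/',
    #  'DATA (MOTHUP(2,i),i=1, 4)/  0,  0,  2,  2/']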
    #===========================================================================
    # write_maxamps_file
    #===========================================================================
    def write_maxamps_file(self, writer, maxamps, maxflows,
                           maxproc, maxsproc):
        """Write the maxamps.inc file for MG4."""

        file = " integer maxamps, maxflow, maxproc, maxsproc\n"
        file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \
               (maxamps, maxflows)
        file = file + "parameter (maxproc=%d, maxsproc=%d)" % \
               (maxproc, maxsproc)

        # Write the file
        writer.writelines(file)

        return True
682 683 #=========================================================================== 684 # write_props_file 685 #===========================================================================
686 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
687 """Write the props.inc file for MadEvent. Needs input from 688 write_configs_file.""" 689 690 lines = [] 691 692 particle_dict = matrix_element.get('processes')[0].get('model').\ 693 get('particle_dict') 694 695 for iconf, configs in enumerate(s_and_t_channels): 696 for vertex in configs[0] + configs[1][:-1]: 697 leg = vertex.get('legs')[-1] 698 if leg.get('id') not in particle_dict: 699 # Fake propagator used in multiparticle vertices 700 mass = 'zero' 701 width = 'zero' 702 pow_part = 0 703 else: 704 particle = particle_dict[leg.get('id')] 705 # Get mass 706 if particle.get('mass').lower() == 'zero': 707 mass = particle.get('mass') 708 else: 709 mass = "abs(%s)" % particle.get('mass') 710 # Get width 711 if particle.get('width').lower() == 'zero': 712 width = particle.get('width') 713 else: 714 width = "abs(%s)" % particle.get('width') 715 716 pow_part = 1 + int(particle.is_boson()) 717 718 lines.append("prmass(%d,%d) = %s" % \ 719 (leg.get('number'), iconf + 1, mass)) 720 lines.append("prwidth(%d,%d) = %s" % \ 721 (leg.get('number'), iconf + 1, width)) 722 lines.append("pow(%d,%d) = %d" % \ 723 (leg.get('number'), iconf + 1, pow_part)) 724 725 # Write the file 726 writer.writelines(lines) 727 728 return True
729 730 731 732 733 734 #=========================================================================== 735 # Routines to output UFO models in MG4 format 736 #=========================================================================== 737
738 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 739 wanted_couplings = []):
740 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 741 742 # Make sure aloha is in quadruple precision if needed 743 old_aloha_mp=aloha.mp_precision 744 aloha.mp_precision=self.opt['mp'] 745 746 # create the MODEL 747 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 748 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 749 model_builder.build(wanted_couplings) 750 751 # Backup the loop mode, because it can be changed in what follows. 752 old_loop_mode = aloha.loop_mode 753 754 # Create the aloha model or use the existing one (for loop exporters 755 # this is useful as the aloha model will be used again in the 756 # LoopHelasMatrixElements generated). We do not save the model generated 757 # here if it didn't exist already because it would be a waste of 758 # memory for tree level applications since aloha is only needed at the 759 # time of creating the aloha fortran subroutines. 760 if hasattr(self, 'aloha_model'): 761 aloha_model = self.aloha_model 762 else: 763 aloha_model = create_aloha.AbstractALOHAModel(model.get('name')) 764 aloha_model.add_Lorentz_object(model.get('lorentz')) 765 766 # Compute the subroutines 767 if wanted_lorentz: 768 aloha_model.compute_subset(wanted_lorentz) 769 else: 770 aloha_model.compute_all(save=False) 771 772 # Write them out 773 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 774 aloha_model.write(write_dir, 'Fortran') 775 776 # Revert the original aloha loop mode 777 aloha.loop_mode = old_loop_mode 778 779 #copy Helas Template 780 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 781 if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]): 782 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', write_dir+'/aloha_functions.f') 783 aloha_model.loop_mode = False 784 else: 785 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', write_dir+'/aloha_functions.f') 786 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 787 788 # Make final link in the Process 789 self.make_model_symbolic_link() 790 791 # Re-establish original aloha mode 792 aloha.mp_precision=old_aloha_mp
    #===========================================================================
    # Helper functions
    #===========================================================================
    def get_mg5_info_lines(self):
        """Return info lines for MG5, suitable to place at beginning of
        Fortran files"""

        info = misc.get_pkg_info()
        info_lines = ""
        if info and info.has_key('version') and info.has_key('date'):
            info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \
                         (info['version'], info['date'])
            info_lines = info_lines + \
                         "# By the MadGraph5_aMC@NLO Development Team\n" + \
                         "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"
        else:
            info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \
                         "# By the MadGraph5_aMC@NLO Development Team\n" + \
                         "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"

        return info_lines

    def get_process_info_lines(self, matrix_element):
        """Return info lines describing the processes for this matrix element"""

        return "\n".join(["C " + process.nice_string().replace('\n', '\nC * ') \
                          for process in matrix_element.get('processes')])

    def get_helicity_lines(self, matrix_element, array_name='NHEL'):
        """Return the Helicity matrix definition lines for this matrix element"""

        helicity_line_list = []
        i = 0
        for helicities in matrix_element.get_helicity_matrix():
            i = i + 1
            int_list = [i, len(helicities)]
            int_list.extend(helicities)
            helicity_line_list.append(\
                ("DATA (" + array_name + "(I,%4r),I=1,%d) /" + \
                 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list))

        return "\n".join(helicity_line_list)
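
    # Illustrative sketch (not part of the original module): the DATA lines
    # produced above for two helicity combinations of a 4-particle process,
    # using the same string formatting.
    def _example_helicity_lines(helicity_matrix, array_name='NHEL'):
        lines = []
        for i, helicities in enumerate(helicity_matrix, start=1):
            int_list = [i, len(helicities)] + list(helicities)
            lines.append(("DATA (" + array_name + "(I,%4r),I=1,%d) /" +
                          ",".join(['%2r'] * len(helicities)) + "/")
                         % tuple(int_list))
        return lines
    # _example_helicity_lines([(-1, -1, -1, 1), (-1, -1, 1, -1)]) gives
    # ['DATA (NHEL(I,   1),I=1,4) /-1,-1,-1, 1/',
    #  'DATA (NHEL(I,   2),I=1,4) /-1,-1, 1,-1/']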

    def get_ic_line(self, matrix_element):
        """Return the IC definition line coming after helicities, required by
        switchmom in madevent"""

        nexternal = matrix_element.get_nexternal_ninitial()[0]
        int_list = range(1, nexternal + 1)

        return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal,
                                               ",".join([str(i) for \
                                                         i in int_list]))
848
849 - def set_chosen_SO_index(self, process, squared_orders):
850 """ From the squared order constraints set by the user, this function 851 finds what indices of the squared_orders list the user intends to pick. 852 It returns this as a string of comma-separated successive '.true.' or 853 '.false.' for each index.""" 854 855 user_squared_orders = process.get('squared_orders') 856 split_orders = process.get('split_orders') 857 858 if len(user_squared_orders)==0: 859 return ','.join(['.true.']*len(squared_orders)) 860 861 res = [] 862 for sqsos in squared_orders: 863 is_a_match = True 864 for user_sqso, value in user_squared_orders.items(): 865 if (process.get_squared_order_type(user_sqso) =='==' and \ 866 value!=sqsos[split_orders.index(user_sqso)]) or \ 867 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 868 value<sqsos[split_orders.index(user_sqso)]) or \ 869 (process.get_squared_order_type(user_sqso) == '>' and \ 870 value>=sqsos[split_orders.index(user_sqso)]): 871 is_a_match = False 872 break 873 res.append('.true.' if is_a_match else '.false.') 874 875 return ','.join(res)
876
877 - def get_split_orders_lines(self, orders, array_name, n=5):
878 """ Return the split orders definition as defined in the list orders and 879 for the name of the array 'array_name'. Split rows in chunks of size n.""" 880 881 ret_list = [] 882 for index, order in enumerate(orders): 883 for k in xrange(0, len(order), n): 884 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 885 (array_name,index + 1, k + 1, min(k + n, len(order)), 886 ','.join(["%5r" % i for i in order[k:k + n]]))) 887 return ret_list

    def format_integer_list(self, list, name, n=5):
        """Return an initialization of the python list in argument following
        the fortran syntax using the data keyword assignment, filling an array
        of name 'name'. It splits rows in chunks of size n."""

        ret_list = []
        for k in xrange(0, len(list), n):
            ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \
                            (name, k + 1, min(k + n, len(list)),
                             ','.join(["%5r" % i for i in list[k:k + n]])))
        return ret_list
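
    # Illustrative sketch (not part of the original module): a standalone
    # rendering of the same chunked DATA layout, plus the output it gives for
    # a 12-element array named 'CONF' split in chunks of 5.
    def _example_chunked_data_lines(values, name, n=5):
        values = list(values)
        lines = []
        for k in range(0, len(values), n):
            lines.append("DATA (%s(i),i=%3r,%3r) /%s/" %
                         (name, k + 1, min(k + n, len(values)),
                          ','.join(["%5r" % v for v in values[k:k + n]])))
        return lines
    # _example_chunked_data_lines(range(1, 13), 'CONF') gives
    # ['DATA (CONF(i),i=  1,  5) /    1,    2,    3,    4,    5/',
    #  'DATA (CONF(i),i=  6, 10) /    6,    7,    8,    9,   10/',
    #  'DATA (CONF(i),i= 11, 12) /   11,   12/']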
900
901 - def get_color_data_lines(self, matrix_element, n=6):
902 """Return the color matrix definition lines for this matrix element. Split 903 rows in chunks of size n.""" 904 905 if not matrix_element.get('color_matrix'): 906 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 907 else: 908 ret_list = [] 909 my_cs = color.ColorString() 910 for index, denominator in \ 911 enumerate(matrix_element.get('color_matrix').\ 912 get_line_denominators()): 913 # First write the common denominator for this color matrix line 914 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 915 # Then write the numerators for the matrix elements 916 num_list = matrix_element.get('color_matrix').\ 917 get_line_numerators(index, denominator) 918 919 for k in xrange(0, len(num_list), n): 920 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 921 (index + 1, k + 1, min(k + n, len(num_list)), 922 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 923 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 924 ret_list.append("C %s" % repr(my_cs)) 925 return ret_list
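
    # Illustrative sketch (not part of the original module): the layout of
    # the Denom/CF DATA statements built above, shown for a toy 2x2 colour
    # matrix (the numbers are placeholders, not a physical matrix), without
    # the trailing 'C ...' colour-string comments.
    def _example_color_data_lines(denominators, numerator_rows, n=6):
        lines = []
        for index, denom in enumerate(denominators):
            lines.append("DATA Denom(%i)/%i/" % (index + 1, denom))
            nums = numerator_rows[index]
            for k in range(0, len(nums), n):
                lines.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" %
                             (index + 1, k + 1, min(k + n, len(nums)),
                              ','.join(["%5r" % i for i in nums[k:k + n]])))
        return lines
    # _example_color_data_lines([3, 3], [[16, -2], [-2, 16]]) gives
    # ['DATA Denom(1)/3/', 'DATA (CF(i,  1),i=  1,  2) /   16,   -2/',
    #  'DATA Denom(2)/3/', 'DATA (CF(i,  2),i=  1,  2) /   -2,   16/']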

    def get_den_factor_line(self, matrix_element):
        """Return the denominator factor line for this matrix element"""

        return "DATA IDEN/%2r/" % \
               matrix_element.get_denominator_factor()
933
934 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
935 """Return the ICOLAMP matrix, showing which JAMPs contribute to 936 which configs (diagrams).""" 937 938 ret_list = [] 939 940 booldict = {False: ".false.", True: ".true."} 941 942 if not matrix_element.get('color_basis'): 943 # No color, so only one color factor. Simply write a ".true." 944 # for each config (i.e., each diagram with only 3 particle 945 # vertices 946 configs = len(mapconfigs) 947 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 948 (num_matrix_element, configs, 949 ','.join([".true." for i in range(configs)]))) 950 return ret_list 951 952 # There is a color basis - create a list showing which JAMPs have 953 # contributions to which configs 954 955 # Only want to include leading color flows, so find max_Nc 956 color_basis = matrix_element.get('color_basis') 957 958 # We don't want to include the power of Nc's which come from the potential 959 # loop color trace (i.e. in the case of a closed fermion loop for example) 960 # so we subtract it here when computing max_Nc 961 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 962 color_basis.values()],[])) 963 964 # Crate dictionary between diagram number and JAMP number 965 diag_jamp = {} 966 for ijamp, col_basis_elem in \ 967 enumerate(sorted(matrix_element.get('color_basis').keys())): 968 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 969 # Only use color flows with Nc == max_Nc. However, notice that 970 # we don't want to include the Nc power coming from the loop 971 # in this counting. 972 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 973 diag_num = diag_tuple[0] + 1 974 # Add this JAMP number to this diag_num 975 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 976 [ijamp+1] 977 978 colamps = ijamp + 1 979 for iconfig, num_diag in enumerate(mapconfigs): 980 if num_diag == 0: 981 continue 982 983 # List of True or False 984 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 985 # Add line 986 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 987 (iconfig+1, num_matrix_element, colamps, 988 ','.join(["%s" % booldict[b] for b in \ 989 bool_list]))) 990 991 return ret_list
992
993 - def get_amp2_lines(self, matrix_element, config_map = []):
994 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 995 996 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 997 # Get minimum legs in a vertex 998 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 999 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1000 minvert = min(vert_list) if vert_list!=[] else 0 1001 1002 ret_lines = [] 1003 if config_map: 1004 # In this case, we need to sum up all amplitudes that have 1005 # identical topologies, as given by the config_map (which 1006 # gives the topology/config for each of the diagrams 1007 diagrams = matrix_element.get('diagrams') 1008 # Combine the diagrams with identical topologies 1009 config_to_diag_dict = {} 1010 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1011 if config_map[idiag] == 0: 1012 continue 1013 try: 1014 config_to_diag_dict[config_map[idiag]].append(idiag) 1015 except KeyError: 1016 config_to_diag_dict[config_map[idiag]] = [idiag] 1017 # Write out the AMP2s summing squares of amplitudes belonging 1018 # to eiher the same diagram or different diagrams with 1019 # identical propagator properties. Note that we need to use 1020 # AMP2 number corresponding to the first diagram number used 1021 # for that AMP2. 1022 for config in sorted(config_to_diag_dict.keys()): 1023 1024 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1025 {"num": (config_to_diag_dict[config][0] + 1)} 1026 1027 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1028 sum([diagrams[idiag].get('amplitudes') for \ 1029 idiag in config_to_diag_dict[config]], [])]) 1030 1031 # Not using \sum |M|^2 anymore since this creates troubles 1032 # when ckm is not diagonal due to the JIM mechanism. 1033 if '+' in amp: 1034 line += "(%s)*dconjg(%s)" % (amp, amp) 1035 else: 1036 line += "%s*dconjg(%s)" % (amp, amp) 1037 ret_lines.append(line) 1038 else: 1039 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1040 # Ignore any diagrams with 4-particle vertices. 1041 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1042 continue 1043 # Now write out the expression for AMP2, meaning the sum of 1044 # squared amplitudes belonging to the same diagram 1045 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1046 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1047 {"num": a.get('number')} for a in \ 1048 diag.get('amplitudes')]) 1049 ret_lines.append(line) 1050 1051 return ret_lines
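
    # Illustrative sketch (not part of the original module): the config_map
    # grouping used above, in isolation. Diagrams mapped to config 0 are
    # dropped; the others are grouped by their config number.
    def _example_group_diagrams_by_config(config_map):
        config_to_diag = {}
        for idiag, config in enumerate(config_map):
            if config == 0:
                continue
            config_to_diag.setdefault(config, []).append(idiag)
        return config_to_diag
    # _example_group_diagrams_by_config([1, 0, 1, 2]) gives {1: [0, 2], 2: [3]}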
1052 1053 #=========================================================================== 1054 # Returns the data statements initializing the coeffictients for the JAMP 1055 # decomposition. It is used when the JAMP initialization is decided to be 1056 # done through big arrays containing the projection coefficients. 1057 #===========================================================================
1058 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1059 n=50, Nc_value=3):
1060 """This functions return the lines defining the DATA statement setting 1061 the coefficients building the JAMPS out of the AMPS. Split rows in 1062 bunches of size n. 1063 One can specify the color_basis from which the color amplitudes originates 1064 so that there are commentaries telling what color structure each JAMP 1065 corresponds to.""" 1066 1067 if(not isinstance(color_amplitudes,list) or 1068 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1069 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1070 1071 res_list = [] 1072 my_cs = color.ColorString() 1073 for index, coeff_list in enumerate(color_amplitudes): 1074 # Create the list of the complete numerical coefficient. 1075 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1076 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1077 coefficient in coeff_list] 1078 # Create the list of the numbers of the contributing amplitudes. 1079 # Mutliply by -1 for those which have an imaginary coefficient. 1080 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1081 for coefficient in coeff_list] 1082 # Find the common denominator. 1083 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1084 num_list=[(coefficient*commondenom).numerator \ 1085 for coefficient in coefs_list] 1086 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1087 index+1,len(num_list))) 1088 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1089 index+1,commondenom)) 1090 if color_basis: 1091 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1092 res_list.append("C %s" % repr(my_cs)) 1093 for k in xrange(0, len(num_list), n): 1094 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1095 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1096 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1097 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1098 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1099 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1100 pass 1101 return res_list
1102 1103
1104 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1105 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1106 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1107 defined as a matrix element or directly as a color_amplitudes dictionary. 1108 The split_order_amps specifies the group of amplitudes sharing the same 1109 amplitude orders which should be put in together in a given set of JAMPS. 1110 The split_order_amps is supposed to have the format of the second output 1111 of the function get_split_orders_mapping function in helas_objects.py. 1112 The split_order_names is optional (it should correspond to the process 1113 'split_orders' attribute) and only present to provide comments in the 1114 JAMP definitions in the code.""" 1115 1116 # Let the user call get_JAMP_lines_split_order directly from a 1117 error_msg="Malformed '%s' argument passed to the "+\ 1118 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1119 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1120 color_amplitudes=col_amps.get_color_amplitudes() 1121 elif(isinstance(col_amps,list)): 1122 if(col_amps and isinstance(col_amps[0],list)): 1123 color_amplitudes=col_amps 1124 else: 1125 raise MadGraph5Error, error_msg%'col_amps' 1126 else: 1127 raise MadGraph5Error, error_msg%'col_amps' 1128 1129 # Verify the sanity of the split_order_amps and split_order_names args 1130 if isinstance(split_order_amps,list): 1131 for elem in split_order_amps: 1132 if len(elem)!=2: 1133 raise MadGraph5Error, error_msg%'split_order_amps' 1134 # Check the first element of the two lists to make sure they are 1135 # integers, although in principle they should all be integers. 1136 if not isinstance(elem[0],tuple) or \ 1137 not isinstance(elem[1],tuple) or \ 1138 not isinstance(elem[0][0],int) or \ 1139 not isinstance(elem[1][0],int): 1140 raise MadGraph5Error, error_msg%'split_order_amps' 1141 else: 1142 raise MadGraph5Error, error_msg%'split_order_amps' 1143 1144 if not split_order_names is None: 1145 if isinstance(split_order_names,list): 1146 # Should specify the same number of names as there are elements 1147 # in the key of the split_order_amps. 1148 if len(split_order_names)!=len(split_order_amps[0][0]): 1149 raise MadGraph5Error, error_msg%'split_order_names' 1150 # Check the first element of the list to be a string 1151 if not isinstance(split_order_names[0],str): 1152 raise MadGraph5Error, error_msg%'split_order_names' 1153 else: 1154 raise MadGraph5Error, error_msg%'split_order_names' 1155 1156 # Now scan all contributing orders to be individually computed and 1157 # construct the list of color_amplitudes for JAMP to be constructed 1158 # accordingly. 1159 res_list=[] 1160 for i, amp_order in enumerate(split_order_amps): 1161 col_amps_order = [] 1162 for jamp in color_amplitudes: 1163 col_amps_order.append(filter(lambda col_amp: 1164 col_amp[1] in amp_order[1],jamp)) 1165 if split_order_names: 1166 res_list.append('C JAMPs contributing to orders '+' '.join( 1167 ['%s=%i'%order for order in zip(split_order_names, 1168 amp_order[0])])) 1169 if self.opt['export_format'] in ['madloop_matchbox']: 1170 res_list.extend(self.get_JAMP_lines(col_amps_order, 1171 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1172 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1173 else: 1174 res_list.extend(self.get_JAMP_lines(col_amps_order, 1175 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1176 1177 1178 1179 1180 1181 1182 1183 1184 return res_list
1185 1186
1187 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1188 split=-1):
1189 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1190 defined as a matrix element or directly as a color_amplitudes dictionary, 1191 Jamp_formatLC should be define to allow to add LeadingColor computation 1192 (usefull for MatchBox) 1193 The split argument defines how the JAMP lines should be split in order 1194 not to be too long.""" 1195 1196 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1197 # the color amplitudes lists. 1198 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1199 color_amplitudes=col_amps.get_color_amplitudes() 1200 elif(isinstance(col_amps,list)): 1201 if(col_amps and isinstance(col_amps[0],list)): 1202 color_amplitudes=col_amps 1203 else: 1204 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1205 else: 1206 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1207 1208 1209 res_list = [] 1210 for i, coeff_list in enumerate(color_amplitudes): 1211 # It might happen that coeff_list is empty if this function was 1212 # called from get_JAMP_lines_split_order (i.e. if some color flow 1213 # does not contribute at all for a given order). 1214 # In this case we simply set it to 0. 1215 if coeff_list==[]: 1216 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1217 continue 1218 # Break the JAMP definition into 'n=split' pieces to avoid having 1219 # arbitrarly long lines. 1220 first=True 1221 n = (len(coeff_list)+1 if split<=0 else split) 1222 while coeff_list!=[]: 1223 coefs=coeff_list[:n] 1224 coeff_list=coeff_list[n:] 1225 res = ((JAMP_format+"=") % str(i + 1)) + \ 1226 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1227 1228 first=False 1229 # Optimization: if all contributions to that color basis element have 1230 # the same coefficient (up to a sign), put it in front 1231 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1232 common_factor = False 1233 diff_fracs = list(set(list_fracs)) 1234 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1235 common_factor = True 1236 global_factor = diff_fracs[0] 1237 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1238 1239 # loop for JAMP 1240 for (coefficient, amp_number) in coefs: 1241 if not coefficient: 1242 continue 1243 if common_factor: 1244 res = (res + "%s" + AMP_format) % \ 1245 (self.coeff(coefficient[0], 1246 coefficient[1] / abs(coefficient[1]), 1247 coefficient[2], 1248 coefficient[3]), 1249 str(amp_number)) 1250 else: 1251 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1252 coefficient[1], 1253 coefficient[2], 1254 coefficient[3]), 1255 str(amp_number)) 1256 1257 if common_factor: 1258 res = res + ')' 1259 1260 res_list.append(res) 1261 1262 return res_list
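
    # Illustrative sketch (not part of the original module): the common-factor
    # optimisation above in isolation. If every contribution to a JAMP carries
    # the same |fraction| (different from 1), that fraction is pulled out in
    # front instead of being repeated on each AMP term, giving e.g.
    # JAMP(1)=+1D0/3D0*(...sign-only AMP terms...).
    def _example_has_common_factor(fracs):
        distinct = set(abs(frac) for frac in fracs)
        return len(distinct) == 1 and abs(list(distinct)[0]) != 1
    # _example_has_common_factor([fractions.Fraction(1, 3),
    #                             fractions.Fraction(-1, 3)]) is True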
1263
1264 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1265 """Generate the PDF lines for the auto_dsig.f file""" 1266 1267 processes = matrix_element.get('processes') 1268 model = processes[0].get('model') 1269 1270 pdf_definition_lines = "" 1271 pdf_data_lines = "" 1272 pdf_lines = "" 1273 1274 if ninitial == 1: 1275 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1276 for i, proc in enumerate(processes): 1277 process_line = proc.base_string() 1278 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1279 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1280 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1281 else: 1282 # Pick out all initial state particles for the two beams 1283 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1284 p in processes]))), 1285 sorted(list(set([p.get_initial_pdg(2) for \ 1286 p in processes])))] 1287 1288 # Prepare all variable names 1289 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1290 sum(initial_states,[])]) 1291 for key,val in pdf_codes.items(): 1292 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1293 1294 # Set conversion from PDG code to number used in PDF calls 1295 pdgtopdf = {21: 0, 22: 7} 1296 1297 # Fill in missing entries of pdgtopdf 1298 for pdg in sum(initial_states,[]): 1299 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 1300 pdgtopdf[pdg] = pdg 1301 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 1302 # If any particle has pdg code 7, we need to use something else 1303 pdgtopdf[pdg] = 6000000 + pdg 1304 1305 # Get PDF variable declarations for all initial states 1306 for i in [0,1]: 1307 pdf_definition_lines += "DOUBLE PRECISION " + \ 1308 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1309 for pdg in \ 1310 initial_states[i]]) + \ 1311 "\n" 1312 1313 # Get PDF data lines for all initial states 1314 for i in [0,1]: 1315 pdf_data_lines += "DATA " + \ 1316 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1317 for pdg in initial_states[i]]) + \ 1318 "/%d*1D0/" % len(initial_states[i]) + \ 1319 "\n" 1320 1321 # Get PDF lines for all different initial states 1322 for i, init_states in enumerate(initial_states): 1323 if subproc_group: 1324 pdf_lines = pdf_lines + \ 1325 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1326 % (i + 1, i + 1) 1327 else: 1328 pdf_lines = pdf_lines + \ 1329 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1330 % (i + 1, i + 1) 1331 1332 for initial_state in init_states: 1333 if initial_state in pdf_codes.keys(): 1334 if subproc_group: 1335 pdf_lines = pdf_lines + \ 1336 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 1337 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1338 (pdf_codes[initial_state], 1339 i + 1, i + 1, pdgtopdf[initial_state], 1340 i + 1, i + 1) 1341 else: 1342 pdf_lines = pdf_lines + \ 1343 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 1344 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1345 (pdf_codes[initial_state], 1346 i + 1, i + 1, pdgtopdf[initial_state], 1347 i + 1, i + 1) 1348 pdf_lines = pdf_lines + "ENDIF\n" 1349 1350 # Add up PDFs for the different initial state particles 1351 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1352 for proc in processes: 1353 process_line = proc.base_string() 1354 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1355 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1356 for ibeam in [1, 2]: 1357 initial_state = proc.get_initial_pdg(ibeam) 1358 if initial_state in pdf_codes.keys(): 1359 pdf_lines = pdf_lines + "%s%d*" % \ 1360 (pdf_codes[initial_state], ibeam) 1361 else: 1362 pdf_lines = pdf_lines + "1d0*" 1363 # Remove last "*" from pdf_lines 1364 pdf_lines = pdf_lines[:-1] + "\n" 1365 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1366 1367 # Remove last line break from the return variables 1368 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1369 1370 #=========================================================================== 1371 # write_props_file 1372 #===========================================================================
1373 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1374 """Write the props.inc file for MadEvent. Needs input from 1375 write_configs_file.""" 1376 1377 lines = [] 1378 1379 particle_dict = matrix_element.get('processes')[0].get('model').\ 1380 get('particle_dict') 1381 1382 for iconf, configs in enumerate(s_and_t_channels): 1383 for vertex in configs[0] + configs[1][:-1]: 1384 leg = vertex.get('legs')[-1] 1385 if leg.get('id') not in particle_dict: 1386 # Fake propagator used in multiparticle vertices 1387 mass = 'zero' 1388 width = 'zero' 1389 pow_part = 0 1390 else: 1391 particle = particle_dict[leg.get('id')] 1392 # Get mass 1393 if particle.get('mass').lower() == 'zero': 1394 mass = particle.get('mass') 1395 else: 1396 mass = "abs(%s)" % particle.get('mass') 1397 # Get width 1398 if particle.get('width').lower() == 'zero': 1399 width = particle.get('width') 1400 else: 1401 width = "abs(%s)" % particle.get('width') 1402 1403 pow_part = 1 + int(particle.is_boson()) 1404 1405 lines.append("prmass(%d,%d) = %s" % \ 1406 (leg.get('number'), iconf + 1, mass)) 1407 lines.append("prwidth(%d,%d) = %s" % \ 1408 (leg.get('number'), iconf + 1, width)) 1409 lines.append("pow(%d,%d) = %d" % \ 1410 (leg.get('number'), iconf + 1, pow_part)) 1411 1412 # Write the file 1413 writer.writelines(lines) 1414 1415 return True
1416 1417 #=========================================================================== 1418 # write_configs_file 1419 #===========================================================================
1420 - def write_configs_file(self, writer, matrix_element):
1421 """Write the configs.inc file for MadEvent""" 1422 1423 # Extract number of external particles 1424 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1425 1426 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1427 mapconfigs = [c[0] for c in configs] 1428 model = matrix_element.get('processes')[0].get('model') 1429 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1430 [[c[1]] for c in configs], 1431 mapconfigs, 1432 nexternal, ninitial, 1433 model)
1434 1435 #=========================================================================== 1436 # write_configs_file_from_diagrams 1437 #===========================================================================
1438 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1439 nexternal, ninitial, model):
1440 """Write the actual configs.inc file. 1441 1442 configs is the diagrams corresponding to configs (each 1443 diagrams is a list of corresponding diagrams for all 1444 subprocesses, with None if there is no corresponding diagrams 1445 for a given process). 1446 mapconfigs gives the diagram number for each config. 1447 1448 For s-channels, we need to output one PDG for each subprocess in 1449 the subprocess group, in order to be able to pick the right 1450 one for multiprocesses.""" 1451 1452 lines = [] 1453 1454 s_and_t_channels = [] 1455 1456 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1457 for config in configs if [d for d in config if d][0].\ 1458 get_vertex_leg_numbers()!=[]] 1459 minvert = min(vert_list) if vert_list!=[] else 0 1460 1461 # Number of subprocesses 1462 nsubprocs = len(configs[0]) 1463 1464 nconfigs = 0 1465 1466 new_pdg = model.get_first_non_pdg() 1467 1468 for iconfig, helas_diags in enumerate(configs): 1469 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1470 [0].get_vertex_leg_numbers()) : 1471 # Only 3-vertices allowed in configs.inc except for vertices 1472 # which originate from a shrunk loop. 1473 continue 1474 nconfigs += 1 1475 1476 # Need s- and t-channels for all subprocesses, including 1477 # those that don't contribute to this config 1478 empty_verts = [] 1479 stchannels = [] 1480 for h in helas_diags: 1481 if h: 1482 # get_s_and_t_channels gives vertices starting from 1483 # final state external particles and working inwards 1484 stchannels.append(h.get('amplitudes')[0].\ 1485 get_s_and_t_channels(ninitial, model, new_pdg)) 1486 else: 1487 stchannels.append((empty_verts, None)) 1488 1489 # For t-channels, just need the first non-empty one 1490 tchannels = [t for s,t in stchannels if t != None][0] 1491 1492 # For s_and_t_channels (to be used later) use only first config 1493 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1494 tchannels]) 1495 1496 # Make sure empty_verts is same length as real vertices 1497 if any([s for s,t in stchannels]): 1498 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1499 1500 # Reorganize s-channel vertices to get a list of all 1501 # subprocesses for each vertex 1502 schannels = zip(*[s for s,t in stchannels]) 1503 else: 1504 schannels = [] 1505 1506 allchannels = schannels 1507 if len(tchannels) > 1: 1508 # Write out tchannels only if there are any non-trivial ones 1509 allchannels = schannels + tchannels 1510 1511 # Write out propagators for s-channel and t-channel vertices 1512 1513 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1514 # Correspondance between the config and the diagram = amp2 1515 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1516 mapconfigs[iconfig])) 1517 1518 for verts in allchannels: 1519 if verts in schannels: 1520 vert = [v for v in verts if v][0] 1521 else: 1522 vert = verts 1523 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1524 last_leg = vert.get('legs')[-1] 1525 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1526 (last_leg.get('number'), nconfigs, len(daughters), 1527 ",".join([str(d) for d in daughters]))) 1528 if verts in schannels: 1529 pdgs = [] 1530 for v in verts: 1531 if v: 1532 pdgs.append(v.get('legs')[-1].get('id')) 1533 else: 1534 pdgs.append(0) 1535 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1536 (last_leg.get('number'), nconfigs, nsubprocs, 1537 ",".join([str(d) for d in pdgs]))) 1538 lines.append("data tprid(%d,%d)/0/" % \ 1539 (last_leg.get('number'), 
nconfigs)) 1540 elif verts in tchannels[:-1]: 1541 lines.append("data tprid(%d,%d)/%d/" % \ 1542 (last_leg.get('number'), nconfigs, 1543 abs(last_leg.get('id')))) 1544 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1545 (last_leg.get('number'), nconfigs, nsubprocs, 1546 ",".join(['0'] * nsubprocs))) 1547 1548 # Write out number of configs 1549 lines.append("# Number of configs") 1550 lines.append("data mapconfig(0)/%d/" % nconfigs) 1551 1552 # Write the file 1553 writer.writelines(lines) 1554 1555 return s_and_t_channels
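# Illustrative sketch (not part of the generated configs.inc): how the
# zip(*...) call above regroups the per-subprocess s-channel vertex lists
# into one tuple per vertex, padding with None for subprocesses that have
# no diagram for this config.  Vertex objects are replaced by plain strings.
demo_stchannels = [(['v1_proc1', 'v2_proc1'], 't_proc1'),   # subprocess 1
                   ([None, None],             None),        # subprocess 2: no diagram
                   (['v1_proc3', 'v2_proc3'], 't_proc3')]   # subprocess 3
demo_schannels = zip(*[s for s, t in demo_stchannels])
# demo_schannels == [('v1_proc1', None, 'v1_proc3'), ('v2_proc1', None, 'v2_proc3')]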
1556 1557 #=========================================================================== 1558 # Global helper methods 1559 #=========================================================================== 1560
1561 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1562 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1563 1564 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1565 1566 if total_coeff == 1: 1567 if is_imaginary: 1568 return '+imag1*' 1569 else: 1570 return '+' 1571 elif total_coeff == -1: 1572 if is_imaginary: 1573 return '-imag1*' 1574 else: 1575 return '-' 1576 1577 res_str = '%+iD0' % total_coeff.numerator 1578 1579 if total_coeff.denominator != 1: 1580 # Check if total_coeff is an integer 1581 res_str = res_str + '/%iD0' % total_coeff.denominator 1582 1583 if is_imaginary: 1584 res_str = res_str + '*imag1' 1585 1586 return res_str + '*'
1587 1588
1589 - def set_fortran_compiler(self, default_compiler, force=False):
1590 """Set compiler based on what's available on the system""" 1591 1592 # Check for compiler 1593 if default_compiler and misc.which(default_compiler): 1594 compiler = default_compiler 1595 elif misc.which('gfortran'): 1596 compiler = 'gfortran' 1597 elif misc.which('g77'): 1598 compiler = 'g77' 1599 elif misc.which('f77'): 1600 compiler = 'f77' 1601 elif default_compiler: 1602 logger.warning('No Fortran Compiler detected! Please install one') 1603 compiler = default_compiler # maybe misc fail so try with it 1604 else: 1605 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1606 logger.info('Use Fortran compiler ' + compiler) 1607 self.replace_make_opt_f_compiler(compiler) 1608 # Replace also for Template but not for cluster 1609 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1610 self.replace_make_opt_f_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1611 1612 return compiler
1613 1614 # an alias for backward compatibility 1615 set_compiler = set_fortran_compiler 1616 1617
1618 - def set_cpp_compiler(self, default_compiler, force=False):
1619 """Set compiler based on what's available on the system""" 1620 1621 # Check for compiler 1622 if default_compiler and misc.which(default_compiler): 1623 compiler = default_compiler 1624 elif misc.which('g++'): 1625 compiler = 'g++' 1626 elif misc.which('c++'): 1627 compiler = 'c++' 1628 elif misc.which('clang'): 1629 compiler = 'clang' 1630 elif default_compiler: 1631 logger.warning('No c++ Compiler detected! Please install one') 1632 compiler = default_compiler # maybe misc fail so try with it 1633 else: 1634 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1635 logger.info('Use c++ compiler ' + compiler) 1636 self.replace_make_opt_c_compiler(compiler) 1637 # Replace also for Template but not for cluster 1638 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1639 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1640 1641 return compiler
1642 1643
1644 - def replace_make_opt_f_compiler(self, compiler, root_dir = ""):
1645 """Set FC=compiler in Source/make_opts""" 1646 1647 mod = False #avoid to rewrite the file if not needed 1648 if not root_dir: 1649 root_dir = self.dir_path 1650 1651 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1652 lines = open(make_opts).read().split('\n') 1653 FC_re = re.compile('^(\s*)FC\s*=\s*(.+)\s*$') 1654 for iline, line in enumerate(lines): 1655 FC_result = FC_re.match(line) 1656 if FC_result: 1657 if compiler != FC_result.group(2): 1658 mod = True 1659 lines[iline] = FC_result.group(1) + "FC=" + compiler 1660 if not mod: 1661 return 1662 try: 1663 outfile = open(make_opts, 'w') 1664 except IOError: 1665 if root_dir == self.dir_path: 1666 logger.info('Fail to set compiler. Trying to continue anyway.') 1667 return 1668 outfile.write('\n'.join(lines))
1669 1670
1671 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1672 """Set CXX=compiler in Source/make_opts. 1673 The version is also checked, in order to set some extra flags 1674 if the compiler is clang (on MACOS)""" 1675 1676 1677 p = misc.Popen([compiler, '--version'], stdout=subprocess.PIPE, 1678 stderr=subprocess.PIPE) 1679 output, error = p.communicate() 1680 is_clang = 'LLVM' in output 1681 1682 mod = False #avoid to rewrite the file if not needed 1683 if not root_dir: 1684 root_dir = self.dir_path 1685 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1686 lines = open(make_opts).read().split('\n') 1687 CC_re = re.compile('^(\s*)CXX\s*=\s*(.+)\s*$') 1688 for iline, line in enumerate(lines): 1689 CC_result = CC_re.match(line) 1690 if CC_result: 1691 if compiler != CC_result.group(2): 1692 mod = True 1693 lines[iline] = CC_result.group(1) + "CXX=" + compiler 1694 1695 if is_clang: 1696 CFLAGS_re=re.compile('^(\s*)CFLAGS\s*=\s*(.+)\s*$') 1697 CXXFLAGS_re=re.compile('^(\s*)CXXFLAGS\s*=\s*(.+)\s*$') 1698 flags= '-O -stdlib=libstdc++ -mmacosx-version-min=10.6' 1699 for iline, line in enumerate(lines): 1700 CF_result = CFLAGS_re.match(line) 1701 CXXF_result = CXXFLAGS_re.match(line) 1702 if CF_result: 1703 lines[iline] = CF_result.group(1) + "CFLAGS= " + flags 1704 if CXXF_result: 1705 lines[iline] = CXXF_result.group(1) + "CXXFLAGS= " + flags 1706 if not mod: 1707 return 1708 try: 1709 outfile = open(make_opts, 'w') 1710 except IOError: 1711 if root_dir == self.dir_path: 1712 logger.info('Fail to set compiler. Trying to continue anyway.') 1713 return 1714 outfile.write('\n'.join(lines))
1715
1716 #=============================================================================== 1717 # ProcessExporterFortranSA 1718 #=============================================================================== 1719 -class ProcessExporterFortranSA(ProcessExporterFortran):
1720 """Class to take care of exporting a set of matrix elements to 1721 MadGraph v4 StandAlone format.""" 1722 1723 matrix_template = "matrix_standalone_v4.inc" 1724
1725 - def __init__(self, *args, **opts):
1726 """add the format information compare to standard init""" 1727 1728 if 'format' in opts: 1729 self.format = opts['format'] 1730 del opts['format'] 1731 else: 1732 self.format = 'standalone' 1733 ProcessExporterFortran.__init__(self, *args, **opts)
1734
1735 - def copy_v4template(self, modelname):
1736 """Additional actions needed for setup of Template 1737 """ 1738 1739 #First copy the full template tree if dir_path doesn't exit 1740 if os.path.isdir(self.dir_path): 1741 return 1742 1743 logger.info('initialize a new standalone directory: %s' % \ 1744 os.path.basename(self.dir_path)) 1745 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1746 1747 # Create the directory structure 1748 os.mkdir(self.dir_path) 1749 os.mkdir(pjoin(self.dir_path, 'Source')) 1750 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1751 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1752 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1753 os.mkdir(pjoin(self.dir_path, 'bin')) 1754 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1755 os.mkdir(pjoin(self.dir_path, 'lib')) 1756 os.mkdir(pjoin(self.dir_path, 'Cards')) 1757 1758 # Information at top-level 1759 #Write version info 1760 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1761 try: 1762 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1763 except IOError: 1764 MG5_version = misc.get_pkg_info() 1765 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1766 "5." + MG5_version['version']) 1767 1768 # Add file in bin directory 1769 #shutil.copy(pjoin(temp_dir, 'bin', 'change_compiler.py'), 1770 # pjoin(self.dir_path, 'bin')) 1771 1772 # Add file in SubProcesses 1773 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1774 pjoin(self.dir_path, 'SubProcesses', 'makefile')) 1775 1776 if self.format == 'standalone': 1777 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1778 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1779 elif self.format == 'standalone_rw': 1780 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'driver_reweight.f'), 1781 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1782 1783 # Add file in Source 1784 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1785 pjoin(self.dir_path, 'Source')) 1786 # add the makefile 1787 filename = pjoin(self.dir_path,'Source','makefile') 1788 self.write_source_makefile(writers.FileWriter(filename))
1789 1790 #=========================================================================== 1791 # export model files 1792 #===========================================================================
1793 - def export_model_files(self, model_path):
1794 """export the model dependent files for V4 model""" 1795 1796 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1797 # Add the routine update_as_param in v4 model 1798 # This is a function created in the UFO 1799 1800 1801 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1802 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1803 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1804 fsock.write(text) 1805 fsock.close() 1806 1807 self.make_model_symbolic_link()
1808 1809 #=========================================================================== 1810 # Make the Helas and Model directories for Standalone directory 1811 #===========================================================================
1812 - def make(self):
1813 """Run make in the DHELAS and MODEL directories, to set up 1814 everything for running standalone 1815 """ 1816 1817 source_dir = pjoin(self.dir_path, "Source") 1818 logger.info("Running make for Helas") 1819 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1820 logger.info("Running make for Model") 1821 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1822 1823 #=========================================================================== 1824 # Create proc_card_mg5.dat for Standalone directory 1825 #===========================================================================
1826 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 1827 online = False, compiler='gfortran'):
1828 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 1829 1830 self.compiler_choice(compiler) 1831 self.make() 1832 1833 # Write command history as proc_card_mg5 1834 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 1835 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 1836 history.write(output_file) 1837 1838 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler)
1839
1840 - def compiler_choice(self, compiler):
1841 """ Different daughter classes might want different compilers. 1842 So this function is meant to be overloaded if desired.""" 1843 1844 self.set_compiler(compiler)
1845 1846 #=========================================================================== 1847 # generate_subprocess_directory_v4 1848 #===========================================================================
1849 - def generate_subprocess_directory_v4(self, matrix_element, 1850 fortran_model):
1851 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 1852 including the necessary matrix.f and nexternal.inc files""" 1853 1854 cwd = os.getcwd() 1855 1856 # Create the directory PN_xx_xxxxx in the specified path 1857 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 1858 "P%s" % matrix_element.get('processes')[0].shell_string()) 1859 1860 if self.opt['sa_symmetry']: 1861 # avoid symmetric output 1862 for i,proc in enumerate(matrix_element.get('processes')): 1863 1864 initial = [] #filled in the next line 1865 final = [l.get('id') for l in proc.get('legs')\ 1866 if l.get('state') or initial.append(l.get('id'))] 1867 decay_finals = proc.get_final_ids_after_decay() 1868 decay_finals.sort() 1869 tag = (tuple(initial), tuple(decay_finals)) 1870 legs = proc.get('legs')[:] 1871 leg0 = proc.get('legs')[0] 1872 leg1 = proc.get('legs')[1] 1873 if not leg1.get('state'): 1874 proc.get('legs')[0] = leg1 1875 proc.get('legs')[1] = leg0 1876 flegs = proc.get('legs')[2:] 1877 for perm in itertools.permutations(flegs): 1878 for i,p in enumerate(perm): 1879 proc.get('legs')[i+2] = p 1880 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 1881 "P%s" % proc.shell_string()) 1882 #restore original order 1883 proc.get('legs')[2:] = legs[2:] 1884 if os.path.exists(dirpath2): 1885 proc.get('legs')[:] = legs 1886 return 0 1887 proc.get('legs')[:] = legs 1888 1889 try: 1890 os.mkdir(dirpath) 1891 except os.error as error: 1892 logger.warning(error.strerror + " " + dirpath) 1893 1894 #try: 1895 # os.chdir(dirpath) 1896 #except os.error: 1897 # logger.error('Could not cd to directory %s' % dirpath) 1898 # return 0 1899 1900 logger.info('Creating files in directory %s' % dirpath) 1901 1902 # Extract number of external particles 1903 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1904 1905 # Create the matrix.f file and the nexternal.inc file 1906 if self.opt['export_format']=='standalone_msP': 1907 filename = pjoin(dirpath, 'matrix_prod.f') 1908 else: 1909 filename = pjoin(dirpath, 'matrix.f') 1910 calls = self.write_matrix_element_v4( 1911 writers.FortranWriter(filename), 1912 matrix_element, 1913 fortran_model) 1914 1915 if self.opt['export_format'] == 'standalone_msP': 1916 filename = pjoin(dirpath,'configs_production.inc') 1917 mapconfigs, s_and_t_channels = self.write_configs_file(\ 1918 writers.FortranWriter(filename), 1919 matrix_element) 1920 1921 filename = pjoin(dirpath,'props_production.inc') 1922 self.write_props_file(writers.FortranWriter(filename), 1923 matrix_element, 1924 s_and_t_channels) 1925 1926 filename = pjoin(dirpath,'nexternal_prod.inc') 1927 self.write_nexternal_madspin(writers.FortranWriter(filename), 1928 nexternal, ninitial) 1929 1930 if self.opt['export_format']=='standalone_msF': 1931 filename = pjoin(dirpath, 'helamp.inc') 1932 ncomb=matrix_element.get_helicity_combinations() 1933 self.write_helamp_madspin(writers.FortranWriter(filename), 1934 ncomb) 1935 1936 filename = pjoin(dirpath, 'nexternal.inc') 1937 self.write_nexternal_file(writers.FortranWriter(filename), 1938 nexternal, ninitial) 1939 1940 filename = pjoin(dirpath, 'pmass.inc') 1941 self.write_pmass_file(writers.FortranWriter(filename), 1942 matrix_element) 1943 1944 filename = pjoin(dirpath, 'ngraphs.inc') 1945 self.write_ngraphs_file(writers.FortranWriter(filename), 1946 len(matrix_element.get_all_amplitudes())) 1947 1948 # Generate diagrams 1949 filename = pjoin(dirpath, "matrix.ps") 1950 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1951 get('diagrams'), 1952 
filename, 1953 model=matrix_element.get('processes')[0].\ 1954 get('model'), 1955 amplitude=True) 1956 logger.info("Generating Feynman diagrams for " + \ 1957 matrix_element.get('processes')[0].nice_string()) 1958 plot.draw() 1959 1960 linkfiles = ['check_sa.f', 'coupl.inc', 'makefile'] 1961 1962 for file in linkfiles: 1963 ln('../%s' % file, cwd=dirpath) 1964 1965 # Return to original PWD 1966 #os.chdir(cwd) 1967 1968 if not calls: 1969 calls = 0 1970 return calls
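# Illustrative summary of the files written or linked above in each
# standalone SubProcesses/P<shell_string> directory (the extra files for the
# standalone_msP/standalone_msF formats are omitted):
#   matrix.f, nexternal.inc, pmass.inc, ngraphs.inc, matrix.ps
#   plus symbolic links to ../check_sa.f, ../coupl.inc and ../makefile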
1971 1972 1973 #=========================================================================== 1974 # write_source_makefile 1975 #===========================================================================
1976 - def write_source_makefile(self, writer):
1977 """Write the nexternal.inc file for MG4""" 1978 1979 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 1980 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 1981 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 1982 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 1983 writer.write(text) 1984 1985 return True
1986 1987 #=========================================================================== 1988 # write_matrix_element_v4 1989 #===========================================================================
1990 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 1991 write=True, proc_prefix=''):
1992 """Export a matrix element to a matrix.f file in MG4 standalone format 1993 if write is on False, just return the replace_dict and not write anything.""" 1994 1995 1996 if not matrix_element.get('processes') or \ 1997 not matrix_element.get('diagrams'): 1998 return 0 1999 2000 if not isinstance(writer, writers.FortranWriter): 2001 raise writers.FortranWriter.FortranWriterError(\ 2002 "writer not FortranWriter but %s" % type(writer)) 2003 2004 if not self.opt.has_key('sa_symmetry'): 2005 self.opt['sa_symmetry']=False 2006 2007 # Set lowercase/uppercase Fortran code 2008 writers.FortranWriter.downcase = False 2009 2010 # The proc_id is for MadEvent grouping which is never used in SA. 2011 replace_dict = {'global_variable':'', 'amp2_lines':'', 2012 'proc_prefix':proc_prefix, 'proc_id':''} 2013 2014 # Extract helas calls 2015 helas_calls = fortran_model.get_matrix_element_calls(\ 2016 matrix_element) 2017 2018 replace_dict['helas_calls'] = "\n".join(helas_calls) 2019 2020 # Extract version number and date from VERSION file 2021 info_lines = self.get_mg5_info_lines() 2022 replace_dict['info_lines'] = info_lines 2023 2024 # Extract process info lines 2025 process_lines = self.get_process_info_lines(matrix_element) 2026 replace_dict['process_lines'] = process_lines 2027 2028 # Extract number of external particles 2029 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2030 replace_dict['nexternal'] = nexternal 2031 2032 # Extract ncomb 2033 ncomb = matrix_element.get_helicity_combinations() 2034 replace_dict['ncomb'] = ncomb 2035 2036 # Extract helicity lines 2037 helicity_lines = self.get_helicity_lines(matrix_element) 2038 replace_dict['helicity_lines'] = helicity_lines 2039 2040 # Extract overall denominator 2041 # Averaging initial state color, spin, and identical FS particles 2042 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2043 2044 # Extract ngraphs 2045 ngraphs = matrix_element.get_number_of_amplitudes() 2046 replace_dict['ngraphs'] = ngraphs 2047 2048 # Extract nwavefuncs 2049 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2050 replace_dict['nwavefuncs'] = nwavefuncs 2051 2052 # Extract ncolor 2053 ncolor = max(1, len(matrix_element.get('color_basis'))) 2054 replace_dict['ncolor'] = ncolor 2055 2056 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2057 2058 # Extract color data lines 2059 color_data_lines = self.get_color_data_lines(matrix_element) 2060 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2061 2062 if self.opt['export_format']=='standalone_msP': 2063 # For MadSpin need to return the AMP2 2064 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2065 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2066 replace_dict['global_variable'] = \ 2067 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2068 2069 # JAMP definition, depends on the number of independent split orders 2070 split_orders=matrix_element.get('processes')[0].get('split_orders') 2071 2072 if len(split_orders)==0: 2073 replace_dict['nSplitOrders']='' 2074 # Extract JAMP lines 2075 jamp_lines = self.get_JAMP_lines(matrix_element) 2076 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2077 # set all amplitude order to weight 1 and only one squared order 2078 # contribution which is of course ALL_ORDERS=2. 2079 squared_orders = [(2,),] 2080 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2081 replace_dict['chosen_so_configs'] = '.TRUE.' 
2082 replace_dict['nSqAmpSplitOrders']=1 2083 replace_dict['split_order_str_list']='' 2084 else: 2085 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2086 replace_dict['nAmpSplitOrders']=len(amp_orders) 2087 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2088 replace_dict['nSplitOrders']=len(split_orders) 2089 replace_dict['split_order_str_list']=str(split_orders) 2090 amp_so = self.get_split_orders_lines( 2091 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2092 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2093 replace_dict['ampsplitorders']='\n'.join(amp_so) 2094 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2095 jamp_lines = self.get_JAMP_lines_split_order(\ 2096 matrix_element,amp_orders,split_order_names=split_orders) 2097 2098 # Now setup the array specifying what squared split order is chosen 2099 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2100 matrix_element.get('processes')[0],squared_orders) 2101 2102 # For convenience we also write the driver check_sa_splitOrders.f 2103 # that explicitely writes out the contribution from each squared order. 2104 # The original driver still works and is compiled with 'make' while 2105 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2106 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2107 self.write_check_sa_splitOrders(squared_orders,split_orders, 2108 nexternal,ninitial,proc_prefix,check_sa_writer) 2109 2110 if write: 2111 writers.FortranWriter('nsqso_born.inc').writelines( 2112 """INTEGER NSQSO_BORN 2113 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2114 2115 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2116 2117 matrix_template = self.matrix_template 2118 if self.opt['export_format']=='standalone_msP' : 2119 matrix_template = 'matrix_standalone_msP_v4.inc' 2120 elif self.opt['export_format']=='standalone_msF': 2121 matrix_template = 'matrix_standalone_msF_v4.inc' 2122 elif self.opt['export_format']=='matchbox': 2123 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2124 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2125 2126 if len(split_orders)>0: 2127 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2128 logger.debug("Warning: The export format %s is not "+\ 2129 " available for individual ME evaluation of given coupl. orders."+\ 2130 " Only the total ME will be computed.", self.opt['export_format']) 2131 elif self.opt['export_format'] in ['madloop_matchbox']: 2132 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2133 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2134 else: 2135 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2136 2137 if write: 2138 path = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2139 content = open(path).read() 2140 content = content % replace_dict 2141 # Write the file 2142 writer.writelines(content) 2143 # Add the helper functions. 2144 if len(split_orders)>0: 2145 content = '\n' + open(pjoin(_file_path, \ 2146 'iolibs/template_files/split_orders_helping_functions.inc'))\ 2147 .read()%replace_dict 2148 writer.writelines(content) 2149 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2150 else: 2151 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2152 return replace_dict # for subclass update
2153
2154 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2155 nincoming, proc_prefix, writer):
2156 """ Write out a more advanced version of the check_sa drivers that 2157 individually returns the matrix element for each contributing squared 2158 order.""" 2159 2160 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2161 'template_files', 'check_sa_splitOrders.f')).read() 2162 printout_sq_orders=[] 2163 for i, squared_order in enumerate(squared_orders): 2164 sq_orders=[] 2165 for j, sqo in enumerate(squared_order): 2166 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2167 printout_sq_orders.append(\ 2168 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2169 %(i+1,' '.join(sq_orders),i+1)) 2170 printout_sq_orders='\n'.join(printout_sq_orders) 2171 writer.writelines(check_sa_content%{\ 2172 'printout_sqorders':printout_sq_orders, 2173 'nSplitOrders':len(squared_orders), 2174 'nexternal':nexternal, 2175 'nincoming':nincoming, 2176 'proc_prefix':proc_prefix})
2177
2178 2179 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2180 """class to take care of exporting a set of matrix element for the Matchbox 2181 code in the case of Born only routine""" 2182 2183 default_opt = {'clean': False, 'complex_mass':False, 2184 'export_format':'matchbox', 'mp': False, 2185 'sa_symmetry': True} 2186 2187 #specific template of the born 2188 2189 2190 matrix_template = "matrix_standalone_matchbox.inc" 2191 2192 @staticmethod
2193 - def get_color_string_lines(matrix_element):
2194 """Return the color matrix definition lines for this matrix element. Split 2195 rows in chunks of size n.""" 2196 2197 if not matrix_element.get('color_matrix'): 2198 return "\n".join(["out = 1"]) 2199 2200 #start the real work 2201 color_denominators = matrix_element.get('color_matrix').\ 2202 get_line_denominators() 2203 matrix_strings = [] 2204 my_cs = color.ColorString() 2205 for i_color in xrange(len(color_denominators)): 2206 # Then write the numerators for the matrix elements 2207 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2208 t_str=repr(my_cs) 2209 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2210 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2211 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2212 all_matches = t_match.findall(t_str) 2213 output = {} 2214 arg=[] 2215 for match in all_matches: 2216 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2217 if ctype in ['ColorOne' ]: 2218 continue 2219 if ctype not in ['T', 'Tr' ]: 2220 raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype 2221 tmparg += ['0'] 2222 arg +=tmparg 2223 for j, v in enumerate(arg): 2224 output[(i_color,j)] = v 2225 2226 for key in output: 2227 if matrix_strings == []: 2228 #first entry 2229 matrix_strings.append(""" 2230 if (in1.eq.%s.and.in2.eq.%s)then 2231 out = %s 2232 """ % (key[0], key[1], output[key])) 2233 else: 2234 #not first entry 2235 matrix_strings.append(""" 2236 elseif (in1.eq.%s.and.in2.eq.%s)then 2237 out = %s 2238 """ % (key[0], key[1], output[key])) 2239 if len(matrix_strings): 2240 matrix_strings.append(" else \n out = - 1 \n endif") 2241 else: 2242 return "\n out = - 1 \n " 2243 return "\n".join(matrix_strings)
2244
2245 - def make(self,*args,**opts):
2246 pass
2247
2248 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2249 JAMP_formatLC=None):
2250 2251 """Adding leading color part of the colorflow""" 2252 2253 if not JAMP_formatLC: 2254 JAMP_formatLC= "LN%s" % JAMP_format 2255 2256 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2257 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2258 col_amps=col_amps.get_color_amplitudes() 2259 elif(isinstance(col_amps,list)): 2260 if(col_amps and isinstance(col_amps[0],list)): 2261 col_amps=col_amps 2262 else: 2263 raise MadGraph5Error, error_msg % 'col_amps' 2264 else: 2265 raise MadGraph5Error, error_msg % 'col_amps' 2266 2267 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2268 JAMP_format=JAMP_format, 2269 AMP_format=AMP_format, 2270 split=-1) 2271 2272 2273 # Filter the col_ampls to generate only those without any 1/NC terms 2274 2275 LC_col_amps = [] 2276 for coeff_list in col_amps: 2277 to_add = [] 2278 for (coefficient, amp_number) in coeff_list: 2279 if coefficient[3]==0: 2280 to_add.append( (coefficient, amp_number) ) 2281 LC_col_amps.append(to_add) 2282 2283 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2284 JAMP_format=JAMP_formatLC, 2285 AMP_format=AMP_format, 2286 split=-1) 2287 2288 return text
2289
2290 2291 2292 2293 #=============================================================================== 2294 # ProcessExporterFortranMW 2295 #=============================================================================== 2296 -class ProcessExporterFortranMW(ProcessExporterFortran):
2297 """Class to take care of exporting a set of matrix elements to 2298 MadGraph v4 - MadWeight format.""" 2299 2300 matrix_file="matrix_standalone_v4.inc" 2301
2302 - def copy_v4template(self, modelname):
2303 """Additional actions needed for setup of Template 2304 """ 2305 2306 super(ProcessExporterFortranMW, self).copy_v4template(modelname) 2307 2308 # Add the MW specific file 2309 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2310 pjoin(self.dir_path, 'Source','MadWeight'), True) 2311 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2312 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2313 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2314 pjoin(self.dir_path, 'Source','setrun.f')) 2315 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2316 pjoin(self.dir_path, 'Source','run.inc')) 2317 # File created from Template (Different in some child class) 2318 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2319 self.write_run_config_file(writers.FortranWriter(filename)) 2320 2321 try: 2322 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2323 stdout = os.open(os.devnull, os.O_RDWR), 2324 stderr = os.open(os.devnull, os.O_RDWR), 2325 cwd=self.dir_path) 2326 except OSError: 2327 # Probably madweight already called 2328 pass 2329 2330 # Copy the different python file in the Template 2331 self.copy_python_file() 2332 # create the appropriate cuts.f 2333 self.get_mw_cuts_version() 2334 2335 # add the makefile in Source directory 2336 filename = os.path.join(self.dir_path,'Source','makefile') 2337 self.write_source_makefile(writers.FortranWriter(filename))
2338 2339 2340 2341 2342 #=========================================================================== 2343 # convert_model_to_mg4 2344 #===========================================================================
2345 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 2346 wanted_couplings = []):
2347 2348 super(ProcessExporterFortranMW,self).convert_model_to_mg4(model, 2349 wanted_lorentz, wanted_couplings) 2350 2351 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2352 try: 2353 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2354 except OSError as error: 2355 pass 2356 model_path = model.get('modelpath') 2357 # This is not safe if there is a '##' or '-' in the path. 2358 shutil.copytree(model_path, 2359 pjoin(self.dir_path,'bin','internal','ufomodel'), 2360 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2361 if hasattr(model, 'restrict_card'): 2362 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2363 'restrict_default.dat') 2364 if isinstance(model.restrict_card, check_param_card.ParamCard): 2365 model.restrict_card.write(out_path) 2366 else: 2367 files.cp(model.restrict_card, out_path)
2368 2369 #=========================================================================== 2370 # generate_subprocess_directory_v4 2371 #===========================================================================
2372 - def copy_python_file(self):
2373 """copy the python file require for the Template""" 2374 2375 # madevent interface 2376 cp(_file_path+'/interface/madweight_interface.py', 2377 self.dir_path+'/bin/internal/madweight_interface.py') 2378 cp(_file_path+'/interface/extended_cmd.py', 2379 self.dir_path+'/bin/internal/extended_cmd.py') 2380 cp(_file_path+'/interface/common_run_interface.py', 2381 self.dir_path+'/bin/internal/common_run_interface.py') 2382 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2383 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2384 #cp(_file_path+'/iolibs/save_load_object.py', 2385 # self.dir_path+'/bin/internal/save_load_object.py') 2386 cp(_file_path+'/iolibs/file_writers.py', 2387 self.dir_path+'/bin/internal/file_writers.py') 2388 #model file 2389 cp(_file_path+'../models/check_param_card.py', 2390 self.dir_path+'/bin/internal/check_param_card.py') 2391 2392 #madevent file 2393 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2394 cp(_file_path+'/various/lhe_parser.py', 2395 self.dir_path+'/bin/internal/lhe_parser.py') 2396 2397 cp(_file_path+'/various/banner.py', 2398 self.dir_path+'/bin/internal/banner.py') 2399 cp(_file_path+'/various/shower_card.py', 2400 self.dir_path+'/bin/internal/shower_card.py') 2401 cp(_file_path+'/various/cluster.py', 2402 self.dir_path+'/bin/internal/cluster.py') 2403 2404 # logging configuration 2405 cp(_file_path+'/interface/.mg5_logging.conf', 2406 self.dir_path+'/bin/internal/me5_logging.conf') 2407 cp(_file_path+'/interface/coloring_logging.py', 2408 self.dir_path+'/bin/internal/coloring_logging.py')
2409 2410 2411 #=========================================================================== 2412 # Change the version of cuts.f to the one compatible with MW 2413 #===========================================================================
2414 - def get_mw_cuts_version(self, outpath=None):
2415 """create the appropriate cuts.f 2416 This is based on the one associated to ME output but: 2417 1) No clustering (=> remove initcluster/setclscales) 2418 2) Adding the definition of cut_bw at the file. 2419 """ 2420 2421 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2422 2423 text = StringIO() 2424 #1) remove all dependencies in ickkw >1: 2425 nb_if = 0 2426 for line in template: 2427 if 'if(xqcut.gt.0d0' in line: 2428 nb_if = 1 2429 if nb_if == 0: 2430 text.write(line) 2431 continue 2432 if re.search(r'if\(.*\)\s*then', line): 2433 nb_if += 1 2434 elif 'endif' in line: 2435 nb_if -= 1 2436 2437 #2) add fake cut_bw (have to put the true one later) 2438 text.write(""" 2439 logical function cut_bw(p) 2440 include 'madweight_param.inc' 2441 double precision p(*) 2442 if (bw_cut) then 2443 cut_bw = .true. 2444 else 2445 stop 1 2446 endif 2447 return 2448 end 2449 """) 2450 2451 final = text.getvalue() 2452 #3) remove the call to initcluster: 2453 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2454 template = template.replace('genps.inc', 'maxparticles.inc') 2455 #Now we can write it 2456 if not outpath: 2457 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2458 elif isinstance(outpath, str): 2459 fsock = open(outpath, 'w') 2460 else: 2461 fsock = outpath 2462 fsock.write(template)
2463 2464 2465 2466 #=========================================================================== 2467 # Make the Helas and Model directories for Standalone directory 2468 #===========================================================================
2469 - def make(self):
2470 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2471 everything for running madweight 2472 """ 2473 2474 source_dir = os.path.join(self.dir_path, "Source") 2475 logger.info("Running make for Helas") 2476 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2477 logger.info("Running make for Model") 2478 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2479 logger.info("Running make for PDF") 2480 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2481 logger.info("Running make for CERNLIB") 2482 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2483 logger.info("Running make for GENERIC") 2484 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2485 logger.info("Running make for blocks") 2486 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2487 logger.info("Running make for tools") 2488 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2489 2490 #=========================================================================== 2491 # Create proc_card_mg5.dat for MadWeight directory 2492 #===========================================================================
2493 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 2494 online = False, compiler='g77'):
2495 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2496 2497 #proc_charac 2498 self.create_proc_charac() 2499 2500 # Write maxparticles.inc based on max of ME's/subprocess groups 2501 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2502 self.write_maxparticles_file(writers.FortranWriter(filename), 2503 matrix_elements) 2504 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2505 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2506 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2507 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2508 2509 self.set_compiler(compiler) 2510 self.make() 2511 2512 # Write command history as proc_card_mg5 2513 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2514 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2515 history.write(output_file) 2516 2517 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler)
2518 2519 2520 #=========================================================================== 2521 # create the run_card for MW 2522 #===========================================================================
2523 - def create_run_card(self, matrix_elements, history):
2524 """ """ 2525 2526 run_card = banner_mod.RunCard() 2527 2528 # pass to default for MW 2529 run_card["run_tag"] = "\'not_use\'" 2530 run_card["fixed_ren_scale"] = "T" 2531 run_card["fixed_fac_scale"] = "T" 2532 run_card.remove_all_cut() 2533 2534 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2535 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2536 python_template=True) 2537 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2538 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2539 python_template=True)
2540 2541 #=========================================================================== 2542 # export model files 2543 #===========================================================================
2544 - def export_model_files(self, model_path):
2545 """export the model dependent files for V4 model""" 2546 2547 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2548 # Add the routine update_as_param in v4 model 2549 # This is a function created in the UFO 2550 text=""" 2551 subroutine update_as_param() 2552 call setpara('param_card.dat',.false.) 2553 return 2554 end 2555 """ 2556 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2557 ff.write(text) 2558 ff.close() 2559 2560 # Modify setrun.f 2561 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2562 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2563 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2564 fsock.write(text) 2565 fsock.close() 2566 2567 # Modify initialization.f 2568 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2569 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2570 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2571 fsock.write(text) 2572 fsock.close() 2573 2574 2575 self.make_model_symbolic_link()
2576 2577 #=========================================================================== 2578 # generate_subprocess_directory_v4 2579 #===========================================================================
2580 - def generate_subprocess_directory_v4(self, matrix_element, 2581 fortran_model,number):
2582 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2583 including the necessary matrix.f and nexternal.inc files""" 2584 2585 cwd = os.getcwd() 2586 2587 # Create the directory PN_xx_xxxxx in the specified path 2588 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2589 "P%s" % matrix_element.get('processes')[0].shell_string()) 2590 2591 try: 2592 os.mkdir(dirpath) 2593 except os.error as error: 2594 logger.warning(error.strerror + " " + dirpath) 2595 2596 #try: 2597 # os.chdir(dirpath) 2598 #except os.error: 2599 # logger.error('Could not cd to directory %s' % dirpath) 2600 # return 0 2601 2602 logger.info('Creating files in directory %s' % dirpath) 2603 2604 # Extract number of external particles 2605 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2606 2607 # Create the matrix.f file and the nexternal.inc file 2608 filename = pjoin(dirpath,'matrix.f') 2609 calls,ncolor = self.write_matrix_element_v4( 2610 writers.FortranWriter(filename), 2611 matrix_element, 2612 fortran_model) 2613 2614 filename = pjoin(dirpath, 'auto_dsig.f') 2615 self.write_auto_dsig_file(writers.FortranWriter(filename), 2616 matrix_element) 2617 2618 filename = pjoin(dirpath, 'configs.inc') 2619 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2620 writers.FortranWriter(filename), 2621 matrix_element) 2622 2623 filename = pjoin(dirpath, 'nexternal.inc') 2624 self.write_nexternal_file(writers.FortranWriter(filename), 2625 nexternal, ninitial) 2626 2627 filename = pjoin(dirpath, 'leshouche.inc') 2628 self.write_leshouche_file(writers.FortranWriter(filename), 2629 matrix_element) 2630 2631 filename = pjoin(dirpath, 'props.inc') 2632 self.write_props_file(writers.FortranWriter(filename), 2633 matrix_element, 2634 s_and_t_channels) 2635 2636 filename = pjoin(dirpath, 'pmass.inc') 2637 self.write_pmass_file(writers.FortranWriter(filename), 2638 matrix_element) 2639 2640 filename = pjoin(dirpath, 'ngraphs.inc') 2641 self.write_ngraphs_file(writers.FortranWriter(filename), 2642 len(matrix_element.get_all_amplitudes())) 2643 2644 filename = pjoin(dirpath, 'maxamps.inc') 2645 self.write_maxamps_file(writers.FortranWriter(filename), 2646 len(matrix_element.get('diagrams')), 2647 ncolor, 2648 len(matrix_element.get('processes')), 2649 1) 2650 2651 filename = pjoin(dirpath, 'phasespace.inc') 2652 self.write_phasespace_file(writers.FortranWriter(filename), 2653 len(matrix_element.get('diagrams')), 2654 ) 2655 2656 # Generate diagrams 2657 filename = pjoin(dirpath, "matrix.ps") 2658 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2659 get('diagrams'), 2660 filename, 2661 model=matrix_element.get('processes')[0].\ 2662 get('model'), 2663 amplitude='') 2664 logger.info("Generating Feynman diagrams for " + \ 2665 matrix_element.get('processes')[0].nice_string()) 2666 plot.draw() 2667 2668 #import genps.inc and maxconfigs.inc into Subprocesses 2669 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 2670 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 2671 2672 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 2673 2674 for file in linkfiles: 2675 ln('../%s' % file, starting_dir=cwd) 2676 2677 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 2678 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 2679 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 
2680 ln('phasespace.inc', '../', log=True, cwd=dirpath) 2681 # Return to original PWD 2682 #os.chdir(cwd) 2683 2684 if not calls: 2685 calls = 0 2686 return calls
2687 2688 #=========================================================================== 2689 # write_matrix_element_v4 2690 #===========================================================================
2691 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
2692 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 2693 2694 if not matrix_element.get('processes') or \ 2695 not matrix_element.get('diagrams'): 2696 return 0 2697 2698 if not isinstance(writer, writers.FortranWriter): 2699 raise writers.FortranWriter.FortranWriterError(\ 2700 "writer not FortranWriter") 2701 2702 # Set lowercase/uppercase Fortran code 2703 writers.FortranWriter.downcase = False 2704 2705 replace_dict = {} 2706 2707 # Extract version number and date from VERSION file 2708 info_lines = self.get_mg5_info_lines() 2709 replace_dict['info_lines'] = info_lines 2710 2711 # Extract process info lines 2712 process_lines = self.get_process_info_lines(matrix_element) 2713 replace_dict['process_lines'] = process_lines 2714 2715 # Set proc_id 2716 replace_dict['proc_id'] = proc_id 2717 2718 # Extract number of external particles 2719 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2720 replace_dict['nexternal'] = nexternal 2721 2722 # Extract ncomb 2723 ncomb = matrix_element.get_helicity_combinations() 2724 replace_dict['ncomb'] = ncomb 2725 2726 # Extract helicity lines 2727 helicity_lines = self.get_helicity_lines(matrix_element) 2728 replace_dict['helicity_lines'] = helicity_lines 2729 2730 # Extract overall denominator 2731 # Averaging initial state color, spin, and identical FS particles 2732 den_factor_line = self.get_den_factor_line(matrix_element) 2733 replace_dict['den_factor_line'] = den_factor_line 2734 2735 # Extract ngraphs 2736 ngraphs = matrix_element.get_number_of_amplitudes() 2737 replace_dict['ngraphs'] = ngraphs 2738 2739 # Extract nwavefuncs 2740 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2741 replace_dict['nwavefuncs'] = nwavefuncs 2742 2743 # Extract ncolor 2744 ncolor = max(1, len(matrix_element.get('color_basis'))) 2745 replace_dict['ncolor'] = ncolor 2746 2747 # Extract color data lines 2748 color_data_lines = self.get_color_data_lines(matrix_element) 2749 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2750 2751 # Extract helas calls 2752 helas_calls = fortran_model.get_matrix_element_calls(\ 2753 matrix_element) 2754 2755 replace_dict['helas_calls'] = "\n".join(helas_calls) 2756 2757 # Extract JAMP lines 2758 jamp_lines = self.get_JAMP_lines(matrix_element) 2759 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2760 2761 file = open(os.path.join(_file_path, \ 2762 'iolibs/template_files/%s' % self.matrix_file)).read() 2763 file = file % replace_dict 2764 2765 2766 # Write the file 2767 writer.writelines(file) 2768 2769 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor
2770 2771 #=========================================================================== 2772 # write_source_makefile 2773 #===========================================================================
2774 - def write_source_makefile(self, writer):
2775 """Write the nexternal.inc file for madweight""" 2776 2777 2778 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 2779 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 2780 text = open(path).read() % {'libraries': set_of_lib} 2781 writer.write(text) 2782 2783 return True
2784
2785 - def write_phasespace_file(self, writer, nb_diag):
2786 """ """ 2787 2788 template = """ include 'maxparticles.inc' 2789 integer max_branches 2790 parameter (max_branches=max_particles-1) 2791 integer max_configs 2792 parameter (max_configs=%(nb_diag)s) 2793 2794 c channel position 2795 integer config_pos,perm_pos 2796 common /to_config/config_pos,perm_pos 2797 2798 """ 2799 2800 writer.write(template % {'nb_diag': nb_diag})
2801 2802 2803 #=========================================================================== 2804 # write_auto_dsig_file 2805 #===========================================================================
2806 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
2807 """Write the auto_dsig.f file for the differential cross section 2808 calculation, includes pdf call information (MadWeight format)""" 2809 2810 if not matrix_element.get('processes') or \ 2811 not matrix_element.get('diagrams'): 2812 return 0 2813 2814 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2815 2816 if ninitial < 1 or ninitial > 2: 2817 raise writers.FortranWriter.FortranWriterError, \ 2818 """Need ninitial = 1 or 2 to write auto_dsig file""" 2819 2820 replace_dict = {} 2821 2822 # Extract version number and date from VERSION file 2823 info_lines = self.get_mg5_info_lines() 2824 replace_dict['info_lines'] = info_lines 2825 2826 # Extract process info lines 2827 process_lines = self.get_process_info_lines(matrix_element) 2828 replace_dict['process_lines'] = process_lines 2829 2830 # Set proc_id 2831 replace_dict['proc_id'] = proc_id 2832 replace_dict['numproc'] = 1 2833 2834 # Set dsig_line 2835 if ninitial == 1: 2836 # No conversion, since result of decay should be given in GeV 2837 dsig_line = "pd(0)*dsiguu" 2838 else: 2839 # Convert result (in GeV) to pb 2840 dsig_line = "pd(0)*conv*dsiguu" 2841 2842 replace_dict['dsig_line'] = dsig_line 2843 2844 # Extract pdf lines 2845 pdf_vars, pdf_data, pdf_lines = \ 2846 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 2847 replace_dict['pdf_vars'] = pdf_vars 2848 replace_dict['pdf_data'] = pdf_data 2849 replace_dict['pdf_lines'] = pdf_lines 2850 2851 # Lines that differ between subprocess group and regular 2852 if proc_id: 2853 replace_dict['numproc'] = int(proc_id) 2854 replace_dict['passcuts_begin'] = "" 2855 replace_dict['passcuts_end'] = "" 2856 # Set lines for subprocess group version 2857 # Set define_iconfigs_lines 2858 replace_dict['define_subdiag_lines'] = \ 2859 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 2860 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 2861 else: 2862 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 2863 replace_dict['passcuts_end'] = "ENDIF" 2864 replace_dict['define_subdiag_lines'] = "" 2865 2866 file = open(os.path.join(_file_path, \ 2867 'iolibs/template_files/auto_dsig_mw.inc')).read() 2868 2869 file = file % replace_dict 2870 2871 2872 # Write the file 2873 writer.writelines(file)
2874 2875 #=========================================================================== 2876 # write_configs_file 2877 #===========================================================================
2878 - def write_configs_file(self, writer, matrix_element):
2879 """Write the configs.inc file for MadEvent""" 2880 2881 # Extract number of external particles 2882 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2883 2884 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 2885 mapconfigs = [c[0] for c in configs] 2886 model = matrix_element.get('processes')[0].get('model') 2887 return mapconfigs, self.write_configs_file_from_diagrams(writer, 2888 [[c[1]] for c in configs], 2889 mapconfigs, 2890 nexternal, ninitial,matrix_element, model)
2891 2892 #=========================================================================== 2893 # write_run_configs_file 2894 #===========================================================================
2895 - def write_run_config_file(self, writer):
2896 """Write the run_configs.inc file for MadWeight""" 2897 2898 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 2899 text = open(path).read() % {'chanperjob':'5'} 2900 writer.write(text) 2901 return True
2902 2903 #=========================================================================== 2904 # write_configs_file_from_diagrams 2905 #===========================================================================
2906 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 2907 nexternal, ninitial, matrix_element, model):
2908 """Write the actual configs.inc file. 2909 2910 configs is the diagrams corresponding to configs (each 2911 diagrams is a list of corresponding diagrams for all 2912 subprocesses, with None if there is no corresponding diagrams 2913 for a given process). 2914 mapconfigs gives the diagram number for each config. 2915 2916 For s-channels, we need to output one PDG for each subprocess in 2917 the subprocess group, in order to be able to pick the right 2918 one for multiprocesses.""" 2919 2920 lines = [] 2921 2922 particle_dict = matrix_element.get('processes')[0].get('model').\ 2923 get('particle_dict') 2924 2925 s_and_t_channels = [] 2926 2927 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 2928 for config in configs if [d for d in config if d][0].\ 2929 get_vertex_leg_numbers()!=[]] 2930 2931 minvert = min(vert_list) if vert_list!=[] else 0 2932 # Number of subprocesses 2933 nsubprocs = len(configs[0]) 2934 2935 nconfigs = 0 2936 2937 new_pdg = model.get_first_non_pdg() 2938 2939 for iconfig, helas_diags in enumerate(configs): 2940 if any([vert > minvert for vert in 2941 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 2942 # Only 3-vertices allowed in configs.inc 2943 continue 2944 nconfigs += 1 2945 2946 # Need s- and t-channels for all subprocesses, including 2947 # those that don't contribute to this config 2948 empty_verts = [] 2949 stchannels = [] 2950 for h in helas_diags: 2951 if h: 2952 # get_s_and_t_channels gives vertices starting from 2953 # final state external particles and working inwards 2954 stchannels.append(h.get('amplitudes')[0].\ 2955 get_s_and_t_channels(ninitial,model,new_pdg)) 2956 else: 2957 stchannels.append((empty_verts, None)) 2958 2959 # For t-channels, just need the first non-empty one 2960 tchannels = [t for s,t in stchannels if t != None][0] 2961 2962 # For s_and_t_channels (to be used later) use only first config 2963 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 2964 tchannels]) 2965 2966 # Make sure empty_verts is same length as real vertices 2967 if any([s for s,t in stchannels]): 2968 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 2969 2970 # Reorganize s-channel vertices to get a list of all 2971 # subprocesses for each vertex 2972 schannels = zip(*[s for s,t in stchannels]) 2973 else: 2974 schannels = [] 2975 2976 allchannels = schannels 2977 if len(tchannels) > 1: 2978 # Write out tchannels only if there are any non-trivial ones 2979 allchannels = schannels + tchannels 2980 2981 # Write out propagators for s-channel and t-channel vertices 2982 2983 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 2984 # Correspondance between the config and the diagram = amp2 2985 lines.append("* %d %d " % (nconfigs, 2986 mapconfigs[iconfig])) 2987 2988 for verts in allchannels: 2989 if verts in schannels: 2990 vert = [v for v in verts if v][0] 2991 else: 2992 vert = verts 2993 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2994 last_leg = vert.get('legs')[-1] 2995 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 2996 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 2997 # (last_leg.get('number'), nconfigs, len(daughters), 2998 # ",".join([str(d) for d in daughters]))) 2999 3000 if last_leg.get('id') == 21 and 21 not in particle_dict: 3001 # Fake propagator used in multiparticle vertices 3002 mass = 'zero' 3003 width = 'zero' 3004 pow_part = 0 3005 else: 3006 if (last_leg.get('id')!=7): 3007 particle = particle_dict[last_leg.get('id')] 
3008 # Get mass 3009 mass = particle.get('mass') 3010 # Get width 3011 width = particle.get('width') 3012 else : # fake propagator used in multiparticle vertices 3013 mass= 'zero' 3014 width= 'zero' 3015 3016 line=line+" "+mass+" "+width+" " 3017 3018 if verts in schannels: 3019 pdgs = [] 3020 for v in verts: 3021 if v: 3022 pdgs.append(v.get('legs')[-1].get('id')) 3023 else: 3024 pdgs.append(0) 3025 lines.append(line+" S "+str(last_leg.get('id'))) 3026 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3027 # (last_leg.get('number'), nconfigs, nsubprocs, 3028 # ",".join([str(d) for d in pdgs]))) 3029 # lines.append("data tprid(%d,%d)/0/" % \ 3030 # (last_leg.get('number'), nconfigs)) 3031 elif verts in tchannels[:-1]: 3032 lines.append(line+" T "+str(last_leg.get('id'))) 3033 # lines.append("data tprid(%d,%d)/%d/" % \ 3034 # (last_leg.get('number'), nconfigs, 3035 # abs(last_leg.get('id')))) 3036 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3037 # (last_leg.get('number'), nconfigs, nsubprocs, 3038 # ",".join(['0'] * nsubprocs))) 3039 3040 # Write out number of configs 3041 # lines.append("# Number of configs") 3042 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3043 lines.append(" * ") # a line with just a star indicates this is the end of file 3044 # Write the file 3045 writer.writelines(lines) 3046 3047 return s_and_t_channels
3048
3049 3050 #=============================================================================== 3051 # ProcessExporterFortranME 3052 #=============================================================================== 3053 -class ProcessExporterFortranME(ProcessExporterFortran):
3054 """Class to take care of exporting a set of matrix elements to 3055 MadEvent format.""" 3056 3057 matrix_file = "matrix_madevent_v4.inc" 3058
3059 - def copy_v4template(self, modelname):
3060 """Additional actions needed for setup of Template
3061 """
3062
3063 super(ProcessExporterFortranME, self).copy_v4template(modelname)
3064
3065 # File created from Template (different in some child classes)
3066 filename = pjoin(self.dir_path,'Source','run_config.inc')
3067 self.write_run_config_file(writers.FortranWriter(filename))
3068
3069 # The next files are model dependent (due to the SLHA convention)
3070 self.model_name = modelname
3071 # Add the symmetry.f
3072 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f')
3073 self.write_symmetry(writers.FortranWriter(filename))
3074 #
3075 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f')
3076 self.write_addmothers(writers.FortranWriter(filename))
3077 # Copy the Python files needed by the Template
3078 self.copy_python_file()
3079 3080 3081 3082 3083 3084 #=========================================================================== 3085 # generate_subprocess_directory_v4 3086 #===========================================================================
3087 - def copy_python_file(self):
3088 """copy the python file require for the Template""" 3089 3090 # madevent interface 3091 cp(_file_path+'/interface/madevent_interface.py', 3092 self.dir_path+'/bin/internal/madevent_interface.py') 3093 cp(_file_path+'/interface/extended_cmd.py', 3094 self.dir_path+'/bin/internal/extended_cmd.py') 3095 cp(_file_path+'/interface/common_run_interface.py', 3096 self.dir_path+'/bin/internal/common_run_interface.py') 3097 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3098 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3099 cp(_file_path+'/iolibs/save_load_object.py', 3100 self.dir_path+'/bin/internal/save_load_object.py') 3101 cp(_file_path+'/iolibs/file_writers.py', 3102 self.dir_path+'/bin/internal/file_writers.py') 3103 #model file 3104 cp(_file_path+'../models/check_param_card.py', 3105 self.dir_path+'/bin/internal/check_param_card.py') 3106 3107 #copy all the file present in madevent directory 3108 for name in os.listdir(pjoin(_file_path, 'madevent')): 3109 if name not in ['__init__.py'] and name.endswith('.py'): 3110 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3111 3112 #madevent file 3113 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3114 cp(_file_path+'/various/lhe_parser.py', 3115 self.dir_path+'/bin/internal/lhe_parser.py') 3116 cp(_file_path+'/various/banner.py', 3117 self.dir_path+'/bin/internal/banner.py') 3118 cp(_file_path+'/various/cluster.py', 3119 self.dir_path+'/bin/internal/cluster.py') 3120 cp(_file_path+'/madevent/combine_runs.py', 3121 self.dir_path+'/bin/internal/combine_runs.py') 3122 # logging configuration 3123 cp(_file_path+'/interface/.mg5_logging.conf', 3124 self.dir_path+'/bin/internal/me5_logging.conf') 3125 cp(_file_path+'/interface/coloring_logging.py', 3126 self.dir_path+'/bin/internal/coloring_logging.py') 3127 # shower card and FO_analyse_card. 3128 # Although not needed, it is imported by banner.py 3129 cp(_file_path+'/various/shower_card.py', 3130 self.dir_path+'/bin/internal/shower_card.py') 3131 cp(_file_path+'/various/FO_analyse_card.py', 3132 self.dir_path+'/bin/internal/FO_analyse_card.py')
3133 3134
3135 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 3136 wanted_couplings = []):
3137 3138 super(ProcessExporterFortranME,self).convert_model_to_mg4(model, 3139 wanted_lorentz, wanted_couplings) 3140 3141 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3142 try: 3143 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3144 except OSError as error: 3145 pass 3146 model_path = model.get('modelpath') 3147 # This is not safe if there is a '##' or '-' in the path. 3148 shutil.copytree(model_path, 3149 pjoin(self.dir_path,'bin','internal','ufomodel'), 3150 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3151 if hasattr(model, 'restrict_card'): 3152 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3153 'restrict_default.dat') 3154 if isinstance(model.restrict_card, check_param_card.ParamCard): 3155 model.restrict_card.write(out_path) 3156 else: 3157 files.cp(model.restrict_card, out_path)
3158 3159 #=========================================================================== 3160 # export model files 3161 #===========================================================================
3162 - def export_model_files(self, model_path):
3163 """export the model dependent files""" 3164 3165 super(ProcessExporterFortranME,self).export_model_files(model_path) 3166 3167 # Add the routine update_as_param in v4 model 3168 # This is a function created in the UFO 3169 text=""" 3170 subroutine update_as_param() 3171 call setpara('param_card.dat',.false.) 3172 return 3173 end 3174 """ 3175 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3176 ff.write(text) 3177 ff.close() 3178 3179 # Add the symmetry.f 3180 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3181 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3182 3183 # Modify setrun.f 3184 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3185 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3186 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3187 fsock.write(text) 3188 fsock.close() 3189 3190 self.make_model_symbolic_link()
3191 3192 3193 #=========================================================================== 3194 # generate_subprocess_directory_v4 3195 #===========================================================================
3196 - def generate_subprocess_directory_v4(self, matrix_element, 3197 fortran_model, 3198 me_number):
3199 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3200 including the necessary matrix.f and various helper files""" 3201 3202 cwd = os.getcwd() 3203 path = pjoin(self.dir_path, 'SubProcesses') 3204 3205 3206 if not self.model: 3207 self.model = matrix_element.get('processes')[0].get('model') 3208 3209 3210 3211 #os.chdir(path) 3212 # Create the directory PN_xx_xxxxx in the specified path 3213 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3214 try: 3215 os.mkdir(pjoin(path,subprocdir)) 3216 except os.error as error: 3217 logger.warning(error.strerror + " " + subprocdir) 3218 3219 #try: 3220 # os.chdir(subprocdir) 3221 #except os.error: 3222 # logger.error('Could not cd to directory %s' % subprocdir) 3223 # return 0 3224 3225 logger.info('Creating files in directory %s' % subprocdir) 3226 Ppath = pjoin(path, subprocdir) 3227 3228 # Extract number of external particles 3229 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3230 3231 # Add the driver.f 3232 ncomb = matrix_element.get_helicity_combinations() 3233 filename = pjoin(Ppath,'driver.f') 3234 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1) 3235 3236 # Create the matrix.f file, auto_dsig.f file and all inc files 3237 filename = pjoin(Ppath, 'matrix.f') 3238 calls, ncolor = \ 3239 self.write_matrix_element_v4(writers.FortranWriter(filename), 3240 matrix_element, fortran_model, subproc_number = me_number) 3241 3242 filename = pjoin(Ppath, 'auto_dsig.f') 3243 self.write_auto_dsig_file(writers.FortranWriter(filename), 3244 matrix_element) 3245 3246 filename = pjoin(Ppath, 'configs.inc') 3247 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3248 writers.FortranWriter(filename), 3249 matrix_element) 3250 3251 filename = pjoin(Ppath, 'config_nqcd.inc') 3252 self.write_config_nqcd_file(writers.FortranWriter(filename), 3253 nqcd_list) 3254 3255 filename = pjoin(Ppath, 'config_subproc_map.inc') 3256 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3257 s_and_t_channels) 3258 3259 filename = pjoin(Ppath, 'coloramps.inc') 3260 self.write_coloramps_file(writers.FortranWriter(filename), 3261 mapconfigs, 3262 matrix_element) 3263 3264 filename = pjoin(Ppath, 'get_color.f') 3265 self.write_colors_file(writers.FortranWriter(filename), 3266 matrix_element) 3267 3268 filename = pjoin(Ppath, 'decayBW.inc') 3269 self.write_decayBW_file(writers.FortranWriter(filename), 3270 s_and_t_channels) 3271 3272 filename = pjoin(Ppath, 'dname.mg') 3273 self.write_dname_file(writers.FileWriter(filename), 3274 "P"+matrix_element.get('processes')[0].shell_string()) 3275 3276 filename = pjoin(Ppath, 'iproc.dat') 3277 self.write_iproc_file(writers.FortranWriter(filename), 3278 me_number) 3279 3280 filename = pjoin(Ppath, 'leshouche.inc') 3281 self.write_leshouche_file(writers.FortranWriter(filename), 3282 matrix_element) 3283 3284 filename = pjoin(Ppath, 'maxamps.inc') 3285 self.write_maxamps_file(writers.FortranWriter(filename), 3286 len(matrix_element.get('diagrams')), 3287 ncolor, 3288 len(matrix_element.get('processes')), 3289 1) 3290 3291 filename = pjoin(Ppath, 'mg.sym') 3292 self.write_mg_sym_file(writers.FortranWriter(filename), 3293 matrix_element) 3294 3295 filename = pjoin(Ppath, 'ncombs.inc') 3296 self.write_ncombs_file(writers.FortranWriter(filename), 3297 nexternal) 3298 3299 filename = pjoin(Ppath, 'nexternal.inc') 3300 self.write_nexternal_file(writers.FortranWriter(filename), 3301 nexternal, ninitial) 3302 3303 filename = 
pjoin(Ppath, 'ngraphs.inc') 3304 self.write_ngraphs_file(writers.FortranWriter(filename), 3305 len(mapconfigs)) 3306 3307 3308 filename = pjoin(Ppath, 'pmass.inc') 3309 self.write_pmass_file(writers.FortranWriter(filename), 3310 matrix_element) 3311 3312 filename = pjoin(Ppath, 'props.inc') 3313 self.write_props_file(writers.FortranWriter(filename), 3314 matrix_element, 3315 s_and_t_channels) 3316 3317 # Find config symmetries and permutations 3318 symmetry, perms, ident_perms = \ 3319 diagram_symmetry.find_symmetry(matrix_element) 3320 3321 filename = pjoin(Ppath, 'symswap.inc') 3322 self.write_symswap_file(writers.FortranWriter(filename), 3323 ident_perms) 3324 3325 filename = pjoin(Ppath, 'symfact_orig.dat') 3326 self.write_symfact_file(open(filename, 'w'), symmetry) 3327 3328 # Generate diagrams 3329 filename = pjoin(Ppath, "matrix.ps") 3330 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3331 get('diagrams'), 3332 filename, 3333 model=matrix_element.get('processes')[0].\ 3334 get('model'), 3335 amplitude=True) 3336 logger.info("Generating Feynman diagrams for " + \ 3337 matrix_element.get('processes')[0].nice_string()) 3338 plot.draw() 3339 3340 self.link_files_in_SubProcess(Ppath) 3341 3342 #import nexternal/leshouche in Source 3343 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3344 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3345 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3346 # Return to SubProcesses dir 3347 #os.chdir(os.path.pardir) 3348 3349 # Add subprocess to subproc.mg 3350 filename = pjoin(path, 'subproc.mg') 3351 files.append_to_file(filename, 3352 self.write_subproc, 3353 subprocdir) 3354 3355 # Return to original dir 3356 #os.chdir(cwd) 3357 3358 # Generate info page 3359 gen_infohtml.make_info_html(self.dir_path) 3360 3361 3362 if not calls: 3363 calls = 0 3364 return calls
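# Illustrative sketch (value hypothetical): the directory created above is named after
# the process shell string, so a u u~ > t t~ subprocess would end up in something like
example_subprocdir = "P%s" % "0_uux_ttx"    # -> "P0_uux_ttx"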
3365 3401
3402 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 3403 online = False, compiler='gfortran'):
3404 """Finalize ME v4 directory by creating jpeg diagrams, html 3405 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3406 3407 # indicate that the output type is not grouped 3408 if not isinstance(self, ProcessExporterFortranMEGroup): 3409 self.proc_characteristic['grouped_matrix'] = False 3410 3411 modelname = self.opt['model'] 3412 if modelname == 'mssm' or modelname.startswith('mssm-'): 3413 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3414 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3415 check_param_card.convert_to_mg5card(param_card, mg5_param) 3416 check_param_card.check_valid_param_card(mg5_param) 3417 3418 # Add the combine_events.f modify param_card path/number of @X 3419 filename = pjoin(self.dir_path,'Source','combine_events.f') 3420 try: 3421 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3422 except AttributeError: 3423 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3424 nb_proc = len(set(nb_proc)) 3425 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3426 # Write maxconfigs.inc based on max of ME's/subprocess groups 3427 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3428 self.write_maxconfigs_file(writers.FortranWriter(filename), 3429 matrix_elements) 3430 3431 # Write maxparticles.inc based on max of ME's/subprocess groups 3432 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3433 self.write_maxparticles_file(writers.FortranWriter(filename), 3434 matrix_elements) 3435 3436 # Touch "done" file 3437 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3438 3439 # Check for compiler 3440 self.set_compiler(compiler) 3441 3442 old_pos = os.getcwd() 3443 subpath = pjoin(self.dir_path, 'SubProcesses') 3444 3445 P_dir_list = [proc for proc in os.listdir(subpath) 3446 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3447 3448 devnull = os.open(os.devnull, os.O_RDWR) 3449 # Convert the poscript in jpg files (if authorize) 3450 if makejpg: 3451 try: 3452 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3453 except Exception, error: 3454 pass 3455 logger.info("Generate jpeg diagrams") 3456 for Pdir in P_dir_list: 3457 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3458 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3459 3460 logger.info("Generate web pages") 3461 # Create the WebPage using perl script 3462 3463 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3464 stdout = devnull,cwd=pjoin(self.dir_path)) 3465 3466 #os.chdir(os.path.pardir) 3467 3468 obj = gen_infohtml.make_info_html(self.dir_path) 3469 3470 if online: 3471 nb_channel = obj.rep_rule['nb_gen_diag'] 3472 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3473 #add the information to proc_charac 3474 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3475 3476 # Write command history as proc_card_mg5 3477 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3478 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3479 history.write(output_file) 3480 3481 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3482 stdout = devnull) 3483 3484 #crate the proc_characteristic file 3485 self.create_proc_charac(matrix_elements, history) 3486 3487 # create the run_card 3488 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler) 3489 3490 
misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3491 stdout = devnull) 3492 3493 # Run "make" to generate madevent.tar.gz file 3494 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3495 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3496 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3497 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3498 stdout = devnull, cwd=self.dir_path)
3499 3500 3501 3502 3503 3504 3505 3506 3507 #return to the initial dir 3508 #os.chdir(old_pos) 3509 3510 #=========================================================================== 3511 # write_matrix_element_v4 3512 #===========================================================================
3513 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3514 proc_id = "", config_map = [], subproc_number = ""):
3515 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3516 3517 if not matrix_element.get('processes') or \ 3518 not matrix_element.get('diagrams'): 3519 return 0 3520 3521 if not isinstance(writer, writers.FortranWriter): 3522 raise writers.FortranWriter.FortranWriterError(\ 3523 "writer not FortranWriter") 3524 3525 3526 # Set lowercase/uppercase Fortran code 3527 writers.FortranWriter.downcase = False 3528 3529 # The proc prefix is not used for MadEvent output so it can safely be set 3530 # to an empty string. 3531 replace_dict = {'proc_prefix':''} 3532 3533 # Extract helas calls 3534 helas_calls = fortran_model.get_matrix_element_calls(\ 3535 matrix_element) 3536 3537 replace_dict['helas_calls'] = "\n".join(helas_calls) 3538 3539 3540 # Extract version number and date from VERSION file 3541 info_lines = self.get_mg5_info_lines() 3542 replace_dict['info_lines'] = info_lines 3543 3544 # Extract process info lines 3545 process_lines = self.get_process_info_lines(matrix_element) 3546 replace_dict['process_lines'] = process_lines 3547 3548 # Set proc_id 3549 replace_dict['proc_id'] = proc_id 3550 3551 # Extract ncomb 3552 ncomb = matrix_element.get_helicity_combinations() 3553 replace_dict['ncomb'] = ncomb 3554 3555 # Extract helicity lines 3556 helicity_lines = self.get_helicity_lines(matrix_element) 3557 replace_dict['helicity_lines'] = helicity_lines 3558 3559 # Extract IC line 3560 ic_line = self.get_ic_line(matrix_element) 3561 replace_dict['ic_line'] = ic_line 3562 3563 # Extract overall denominator 3564 # Averaging initial state color, spin, and identical FS particles 3565 den_factor_line = self.get_den_factor_line(matrix_element) 3566 replace_dict['den_factor_line'] = den_factor_line 3567 3568 # Extract ngraphs 3569 ngraphs = matrix_element.get_number_of_amplitudes() 3570 replace_dict['ngraphs'] = ngraphs 3571 3572 # Extract ndiags 3573 ndiags = len(matrix_element.get('diagrams')) 3574 replace_dict['ndiags'] = ndiags 3575 3576 # Set define_iconfigs_lines 3577 replace_dict['define_iconfigs_lines'] = \ 3578 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3579 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3580 3581 if proc_id: 3582 # Set lines for subprocess group version 3583 # Set define_iconfigs_lines 3584 replace_dict['define_iconfigs_lines'] += \ 3585 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3586 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3587 # Set set_amp2_line 3588 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3589 proc_id 3590 else: 3591 # Standard running 3592 # Set set_amp2_line 3593 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3594 3595 # Extract nwavefuncs 3596 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3597 replace_dict['nwavefuncs'] = nwavefuncs 3598 3599 # Extract ncolor 3600 ncolor = max(1, len(matrix_element.get('color_basis'))) 3601 replace_dict['ncolor'] = ncolor 3602 3603 # Extract color data lines 3604 color_data_lines = self.get_color_data_lines(matrix_element) 3605 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3606 3607 3608 # Set the size of Wavefunction 3609 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 3610 replace_dict['wavefunctionsize'] = 18 3611 else: 3612 replace_dict['wavefunctionsize'] = 6 3613 3614 # Extract amp2 lines 3615 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 3616 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 3617 3618 # The JAMP definition depends on the splitting order 3619 
split_orders=matrix_element.get('processes')[0].get('split_orders') 3620 if len(split_orders)>0: 3621 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 3622 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 3623 matrix_element.get('processes')[0],squared_orders) 3624 else: 3625 # Consider the output of a dummy order 'ALL_ORDERS' for which we 3626 # set all amplitude order to weight 1 and only one squared order 3627 # contribution which is of course ALL_ORDERS=2. 3628 squared_orders = [(2,),] 3629 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 3630 replace_dict['chosen_so_configs'] = '.TRUE.' 3631 3632 replace_dict['nAmpSplitOrders']=len(amp_orders) 3633 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 3634 replace_dict['split_order_str_list']=str(split_orders) 3635 replace_dict['nSplitOrders']=max(len(split_orders),1) 3636 amp_so = self.get_split_orders_lines( 3637 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 3638 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 3639 replace_dict['ampsplitorders']='\n'.join(amp_so) 3640 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 3641 3642 3643 # Extract JAMP lines 3644 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 3645 jamp_lines = self.get_JAMP_lines_split_order(\ 3646 matrix_element,amp_orders,split_order_names= 3647 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 3648 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3649 3650 file = open(pjoin(_file_path, \ 3651 'iolibs/template_files/%s' % self.matrix_file)).read() 3652 3653 file = file % replace_dict 3654 3655 # Add the split orders helper functions. 3656 file = file + '\n' + open(pjoin(_file_path, \ 3657 'iolibs/template_files/split_orders_helping_functions.inc'))\ 3658 .read()%replace_dict 3659 # Write the file 3660 writer.writelines(file) 3661 3662 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
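# Minimal sketch of the substitution mechanism used above; the template text and the
# key are hypothetical stand-ins for matrix_madevent_v4.inc and its placeholders.
template_text_example = "      INTEGER NGRAPHS\n      PARAMETER (NGRAPHS=%(ngraphs)d)\n"
matrix_f_example = template_text_example % {'ngraphs': 4}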
3663 3664 #=========================================================================== 3665 # write_auto_dsig_file 3666 #===========================================================================
3667 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3668 """Write the auto_dsig.f file for the differential cross section 3669 calculation, includes pdf call information""" 3670 3671 if not matrix_element.get('processes') or \ 3672 not matrix_element.get('diagrams'): 3673 return 0 3674 3675 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3676 self.proc_characteristic['ninitial'] = ninitial 3677 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 3678 3679 if ninitial < 1 or ninitial > 2: 3680 raise writers.FortranWriter.FortranWriterError, \ 3681 """Need ninitial = 1 or 2 to write auto_dsig file""" 3682 3683 replace_dict = {} 3684 3685 # Extract version number and date from VERSION file 3686 info_lines = self.get_mg5_info_lines() 3687 replace_dict['info_lines'] = info_lines 3688 3689 # Extract process info lines 3690 process_lines = self.get_process_info_lines(matrix_element) 3691 replace_dict['process_lines'] = process_lines 3692 3693 # Set proc_id 3694 replace_dict['proc_id'] = proc_id 3695 replace_dict['numproc'] = 1 3696 3697 # Set dsig_line 3698 if ninitial == 1: 3699 # No conversion, since result of decay should be given in GeV 3700 dsig_line = "pd(0)*dsiguu" 3701 else: 3702 # Convert result (in GeV) to pb 3703 dsig_line = "pd(0)*conv*dsiguu" 3704 3705 replace_dict['dsig_line'] = dsig_line 3706 3707 # Extract pdf lines 3708 pdf_vars, pdf_data, pdf_lines = \ 3709 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3710 replace_dict['pdf_vars'] = pdf_vars 3711 replace_dict['pdf_data'] = pdf_data 3712 replace_dict['pdf_lines'] = pdf_lines 3713 3714 # Lines that differ between subprocess group and regular 3715 if proc_id: 3716 replace_dict['numproc'] = int(proc_id) 3717 replace_dict['passcuts_begin'] = "" 3718 replace_dict['passcuts_end'] = "" 3719 # Set lines for subprocess group version 3720 # Set define_iconfigs_lines 3721 replace_dict['define_subdiag_lines'] = \ 3722 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3723 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3724 replace_dict['cutsdone'] = "" 3725 else: 3726 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3727 replace_dict['passcuts_end'] = "ENDIF" 3728 replace_dict['define_subdiag_lines'] = "" 3729 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 3730 3731 if not isinstance(self, ProcessExporterFortranMEGroup): 3732 ncomb=matrix_element.get_helicity_combinations() 3733 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 3734 else: 3735 replace_dict['read_write_good_hel'] = "" 3736 3737 3738 3739 file = open(pjoin(_file_path, \ 3740 'iolibs/template_files/auto_dsig_v4.inc')).read() 3741 file = file % replace_dict 3742 3743 # Write the file 3744 writer.writelines(file, context={'read_write_good_hel':True})
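# Sketch of the dsig_line choice above (ninitial value hypothetical): decays
# (ninitial == 1) are left in GeV, while 2 -> N cross sections pick up the
# GeV^-2 -> pb conversion factor 'conv'.
ninitial_example = 2
dsig_line_example = "pd(0)*dsiguu" if ninitial_example == 1 else "pd(0)*conv*dsiguu"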
3745 3746 #=========================================================================== 3747 # write_coloramps_file 3748 #===========================================================================
3749 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
3750 """Write the coloramps.inc file for MadEvent""" 3751 3752 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 3753 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 3754 (max(len(matrix_element.get('color_basis').keys()), 1), 3755 len(mapconfigs))) 3756 3757 3758 # Write the file 3759 writer.writelines(lines) 3760 3761 return True
3762 3763 #=========================================================================== 3764 # write_colors_file 3765 #===========================================================================
3766 - def write_colors_file(self, writer, matrix_elements):
3767 """Write the get_color.f file for MadEvent, which returns color 3768 for all particles used in the matrix element.""" 3769 3770 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 3771 matrix_elements = [matrix_elements] 3772 3773 model = matrix_elements[0].get('processes')[0].get('model') 3774 3775 # We need the both particle and antiparticle wf_ids, since the identity 3776 # depends on the direction of the wf. 3777 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3778 for wf in d.get('wavefunctions')],[]) \ 3779 for d in me.get('diagrams')], []) \ 3780 for me in matrix_elements], [])) 3781 3782 leg_ids = set(sum([sum([sum([[l.get('id'), 3783 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 3784 for l in p.get_legs_with_decays()], []) \ 3785 for p in me.get('processes')], []) \ 3786 for me in matrix_elements], [])) 3787 particle_ids = sorted(list(wf_ids.union(leg_ids))) 3788 3789 lines = """function get_color(ipdg) 3790 implicit none 3791 integer get_color, ipdg 3792 3793 if(ipdg.eq.%d)then 3794 get_color=%d 3795 return 3796 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3797 3798 for part_id in particle_ids[1:]: 3799 lines += """else if(ipdg.eq.%d)then 3800 get_color=%d 3801 return 3802 """ % (part_id, model.get_particle(part_id).get_color()) 3803 # Dummy particle for multiparticle vertices with pdg given by 3804 # first code not in the model 3805 lines += """else if(ipdg.eq.%d)then 3806 c This is dummy particle used in multiparticle vertices 3807 get_color=2 3808 return 3809 """ % model.get_first_non_pdg() 3810 lines += """else 3811 write(*,*)'Error: No color given for pdg ',ipdg 3812 get_color=0 3813 return 3814 endif 3815 end 3816 """ 3817 3818 # Write the file 3819 writer.writelines(lines) 3820 3821 return True
3822 3823 #=========================================================================== 3824 # write_config_nqcd_file 3825 #===========================================================================
3826 - def write_config_nqcd_file(self, writer, nqcd_list):
3827 """Write the config_nqcd.inc with the number of QCD couplings 3828 for each config""" 3829 3830 lines = [] 3831 for iconf, n in enumerate(nqcd_list): 3832 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 3833 3834 # Write the file 3835 writer.writelines(lines) 3836 3837 return True
3838 3839 #=========================================================================== 3840 # write_maxconfigs_file 3841 #===========================================================================
3842 - def write_maxconfigs_file(self, writer, matrix_elements):
3843 """Write the maxconfigs.inc file for MadEvent""" 3844 3845 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 3846 maxconfigs = max([me.get_num_configs() for me in \ 3847 matrix_elements.get('matrix_elements')]) 3848 else: 3849 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 3850 3851 lines = "integer lmaxconfigs\n" 3852 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 3853 3854 # Write the file 3855 writer.writelines(lines) 3856 3857 return True
3858 3859 #=========================================================================== 3860 # read_write_good_hel 3861 #===========================================================================
3862 - def read_write_good_hel(self, ncomb):
3863 """return the code to read/write the good_hel common_block""" 3864 3865 convert = {'ncomb' : ncomb} 3866 output = """ 3867 subroutine write_good_hel(stream_id) 3868 implicit none 3869 integer stream_id 3870 INTEGER NCOMB 3871 PARAMETER ( NCOMB=%(ncomb)d) 3872 LOGICAL GOODHEL(NCOMB) 3873 INTEGER NTRY 3874 common/BLOCK_GOODHEL/NTRY,GOODHEL 3875 write(stream_id,*) GOODHEL 3876 return 3877 end 3878 3879 3880 subroutine read_good_hel(stream_id) 3881 implicit none 3882 include 'genps.inc' 3883 integer stream_id 3884 INTEGER NCOMB 3885 PARAMETER ( NCOMB=%(ncomb)d) 3886 LOGICAL GOODHEL(NCOMB) 3887 INTEGER NTRY 3888 common/BLOCK_GOODHEL/NTRY,GOODHEL 3889 read(stream_id,*) GOODHEL 3890 NTRY = MAXTRIES + 1 3891 return 3892 end 3893 3894 subroutine init_good_hel() 3895 implicit none 3896 INTEGER NCOMB 3897 PARAMETER ( NCOMB=%(ncomb)d) 3898 LOGICAL GOODHEL(NCOMB) 3899 INTEGER NTRY 3900 INTEGER I 3901 3902 do i=1,NCOMB 3903 GOODHEL(I) = .false. 3904 enddo 3905 NTRY = 0 3906 end 3907 3908 integer function get_maxsproc() 3909 implicit none 3910 get_maxsproc = 1 3911 return 3912 end 3913 3914 """ % convert 3915 3916 return output
3917 3918 #=========================================================================== 3919 # write_config_subproc_map_file 3920 #===========================================================================
3921 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3922 """Write a dummy config_subproc.inc file for MadEvent""" 3923 3924 lines = [] 3925 3926 for iconfig in range(len(s_and_t_channels)): 3927 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3928 (iconfig + 1)) 3929 3930 # Write the file 3931 writer.writelines(lines) 3932 3933 return True
3934 3935 #=========================================================================== 3936 # write_configs_file 3937 #===========================================================================
3938 - def write_configs_file(self, writer, matrix_element):
3939 """Write the configs.inc file for MadEvent""" 3940 3941 # Extract number of external particles 3942 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3943 3944 model = matrix_element.get('processes')[0].get('model') 3945 configs = [(i+1, d) for (i, d) in \ 3946 enumerate(matrix_element.get('diagrams'))] 3947 mapconfigs = [c[0] for c in configs] 3948 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3949 [[c[1]] for c in configs], 3950 mapconfigs, 3951 nexternal, ninitial, 3952 model)
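# Sketch of the bookkeeping above with dummy diagrams: configs pairs a 1-based config
# number with each diagram, and mapconfigs keeps only those numbers.
dummy_diagrams = ['diag_a', 'diag_b', 'diag_c']            # stand-ins for HelasDiagram objects
configs_example = [(i + 1, d) for (i, d) in enumerate(dummy_diagrams)]
mapconfigs_example = [c[0] for c in configs_example]        # [1, 2, 3]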
3953
3954 #===========================================================================
3955 # write_run_config_file
3956 #===========================================================================
3957 - def write_run_config_file(self, writer):
3958 """Write the run_config.inc file for MadEvent"""
3959
3960 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc')
3961
3962 if self.proc_characteristic['loop_induced']:
3963 job_per_chan = 1
3964 else:
3965 job_per_chan = 5
3966 text = open(path).read() % {'chanperjob': job_per_chan}
3967 writer.write(text)
3968 return True
3969 3970 3971 #=========================================================================== 3972 # write_configs_file_from_diagrams 3973 #===========================================================================
3974 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 3975 nexternal, ninitial, model):
3976 """Write the actual configs.inc file. 3977 3978 configs is the diagrams corresponding to configs (each 3979 diagrams is a list of corresponding diagrams for all 3980 subprocesses, with None if there is no corresponding diagrams 3981 for a given process). 3982 mapconfigs gives the diagram number for each config. 3983 3984 For s-channels, we need to output one PDG for each subprocess in 3985 the subprocess group, in order to be able to pick the right 3986 one for multiprocesses.""" 3987 3988 lines = [] 3989 3990 s_and_t_channels = [] 3991 3992 nqcd_list = [] 3993 3994 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 3995 for config in configs if [d for d in config if d][0].\ 3996 get_vertex_leg_numbers()!=[]] 3997 minvert = min(vert_list) if vert_list!=[] else 0 3998 3999 # Number of subprocesses 4000 nsubprocs = len(configs[0]) 4001 4002 nconfigs = 0 4003 4004 new_pdg = model.get_first_non_pdg() 4005 4006 for iconfig, helas_diags in enumerate(configs): 4007 if any([vert > minvert for vert in 4008 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4009 # Only 3-vertices allowed in configs.inc 4010 continue 4011 nconfigs += 1 4012 4013 # Need s- and t-channels for all subprocesses, including 4014 # those that don't contribute to this config 4015 empty_verts = [] 4016 stchannels = [] 4017 for h in helas_diags: 4018 if h: 4019 # get_s_and_t_channels gives vertices starting from 4020 # final state external particles and working inwards 4021 stchannels.append(h.get('amplitudes')[0].\ 4022 get_s_and_t_channels(ninitial, model, 4023 new_pdg)) 4024 else: 4025 stchannels.append((empty_verts, None)) 4026 4027 # For t-channels, just need the first non-empty one 4028 tchannels = [t for s,t in stchannels if t != None][0] 4029 4030 # For s_and_t_channels (to be used later) use only first config 4031 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4032 tchannels]) 4033 4034 # Make sure empty_verts is same length as real vertices 4035 if any([s for s,t in stchannels]): 4036 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4037 4038 # Reorganize s-channel vertices to get a list of all 4039 # subprocesses for each vertex 4040 schannels = zip(*[s for s,t in stchannels]) 4041 else: 4042 schannels = [] 4043 4044 allchannels = schannels 4045 if len(tchannels) > 1: 4046 # Write out tchannels only if there are any non-trivial ones 4047 allchannels = schannels + tchannels 4048 4049 # Write out propagators for s-channel and t-channel vertices 4050 4051 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4052 # Correspondance between the config and the diagram = amp2 4053 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4054 mapconfigs[iconfig])) 4055 # Number of QCD couplings in this diagram 4056 nqcd = 0 4057 for h in helas_diags: 4058 if h: 4059 try: 4060 nqcd = h.calculate_orders()['QCD'] 4061 except KeyError: 4062 pass 4063 break 4064 else: 4065 continue 4066 4067 nqcd_list.append(nqcd) 4068 4069 for verts in allchannels: 4070 if verts in schannels: 4071 vert = [v for v in verts if v][0] 4072 else: 4073 vert = verts 4074 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4075 last_leg = vert.get('legs')[-1] 4076 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4077 (last_leg.get('number'), nconfigs, len(daughters), 4078 ",".join([str(d) for d in daughters]))) 4079 if verts in schannels: 4080 pdgs = [] 4081 for v in verts: 4082 if v: 4083 pdgs.append(v.get('legs')[-1].get('id')) 4084 else: 4085 pdgs.append(0) 4086 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4087 (last_leg.get('number'), nconfigs, nsubprocs, 4088 ",".join([str(d) for d in pdgs]))) 4089 lines.append("data tprid(%d,%d)/0/" % \ 4090 (last_leg.get('number'), nconfigs)) 4091 elif verts in tchannels[:-1]: 4092 lines.append("data tprid(%d,%d)/%d/" % \ 4093 (last_leg.get('number'), nconfigs, 4094 abs(last_leg.get('id')))) 4095 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4096 (last_leg.get('number'), nconfigs, nsubprocs, 4097 ",".join(['0'] * nsubprocs))) 4098 4099 # Write out number of configs 4100 lines.append("# Number of configs") 4101 lines.append("data mapconfig(0)/%d/" % nconfigs) 4102 4103 # Write the file 4104 writer.writelines(lines) 4105 4106 return s_and_t_channels, nqcd_list
4107 4108 #=========================================================================== 4109 # write_decayBW_file 4110 #===========================================================================
4111 - def write_decayBW_file(self, writer, s_and_t_channels):
4112 """Write the decayBW.inc file for MadEvent""" 4113 4114 lines = [] 4115 4116 booldict = {None: "0", True: "1", False: "2"} 4117 4118 for iconf, config in enumerate(s_and_t_channels): 4119 schannels = config[0] 4120 for vertex in schannels: 4121 # For the resulting leg, pick out whether it comes from 4122 # decay or not, as given by the onshell flag 4123 leg = vertex.get('legs')[-1] 4124 lines.append("data gForceBW(%d,%d)/%s/" % \ 4125 (leg.get('number'), iconf + 1, 4126 booldict[leg.get('onshell')])) 4127 4128 # Write the file 4129 writer.writelines(lines) 4130 4131 return True
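# Sketch of the onshell flag encoding used above (leg and config numbers hypothetical):
# None, True and False map to "0", "1" and "2" respectively.
booldict_example = {None: "0", True: "1", False: "2"}
example_decaybw = "data gForceBW(%d,%d)/%s/" % (-2, 1, booldict_example[True])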
4132 4133 #=========================================================================== 4134 # write_dname_file 4135 #===========================================================================
4136 - def write_dname_file(self, writer, dir_name):
4137 """Write the dname.mg file for MG4""" 4138 4139 line = "DIRNAME=%s" % dir_name 4140 4141 # Write the file 4142 writer.write(line + "\n") 4143 4144 return True
4145 4146 #=========================================================================== 4147 # write_driver 4148 #===========================================================================
4149 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4150 """Write the SubProcess/driver.f file for MG4""" 4151 4152 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4153 4154 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4155 card = 'Source/MODEL/MG5_param.dat' 4156 else: 4157 card = 'param_card.dat' 4158 # Requiring each helicity configuration to be probed by 10 points for 4159 # matrix element before using the resulting grid for MC over helicity 4160 # sampling. 4161 # We multiply this by 2 because each grouped subprocess is called at most 4162 # twice for each IMIRROR. 4163 replace_dict = {'param_card_name':card, 4164 'ncomb':ncomb, 4165 'hel_init_points':n_grouped_proc*10*2} 4166 if v5: 4167 replace_dict['secondparam']=',.true.' 4168 else: 4169 replace_dict['secondparam']='' 4170 4171 text = open(path).read() % replace_dict 4172 4173 writer.write(text) 4174 4175 return True
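# Worked example of the helicity initialisation count above: with, say, 3 grouped
# subprocesses the helicity grid is seeded with 3 * 10 * 2 = 60 points (10 probes per
# helicity configuration, doubled because each grouped subprocess can be called twice
# for each IMIRROR).
n_grouped_proc_example = 3
hel_init_points_example = n_grouped_proc_example * 10 * 2    # 60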
4176 4177 #=========================================================================== 4178 # write_addmothers 4179 #===========================================================================
4180 - def write_addmothers(self, writer):
4181 """Write the SubProcess/addmothers.f""" 4182 4183 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4184 4185 text = open(path).read() % {'iconfig': 'diag_number'} 4186 writer.write(text) 4187 4188 return True
4189 4190 4191 #=========================================================================== 4192 # write_combine_events 4193 #===========================================================================
4194 - def write_combine_events(self, writer, nb_proc=100):
4195 """Write the Source/combine_events.f file for MG4"""
4196
4197 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f')
4198
4199 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
4200 card = 'Source/MODEL/MG5_param.dat'
4201 else:
4202 card = 'param_card.dat'
4203
4204 # set maxpup (number of @X in the process card)
4205
4206 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1}
4207 # the +1 is just a safety margin; it is not strictly needed, but (OM) feels safer with it.
4208 writer.write(text)
4209
4210 return True
4211 4212 4213 #=========================================================================== 4214 # write_symmetry 4215 #===========================================================================
4216 - def write_symmetry(self, writer, v5=True):
4217 """Write the SubProcesses/symmetry.f file for MadEvent"""
4218
4219 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f')
4220
4221 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
4222 card = 'Source/MODEL/MG5_param.dat'
4223 else:
4224 card = 'param_card.dat'
4225 text = open(path).read()
4226
4227 if v5:
4228 text = text % {'param_card_name':card, 'setparasecondarg':''}
4229 else:
4230 text = text % {'param_card_name':card, 'setparasecondarg':',.true.'}
4231 writer.write(text)
4232
4233 return True
4234 4235 4236 4237 4238 #=========================================================================== 4239 # write_iproc_file 4240 #===========================================================================
4241 - def write_iproc_file(self, writer, me_number):
4242 """Write the iproc.dat file for MG4""" 4243 line = "%d" % (me_number + 1) 4244 4245 # Write the file 4246 for line_to_write in writer.write_line(line): 4247 writer.write(line_to_write) 4248 return True
4249 4250 #=========================================================================== 4251 # write_mg_sym_file 4252 #===========================================================================
4253 - def write_mg_sym_file(self, writer, matrix_element):
4254 """Write the mg.sym file for MadEvent.""" 4255 4256 lines = [] 4257 4258 # Extract process with all decays included 4259 final_legs = filter(lambda leg: leg.get('state') == True, 4260 matrix_element.get('processes')[0].get_legs_with_decays()) 4261 4262 ninitial = len(filter(lambda leg: leg.get('state') == False, 4263 matrix_element.get('processes')[0].get('legs'))) 4264 4265 identical_indices = {} 4266 4267 # Extract identical particle info 4268 for i, leg in enumerate(final_legs): 4269 if leg.get('id') in identical_indices: 4270 identical_indices[leg.get('id')].append(\ 4271 i + ninitial + 1) 4272 else: 4273 identical_indices[leg.get('id')] = [i + ninitial + 1] 4274 4275 # Remove keys which have only one particle 4276 for key in identical_indices.keys(): 4277 if len(identical_indices[key]) < 2: 4278 del identical_indices[key] 4279 4280 # Write mg.sym file 4281 lines.append(str(len(identical_indices.keys()))) 4282 for key in identical_indices.keys(): 4283 lines.append(str(len(identical_indices[key]))) 4284 for number in identical_indices[key]: 4285 lines.append(str(number)) 4286 4287 # Write the file 4288 writer.writelines(lines) 4289 4290 return True
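# Sketch of the identical-particle bookkeeping above with hypothetical PDG codes: for a
# 2 -> 4 process with final state (1, 1, -1, 21), the two PDG-1 legs (positions 3 and 4
# once the two initial legs are counted) are grouped and singleton entries are dropped.
final_ids_example = [1, 1, -1, 21]
ninitial_example = 2
identical_example = {}
for i, pdg in enumerate(final_ids_example):
    identical_example.setdefault(pdg, []).append(i + ninitial_example + 1)
identical_example = dict((k, v) for k, v in identical_example.items() if len(v) > 1)
# -> {1: [3, 4]}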
4291
4292 #===========================================================================
4293 # write_default_mg_sym_file
4294 #===========================================================================
4295 - def write_default_mg_sym_file(self, writer):
4296 """Write the mg.sym file for MadEvent.""" 4297 4298 lines = "0" 4299 4300 # Write the file 4301 writer.writelines(lines) 4302 4303 return True
4304 4305 #=========================================================================== 4306 # write_ncombs_file 4307 #===========================================================================
4308 - def write_ncombs_file(self, writer, nexternal):
4309 """Write the ncombs.inc file for MadEvent.""" 4310 4311 # ncomb (used for clustering) is 2^nexternal 4312 file = " integer n_max_cl\n" 4313 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4314 4315 # Write the file 4316 writer.writelines(file) 4317 4318 return True
4319 4320 #=========================================================================== 4321 # write_processes_file 4322 #===========================================================================
4323 - def write_processes_file(self, writer, subproc_group):
4324 """Write the processes.dat file with info about the subprocesses 4325 in this group.""" 4326 4327 lines = [] 4328 4329 for ime, me in \ 4330 enumerate(subproc_group.get('matrix_elements')): 4331 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4332 ",".join(p.base_string() for p in \ 4333 me.get('processes')))) 4334 if me.get('has_mirror_process'): 4335 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4336 for proc in mirror_procs: 4337 legs = copy.copy(proc.get('legs_with_decays')) 4338 legs.insert(0, legs.pop(1)) 4339 proc.set("legs_with_decays", legs) 4340 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4341 mirror_procs)) 4342 else: 4343 lines.append("mirror none") 4344 4345 # Write the file 4346 writer.write("\n".join(lines)) 4347 4348 return True
4349 4350 #=========================================================================== 4351 # write_symswap_file 4352 #===========================================================================
4353 - def write_symswap_file(self, writer, ident_perms):
4354 """Write the file symswap.inc for MG4 by comparing diagrams using
4355 the internal matrix element value functionality."""
4356
4357 lines = []
4358
4359 # Write out lines for the symswap.inc file (used to permute the
4360 # external leg momenta)
4361 for iperm, perm in enumerate(ident_perms):
4362 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \
4363 (iperm+1, ",".join([str(i+1) for i in perm])))
4364 lines.append("data nsym/%d/" % len(ident_perms))
4365
4366 # Write the file
4367 writer.writelines(lines)
4368
4369 return True
4370 4371 #=========================================================================== 4372 # write_symfact_file 4373 #===========================================================================
4374 - def write_symfact_file(self, writer, symmetry):
4375 """Write the file symfact.dat for MG4 by comparing diagrams using
4376 the internal matrix element value functionality."""
4377
4378 pos = max(2, int(math.ceil(math.log10(len(symmetry)))))
4379 form = "%"+str(pos)+"r %"+str(pos+1)+"r"
4380 # Write out one line per configuration with its symmetry factor
4381 # (configurations with a zero entry are skipped)
4382 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0]
4383 # Write the file
4384 writer.write('\n'.join(lines))
4385 writer.write('\n')
4386
4387 return True
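# Worked example of the column-width choice above (symmetry list length hypothetical):
# with 120 configurations, pos = max(2, ceil(log10(120))) = 3, so each symfact.dat line
# uses the format "%3r %4r".
import math
pos_example = max(2, int(math.ceil(math.log10(120))))       # 3
form_example = "%" + str(pos_example) + "r %" + str(pos_example + 1) + "r"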
4388 4389 #=========================================================================== 4390 # write_symperms_file 4391 #===========================================================================
4392 - def write_symperms_file(self, writer, perms):
4393 """Write the symperms.inc file for subprocess group, used for 4394 symmetric configurations""" 4395 4396 lines = [] 4397 for iperm, perm in enumerate(perms): 4398 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4399 (iperm+1, ",".join([str(i+1) for i in perm]))) 4400 4401 # Write the file 4402 writer.writelines(lines) 4403 4404 return True
4405 4406 #=========================================================================== 4407 # write_subproc 4408 #===========================================================================
4409 - def write_subproc(self, writer, subprocdir):
4410 """Append this subprocess to the subproc.mg file for MG4""" 4411 4412 # Write line to file 4413 writer.write(subprocdir + "\n") 4414 4415 return True
4416
4417 #=============================================================================== 4418 # ProcessExporterFortranMEGroup 4419 #=============================================================================== 4420 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4421 """Class to take care of exporting a set of matrix elements to 4422 MadEvent subprocess group format.""" 4423 4424 matrix_file = "matrix_madevent_group_v4.inc" 4425 4426 #=========================================================================== 4427 # generate_subprocess_directory_v4 4428 #===========================================================================
4429 - def generate_subprocess_directory_v4(self, subproc_group, 4430 fortran_model, 4431 group_number):
4432 """Generate the Pn directory for a subprocess group in MadEvent, 4433 including the necessary matrix_N.f files, configs.inc and various 4434 other helper files""" 4435 4436 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 4437 "subproc_group object not SubProcessGroup" 4438 4439 if not self.model: 4440 self.model = subproc_group.get('matrix_elements')[0].\ 4441 get('processes')[0].get('model') 4442 4443 cwd = os.getcwd() 4444 path = pjoin(self.dir_path, 'SubProcesses') 4445 4446 os.chdir(path) 4447 pathdir = os.getcwd() 4448 4449 # Create the directory PN in the specified path 4450 subprocdir = "P%d_%s" % (subproc_group.get('number'), 4451 subproc_group.get('name')) 4452 try: 4453 os.mkdir(subprocdir) 4454 except os.error as error: 4455 logger.warning(error.strerror + " " + subprocdir) 4456 4457 try: 4458 os.chdir(subprocdir) 4459 except os.error: 4460 logger.error('Could not cd to directory %s' % subprocdir) 4461 return 0 4462 4463 logger.info('Creating files in directory %s' % subprocdir) 4464 4465 # Create the matrix.f files, auto_dsig.f files and all inc files 4466 # for all subprocesses in the group 4467 4468 maxamps = 0 4469 maxflows = 0 4470 tot_calls = 0 4471 4472 matrix_elements = subproc_group.get('matrix_elements') 4473 4474 # Add the driver.f, all grouped ME's must share the same number of 4475 # helicity configuration 4476 ncomb = matrix_elements[0].get_helicity_combinations() 4477 for me in matrix_elements[1:]: 4478 if ncomb!=me.get_helicity_combinations(): 4479 raise MadGraph5Error, "All grouped processes must share the "+\ 4480 "same number of helicity configurations." 4481 4482 filename = 'driver.f' 4483 self.write_driver(writers.FortranWriter(filename),ncomb, 4484 n_grouped_proc=len(matrix_elements), v5=False) 4485 4486 for ime, matrix_element in \ 4487 enumerate(matrix_elements): 4488 filename = 'matrix%d.f' % (ime+1) 4489 calls, ncolor = \ 4490 self.write_matrix_element_v4(writers.FortranWriter(filename), 4491 matrix_element, 4492 fortran_model, 4493 proc_id=str(ime+1), 4494 config_map=subproc_group.get('diagram_maps')[ime], 4495 subproc_number=group_number) 4496 4497 filename = 'auto_dsig%d.f' % (ime+1) 4498 self.write_auto_dsig_file(writers.FortranWriter(filename), 4499 matrix_element, 4500 str(ime+1)) 4501 4502 # Keep track of needed quantities 4503 tot_calls += int(calls) 4504 maxflows = max(maxflows, ncolor) 4505 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 4506 4507 # Draw diagrams 4508 filename = "matrix%d.ps" % (ime+1) 4509 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 4510 get('diagrams'), 4511 filename, 4512 model = \ 4513 matrix_element.get('processes')[0].\ 4514 get('model'), 4515 amplitude=True) 4516 logger.info("Generating Feynman diagrams for " + \ 4517 matrix_element.get('processes')[0].nice_string()) 4518 plot.draw() 4519 4520 # Extract number of external particles 4521 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4522 4523 # Generate a list of diagrams corresponding to each configuration 4524 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 4525 # If a subprocess has no diagrams for this config, the number is 0 4526 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 4527 4528 filename = 'auto_dsig.f' 4529 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 4530 subproc_group) 4531 4532 filename = 'coloramps.inc' 4533 self.write_coloramps_file(writers.FortranWriter(filename), 4534 subproc_diagrams_for_config, 4535 maxflows, 4536 matrix_elements) 4537 4538 filename = 'get_color.f' 4539 self.write_colors_file(writers.FortranWriter(filename), 4540 matrix_elements) 4541 4542 filename = 'config_subproc_map.inc' 4543 self.write_config_subproc_map_file(writers.FortranWriter(filename), 4544 subproc_diagrams_for_config) 4545 4546 filename = 'configs.inc' 4547 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 4548 writers.FortranWriter(filename), 4549 subproc_group, 4550 subproc_diagrams_for_config) 4551 4552 filename = 'config_nqcd.inc' 4553 self.write_config_nqcd_file(writers.FortranWriter(filename), 4554 nqcd_list) 4555 4556 filename = 'decayBW.inc' 4557 self.write_decayBW_file(writers.FortranWriter(filename), 4558 s_and_t_channels) 4559 4560 filename = 'dname.mg' 4561 self.write_dname_file(writers.FortranWriter(filename), 4562 subprocdir) 4563 4564 filename = 'iproc.dat' 4565 self.write_iproc_file(writers.FortranWriter(filename), 4566 group_number) 4567 4568 filename = 'leshouche.inc' 4569 self.write_leshouche_file(writers.FortranWriter(filename), 4570 subproc_group) 4571 4572 filename = 'maxamps.inc' 4573 self.write_maxamps_file(writers.FortranWriter(filename), 4574 maxamps, 4575 maxflows, 4576 max([len(me.get('processes')) for me in \ 4577 matrix_elements]), 4578 len(matrix_elements)) 4579 4580 # Note that mg.sym is not relevant for this case 4581 filename = 'mg.sym' 4582 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 4583 4584 filename = 'mirrorprocs.inc' 4585 self.write_mirrorprocs(writers.FortranWriter(filename), 4586 subproc_group) 4587 4588 filename = 'ncombs.inc' 4589 self.write_ncombs_file(writers.FortranWriter(filename), 4590 nexternal) 4591 4592 filename = 'nexternal.inc' 4593 self.write_nexternal_file(writers.FortranWriter(filename), 4594 nexternal, ninitial) 4595 4596 filename = 'ngraphs.inc' 4597 self.write_ngraphs_file(writers.FortranWriter(filename), 4598 nconfigs) 4599 4600 filename = 'pmass.inc' 4601 self.write_pmass_file(writers.FortranWriter(filename), 4602 matrix_element) 4603 4604 filename = 'props.inc' 4605 self.write_props_file(writers.FortranWriter(filename), 4606 matrix_element, 4607 s_and_t_channels) 4608 4609 filename = 'processes.dat' 4610 files.write_to_file(filename, 4611 self.write_processes_file, 4612 subproc_group) 4613 4614 # Find config symmetries and permutations 4615 symmetry, perms, ident_perms = \ 4616 diagram_symmetry.find_symmetry(subproc_group) 4617 4618 filename = 'symswap.inc' 4619 self.write_symswap_file(writers.FortranWriter(filename), 4620 ident_perms) 4621 4622 filename = 'symfact_orig.dat' 4623 self.write_symfact_file(open(filename, 'w'), symmetry) 4624 4625 filename = 'symperms.inc' 4626 self.write_symperms_file(writers.FortranWriter(filename), 4627 perms) 4628 4629 # Generate jpgs -> pass in make_html 4630 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 4631 4632 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 4633 4634 #import nexternal/leshouch in Source 4635 ln('nexternal.inc', '../../Source', log=False) 4636 ln('leshouche.inc', '../../Source', log=False) 
4637 ln('maxamps.inc', '../../Source', log=False)
4638
4639 # Return to SubProcesses dir
4640 os.chdir(pathdir)
4641
4642 # Add subprocess to subproc.mg
4643 filename = 'subproc.mg'
4644 files.append_to_file(filename,
4645 self.write_subproc,
4646 subprocdir)
4647
4648 # Return to original dir
4649 os.chdir(cwd)
4650
4651 if not tot_calls:
4652 tot_calls = 0
4653 return tot_calls
4654 4655 #=========================================================================== 4656 # write_super_auto_dsig_file 4657 #===========================================================================
4658 - def write_super_auto_dsig_file(self, writer, subproc_group):
4659 """Write the auto_dsig.f file selecting between the subprocesses 4660 in subprocess group mode""" 4661 4662 replace_dict = {} 4663 4664 # Extract version number and date from VERSION file 4665 info_lines = self.get_mg5_info_lines() 4666 replace_dict['info_lines'] = info_lines 4667 4668 matrix_elements = subproc_group.get('matrix_elements') 4669 4670 # Extract process info lines 4671 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 4672 matrix_elements]) 4673 replace_dict['process_lines'] = process_lines 4674 4675 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 4676 replace_dict['nexternal'] = nexternal 4677 4678 replace_dict['nsprocs'] = 2*len(matrix_elements) 4679 4680 # Generate dsig definition line 4681 dsig_def_line = "DOUBLE PRECISION " + \ 4682 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 4683 range(len(matrix_elements))]) 4684 replace_dict["dsig_def_line"] = dsig_def_line 4685 4686 # Generate dsig process lines 4687 call_dsig_proc_lines = [] 4688 for iproc in range(len(matrix_elements)): 4689 call_dsig_proc_lines.append(\ 4690 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 4691 {"num": iproc + 1, 4692 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 4693 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 4694 4695 ncomb=matrix_elements[0].get_helicity_combinations() 4696 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4697 4698 file = open(pjoin(_file_path, \ 4699 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 4700 file = file % replace_dict 4701 4702 # Write the file 4703 writer.writelines(file)
4704 4705 #=========================================================================== 4706 # write_mirrorprocs 4707 #===========================================================================
4708 - def write_mirrorprocs(self, writer, subproc_group):
4709 """Write the mirrorprocs.inc file determining which processes have 4710 IS mirror process in subprocess group mode.""" 4711 4712 lines = [] 4713 bool_dict = {True: '.true.', False: '.false.'} 4714 matrix_elements = subproc_group.get('matrix_elements') 4715 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 4716 (len(matrix_elements), 4717 ",".join([bool_dict[me.get('has_mirror_process')] for \ 4718 me in matrix_elements]))) 4719 # Write the file 4720 writer.writelines(lines)
4721 4722 #=========================================================================== 4723 # write_addmothers 4724 #===========================================================================
4725 - def write_addmothers(self, writer):
4726 """Write the SubProcess/addmothers.f""" 4727 4728 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4729 4730 text = open(path).read() % {'iconfig': 'lconfig'} 4731 writer.write(text) 4732 4733 return True
4734 4735 4736 #=========================================================================== 4737 # write_coloramps_file 4738 #===========================================================================
4739 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 4740 matrix_elements):
4741 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 4742 4743 # Create a map from subprocess (matrix element) to a list of 4744 # the diagrams corresponding to each config 4745 4746 lines = [] 4747 4748 subproc_to_confdiag = {} 4749 for config in diagrams_for_config: 4750 for subproc, diag in enumerate(config): 4751 try: 4752 subproc_to_confdiag[subproc].append(diag) 4753 except KeyError: 4754 subproc_to_confdiag[subproc] = [diag] 4755 4756 for subproc in sorted(subproc_to_confdiag.keys()): 4757 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 4758 matrix_elements[subproc], 4759 subproc + 1)) 4760 4761 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 4762 (maxflows, 4763 len(diagrams_for_config), 4764 len(matrix_elements))) 4765 4766 # Write the file 4767 writer.writelines(lines) 4768 4769 return True
4770 4771 #=========================================================================== 4772 # write_config_subproc_map_file 4773 #===========================================================================
4774 - def write_config_subproc_map_file(self, writer, config_subproc_map):
4775 """Write the config_subproc_map.inc file for subprocess groups""" 4776 4777 lines = [] 4778 # Output only configs that have some corresponding diagrams 4779 iconfig = 0 4780 for config in config_subproc_map: 4781 if set(config) == set([0]): 4782 continue 4783 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 4784 (iconfig + 1, len(config), 4785 ",".join([str(i) for i in config]))) 4786 iconfig += 1 4787 # Write the file 4788 writer.writelines(lines) 4789 4790 return True
4791 4792 #=========================================================================== 4793 # read_write_good_hel 4794 #===========================================================================
4795 - def read_write_good_hel(self, ncomb):
4796 """return the code to read/write the good_hel common_block""" 4797 4798 convert = {'ncomb' : ncomb} 4799 4800 output = """ 4801 subroutine write_good_hel(stream_id) 4802 implicit none 4803 integer stream_id 4804 INTEGER NCOMB 4805 PARAMETER ( NCOMB=%(ncomb)d) 4806 LOGICAL GOODHEL(NCOMB, 2) 4807 INTEGER NTRY(2) 4808 common/BLOCK_GOODHEL/NTRY,GOODHEL 4809 write(stream_id,*) GOODHEL 4810 return 4811 end 4812 4813 4814 subroutine read_good_hel(stream_id) 4815 implicit none 4816 include 'genps.inc' 4817 integer stream_id 4818 INTEGER NCOMB 4819 PARAMETER ( NCOMB=%(ncomb)d) 4820 LOGICAL GOODHEL(NCOMB, 2) 4821 INTEGER NTRY(2) 4822 common/BLOCK_GOODHEL/NTRY,GOODHEL 4823 read(stream_id,*) GOODHEL 4824 NTRY(1) = MAXTRIES + 1 4825 NTRY(2) = MAXTRIES + 1 4826 return 4827 end 4828 4829 subroutine init_good_hel() 4830 implicit none 4831 INTEGER NCOMB 4832 PARAMETER ( NCOMB=%(ncomb)d) 4833 LOGICAL GOODHEL(NCOMB, 2) 4834 INTEGER NTRY(2) 4835 INTEGER I 4836 4837 do i=1,NCOMB 4838 GOODHEL(I,1) = .false. 4839 GOODHEL(I,2) = .false. 4840 enddo 4841 NTRY(1) = 0 4842 NTRY(2) = 0 4843 end 4844 4845 integer function get_maxsproc() 4846 implicit none 4847 include 'maxamps.inc' 4848 4849 get_maxsproc = maxsproc 4850 return 4851 end 4852 4853 """ % convert 4854 4855 return output
4856 4857 4858 4859 #=========================================================================== 4860 # write_configs_file 4861 #===========================================================================
4862 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
4863 """Write the configs.inc file with topology information for a 4864 subprocess group. Use the first subprocess with a diagram for each 4865 configuration.""" 4866 4867 matrix_elements = subproc_group.get('matrix_elements') 4868 model = matrix_elements[0].get('processes')[0].get('model') 4869 4870 diagrams = [] 4871 config_numbers = [] 4872 for iconfig, config in enumerate(diagrams_for_config): 4873 # Check if any diagrams correspond to this config 4874 if set(config) == set([0]): 4875 continue 4876 subproc_diags = [] 4877 for s,d in enumerate(config): 4878 if d: 4879 subproc_diags.append(matrix_elements[s].\ 4880 get('diagrams')[d-1]) 4881 else: 4882 subproc_diags.append(None) 4883 diagrams.append(subproc_diags) 4884 config_numbers.append(iconfig + 1) 4885 4886 # Extract number of external particles 4887 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 4888 4889 return len(diagrams), \ 4890 self.write_configs_file_from_diagrams(writer, diagrams, 4891 config_numbers, 4892 nexternal, ninitial, 4893 model)
4894 4895 #=========================================================================== 4896 # write_run_configs_file 4897 #===========================================================================
4898 - def write_run_config_file(self, writer):
4899 """Write the run_configs.inc file for MadEvent""" 4900 4901 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4902 if self.proc_characteristic['loop_induced']: 4903 job_per_chan = 1 4904 else: 4905 job_per_chan = 2 4906 text = open(path).read() % {'chanperjob':job_per_chan} 4907 writer.write(text) 4908 return True
4909 4910 4911 #=========================================================================== 4912 # write_leshouche_file 4913 #===========================================================================
4914 - def write_leshouche_file(self, writer, subproc_group):
4915 """Write the leshouche.inc file for MG4""" 4916 4917 all_lines = [] 4918 4919 for iproc, matrix_element in \ 4920 enumerate(subproc_group.get('matrix_elements')): 4921 all_lines.extend(self.get_leshouche_lines(matrix_element, 4922 iproc)) 4923 4924 # Write the file 4925 writer.writelines(all_lines) 4926 4927 return True
4928 4929 4930
4931 - def finalize_v4_directory(self,*args, **opts):
4932 """Finalize the v4 directory as usual, then flag that this output uses grouped matrix elements.""" 4933 4934 4935 super(ProcessExporterFortranMEGroup, self).finalize_v4_directory(*args, **opts) 4936 # ensure that the grouping information is set to the correct value 4937 self.proc_characteristic['grouped_matrix'] = True
4938 4939 4940 #=============================================================================== 4941 # UFO_model_to_mg4 4942 #=============================================================================== 4943 4944 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
4945 4946 -class UFO_model_to_mg4(object):
4947 """ A converter of the UFO-MG5 Model to the MG4 format """ 4948 4949 # The list below shows the only variables the user is allowed to change by 4950 # himself for each PS point. If he changes any other, then calling 4951 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 4952 # correctly account for the change. 4953 PS_dependent_key = ['aS','MU_R'] 4954 mp_complex_format = 'complex*32' 4955 mp_real_format = 'real*16' 4956 # Warning, it is crucial none of the couplings/parameters of the model 4957 # starts with this prefix. I should add a check for this. 4958 # You can change it as the global variable to check_param_card.ParamCard 4959 mp_prefix = check_param_card.ParamCard.mp_prefix 4960
4961 - def __init__(self, model, output_path, opt=None):
4962 """ initialization of the objects """ 4963 4964 self.model = model 4965 self.model_name = model['name'] 4966 self.dir_path = output_path 4967 if opt: 4968 self.opt = opt 4969 else: 4970 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 4971 'loop_induced': False} 4972 4973 self.coups_dep = [] # (name, expression, type) 4974 self.coups_indep = [] # (name, expression, type) 4975 self.params_dep = [] # (name, expression, type) 4976 self.params_indep = [] # (name, expression, type) 4977 self.params_ext = [] # external parameter 4978 self.p_to_f = parsers.UFOExpressionParserFortran() 4979 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
4980
4981 - def pass_parameter_to_case_insensitive(self):
4982 """modify the parameter if some of them are identical up to the case""" 4983 4984 lower_dict={} 4985 duplicate = set() 4986 keys = self.model['parameters'].keys() 4987 for key in keys: 4988 for param in self.model['parameters'][key]: 4989 lower_name = param.name.lower() 4990 if not lower_name: 4991 continue 4992 try: 4993 lower_dict[lower_name].append(param) 4994 except KeyError,error: 4995 lower_dict[lower_name] = [param] 4996 else: 4997 duplicate.add(lower_name) 4998 logger.debug('%s is define both as lower case and upper case.' 4999 % lower_name) 5000 if not duplicate: 5001 return 5002 5003 re_expr = r'''\b(%s)\b''' 5004 to_change = [] 5005 change={} 5006 for value in duplicate: 5007 for i, var in enumerate(lower_dict[value][1:]): 5008 to_change.append(var.name) 5009 change[var.name] = '%s__%s' %( var.name.lower(), i+2) 5010 var.name = '%s__%s' %( var.name.lower(), i+2) 5011 5012 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5013 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5014 5015 # change parameters 5016 for key in keys: 5017 if key == ('external',): 5018 continue 5019 for param in self.model['parameters'][key]: 5020 param.expr = rep_pattern.sub(replace, param.expr) 5021 5022 # change couplings 5023 for key in self.model['couplings'].keys(): 5024 for coup in self.model['couplings'][key]: 5025 coup.expr = rep_pattern.sub(replace, coup.expr) 5026 5027 # change mass/width 5028 for part in self.model['particles']: 5029 if str(part.get('mass')) in to_change: 5030 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5031 if str(part.get('width')) in to_change: 5032 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5033
5034 - def refactorize(self, wanted_couplings = []):
5035 """modify the couplings to fit with MG4 convention """ 5036 5037 # Keep only separation in alphaS 5038 keys = self.model['parameters'].keys() 5039 keys.sort(key=len) 5040 for key in keys: 5041 to_add = [o for o in self.model['parameters'][key] if o.name] 5042 5043 if key == ('external',): 5044 self.params_ext += to_add 5045 elif any([(k in key) for k in self.PS_dependent_key]): 5046 self.params_dep += to_add 5047 else: 5048 self.params_indep += to_add 5049 # same for couplings 5050 keys = self.model['couplings'].keys() 5051 keys.sort(key=len) 5052 for key, coup_list in self.model['couplings'].items(): 5053 if any([(k in key) for k in self.PS_dependent_key]): 5054 self.coups_dep += [c for c in coup_list if 5055 (not wanted_couplings or c.name in \ 5056 wanted_couplings)] 5057 else: 5058 self.coups_indep += [c for c in coup_list if 5059 (not wanted_couplings or c.name in \ 5060 wanted_couplings)] 5061 5062 # MG4 use G and not aS as it basic object for alphas related computation 5063 #Pass G in the independant list 5064 if 'G' in self.params_dep: 5065 index = self.params_dep.index('G') 5066 G = self.params_dep.pop(index) 5067 # G.expr = '2*cmath.sqrt(as*pi)' 5068 # self.params_indep.insert(0, self.params_dep.pop(index)) 5069 # No need to add it if not defined 5070 5071 if 'aS' not in self.params_ext: 5072 logger.critical('aS not define as external parameter adding it!') 5073 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5074 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5075 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5076 - def build(self, wanted_couplings = [], full=True):
5077 """modify the couplings to fit with MG4 convention and creates all the 5078 different files""" 5079 5080 self.pass_parameter_to_case_insensitive() 5081 self.refactorize(wanted_couplings) 5082 5083 # write the files 5084 if full: 5085 self.write_all()
5086 5087
5088 - def open(self, name, comment='c', format='default'):
5089 """ Open the file name in the correct directory and with a valid 5090 header.""" 5091 5092 file_path = pjoin(self.dir_path, name) 5093 5094 if format == 'fortran': 5095 fsock = writers.FortranWriter(file_path, 'w') 5096 else: 5097 fsock = open(file_path, 'w') 5098 5099 file.writelines(fsock, comment * 77 + '\n') 5100 file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \ 5101 {'comment': comment + (6 - len(comment)) * ' '}) 5102 file.writelines(fsock, comment * 77 + '\n\n') 5103 return fsock
5104 5105
5106 - def write_all(self):
5107 """ write all the files """ 5108 #write the part related to the external parameter 5109 self.create_ident_card() 5110 self.create_param_read() 5111 5112 #write the definition of the parameter 5113 self.create_input() 5114 self.create_intparam_def(dp=True,mp=False) 5115 if self.opt['mp']: 5116 self.create_intparam_def(dp=False,mp=True) 5117 5118 5119 # definition of the coupling. 5120 self.create_actualize_mp_ext_param_inc() 5121 self.create_coupl_inc() 5122 self.create_write_couplings() 5123 self.create_couplings() 5124 5125 # the makefile 5126 self.create_makeinc() 5127 self.create_param_write() 5128 5129 # The model functions 5130 self.create_model_functions_inc() 5131 self.create_model_functions_def() 5132 5133 # The param_card.dat 5134 self.create_param_card() 5135 5136 5137 # All the standard files 5138 self.copy_standard_file()
5139 5140 ############################################################################ 5141 ## ROUTINE CREATING THE FILES ############################################ 5142 ############################################################################ 5143
5144 - def copy_standard_file(self):
5145 """Copy the standard files for the fortran model.""" 5146 5147 5148 #copy the library files 5149 file_to_link = ['formats.inc','printout.f', \ 5150 'rw_para.f', 'testprog.f'] 5151 5152 for filename in file_to_link: 5153 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5154 self.dir_path) 5155 5156 file = open(os.path.join(MG5DIR,\ 5157 'models/template_files/fortran/rw_para.f')).read() 5158 5159 includes=["include \'coupl.inc\'","include \'input.inc\'"] 5160 if self.opt['mp']: 5161 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5162 # In standalone and madloop we do no use the compiled param card but 5163 # still parse the .dat one so we must load it. 5164 if self.opt['loop_induced']: 5165 #loop induced follow MadEvent way to handle the card. 5166 load_card = '' 5167 lha_read_filename='lha_read.f' 5168 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5169 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5170 lha_read_filename='lha_read_mp.f' 5171 elif self.opt['export_format'].startswith('standalone') or self.opt['export_format'] in ['madweight']\ 5172 or self.opt['export_format'].startswith('matchbox'): 5173 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5174 lha_read_filename='lha_read.f' 5175 else: 5176 load_card = '' 5177 lha_read_filename='lha_read.f' 5178 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5179 os.path.join(self.dir_path,'lha_read.f')) 5180 5181 file=file%{'includes':'\n '.join(includes), 5182 'load_card':load_card} 5183 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5184 writer.writelines(file) 5185 writer.close() 5186 5187 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5188 or self.opt['loop_induced']: 5189 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5190 self.dir_path + '/makefile') 5191 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5192 path = pjoin(self.dir_path, 'makefile') 5193 text = open(path).read() 5194 text = text.replace('madevent','aMCatNLO') 5195 open(path, 'w').writelines(text) 5196 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5197 'madloop','madloop_optimized', 'standalone_rw', 'madweight','matchbox','madloop_matchbox']: 5198 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5199 self.dir_path + '/makefile') 5200 #elif self.opt['export_format'] in []: 5201 #pass 5202 else: 5203 raise MadGraph5Error('Unknown format')
5204
5205 - def create_coupl_inc(self):
5206 """ write coupling.inc """ 5207 5208 fsock = self.open('coupl.inc', format='fortran') 5209 if self.opt['mp']: 5210 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5211 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5212 format='fortran') 5213 5214 # Write header 5215 header = """double precision G 5216 common/strong/ G 5217 5218 double complex gal(2) 5219 common/weak/ gal 5220 5221 double precision MU_R 5222 common/rscale/ MU_R 5223 5224 double precision Nf 5225 parameter(Nf=%d) 5226 """ % self.model.get_nflav() 5227 5228 fsock.writelines(header) 5229 5230 if self.opt['mp']: 5231 header = """%(real_mp_format)s %(mp_prefix)sG 5232 common/MP_strong/ %(mp_prefix)sG 5233 5234 %(complex_mp_format)s %(mp_prefix)sgal(2) 5235 common/MP_weak/ %(mp_prefix)sgal 5236 5237 %(complex_mp_format)s %(mp_prefix)sMU_R 5238 common/MP_rscale/ %(mp_prefix)sMU_R 5239 5240 """ 5241 5242 5243 5244 5245 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5246 'complex_mp_format':self.mp_complex_format, 5247 'mp_prefix':self.mp_prefix}) 5248 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5249 'complex_mp_format':self.mp_complex_format, 5250 'mp_prefix':''}) 5251 5252 # Write the Mass definition/ common block 5253 masses = set() 5254 widths = set() 5255 if self.opt['complex_mass']: 5256 complex_mass = set() 5257 5258 for particle in self.model.get('particles'): 5259 #find masses 5260 one_mass = particle.get('mass') 5261 if one_mass.lower() != 'zero': 5262 masses.add(one_mass) 5263 5264 # find width 5265 one_width = particle.get('width') 5266 if one_width.lower() != 'zero': 5267 widths.add(one_width) 5268 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5269 complex_mass.add('CMASS_%s' % one_mass) 5270 5271 if masses: 5272 fsock.writelines('double precision '+','.join(masses)+'\n') 5273 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5274 if self.opt['mp']: 5275 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5276 ','.join(masses)+'\n') 5277 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5278 ','.join(masses)+'\n\n') 5279 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5280 self.mp_prefix+m for m in masses])+'\n') 5281 mp_fsock.writelines('common/MP_masses/ '+\ 5282 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5283 5284 if widths: 5285 fsock.writelines('double precision '+','.join(widths)+'\n') 5286 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5287 if self.opt['mp']: 5288 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5289 ','.join(widths)+'\n') 5290 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5291 ','.join(widths)+'\n\n') 5292 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5293 self.mp_prefix+w for w in widths])+'\n') 5294 mp_fsock.writelines('common/MP_widths/ '+\ 5295 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5296 5297 # Write the Couplings 5298 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5299 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5300 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5301 if self.opt['mp']: 5302 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5303 ','.join(coupling_list)+'\n') 5304 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5305 ','.join(coupling_list)+'\n\n') 5306 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5307 self.mp_prefix+c for c in coupling_list])+'\n') 5308 mp_fsock.writelines('common/MP_couplings/ '+\ 
5309 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5310 5311 # Write complex mass for complex mass scheme (if activated) 5312 if self.opt['complex_mass'] and complex_mass: 5313 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5314 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5315 if self.opt['mp']: 5316 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5317 ','.join(complex_mass)+'\n') 5318 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5319 ','.join(complex_mass)+'\n\n') 5320 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5321 self.mp_prefix+cm for cm in complex_mass])+'\n') 5322 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5323 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5324
5325 - def create_write_couplings(self):
5326 """ write the file coupl_write.inc """ 5327 5328 fsock = self.open('coupl_write.inc', format='fortran') 5329 5330 fsock.writelines("""write(*,*) ' Couplings of %s' 5331 write(*,*) ' ---------------------------------' 5332 write(*,*) ' '""" % self.model_name) 5333 def format(coupl): 5334 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5335 5336 # Write the Couplings 5337 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5338 fsock.writelines('\n'.join(lines)) 5339 5340
5341 - def create_input(self):
5342 """create input.inc containing the definition of the parameters""" 5343 5344 fsock = self.open('input.inc', format='fortran') 5345 if self.opt['mp']: 5346 mp_fsock = self.open('mp_input.inc', format='fortran') 5347 5348 #find mass/ width since they are already define 5349 already_def = set() 5350 for particle in self.model.get('particles'): 5351 already_def.add(particle.get('mass').lower()) 5352 already_def.add(particle.get('width').lower()) 5353 if self.opt['complex_mass']: 5354 already_def.add('cmass_%s' % particle.get('mass').lower()) 5355 5356 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5357 name.lower() not in already_def 5358 5359 real_parameters = [param.name for param in self.params_dep + 5360 self.params_indep if param.type == 'real' 5361 and is_valid(param.name)] 5362 5363 real_parameters += [param.name for param in self.params_ext 5364 if param.type == 'real'and 5365 is_valid(param.name)] 5366 5367 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5368 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5369 if self.opt['mp']: 5370 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5371 self.mp_prefix+p for p in real_parameters])+'\n') 5372 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5373 self.mp_prefix+p for p in real_parameters])+'\n\n') 5374 5375 complex_parameters = [param.name for param in self.params_dep + 5376 self.params_indep if param.type == 'complex' and 5377 is_valid(param.name)] 5378 5379 if complex_parameters: 5380 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 5381 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5382 if self.opt['mp']: 5383 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5384 self.mp_prefix+p for p in complex_parameters])+'\n') 5385 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5386 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5387
5388 - def create_intparam_def(self, dp=True, mp=False):
5389 """ create intparam_definition.inc setting the internal parameters. 5390 Output the double precision and/or the multiple precision parameters 5391 depending on the parameters dp and mp. If mp only, then the file names 5392 get the 'mp_' prefix. 5393 """ 5394 5395 fsock = self.open('%sintparam_definition.inc'% 5396 ('mp_' if mp and not dp else ''), format='fortran') 5397 5398 fsock.write_comments(\ 5399 "Parameters that should not be recomputed event by event.\n") 5400 fsock.writelines("if(readlha) then\n") 5401 if dp: 5402 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 5403 if mp: 5404 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 5405 for param in self.params_indep: 5406 if param.name == 'ZERO': 5407 continue 5408 if dp: 5409 fsock.writelines("%s = %s\n" % (param.name, 5410 self.p_to_f.parse(param.expr))) 5411 if mp: 5412 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5413 self.mp_p_to_f.parse(param.expr))) 5414 5415 fsock.writelines('endif') 5416 5417 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 5418 if dp: 5419 fsock.writelines("aS = G**2/4/pi\n") 5420 if mp: 5421 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 5422 for param in self.params_dep: 5423 if dp: 5424 fsock.writelines("%s = %s\n" % (param.name, 5425 self.p_to_f.parse(param.expr))) 5426 elif mp: 5427 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5428 self.mp_p_to_f.parse(param.expr))) 5429 5430 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 5431 if ('aEWM1',) in self.model['parameters']: 5432 if dp: 5433 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 5434 gal(2) = 1d0 5435 """) 5436 elif mp: 5437 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 5438 %(mp_prefix)sgal(2) = 1d0 5439 """ %{'mp_prefix':self.mp_prefix}) 5440 pass 5441 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 5442 elif ('Gf',) in self.model['parameters']: 5443 if dp: 5444 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*MDL_SW*DSQRT(MDL_Gf) 5445 gal(2) = 1d0 5446 """) 5447 elif mp: 5448 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*MP__MDL_SW*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 5449 %(mp_prefix)sgal(2) = 1d0 5450 """ %{'mp_prefix':self.mp_prefix}) 5451 pass 5452 else: 5453 if dp: 5454 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 5455 fsock.writelines(""" gal(1) = 1d0 5456 gal(2) = 1d0 5457 """) 5458 elif mp: 5459 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 5460 %(mp_prefix)sgal(2) = 1e0_16 5461 """%{'mp_prefix':self.mp_prefix})
5462 5463
5464 - def create_couplings(self):
5465 """ create couplings.f and all couplingsX.f """ 5466 5467 nb_def_by_file = 25 5468 5469 self.create_couplings_main(nb_def_by_file) 5470 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5471 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5472 5473 for i in range(nb_coup_indep): 5474 # For the independent couplings, we compute the double and multiple 5475 # precision ones together 5476 data = self.coups_indep[nb_def_by_file * i: 5477 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5478 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5479 5480 for i in range(nb_coup_dep): 5481 # For the dependent couplings, we compute the double and multiple 5482 # precision ones in separate subroutines. 5483 data = self.coups_dep[nb_def_by_file * i: 5484 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5485 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5486 dp=True,mp=False) 5487 if self.opt['mp']: 5488 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5489 dp=False,mp=True)
5490 5491
5492 - def create_couplings_main(self, nb_def_by_file=25):
5493 """ create couplings.f """ 5494 5495 fsock = self.open('couplings.f', format='fortran') 5496 5497 fsock.writelines("""subroutine coup() 5498 5499 implicit none 5500 double precision PI, ZERO 5501 logical READLHA 5502 parameter (PI=3.141592653589793d0) 5503 parameter (ZERO=0d0)""") 5504 if self.opt['mp']: 5505 fsock.writelines("""%s MP__PI, MP__ZERO 5506 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5507 parameter (MP__ZERO=0e0_16) 5508 include \'mp_input.inc\' 5509 include \'mp_coupl.inc\' 5510 """%self.mp_real_format) 5511 fsock.writelines("""include \'input.inc\' 5512 include \'coupl.inc\' 5513 READLHA = .true. 5514 include \'intparam_definition.inc\'""") 5515 if self.opt['mp']: 5516 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 5517 5518 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5519 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5520 5521 fsock.writelines('\n'.join(\ 5522 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 5523 5524 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5525 5526 fsock.writelines('\n'.join(\ 5527 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5528 for i in range(nb_coup_dep)])) 5529 if self.opt['mp']: 5530 fsock.writelines('\n'.join(\ 5531 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5532 for i in range(nb_coup_dep)])) 5533 fsock.writelines('''\n return \n end\n''') 5534 5535 fsock.writelines("""subroutine update_as_param() 5536 5537 implicit none 5538 double precision PI, ZERO 5539 logical READLHA 5540 parameter (PI=3.141592653589793d0) 5541 parameter (ZERO=0d0)""") 5542 fsock.writelines("""include \'input.inc\' 5543 include \'coupl.inc\' 5544 READLHA = .false.""") 5545 fsock.writelines(""" 5546 include \'intparam_definition.inc\'\n 5547 """) 5548 5549 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5550 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5551 5552 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5553 5554 fsock.writelines('\n'.join(\ 5555 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5556 for i in range(nb_coup_dep)])) 5557 fsock.writelines('''\n return \n end\n''') 5558 5559 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 5560 5561 implicit none 5562 double precision PI 5563 parameter (PI=3.141592653589793d0) 5564 double precision mu_r2, as2""") 5565 fsock.writelines("""include \'input.inc\' 5566 include \'coupl.inc\'""") 5567 fsock.writelines(""" 5568 MU_R = mu_r2 5569 G = SQRT(4.0d0*PI*AS2) 5570 AS = as2 5571 5572 CALL UPDATE_AS_PARAM() 5573 """) 5574 fsock.writelines('''\n return \n end\n''') 5575 5576 if self.opt['mp']: 5577 fsock.writelines("""subroutine mp_update_as_param() 5578 5579 implicit none 5580 logical READLHA""") 5581 fsock.writelines("""%s MP__PI, MP__ZERO 5582 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5583 parameter (MP__ZERO=0e0_16) 5584 include \'mp_input.inc\' 5585 include \'mp_coupl.inc\' 5586 """%self.mp_real_format) 5587 fsock.writelines("""include \'input.inc\' 5588 include \'coupl.inc\' 5589 include \'actualize_mp_ext_params.inc\' 5590 READLHA = .false. 
5591 include \'mp_intparam_definition.inc\'\n 5592 """) 5593 5594 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5595 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5596 5597 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5598 5599 fsock.writelines('\n'.join(\ 5600 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5601 for i in range(nb_coup_dep)])) 5602 fsock.writelines('''\n return \n end\n''')
5603
5604 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
5605 """ create couplings[nb_file].f containing information coming from data. 5606 Outputs the computation of the double precision and/or the multiple 5607 precision couplings depending on the parameters dp and mp. 5608 If mp is True and dp is False, then the prefix 'MP_' is appended to the 5609 filename and subroutine name. 5610 """ 5611 5612 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 5613 nb_file), format='fortran') 5614 fsock.writelines("""subroutine %scoup%s() 5615 5616 implicit none"""%('mp_' if mp and not dp else '',nb_file)) 5617 if dp: 5618 fsock.writelines(""" 5619 double precision PI, ZERO 5620 parameter (PI=3.141592653589793d0) 5621 parameter (ZERO=0d0) 5622 include 'input.inc' 5623 include 'coupl.inc'""") 5624 if mp: 5625 fsock.writelines("""%s MP__PI, MP__ZERO 5626 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5627 parameter (MP__ZERO=0e0_16) 5628 include \'mp_input.inc\' 5629 include \'mp_coupl.inc\' 5630 """%self.mp_real_format) 5631 fsock.writelines(""" 5632 include 'model_functions.inc'""") 5633 for coupling in data: 5634 if dp: 5635 fsock.writelines('%s = %s' % (coupling.name, 5636 self.p_to_f.parse(coupling.expr))) 5637 if mp: 5638 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 5639 self.mp_p_to_f.parse(coupling.expr))) 5640 fsock.writelines('end')
5641
5642 - def create_model_functions_inc(self):
5643 """ Create model_functions.inc which contains the various declarations 5644 of auxiliary functions which might be used in the couplings expressions 5645 """ 5646 5647 fsock = self.open('model_functions.inc', format='fortran') 5648 fsock.writelines("""double complex cond 5649 double complex condif 5650 double complex reglog 5651 double complex arg""") 5652 if self.opt['mp']: 5653 fsock.writelines("""%(complex_mp_format)s mp_cond 5654 %(complex_mp_format)s mp_condif 5655 %(complex_mp_format)s mp_reglog 5656 %(complex_mp_format)s mp_arg"""\ 5657 %{'complex_mp_format':self.mp_complex_format})
5658
5659 - def create_model_functions_def(self):
5660 """ Create model_functions.f which contains the various definitions 5661 of auxiliary functions which might be used in the couplings expressions 5662 Add the functions.f functions for formfactors support 5663 """ 5664 5665 fsock = self.open('model_functions.f', format='fortran') 5666 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 5667 implicit none 5668 double complex condition,truecase,falsecase 5669 if(condition.eq.(0.0d0,0.0d0)) then 5670 cond=truecase 5671 else 5672 cond=falsecase 5673 endif 5674 end 5675 5676 double complex function condif(condition,truecase,falsecase) 5677 implicit none 5678 logical condition 5679 double complex truecase,falsecase 5680 if(condition) then 5681 condif=truecase 5682 else 5683 condif=falsecase 5684 endif 5685 end 5686 5687 double complex function reglog(arg) 5688 implicit none 5689 double complex arg 5690 if(arg.eq.(0.0d0,0.0d0)) then 5691 reglog=(0.0d0,0.0d0) 5692 else 5693 reglog=log(arg) 5694 endif 5695 end 5696 5697 double complex function arg(comnum) 5698 implicit none 5699 double complex comnum 5700 double complex iim 5701 iim = (0.0d0,1.0d0) 5702 if(comnum.eq.(0.0d0,0.0d0)) then 5703 arg=(0.0d0,0.0d0) 5704 else 5705 arg=log(comnum/abs(comnum))/iim 5706 endif 5707 end""") 5708 if self.opt['mp']: 5709 fsock.writelines(""" 5710 5711 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 5712 implicit none 5713 %(complex_mp_format)s condition,truecase,falsecase 5714 if(condition.eq.(0.0e0_16,0.0e0_16)) then 5715 mp_cond=truecase 5716 else 5717 mp_cond=falsecase 5718 endif 5719 end 5720 5721 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 5722 implicit none 5723 logical condition 5724 %(complex_mp_format)s truecase,falsecase 5725 if(condition) then 5726 mp_condif=truecase 5727 else 5728 mp_condif=falsecase 5729 endif 5730 end 5731 5732 %(complex_mp_format)s function mp_reglog(arg) 5733 implicit none 5734 %(complex_mp_format)s arg 5735 if(arg.eq.(0.0e0_16,0.0e0_16)) then 5736 mp_reglog=(0.0e0_16,0.0e0_16) 5737 else 5738 mp_reglog=log(arg) 5739 endif 5740 end 5741 5742 %(complex_mp_format)s function mp_arg(comnum) 5743 implicit none 5744 %(complex_mp_format)s comnum 5745 %(complex_mp_format)s imm 5746 imm = (0.0e0_16,1.0e0_16) 5747 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 5748 mp_arg=(0.0e0_16,0.0e0_16) 5749 else 5750 mp_arg=log(comnum/abs(comnum))/imm 5751 endif 5752 end"""%{'complex_mp_format':self.mp_complex_format}) 5753 5754 #check for the file functions.f 5755 model_path = self.model.get('modelpath') 5756 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 5757 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 5758 input = pjoin(model_path,'Fortran','functions.f') 5759 file.writelines(fsock, open(input).read()) 5760 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 5761 5762 # check for functions define in the UFO model 5763 ufo_fct = self.model.get('functions') 5764 if ufo_fct: 5765 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 5766 for fct in ufo_fct: 5767 # already handle by default 5768 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", 5769 "theta_function", "cond", "reglog", "arg"]: 5770 ufo_fct_template = """ 5771 double complex function %(name)s(%(args)s) 5772 implicit none 5773 double complex %(args)s 5774 %(name)s = %(fct)s 5775 5776 return 5777 end 5778 """ 5779 text = ufo_fct_template % { 5780 'name': fct.name, 5781 'args': ", ".join(fct.arguments), 5782 'fct': self.p_to_f.parse(fct.expr) 5783 } 5784 
fsock.writelines(text) 5785 if self.opt['mp']: 5786 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 5787 for fct in ufo_fct: 5788 # already handle by default 5789 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", 5790 "theta_function", "cond", "reglog", "arg"]: 5791 ufo_fct_template = """ 5792 %(complex_mp_format)s function mp__%(name)s(mp__%(args)s) 5793 implicit none 5794 %(complex_mp_format)s mp__%(args)s 5795 mp__%(name)s = %(fct)s 5796 5797 return 5798 end 5799 """ 5800 text = ufo_fct_template % { 5801 'name': fct.name, 5802 'args': ", mp__".join(fct.arguments), 5803 'fct': self.mp_p_to_f.parse(fct.expr), 5804 'complex_mp_format': self.mp_complex_format 5805 } 5806 fsock.writelines(text) 5807 5808 5809 5810 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
5811 5812 5813
5814 - def create_makeinc(self):
5815 """create makeinc.inc containing the file to compile """ 5816 5817 fsock = self.open('makeinc.inc', comment='#') 5818 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 5819 text += ' model_functions.o ' 5820 5821 nb_coup_indep = 1 + len(self.coups_dep) // 25 5822 nb_coup_dep = 1 + len(self.coups_indep) // 25 5823 couplings_files=['couplings%s.o' % (i+1) \ 5824 for i in range(nb_coup_dep + nb_coup_indep) ] 5825 if self.opt['mp']: 5826 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 5827 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 5828 text += ' '.join(couplings_files) 5829 fsock.writelines(text)
5830
5831 - def create_param_write(self):
5832 """ create param_write """ 5833 5834 fsock = self.open('param_write.inc', format='fortran') 5835 5836 fsock.writelines("""write(*,*) ' External Params' 5837 write(*,*) ' ---------------------------------' 5838 write(*,*) ' '""") 5839 def format(name): 5840 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
5841 5842 # Write the external parameter 5843 lines = [format(param.name) for param in self.params_ext] 5844 fsock.writelines('\n'.join(lines)) 5845 5846 fsock.writelines("""write(*,*) ' Internal Params' 5847 write(*,*) ' ---------------------------------' 5848 write(*,*) ' '""") 5849 lines = [format(data.name) for data in self.params_indep 5850 if data.name != 'ZERO'] 5851 fsock.writelines('\n'.join(lines)) 5852 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 5853 write(*,*) ' ----------------------------------------' 5854 write(*,*) ' '""") 5855 lines = [format(data.name) for data in self.params_dep] 5856 5857 fsock.writelines('\n'.join(lines)) 5858 5859 5860
5861 - def create_ident_card(self):
5862 """ create the ident_card.dat """ 5863 5864 def format(parameter): 5865 """return the line for the ident_card corresponding to this parameter""" 5866 colum = [parameter.lhablock.lower()] + \ 5867 [str(value) for value in parameter.lhacode] + \ 5868 [parameter.name] 5869 if not parameter.name: 5870 return '' 5871 return ' '.join(colum)+'\n'
5872 5873 fsock = self.open('ident_card.dat') 5874 5875 external_param = [format(param) for param in self.params_ext] 5876 fsock.writelines('\n'.join(external_param)) 5877
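For illustration, a hypothetical external parameter in LHA block MASS with code 6 and name MT would yield the ident_card.dat line shown in the sketch below (not module code):

# One ident_card.dat line per external parameter: '<lhablock> <lhacode...> <name>'.
lhablock, lhacode, name = 'MASS', [6], 'MT'   # assumed example parameter
colum = [lhablock.lower()] + [str(value) for value in lhacode] + [name]
print(' '.join(colum))   # mass 6 MT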
5878 - def create_actualize_mp_ext_param_inc(self):
5879 """ create the actualize_mp_ext_params.inc code """ 5880 5881 # In principle one should actualize all external, but for now, it is 5882 # hardcoded that only AS and MU_R can by dynamically changed by the user 5883 # so that we only update those ones. 5884 # Of course, to be on the safe side, one could decide to update all 5885 # external parameters. 5886 update_params_list=[p for p in self.params_ext if p.name in 5887 self.PS_dependent_key] 5888 5889 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 5890 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 5891 for param in update_params_list] 5892 # When read_lha is false, it is G which is taken in input and not AS, so 5893 # this is what should be reset here too. 5894 if 'aS' in [param.name for param in update_params_list]: 5895 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 5896 5897 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 5898 fsock.writelines('\n'.join(res_strings))
5899
5900 - def create_param_read(self):
5901 """create param_read""" 5902 5903 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5904 or self.opt['loop_induced']: 5905 fsock = self.open('param_read.inc', format='fortran') 5906 fsock.writelines(' include \'../param_card.inc\'') 5907 return 5908 5909 def format_line(parameter): 5910 """return the line for the ident_card corresponding to this 5911 parameter""" 5912 template = \ 5913 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 5914 % {'name': parameter.name, 5915 'value': self.p_to_f.parse(str(parameter.value.real))} 5916 if self.opt['mp']: 5917 template = template+ \ 5918 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 5919 "%(mp_prefix)s%(name)s,%(value)s)") \ 5920 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 5921 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 5922 return template 5923 5924 fsock = self.open('param_read.inc', format='fortran') 5925 res_strings = [format_line(param) \ 5926 for param in self.params_ext] 5927 5928 # Correct width sign for Majorana particles (where the width 5929 # and mass need to have the same sign) 5930 for particle in self.model.get('particles'): 5931 if particle.is_fermion() and particle.get('self_antipart') and \ 5932 particle.get('width').lower() != 'zero': 5933 5934 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 5935 {'width': particle.get('width'), 'mass': particle.get('mass')}) 5936 if self.opt['mp']: 5937 res_strings.append(\ 5938 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 5939 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 5940 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 5941 5942 fsock.writelines('\n'.join(res_strings)) 5943
5944 - def create_param_card(self):
5945 """ create the param_card.dat """ 5946 5947 #1. Check if a default param_card is present: 5948 done = False 5949 if hasattr(self.model, 'restrict_card') and isinstance(self.model.restrict_card, str): 5950 restrict_name = os.path.basename(self.model.restrict_card)[9:-4] 5951 model_path = self.model.get('modelpath') 5952 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 5953 done = True 5954 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 5955 pjoin(self.dir_path, 'param_card.dat')) 5956 if not done: 5957 out_path = pjoin(self.dir_path, 'param_card.dat') 5958 param_writer.ParamCardWriter(self.model, out_path) 5959 5960 out_path2 = None 5961 if hasattr(self.model, 'rule_card'): 5962 out_path2 = pjoin(self.dir_path, 'param_card_rule.dat') 5963 self.model.rule_card.write_file(out_path2) 5964 5965 # IF MSSM convert the card to SLAH1 5966 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5967 import models.check_param_card as translator 5968 5969 # Check the format of the param_card for Pythia and make it correct 5970 if out_path2: 5971 translator.make_valid_param_card(out_path, out_path2) 5972 translator.convert_to_slha1(out_path)
5973
5974 -def ExportV4Factory(cmd, noclean, output_type='default'):
5975 """ Determine which Export_v4 class is required. cmd is the command 5976 interface containing all potential usefull information. 5977 The output_type argument specifies from which context the output 5978 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 5979 and 'default' for tree-level outputs.""" 5980 5981 group_subprocesses = cmd.options['group_subprocesses'] 5982 5983 opt = cmd.options 5984 5985 # First treat the MadLoop5 standalone case 5986 MadLoop_SA_options = {'clean': not noclean, 5987 'complex_mass':cmd.options['complex_mass_scheme'], 5988 'export_format':'madloop', 5989 'mp':True, 5990 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 5991 'cuttools_dir': cmd._cuttools_dir, 5992 'iregi_dir':cmd._iregi_dir, 5993 'pjfry_dir':cmd.options["pjfry"], 5994 'golem_dir':cmd.options["golem"], 5995 'fortran_compiler':cmd.options['fortran_compiler'], 5996 'output_dependencies':cmd.options['output_dependencies'], 5997 'SubProc_prefix':'P', 5998 'compute_color_flows':cmd.options['loop_color_flows']} 5999 6000 if output_type.startswith('madloop'): 6001 import madgraph.loop.loop_exporters as loop_exporters 6002 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 6003 ExporterClass=None 6004 if not cmd.options['loop_optimized_output']: 6005 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 6006 else: 6007 if output_type == "madloop": 6008 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 6009 MadLoop_SA_options['export_format'] = 'madloop_optimized' 6010 elif output_type == "madloop_matchbox": 6011 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 6012 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 6013 else: 6014 raise Exception, "output_type not recognize %s" % output_type 6015 return ExporterClass(cmd._mgme_dir, cmd._export_dir, MadLoop_SA_options) 6016 else: 6017 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 6018 ' in %s'%str(cmd._mgme_dir)) 6019 6020 # Then treat the aMC@NLO output 6021 elif output_type=='amcatnlo': 6022 import madgraph.iolibs.export_fks as export_fks 6023 ExporterClass=None 6024 amcatnlo_options = dict(opt) 6025 amcatnlo_options.update(MadLoop_SA_options) 6026 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 6027 if not cmd.options['loop_optimized_output']: 6028 logger.info("Writing out the aMC@NLO code") 6029 ExporterClass = export_fks.ProcessExporterFortranFKS 6030 amcatnlo_options['export_format']='FKS5_default' 6031 else: 6032 logger.info("Writing out the aMC@NLO code, using optimized Loops") 6033 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 6034 amcatnlo_options['export_format']='FKS5_optimized' 6035 return ExporterClass(cmd._mgme_dir, cmd._export_dir, amcatnlo_options) 6036 6037 # Then the default tree-level output 6038 elif output_type=='default': 6039 6040 #check if we need to group processes 6041 if cmd.options['group_subprocesses'] == 'Auto': 6042 if cmd._curr_amps[0].get_ninitial() == 2: 6043 group_subprocesses = True 6044 else: 6045 group_subprocesses = False 6046 6047 assert group_subprocesses in [True, False] 6048 6049 opt = dict(opt) 6050 opt.update({'clean': not noclean, 6051 'complex_mass': cmd.options['complex_mass_scheme'], 6052 'export_format':cmd._export_format, 6053 'mp': False, 6054 'sa_symmetry':False, 6055 'model': cmd._curr_model.get('name') }) 6056 6057 format = cmd._export_format #shortcut 6058 6059 if format in ['standalone_msP', 'standalone_msF', 
'standalone_rw']: 6060 opt['sa_symmetry'] = True 6061 6062 loop_induced_opt = dict(opt) 6063 loop_induced_opt.update(MadLoop_SA_options) 6064 loop_induced_opt['export_format'] = 'madloop_optimized' 6065 loop_induced_opt['SubProc_prefix'] = 'PV' 6066 # For loop_induced output with MadEvent, we must have access to the 6067 # color flows. 6068 loop_induced_opt['compute_color_flows'] = True 6069 for key in opt: 6070 if key not in loop_induced_opt: 6071 loop_induced_opt[key] = opt[key] 6072 6073 if format == 'matrix' or format.startswith('standalone'): 6074 return ProcessExporterFortranSA(cmd._mgme_dir, cmd._export_dir, opt, 6075 format=format) 6076 6077 elif format in ['madevent'] and group_subprocesses: 6078 if isinstance(cmd._curr_amps[0], 6079 loop_diagram_generation.LoopAmplitude): 6080 import madgraph.loop.loop_exporters as loop_exporters 6081 return loop_exporters.LoopInducedExporterMEGroup(cmd._mgme_dir, 6082 cmd._export_dir,loop_induced_opt) 6083 else: 6084 return ProcessExporterFortranMEGroup(cmd._mgme_dir, 6085 cmd._export_dir,opt) 6086 elif format in ['madevent']: 6087 if isinstance(cmd._curr_amps[0], 6088 loop_diagram_generation.LoopAmplitude): 6089 import madgraph.loop.loop_exporters as loop_exporters 6090 return loop_exporters.LoopInducedExporterMENoGroup(cmd._mgme_dir, 6091 cmd._export_dir,loop_induced_opt) 6092 else: 6093 return ProcessExporterFortranME(cmd._mgme_dir, 6094 cmd._export_dir,opt) 6095 elif format in ['matchbox']: 6096 return ProcessExporterFortranMatchBox(cmd._mgme_dir, cmd._export_dir,opt) 6097 elif cmd._export_format in ['madweight'] and group_subprocesses: 6098 6099 return ProcessExporterFortranMWGroup(cmd._mgme_dir, cmd._export_dir, 6100 opt) 6101 elif cmd._export_format in ['madweight']: 6102 return ProcessExporterFortranMW(cmd._mgme_dir, cmd._export_dir, opt) 6103 else: 6104 raise Exception, 'Wrong export_v4 format' 6105 else: 6106 raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
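A condensed, illustrative summary of the tree-level (non loop-induced) dispatch implemented above; class names are returned as strings so the sketch stays standalone:

# Reading aid only: which exporter ExportV4Factory picks for the default
# tree-level output types (loop-induced and aMC@NLO branches omitted).
def tree_level_exporter(export_format, group_subprocesses):
    if export_format == 'matrix' or export_format.startswith('standalone'):
        return 'ProcessExporterFortranSA'
    if export_format == 'madevent':
        return ('ProcessExporterFortranMEGroup' if group_subprocesses
                else 'ProcessExporterFortranME')
    if export_format == 'matchbox':
        return 'ProcessExporterFortranMatchBox'
    if export_format == 'madweight':
        return ('ProcessExporterFortranMWGroup' if group_subprocesses
                else 'ProcessExporterFortranMW')
    raise ValueError('Wrong export_v4 format')

print(tree_level_exporter('madevent', True))     # ProcessExporterFortranMEGroup
print(tree_level_exporter('standalone', False))  # ProcessExporterFortranSA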
6107
6108 6109 6110 6111 #=============================================================================== 6112 # ProcessExporterFortranMWGroup 6113 #=============================================================================== 6114 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
6115 """Class to take care of exporting a set of matrix elements to 6116 MadEvent subprocess group format.""" 6117 6118 matrix_file = "matrix_madweight_group_v4.inc" 6119 6120 #=========================================================================== 6121 # generate_subprocess_directory_v4 6122 #===========================================================================
6123 - def generate_subprocess_directory_v4(self, subproc_group, 6124 fortran_model, 6125 group_number):
6126 """Generate the Pn directory for a subprocess group in MadEvent, 6127 including the necessary matrix_N.f files, configs.inc and various 6128 other helper files""" 6129 6130 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 6131 raise base_objects.PhysicsObject.PhysicsObjectError,\ 6132 "subproc_group object not SubProcessGroup" 6133 6134 if not self.model: 6135 self.model = subproc_group.get('matrix_elements')[0].\ 6136 get('processes')[0].get('model') 6137 6138 pathdir = os.path.join(self.dir_path, 'SubProcesses') 6139 6140 # Create the directory PN in the specified path 6141 subprocdir = "P%d_%s" % (subproc_group.get('number'), 6142 subproc_group.get('name')) 6143 try: 6144 os.mkdir(pjoin(pathdir, subprocdir)) 6145 except os.error as error: 6146 logger.warning(error.strerror + " " + subprocdir) 6147 6148 6149 logger.info('Creating files in directory %s' % subprocdir) 6150 Ppath = pjoin(pathdir, subprocdir) 6151 6152 # Create the matrix.f files, auto_dsig.f files and all inc files 6153 # for all subprocesses in the group 6154 6155 maxamps = 0 6156 maxflows = 0 6157 tot_calls = 0 6158 6159 matrix_elements = subproc_group.get('matrix_elements') 6160 6161 for ime, matrix_element in \ 6162 enumerate(matrix_elements): 6163 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 6164 calls, ncolor = \ 6165 self.write_matrix_element_v4(writers.FortranWriter(filename), 6166 matrix_element, 6167 fortran_model, 6168 str(ime+1), 6169 subproc_group.get('diagram_maps')[\ 6170 ime]) 6171 6172 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 6173 self.write_auto_dsig_file(writers.FortranWriter(filename), 6174 matrix_element, 6175 str(ime+1)) 6176 6177 # Keep track of needed quantities 6178 tot_calls += int(calls) 6179 maxflows = max(maxflows, ncolor) 6180 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 6181 6182 # Draw diagrams 6183 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 6184 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 6185 get('diagrams'), 6186 filename, 6187 model = \ 6188 matrix_element.get('processes')[0].\ 6189 get('model'), 6190 amplitude=True) 6191 logger.info("Generating Feynman diagrams for " + \ 6192 matrix_element.get('processes')[0].nice_string()) 6193 plot.draw() 6194 6195 # Extract number of external particles 6196 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 6197 6198 # Generate a list of diagrams corresponding to each configuration 6199 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 6200 # If a subprocess has no diagrams for this config, the number is 0 6201 6202 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 6203 6204 filename = pjoin(Ppath, 'auto_dsig.f') 6205 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 6206 subproc_group) 6207 6208 filename = pjoin(Ppath,'configs.inc') 6209 nconfigs, s_and_t_channels = self.write_configs_file(\ 6210 writers.FortranWriter(filename), 6211 subproc_group, 6212 subproc_diagrams_for_config) 6213 6214 filename = pjoin(Ppath, 'leshouche.inc') 6215 self.write_leshouche_file(writers.FortranWriter(filename), 6216 subproc_group) 6217 6218 filename = pjoin(Ppath, 'phasespace.inc') 6219 self.write_phasespace_file(writers.FortranWriter(filename), 6220 nconfigs) 6221 6222 6223 filename = pjoin(Ppath, 'maxamps.inc') 6224 self.write_maxamps_file(writers.FortranWriter(filename), 6225 maxamps, 6226 maxflows, 6227 max([len(me.get('processes')) for me in \ 6228 matrix_elements]), 6229 len(matrix_elements)) 6230 6231 filename = pjoin(Ppath, 'mirrorprocs.inc') 6232 self.write_mirrorprocs(writers.FortranWriter(filename), 6233 subproc_group) 6234 6235 filename = pjoin(Ppath, 'nexternal.inc') 6236 self.write_nexternal_file(writers.FortranWriter(filename), 6237 nexternal, ninitial) 6238 6239 filename = pjoin(Ppath, 'pmass.inc') 6240 self.write_pmass_file(writers.FortranWriter(filename), 6241 matrix_element) 6242 6243 filename = pjoin(Ppath, 'props.inc') 6244 self.write_props_file(writers.FortranWriter(filename), 6245 matrix_element, 6246 s_and_t_channels) 6247 6248 # filename = pjoin(Ppath, 'processes.dat') 6249 # files.write_to_file(filename, 6250 # self.write_processes_file, 6251 # subproc_group) 6252 6253 # Generate jpgs -> pass in make_html 6254 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 6255 6256 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 6257 6258 for file in linkfiles: 6259 ln('../%s' % file, cwd=Ppath) 6260 6261 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 6262 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 6263 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 6264 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 6265 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 6266 ln('phasespace.inc', '../', log=True, cwd=Ppath) 6267 if not tot_calls: 6268 tot_calls = 0 6269 return tot_calls
6270 6271 #=========================================================================== 6272 # write_super_auto_dsig_file 6273 #===========================================================================
6274 - def write_super_auto_dsig_file(self, writer, subproc_group):
6275 """Write the auto_dsig.f file selecting between the subprocesses 6276 in subprocess group mode""" 6277 6278 replace_dict = {} 6279 6280 # Extract version number and date from VERSION file 6281 info_lines = self.get_mg5_info_lines() 6282 replace_dict['info_lines'] = info_lines 6283 6284 matrix_elements = subproc_group.get('matrix_elements') 6285 6286 # Extract process info lines 6287 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 6288 matrix_elements]) 6289 replace_dict['process_lines'] = process_lines 6290 6291 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 6292 replace_dict['nexternal'] = nexternal 6293 6294 replace_dict['nsprocs'] = 2*len(matrix_elements) 6295 6296 # Generate dsig definition line 6297 dsig_def_line = "DOUBLE PRECISION " + \ 6298 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 6299 range(len(matrix_elements))]) 6300 replace_dict["dsig_def_line"] = dsig_def_line 6301 6302 # Generate dsig process lines 6303 call_dsig_proc_lines = [] 6304 for iproc in range(len(matrix_elements)): 6305 call_dsig_proc_lines.append(\ 6306 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 6307 {"num": iproc + 1, 6308 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 6309 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 6310 6311 file = open(os.path.join(_file_path, \ 6312 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 6313 file = file % replace_dict 6314 6315 # Write the file 6316 writer.writelines(file)
6317 6318 #=========================================================================== 6319 # write_mirrorprocs 6320 #===========================================================================
6321 - def write_mirrorprocs(self, writer, subproc_group):
6322 """Write the mirrorprocs.inc file determining which processes have 6323 IS mirror process in subprocess group mode.""" 6324 6325 lines = [] 6326 bool_dict = {True: '.true.', False: '.false.'} 6327 matrix_elements = subproc_group.get('matrix_elements') 6328 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 6329 (len(matrix_elements), 6330 ",".join([bool_dict[me.get('has_mirror_process')] for \ 6331 me in matrix_elements]))) 6332 # Write the file 6333 writer.writelines(lines)
6334 6335 #=========================================================================== 6336 # write_configs_file 6337 #===========================================================================
6338 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6339 """Write the configs.inc file with topology information for a 6340 subprocess group. Use the first subprocess with a diagram for each 6341 configuration.""" 6342 6343 matrix_elements = subproc_group.get('matrix_elements') 6344 model = matrix_elements[0].get('processes')[0].get('model') 6345 6346 diagrams = [] 6347 config_numbers = [] 6348 for iconfig, config in enumerate(diagrams_for_config): 6349 # Check if any diagrams correspond to this config 6350 if set(config) == set([0]): 6351 continue 6352 subproc_diags = [] 6353 for s,d in enumerate(config): 6354 if d: 6355 subproc_diags.append(matrix_elements[s].\ 6356 get('diagrams')[d-1]) 6357 else: 6358 subproc_diags.append(None) 6359 diagrams.append(subproc_diags) 6360 config_numbers.append(iconfig + 1) 6361 6362 # Extract number of external particles 6363 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6364 6365 return len(diagrams), \ 6366 self.write_configs_file_from_diagrams(writer, diagrams, 6367 config_numbers, 6368 nexternal, ninitial, 6369 matrix_elements[0],model)
6370 6371 #=========================================================================== 6372 # write_run_configs_file 6373 #===========================================================================
6374 - def write_run_config_file(self, writer):
6375 """Write the run_configs.inc file for MadEvent""" 6376 6377 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 6378 text = open(path).read() % {'chanperjob':'2'} 6379 writer.write(text) 6380 return True
6381 6382 6383 #=========================================================================== 6384 # write_leshouche_file 6385 #===========================================================================
6386 - def write_leshouche_file(self, writer, subproc_group):
6387 """Write the leshouche.inc file for MG4""" 6388 6389 all_lines = [] 6390 6391 for iproc, matrix_element in \ 6392 enumerate(subproc_group.get('matrix_elements')): 6393 all_lines.extend(self.get_leshouche_lines(matrix_element, 6394 iproc)) 6395 6396 # Write the file 6397 writer.writelines(all_lines) 6398 6399 return True
6400