Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30   
  31   
  32  import aloha 
  33   
  34  import madgraph.core.base_objects as base_objects 
  35  import madgraph.core.color_algebra as color 
  36  import madgraph.core.helas_objects as helas_objects 
  37  import madgraph.iolibs.drawing_eps as draw 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.iolibs.group_subprocs as group_subprocs 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  42  import madgraph.iolibs.template_files as template_files 
  43  import madgraph.iolibs.ufo_expression_parsers as parsers 
  44  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  45  import madgraph.various.diagram_symmetry as diagram_symmetry 
  46  import madgraph.various.misc as misc 
  47  import madgraph.various.banner as banner_mod 
  48  import madgraph.various.process_checks as process_checks 
  49  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  50  import aloha.create_aloha as create_aloha 
  51  import models.import_ufo as import_ufo 
  52  import models.write_param_card as param_writer 
  53  import models.check_param_card as check_param_card 
  54   
  55   
  56  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  57  from madgraph.iolibs.files import cp, ln, mv 
  58   
  59  pjoin = os.path.join 
  60   
  61  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  62  logger = logging.getLogger('madgraph.export_v4') 
  63   
  64  default_compiler= {'fortran': 'gfortran', 
  65                         'f2py': 'f2py'} 
66 67 #=============================================================================== 68 # ProcessExporterFortran 69 #=============================================================================== 70 -class ProcessExporterFortran(object):
71 """Class to take care of exporting a set of matrix elements to 72 Fortran (v4) format.""" 73 74 default_opt = {'clean': False, 'complex_mass':False, 75 'export_format':'madevent', 'mp': False 76 } 77
78 - def __init__(self, mgme_dir = "", dir_path = "", opt=None):
79 """Initiate the ProcessExporterFortran with directory information""" 80 self.mgme_dir = mgme_dir 81 self.dir_path = dir_path 82 self.model = None 83 84 self.opt = dict(self.default_opt) 85 if opt: 86 self.opt.update(opt) 87 88 #place holder to pass information to the run_interface 89 self.proc_characteristic = banner_mod.ProcCharacteristic()
90 91 92 #=========================================================================== 93 # process exporter fortran switch between group and not grouped 94 #===========================================================================
95 - def export_processes(self, matrix_elements, fortran_model):
96 """Make the switch between grouped and not grouped output""" 97 98 calls = 0 99 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 100 for (group_number, me_group) in enumerate(matrix_elements): 101 calls = calls + self.generate_subprocess_directory_v4(\ 102 me_group, fortran_model, group_number) 103 else: 104 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 105 calls = calls + self.generate_subprocess_directory_v4(\ 106 me, fortran_model, me_number) 107 108 return calls
109 110 111 112 #=========================================================================== 113 # create the run_card 114 #===========================================================================
115 - def create_run_card(self, matrix_elements, history):
116 """ """ 117 118 run_card = banner_mod.RunCard() 119 120 121 default=True 122 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 123 processes = [me.get('processes') for megroup in matrix_elements 124 for me in megroup['matrix_elements']] 125 elif matrix_elements: 126 processes = [me.get('processes') 127 for me in matrix_elements['matrix_elements']] 128 else: 129 default =False 130 131 if default: 132 run_card.create_default_for_process(self.proc_characteristic, 133 history, 134 processes) 135 136 137 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 138 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
139 140 141 #=========================================================================== 142 # copy the Template in a new directory. 143 #===========================================================================
144 - def copy_v4template(self, modelname):
145 """create the directory run_name as a copy of the MadEvent 146 Template, and clean the directory 147 """ 148 149 #First copy the full template tree if dir_path doesn't exit 150 if not os.path.isdir(self.dir_path): 151 assert self.mgme_dir, \ 152 "No valid MG_ME path given for MG4 run directory creation." 153 logger.info('initialize a new directory: %s' % \ 154 os.path.basename(self.dir_path)) 155 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 156 self.dir_path, True) 157 # distutils.dir_util.copy_tree since dir_path already exists 158 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 159 self.dir_path) 160 # Duplicate run_card and plot_card 161 for card in ['plot_card']: 162 try: 163 shutil.copy(pjoin(self.dir_path, 'Cards', 164 card + '.dat'), 165 pjoin(self.dir_path, 'Cards', 166 card + '_default.dat')) 167 except IOError: 168 logger.warning("Failed to copy " + card + ".dat to default") 169 elif os.getcwd() == os.path.realpath(self.dir_path): 170 logger.info('working in local directory: %s' % \ 171 os.path.realpath(self.dir_path)) 172 # distutils.dir_util.copy_tree since dir_path already exists 173 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 174 self.dir_path) 175 # for name in glob.glob(pjoin(self.mgme_dir, 'Template/LO/*')): 176 # name = os.path.basename(name) 177 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 178 # if os.path.isfile(filename): 179 # files.cp(filename, pjoin(self.dir_path,name)) 180 # elif os.path.isdir(filename): 181 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 182 # distutils.dir_util.copy_tree since dir_path already exists 183 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 184 self.dir_path) 185 # Duplicate run_card and plot_card 186 for card in ['plot_card']: 187 try: 188 shutil.copy(pjoin(self.dir_path, 'Cards', 189 card + '.dat'), 190 pjoin(self.dir_path, 'Cards', 191 card + '_default.dat')) 192 except IOError: 193 logger.warning("Failed to copy " + card + ".dat to default") 194 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 195 assert self.mgme_dir, \ 196 "No valid MG_ME path given for MG4 run directory creation." 197 try: 198 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 199 except IOError: 200 MG5_version = misc.get_pkg_info() 201 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 202 "5." + MG5_version['version']) 203 204 #Ensure that the Template is clean 205 if self.opt['clean']: 206 logger.info('remove old information in %s' % \ 207 os.path.basename(self.dir_path)) 208 if os.environ.has_key('MADGRAPH_BASE'): 209 misc.call([pjoin('bin', 'internal', 'clean_template'), 210 '--web'], cwd=self.dir_path) 211 else: 212 try: 213 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 214 cwd=self.dir_path) 215 except Exception, why: 216 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 217 % (os.path.basename(self.dir_path),why)) 218 219 #Write version info 220 MG_version = misc.get_pkg_info() 221 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 222 MG_version['version']) 223 224 225 # add the makefile in Source directory 226 filename = pjoin(self.dir_path,'Source','makefile') 227 self.write_source_makefile(writers.FileWriter(filename)) 228 229 # add the DiscreteSampler information 230 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 231 pjoin(self.dir_path, 'Source')) 232 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 233 pjoin(self.dir_path, 'Source')) 234 235 # We need to create the correct open_data for the pdf 236 self.write_pdf_opendata()
237 238 239 240 241 #=========================================================================== 242 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 243 #===========================================================================
244 - def write_procdef_mg5(self, file_pos, modelname, process_str):
245 """ write an equivalent of the MG4 proc_card in order that all the Madevent 246 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 247 248 proc_card_template = template_files.mg4_proc_card.mg4_template 249 process_template = template_files.mg4_proc_card.process_template 250 process_text = '' 251 coupling = '' 252 new_process_content = [] 253 254 255 # First find the coupling and suppress the coupling from process_str 256 #But first ensure that coupling are define whithout spaces: 257 process_str = process_str.replace(' =', '=') 258 process_str = process_str.replace('= ', '=') 259 process_str = process_str.replace(',',' , ') 260 #now loop on the element and treat all the coupling 261 for info in process_str.split(): 262 if '=' in info: 263 coupling += info + '\n' 264 else: 265 new_process_content.append(info) 266 # Recombine the process_str (which is the input process_str without coupling 267 #info) 268 process_str = ' '.join(new_process_content) 269 270 #format the SubProcess 271 process_text += process_template.substitute({'process': process_str, \ 272 'coupling': coupling}) 273 274 text = proc_card_template.substitute({'process': process_text, 275 'model': modelname, 276 'multiparticle':''}) 277 ff = open(file_pos, 'w') 278 ff.write(text) 279 ff.close()
280 281 #=========================================================================== 282 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 283 #===========================================================================
284 - def finalize_v4_directory(self, matrix_elements, history = "", makejpg = False, 285 online = False, compiler=default_compiler):
286 """Function to finalize v4 directory, for inheritance. 287 """ 288 289 self.create_run_card(matrix_elements, history) 290 291 pass
292 293 #=========================================================================== 294 # Create the proc_characteristic file passing information to the run_interface 295 #===========================================================================
296 - def create_proc_charac(self, matrix_elements=None, history= "", **opts):
297 298 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
299 300 #=========================================================================== 301 # write_matrix_element_v4 302 #===========================================================================
303 - def write_matrix_element_v4(self):
304 """Function to write a matrix.f file, for inheritance. 305 """ 306 pass
307 308 #=========================================================================== 309 # write_pdf_opendata 310 #===========================================================================
311 - def write_pdf_opendata(self):
312 """ modify the pdf opendata file, to allow direct access to cluster node 313 repository if configure""" 314 315 if not self.opt["cluster_local_path"]: 316 changer = {"pdf_systemwide": ""} 317 else: 318 to_add = """ 319 tempname='%(path)s'//Tablefile 320 open(IU,file=tempname,status='old',ERR=1) 321 return 322 1 tempname='%(path)s/Pdfdata/'//Tablefile 323 open(IU,file=tempname,status='old',ERR=2) 324 return 325 2 tempname='%(path)s/lhapdf'//Tablefile 326 open(IU,file=tempname,status='old',ERR=3) 327 return 328 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 329 open(IU,file=tempname,status='old',ERR=4) 330 return 331 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 332 open(IU,file=tempname,status='old',ERR=5) 333 return 334 """ % {"path" : self.opt["cluster_local_path"]} 335 336 changer = {"pdf_systemwide": to_add} 337 338 ff = open(pjoin(self.dir_path, "Source", "PDF", "opendata.f"),"w") 339 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 340 ff.write(template % changer) 341 342 # Do the same for lhapdf set 343 if not self.opt["cluster_local_path"]: 344 changer = {"cluster_specific_path": ""} 345 else: 346 to_add=""" 347 LHAPath='%(path)s/PDFsets' 348 Inquire(File=LHAPath, exist=exists) 349 if(exists)return 350 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 351 Inquire(File=LHAPath, exist=exists) 352 if(exists)return 353 LHAPath='%(path)s/../lhapdf/pdfsets/' 354 Inquire(File=LHAPath, exist=exists) 355 if(exists)return 356 LHAPath='./PDFsets' 357 """ % {"path" : self.opt["cluster_local_path"]} 358 changer = {"cluster_specific_path": to_add} 359 360 ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 361 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 362 ff.write(template % changer) 363 364 365 return
366 367 368 369 #=========================================================================== 370 # write_maxparticles_file 371 #===========================================================================
372 - def write_maxparticles_file(self, writer, matrix_elements):
373 """Write the maxparticles.inc file for MadEvent""" 374 375 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 376 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 377 matrix_elements.get('matrix_elements')]) 378 else: 379 maxparticles = max([me.get_nexternal_ninitial()[0] \ 380 for me in matrix_elements]) 381 382 lines = "integer max_particles\n" 383 lines += "parameter(max_particles=%d)" % maxparticles 384 385 # Write the file 386 writer.writelines(lines) 387 388 return True
389 390 391 #=========================================================================== 392 # export the model 393 #===========================================================================
394 - def export_model_files(self, model_path):
395 """Configure the files/link of the process according to the model""" 396 397 # Import the model 398 for file in os.listdir(model_path): 399 if os.path.isfile(pjoin(model_path, file)): 400 shutil.copy2(pjoin(model_path, file), \ 401 pjoin(self.dir_path, 'Source', 'MODEL'))
402 403 417 424 425 #=========================================================================== 426 # export the helas routine 427 #===========================================================================
428 - def export_helas(self, helas_path):
429 """Configure the files/link of the process according to the model""" 430 431 # Import helas routine 432 for filename in os.listdir(helas_path): 433 filepos = pjoin(helas_path, filename) 434 if os.path.isfile(filepos): 435 if filepos.endswith('Makefile.template'): 436 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 437 elif filepos.endswith('Makefile'): 438 pass 439 else: 440 cp(filepos, self.dir_path + '/Source/DHELAS')
441 # following lines do the same but whithout symbolic link 442 # 443 #def export_helas(mgme_dir, dir_path): 444 # 445 # # Copy the HELAS directory 446 # helas_dir = pjoin(mgme_dir, 'HELAS') 447 # for filename in os.listdir(helas_dir): 448 # if os.path.isfile(pjoin(helas_dir, filename)): 449 # shutil.copy2(pjoin(helas_dir, filename), 450 # pjoin(dir_path, 'Source', 'DHELAS')) 451 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 452 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 453 # 454 455 #=========================================================================== 456 # generate_subprocess_directory_v4 457 #===========================================================================
458 - def generate_subprocess_directory_v4(self, matrix_element, 459 fortran_model, 460 me_number):
461 """Routine to generate a subprocess directory (for inheritance)""" 462 463 pass
464 465 #=========================================================================== 466 # get_source_libraries_list 467 #===========================================================================
468 - def get_source_libraries_list(self):
469 """ Returns the list of libraries to be compiling when compiling the 470 SOURCE directory. It is different for loop_induced processes and 471 also depends on the value of the 'output_dependencies' option""" 472 473 return ['$(LIBDIR)libdhelas.$(libext)', 474 '$(LIBDIR)libpdf.$(libext)', 475 '$(LIBDIR)libmodel.$(libext)', 476 '$(LIBDIR)libcernlib.$(libext)']
477 478 #=========================================================================== 479 # write_source_makefile 480 #===========================================================================
481 - def write_source_makefile(self, writer):
482 """Write the nexternal.inc file for MG4""" 483 484 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 485 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 486 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 487 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 488 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 489 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 490 else: 491 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 492 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 493 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 494 writer.write(text) 495 496 return True
497 498 #=========================================================================== 499 # write_nexternal_madspin 500 #===========================================================================
501 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
502 """Write the nexternal_prod.inc file for madspin""" 503 504 replace_dict = {} 505 506 replace_dict['nexternal'] = nexternal 507 replace_dict['ninitial'] = ninitial 508 509 file = """ \ 510 integer nexternal_prod 511 parameter (nexternal_prod=%(nexternal)d) 512 integer nincoming_prod 513 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 514 515 # Write the file 516 writer.writelines(file) 517 518 return True
519 520 #=========================================================================== 521 # write_helamp_madspin 522 #===========================================================================
523 - def write_helamp_madspin(self, writer, ncomb):
524 """Write the helamp.inc file for madspin""" 525 526 replace_dict = {} 527 528 replace_dict['ncomb'] = ncomb 529 530 file = """ \ 531 integer ncomb1 532 parameter (ncomb1=%(ncomb)d) 533 double precision helamp(ncomb1) 534 common /to_helamp/helamp """ % replace_dict 535 536 # Write the file 537 writer.writelines(file) 538 539 return True
540 541 542 #=========================================================================== 543 # write_nexternal_file 544 #===========================================================================
545 - def write_nexternal_file(self, writer, nexternal, ninitial):
546 """Write the nexternal.inc file for MG4""" 547 548 replace_dict = {} 549 550 replace_dict['nexternal'] = nexternal 551 replace_dict['ninitial'] = ninitial 552 553 file = """ \ 554 integer nexternal 555 parameter (nexternal=%(nexternal)d) 556 integer nincoming 557 parameter (nincoming=%(ninitial)d)""" % replace_dict 558 559 # Write the file 560 writer.writelines(file) 561 562 return True
563 564 #=========================================================================== 565 # write_pmass_file 566 #===========================================================================
567 - def write_pmass_file(self, writer, matrix_element):
568 """Write the pmass.inc file for MG4""" 569 570 model = matrix_element.get('processes')[0].get('model') 571 572 lines = [] 573 for wf in matrix_element.get_external_wavefunctions(): 574 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 575 if mass.lower() != "zero": 576 mass = "abs(%s)" % mass 577 578 lines.append("pmass(%d)=%s" % \ 579 (wf.get('number_external'), mass)) 580 581 # Write the file 582 writer.writelines(lines) 583 584 return True
585 586 #=========================================================================== 587 # write_ngraphs_file 588 #===========================================================================
589 - def write_ngraphs_file(self, writer, nconfigs):
590 """Write the ngraphs.inc file for MG4. Needs input from 591 write_configs_file.""" 592 593 file = " integer n_max_cg\n" 594 file = file + "parameter (n_max_cg=%d)" % nconfigs 595 596 # Write the file 597 writer.writelines(file) 598 599 return True
600 601 #=========================================================================== 602 # write_leshouche_file 603 #===========================================================================
604 - def write_leshouche_file(self, writer, matrix_element):
605 """Write the leshouche.inc file for MG4""" 606 607 # Write the file 608 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 609 610 return True
611 612 #=========================================================================== 613 # get_leshouche_lines 614 #===========================================================================
615 - def get_leshouche_lines(self, matrix_element, numproc):
616 """Write the leshouche.inc file for MG4""" 617 618 # Extract number of external particles 619 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 620 621 lines = [] 622 for iproc, proc in enumerate(matrix_element.get('processes')): 623 legs = proc.get_legs_with_decays() 624 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 625 (iproc + 1, numproc+1, nexternal, 626 ",".join([str(l.get('id')) for l in legs]))) 627 if iproc == 0 and numproc == 0: 628 for i in [1, 2]: 629 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 630 (i, nexternal, 631 ",".join([ "%3r" % 0 ] * ninitial + \ 632 [ "%3r" % i ] * (nexternal - ninitial)))) 633 634 # Here goes the color connections corresponding to the JAMPs 635 # Only one output, for the first subproc! 636 if iproc == 0: 637 # If no color basis, just output trivial color flow 638 if not matrix_element.get('color_basis'): 639 for i in [1, 2]: 640 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 641 (i, numproc+1,nexternal, 642 ",".join([ "%3r" % 0 ] * nexternal))) 643 644 else: 645 # First build a color representation dictionnary 646 repr_dict = {} 647 for l in legs: 648 repr_dict[l.get('number')] = \ 649 proc.get('model').get_particle(l.get('id')).get_color()\ 650 * (-1)**(1+l.get('state')) 651 # Get the list of color flows 652 color_flow_list = \ 653 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 654 ninitial) 655 # And output them properly 656 for cf_i, color_flow_dict in enumerate(color_flow_list): 657 for i in [0, 1]: 658 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 659 (i + 1, cf_i + 1, numproc+1, nexternal, 660 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 661 for l in legs]))) 662 663 return lines
664 665 666 667 668 #=========================================================================== 669 # write_maxamps_file 670 #===========================================================================
671 - def write_maxamps_file(self, writer, maxamps, maxflows, 672 maxproc,maxsproc):
673 """Write the maxamps.inc file for MG4.""" 674 675 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 676 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 677 (maxamps, maxflows) 678 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 679 (maxproc, maxsproc) 680 681 # Write the file 682 writer.writelines(file) 683 684 return True
685 686 #=========================================================================== 687 # write_props_file 688 #===========================================================================
689 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
690 """Write the props.inc file for MadEvent. Needs input from 691 write_configs_file.""" 692 693 lines = [] 694 695 particle_dict = matrix_element.get('processes')[0].get('model').\ 696 get('particle_dict') 697 698 for iconf, configs in enumerate(s_and_t_channels): 699 for vertex in configs[0] + configs[1][:-1]: 700 leg = vertex.get('legs')[-1] 701 if leg.get('id') not in particle_dict: 702 # Fake propagator used in multiparticle vertices 703 mass = 'zero' 704 width = 'zero' 705 pow_part = 0 706 else: 707 particle = particle_dict[leg.get('id')] 708 # Get mass 709 if particle.get('mass').lower() == 'zero': 710 mass = particle.get('mass') 711 else: 712 mass = "abs(%s)" % particle.get('mass') 713 # Get width 714 if particle.get('width').lower() == 'zero': 715 width = particle.get('width') 716 else: 717 width = "abs(%s)" % particle.get('width') 718 719 pow_part = 1 + int(particle.is_boson()) 720 721 lines.append("prmass(%d,%d) = %s" % \ 722 (leg.get('number'), iconf + 1, mass)) 723 lines.append("prwidth(%d,%d) = %s" % \ 724 (leg.get('number'), iconf + 1, width)) 725 lines.append("pow(%d,%d) = %d" % \ 726 (leg.get('number'), iconf + 1, pow_part)) 727 728 # Write the file 729 writer.writelines(lines) 730 731 return True
732 733 734 735 736 737 #=========================================================================== 738 # Routines to output UFO models in MG4 format 739 #=========================================================================== 740
741 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 742 wanted_couplings = []):
743 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 744 745 # Make sure aloha is in quadruple precision if needed 746 old_aloha_mp=aloha.mp_precision 747 aloha.mp_precision=self.opt['mp'] 748 749 # create the MODEL 750 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 751 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 752 model_builder.build(wanted_couplings) 753 754 # Backup the loop mode, because it can be changed in what follows. 755 old_loop_mode = aloha.loop_mode 756 757 # Create the aloha model or use the existing one (for loop exporters 758 # this is useful as the aloha model will be used again in the 759 # LoopHelasMatrixElements generated). We do not save the model generated 760 # here if it didn't exist already because it would be a waste of 761 # memory for tree level applications since aloha is only needed at the 762 # time of creating the aloha fortran subroutines. 763 if hasattr(self, 'aloha_model'): 764 aloha_model = self.aloha_model 765 else: 766 aloha_model = create_aloha.AbstractALOHAModel(model.get('name')) 767 aloha_model.add_Lorentz_object(model.get('lorentz')) 768 769 # Compute the subroutines 770 if wanted_lorentz: 771 aloha_model.compute_subset(wanted_lorentz) 772 else: 773 aloha_model.compute_all(save=False) 774 775 # Write them out 776 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 777 aloha_model.write(write_dir, 'Fortran') 778 779 # Revert the original aloha loop mode 780 aloha.loop_mode = old_loop_mode 781 782 #copy Helas Template 783 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 784 if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]): 785 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', write_dir+'/aloha_functions.f') 786 aloha_model.loop_mode = False 787 else: 788 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', write_dir+'/aloha_functions.f') 789 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 790 791 # Make final link in the Process 792 self.make_model_symbolic_link() 793 794 # Re-establish original aloha mode 795 aloha.mp_precision=old_aloha_mp
796 797 #=========================================================================== 798 # Helper functions 799 #===========================================================================
800 - def get_mg5_info_lines(self):
801 """Return info lines for MG5, suitable to place at beginning of 802 Fortran files""" 803 804 info = misc.get_pkg_info() 805 info_lines = "" 806 if info and info.has_key('version') and info.has_key('date'): 807 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 808 (info['version'], info['date']) 809 info_lines = info_lines + \ 810 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 811 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 812 else: 813 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 814 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 815 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 816 817 return info_lines
818
819 - def get_process_info_lines(self, matrix_element):
820 """Return info lines describing the processes for this matrix element""" 821 822 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 823 for process in matrix_element.get('processes')])
824 825
826 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
827 """Return the Helicity matrix definition lines for this matrix element""" 828 829 helicity_line_list = [] 830 i = 0 831 for helicities in matrix_element.get_helicity_matrix(): 832 i = i + 1 833 int_list = [i, len(helicities)] 834 int_list.extend(helicities) 835 helicity_line_list.append(\ 836 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 837 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 838 839 return "\n".join(helicity_line_list)
840
841 - def get_ic_line(self, matrix_element):
842 """Return the IC definition line coming after helicities, required by 843 switchmom in madevent""" 844 845 nexternal = matrix_element.get_nexternal_ninitial()[0] 846 int_list = range(1, nexternal + 1) 847 848 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 849 ",".join([str(i) for \ 850 i in int_list]))
851
852 - def set_chosen_SO_index(self, process, squared_orders):
853 """ From the squared order constraints set by the user, this function 854 finds what indices of the squared_orders list the user intends to pick. 855 It returns this as a string of comma-separated successive '.true.' or 856 '.false.' for each index.""" 857 858 user_squared_orders = process.get('squared_orders') 859 split_orders = process.get('split_orders') 860 861 if len(user_squared_orders)==0: 862 return ','.join(['.true.']*len(squared_orders)) 863 864 res = [] 865 for sqsos in squared_orders: 866 is_a_match = True 867 for user_sqso, value in user_squared_orders.items(): 868 if (process.get_squared_order_type(user_sqso) =='==' and \ 869 value!=sqsos[split_orders.index(user_sqso)]) or \ 870 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 871 value<sqsos[split_orders.index(user_sqso)]) or \ 872 (process.get_squared_order_type(user_sqso) == '>' and \ 873 value>=sqsos[split_orders.index(user_sqso)]): 874 is_a_match = False 875 break 876 res.append('.true.' if is_a_match else '.false.') 877 878 return ','.join(res)
879
880 - def get_split_orders_lines(self, orders, array_name, n=5):
881 """ Return the split orders definition as defined in the list orders and 882 for the name of the array 'array_name'. Split rows in chunks of size n.""" 883 884 ret_list = [] 885 for index, order in enumerate(orders): 886 for k in xrange(0, len(order), n): 887 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 888 (array_name,index + 1, k + 1, min(k + n, len(order)), 889 ','.join(["%5r" % i for i in order[k:k + n]]))) 890 return ret_list
891
892 - def format_integer_list(self, list, name, n=5):
893 """ Return an initialization of the python list in argument following 894 the fortran syntax using the data keyword assignment, filling an array 895 of name 'name'. It splits rows in chunks of size n.""" 896 897 ret_list = [] 898 for k in xrange(0, len(list), n): 899 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 900 (name, k + 1, min(k + n, len(list)), 901 ','.join(["%5r" % i for i in list[k:k + n]]))) 902 return ret_list
903
904 - def get_color_data_lines(self, matrix_element, n=6):
905 """Return the color matrix definition lines for this matrix element. Split 906 rows in chunks of size n.""" 907 908 if not matrix_element.get('color_matrix'): 909 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 910 else: 911 ret_list = [] 912 my_cs = color.ColorString() 913 for index, denominator in \ 914 enumerate(matrix_element.get('color_matrix').\ 915 get_line_denominators()): 916 # First write the common denominator for this color matrix line 917 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 918 # Then write the numerators for the matrix elements 919 num_list = matrix_element.get('color_matrix').\ 920 get_line_numerators(index, denominator) 921 922 for k in xrange(0, len(num_list), n): 923 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 924 (index + 1, k + 1, min(k + n, len(num_list)), 925 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 926 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 927 ret_list.append("C %s" % repr(my_cs)) 928 return ret_list
929 930
931 - def get_den_factor_line(self, matrix_element):
932 """Return the denominator factor line for this matrix element""" 933 934 return "DATA IDEN/%2r/" % \ 935 matrix_element.get_denominator_factor()
936
937 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
938 """Return the ICOLAMP matrix, showing which JAMPs contribute to 939 which configs (diagrams).""" 940 941 ret_list = [] 942 943 booldict = {False: ".false.", True: ".true."} 944 945 if not matrix_element.get('color_basis'): 946 # No color, so only one color factor. Simply write a ".true." 947 # for each config (i.e., each diagram with only 3 particle 948 # vertices 949 configs = len(mapconfigs) 950 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 951 (num_matrix_element, configs, 952 ','.join([".true." for i in range(configs)]))) 953 return ret_list 954 955 # There is a color basis - create a list showing which JAMPs have 956 # contributions to which configs 957 958 # Only want to include leading color flows, so find max_Nc 959 color_basis = matrix_element.get('color_basis') 960 961 # We don't want to include the power of Nc's which come from the potential 962 # loop color trace (i.e. in the case of a closed fermion loop for example) 963 # so we subtract it here when computing max_Nc 964 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 965 color_basis.values()],[])) 966 967 # Crate dictionary between diagram number and JAMP number 968 diag_jamp = {} 969 for ijamp, col_basis_elem in \ 970 enumerate(sorted(matrix_element.get('color_basis').keys())): 971 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 972 # Only use color flows with Nc == max_Nc. However, notice that 973 # we don't want to include the Nc power coming from the loop 974 # in this counting. 975 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 976 diag_num = diag_tuple[0] + 1 977 # Add this JAMP number to this diag_num 978 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 979 [ijamp+1] 980 981 colamps = ijamp + 1 982 for iconfig, num_diag in enumerate(mapconfigs): 983 if num_diag == 0: 984 continue 985 986 # List of True or False 987 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 988 # Add line 989 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 990 (iconfig+1, num_matrix_element, colamps, 991 ','.join(["%s" % booldict[b] for b in \ 992 bool_list]))) 993 994 return ret_list
995
996 - def get_amp2_lines(self, matrix_element, config_map = []):
997 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 998 999 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1000 # Get minimum legs in a vertex 1001 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1002 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1003 minvert = min(vert_list) if vert_list!=[] else 0 1004 1005 ret_lines = [] 1006 if config_map: 1007 # In this case, we need to sum up all amplitudes that have 1008 # identical topologies, as given by the config_map (which 1009 # gives the topology/config for each of the diagrams 1010 diagrams = matrix_element.get('diagrams') 1011 # Combine the diagrams with identical topologies 1012 config_to_diag_dict = {} 1013 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1014 if config_map[idiag] == 0: 1015 continue 1016 try: 1017 config_to_diag_dict[config_map[idiag]].append(idiag) 1018 except KeyError: 1019 config_to_diag_dict[config_map[idiag]] = [idiag] 1020 # Write out the AMP2s summing squares of amplitudes belonging 1021 # to eiher the same diagram or different diagrams with 1022 # identical propagator properties. Note that we need to use 1023 # AMP2 number corresponding to the first diagram number used 1024 # for that AMP2. 1025 for config in sorted(config_to_diag_dict.keys()): 1026 1027 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1028 {"num": (config_to_diag_dict[config][0] + 1)} 1029 1030 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1031 sum([diagrams[idiag].get('amplitudes') for \ 1032 idiag in config_to_diag_dict[config]], [])]) 1033 1034 # Not using \sum |M|^2 anymore since this creates troubles 1035 # when ckm is not diagonal due to the JIM mechanism. 1036 if '+' in amp: 1037 line += "(%s)*dconjg(%s)" % (amp, amp) 1038 else: 1039 line += "%s*dconjg(%s)" % (amp, amp) 1040 ret_lines.append(line) 1041 else: 1042 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1043 # Ignore any diagrams with 4-particle vertices. 1044 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1045 continue 1046 # Now write out the expression for AMP2, meaning the sum of 1047 # squared amplitudes belonging to the same diagram 1048 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1049 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1050 {"num": a.get('number')} for a in \ 1051 diag.get('amplitudes')]) 1052 ret_lines.append(line) 1053 1054 return ret_lines
1055 1056 #=========================================================================== 1057 # Returns the data statements initializing the coeffictients for the JAMP 1058 # decomposition. It is used when the JAMP initialization is decided to be 1059 # done through big arrays containing the projection coefficients. 1060 #===========================================================================
1061 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1062 n=50, Nc_value=3):
1063 """This functions return the lines defining the DATA statement setting 1064 the coefficients building the JAMPS out of the AMPS. Split rows in 1065 bunches of size n. 1066 One can specify the color_basis from which the color amplitudes originates 1067 so that there are commentaries telling what color structure each JAMP 1068 corresponds to.""" 1069 1070 if(not isinstance(color_amplitudes,list) or 1071 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1072 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1073 1074 res_list = [] 1075 my_cs = color.ColorString() 1076 for index, coeff_list in enumerate(color_amplitudes): 1077 # Create the list of the complete numerical coefficient. 1078 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1079 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1080 coefficient in coeff_list] 1081 # Create the list of the numbers of the contributing amplitudes. 1082 # Mutliply by -1 for those which have an imaginary coefficient. 1083 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1084 for coefficient in coeff_list] 1085 # Find the common denominator. 1086 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1087 num_list=[(coefficient*commondenom).numerator \ 1088 for coefficient in coefs_list] 1089 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1090 index+1,len(num_list))) 1091 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1092 index+1,commondenom)) 1093 if color_basis: 1094 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1095 res_list.append("C %s" % repr(my_cs)) 1096 for k in xrange(0, len(num_list), n): 1097 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1098 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1099 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1100 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1101 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1102 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1103 pass 1104 return res_list
1105 1106
1107 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1108 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1109 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1110 defined as a matrix element or directly as a color_amplitudes dictionary. 1111 The split_order_amps specifies the group of amplitudes sharing the same 1112 amplitude orders which should be put in together in a given set of JAMPS. 1113 The split_order_amps is supposed to have the format of the second output 1114 of the function get_split_orders_mapping function in helas_objects.py. 1115 The split_order_names is optional (it should correspond to the process 1116 'split_orders' attribute) and only present to provide comments in the 1117 JAMP definitions in the code.""" 1118 1119 # Let the user call get_JAMP_lines_split_order directly from a 1120 error_msg="Malformed '%s' argument passed to the "+\ 1121 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1122 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1123 color_amplitudes=col_amps.get_color_amplitudes() 1124 elif(isinstance(col_amps,list)): 1125 if(col_amps and isinstance(col_amps[0],list)): 1126 color_amplitudes=col_amps 1127 else: 1128 raise MadGraph5Error, error_msg%'col_amps' 1129 else: 1130 raise MadGraph5Error, error_msg%'col_amps' 1131 1132 # Verify the sanity of the split_order_amps and split_order_names args 1133 if isinstance(split_order_amps,list): 1134 for elem in split_order_amps: 1135 if len(elem)!=2: 1136 raise MadGraph5Error, error_msg%'split_order_amps' 1137 # Check the first element of the two lists to make sure they are 1138 # integers, although in principle they should all be integers. 1139 if not isinstance(elem[0],tuple) or \ 1140 not isinstance(elem[1],tuple) or \ 1141 not isinstance(elem[0][0],int) or \ 1142 not isinstance(elem[1][0],int): 1143 raise MadGraph5Error, error_msg%'split_order_amps' 1144 else: 1145 raise MadGraph5Error, error_msg%'split_order_amps' 1146 1147 if not split_order_names is None: 1148 if isinstance(split_order_names,list): 1149 # Should specify the same number of names as there are elements 1150 # in the key of the split_order_amps. 1151 if len(split_order_names)!=len(split_order_amps[0][0]): 1152 raise MadGraph5Error, error_msg%'split_order_names' 1153 # Check the first element of the list to be a string 1154 if not isinstance(split_order_names[0],str): 1155 raise MadGraph5Error, error_msg%'split_order_names' 1156 else: 1157 raise MadGraph5Error, error_msg%'split_order_names' 1158 1159 # Now scan all contributing orders to be individually computed and 1160 # construct the list of color_amplitudes for JAMP to be constructed 1161 # accordingly. 1162 res_list=[] 1163 for i, amp_order in enumerate(split_order_amps): 1164 col_amps_order = [] 1165 for jamp in color_amplitudes: 1166 col_amps_order.append(filter(lambda col_amp: 1167 col_amp[1] in amp_order[1],jamp)) 1168 if split_order_names: 1169 res_list.append('C JAMPs contributing to orders '+' '.join( 1170 ['%s=%i'%order for order in zip(split_order_names, 1171 amp_order[0])])) 1172 if self.opt['export_format'] in ['madloop_matchbox']: 1173 res_list.extend(self.get_JAMP_lines(col_amps_order, 1174 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1175 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1176 else: 1177 res_list.extend(self.get_JAMP_lines(col_amps_order, 1178 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1179 1180 1181 1182 1183 1184 1185 1186 1187 return res_list
1188 1189
1190 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1191 split=-1):
1192 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1193 defined as a matrix element or directly as a color_amplitudes dictionary, 1194 Jamp_formatLC should be define to allow to add LeadingColor computation 1195 (usefull for MatchBox) 1196 The split argument defines how the JAMP lines should be split in order 1197 not to be too long.""" 1198 1199 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1200 # the color amplitudes lists. 1201 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1202 color_amplitudes=col_amps.get_color_amplitudes() 1203 elif(isinstance(col_amps,list)): 1204 if(col_amps and isinstance(col_amps[0],list)): 1205 color_amplitudes=col_amps 1206 else: 1207 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1208 else: 1209 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1210 1211 1212 res_list = [] 1213 for i, coeff_list in enumerate(color_amplitudes): 1214 # It might happen that coeff_list is empty if this function was 1215 # called from get_JAMP_lines_split_order (i.e. if some color flow 1216 # does not contribute at all for a given order). 1217 # In this case we simply set it to 0. 1218 if coeff_list==[]: 1219 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1220 continue 1221 # Break the JAMP definition into 'n=split' pieces to avoid having 1222 # arbitrarly long lines. 1223 first=True 1224 n = (len(coeff_list)+1 if split<=0 else split) 1225 while coeff_list!=[]: 1226 coefs=coeff_list[:n] 1227 coeff_list=coeff_list[n:] 1228 res = ((JAMP_format+"=") % str(i + 1)) + \ 1229 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1230 1231 first=False 1232 # Optimization: if all contributions to that color basis element have 1233 # the same coefficient (up to a sign), put it in front 1234 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1235 common_factor = False 1236 diff_fracs = list(set(list_fracs)) 1237 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1238 common_factor = True 1239 global_factor = diff_fracs[0] 1240 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1241 1242 # loop for JAMP 1243 for (coefficient, amp_number) in coefs: 1244 if not coefficient: 1245 continue 1246 if common_factor: 1247 res = (res + "%s" + AMP_format) % \ 1248 (self.coeff(coefficient[0], 1249 coefficient[1] / abs(coefficient[1]), 1250 coefficient[2], 1251 coefficient[3]), 1252 str(amp_number)) 1253 else: 1254 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1255 coefficient[1], 1256 coefficient[2], 1257 coefficient[3]), 1258 str(amp_number)) 1259 1260 if common_factor: 1261 res = res + ')' 1262 1263 res_list.append(res) 1264 1265 return res_list
1266
1267 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1268 """Generate the PDF lines for the auto_dsig.f file""" 1269 1270 processes = matrix_element.get('processes') 1271 model = processes[0].get('model') 1272 1273 pdf_definition_lines = "" 1274 pdf_data_lines = "" 1275 pdf_lines = "" 1276 1277 if ninitial == 1: 1278 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1279 for i, proc in enumerate(processes): 1280 process_line = proc.base_string() 1281 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1282 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1283 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1284 else: 1285 # Pick out all initial state particles for the two beams 1286 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1287 p in processes]))), 1288 sorted(list(set([p.get_initial_pdg(2) for \ 1289 p in processes])))] 1290 1291 # Prepare all variable names 1292 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1293 sum(initial_states,[])]) 1294 for key,val in pdf_codes.items(): 1295 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1296 1297 # Set conversion from PDG code to number used in PDF calls 1298 pdgtopdf = {21: 0, 22: 7} 1299 1300 # Fill in missing entries of pdgtopdf 1301 for pdg in sum(initial_states,[]): 1302 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 1303 pdgtopdf[pdg] = pdg 1304 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 1305 # If any particle has pdg code 7, we need to use something else 1306 pdgtopdf[pdg] = 6000000 + pdg 1307 1308 # Get PDF variable declarations for all initial states 1309 for i in [0,1]: 1310 pdf_definition_lines += "DOUBLE PRECISION " + \ 1311 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1312 for pdg in \ 1313 initial_states[i]]) + \ 1314 "\n" 1315 1316 # Get PDF data lines for all initial states 1317 for i in [0,1]: 1318 pdf_data_lines += "DATA " + \ 1319 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1320 for pdg in initial_states[i]]) + \ 1321 "/%d*1D0/" % len(initial_states[i]) + \ 1322 "\n" 1323 1324 # Get PDF lines for all different initial states 1325 for i, init_states in enumerate(initial_states): 1326 if subproc_group: 1327 pdf_lines = pdf_lines + \ 1328 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1329 % (i + 1, i + 1) 1330 else: 1331 pdf_lines = pdf_lines + \ 1332 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1333 % (i + 1, i + 1) 1334 1335 for initial_state in init_states: 1336 if initial_state in pdf_codes.keys(): 1337 if subproc_group: 1338 pdf_lines = pdf_lines + \ 1339 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 1340 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1341 (pdf_codes[initial_state], 1342 i + 1, i + 1, pdgtopdf[initial_state], 1343 i + 1, i + 1) 1344 else: 1345 pdf_lines = pdf_lines + \ 1346 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 1347 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1348 (pdf_codes[initial_state], 1349 i + 1, i + 1, pdgtopdf[initial_state], 1350 i + 1, i + 1) 1351 pdf_lines = pdf_lines + "ENDIF\n" 1352 1353 # Add up PDFs for the different initial state particles 1354 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1355 for proc in processes: 1356 process_line = proc.base_string() 1357 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1358 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1359 for ibeam in [1, 2]: 1360 initial_state = proc.get_initial_pdg(ibeam) 1361 if initial_state in pdf_codes.keys(): 1362 pdf_lines = pdf_lines + "%s%d*" % \ 1363 (pdf_codes[initial_state], ibeam) 1364 else: 1365 pdf_lines = pdf_lines + "1d0*" 1366 # Remove last "*" from pdf_lines 1367 pdf_lines = pdf_lines[:-1] + "\n" 1368 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1369 1370 # Remove last line break from the return variables 1371 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1372 1373 #=========================================================================== 1374 # write_props_file 1375 #===========================================================================
1376 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1377 """Write the props.inc file for MadEvent. Needs input from 1378 write_configs_file.""" 1379 1380 lines = [] 1381 1382 particle_dict = matrix_element.get('processes')[0].get('model').\ 1383 get('particle_dict') 1384 1385 for iconf, configs in enumerate(s_and_t_channels): 1386 for vertex in configs[0] + configs[1][:-1]: 1387 leg = vertex.get('legs')[-1] 1388 if leg.get('id') not in particle_dict: 1389 # Fake propagator used in multiparticle vertices 1390 mass = 'zero' 1391 width = 'zero' 1392 pow_part = 0 1393 else: 1394 particle = particle_dict[leg.get('id')] 1395 # Get mass 1396 if particle.get('mass').lower() == 'zero': 1397 mass = particle.get('mass') 1398 else: 1399 mass = "abs(%s)" % particle.get('mass') 1400 # Get width 1401 if particle.get('width').lower() == 'zero': 1402 width = particle.get('width') 1403 else: 1404 width = "abs(%s)" % particle.get('width') 1405 1406 pow_part = 1 + int(particle.is_boson()) 1407 1408 lines.append("prmass(%d,%d) = %s" % \ 1409 (leg.get('number'), iconf + 1, mass)) 1410 lines.append("prwidth(%d,%d) = %s" % \ 1411 (leg.get('number'), iconf + 1, width)) 1412 lines.append("pow(%d,%d) = %d" % \ 1413 (leg.get('number'), iconf + 1, pow_part)) 1414 1415 # Write the file 1416 writer.writelines(lines) 1417 1418 return True
1419 1420 #=========================================================================== 1421 # write_configs_file 1422 #===========================================================================
1423 - def write_configs_file(self, writer, matrix_element):
1424 """Write the configs.inc file for MadEvent""" 1425 1426 # Extract number of external particles 1427 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1428 1429 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1430 mapconfigs = [c[0] for c in configs] 1431 model = matrix_element.get('processes')[0].get('model') 1432 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1433 [[c[1]] for c in configs], 1434 mapconfigs, 1435 nexternal, ninitial, 1436 model)
1437 1438 #=========================================================================== 1439 # write_configs_file_from_diagrams 1440 #===========================================================================
1441 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1442 nexternal, ninitial, model):
1443 """Write the actual configs.inc file. 1444 1445 configs is the diagrams corresponding to configs (each 1446 diagrams is a list of corresponding diagrams for all 1447 subprocesses, with None if there is no corresponding diagrams 1448 for a given process). 1449 mapconfigs gives the diagram number for each config. 1450 1451 For s-channels, we need to output one PDG for each subprocess in 1452 the subprocess group, in order to be able to pick the right 1453 one for multiprocesses.""" 1454 1455 lines = [] 1456 1457 s_and_t_channels = [] 1458 1459 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1460 for config in configs if [d for d in config if d][0].\ 1461 get_vertex_leg_numbers()!=[]] 1462 minvert = min(vert_list) if vert_list!=[] else 0 1463 1464 # Number of subprocesses 1465 nsubprocs = len(configs[0]) 1466 1467 nconfigs = 0 1468 1469 new_pdg = model.get_first_non_pdg() 1470 1471 for iconfig, helas_diags in enumerate(configs): 1472 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1473 [0].get_vertex_leg_numbers()) : 1474 # Only 3-vertices allowed in configs.inc except for vertices 1475 # which originate from a shrunk loop. 1476 continue 1477 nconfigs += 1 1478 1479 # Need s- and t-channels for all subprocesses, including 1480 # those that don't contribute to this config 1481 empty_verts = [] 1482 stchannels = [] 1483 for h in helas_diags: 1484 if h: 1485 # get_s_and_t_channels gives vertices starting from 1486 # final state external particles and working inwards 1487 stchannels.append(h.get('amplitudes')[0].\ 1488 get_s_and_t_channels(ninitial, model, new_pdg)) 1489 else: 1490 stchannels.append((empty_verts, None)) 1491 1492 # For t-channels, just need the first non-empty one 1493 tchannels = [t for s,t in stchannels if t != None][0] 1494 1495 # For s_and_t_channels (to be used later) use only first config 1496 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1497 tchannels]) 1498 1499 # Make sure empty_verts is same length as real vertices 1500 if any([s for s,t in stchannels]): 1501 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1502 1503 # Reorganize s-channel vertices to get a list of all 1504 # subprocesses for each vertex 1505 schannels = zip(*[s for s,t in stchannels]) 1506 else: 1507 schannels = [] 1508 1509 allchannels = schannels 1510 if len(tchannels) > 1: 1511 # Write out tchannels only if there are any non-trivial ones 1512 allchannels = schannels + tchannels 1513 1514 # Write out propagators for s-channel and t-channel vertices 1515 1516 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1517 # Correspondance between the config and the diagram = amp2 1518 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1519 mapconfigs[iconfig])) 1520 1521 for verts in allchannels: 1522 if verts in schannels: 1523 vert = [v for v in verts if v][0] 1524 else: 1525 vert = verts 1526 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1527 last_leg = vert.get('legs')[-1] 1528 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1529 (last_leg.get('number'), nconfigs, len(daughters), 1530 ",".join([str(d) for d in daughters]))) 1531 if verts in schannels: 1532 pdgs = [] 1533 for v in verts: 1534 if v: 1535 pdgs.append(v.get('legs')[-1].get('id')) 1536 else: 1537 pdgs.append(0) 1538 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1539 (last_leg.get('number'), nconfigs, nsubprocs, 1540 ",".join([str(d) for d in pdgs]))) 1541 lines.append("data tprid(%d,%d)/0/" % \ 1542 (last_leg.get('number'), nconfigs)) 1543 elif verts in tchannels[:-1]: 1544 lines.append("data tprid(%d,%d)/%d/" % \ 1545 (last_leg.get('number'), nconfigs, 1546 abs(last_leg.get('id')))) 1547 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1548 (last_leg.get('number'), nconfigs, nsubprocs, 1549 ",".join(['0'] * nsubprocs))) 1550 1551 # Write out number of configs 1552 lines.append("# Number of configs") 1553 lines.append("data mapconfig(0)/%d/" % nconfigs) 1554 1555 # Write the file 1556 writer.writelines(lines) 1557 1558 return s_and_t_channels
1559 1560 #=========================================================================== 1561 # Global helper methods 1562 #=========================================================================== 1563
1564 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1565 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1566 1567 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1568 1569 if total_coeff == 1: 1570 if is_imaginary: 1571 return '+imag1*' 1572 else: 1573 return '+' 1574 elif total_coeff == -1: 1575 if is_imaginary: 1576 return '-imag1*' 1577 else: 1578 return '-' 1579 1580 res_str = '%+iD0' % total_coeff.numerator 1581 1582 if total_coeff.denominator != 1: 1583 # Check if total_coeff is an integer 1584 res_str = res_str + '/%iD0' % total_coeff.denominator 1585 1586 if is_imaginary: 1587 res_str = res_str + '*imag1' 1588 1589 return res_str + '*'
1590 1591
1592 - def set_fortran_compiler(self, default_compiler, force=False):
1593 """Set compiler based on what's available on the system""" 1594 1595 # Check for compiler 1596 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1597 f77_compiler = default_compiler['fortran'] 1598 elif misc.which('gfortran'): 1599 f77_compiler = 'gfortran' 1600 elif misc.which('g77'): 1601 f77_compiler = 'g77' 1602 elif misc.which('f77'): 1603 f77_compiler = 'f77' 1604 elif default_compiler['fortran']: 1605 logger.warning('No Fortran Compiler detected! Please install one') 1606 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1607 else: 1608 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1609 logger.info('Use Fortran compiler ' + f77_compiler) 1610 1611 1612 # Check for compiler. 1. set default. 1613 if default_compiler['f2py']: 1614 f2py_compiler = default_compiler['f2py'] 1615 else: 1616 f2py_compiler = '' 1617 # Try to find the correct one. 1618 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1619 f2py_compiler = default_compiler 1620 elif misc.which('f2py'): 1621 f2py_compiler = 'f2py' 1622 elif sys.version_info[1] == 6: 1623 if misc.which('f2py-2.6'): 1624 f2py_compiler = 'f2py-2.6' 1625 elif misc.which('f2py2.6'): 1626 f2py_compiler = 'f2py2.6' 1627 elif sys.version_info[1] == 7: 1628 if misc.which('f2py-2.7'): 1629 f2py_compiler = 'f2py-2.7' 1630 elif misc.which('f2py2.7'): 1631 f2py_compiler = 'f2py2.7' 1632 1633 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1634 1635 1636 self.replace_make_opt_f_compiler(to_replace) 1637 # Replace also for Template but not for cluster 1638 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1639 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1640 1641 return f77_compiler
1642 1643 # an alias for backward compatibility 1644 set_compiler = set_fortran_compiler 1645 1646
1647 - def set_cpp_compiler(self, default_compiler, force=False):
1648 """Set compiler based on what's available on the system""" 1649 1650 # Check for compiler 1651 if default_compiler and misc.which(default_compiler): 1652 compiler = default_compiler 1653 elif misc.which('g++'): 1654 compiler = 'g++' 1655 elif misc.which('c++'): 1656 compiler = 'c++' 1657 elif misc.which('clang'): 1658 compiler = 'clang' 1659 elif default_compiler: 1660 logger.warning('No c++ Compiler detected! Please install one') 1661 compiler = default_compiler # maybe misc fail so try with it 1662 else: 1663 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1664 logger.info('Use c++ compiler ' + compiler) 1665 self.replace_make_opt_c_compiler(compiler) 1666 # Replace also for Template but not for cluster 1667 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1668 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1669 1670 return compiler
1671 1672
1673 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1674 """Set FC=compiler in Source/make_opts""" 1675 1676 assert isinstance(compilers, dict) 1677 1678 mod = False #avoid to rewrite the file if not needed 1679 if not root_dir: 1680 root_dir = self.dir_path 1681 1682 compiler= compilers['fortran'] 1683 f2py_compiler = compilers['f2py'] 1684 if not f2py_compiler: 1685 f2py_compiler = 'f2py' 1686 1687 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1688 lines = open(make_opts).read().split('\n') 1689 FC_re = re.compile('^(\s*)(FC|F2PY)\s*=\s*(.+)\s*$') 1690 for iline, line in enumerate(lines): 1691 1692 FC_result = FC_re.match(line) 1693 if FC_result: 1694 if 'FC' == FC_result.group(2): 1695 if compiler != FC_result.group(3): 1696 mod = True 1697 lines[iline] = FC_result.group(1) + "FC=" + compiler 1698 elif 'F2PY' == FC_result.group(2): 1699 if f2py_compiler != FC_result.group(3): 1700 mod = True 1701 lines[iline] = FC_result.group(1) + "F2PY=" + f2py_compiler 1702 if not mod: 1703 return 1704 1705 try: 1706 outfile = open(make_opts, 'w') 1707 except IOError: 1708 if root_dir == self.dir_path: 1709 logger.info('Fail to set compiler. Trying to continue anyway.') 1710 return 1711 outfile.write('\n'.join(lines))
1712 1713
1714 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1715 """Set CXX=compiler in Source/make_opts. 1716 The version is also checked, in order to set some extra flags 1717 if the compiler is clang (on MACOS)""" 1718 1719 1720 p = misc.Popen([compiler, '--version'], stdout=subprocess.PIPE, 1721 stderr=subprocess.PIPE) 1722 output, error = p.communicate() 1723 is_clang = 'LLVM' in output 1724 1725 mod = False #avoid to rewrite the file if not needed 1726 if not root_dir: 1727 root_dir = self.dir_path 1728 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1729 lines = open(make_opts).read().split('\n') 1730 CC_re = re.compile('^(\s*)CXX\s*=\s*(.+)\s*$') 1731 for iline, line in enumerate(lines): 1732 CC_result = CC_re.match(line) 1733 if CC_result: 1734 if compiler != CC_result.group(2): 1735 mod = True 1736 lines[iline] = CC_result.group(1) + "CXX=" + compiler 1737 1738 if is_clang: 1739 CFLAGS_re=re.compile('^(\s*)CFLAGS\s*=\s*(.+)\s*$') 1740 CXXFLAGS_re=re.compile('^(\s*)CXXFLAGS\s*=\s*(.+)\s*$') 1741 flags= '-O -stdlib=libstdc++ -mmacosx-version-min=10.6' 1742 for iline, line in enumerate(lines): 1743 CF_result = CFLAGS_re.match(line) 1744 CXXF_result = CXXFLAGS_re.match(line) 1745 if CF_result: 1746 lines[iline] = CF_result.group(1) + "CFLAGS= " + flags 1747 if CXXF_result: 1748 lines[iline] = CXXF_result.group(1) + "CXXFLAGS= " + flags 1749 if not mod: 1750 return 1751 try: 1752 outfile = open(make_opts, 'w') 1753 except IOError: 1754 if root_dir == self.dir_path: 1755 logger.info('Fail to set compiler. Trying to continue anyway.') 1756 return 1757 outfile.write('\n'.join(lines))
1758
1759 #=============================================================================== 1760 # ProcessExporterFortranSA 1761 #=============================================================================== 1762 -class ProcessExporterFortranSA(ProcessExporterFortran):
1763 """Class to take care of exporting a set of matrix elements to 1764 MadGraph v4 StandAlone format.""" 1765 1766 matrix_template = "matrix_standalone_v4.inc" 1767
1768 - def __init__(self, *args, **opts):
1769 """add the format information compare to standard init""" 1770 1771 if 'format' in opts: 1772 self.format = opts['format'] 1773 del opts['format'] 1774 else: 1775 self.format = 'standalone' 1776 ProcessExporterFortran.__init__(self, *args, **opts)
1777
1778 - def copy_v4template(self, modelname):
1779 """Additional actions needed for setup of Template 1780 """ 1781 1782 #First copy the full template tree if dir_path doesn't exit 1783 if os.path.isdir(self.dir_path): 1784 return 1785 1786 logger.info('initialize a new standalone directory: %s' % \ 1787 os.path.basename(self.dir_path)) 1788 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1789 1790 # Create the directory structure 1791 os.mkdir(self.dir_path) 1792 os.mkdir(pjoin(self.dir_path, 'Source')) 1793 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1794 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1795 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1796 os.mkdir(pjoin(self.dir_path, 'bin')) 1797 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1798 os.mkdir(pjoin(self.dir_path, 'lib')) 1799 os.mkdir(pjoin(self.dir_path, 'Cards')) 1800 1801 # Information at top-level 1802 #Write version info 1803 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1804 try: 1805 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1806 except IOError: 1807 MG5_version = misc.get_pkg_info() 1808 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1809 "5." + MG5_version['version']) 1810 1811 1812 # Add file in SubProcesses 1813 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1814 pjoin(self.dir_path, 'SubProcesses', 'makefile')) 1815 1816 if self.format == 'standalone': 1817 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1818 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1819 1820 # Add file in Source 1821 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1822 pjoin(self.dir_path, 'Source')) 1823 # add the makefile 1824 filename = pjoin(self.dir_path,'Source','makefile') 1825 self.write_source_makefile(writers.FileWriter(filename))
1826 1827 #=========================================================================== 1828 # export model files 1829 #===========================================================================
1830 - def export_model_files(self, model_path):
1831 """export the model dependent files for V4 model""" 1832 1833 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1834 # Add the routine update_as_param in v4 model 1835 # This is a function created in the UFO 1836 text=""" 1837 subroutine update_as_param() 1838 call setpara('param_card.dat',.false.) 1839 return 1840 end 1841 """ 1842 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1843 ff.write(text) 1844 ff.close() 1845 1846 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1847 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1848 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1849 fsock.write(text) 1850 fsock.close() 1851 1852 self.make_model_symbolic_link()
1853 1854 #=========================================================================== 1855 # Make the Helas and Model directories for Standalone directory 1856 #===========================================================================
1857 - def make(self):
1858 """Run make in the DHELAS and MODEL directories, to set up 1859 everything for running standalone 1860 """ 1861 1862 source_dir = pjoin(self.dir_path, "Source") 1863 logger.info("Running make for Helas") 1864 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1865 logger.info("Running make for Model") 1866 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1867 1868 #=========================================================================== 1869 # Create proc_card_mg5.dat for Standalone directory 1870 #===========================================================================
1871 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 1872 online = False, compiler=default_compiler):
1873 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 1874 1875 self.compiler_choice(compiler) 1876 self.make() 1877 1878 # Write command history as proc_card_mg5 1879 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 1880 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 1881 history.write(output_file) 1882 1883 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler) 1884 open(pjoin(self.dir_path,'__init__.py'),'w') 1885 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')
1886 1887
1888 - def compiler_choice(self, compiler):
1889 """ Different daughter classes might want different compilers. 1890 So this function is meant to be overloaded if desired.""" 1891 1892 self.set_compiler(compiler)
1893 1894 #=========================================================================== 1895 # generate_subprocess_directory_v4 1896 #===========================================================================
1897 - def generate_subprocess_directory_v4(self, matrix_element, 1898 fortran_model):
1899 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 1900 including the necessary matrix.f and nexternal.inc files""" 1901 1902 cwd = os.getcwd() 1903 1904 # Create the directory PN_xx_xxxxx in the specified path 1905 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 1906 "P%s" % matrix_element.get('processes')[0].shell_string()) 1907 1908 if self.opt['sa_symmetry']: 1909 # avoid symmetric output 1910 for i,proc in enumerate(matrix_element.get('processes')): 1911 1912 initial = [] #filled in the next line 1913 final = [l.get('id') for l in proc.get('legs')\ 1914 if l.get('state') or initial.append(l.get('id'))] 1915 decay_finals = proc.get_final_ids_after_decay() 1916 decay_finals.sort() 1917 tag = (tuple(initial), tuple(decay_finals)) 1918 legs = proc.get('legs')[:] 1919 leg0 = proc.get('legs')[0] 1920 leg1 = proc.get('legs')[1] 1921 if not leg1.get('state'): 1922 proc.get('legs')[0] = leg1 1923 proc.get('legs')[1] = leg0 1924 flegs = proc.get('legs')[2:] 1925 for perm in itertools.permutations(flegs): 1926 for i,p in enumerate(perm): 1927 proc.get('legs')[i+2] = p 1928 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 1929 "P%s" % proc.shell_string()) 1930 #restore original order 1931 proc.get('legs')[2:] = legs[2:] 1932 if os.path.exists(dirpath2): 1933 proc.get('legs')[:] = legs 1934 return 0 1935 proc.get('legs')[:] = legs 1936 1937 try: 1938 os.mkdir(dirpath) 1939 except os.error as error: 1940 logger.warning(error.strerror + " " + dirpath) 1941 1942 #try: 1943 # os.chdir(dirpath) 1944 #except os.error: 1945 # logger.error('Could not cd to directory %s' % dirpath) 1946 # return 0 1947 1948 logger.info('Creating files in directory %s' % dirpath) 1949 1950 # Extract number of external particles 1951 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1952 1953 # Create the matrix.f file and the nexternal.inc file 1954 if self.opt['export_format']=='standalone_msP': 1955 filename = pjoin(dirpath, 'matrix_prod.f') 1956 else: 1957 filename = pjoin(dirpath, 'matrix.f') 1958 calls = self.write_matrix_element_v4( 1959 writers.FortranWriter(filename), 1960 matrix_element, 1961 fortran_model) 1962 1963 if self.opt['export_format'] == 'standalone_msP': 1964 filename = pjoin(dirpath,'configs_production.inc') 1965 mapconfigs, s_and_t_channels = self.write_configs_file(\ 1966 writers.FortranWriter(filename), 1967 matrix_element) 1968 1969 filename = pjoin(dirpath,'props_production.inc') 1970 self.write_props_file(writers.FortranWriter(filename), 1971 matrix_element, 1972 s_and_t_channels) 1973 1974 filename = pjoin(dirpath,'nexternal_prod.inc') 1975 self.write_nexternal_madspin(writers.FortranWriter(filename), 1976 nexternal, ninitial) 1977 1978 if self.opt['export_format']=='standalone_msF': 1979 filename = pjoin(dirpath, 'helamp.inc') 1980 ncomb=matrix_element.get_helicity_combinations() 1981 self.write_helamp_madspin(writers.FortranWriter(filename), 1982 ncomb) 1983 1984 filename = pjoin(dirpath, 'nexternal.inc') 1985 self.write_nexternal_file(writers.FortranWriter(filename), 1986 nexternal, ninitial) 1987 1988 filename = pjoin(dirpath, 'pmass.inc') 1989 self.write_pmass_file(writers.FortranWriter(filename), 1990 matrix_element) 1991 1992 filename = pjoin(dirpath, 'ngraphs.inc') 1993 self.write_ngraphs_file(writers.FortranWriter(filename), 1994 len(matrix_element.get_all_amplitudes())) 1995 1996 # Generate diagrams 1997 filename = pjoin(dirpath, "matrix.ps") 1998 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1999 get('diagrams'), 2000 filename, 2001 model=matrix_element.get('processes')[0].\ 2002 get('model'), 2003 amplitude=True) 2004 logger.info("Generating Feynman diagrams for " + \ 2005 matrix_element.get('processes')[0].nice_string()) 2006 plot.draw() 2007 2008 linkfiles = ['check_sa.f', 'coupl.inc', 'makefile'] 2009 2010 for file in linkfiles: 2011 ln('../%s' % file, cwd=dirpath) 2012 2013 # Return to original PWD 2014 #os.chdir(cwd) 2015 2016 if not calls: 2017 calls = 0 2018 return calls
2019 2020 2021 #=========================================================================== 2022 # write_source_makefile 2023 #===========================================================================
2024 - def write_source_makefile(self, writer):
2025 """Write the nexternal.inc file for MG4""" 2026 2027 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2028 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2029 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2030 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 2031 writer.write(text) 2032 2033 return True
2034 2035 #=========================================================================== 2036 # write_matrix_element_v4 2037 #===========================================================================
2038 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2039 write=True, proc_prefix=''):
2040 """Export a matrix element to a matrix.f file in MG4 standalone format 2041 if write is on False, just return the replace_dict and not write anything.""" 2042 2043 2044 if not matrix_element.get('processes') or \ 2045 not matrix_element.get('diagrams'): 2046 return 0 2047 2048 if not isinstance(writer, writers.FortranWriter): 2049 raise writers.FortranWriter.FortranWriterError(\ 2050 "writer not FortranWriter but %s" % type(writer)) 2051 2052 if not self.opt.has_key('sa_symmetry'): 2053 self.opt['sa_symmetry']=False 2054 2055 # Set lowercase/uppercase Fortran code 2056 writers.FortranWriter.downcase = False 2057 2058 # The proc_id is for MadEvent grouping which is never used in SA. 2059 replace_dict = {'global_variable':'', 'amp2_lines':'', 2060 'proc_prefix':proc_prefix, 'proc_id':''} 2061 2062 # Extract helas calls 2063 helas_calls = fortran_model.get_matrix_element_calls(\ 2064 matrix_element) 2065 2066 replace_dict['helas_calls'] = "\n".join(helas_calls) 2067 2068 # Extract version number and date from VERSION file 2069 info_lines = self.get_mg5_info_lines() 2070 replace_dict['info_lines'] = info_lines 2071 2072 # Extract process info lines 2073 process_lines = self.get_process_info_lines(matrix_element) 2074 replace_dict['process_lines'] = process_lines 2075 2076 # Extract number of external particles 2077 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2078 replace_dict['nexternal'] = nexternal 2079 2080 # Extract ncomb 2081 ncomb = matrix_element.get_helicity_combinations() 2082 replace_dict['ncomb'] = ncomb 2083 2084 # Extract helicity lines 2085 helicity_lines = self.get_helicity_lines(matrix_element) 2086 replace_dict['helicity_lines'] = helicity_lines 2087 2088 # Extract overall denominator 2089 # Averaging initial state color, spin, and identical FS particles 2090 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2091 2092 # Extract ngraphs 2093 ngraphs = matrix_element.get_number_of_amplitudes() 2094 replace_dict['ngraphs'] = ngraphs 2095 2096 # Extract nwavefuncs 2097 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2098 replace_dict['nwavefuncs'] = nwavefuncs 2099 2100 # Extract ncolor 2101 ncolor = max(1, len(matrix_element.get('color_basis'))) 2102 replace_dict['ncolor'] = ncolor 2103 2104 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2105 2106 # Extract color data lines 2107 color_data_lines = self.get_color_data_lines(matrix_element) 2108 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2109 2110 if self.opt['export_format']=='standalone_msP': 2111 # For MadSpin need to return the AMP2 2112 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2113 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2114 replace_dict['global_variable'] = \ 2115 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2116 2117 # JAMP definition, depends on the number of independent split orders 2118 split_orders=matrix_element.get('processes')[0].get('split_orders') 2119 2120 if len(split_orders)==0: 2121 replace_dict['nSplitOrders']='' 2122 # Extract JAMP lines 2123 jamp_lines = self.get_JAMP_lines(matrix_element) 2124 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2125 # set all amplitude order to weight 1 and only one squared order 2126 # contribution which is of course ALL_ORDERS=2. 2127 squared_orders = [(2,),] 2128 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2129 replace_dict['chosen_so_configs'] = '.TRUE.' 2130 replace_dict['nSqAmpSplitOrders']=1 2131 replace_dict['split_order_str_list']='' 2132 else: 2133 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2134 replace_dict['nAmpSplitOrders']=len(amp_orders) 2135 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2136 replace_dict['nSplitOrders']=len(split_orders) 2137 replace_dict['split_order_str_list']=str(split_orders) 2138 amp_so = self.get_split_orders_lines( 2139 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2140 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2141 replace_dict['ampsplitorders']='\n'.join(amp_so) 2142 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2143 jamp_lines = self.get_JAMP_lines_split_order(\ 2144 matrix_element,amp_orders,split_order_names=split_orders) 2145 2146 # Now setup the array specifying what squared split order is chosen 2147 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2148 matrix_element.get('processes')[0],squared_orders) 2149 2150 # For convenience we also write the driver check_sa_splitOrders.f 2151 # that explicitely writes out the contribution from each squared order. 2152 # The original driver still works and is compiled with 'make' while 2153 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2154 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2155 self.write_check_sa_splitOrders(squared_orders,split_orders, 2156 nexternal,ninitial,proc_prefix,check_sa_writer) 2157 2158 if write: 2159 writers.FortranWriter('nsqso_born.inc').writelines( 2160 """INTEGER NSQSO_BORN 2161 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2162 2163 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2164 2165 matrix_template = self.matrix_template 2166 if self.opt['export_format']=='standalone_msP' : 2167 matrix_template = 'matrix_standalone_msP_v4.inc' 2168 elif self.opt['export_format']=='standalone_msF': 2169 matrix_template = 'matrix_standalone_msF_v4.inc' 2170 elif self.opt['export_format']=='matchbox': 2171 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2172 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2173 2174 if len(split_orders)>0: 2175 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2176 logger.debug("Warning: The export format %s is not "+\ 2177 " available for individual ME evaluation of given coupl. orders."+\ 2178 " Only the total ME will be computed.", self.opt['export_format']) 2179 elif self.opt['export_format'] in ['madloop_matchbox']: 2180 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2181 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2182 else: 2183 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2184 2185 if write: 2186 path = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2187 content = open(path).read() 2188 content = content % replace_dict 2189 # Write the file 2190 writer.writelines(content) 2191 # Add the helper functions. 2192 if len(split_orders)>0: 2193 content = '\n' + open(pjoin(_file_path, \ 2194 'iolibs/template_files/split_orders_helping_functions.inc'))\ 2195 .read()%replace_dict 2196 writer.writelines(content) 2197 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2198 else: 2199 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2200 return replace_dict # for subclass update
2201
2202 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2203 nincoming, proc_prefix, writer):
2204 """ Write out a more advanced version of the check_sa drivers that 2205 individually returns the matrix element for each contributing squared 2206 order.""" 2207 2208 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2209 'template_files', 'check_sa_splitOrders.f')).read() 2210 printout_sq_orders=[] 2211 for i, squared_order in enumerate(squared_orders): 2212 sq_orders=[] 2213 for j, sqo in enumerate(squared_order): 2214 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2215 printout_sq_orders.append(\ 2216 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2217 %(i+1,' '.join(sq_orders),i+1)) 2218 printout_sq_orders='\n'.join(printout_sq_orders) 2219 writer.writelines(check_sa_content%{\ 2220 'printout_sqorders':printout_sq_orders, 2221 'nSplitOrders':len(squared_orders), 2222 'nexternal':nexternal, 2223 'nincoming':nincoming, 2224 'proc_prefix':proc_prefix})
2225
2226 2227 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2228 """class to take care of exporting a set of matrix element for the Matchbox 2229 code in the case of Born only routine""" 2230 2231 default_opt = {'clean': False, 'complex_mass':False, 2232 'export_format':'matchbox', 'mp': False, 2233 'sa_symmetry': True} 2234 2235 #specific template of the born 2236 2237 2238 matrix_template = "matrix_standalone_matchbox.inc" 2239 2240 @staticmethod
2241 - def get_color_string_lines(matrix_element):
2242 """Return the color matrix definition lines for this matrix element. Split 2243 rows in chunks of size n.""" 2244 2245 if not matrix_element.get('color_matrix'): 2246 return "\n".join(["out = 1"]) 2247 2248 #start the real work 2249 color_denominators = matrix_element.get('color_matrix').\ 2250 get_line_denominators() 2251 matrix_strings = [] 2252 my_cs = color.ColorString() 2253 for i_color in xrange(len(color_denominators)): 2254 # Then write the numerators for the matrix elements 2255 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2256 t_str=repr(my_cs) 2257 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2258 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2259 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2260 all_matches = t_match.findall(t_str) 2261 output = {} 2262 arg=[] 2263 for match in all_matches: 2264 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2265 if ctype in ['ColorOne' ]: 2266 continue 2267 if ctype not in ['T', 'Tr' ]: 2268 raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype 2269 tmparg += ['0'] 2270 arg +=tmparg 2271 for j, v in enumerate(arg): 2272 output[(i_color,j)] = v 2273 2274 for key in output: 2275 if matrix_strings == []: 2276 #first entry 2277 matrix_strings.append(""" 2278 if (in1.eq.%s.and.in2.eq.%s)then 2279 out = %s 2280 """ % (key[0], key[1], output[key])) 2281 else: 2282 #not first entry 2283 matrix_strings.append(""" 2284 elseif (in1.eq.%s.and.in2.eq.%s)then 2285 out = %s 2286 """ % (key[0], key[1], output[key])) 2287 if len(matrix_strings): 2288 matrix_strings.append(" else \n out = - 1 \n endif") 2289 else: 2290 return "\n out = - 1 \n " 2291 return "\n".join(matrix_strings)
2292
2293 - def make(self,*args,**opts):
2294 pass
2295
2296 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2297 JAMP_formatLC=None):
2298 2299 """Adding leading color part of the colorflow""" 2300 2301 if not JAMP_formatLC: 2302 JAMP_formatLC= "LN%s" % JAMP_format 2303 2304 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2305 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2306 col_amps=col_amps.get_color_amplitudes() 2307 elif(isinstance(col_amps,list)): 2308 if(col_amps and isinstance(col_amps[0],list)): 2309 col_amps=col_amps 2310 else: 2311 raise MadGraph5Error, error_msg % 'col_amps' 2312 else: 2313 raise MadGraph5Error, error_msg % 'col_amps' 2314 2315 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2316 JAMP_format=JAMP_format, 2317 AMP_format=AMP_format, 2318 split=-1) 2319 2320 2321 # Filter the col_ampls to generate only those without any 1/NC terms 2322 2323 LC_col_amps = [] 2324 for coeff_list in col_amps: 2325 to_add = [] 2326 for (coefficient, amp_number) in coeff_list: 2327 if coefficient[3]==0: 2328 to_add.append( (coefficient, amp_number) ) 2329 LC_col_amps.append(to_add) 2330 2331 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2332 JAMP_format=JAMP_formatLC, 2333 AMP_format=AMP_format, 2334 split=-1) 2335 2336 return text
2337
2338 2339 2340 2341 #=============================================================================== 2342 # ProcessExporterFortranMW 2343 #=============================================================================== 2344 -class ProcessExporterFortranMW(ProcessExporterFortran):
2345 """Class to take care of exporting a set of matrix elements to 2346 MadGraph v4 - MadWeight format.""" 2347 2348 matrix_file="matrix_standalone_v4.inc" 2349
2350 - def copy_v4template(self, modelname):
2351 """Additional actions needed for setup of Template 2352 """ 2353 2354 super(ProcessExporterFortranMW, self).copy_v4template(modelname) 2355 2356 # Add the MW specific file 2357 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2358 pjoin(self.dir_path, 'Source','MadWeight'), True) 2359 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2360 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2361 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2362 pjoin(self.dir_path, 'Source','setrun.f')) 2363 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2364 pjoin(self.dir_path, 'Source','run.inc')) 2365 # File created from Template (Different in some child class) 2366 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2367 self.write_run_config_file(writers.FortranWriter(filename)) 2368 2369 try: 2370 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2371 stdout = os.open(os.devnull, os.O_RDWR), 2372 stderr = os.open(os.devnull, os.O_RDWR), 2373 cwd=self.dir_path) 2374 except OSError: 2375 # Probably madweight already called 2376 pass 2377 2378 # Copy the different python file in the Template 2379 self.copy_python_file() 2380 # create the appropriate cuts.f 2381 self.get_mw_cuts_version() 2382 2383 # add the makefile in Source directory 2384 filename = os.path.join(self.dir_path,'Source','makefile') 2385 self.write_source_makefile(writers.FortranWriter(filename))
2386 2387 2388 2389 2390 #=========================================================================== 2391 # convert_model_to_mg4 2392 #===========================================================================
2393 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 2394 wanted_couplings = []):
2395 2396 super(ProcessExporterFortranMW,self).convert_model_to_mg4(model, 2397 wanted_lorentz, wanted_couplings) 2398 2399 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2400 try: 2401 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2402 except OSError as error: 2403 pass 2404 model_path = model.get('modelpath') 2405 # This is not safe if there is a '##' or '-' in the path. 2406 shutil.copytree(model_path, 2407 pjoin(self.dir_path,'bin','internal','ufomodel'), 2408 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2409 if hasattr(model, 'restrict_card'): 2410 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2411 'restrict_default.dat') 2412 if isinstance(model.restrict_card, check_param_card.ParamCard): 2413 model.restrict_card.write(out_path) 2414 else: 2415 files.cp(model.restrict_card, out_path)
2416 2417 #=========================================================================== 2418 # generate_subprocess_directory_v4 2419 #===========================================================================
2420 - def copy_python_file(self):
2421 """copy the python file require for the Template""" 2422 2423 # madevent interface 2424 cp(_file_path+'/interface/madweight_interface.py', 2425 self.dir_path+'/bin/internal/madweight_interface.py') 2426 cp(_file_path+'/interface/extended_cmd.py', 2427 self.dir_path+'/bin/internal/extended_cmd.py') 2428 cp(_file_path+'/interface/common_run_interface.py', 2429 self.dir_path+'/bin/internal/common_run_interface.py') 2430 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2431 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2432 #cp(_file_path+'/iolibs/save_load_object.py', 2433 # self.dir_path+'/bin/internal/save_load_object.py') 2434 cp(_file_path+'/iolibs/file_writers.py', 2435 self.dir_path+'/bin/internal/file_writers.py') 2436 #model file 2437 cp(_file_path+'../models/check_param_card.py', 2438 self.dir_path+'/bin/internal/check_param_card.py') 2439 2440 #madevent file 2441 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2442 cp(_file_path+'/various/lhe_parser.py', 2443 self.dir_path+'/bin/internal/lhe_parser.py') 2444 2445 cp(_file_path+'/various/banner.py', 2446 self.dir_path+'/bin/internal/banner.py') 2447 cp(_file_path+'/various/shower_card.py', 2448 self.dir_path+'/bin/internal/shower_card.py') 2449 cp(_file_path+'/various/cluster.py', 2450 self.dir_path+'/bin/internal/cluster.py') 2451 2452 # logging configuration 2453 cp(_file_path+'/interface/.mg5_logging.conf', 2454 self.dir_path+'/bin/internal/me5_logging.conf') 2455 cp(_file_path+'/interface/coloring_logging.py', 2456 self.dir_path+'/bin/internal/coloring_logging.py')
2457 2458 2459 #=========================================================================== 2460 # Change the version of cuts.f to the one compatible with MW 2461 #===========================================================================
2462 - def get_mw_cuts_version(self, outpath=None):
2463 """create the appropriate cuts.f 2464 This is based on the one associated to ME output but: 2465 1) No clustering (=> remove initcluster/setclscales) 2466 2) Adding the definition of cut_bw at the file. 2467 """ 2468 2469 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2470 2471 text = StringIO() 2472 #1) remove all dependencies in ickkw >1: 2473 nb_if = 0 2474 for line in template: 2475 if 'if(xqcut.gt.0d0' in line: 2476 nb_if = 1 2477 if nb_if == 0: 2478 text.write(line) 2479 continue 2480 if re.search(r'if\(.*\)\s*then', line): 2481 nb_if += 1 2482 elif 'endif' in line: 2483 nb_if -= 1 2484 2485 #2) add fake cut_bw (have to put the true one later) 2486 text.write(""" 2487 logical function cut_bw(p) 2488 include 'madweight_param.inc' 2489 double precision p(*) 2490 if (bw_cut) then 2491 cut_bw = .true. 2492 else 2493 stop 1 2494 endif 2495 return 2496 end 2497 """) 2498 2499 final = text.getvalue() 2500 #3) remove the call to initcluster: 2501 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2502 template = template.replace('genps.inc', 'maxparticles.inc') 2503 #Now we can write it 2504 if not outpath: 2505 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2506 elif isinstance(outpath, str): 2507 fsock = open(outpath, 'w') 2508 else: 2509 fsock = outpath 2510 fsock.write(template)
2511 2512 2513 2514 #=========================================================================== 2515 # Make the Helas and Model directories for Standalone directory 2516 #===========================================================================
2517 - def make(self):
2518 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2519 everything for running madweight 2520 """ 2521 2522 source_dir = os.path.join(self.dir_path, "Source") 2523 logger.info("Running make for Helas") 2524 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2525 logger.info("Running make for Model") 2526 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2527 logger.info("Running make for PDF") 2528 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2529 logger.info("Running make for CERNLIB") 2530 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2531 logger.info("Running make for GENERIC") 2532 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2533 logger.info("Running make for blocks") 2534 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2535 logger.info("Running make for tools") 2536 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2537 2538 #=========================================================================== 2539 # Create proc_card_mg5.dat for MadWeight directory 2540 #===========================================================================
2541 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 2542 online = False, compiler=default_compiler):
2543 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2544 2545 #proc_charac 2546 self.create_proc_charac() 2547 2548 # Write maxparticles.inc based on max of ME's/subprocess groups 2549 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2550 self.write_maxparticles_file(writers.FortranWriter(filename), 2551 matrix_elements) 2552 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2553 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2554 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2555 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2556 2557 self.set_compiler(compiler) 2558 self.make() 2559 2560 # Write command history as proc_card_mg5 2561 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2562 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2563 history.write(output_file) 2564 2565 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler)
2566 2567 2568 #=========================================================================== 2569 # create the run_card for MW 2570 #===========================================================================
2571 - def create_run_card(self, matrix_elements, history):
2572 """ """ 2573 2574 run_card = banner_mod.RunCard() 2575 2576 # pass to default for MW 2577 run_card["run_tag"] = "\'not_use\'" 2578 run_card["fixed_ren_scale"] = "T" 2579 run_card["fixed_fac_scale"] = "T" 2580 run_card.remove_all_cut() 2581 2582 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2583 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2584 python_template=True) 2585 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2586 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2587 python_template=True)
2588 2589 #=========================================================================== 2590 # export model files 2591 #===========================================================================
2592 - def export_model_files(self, model_path):
2593 """export the model dependent files for V4 model""" 2594 2595 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2596 # Add the routine update_as_param in v4 model 2597 # This is a function created in the UFO 2598 text=""" 2599 subroutine update_as_param() 2600 call setpara('param_card.dat',.false.) 2601 return 2602 end 2603 """ 2604 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2605 ff.write(text) 2606 ff.close() 2607 2608 # Modify setrun.f 2609 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2610 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2611 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2612 fsock.write(text) 2613 fsock.close() 2614 2615 # Modify initialization.f 2616 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2617 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2618 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2619 fsock.write(text) 2620 fsock.close() 2621 2622 2623 self.make_model_symbolic_link()
2624 2625 #=========================================================================== 2626 # generate_subprocess_directory_v4 2627 #===========================================================================
2628 - def generate_subprocess_directory_v4(self, matrix_element, 2629 fortran_model,number):
2630 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2631 including the necessary matrix.f and nexternal.inc files""" 2632 2633 cwd = os.getcwd() 2634 2635 # Create the directory PN_xx_xxxxx in the specified path 2636 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2637 "P%s" % matrix_element.get('processes')[0].shell_string()) 2638 2639 try: 2640 os.mkdir(dirpath) 2641 except os.error as error: 2642 logger.warning(error.strerror + " " + dirpath) 2643 2644 #try: 2645 # os.chdir(dirpath) 2646 #except os.error: 2647 # logger.error('Could not cd to directory %s' % dirpath) 2648 # return 0 2649 2650 logger.info('Creating files in directory %s' % dirpath) 2651 2652 # Extract number of external particles 2653 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2654 2655 # Create the matrix.f file and the nexternal.inc file 2656 filename = pjoin(dirpath,'matrix.f') 2657 calls,ncolor = self.write_matrix_element_v4( 2658 writers.FortranWriter(filename), 2659 matrix_element, 2660 fortran_model) 2661 2662 filename = pjoin(dirpath, 'auto_dsig.f') 2663 self.write_auto_dsig_file(writers.FortranWriter(filename), 2664 matrix_element) 2665 2666 filename = pjoin(dirpath, 'configs.inc') 2667 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2668 writers.FortranWriter(filename), 2669 matrix_element) 2670 2671 filename = pjoin(dirpath, 'nexternal.inc') 2672 self.write_nexternal_file(writers.FortranWriter(filename), 2673 nexternal, ninitial) 2674 2675 filename = pjoin(dirpath, 'leshouche.inc') 2676 self.write_leshouche_file(writers.FortranWriter(filename), 2677 matrix_element) 2678 2679 filename = pjoin(dirpath, 'props.inc') 2680 self.write_props_file(writers.FortranWriter(filename), 2681 matrix_element, 2682 s_and_t_channels) 2683 2684 filename = pjoin(dirpath, 'pmass.inc') 2685 self.write_pmass_file(writers.FortranWriter(filename), 2686 matrix_element) 2687 2688 filename = pjoin(dirpath, 'ngraphs.inc') 2689 self.write_ngraphs_file(writers.FortranWriter(filename), 2690 len(matrix_element.get_all_amplitudes())) 2691 2692 filename = pjoin(dirpath, 'maxamps.inc') 2693 self.write_maxamps_file(writers.FortranWriter(filename), 2694 len(matrix_element.get('diagrams')), 2695 ncolor, 2696 len(matrix_element.get('processes')), 2697 1) 2698 2699 filename = pjoin(dirpath, 'phasespace.inc') 2700 self.write_phasespace_file(writers.FortranWriter(filename), 2701 len(matrix_element.get('diagrams')), 2702 ) 2703 2704 # Generate diagrams 2705 filename = pjoin(dirpath, "matrix.ps") 2706 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2707 get('diagrams'), 2708 filename, 2709 model=matrix_element.get('processes')[0].\ 2710 get('model'), 2711 amplitude='') 2712 logger.info("Generating Feynman diagrams for " + \ 2713 matrix_element.get('processes')[0].nice_string()) 2714 plot.draw() 2715 2716 #import genps.inc and maxconfigs.inc into Subprocesses 2717 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 2718 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 2719 2720 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 2721 2722 for file in linkfiles: 2723 ln('../%s' % file, starting_dir=cwd) 2724 2725 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 2726 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 2727 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 2728 ln('phasespace.inc', '../', log=True, cwd=dirpath) 2729 # Return to original PWD 2730 #os.chdir(cwd) 2731 2732 if not calls: 2733 calls = 0 2734 return calls
2735 2736 #=========================================================================== 2737 # write_matrix_element_v4 2738 #===========================================================================
2739 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
2740 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 2741 2742 if not matrix_element.get('processes') or \ 2743 not matrix_element.get('diagrams'): 2744 return 0 2745 2746 if not isinstance(writer, writers.FortranWriter): 2747 raise writers.FortranWriter.FortranWriterError(\ 2748 "writer not FortranWriter") 2749 2750 # Set lowercase/uppercase Fortran code 2751 writers.FortranWriter.downcase = False 2752 2753 replace_dict = {} 2754 2755 # Extract version number and date from VERSION file 2756 info_lines = self.get_mg5_info_lines() 2757 replace_dict['info_lines'] = info_lines 2758 2759 # Extract process info lines 2760 process_lines = self.get_process_info_lines(matrix_element) 2761 replace_dict['process_lines'] = process_lines 2762 2763 # Set proc_id 2764 replace_dict['proc_id'] = proc_id 2765 2766 # Extract number of external particles 2767 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2768 replace_dict['nexternal'] = nexternal 2769 2770 # Extract ncomb 2771 ncomb = matrix_element.get_helicity_combinations() 2772 replace_dict['ncomb'] = ncomb 2773 2774 # Extract helicity lines 2775 helicity_lines = self.get_helicity_lines(matrix_element) 2776 replace_dict['helicity_lines'] = helicity_lines 2777 2778 # Extract overall denominator 2779 # Averaging initial state color, spin, and identical FS particles 2780 den_factor_line = self.get_den_factor_line(matrix_element) 2781 replace_dict['den_factor_line'] = den_factor_line 2782 2783 # Extract ngraphs 2784 ngraphs = matrix_element.get_number_of_amplitudes() 2785 replace_dict['ngraphs'] = ngraphs 2786 2787 # Extract nwavefuncs 2788 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2789 replace_dict['nwavefuncs'] = nwavefuncs 2790 2791 # Extract ncolor 2792 ncolor = max(1, len(matrix_element.get('color_basis'))) 2793 replace_dict['ncolor'] = ncolor 2794 2795 # Extract color data lines 2796 color_data_lines = self.get_color_data_lines(matrix_element) 2797 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2798 2799 # Extract helas calls 2800 helas_calls = fortran_model.get_matrix_element_calls(\ 2801 matrix_element) 2802 2803 replace_dict['helas_calls'] = "\n".join(helas_calls) 2804 2805 # Extract JAMP lines 2806 jamp_lines = self.get_JAMP_lines(matrix_element) 2807 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2808 2809 file = open(os.path.join(_file_path, \ 2810 'iolibs/template_files/%s' % self.matrix_file)).read() 2811 file = file % replace_dict 2812 2813 2814 # Write the file 2815 writer.writelines(file) 2816 2817 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor
2818 2819 #=========================================================================== 2820 # write_source_makefile 2821 #===========================================================================
2822 - def write_source_makefile(self, writer):
2823 """Write the nexternal.inc file for madweight""" 2824 2825 2826 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 2827 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 2828 text = open(path).read() % {'libraries': set_of_lib} 2829 writer.write(text) 2830 2831 return True
2832
2833 - def write_phasespace_file(self, writer, nb_diag):
2834 """ """ 2835 2836 template = """ include 'maxparticles.inc' 2837 integer max_branches 2838 parameter (max_branches=max_particles-1) 2839 integer max_configs 2840 parameter (max_configs=%(nb_diag)s) 2841 2842 c channel position 2843 integer config_pos,perm_pos 2844 common /to_config/config_pos,perm_pos 2845 2846 """ 2847 2848 writer.write(template % {'nb_diag': nb_diag})
2849 2850 2851 #=========================================================================== 2852 # write_auto_dsig_file 2853 #===========================================================================
2854 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
2855 """Write the auto_dsig.f file for the differential cross section 2856 calculation, includes pdf call information (MadWeight format)""" 2857 2858 if not matrix_element.get('processes') or \ 2859 not matrix_element.get('diagrams'): 2860 return 0 2861 2862 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2863 2864 if ninitial < 1 or ninitial > 2: 2865 raise writers.FortranWriter.FortranWriterError, \ 2866 """Need ninitial = 1 or 2 to write auto_dsig file""" 2867 2868 replace_dict = {} 2869 2870 # Extract version number and date from VERSION file 2871 info_lines = self.get_mg5_info_lines() 2872 replace_dict['info_lines'] = info_lines 2873 2874 # Extract process info lines 2875 process_lines = self.get_process_info_lines(matrix_element) 2876 replace_dict['process_lines'] = process_lines 2877 2878 # Set proc_id 2879 replace_dict['proc_id'] = proc_id 2880 replace_dict['numproc'] = 1 2881 2882 # Set dsig_line 2883 if ninitial == 1: 2884 # No conversion, since result of decay should be given in GeV 2885 dsig_line = "pd(0)*dsiguu" 2886 else: 2887 # Convert result (in GeV) to pb 2888 dsig_line = "pd(0)*conv*dsiguu" 2889 2890 replace_dict['dsig_line'] = dsig_line 2891 2892 # Extract pdf lines 2893 pdf_vars, pdf_data, pdf_lines = \ 2894 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 2895 replace_dict['pdf_vars'] = pdf_vars 2896 replace_dict['pdf_data'] = pdf_data 2897 replace_dict['pdf_lines'] = pdf_lines 2898 2899 # Lines that differ between subprocess group and regular 2900 if proc_id: 2901 replace_dict['numproc'] = int(proc_id) 2902 replace_dict['passcuts_begin'] = "" 2903 replace_dict['passcuts_end'] = "" 2904 # Set lines for subprocess group version 2905 # Set define_iconfigs_lines 2906 replace_dict['define_subdiag_lines'] = \ 2907 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 2908 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 2909 else: 2910 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 2911 replace_dict['passcuts_end'] = "ENDIF" 2912 replace_dict['define_subdiag_lines'] = "" 2913 2914 file = open(os.path.join(_file_path, \ 2915 'iolibs/template_files/auto_dsig_mw.inc')).read() 2916 2917 file = file % replace_dict 2918 2919 2920 # Write the file 2921 writer.writelines(file)
2922 2923 #=========================================================================== 2924 # write_configs_file 2925 #===========================================================================
2926 - def write_configs_file(self, writer, matrix_element):
2927 """Write the configs.inc file for MadEvent""" 2928 2929 # Extract number of external particles 2930 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2931 2932 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 2933 mapconfigs = [c[0] for c in configs] 2934 model = matrix_element.get('processes')[0].get('model') 2935 return mapconfigs, self.write_configs_file_from_diagrams(writer, 2936 [[c[1]] for c in configs], 2937 mapconfigs, 2938 nexternal, ninitial,matrix_element, model)
2939 2940 #=========================================================================== 2941 # write_run_configs_file 2942 #===========================================================================
2943 - def write_run_config_file(self, writer):
2944 """Write the run_configs.inc file for MadWeight""" 2945 2946 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 2947 text = open(path).read() % {'chanperjob':'5'} 2948 writer.write(text) 2949 return True
2950 2951 #=========================================================================== 2952 # write_configs_file_from_diagrams 2953 #===========================================================================
2954 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 2955 nexternal, ninitial, matrix_element, model):
2956 """Write the actual configs.inc file. 2957 2958 configs is the diagrams corresponding to configs (each 2959 diagrams is a list of corresponding diagrams for all 2960 subprocesses, with None if there is no corresponding diagrams 2961 for a given process). 2962 mapconfigs gives the diagram number for each config. 2963 2964 For s-channels, we need to output one PDG for each subprocess in 2965 the subprocess group, in order to be able to pick the right 2966 one for multiprocesses.""" 2967 2968 lines = [] 2969 2970 particle_dict = matrix_element.get('processes')[0].get('model').\ 2971 get('particle_dict') 2972 2973 s_and_t_channels = [] 2974 2975 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 2976 for config in configs if [d for d in config if d][0].\ 2977 get_vertex_leg_numbers()!=[]] 2978 2979 minvert = min(vert_list) if vert_list!=[] else 0 2980 # Number of subprocesses 2981 nsubprocs = len(configs[0]) 2982 2983 nconfigs = 0 2984 2985 new_pdg = model.get_first_non_pdg() 2986 2987 for iconfig, helas_diags in enumerate(configs): 2988 if any([vert > minvert for vert in 2989 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 2990 # Only 3-vertices allowed in configs.inc 2991 continue 2992 nconfigs += 1 2993 2994 # Need s- and t-channels for all subprocesses, including 2995 # those that don't contribute to this config 2996 empty_verts = [] 2997 stchannels = [] 2998 for h in helas_diags: 2999 if h: 3000 # get_s_and_t_channels gives vertices starting from 3001 # final state external particles and working inwards 3002 stchannels.append(h.get('amplitudes')[0].\ 3003 get_s_and_t_channels(ninitial,model,new_pdg)) 3004 else: 3005 stchannels.append((empty_verts, None)) 3006 3007 # For t-channels, just need the first non-empty one 3008 tchannels = [t for s,t in stchannels if t != None][0] 3009 3010 # For s_and_t_channels (to be used later) use only first config 3011 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 3012 tchannels]) 3013 3014 # Make sure empty_verts is same length as real vertices 3015 if any([s for s,t in stchannels]): 3016 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3017 3018 # Reorganize s-channel vertices to get a list of all 3019 # subprocesses for each vertex 3020 schannels = zip(*[s for s,t in stchannels]) 3021 else: 3022 schannels = [] 3023 3024 allchannels = schannels 3025 if len(tchannels) > 1: 3026 # Write out tchannels only if there are any non-trivial ones 3027 allchannels = schannels + tchannels 3028 3029 # Write out propagators for s-channel and t-channel vertices 3030 3031 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3032 # Correspondance between the config and the diagram = amp2 3033 lines.append("* %d %d " % (nconfigs, 3034 mapconfigs[iconfig])) 3035 3036 for verts in allchannels: 3037 if verts in schannels: 3038 vert = [v for v in verts if v][0] 3039 else: 3040 vert = verts 3041 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3042 last_leg = vert.get('legs')[-1] 3043 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3044 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3045 # (last_leg.get('number'), nconfigs, len(daughters), 3046 # ",".join([str(d) for d in daughters]))) 3047 3048 if last_leg.get('id') == 21 and 21 not in particle_dict: 3049 # Fake propagator used in multiparticle vertices 3050 mass = 'zero' 3051 width = 'zero' 3052 pow_part = 0 3053 else: 3054 if (last_leg.get('id')!=7): 3055 particle = particle_dict[last_leg.get('id')] 3056 # Get mass 3057 mass = particle.get('mass') 3058 # Get width 3059 width = particle.get('width') 3060 else : # fake propagator used in multiparticle vertices 3061 mass= 'zero' 3062 width= 'zero' 3063 3064 line=line+" "+mass+" "+width+" " 3065 3066 if verts in schannels: 3067 pdgs = [] 3068 for v in verts: 3069 if v: 3070 pdgs.append(v.get('legs')[-1].get('id')) 3071 else: 3072 pdgs.append(0) 3073 lines.append(line+" S "+str(last_leg.get('id'))) 3074 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3075 # (last_leg.get('number'), nconfigs, nsubprocs, 3076 # ",".join([str(d) for d in pdgs]))) 3077 # lines.append("data tprid(%d,%d)/0/" % \ 3078 # (last_leg.get('number'), nconfigs)) 3079 elif verts in tchannels[:-1]: 3080 lines.append(line+" T "+str(last_leg.get('id'))) 3081 # lines.append("data tprid(%d,%d)/%d/" % \ 3082 # (last_leg.get('number'), nconfigs, 3083 # abs(last_leg.get('id')))) 3084 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3085 # (last_leg.get('number'), nconfigs, nsubprocs, 3086 # ",".join(['0'] * nsubprocs))) 3087 3088 # Write out number of configs 3089 # lines.append("# Number of configs") 3090 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3091 lines.append(" * ") # a line with just a star indicates this is the end of file 3092 # Write the file 3093 writer.writelines(lines) 3094 3095 return s_and_t_channels
3096
3097 3098 #=============================================================================== 3099 # ProcessExporterFortranME 3100 #=============================================================================== 3101 -class ProcessExporterFortranME(ProcessExporterFortran):
3102 """Class to take care of exporting a set of matrix elements to 3103 MadEvent format.""" 3104 3105 matrix_file = "matrix_madevent_v4.inc" 3106
3107 - def copy_v4template(self, modelname):
3108 """Additional actions needed for setup of Template 3109 """ 3110 3111 super(ProcessExporterFortranME, self).copy_v4template(modelname) 3112 3113 # File created from Template (Different in some child class) 3114 filename = pjoin(self.dir_path,'Source','run_config.inc') 3115 self.write_run_config_file(writers.FortranWriter(filename)) 3116 3117 # The next file are model dependant (due to SLAH convention) 3118 self.model_name = modelname 3119 # Add the symmetry.f 3120 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3121 self.write_symmetry(writers.FortranWriter(filename)) 3122 # 3123 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3124 self.write_addmothers(writers.FortranWriter(filename)) 3125 # Copy the different python file in the Template 3126 self.copy_python_file()
3127 3128 3129 3130 3131 3132 #=========================================================================== 3133 # generate_subprocess_directory_v4 3134 #===========================================================================
3135 - def copy_python_file(self):
3136 """copy the python file require for the Template""" 3137 3138 # madevent interface 3139 cp(_file_path+'/interface/madevent_interface.py', 3140 self.dir_path+'/bin/internal/madevent_interface.py') 3141 cp(_file_path+'/interface/extended_cmd.py', 3142 self.dir_path+'/bin/internal/extended_cmd.py') 3143 cp(_file_path+'/interface/common_run_interface.py', 3144 self.dir_path+'/bin/internal/common_run_interface.py') 3145 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3146 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3147 cp(_file_path+'/iolibs/save_load_object.py', 3148 self.dir_path+'/bin/internal/save_load_object.py') 3149 cp(_file_path+'/iolibs/file_writers.py', 3150 self.dir_path+'/bin/internal/file_writers.py') 3151 #model file 3152 cp(_file_path+'../models/check_param_card.py', 3153 self.dir_path+'/bin/internal/check_param_card.py') 3154 3155 #copy all the file present in madevent directory 3156 for name in os.listdir(pjoin(_file_path, 'madevent')): 3157 if name not in ['__init__.py'] and name.endswith('.py'): 3158 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3159 3160 #madevent file 3161 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3162 cp(_file_path+'/various/lhe_parser.py', 3163 self.dir_path+'/bin/internal/lhe_parser.py') 3164 cp(_file_path+'/various/banner.py', 3165 self.dir_path+'/bin/internal/banner.py') 3166 cp(_file_path+'/various/cluster.py', 3167 self.dir_path+'/bin/internal/cluster.py') 3168 cp(_file_path+'/madevent/combine_runs.py', 3169 self.dir_path+'/bin/internal/combine_runs.py') 3170 # logging configuration 3171 cp(_file_path+'/interface/.mg5_logging.conf', 3172 self.dir_path+'/bin/internal/me5_logging.conf') 3173 cp(_file_path+'/interface/coloring_logging.py', 3174 self.dir_path+'/bin/internal/coloring_logging.py') 3175 # shower card and FO_analyse_card. 3176 # Although not needed, it is imported by banner.py 3177 cp(_file_path+'/various/shower_card.py', 3178 self.dir_path+'/bin/internal/shower_card.py') 3179 cp(_file_path+'/various/FO_analyse_card.py', 3180 self.dir_path+'/bin/internal/FO_analyse_card.py')
3181 3182
3183 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 3184 wanted_couplings = []):
3185 3186 super(ProcessExporterFortranME,self).convert_model_to_mg4(model, 3187 wanted_lorentz, wanted_couplings) 3188 3189 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3190 try: 3191 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3192 except OSError as error: 3193 pass 3194 model_path = model.get('modelpath') 3195 # This is not safe if there is a '##' or '-' in the path. 3196 shutil.copytree(model_path, 3197 pjoin(self.dir_path,'bin','internal','ufomodel'), 3198 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3199 if hasattr(model, 'restrict_card'): 3200 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3201 'restrict_default.dat') 3202 if isinstance(model.restrict_card, check_param_card.ParamCard): 3203 model.restrict_card.write(out_path) 3204 else: 3205 files.cp(model.restrict_card, out_path)
3206 3207 #=========================================================================== 3208 # export model files 3209 #===========================================================================
3210 - def export_model_files(self, model_path):
3211 """export the model dependent files""" 3212 3213 super(ProcessExporterFortranME,self).export_model_files(model_path) 3214 3215 # Add the routine update_as_param in v4 model 3216 # This is a function created in the UFO 3217 text=""" 3218 subroutine update_as_param() 3219 call setpara('param_card.dat',.false.) 3220 return 3221 end 3222 """ 3223 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3224 ff.write(text) 3225 ff.close() 3226 3227 # Add the symmetry.f 3228 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3229 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3230 3231 # Modify setrun.f 3232 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3233 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3234 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3235 fsock.write(text) 3236 fsock.close() 3237 3238 self.make_model_symbolic_link()
3239 3240 3241 #=========================================================================== 3242 # generate_subprocess_directory_v4 3243 #===========================================================================
3244 - def generate_subprocess_directory_v4(self, matrix_element, 3245 fortran_model, 3246 me_number):
3247 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3248 including the necessary matrix.f and various helper files""" 3249 3250 cwd = os.getcwd() 3251 path = pjoin(self.dir_path, 'SubProcesses') 3252 3253 3254 if not self.model: 3255 self.model = matrix_element.get('processes')[0].get('model') 3256 3257 3258 3259 #os.chdir(path) 3260 # Create the directory PN_xx_xxxxx in the specified path 3261 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3262 try: 3263 os.mkdir(pjoin(path,subprocdir)) 3264 except os.error as error: 3265 logger.warning(error.strerror + " " + subprocdir) 3266 3267 #try: 3268 # os.chdir(subprocdir) 3269 #except os.error: 3270 # logger.error('Could not cd to directory %s' % subprocdir) 3271 # return 0 3272 3273 logger.info('Creating files in directory %s' % subprocdir) 3274 Ppath = pjoin(path, subprocdir) 3275 3276 # Extract number of external particles 3277 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3278 3279 # Add the driver.f 3280 ncomb = matrix_element.get_helicity_combinations() 3281 filename = pjoin(Ppath,'driver.f') 3282 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1) 3283 3284 # Create the matrix.f file, auto_dsig.f file and all inc files 3285 filename = pjoin(Ppath, 'matrix.f') 3286 calls, ncolor = \ 3287 self.write_matrix_element_v4(writers.FortranWriter(filename), 3288 matrix_element, fortran_model, subproc_number = me_number) 3289 3290 filename = pjoin(Ppath, 'auto_dsig.f') 3291 self.write_auto_dsig_file(writers.FortranWriter(filename), 3292 matrix_element) 3293 3294 filename = pjoin(Ppath, 'configs.inc') 3295 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3296 writers.FortranWriter(filename), 3297 matrix_element) 3298 3299 filename = pjoin(Ppath, 'config_nqcd.inc') 3300 self.write_config_nqcd_file(writers.FortranWriter(filename), 3301 nqcd_list) 3302 3303 filename = pjoin(Ppath, 'config_subproc_map.inc') 3304 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3305 s_and_t_channels) 3306 3307 filename = pjoin(Ppath, 'coloramps.inc') 3308 self.write_coloramps_file(writers.FortranWriter(filename), 3309 mapconfigs, 3310 matrix_element) 3311 3312 filename = pjoin(Ppath, 'get_color.f') 3313 self.write_colors_file(writers.FortranWriter(filename), 3314 matrix_element) 3315 3316 filename = pjoin(Ppath, 'decayBW.inc') 3317 self.write_decayBW_file(writers.FortranWriter(filename), 3318 s_and_t_channels) 3319 3320 filename = pjoin(Ppath, 'dname.mg') 3321 self.write_dname_file(writers.FileWriter(filename), 3322 "P"+matrix_element.get('processes')[0].shell_string()) 3323 3324 filename = pjoin(Ppath, 'iproc.dat') 3325 self.write_iproc_file(writers.FortranWriter(filename), 3326 me_number) 3327 3328 filename = pjoin(Ppath, 'leshouche.inc') 3329 self.write_leshouche_file(writers.FortranWriter(filename), 3330 matrix_element) 3331 3332 filename = pjoin(Ppath, 'maxamps.inc') 3333 self.write_maxamps_file(writers.FortranWriter(filename), 3334 len(matrix_element.get('diagrams')), 3335 ncolor, 3336 len(matrix_element.get('processes')), 3337 1) 3338 3339 filename = pjoin(Ppath, 'mg.sym') 3340 self.write_mg_sym_file(writers.FortranWriter(filename), 3341 matrix_element) 3342 3343 filename = pjoin(Ppath, 'ncombs.inc') 3344 self.write_ncombs_file(writers.FortranWriter(filename), 3345 nexternal) 3346 3347 filename = pjoin(Ppath, 'nexternal.inc') 3348 self.write_nexternal_file(writers.FortranWriter(filename), 3349 nexternal, ninitial) 3350 3351 filename = pjoin(Ppath, 'ngraphs.inc') 3352 self.write_ngraphs_file(writers.FortranWriter(filename), 3353 len(mapconfigs)) 3354 3355 3356 filename = pjoin(Ppath, 'pmass.inc') 3357 self.write_pmass_file(writers.FortranWriter(filename), 3358 matrix_element) 3359 3360 filename = pjoin(Ppath, 'props.inc') 3361 self.write_props_file(writers.FortranWriter(filename), 3362 matrix_element, 3363 s_and_t_channels) 3364 3365 # Find config symmetries and permutations 3366 symmetry, perms, ident_perms = \ 3367 diagram_symmetry.find_symmetry(matrix_element) 3368 3369 filename = pjoin(Ppath, 'symswap.inc') 3370 self.write_symswap_file(writers.FortranWriter(filename), 3371 ident_perms) 3372 3373 filename = pjoin(Ppath, 'symfact_orig.dat') 3374 self.write_symfact_file(open(filename, 'w'), symmetry) 3375 3376 # Generate diagrams 3377 filename = pjoin(Ppath, "matrix.ps") 3378 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3379 get('diagrams'), 3380 filename, 3381 model=matrix_element.get('processes')[0].\ 3382 get('model'), 3383 amplitude=True) 3384 logger.info("Generating Feynman diagrams for " + \ 3385 matrix_element.get('processes')[0].nice_string()) 3386 plot.draw() 3387 3388 self.link_files_in_SubProcess(Ppath) 3389 3390 #import nexternal/leshouche in Source 3391 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3392 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3393 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3394 # Return to SubProcesses dir 3395 #os.chdir(os.path.pardir) 3396 3397 # Add subprocess to subproc.mg 3398 filename = pjoin(path, 'subproc.mg') 3399 files.append_to_file(filename, 3400 self.write_subproc, 3401 subprocdir) 3402 3403 # Return to original dir 3404 #os.chdir(cwd) 3405 3406 # Generate info page 3407 gen_infohtml.make_info_html(self.dir_path) 3408 3409 3410 if not calls: 3411 calls = 0 3412 return calls
3413 3449
3450 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 3451 online = False, compiler=default_compiler):
3452 """Finalize ME v4 directory by creating jpeg diagrams, html 3453 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3454 3455 # indicate that the output type is not grouped 3456 if not isinstance(self, ProcessExporterFortranMEGroup): 3457 self.proc_characteristic['grouped_matrix'] = False 3458 3459 modelname = self.opt['model'] 3460 if modelname == 'mssm' or modelname.startswith('mssm-'): 3461 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3462 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3463 check_param_card.convert_to_mg5card(param_card, mg5_param) 3464 check_param_card.check_valid_param_card(mg5_param) 3465 3466 # Add the combine_events.f modify param_card path/number of @X 3467 filename = pjoin(self.dir_path,'Source','combine_events.f') 3468 try: 3469 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3470 except AttributeError: 3471 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3472 nb_proc = len(set(nb_proc)) 3473 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3474 # Write maxconfigs.inc based on max of ME's/subprocess groups 3475 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3476 self.write_maxconfigs_file(writers.FortranWriter(filename), 3477 matrix_elements) 3478 3479 # Write maxparticles.inc based on max of ME's/subprocess groups 3480 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3481 self.write_maxparticles_file(writers.FortranWriter(filename), 3482 matrix_elements) 3483 3484 # Touch "done" file 3485 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3486 3487 # Check for compiler 3488 self.set_compiler(compiler) 3489 3490 old_pos = os.getcwd() 3491 subpath = pjoin(self.dir_path, 'SubProcesses') 3492 3493 P_dir_list = [proc for proc in os.listdir(subpath) 3494 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3495 3496 devnull = os.open(os.devnull, os.O_RDWR) 3497 # Convert the poscript in jpg files (if authorize) 3498 if makejpg: 3499 try: 3500 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3501 except Exception, error: 3502 pass 3503 logger.info("Generate jpeg diagrams") 3504 for Pdir in P_dir_list: 3505 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3506 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3507 3508 logger.info("Generate web pages") 3509 # Create the WebPage using perl script 3510 3511 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3512 stdout = devnull,cwd=pjoin(self.dir_path)) 3513 3514 #os.chdir(os.path.pardir) 3515 3516 obj = gen_infohtml.make_info_html(self.dir_path) 3517 3518 if online: 3519 nb_channel = obj.rep_rule['nb_gen_diag'] 3520 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3521 #add the information to proc_charac 3522 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3523 3524 # Write command history as proc_card_mg5 3525 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3526 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3527 history.write(output_file) 3528 3529 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3530 stdout = devnull) 3531 3532 #crate the proc_characteristic file 3533 self.create_proc_charac(matrix_elements, history) 3534 3535 # create the run_card 3536 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler) 3537 3538 # Run "make" to generate madevent.tar.gz file 3539 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3540 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3541 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3542 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3543 stdout = devnull, cwd=self.dir_path) 3544 3545 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3546 stdout = devnull, cwd=self.dir_path)
3547 3548 3549 3550 3551 3552 3553 #return to the initial dir 3554 #os.chdir(old_pos) 3555 3556 #=========================================================================== 3557 # write_matrix_element_v4 3558 #===========================================================================
3559 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3560 proc_id = "", config_map = [], subproc_number = ""):
3561 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3562 3563 if not matrix_element.get('processes') or \ 3564 not matrix_element.get('diagrams'): 3565 return 0 3566 3567 if not isinstance(writer, writers.FortranWriter): 3568 raise writers.FortranWriter.FortranWriterError(\ 3569 "writer not FortranWriter") 3570 3571 3572 # Set lowercase/uppercase Fortran code 3573 writers.FortranWriter.downcase = False 3574 3575 # The proc prefix is not used for MadEvent output so it can safely be set 3576 # to an empty string. 3577 replace_dict = {'proc_prefix':''} 3578 3579 # Extract helas calls 3580 helas_calls = fortran_model.get_matrix_element_calls(\ 3581 matrix_element) 3582 3583 replace_dict['helas_calls'] = "\n".join(helas_calls) 3584 3585 3586 # Extract version number and date from VERSION file 3587 info_lines = self.get_mg5_info_lines() 3588 replace_dict['info_lines'] = info_lines 3589 3590 # Extract process info lines 3591 process_lines = self.get_process_info_lines(matrix_element) 3592 replace_dict['process_lines'] = process_lines 3593 3594 # Set proc_id 3595 replace_dict['proc_id'] = proc_id 3596 3597 # Extract ncomb 3598 ncomb = matrix_element.get_helicity_combinations() 3599 replace_dict['ncomb'] = ncomb 3600 3601 # Extract helicity lines 3602 helicity_lines = self.get_helicity_lines(matrix_element) 3603 replace_dict['helicity_lines'] = helicity_lines 3604 3605 # Extract IC line 3606 ic_line = self.get_ic_line(matrix_element) 3607 replace_dict['ic_line'] = ic_line 3608 3609 # Extract overall denominator 3610 # Averaging initial state color, spin, and identical FS particles 3611 den_factor_line = self.get_den_factor_line(matrix_element) 3612 replace_dict['den_factor_line'] = den_factor_line 3613 3614 # Extract ngraphs 3615 ngraphs = matrix_element.get_number_of_amplitudes() 3616 replace_dict['ngraphs'] = ngraphs 3617 3618 # Extract ndiags 3619 ndiags = len(matrix_element.get('diagrams')) 3620 replace_dict['ndiags'] = ndiags 3621 3622 # Set define_iconfigs_lines 3623 replace_dict['define_iconfigs_lines'] = \ 3624 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3625 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3626 3627 if proc_id: 3628 # Set lines for subprocess group version 3629 # Set define_iconfigs_lines 3630 replace_dict['define_iconfigs_lines'] += \ 3631 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3632 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3633 # Set set_amp2_line 3634 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3635 proc_id 3636 else: 3637 # Standard running 3638 # Set set_amp2_line 3639 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3640 3641 # Extract nwavefuncs 3642 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3643 replace_dict['nwavefuncs'] = nwavefuncs 3644 3645 # Extract ncolor 3646 ncolor = max(1, len(matrix_element.get('color_basis'))) 3647 replace_dict['ncolor'] = ncolor 3648 3649 # Extract color data lines 3650 color_data_lines = self.get_color_data_lines(matrix_element) 3651 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3652 3653 3654 # Set the size of Wavefunction 3655 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 3656 replace_dict['wavefunctionsize'] = 18 3657 else: 3658 replace_dict['wavefunctionsize'] = 6 3659 3660 # Extract amp2 lines 3661 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 3662 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 3663 3664 # The JAMP definition depends on the splitting order 3665 split_orders=matrix_element.get('processes')[0].get('split_orders') 3666 if len(split_orders)>0: 3667 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 3668 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 3669 matrix_element.get('processes')[0],squared_orders) 3670 else: 3671 # Consider the output of a dummy order 'ALL_ORDERS' for which we 3672 # set all amplitude order to weight 1 and only one squared order 3673 # contribution which is of course ALL_ORDERS=2. 3674 squared_orders = [(2,),] 3675 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 3676 replace_dict['chosen_so_configs'] = '.TRUE.' 3677 3678 replace_dict['nAmpSplitOrders']=len(amp_orders) 3679 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 3680 replace_dict['split_order_str_list']=str(split_orders) 3681 replace_dict['nSplitOrders']=max(len(split_orders),1) 3682 amp_so = self.get_split_orders_lines( 3683 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 3684 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 3685 replace_dict['ampsplitorders']='\n'.join(amp_so) 3686 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 3687 3688 3689 # Extract JAMP lines 3690 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 3691 jamp_lines = self.get_JAMP_lines_split_order(\ 3692 matrix_element,amp_orders,split_order_names= 3693 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 3694 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3695 3696 file = open(pjoin(_file_path, \ 3697 'iolibs/template_files/%s' % self.matrix_file)).read() 3698 3699 file = file % replace_dict 3700 3701 # Add the split orders helper functions. 3702 file = file + '\n' + open(pjoin(_file_path, \ 3703 'iolibs/template_files/split_orders_helping_functions.inc'))\ 3704 .read()%replace_dict 3705 # Write the file 3706 writer.writelines(file) 3707 3708 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
3709 3710 #=========================================================================== 3711 # write_auto_dsig_file 3712 #===========================================================================
3713 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3714 """Write the auto_dsig.f file for the differential cross section 3715 calculation, includes pdf call information""" 3716 3717 if not matrix_element.get('processes') or \ 3718 not matrix_element.get('diagrams'): 3719 return 0 3720 3721 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3722 self.proc_characteristic['ninitial'] = ninitial 3723 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 3724 3725 if ninitial < 1 or ninitial > 2: 3726 raise writers.FortranWriter.FortranWriterError, \ 3727 """Need ninitial = 1 or 2 to write auto_dsig file""" 3728 3729 replace_dict = {} 3730 3731 # Extract version number and date from VERSION file 3732 info_lines = self.get_mg5_info_lines() 3733 replace_dict['info_lines'] = info_lines 3734 3735 # Extract process info lines 3736 process_lines = self.get_process_info_lines(matrix_element) 3737 replace_dict['process_lines'] = process_lines 3738 3739 # Set proc_id 3740 replace_dict['proc_id'] = proc_id 3741 replace_dict['numproc'] = 1 3742 3743 # Set dsig_line 3744 if ninitial == 1: 3745 # No conversion, since result of decay should be given in GeV 3746 dsig_line = "pd(0)*dsiguu" 3747 else: 3748 # Convert result (in GeV) to pb 3749 dsig_line = "pd(0)*conv*dsiguu" 3750 3751 replace_dict['dsig_line'] = dsig_line 3752 3753 # Extract pdf lines 3754 pdf_vars, pdf_data, pdf_lines = \ 3755 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3756 replace_dict['pdf_vars'] = pdf_vars 3757 replace_dict['pdf_data'] = pdf_data 3758 replace_dict['pdf_lines'] = pdf_lines 3759 3760 # Lines that differ between subprocess group and regular 3761 if proc_id: 3762 replace_dict['numproc'] = int(proc_id) 3763 replace_dict['passcuts_begin'] = "" 3764 replace_dict['passcuts_end'] = "" 3765 # Set lines for subprocess group version 3766 # Set define_iconfigs_lines 3767 replace_dict['define_subdiag_lines'] = \ 3768 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3769 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3770 replace_dict['cutsdone'] = "" 3771 else: 3772 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3773 replace_dict['passcuts_end'] = "ENDIF" 3774 replace_dict['define_subdiag_lines'] = "" 3775 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 3776 3777 if not isinstance(self, ProcessExporterFortranMEGroup): 3778 ncomb=matrix_element.get_helicity_combinations() 3779 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 3780 else: 3781 replace_dict['read_write_good_hel'] = "" 3782 3783 3784 3785 file = open(pjoin(_file_path, \ 3786 'iolibs/template_files/auto_dsig_v4.inc')).read() 3787 file = file % replace_dict 3788 3789 # Write the file 3790 writer.writelines(file, context={'read_write_good_hel':True})
3791 3792 #=========================================================================== 3793 # write_coloramps_file 3794 #===========================================================================
3795 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
3796 """Write the coloramps.inc file for MadEvent""" 3797 3798 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 3799 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 3800 (max(len(matrix_element.get('color_basis').keys()), 1), 3801 len(mapconfigs))) 3802 3803 3804 # Write the file 3805 writer.writelines(lines) 3806 3807 return True
3808 3809 #=========================================================================== 3810 # write_colors_file 3811 #===========================================================================
3812 - def write_colors_file(self, writer, matrix_elements):
3813 """Write the get_color.f file for MadEvent, which returns color 3814 for all particles used in the matrix element.""" 3815 3816 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 3817 matrix_elements = [matrix_elements] 3818 3819 model = matrix_elements[0].get('processes')[0].get('model') 3820 3821 # We need the both particle and antiparticle wf_ids, since the identity 3822 # depends on the direction of the wf. 3823 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3824 for wf in d.get('wavefunctions')],[]) \ 3825 for d in me.get('diagrams')], []) \ 3826 for me in matrix_elements], [])) 3827 3828 leg_ids = set(sum([sum([sum([[l.get('id'), 3829 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 3830 for l in p.get_legs_with_decays()], []) \ 3831 for p in me.get('processes')], []) \ 3832 for me in matrix_elements], [])) 3833 particle_ids = sorted(list(wf_ids.union(leg_ids))) 3834 3835 lines = """function get_color(ipdg) 3836 implicit none 3837 integer get_color, ipdg 3838 3839 if(ipdg.eq.%d)then 3840 get_color=%d 3841 return 3842 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3843 3844 for part_id in particle_ids[1:]: 3845 lines += """else if(ipdg.eq.%d)then 3846 get_color=%d 3847 return 3848 """ % (part_id, model.get_particle(part_id).get_color()) 3849 # Dummy particle for multiparticle vertices with pdg given by 3850 # first code not in the model 3851 lines += """else if(ipdg.eq.%d)then 3852 c This is dummy particle used in multiparticle vertices 3853 get_color=2 3854 return 3855 """ % model.get_first_non_pdg() 3856 lines += """else 3857 write(*,*)'Error: No color given for pdg ',ipdg 3858 get_color=0 3859 return 3860 endif 3861 end 3862 """ 3863 3864 # Write the file 3865 writer.writelines(lines) 3866 3867 return True
3868 3869 #=========================================================================== 3870 # write_config_nqcd_file 3871 #===========================================================================
3872 - def write_config_nqcd_file(self, writer, nqcd_list):
3873 """Write the config_nqcd.inc with the number of QCD couplings 3874 for each config""" 3875 3876 lines = [] 3877 for iconf, n in enumerate(nqcd_list): 3878 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 3879 3880 # Write the file 3881 writer.writelines(lines) 3882 3883 return True
3884 3885 #=========================================================================== 3886 # write_maxconfigs_file 3887 #===========================================================================
3888 - def write_maxconfigs_file(self, writer, matrix_elements):
3889 """Write the maxconfigs.inc file for MadEvent""" 3890 3891 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 3892 maxconfigs = max([me.get_num_configs() for me in \ 3893 matrix_elements.get('matrix_elements')]) 3894 else: 3895 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 3896 3897 lines = "integer lmaxconfigs\n" 3898 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 3899 3900 # Write the file 3901 writer.writelines(lines) 3902 3903 return True
3904 3905 #=========================================================================== 3906 # read_write_good_hel 3907 #===========================================================================
3908 - def read_write_good_hel(self, ncomb):
3909 """return the code to read/write the good_hel common_block""" 3910 3911 convert = {'ncomb' : ncomb} 3912 output = """ 3913 subroutine write_good_hel(stream_id) 3914 implicit none 3915 integer stream_id 3916 INTEGER NCOMB 3917 PARAMETER ( NCOMB=%(ncomb)d) 3918 LOGICAL GOODHEL(NCOMB) 3919 INTEGER NTRY 3920 common/BLOCK_GOODHEL/NTRY,GOODHEL 3921 write(stream_id,*) GOODHEL 3922 return 3923 end 3924 3925 3926 subroutine read_good_hel(stream_id) 3927 implicit none 3928 include 'genps.inc' 3929 integer stream_id 3930 INTEGER NCOMB 3931 PARAMETER ( NCOMB=%(ncomb)d) 3932 LOGICAL GOODHEL(NCOMB) 3933 INTEGER NTRY 3934 common/BLOCK_GOODHEL/NTRY,GOODHEL 3935 read(stream_id,*) GOODHEL 3936 NTRY = MAXTRIES + 1 3937 return 3938 end 3939 3940 subroutine init_good_hel() 3941 implicit none 3942 INTEGER NCOMB 3943 PARAMETER ( NCOMB=%(ncomb)d) 3944 LOGICAL GOODHEL(NCOMB) 3945 INTEGER NTRY 3946 INTEGER I 3947 3948 do i=1,NCOMB 3949 GOODHEL(I) = .false. 3950 enddo 3951 NTRY = 0 3952 end 3953 3954 integer function get_maxsproc() 3955 implicit none 3956 get_maxsproc = 1 3957 return 3958 end 3959 3960 """ % convert 3961 3962 return output
3963 3964 #=========================================================================== 3965 # write_config_subproc_map_file 3966 #===========================================================================
3967 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3968 """Write a dummy config_subproc.inc file for MadEvent""" 3969 3970 lines = [] 3971 3972 for iconfig in range(len(s_and_t_channels)): 3973 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3974 (iconfig + 1)) 3975 3976 # Write the file 3977 writer.writelines(lines) 3978 3979 return True
3980 3981 #=========================================================================== 3982 # write_configs_file 3983 #===========================================================================
3984 - def write_configs_file(self, writer, matrix_element):
3985 """Write the configs.inc file for MadEvent""" 3986 3987 # Extract number of external particles 3988 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3989 3990 model = matrix_element.get('processes')[0].get('model') 3991 configs = [(i+1, d) for (i, d) in \ 3992 enumerate(matrix_element.get('diagrams'))] 3993 mapconfigs = [c[0] for c in configs] 3994 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3995 [[c[1]] for c in configs], 3996 mapconfigs, 3997 nexternal, ninitial, 3998 model)
3999 4000 #=========================================================================== 4001 # write_run_configs_file 4002 #===========================================================================
4003 - def write_run_config_file(self, writer):
4004 """Write the run_configs.inc file for MadEvent""" 4005 4006 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4007 4008 if self.proc_characteristic['loop_induced']: 4009 job_per_chan = 1 4010 else: 4011 job_per_chan = 5 4012 text = open(path).read() % {'chanperjob': job_per_chan} 4013 writer.write(text) 4014 return True
4015 4016 4017 #=========================================================================== 4018 # write_configs_file_from_diagrams 4019 #===========================================================================
4020 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4021 nexternal, ninitial, model):
4022 """Write the actual configs.inc file. 4023 4024 configs is the diagrams corresponding to configs (each 4025 diagrams is a list of corresponding diagrams for all 4026 subprocesses, with None if there is no corresponding diagrams 4027 for a given process). 4028 mapconfigs gives the diagram number for each config. 4029 4030 For s-channels, we need to output one PDG for each subprocess in 4031 the subprocess group, in order to be able to pick the right 4032 one for multiprocesses.""" 4033 4034 lines = [] 4035 4036 s_and_t_channels = [] 4037 4038 nqcd_list = [] 4039 4040 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4041 for config in configs if [d for d in config if d][0].\ 4042 get_vertex_leg_numbers()!=[]] 4043 minvert = min(vert_list) if vert_list!=[] else 0 4044 4045 # Number of subprocesses 4046 nsubprocs = len(configs[0]) 4047 4048 nconfigs = 0 4049 4050 new_pdg = model.get_first_non_pdg() 4051 4052 for iconfig, helas_diags in enumerate(configs): 4053 if any([vert > minvert for vert in 4054 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4055 # Only 3-vertices allowed in configs.inc 4056 continue 4057 nconfigs += 1 4058 4059 # Need s- and t-channels for all subprocesses, including 4060 # those that don't contribute to this config 4061 empty_verts = [] 4062 stchannels = [] 4063 for h in helas_diags: 4064 if h: 4065 # get_s_and_t_channels gives vertices starting from 4066 # final state external particles and working inwards 4067 stchannels.append(h.get('amplitudes')[0].\ 4068 get_s_and_t_channels(ninitial, model, 4069 new_pdg)) 4070 else: 4071 stchannels.append((empty_verts, None)) 4072 4073 # For t-channels, just need the first non-empty one 4074 tchannels = [t for s,t in stchannels if t != None][0] 4075 4076 # For s_and_t_channels (to be used later) use only first config 4077 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4078 tchannels]) 4079 4080 # Make sure empty_verts is same length as real vertices 4081 if any([s for s,t in stchannels]): 4082 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4083 4084 # Reorganize s-channel vertices to get a list of all 4085 # subprocesses for each vertex 4086 schannels = zip(*[s for s,t in stchannels]) 4087 else: 4088 schannels = [] 4089 4090 allchannels = schannels 4091 if len(tchannels) > 1: 4092 # Write out tchannels only if there are any non-trivial ones 4093 allchannels = schannels + tchannels 4094 4095 # Write out propagators for s-channel and t-channel vertices 4096 4097 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4098 # Correspondance between the config and the diagram = amp2 4099 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4100 mapconfigs[iconfig])) 4101 # Number of QCD couplings in this diagram 4102 nqcd = 0 4103 for h in helas_diags: 4104 if h: 4105 try: 4106 nqcd = h.calculate_orders()['QCD'] 4107 except KeyError: 4108 pass 4109 break 4110 else: 4111 continue 4112 4113 nqcd_list.append(nqcd) 4114 4115 for verts in allchannels: 4116 if verts in schannels: 4117 vert = [v for v in verts if v][0] 4118 else: 4119 vert = verts 4120 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4121 last_leg = vert.get('legs')[-1] 4122 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4123 (last_leg.get('number'), nconfigs, len(daughters), 4124 ",".join([str(d) for d in daughters]))) 4125 if verts in schannels: 4126 pdgs = [] 4127 for v in verts: 4128 if v: 4129 pdgs.append(v.get('legs')[-1].get('id')) 4130 else: 4131 pdgs.append(0) 4132 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4133 (last_leg.get('number'), nconfigs, nsubprocs, 4134 ",".join([str(d) for d in pdgs]))) 4135 lines.append("data tprid(%d,%d)/0/" % \ 4136 (last_leg.get('number'), nconfigs)) 4137 elif verts in tchannels[:-1]: 4138 lines.append("data tprid(%d,%d)/%d/" % \ 4139 (last_leg.get('number'), nconfigs, 4140 abs(last_leg.get('id')))) 4141 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4142 (last_leg.get('number'), nconfigs, nsubprocs, 4143 ",".join(['0'] * nsubprocs))) 4144 4145 # Write out number of configs 4146 lines.append("# Number of configs") 4147 lines.append("data mapconfig(0)/%d/" % nconfigs) 4148 4149 # Write the file 4150 writer.writelines(lines) 4151 4152 return s_and_t_channels, nqcd_list
4153 4154 #=========================================================================== 4155 # write_decayBW_file 4156 #===========================================================================
4157 - def write_decayBW_file(self, writer, s_and_t_channels):
4158 """Write the decayBW.inc file for MadEvent""" 4159 4160 lines = [] 4161 4162 booldict = {None: "0", True: "1", False: "2"} 4163 4164 for iconf, config in enumerate(s_and_t_channels): 4165 schannels = config[0] 4166 for vertex in schannels: 4167 # For the resulting leg, pick out whether it comes from 4168 # decay or not, as given by the onshell flag 4169 leg = vertex.get('legs')[-1] 4170 lines.append("data gForceBW(%d,%d)/%s/" % \ 4171 (leg.get('number'), iconf + 1, 4172 booldict[leg.get('onshell')])) 4173 4174 # Write the file 4175 writer.writelines(lines) 4176 4177 return True
4178 4179 #=========================================================================== 4180 # write_dname_file 4181 #===========================================================================
4182 - def write_dname_file(self, writer, dir_name):
4183 """Write the dname.mg file for MG4""" 4184 4185 line = "DIRNAME=%s" % dir_name 4186 4187 # Write the file 4188 writer.write(line + "\n") 4189 4190 return True
4191 4192 #=========================================================================== 4193 # write_driver 4194 #===========================================================================
4195 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4196 """Write the SubProcess/driver.f file for MG4""" 4197 4198 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4199 4200 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4201 card = 'Source/MODEL/MG5_param.dat' 4202 else: 4203 card = 'param_card.dat' 4204 # Requiring each helicity configuration to be probed by 10 points for 4205 # matrix element before using the resulting grid for MC over helicity 4206 # sampling. 4207 # We multiply this by 2 because each grouped subprocess is called at most 4208 # twice for each IMIRROR. 4209 replace_dict = {'param_card_name':card, 4210 'ncomb':ncomb, 4211 'hel_init_points':n_grouped_proc*10*2} 4212 if v5: 4213 replace_dict['secondparam']=',.true.' 4214 else: 4215 replace_dict['secondparam']='' 4216 4217 text = open(path).read() % replace_dict 4218 4219 writer.write(text) 4220 4221 return True
4222 4223 #=========================================================================== 4224 # write_addmothers 4225 #===========================================================================
4226 - def write_addmothers(self, writer):
4227 """Write the SubProcess/addmothers.f""" 4228 4229 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4230 4231 text = open(path).read() % {'iconfig': 'diag_number'} 4232 writer.write(text) 4233 4234 return True
4235 4236 4237 #=========================================================================== 4238 # write_combine_events 4239 #===========================================================================
4240 - def write_combine_events(self, writer, nb_proc=100):
4241 """Write the SubProcess/driver.f file for MG4""" 4242 4243 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4244 4245 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4246 card = 'Source/MODEL/MG5_param.dat' 4247 else: 4248 card = 'param_card.dat' 4249 4250 #set maxpup (number of @X in the process card) 4251 4252 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4253 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4254 writer.write(text) 4255 4256 return True
4257 4258 4259 #=========================================================================== 4260 # write_symmetry 4261 #===========================================================================
4262 - def write_symmetry(self, writer, v5=True):
4263 """Write the SubProcess/driver.f file for ME""" 4264 4265 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4266 4267 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4268 card = 'Source/MODEL/MG5_param.dat' 4269 else: 4270 card = 'param_card.dat' 4271 text = open(path).read() 4272 4273 if v5: 4274 text = text % {'param_card_name':card, 'setparasecondarg':''} 4275 else: 4276 text = text % {'param_card_name':card, 'setparasecondarg':',.true.'} 4277 writer.write(text) 4278 4279 return True
4280 4281 4282 4283 4284 #=========================================================================== 4285 # write_iproc_file 4286 #===========================================================================
4287 - def write_iproc_file(self, writer, me_number):
4288 """Write the iproc.dat file for MG4""" 4289 line = "%d" % (me_number + 1) 4290 4291 # Write the file 4292 for line_to_write in writer.write_line(line): 4293 writer.write(line_to_write) 4294 return True
4295 4296 #=========================================================================== 4297 # write_mg_sym_file 4298 #===========================================================================
4299 - def write_mg_sym_file(self, writer, matrix_element):
4300 """Write the mg.sym file for MadEvent.""" 4301 4302 lines = [] 4303 4304 # Extract process with all decays included 4305 final_legs = filter(lambda leg: leg.get('state') == True, 4306 matrix_element.get('processes')[0].get_legs_with_decays()) 4307 4308 ninitial = len(filter(lambda leg: leg.get('state') == False, 4309 matrix_element.get('processes')[0].get('legs'))) 4310 4311 identical_indices = {} 4312 4313 # Extract identical particle info 4314 for i, leg in enumerate(final_legs): 4315 if leg.get('id') in identical_indices: 4316 identical_indices[leg.get('id')].append(\ 4317 i + ninitial + 1) 4318 else: 4319 identical_indices[leg.get('id')] = [i + ninitial + 1] 4320 4321 # Remove keys which have only one particle 4322 for key in identical_indices.keys(): 4323 if len(identical_indices[key]) < 2: 4324 del identical_indices[key] 4325 4326 # Write mg.sym file 4327 lines.append(str(len(identical_indices.keys()))) 4328 for key in identical_indices.keys(): 4329 lines.append(str(len(identical_indices[key]))) 4330 for number in identical_indices[key]: 4331 lines.append(str(number)) 4332 4333 # Write the file 4334 writer.writelines(lines) 4335 4336 return True
4337 4338 #=========================================================================== 4339 # write_mg_sym_file 4340 #===========================================================================
4341 - def write_default_mg_sym_file(self, writer):
4342 """Write the mg.sym file for MadEvent.""" 4343 4344 lines = "0" 4345 4346 # Write the file 4347 writer.writelines(lines) 4348 4349 return True
4350 4351 #=========================================================================== 4352 # write_ncombs_file 4353 #===========================================================================
4354 - def write_ncombs_file(self, writer, nexternal):
4355 """Write the ncombs.inc file for MadEvent.""" 4356 4357 # ncomb (used for clustering) is 2^nexternal 4358 file = " integer n_max_cl\n" 4359 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4360 4361 # Write the file 4362 writer.writelines(file) 4363 4364 return True
4365 4366 #=========================================================================== 4367 # write_processes_file 4368 #===========================================================================
4369 - def write_processes_file(self, writer, subproc_group):
4370 """Write the processes.dat file with info about the subprocesses 4371 in this group.""" 4372 4373 lines = [] 4374 4375 for ime, me in \ 4376 enumerate(subproc_group.get('matrix_elements')): 4377 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4378 ",".join(p.base_string() for p in \ 4379 me.get('processes')))) 4380 if me.get('has_mirror_process'): 4381 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4382 for proc in mirror_procs: 4383 legs = copy.copy(proc.get('legs_with_decays')) 4384 legs.insert(0, legs.pop(1)) 4385 proc.set("legs_with_decays", legs) 4386 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4387 mirror_procs)) 4388 else: 4389 lines.append("mirror none") 4390 4391 # Write the file 4392 writer.write("\n".join(lines)) 4393 4394 return True
4395 4396 #=========================================================================== 4397 # write_symswap_file 4398 #===========================================================================
4399 - def write_symswap_file(self, writer, ident_perms):
4400 """Write the file symswap.inc for MG4 by comparing diagrams using 4401 the internal matrix element value functionality.""" 4402 4403 lines = [] 4404 4405 # Write out lines for symswap.inc file (used to permute the 4406 # external leg momenta 4407 for iperm, perm in enumerate(ident_perms): 4408 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4409 (iperm+1, ",".join([str(i+1) for i in perm]))) 4410 lines.append("data nsym/%d/" % len(ident_perms)) 4411 4412 # Write the file 4413 writer.writelines(lines) 4414 4415 return True
4416 4417 #=========================================================================== 4418 # write_symfact_file 4419 #===========================================================================
4420 - def write_symfact_file(self, writer, symmetry):
4421 """Write the files symfact.dat for MG4 by comparing diagrams using 4422 the internal matrix element value functionality.""" 4423 4424 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4425 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4426 # Write out lines for symswap.inc file (used to permute the 4427 # external leg momenta 4428 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4429 # Write the file 4430 writer.write('\n'.join(lines)) 4431 writer.write('\n') 4432 4433 return True
4434 4435 #=========================================================================== 4436 # write_symperms_file 4437 #===========================================================================
4438 - def write_symperms_file(self, writer, perms):
4439 """Write the symperms.inc file for subprocess group, used for 4440 symmetric configurations""" 4441 4442 lines = [] 4443 for iperm, perm in enumerate(perms): 4444 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4445 (iperm+1, ",".join([str(i+1) for i in perm]))) 4446 4447 # Write the file 4448 writer.writelines(lines) 4449 4450 return True
4451 4452 #=========================================================================== 4453 # write_subproc 4454 #===========================================================================
4455 - def write_subproc(self, writer, subprocdir):
4456 """Append this subprocess to the subproc.mg file for MG4""" 4457 4458 # Write line to file 4459 writer.write(subprocdir + "\n") 4460 4461 return True
4462
4463 #=============================================================================== 4464 # ProcessExporterFortranMEGroup 4465 #=============================================================================== 4466 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4467 """Class to take care of exporting a set of matrix elements to 4468 MadEvent subprocess group format.""" 4469 4470 matrix_file = "matrix_madevent_group_v4.inc" 4471 4472 #=========================================================================== 4473 # generate_subprocess_directory_v4 4474 #===========================================================================
4475 - def generate_subprocess_directory_v4(self, subproc_group, 4476 fortran_model, 4477 group_number):
4478 """Generate the Pn directory for a subprocess group in MadEvent, 4479 including the necessary matrix_N.f files, configs.inc and various 4480 other helper files""" 4481 4482 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 4483 "subproc_group object not SubProcessGroup" 4484 4485 if not self.model: 4486 self.model = subproc_group.get('matrix_elements')[0].\ 4487 get('processes')[0].get('model') 4488 4489 cwd = os.getcwd() 4490 path = pjoin(self.dir_path, 'SubProcesses') 4491 4492 os.chdir(path) 4493 pathdir = os.getcwd() 4494 4495 # Create the directory PN in the specified path 4496 subprocdir = "P%d_%s" % (subproc_group.get('number'), 4497 subproc_group.get('name')) 4498 try: 4499 os.mkdir(subprocdir) 4500 except os.error as error: 4501 logger.warning(error.strerror + " " + subprocdir) 4502 4503 try: 4504 os.chdir(subprocdir) 4505 except os.error: 4506 logger.error('Could not cd to directory %s' % subprocdir) 4507 return 0 4508 4509 logger.info('Creating files in directory %s' % subprocdir) 4510 4511 # Create the matrix.f files, auto_dsig.f files and all inc files 4512 # for all subprocesses in the group 4513 4514 maxamps = 0 4515 maxflows = 0 4516 tot_calls = 0 4517 4518 matrix_elements = subproc_group.get('matrix_elements') 4519 4520 # Add the driver.f, all grouped ME's must share the same number of 4521 # helicity configuration 4522 ncomb = matrix_elements[0].get_helicity_combinations() 4523 for me in matrix_elements[1:]: 4524 if ncomb!=me.get_helicity_combinations(): 4525 raise MadGraph5Error, "All grouped processes must share the "+\ 4526 "same number of helicity configurations." 4527 4528 filename = 'driver.f' 4529 self.write_driver(writers.FortranWriter(filename),ncomb, 4530 n_grouped_proc=len(matrix_elements), v5=False) 4531 4532 for ime, matrix_element in \ 4533 enumerate(matrix_elements): 4534 filename = 'matrix%d.f' % (ime+1) 4535 calls, ncolor = \ 4536 self.write_matrix_element_v4(writers.FortranWriter(filename), 4537 matrix_element, 4538 fortran_model, 4539 proc_id=str(ime+1), 4540 config_map=subproc_group.get('diagram_maps')[ime], 4541 subproc_number=group_number) 4542 4543 filename = 'auto_dsig%d.f' % (ime+1) 4544 self.write_auto_dsig_file(writers.FortranWriter(filename), 4545 matrix_element, 4546 str(ime+1)) 4547 4548 # Keep track of needed quantities 4549 tot_calls += int(calls) 4550 maxflows = max(maxflows, ncolor) 4551 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 4552 4553 # Draw diagrams 4554 filename = "matrix%d.ps" % (ime+1) 4555 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 4556 get('diagrams'), 4557 filename, 4558 model = \ 4559 matrix_element.get('processes')[0].\ 4560 get('model'), 4561 amplitude=True) 4562 logger.info("Generating Feynman diagrams for " + \ 4563 matrix_element.get('processes')[0].nice_string()) 4564 plot.draw() 4565 4566 # Extract number of external particles 4567 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4568 4569 # Generate a list of diagrams corresponding to each configuration 4570 # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number 4571 # If a subprocess has no diagrams for this config, the number is 0 4572 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 4573 4574 filename = 'auto_dsig.f' 4575 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 4576 subproc_group) 4577 4578 filename = 'coloramps.inc' 4579 self.write_coloramps_file(writers.FortranWriter(filename), 4580 subproc_diagrams_for_config, 4581 maxflows, 4582 matrix_elements) 4583 4584 filename = 'get_color.f' 4585 self.write_colors_file(writers.FortranWriter(filename), 4586 matrix_elements) 4587 4588 filename = 'config_subproc_map.inc' 4589 self.write_config_subproc_map_file(writers.FortranWriter(filename), 4590 subproc_diagrams_for_config) 4591 4592 filename = 'configs.inc' 4593 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 4594 writers.FortranWriter(filename), 4595 subproc_group, 4596 subproc_diagrams_for_config) 4597 4598 filename = 'config_nqcd.inc' 4599 self.write_config_nqcd_file(writers.FortranWriter(filename), 4600 nqcd_list) 4601 4602 filename = 'decayBW.inc' 4603 self.write_decayBW_file(writers.FortranWriter(filename), 4604 s_and_t_channels) 4605 4606 filename = 'dname.mg' 4607 self.write_dname_file(writers.FortranWriter(filename), 4608 subprocdir) 4609 4610 filename = 'iproc.dat' 4611 self.write_iproc_file(writers.FortranWriter(filename), 4612 group_number) 4613 4614 filename = 'leshouche.inc' 4615 self.write_leshouche_file(writers.FortranWriter(filename), 4616 subproc_group) 4617 4618 filename = 'maxamps.inc' 4619 self.write_maxamps_file(writers.FortranWriter(filename), 4620 maxamps, 4621 maxflows, 4622 max([len(me.get('processes')) for me in \ 4623 matrix_elements]), 4624 len(matrix_elements)) 4625 4626 # Note that mg.sym is not relevant for this case 4627 filename = 'mg.sym' 4628 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 4629 4630 filename = 'mirrorprocs.inc' 4631 self.write_mirrorprocs(writers.FortranWriter(filename), 4632 subproc_group) 4633 4634 filename = 'ncombs.inc' 4635 self.write_ncombs_file(writers.FortranWriter(filename), 4636 nexternal) 4637 4638 filename = 'nexternal.inc' 4639 self.write_nexternal_file(writers.FortranWriter(filename), 4640 nexternal, ninitial) 4641 4642 filename = 'ngraphs.inc' 4643 self.write_ngraphs_file(writers.FortranWriter(filename), 4644 nconfigs) 4645 4646 filename = 'pmass.inc' 4647 self.write_pmass_file(writers.FortranWriter(filename), 4648 matrix_element) 4649 4650 filename = 'props.inc' 4651 self.write_props_file(writers.FortranWriter(filename), 4652 matrix_element, 4653 s_and_t_channels) 4654 4655 filename = 'processes.dat' 4656 files.write_to_file(filename, 4657 self.write_processes_file, 4658 subproc_group) 4659 4660 # Find config symmetries and permutations 4661 symmetry, perms, ident_perms = \ 4662 diagram_symmetry.find_symmetry(subproc_group) 4663 4664 filename = 'symswap.inc' 4665 self.write_symswap_file(writers.FortranWriter(filename), 4666 ident_perms) 4667 4668 filename = 'symfact_orig.dat' 4669 self.write_symfact_file(open(filename, 'w'), symmetry) 4670 4671 filename = 'symperms.inc' 4672 self.write_symperms_file(writers.FortranWriter(filename), 4673 perms) 4674 4675 # Generate jpgs -> pass in make_html 4676 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 4677 4678 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 4679 4680 #import nexternal/leshouch in Source 4681 ln('nexternal.inc', '../../Source', log=False) 4682 ln('leshouche.inc', '../../Source', log=False) 4683 ln('maxamps.inc', '../../Source', log=False) 4684 4685 # Return to SubProcesses dir) 4686 os.chdir(pathdir) 4687 4688 # Add subprocess to subproc.mg 4689 filename = 'subproc.mg' 4690 files.append_to_file(filename, 4691 self.write_subproc, 4692 subprocdir) 4693 4694 # Return to original dir 4695 os.chdir(cwd) 4696 4697 if not tot_calls: 4698 tot_calls = 0 4699 return tot_calls
4700 4701 #=========================================================================== 4702 # write_super_auto_dsig_file 4703 #===========================================================================
4704 - def write_super_auto_dsig_file(self, writer, subproc_group):
4705 """Write the auto_dsig.f file selecting between the subprocesses 4706 in subprocess group mode""" 4707 4708 replace_dict = {} 4709 4710 # Extract version number and date from VERSION file 4711 info_lines = self.get_mg5_info_lines() 4712 replace_dict['info_lines'] = info_lines 4713 4714 matrix_elements = subproc_group.get('matrix_elements') 4715 4716 # Extract process info lines 4717 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 4718 matrix_elements]) 4719 replace_dict['process_lines'] = process_lines 4720 4721 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 4722 replace_dict['nexternal'] = nexternal 4723 4724 replace_dict['nsprocs'] = 2*len(matrix_elements) 4725 4726 # Generate dsig definition line 4727 dsig_def_line = "DOUBLE PRECISION " + \ 4728 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 4729 range(len(matrix_elements))]) 4730 replace_dict["dsig_def_line"] = dsig_def_line 4731 4732 # Generate dsig process lines 4733 call_dsig_proc_lines = [] 4734 for iproc in range(len(matrix_elements)): 4735 call_dsig_proc_lines.append(\ 4736 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 4737 {"num": iproc + 1, 4738 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 4739 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 4740 4741 ncomb=matrix_elements[0].get_helicity_combinations() 4742 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4743 4744 file = open(pjoin(_file_path, \ 4745 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 4746 file = file % replace_dict 4747 4748 # Write the file 4749 writer.writelines(file)
4750 4751 #=========================================================================== 4752 # write_mirrorprocs 4753 #===========================================================================
4754 - def write_mirrorprocs(self, writer, subproc_group):
4755 """Write the mirrorprocs.inc file determining which processes have 4756 IS mirror process in subprocess group mode.""" 4757 4758 lines = [] 4759 bool_dict = {True: '.true.', False: '.false.'} 4760 matrix_elements = subproc_group.get('matrix_elements') 4761 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 4762 (len(matrix_elements), 4763 ",".join([bool_dict[me.get('has_mirror_process')] for \ 4764 me in matrix_elements]))) 4765 # Write the file 4766 writer.writelines(lines)
4767 4768 #=========================================================================== 4769 # write_addmothers 4770 #===========================================================================
4771 - def write_addmothers(self, writer):
4772 """Write the SubProcess/addmothers.f""" 4773 4774 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4775 4776 text = open(path).read() % {'iconfig': 'lconfig'} 4777 writer.write(text) 4778 4779 return True
4780 4781 4782 #=========================================================================== 4783 # write_coloramps_file 4784 #===========================================================================
4785 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 4786 matrix_elements):
4787 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 4788 4789 # Create a map from subprocess (matrix element) to a list of 4790 # the diagrams corresponding to each config 4791 4792 lines = [] 4793 4794 subproc_to_confdiag = {} 4795 for config in diagrams_for_config: 4796 for subproc, diag in enumerate(config): 4797 try: 4798 subproc_to_confdiag[subproc].append(diag) 4799 except KeyError: 4800 subproc_to_confdiag[subproc] = [diag] 4801 4802 for subproc in sorted(subproc_to_confdiag.keys()): 4803 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 4804 matrix_elements[subproc], 4805 subproc + 1)) 4806 4807 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 4808 (maxflows, 4809 len(diagrams_for_config), 4810 len(matrix_elements))) 4811 4812 # Write the file 4813 writer.writelines(lines) 4814 4815 return True
4816 4817 #=========================================================================== 4818 # write_config_subproc_map_file 4819 #===========================================================================
4820 - def write_config_subproc_map_file(self, writer, config_subproc_map):
4821 """Write the config_subproc_map.inc file for subprocess groups""" 4822 4823 lines = [] 4824 # Output only configs that have some corresponding diagrams 4825 iconfig = 0 4826 for config in config_subproc_map: 4827 if set(config) == set([0]): 4828 continue 4829 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 4830 (iconfig + 1, len(config), 4831 ",".join([str(i) for i in config]))) 4832 iconfig += 1 4833 # Write the file 4834 writer.writelines(lines) 4835 4836 return True
4837 4838 #=========================================================================== 4839 # read_write_good_hel 4840 #===========================================================================
4841 - def read_write_good_hel(self, ncomb):
4842 """return the code to read/write the good_hel common_block""" 4843 4844 convert = {'ncomb' : ncomb} 4845 4846 output = """ 4847 subroutine write_good_hel(stream_id) 4848 implicit none 4849 integer stream_id 4850 INTEGER NCOMB 4851 PARAMETER ( NCOMB=%(ncomb)d) 4852 LOGICAL GOODHEL(NCOMB, 2) 4853 INTEGER NTRY(2) 4854 common/BLOCK_GOODHEL/NTRY,GOODHEL 4855 write(stream_id,*) GOODHEL 4856 return 4857 end 4858 4859 4860 subroutine read_good_hel(stream_id) 4861 implicit none 4862 include 'genps.inc' 4863 integer stream_id 4864 INTEGER NCOMB 4865 PARAMETER ( NCOMB=%(ncomb)d) 4866 LOGICAL GOODHEL(NCOMB, 2) 4867 INTEGER NTRY(2) 4868 common/BLOCK_GOODHEL/NTRY,GOODHEL 4869 read(stream_id,*) GOODHEL 4870 NTRY(1) = MAXTRIES + 1 4871 NTRY(2) = MAXTRIES + 1 4872 return 4873 end 4874 4875 subroutine init_good_hel() 4876 implicit none 4877 INTEGER NCOMB 4878 PARAMETER ( NCOMB=%(ncomb)d) 4879 LOGICAL GOODHEL(NCOMB, 2) 4880 INTEGER NTRY(2) 4881 INTEGER I 4882 4883 do i=1,NCOMB 4884 GOODHEL(I,1) = .false. 4885 GOODHEL(I,2) = .false. 4886 enddo 4887 NTRY(1) = 0 4888 NTRY(2) = 0 4889 end 4890 4891 integer function get_maxsproc() 4892 implicit none 4893 include 'maxamps.inc' 4894 4895 get_maxsproc = maxsproc 4896 return 4897 end 4898 4899 """ % convert 4900 4901 return output
4902 4903 4904 4905 #=========================================================================== 4906 # write_configs_file 4907 #===========================================================================
4908 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
4909 """Write the configs.inc file with topology information for a 4910 subprocess group. Use the first subprocess with a diagram for each 4911 configuration.""" 4912 4913 matrix_elements = subproc_group.get('matrix_elements') 4914 model = matrix_elements[0].get('processes')[0].get('model') 4915 4916 diagrams = [] 4917 config_numbers = [] 4918 for iconfig, config in enumerate(diagrams_for_config): 4919 # Check if any diagrams correspond to this config 4920 if set(config) == set([0]): 4921 continue 4922 subproc_diags = [] 4923 for s,d in enumerate(config): 4924 if d: 4925 subproc_diags.append(matrix_elements[s].\ 4926 get('diagrams')[d-1]) 4927 else: 4928 subproc_diags.append(None) 4929 diagrams.append(subproc_diags) 4930 config_numbers.append(iconfig + 1) 4931 4932 # Extract number of external particles 4933 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 4934 4935 return len(diagrams), \ 4936 self.write_configs_file_from_diagrams(writer, diagrams, 4937 config_numbers, 4938 nexternal, ninitial, 4939 model)
4940 4941 #=========================================================================== 4942 # write_run_configs_file 4943 #===========================================================================
4944 - def write_run_config_file(self, writer):
4945 """Write the run_configs.inc file for MadEvent""" 4946 4947 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4948 if self.proc_characteristic['loop_induced']: 4949 job_per_chan = 1 4950 else: 4951 job_per_chan = 2 4952 text = open(path).read() % {'chanperjob':job_per_chan} 4953 writer.write(text) 4954 return True
4955 4956 4957 #=========================================================================== 4958 # write_leshouche_file 4959 #===========================================================================
4960 - def write_leshouche_file(self, writer, subproc_group):
4961 """Write the leshouche.inc file for MG4""" 4962 4963 all_lines = [] 4964 4965 for iproc, matrix_element in \ 4966 enumerate(subproc_group.get('matrix_elements')): 4967 all_lines.extend(self.get_leshouche_lines(matrix_element, 4968 iproc)) 4969 4970 # Write the file 4971 writer.writelines(all_lines) 4972 4973 return True
4974 4975 4976
4977 - def finalize_v4_directory(self,*args, **opts):
4978 4979 4980 4981 super(ProcessExporterFortranMEGroup, self).finalize_v4_directory(*args, **opts) 4982 #ensure that the grouping information is on the correct value 4983 self.proc_characteristic['grouped_matrix'] = True
4984 4985 4986 #=============================================================================== 4987 # UFO_model_to_mg4 4988 #=============================================================================== 4989 4990 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
4991 4992 -class UFO_model_to_mg4(object):
4993 """ A converter of the UFO-MG5 Model to the MG4 format """ 4994 4995 # The list below shows the only variables the user is allowed to change by 4996 # himself for each PS point. If he changes any other, then calling 4997 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 4998 # correctly account for the change. 4999 PS_dependent_key = ['aS','MU_R'] 5000 mp_complex_format = 'complex*32' 5001 mp_real_format = 'real*16' 5002 # Warning, it is crucial none of the couplings/parameters of the model 5003 # starts with this prefix. I should add a check for this. 5004 # You can change it as the global variable to check_param_card.ParamCard 5005 mp_prefix = check_param_card.ParamCard.mp_prefix 5006
5007 - def __init__(self, model, output_path, opt=None):
5008 """ initialization of the objects """ 5009 5010 self.model = model 5011 self.model_name = model['name'] 5012 self.dir_path = output_path 5013 if opt: 5014 self.opt = opt 5015 else: 5016 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5017 'loop_induced': False} 5018 5019 self.coups_dep = [] # (name, expression, type) 5020 self.coups_indep = [] # (name, expression, type) 5021 self.params_dep = [] # (name, expression, type) 5022 self.params_indep = [] # (name, expression, type) 5023 self.params_ext = [] # external parameter 5024 self.p_to_f = parsers.UFOExpressionParserFortran() 5025 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5026
5028 """modify the parameter if some of them are identical up to the case""" 5029 5030 lower_dict={} 5031 duplicate = set() 5032 keys = self.model['parameters'].keys() 5033 for key in keys: 5034 for param in self.model['parameters'][key]: 5035 lower_name = param.name.lower() 5036 if not lower_name: 5037 continue 5038 try: 5039 lower_dict[lower_name].append(param) 5040 except KeyError,error: 5041 lower_dict[lower_name] = [param] 5042 else: 5043 duplicate.add(lower_name) 5044 logger.debug('%s is define both as lower case and upper case.' 5045 % lower_name) 5046 if not duplicate: 5047 return 5048 5049 re_expr = r'''\b(%s)\b''' 5050 to_change = [] 5051 change={} 5052 for value in duplicate: 5053 for i, var in enumerate(lower_dict[value]): 5054 to_change.append(var.name) 5055 new_name = '%s%s' % (var.name.lower(), 5056 ('__%d'%(i+1) if i>0 else '')) 5057 change[var.name] = new_name 5058 var.name = new_name 5059 5060 # Apply the modification to the map_CTcoup_CTparam of the model 5061 # if it has one (giving for each coupling the CT parameters whcih 5062 # are necessary and which should be exported to the model. 5063 if hasattr(self.model,'map_CTcoup_CTparam'): 5064 for coup, ctparams in self.model.map_CTcoup_CTparam: 5065 for i, ctparam in enumerate(ctparams): 5066 try: 5067 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5068 except KeyError: 5069 pass 5070 5071 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5072 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5073 5074 # change parameters 5075 for key in keys: 5076 if key == ('external',): 5077 continue 5078 for param in self.model['parameters'][key]: 5079 param.expr = rep_pattern.sub(replace, param.expr) 5080 5081 # change couplings 5082 for key in self.model['couplings'].keys(): 5083 for coup in self.model['couplings'][key]: 5084 coup.expr = rep_pattern.sub(replace, coup.expr) 5085 5086 # change mass/width 5087 for part in self.model['particles']: 5088 if str(part.get('mass')) in to_change: 5089 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5090 if str(part.get('width')) in to_change: 5091 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5092
5093 - def refactorize(self, wanted_couplings = []):
5094 """modify the couplings to fit with MG4 convention """ 5095 5096 # Keep only separation in alphaS 5097 keys = self.model['parameters'].keys() 5098 keys.sort(key=len) 5099 for key in keys: 5100 to_add = [o for o in self.model['parameters'][key] if o.name] 5101 5102 if key == ('external',): 5103 self.params_ext += to_add 5104 elif any([(k in key) for k in self.PS_dependent_key]): 5105 self.params_dep += to_add 5106 else: 5107 self.params_indep += to_add 5108 # same for couplings 5109 keys = self.model['couplings'].keys() 5110 keys.sort(key=len) 5111 for key, coup_list in self.model['couplings'].items(): 5112 if any([(k in key) for k in self.PS_dependent_key]): 5113 self.coups_dep += [c for c in coup_list if 5114 (not wanted_couplings or c.name in \ 5115 wanted_couplings)] 5116 else: 5117 self.coups_indep += [c for c in coup_list if 5118 (not wanted_couplings or c.name in \ 5119 wanted_couplings)] 5120 5121 # MG4 use G and not aS as it basic object for alphas related computation 5122 #Pass G in the independant list 5123 if 'G' in self.params_dep: 5124 index = self.params_dep.index('G') 5125 G = self.params_dep.pop(index) 5126 # G.expr = '2*cmath.sqrt(as*pi)' 5127 # self.params_indep.insert(0, self.params_dep.pop(index)) 5128 # No need to add it if not defined 5129 5130 if 'aS' not in self.params_ext: 5131 logger.critical('aS not define as external parameter adding it!') 5132 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5133 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5134 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5135 - def build(self, wanted_couplings = [], full=True):
5136 """modify the couplings to fit with MG4 convention and creates all the 5137 different files""" 5138 5139 self.pass_parameter_to_case_insensitive() 5140 self.refactorize(wanted_couplings) 5141 5142 # write the files 5143 if full: 5144 if wanted_couplings: 5145 # extract the wanted ct parameters 5146 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5147 self.write_all()
5148 5149
5150 - def open(self, name, comment='c', format='default'):
5151 """ Open the file name in the correct directory and with a valid 5152 header.""" 5153 5154 file_path = pjoin(self.dir_path, name) 5155 5156 if format == 'fortran': 5157 fsock = writers.FortranWriter(file_path, 'w') 5158 else: 5159 fsock = open(file_path, 'w') 5160 5161 file.writelines(fsock, comment * 77 + '\n') 5162 file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \ 5163 {'comment': comment + (6 - len(comment)) * ' '}) 5164 file.writelines(fsock, comment * 77 + '\n\n') 5165 return fsock
5166 5167
5168 - def write_all(self):
5169 """ write all the files """ 5170 #write the part related to the external parameter 5171 self.create_ident_card() 5172 self.create_param_read() 5173 5174 #write the definition of the parameter 5175 self.create_input() 5176 self.create_intparam_def(dp=True,mp=False) 5177 if self.opt['mp']: 5178 self.create_intparam_def(dp=False,mp=True) 5179 5180 5181 # definition of the coupling. 5182 self.create_actualize_mp_ext_param_inc() 5183 self.create_coupl_inc() 5184 self.create_write_couplings() 5185 self.create_couplings() 5186 5187 # the makefile 5188 self.create_makeinc() 5189 self.create_param_write() 5190 5191 # The model functions 5192 self.create_model_functions_inc() 5193 self.create_model_functions_def() 5194 5195 # The param_card.dat 5196 self.create_param_card() 5197 5198 5199 # All the standard files 5200 self.copy_standard_file()
5201 5202 ############################################################################ 5203 ## ROUTINE CREATING THE FILES ############################################ 5204 ############################################################################ 5205
5206 - def copy_standard_file(self):
5207 """Copy the standard files for the fortran model.""" 5208 5209 5210 #copy the library files 5211 file_to_link = ['formats.inc','printout.f', \ 5212 'rw_para.f', 'testprog.f'] 5213 5214 for filename in file_to_link: 5215 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5216 self.dir_path) 5217 5218 file = open(os.path.join(MG5DIR,\ 5219 'models/template_files/fortran/rw_para.f')).read() 5220 5221 includes=["include \'coupl.inc\'","include \'input.inc\'", 5222 "include \'model_functions.inc\'"] 5223 if self.opt['mp']: 5224 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5225 # In standalone and madloop we do no use the compiled param card but 5226 # still parse the .dat one so we must load it. 5227 if self.opt['loop_induced']: 5228 #loop induced follow MadEvent way to handle the card. 5229 load_card = '' 5230 lha_read_filename='lha_read.f' 5231 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5232 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5233 lha_read_filename='lha_read_mp.f' 5234 elif self.opt['export_format'].startswith('standalone') or self.opt['export_format'] in ['madweight']\ 5235 or self.opt['export_format'].startswith('matchbox'): 5236 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5237 lha_read_filename='lha_read.f' 5238 else: 5239 load_card = '' 5240 lha_read_filename='lha_read.f' 5241 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5242 os.path.join(self.dir_path,'lha_read.f')) 5243 5244 file=file%{'includes':'\n '.join(includes), 5245 'load_card':load_card} 5246 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5247 writer.writelines(file) 5248 writer.close() 5249 5250 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5251 or self.opt['loop_induced']: 5252 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5253 self.dir_path + '/makefile') 5254 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5255 path = pjoin(self.dir_path, 'makefile') 5256 text = open(path).read() 5257 text = text.replace('madevent','aMCatNLO') 5258 open(path, 'w').writelines(text) 5259 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5260 'madloop','madloop_optimized', 'standalone_rw', 'madweight','matchbox','madloop_matchbox']: 5261 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5262 self.dir_path + '/makefile') 5263 #elif self.opt['export_format'] in []: 5264 #pass 5265 else: 5266 raise MadGraph5Error('Unknown format')
5267
5268 - def create_coupl_inc(self):
5269 """ write coupling.inc """ 5270 5271 fsock = self.open('coupl.inc', format='fortran') 5272 if self.opt['mp']: 5273 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5274 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5275 format='fortran') 5276 5277 # Write header 5278 header = """double precision G 5279 common/strong/ G 5280 5281 double complex gal(2) 5282 common/weak/ gal 5283 5284 double precision MU_R 5285 common/rscale/ MU_R 5286 5287 double precision Nf 5288 parameter(Nf=%d) 5289 """ % self.model.get_nflav() 5290 5291 fsock.writelines(header) 5292 5293 if self.opt['mp']: 5294 header = """%(real_mp_format)s %(mp_prefix)sG 5295 common/MP_strong/ %(mp_prefix)sG 5296 5297 %(complex_mp_format)s %(mp_prefix)sgal(2) 5298 common/MP_weak/ %(mp_prefix)sgal 5299 5300 %(complex_mp_format)s %(mp_prefix)sMU_R 5301 common/MP_rscale/ %(mp_prefix)sMU_R 5302 5303 """ 5304 5305 5306 5307 5308 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5309 'complex_mp_format':self.mp_complex_format, 5310 'mp_prefix':self.mp_prefix}) 5311 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5312 'complex_mp_format':self.mp_complex_format, 5313 'mp_prefix':''}) 5314 5315 # Write the Mass definition/ common block 5316 masses = set() 5317 widths = set() 5318 if self.opt['complex_mass']: 5319 complex_mass = set() 5320 5321 for particle in self.model.get('particles'): 5322 #find masses 5323 one_mass = particle.get('mass') 5324 if one_mass.lower() != 'zero': 5325 masses.add(one_mass) 5326 5327 # find width 5328 one_width = particle.get('width') 5329 if one_width.lower() != 'zero': 5330 widths.add(one_width) 5331 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5332 complex_mass.add('CMASS_%s' % one_mass) 5333 5334 if masses: 5335 fsock.writelines('double precision '+','.join(masses)+'\n') 5336 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5337 if self.opt['mp']: 5338 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5339 ','.join(masses)+'\n') 5340 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5341 ','.join(masses)+'\n\n') 5342 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5343 self.mp_prefix+m for m in masses])+'\n') 5344 mp_fsock.writelines('common/MP_masses/ '+\ 5345 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5346 5347 if widths: 5348 fsock.writelines('double precision '+','.join(widths)+'\n') 5349 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5350 if self.opt['mp']: 5351 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5352 ','.join(widths)+'\n') 5353 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5354 ','.join(widths)+'\n\n') 5355 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5356 self.mp_prefix+w for w in widths])+'\n') 5357 mp_fsock.writelines('common/MP_widths/ '+\ 5358 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5359 5360 # Write the Couplings 5361 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5362 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5363 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5364 if self.opt['mp']: 5365 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5366 ','.join(coupling_list)+'\n') 5367 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5368 ','.join(coupling_list)+'\n\n') 5369 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5370 self.mp_prefix+c for c in coupling_list])+'\n') 5371 mp_fsock.writelines('common/MP_couplings/ '+\ 5372 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5373 5374 # Write complex mass for complex mass scheme (if activated) 5375 if self.opt['complex_mass'] and complex_mass: 5376 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5377 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5378 if self.opt['mp']: 5379 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5380 ','.join(complex_mass)+'\n') 5381 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5382 ','.join(complex_mass)+'\n\n') 5383 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5384 self.mp_prefix+cm for cm in complex_mass])+'\n') 5385 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5386 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5387
5388 - def create_write_couplings(self):
5389 """ write the file coupl_write.inc """ 5390 5391 fsock = self.open('coupl_write.inc', format='fortran') 5392 5393 fsock.writelines("""write(*,*) ' Couplings of %s' 5394 write(*,*) ' ---------------------------------' 5395 write(*,*) ' '""" % self.model_name) 5396 def format(coupl): 5397 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5398 5399 # Write the Couplings 5400 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5401 fsock.writelines('\n'.join(lines)) 5402 5403
5404 - def create_input(self):
5405 """create input.inc containing the definition of the parameters""" 5406 5407 fsock = self.open('input.inc', format='fortran') 5408 if self.opt['mp']: 5409 mp_fsock = self.open('mp_input.inc', format='fortran') 5410 5411 #find mass/ width since they are already define 5412 already_def = set() 5413 for particle in self.model.get('particles'): 5414 already_def.add(particle.get('mass').lower()) 5415 already_def.add(particle.get('width').lower()) 5416 if self.opt['complex_mass']: 5417 already_def.add('cmass_%s' % particle.get('mass').lower()) 5418 5419 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5420 name.lower() not in already_def 5421 5422 real_parameters = [param.name for param in self.params_dep + 5423 self.params_indep if param.type == 'real' 5424 and is_valid(param.name)] 5425 5426 real_parameters += [param.name for param in self.params_ext 5427 if param.type == 'real'and 5428 is_valid(param.name)] 5429 5430 # check the parameter is a CT parameter or not 5431 # if yes, just use the needed ones 5432 real_parameters = [param for param in real_parameters \ 5433 if self.check_needed_param(param)] 5434 5435 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5436 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5437 if self.opt['mp']: 5438 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5439 self.mp_prefix+p for p in real_parameters])+'\n') 5440 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5441 self.mp_prefix+p for p in real_parameters])+'\n\n') 5442 5443 complex_parameters = [param.name for param in self.params_dep + 5444 self.params_indep if param.type == 'complex' and 5445 is_valid(param.name)] 5446 5447 # check the parameter is a CT parameter or not 5448 # if yes, just use the needed ones 5449 complex_parameters = [param for param in complex_parameters \ 5450 if self.check_needed_param(param)] 5451 5452 if complex_parameters: 5453 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 5454 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5455 if self.opt['mp']: 5456 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5457 self.mp_prefix+p for p in complex_parameters])+'\n') 5458 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5459 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5460
5461 - def check_needed_param(self, param):
5462 """ Returns whether the parameter in argument is needed for this 5463 specific computation or not.""" 5464 5465 # If this is a leading order model or if there was no CT parameter 5466 # employed in this NLO model, one can directly return that the 5467 # parameter is needed since only CTParameters are filtered. 5468 if not hasattr(self, 'allCTparameters') or \ 5469 self.allCTparameters is None or self.usedCTparameters is None or \ 5470 len(self.allCTparameters)==0: 5471 return True 5472 5473 # We must allow the conjugate shorthand for the complex parameter as 5474 # well so we check wether either the parameter name or its name with 5475 # 'conjg__' substituted with '' is present in the list. 5476 # This is acceptable even if some parameter had an original name 5477 # including 'conjg__' in it, because at worst we export a parameter 5478 # was not needed. 5479 param = param.lower() 5480 cjg_param = param.replace('conjg__','',1) 5481 5482 # First make sure it is a CTparameter 5483 if param not in self.allCTparameters and \ 5484 cjg_param not in self.allCTparameters: 5485 return True 5486 5487 # Now check if it is in the list of CTparameters actually used 5488 return (param in self.usedCTparameters or \ 5489 cjg_param in self.usedCTparameters)
5490
5491 - def extract_needed_CTparam(self,wanted_couplings=[]):
5492 """ Extract what are the needed CT parameters given the wanted_couplings""" 5493 5494 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5495 # Setting these lists to none wil disable the filtering in 5496 # check_needed_param 5497 self.allCTparameters = None 5498 self.usedCTparameters = None 5499 return 5500 5501 # All CTparameters appearin in all CT couplings 5502 allCTparameters=self.model.map_CTcoup_CTparam.values() 5503 # Define in this class the list of all CT parameters 5504 self.allCTparameters=list(\ 5505 set(itertools.chain.from_iterable(allCTparameters))) 5506 5507 # All used CT couplings 5508 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5509 allUsedCTCouplings = [coupl for coupl in 5510 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5511 5512 # Now define the list of all CT parameters that are actually used 5513 self.usedCTparameters=list(\ 5514 set(itertools.chain.from_iterable([ 5515 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5516 ]))) 5517 5518 # Now at last, make these list case insensitive 5519 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5520 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5521
5522 - def create_intparam_def(self, dp=True, mp=False):
5523 """ create intparam_definition.inc setting the internal parameters. 5524 Output the double precision and/or the multiple precision parameters 5525 depending on the parameters dp and mp. If mp only, then the file names 5526 get the 'mp_' prefix. 5527 """ 5528 5529 fsock = self.open('%sintparam_definition.inc'% 5530 ('mp_' if mp and not dp else ''), format='fortran') 5531 5532 fsock.write_comments(\ 5533 "Parameters that should not be recomputed event by event.\n") 5534 fsock.writelines("if(readlha) then\n") 5535 if dp: 5536 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 5537 if mp: 5538 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 5539 5540 for param in self.params_indep: 5541 if param.name == 'ZERO': 5542 continue 5543 # check whether the parameter is a CT parameter 5544 # if yes,just used the needed ones 5545 if not self.check_needed_param(param.name): 5546 continue 5547 if dp: 5548 fsock.writelines("%s = %s\n" % (param.name, 5549 self.p_to_f.parse(param.expr))) 5550 if mp: 5551 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5552 self.mp_p_to_f.parse(param.expr))) 5553 5554 fsock.writelines('endif') 5555 5556 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 5557 if dp: 5558 fsock.writelines("aS = G**2/4/pi\n") 5559 if mp: 5560 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 5561 for param in self.params_dep: 5562 # check whether the parameter is a CT parameter 5563 # if yes,just used the needed ones 5564 if not self.check_needed_param(param.name): 5565 continue 5566 if dp: 5567 fsock.writelines("%s = %s\n" % (param.name, 5568 self.p_to_f.parse(param.expr))) 5569 elif mp: 5570 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5571 self.mp_p_to_f.parse(param.expr))) 5572 5573 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 5574 if ('aEWM1',) in self.model['parameters']: 5575 if dp: 5576 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 5577 gal(2) = 1d0 5578 """) 5579 elif mp: 5580 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 5581 %(mp_prefix)sgal(2) = 1d0 5582 """ %{'mp_prefix':self.mp_prefix}) 5583 pass 5584 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 5585 elif ('Gf',) in self.model['parameters']: 5586 if dp: 5587 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*MDL_SW*DSQRT(MDL_Gf) 5588 gal(2) = 1d0 5589 """) 5590 elif mp: 5591 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*MP__MDL_SW*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 5592 %(mp_prefix)sgal(2) = 1d0 5593 """ %{'mp_prefix':self.mp_prefix}) 5594 pass 5595 else: 5596 if dp: 5597 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 5598 fsock.writelines(""" gal(1) = 1d0 5599 gal(2) = 1d0 5600 """) 5601 elif mp: 5602 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 5603 %(mp_prefix)sgal(2) = 1e0_16 5604 """%{'mp_prefix':self.mp_prefix})
5605 5606
5607 - def create_couplings(self):
5608 """ create couplings.f and all couplingsX.f """ 5609 5610 nb_def_by_file = 25 5611 5612 self.create_couplings_main(nb_def_by_file) 5613 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5614 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5615 5616 for i in range(nb_coup_indep): 5617 # For the independent couplings, we compute the double and multiple 5618 # precision ones together 5619 data = self.coups_indep[nb_def_by_file * i: 5620 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5621 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5622 5623 for i in range(nb_coup_dep): 5624 # For the dependent couplings, we compute the double and multiple 5625 # precision ones in separate subroutines. 5626 data = self.coups_dep[nb_def_by_file * i: 5627 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5628 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5629 dp=True,mp=False) 5630 if self.opt['mp']: 5631 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5632 dp=False,mp=True)
5633 5634
5635 - def create_couplings_main(self, nb_def_by_file=25):
5636 """ create couplings.f """ 5637 5638 fsock = self.open('couplings.f', format='fortran') 5639 5640 fsock.writelines("""subroutine coup() 5641 5642 implicit none 5643 double precision PI, ZERO 5644 logical READLHA 5645 parameter (PI=3.141592653589793d0) 5646 parameter (ZERO=0d0) 5647 include \'model_functions.inc\'""") 5648 if self.opt['mp']: 5649 fsock.writelines("""%s MP__PI, MP__ZERO 5650 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5651 parameter (MP__ZERO=0e0_16) 5652 include \'mp_input.inc\' 5653 include \'mp_coupl.inc\' 5654 """%self.mp_real_format) 5655 fsock.writelines("""include \'input.inc\' 5656 include \'coupl.inc\' 5657 READLHA = .true. 5658 include \'intparam_definition.inc\'""") 5659 if self.opt['mp']: 5660 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 5661 5662 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5663 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5664 5665 fsock.writelines('\n'.join(\ 5666 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 5667 5668 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5669 5670 fsock.writelines('\n'.join(\ 5671 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5672 for i in range(nb_coup_dep)])) 5673 if self.opt['mp']: 5674 fsock.writelines('\n'.join(\ 5675 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5676 for i in range(nb_coup_dep)])) 5677 fsock.writelines('''\n return \n end\n''') 5678 5679 fsock.writelines("""subroutine update_as_param() 5680 5681 implicit none 5682 double precision PI, ZERO 5683 logical READLHA 5684 parameter (PI=3.141592653589793d0) 5685 parameter (ZERO=0d0) 5686 include \'model_functions.inc\'""") 5687 fsock.writelines("""include \'input.inc\' 5688 include \'coupl.inc\' 5689 READLHA = .false.""") 5690 fsock.writelines(""" 5691 include \'intparam_definition.inc\'\n 5692 """) 5693 5694 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5695 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5696 5697 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5698 5699 fsock.writelines('\n'.join(\ 5700 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5701 for i in range(nb_coup_dep)])) 5702 fsock.writelines('''\n return \n end\n''') 5703 5704 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 5705 5706 implicit none 5707 double precision PI 5708 parameter (PI=3.141592653589793d0) 5709 double precision mu_r2, as2 5710 include \'model_functions.inc\'""") 5711 fsock.writelines("""include \'input.inc\' 5712 include \'coupl.inc\'""") 5713 fsock.writelines(""" 5714 MU_R = mu_r2 5715 G = SQRT(4.0d0*PI*AS2) 5716 AS = as2 5717 5718 CALL UPDATE_AS_PARAM() 5719 """) 5720 fsock.writelines('''\n return \n end\n''') 5721 5722 if self.opt['mp']: 5723 fsock.writelines("""subroutine mp_update_as_param() 5724 5725 implicit none 5726 logical READLHA 5727 include \'model_functions.inc\'""") 5728 fsock.writelines("""%s MP__PI, MP__ZERO 5729 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5730 parameter (MP__ZERO=0e0_16) 5731 include \'mp_input.inc\' 5732 include \'mp_coupl.inc\' 5733 """%self.mp_real_format) 5734 fsock.writelines("""include \'input.inc\' 5735 include \'coupl.inc\' 5736 include \'actualize_mp_ext_params.inc\' 5737 READLHA = .false. 5738 include \'mp_intparam_definition.inc\'\n 5739 """) 5740 5741 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5742 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5743 5744 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5745 5746 fsock.writelines('\n'.join(\ 5747 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5748 for i in range(nb_coup_dep)])) 5749 fsock.writelines('''\n return \n end\n''')
5750
5751 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
5752 """ create couplings[nb_file].f containing information coming from data. 5753 Outputs the computation of the double precision and/or the multiple 5754 precision couplings depending on the parameters dp and mp. 5755 If mp is True and dp is False, then the prefix 'MP_' is appended to the 5756 filename and subroutine name. 5757 """ 5758 5759 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 5760 nb_file), format='fortran') 5761 fsock.writelines("""subroutine %scoup%s() 5762 5763 implicit none 5764 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 5765 if dp: 5766 fsock.writelines(""" 5767 double precision PI, ZERO 5768 parameter (PI=3.141592653589793d0) 5769 parameter (ZERO=0d0) 5770 include 'input.inc' 5771 include 'coupl.inc'""") 5772 if mp: 5773 fsock.writelines("""%s MP__PI, MP__ZERO 5774 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5775 parameter (MP__ZERO=0e0_16) 5776 include \'mp_input.inc\' 5777 include \'mp_coupl.inc\' 5778 """%self.mp_real_format) 5779 5780 for coupling in data: 5781 if dp: 5782 fsock.writelines('%s = %s' % (coupling.name, 5783 self.p_to_f.parse(coupling.expr))) 5784 if mp: 5785 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 5786 self.mp_p_to_f.parse(coupling.expr))) 5787 fsock.writelines('end')
5788
5789 - def create_model_functions_inc(self):
5790 """ Create model_functions.inc which contains the various declarations 5791 of auxiliary functions which might be used in the couplings expressions 5792 """ 5793 5794 fsock = self.open('model_functions.inc', format='fortran') 5795 fsock.writelines("""double complex cond 5796 double complex condif 5797 double complex reglog 5798 double complex arg""") 5799 if self.opt['mp']: 5800 fsock.writelines("""%(complex_mp_format)s mp_cond 5801 %(complex_mp_format)s mp_condif 5802 %(complex_mp_format)s mp_reglog 5803 %(complex_mp_format)s mp_arg"""\ 5804 %{'complex_mp_format':self.mp_complex_format})
5805
5806 - def create_model_functions_def(self):
5807 """ Create model_functions.f which contains the various definitions 5808 of auxiliary functions which might be used in the couplings expressions 5809 Add the functions.f functions for formfactors support 5810 """ 5811 5812 fsock = self.open('model_functions.f', format='fortran') 5813 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 5814 implicit none 5815 double complex condition,truecase,falsecase 5816 if(condition.eq.(0.0d0,0.0d0)) then 5817 cond=truecase 5818 else 5819 cond=falsecase 5820 endif 5821 end 5822 5823 double complex function condif(condition,truecase,falsecase) 5824 implicit none 5825 logical condition 5826 double complex truecase,falsecase 5827 if(condition) then 5828 condif=truecase 5829 else 5830 condif=falsecase 5831 endif 5832 end 5833 5834 double complex function reglog(arg) 5835 implicit none 5836 double complex arg 5837 if(arg.eq.(0.0d0,0.0d0)) then 5838 reglog=(0.0d0,0.0d0) 5839 else 5840 reglog=log(arg) 5841 endif 5842 end 5843 5844 double complex function reglogp(arg) 5845 implicit none 5846 double complex arg 5847 if(arg.eq.(0.0d0,0.0d0))then 5848 reglogp=(0.0d0,0.0d0) 5849 else 5850 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 5851 reglogp=log(arg) + 2.0d0*3.1415926535897932d0*(0.0d0,1.0d0) 5852 else 5853 reglogp=log(arg) 5854 endif 5855 endif 5856 end 5857 5858 double complex function reglogm(arg) 5859 implicit none 5860 double complex arg 5861 if(arg.eq.(0.0d0,0.0d0))then 5862 reglogm=(0.0d0,0.0d0) 5863 else 5864 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 5865 reglogm=log(arg) - 2.0d0*3.1415926535897932d0*(0.0d0,1.0d0) 5866 else 5867 reglogm=log(arg) 5868 endif 5869 endif 5870 end 5871 5872 double complex function arg(comnum) 5873 implicit none 5874 double complex comnum 5875 double complex iim 5876 iim = (0.0d0,1.0d0) 5877 if(comnum.eq.(0.0d0,0.0d0)) then 5878 arg=(0.0d0,0.0d0) 5879 else 5880 arg=log(comnum/abs(comnum))/iim 5881 endif 5882 end""") 5883 if self.opt['mp']: 5884 fsock.writelines(""" 5885 5886 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 5887 implicit none 5888 %(complex_mp_format)s condition,truecase,falsecase 5889 if(condition.eq.(0.0e0_16,0.0e0_16)) then 5890 mp_cond=truecase 5891 else 5892 mp_cond=falsecase 5893 endif 5894 end 5895 5896 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 5897 implicit none 5898 logical condition 5899 %(complex_mp_format)s truecase,falsecase 5900 if(condition) then 5901 mp_condif=truecase 5902 else 5903 mp_condif=falsecase 5904 endif 5905 end 5906 5907 %(complex_mp_format)s function mp_reglog(arg) 5908 implicit none 5909 %(complex_mp_format)s arg 5910 if(arg.eq.(0.0e0_16,0.0e0_16)) then 5911 mp_reglog=(0.0e0_16,0.0e0_16) 5912 else 5913 mp_reglog=log(arg) 5914 endif 5915 end 5916 5917 %(complex_mp_format)s function mp_reglogp(arg) 5918 implicit none 5919 %(complex_mp_format)s arg 5920 if(arg.eq.(0.0e0_16,0.0e0_16))then 5921 mp_reglogp=(0.0e0_16,0.0e0_16) 5922 else 5923 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 5924 mp_reglogp=log(arg) + 2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16) 5925 else 5926 mp_reglogp=log(arg) 5927 endif 5928 endif 5929 end 5930 5931 %(complex_mp_format)s function mp_reglogm(arg) 5932 implicit none 5933 %(complex_mp_format)s arg 5934 if(arg.eq.(0.0e0_16,0.0e0_16))then 5935 mp_reglogm=(0.0e0_16,0.0e0_16) 5936 else 5937 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 5938 mp_reglogm=log(arg) - 2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16) 5939 else 5940 mp_reglogm=log(arg) 5941 endif 5942 endif 5943 end 5944 5945 %(complex_mp_format)s function mp_arg(comnum) 5946 implicit none 5947 %(complex_mp_format)s comnum 5948 %(complex_mp_format)s imm 5949 imm = (0.0e0_16,1.0e0_16) 5950 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 5951 mp_arg=(0.0e0_16,0.0e0_16) 5952 else 5953 mp_arg=log(comnum/abs(comnum))/imm 5954 endif 5955 end"""%{'complex_mp_format':self.mp_complex_format}) 5956 5957 5958 #check for the file functions.f 5959 model_path = self.model.get('modelpath') 5960 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 5961 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 5962 input = pjoin(model_path,'Fortran','functions.f') 5963 file.writelines(fsock, open(input).read()) 5964 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 5965 5966 # check for functions define in the UFO model 5967 ufo_fct = self.model.get('functions') 5968 if ufo_fct: 5969 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 5970 for fct in ufo_fct: 5971 # already handle by default 5972 if fct.name not in ["complexconjugate", "re", "im", "sec", 5973 "csc", "asec", "acsc", "theta_function", "cond", 5974 "condif", "reglogp", "reglogm", "reglog", "arg"]: 5975 ufo_fct_template = """ 5976 double complex function %(name)s(%(args)s) 5977 implicit none 5978 double complex %(args)s 5979 %(name)s = %(fct)s 5980 5981 return 5982 end 5983 """ 5984 text = ufo_fct_template % { 5985 'name': fct.name, 5986 'args': ", ".join(fct.arguments), 5987 'fct': self.p_to_f.parse(fct.expr) 5988 } 5989 fsock.writelines(text) 5990 if self.opt['mp']: 5991 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 5992 for fct in ufo_fct: 5993 # already handle by default 5994 if fct.name not in ["complexconjugate", "re", "im", "sec", 5995 "csc", "asec", "acsc", "theta_function", "cond", 5996 "condif", "reglogp", "reglogm", "reglog", "arg"]: 5997 ufo_fct_template = """ 5998 %(complex_mp_format)s function mp__%(name)s(mp__%(args)s) 5999 implicit none 6000 %(complex_mp_format)s mp__%(args)s 6001 mp__%(name)s = %(fct)s 6002 6003 return 6004 end 6005 """ 6006 text = ufo_fct_template % { 6007 'name': fct.name, 6008 'args': ", mp__".join(fct.arguments), 6009 'fct': self.mp_p_to_f.parse(fct.expr), 6010 'complex_mp_format': self.mp_complex_format 6011 } 6012 fsock.writelines(text) 6013 6014 6015 6016 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6017 6018 6019
6020 - def create_makeinc(self):
6021 """create makeinc.inc containing the file to compile """ 6022 6023 fsock = self.open('makeinc.inc', comment='#') 6024 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6025 text += ' model_functions.o ' 6026 6027 nb_coup_indep = 1 + len(self.coups_dep) // 25 6028 nb_coup_dep = 1 + len(self.coups_indep) // 25 6029 couplings_files=['couplings%s.o' % (i+1) \ 6030 for i in range(nb_coup_dep + nb_coup_indep) ] 6031 if self.opt['mp']: 6032 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6033 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6034 text += ' '.join(couplings_files) 6035 fsock.writelines(text)
6036
6037 - def create_param_write(self):
6038 """ create param_write """ 6039 6040 fsock = self.open('param_write.inc', format='fortran') 6041 6042 fsock.writelines("""write(*,*) ' External Params' 6043 write(*,*) ' ---------------------------------' 6044 write(*,*) ' '""") 6045 def format(name): 6046 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6047 6048 # Write the external parameter 6049 lines = [format(param.name) for param in self.params_ext] 6050 fsock.writelines('\n'.join(lines)) 6051 6052 fsock.writelines("""write(*,*) ' Internal Params' 6053 write(*,*) ' ---------------------------------' 6054 write(*,*) ' '""") 6055 lines = [format(data.name) for data in self.params_indep 6056 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6057 fsock.writelines('\n'.join(lines)) 6058 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6059 write(*,*) ' ----------------------------------------' 6060 write(*,*) ' '""") 6061 lines = [format(data.name) for data in self.params_dep \ 6062 if self.check_needed_param(data.name)] 6063 6064 fsock.writelines('\n'.join(lines)) 6065 6066 6067
6068 - def create_ident_card(self):
6069 """ create the ident_card.dat """ 6070 6071 def format(parameter): 6072 """return the line for the ident_card corresponding to this parameter""" 6073 colum = [parameter.lhablock.lower()] + \ 6074 [str(value) for value in parameter.lhacode] + \ 6075 [parameter.name] 6076 if not parameter.name: 6077 return '' 6078 return ' '.join(colum)+'\n'
6079 6080 fsock = self.open('ident_card.dat') 6081 6082 external_param = [format(param) for param in self.params_ext] 6083 fsock.writelines('\n'.join(external_param)) 6084
6085 - def create_actualize_mp_ext_param_inc(self):
6086 """ create the actualize_mp_ext_params.inc code """ 6087 6088 # In principle one should actualize all external, but for now, it is 6089 # hardcoded that only AS and MU_R can by dynamically changed by the user 6090 # so that we only update those ones. 6091 # Of course, to be on the safe side, one could decide to update all 6092 # external parameters. 6093 update_params_list=[p for p in self.params_ext if p.name in 6094 self.PS_dependent_key] 6095 6096 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6097 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6098 for param in update_params_list] 6099 # When read_lha is false, it is G which is taken in input and not AS, so 6100 # this is what should be reset here too. 6101 if 'aS' in [param.name for param in update_params_list]: 6102 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6103 6104 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6105 fsock.writelines('\n'.join(res_strings))
6106
6107 - def create_param_read(self):
6108 """create param_read""" 6109 6110 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6111 or self.opt['loop_induced']: 6112 fsock = self.open('param_read.inc', format='fortran') 6113 fsock.writelines(' include \'../param_card.inc\'') 6114 return 6115 6116 def format_line(parameter): 6117 """return the line for the ident_card corresponding to this 6118 parameter""" 6119 template = \ 6120 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6121 % {'name': parameter.name, 6122 'value': self.p_to_f.parse(str(parameter.value.real))} 6123 if self.opt['mp']: 6124 template = template+ \ 6125 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6126 "%(mp_prefix)s%(name)s,%(value)s)") \ 6127 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6128 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6129 return template 6130 6131 fsock = self.open('param_read.inc', format='fortran') 6132 res_strings = [format_line(param) \ 6133 for param in self.params_ext] 6134 6135 # Correct width sign for Majorana particles (where the width 6136 # and mass need to have the same sign) 6137 for particle in self.model.get('particles'): 6138 if particle.is_fermion() and particle.get('self_antipart') and \ 6139 particle.get('width').lower() != 'zero': 6140 6141 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6142 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6143 if self.opt['mp']: 6144 res_strings.append(\ 6145 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6146 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6147 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6148 6149 fsock.writelines('\n'.join(res_strings)) 6150
6151 - def create_param_card(self):
6152 """ create the param_card.dat """ 6153 6154 #1. Check if a default param_card is present: 6155 done = False 6156 if hasattr(self.model, 'restrict_card') and isinstance(self.model.restrict_card, str): 6157 restrict_name = os.path.basename(self.model.restrict_card)[9:-4] 6158 model_path = self.model.get('modelpath') 6159 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6160 done = True 6161 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6162 pjoin(self.dir_path, 'param_card.dat')) 6163 if not done: 6164 out_path = pjoin(self.dir_path, 'param_card.dat') 6165 param_writer.ParamCardWriter(self.model, out_path) 6166 6167 out_path2 = None 6168 if hasattr(self.model, 'rule_card'): 6169 out_path2 = pjoin(self.dir_path, 'param_card_rule.dat') 6170 self.model.rule_card.write_file(out_path2) 6171 6172 # IF MSSM convert the card to SLAH1 6173 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 6174 import models.check_param_card as translator 6175 6176 # Check the format of the param_card for Pythia and make it correct 6177 if out_path2: 6178 translator.make_valid_param_card(out_path, out_path2) 6179 translator.convert_to_slha1(out_path)
6180
6181 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True):
6182 """ Determine which Export_v4 class is required. cmd is the command 6183 interface containing all potential usefull information. 6184 The output_type argument specifies from which context the output 6185 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 6186 and 'default' for tree-level outputs.""" 6187 6188 opt = cmd.options 6189 6190 # First treat the MadLoop5 standalone case 6191 MadLoop_SA_options = {'clean': not noclean, 6192 'complex_mass':cmd.options['complex_mass_scheme'], 6193 'export_format':'madloop', 6194 'mp':True, 6195 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 6196 'cuttools_dir': cmd._cuttools_dir, 6197 'iregi_dir':cmd._iregi_dir, 6198 'pjfry_dir':cmd.options["pjfry"], 6199 'golem_dir':cmd.options["golem"], 6200 'fortran_compiler':cmd.options['fortran_compiler'], 6201 'f2py_compiler':cmd.options['f2py_compiler'], 6202 'output_dependencies':cmd.options['output_dependencies'], 6203 'SubProc_prefix':'P', 6204 'compute_color_flows':cmd.options['loop_color_flows'], 6205 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '' 6206 } 6207 6208 if output_type.startswith('madloop'): 6209 import madgraph.loop.loop_exporters as loop_exporters 6210 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 6211 ExporterClass=None 6212 if not cmd.options['loop_optimized_output']: 6213 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 6214 else: 6215 if output_type == "madloop": 6216 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 6217 MadLoop_SA_options['export_format'] = 'madloop_optimized' 6218 elif output_type == "madloop_matchbox": 6219 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 6220 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 6221 else: 6222 raise Exception, "output_type not recognize %s" % output_type 6223 return ExporterClass(cmd._mgme_dir, cmd._export_dir, MadLoop_SA_options) 6224 else: 6225 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 6226 ' in %s'%str(cmd._mgme_dir)) 6227 6228 # Then treat the aMC@NLO output 6229 elif output_type=='amcatnlo': 6230 import madgraph.iolibs.export_fks as export_fks 6231 ExporterClass=None 6232 amcatnlo_options = dict(opt) 6233 amcatnlo_options.update(MadLoop_SA_options) 6234 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 6235 if not cmd.options['loop_optimized_output']: 6236 logger.info("Writing out the aMC@NLO code") 6237 ExporterClass = export_fks.ProcessExporterFortranFKS 6238 amcatnlo_options['export_format']='FKS5_default' 6239 else: 6240 logger.info("Writing out the aMC@NLO code, using optimized Loops") 6241 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 6242 amcatnlo_options['export_format']='FKS5_optimized' 6243 return ExporterClass(cmd._mgme_dir, cmd._export_dir, amcatnlo_options) 6244 6245 # Then the default tree-level output 6246 elif output_type=='default': 6247 assert group_subprocesses in [True, False] 6248 6249 opt = dict(opt) 6250 opt.update({'clean': not noclean, 6251 'complex_mass': cmd.options['complex_mass_scheme'], 6252 'export_format':cmd._export_format, 6253 'mp': False, 6254 'sa_symmetry':False, 6255 'model': cmd._curr_model.get('name') }) 6256 6257 format = cmd._export_format #shortcut 6258 6259 if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']: 6260 opt['sa_symmetry'] = True 6261 6262 loop_induced_opt = dict(opt) 6263 loop_induced_opt.update(MadLoop_SA_options) 6264 loop_induced_opt['export_format'] = 'madloop_optimized' 6265 loop_induced_opt['SubProc_prefix'] = 'PV' 6266 # For loop_induced output with MadEvent, we must have access to the 6267 # color flows. 6268 loop_induced_opt['compute_color_flows'] = True 6269 for key in opt: 6270 if key not in loop_induced_opt: 6271 loop_induced_opt[key] = opt[key] 6272 6273 if format == 'matrix' or format.startswith('standalone'): 6274 return ProcessExporterFortranSA(cmd._mgme_dir, cmd._export_dir, opt, 6275 format=format) 6276 6277 elif format in ['madevent'] and group_subprocesses: 6278 if isinstance(cmd._curr_amps[0], 6279 loop_diagram_generation.LoopAmplitude): 6280 import madgraph.loop.loop_exporters as loop_exporters 6281 return loop_exporters.LoopInducedExporterMEGroup(cmd._mgme_dir, 6282 cmd._export_dir,loop_induced_opt) 6283 else: 6284 return ProcessExporterFortranMEGroup(cmd._mgme_dir, 6285 cmd._export_dir,opt) 6286 elif format in ['madevent']: 6287 if isinstance(cmd._curr_amps[0], 6288 loop_diagram_generation.LoopAmplitude): 6289 import madgraph.loop.loop_exporters as loop_exporters 6290 return loop_exporters.LoopInducedExporterMENoGroup(cmd._mgme_dir, 6291 cmd._export_dir,loop_induced_opt) 6292 else: 6293 return ProcessExporterFortranME(cmd._mgme_dir, 6294 cmd._export_dir,opt) 6295 elif format in ['matchbox']: 6296 return ProcessExporterFortranMatchBox(cmd._mgme_dir, cmd._export_dir,opt) 6297 elif cmd._export_format in ['madweight'] and group_subprocesses: 6298 6299 return ProcessExporterFortranMWGroup(cmd._mgme_dir, cmd._export_dir, 6300 opt) 6301 elif cmd._export_format in ['madweight']: 6302 return ProcessExporterFortranMW(cmd._mgme_dir, cmd._export_dir, opt) 6303 else: 6304 raise Exception, 'Wrong export_v4 format' 6305 else: 6306 raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
6307
6308 6309 6310 6311 #=============================================================================== 6312 # ProcessExporterFortranMWGroup 6313 #=============================================================================== 6314 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
6315 """Class to take care of exporting a set of matrix elements to 6316 MadEvent subprocess group format.""" 6317 6318 matrix_file = "matrix_madweight_group_v4.inc" 6319 6320 #=========================================================================== 6321 # generate_subprocess_directory_v4 6322 #===========================================================================
6323 - def generate_subprocess_directory_v4(self, subproc_group, 6324 fortran_model, 6325 group_number):
6326 """Generate the Pn directory for a subprocess group in MadEvent, 6327 including the necessary matrix_N.f files, configs.inc and various 6328 other helper files""" 6329 6330 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 6331 raise base_objects.PhysicsObject.PhysicsObjectError,\ 6332 "subproc_group object not SubProcessGroup" 6333 6334 if not self.model: 6335 self.model = subproc_group.get('matrix_elements')[0].\ 6336 get('processes')[0].get('model') 6337 6338 pathdir = os.path.join(self.dir_path, 'SubProcesses') 6339 6340 # Create the directory PN in the specified path 6341 subprocdir = "P%d_%s" % (subproc_group.get('number'), 6342 subproc_group.get('name')) 6343 try: 6344 os.mkdir(pjoin(pathdir, subprocdir)) 6345 except os.error as error: 6346 logger.warning(error.strerror + " " + subprocdir) 6347 6348 6349 logger.info('Creating files in directory %s' % subprocdir) 6350 Ppath = pjoin(pathdir, subprocdir) 6351 6352 # Create the matrix.f files, auto_dsig.f files and all inc files 6353 # for all subprocesses in the group 6354 6355 maxamps = 0 6356 maxflows = 0 6357 tot_calls = 0 6358 6359 matrix_elements = subproc_group.get('matrix_elements') 6360 6361 for ime, matrix_element in \ 6362 enumerate(matrix_elements): 6363 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 6364 calls, ncolor = \ 6365 self.write_matrix_element_v4(writers.FortranWriter(filename), 6366 matrix_element, 6367 fortran_model, 6368 str(ime+1), 6369 subproc_group.get('diagram_maps')[\ 6370 ime]) 6371 6372 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 6373 self.write_auto_dsig_file(writers.FortranWriter(filename), 6374 matrix_element, 6375 str(ime+1)) 6376 6377 # Keep track of needed quantities 6378 tot_calls += int(calls) 6379 maxflows = max(maxflows, ncolor) 6380 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 6381 6382 # Draw diagrams 6383 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 6384 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 6385 get('diagrams'), 6386 filename, 6387 model = \ 6388 matrix_element.get('processes')[0].\ 6389 get('model'), 6390 amplitude=True) 6391 logger.info("Generating Feynman diagrams for " + \ 6392 matrix_element.get('processes')[0].nice_string()) 6393 plot.draw() 6394 6395 # Extract number of external particles 6396 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 6397 6398 # Generate a list of diagrams corresponding to each configuration 6399 # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number 6400 # If a subprocess has no diagrams for this config, the number is 0 6401 6402 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 6403 6404 filename = pjoin(Ppath, 'auto_dsig.f') 6405 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 6406 subproc_group) 6407 6408 filename = pjoin(Ppath,'configs.inc') 6409 nconfigs, s_and_t_channels = self.write_configs_file(\ 6410 writers.FortranWriter(filename), 6411 subproc_group, 6412 subproc_diagrams_for_config) 6413 6414 filename = pjoin(Ppath, 'leshouche.inc') 6415 self.write_leshouche_file(writers.FortranWriter(filename), 6416 subproc_group) 6417 6418 filename = pjoin(Ppath, 'phasespace.inc') 6419 self.write_phasespace_file(writers.FortranWriter(filename), 6420 nconfigs) 6421 6422 6423 filename = pjoin(Ppath, 'maxamps.inc') 6424 self.write_maxamps_file(writers.FortranWriter(filename), 6425 maxamps, 6426 maxflows, 6427 max([len(me.get('processes')) for me in \ 6428 matrix_elements]), 6429 len(matrix_elements)) 6430 6431 filename = pjoin(Ppath, 'mirrorprocs.inc') 6432 self.write_mirrorprocs(writers.FortranWriter(filename), 6433 subproc_group) 6434 6435 filename = pjoin(Ppath, 'nexternal.inc') 6436 self.write_nexternal_file(writers.FortranWriter(filename), 6437 nexternal, ninitial) 6438 6439 filename = pjoin(Ppath, 'pmass.inc') 6440 self.write_pmass_file(writers.FortranWriter(filename), 6441 matrix_element) 6442 6443 filename = pjoin(Ppath, 'props.inc') 6444 self.write_props_file(writers.FortranWriter(filename), 6445 matrix_element, 6446 s_and_t_channels) 6447 6448 # filename = pjoin(Ppath, 'processes.dat') 6449 # files.write_to_file(filename, 6450 # self.write_processes_file, 6451 # subproc_group) 6452 6453 # Generate jpgs -> pass in make_html 6454 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 6455 6456 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 6457 6458 for file in linkfiles: 6459 ln('../%s' % file, cwd=Ppath) 6460 6461 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 6462 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 6463 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 6464 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 6465 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 6466 ln('phasespace.inc', '../', log=True, cwd=Ppath) 6467 if not tot_calls: 6468 tot_calls = 0 6469 return tot_calls
6470 6471 #=========================================================================== 6472 # write_super_auto_dsig_file 6473 #===========================================================================
6474 - def write_super_auto_dsig_file(self, writer, subproc_group):
6475 """Write the auto_dsig.f file selecting between the subprocesses 6476 in subprocess group mode""" 6477 6478 replace_dict = {} 6479 6480 # Extract version number and date from VERSION file 6481 info_lines = self.get_mg5_info_lines() 6482 replace_dict['info_lines'] = info_lines 6483 6484 matrix_elements = subproc_group.get('matrix_elements') 6485 6486 # Extract process info lines 6487 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 6488 matrix_elements]) 6489 replace_dict['process_lines'] = process_lines 6490 6491 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 6492 replace_dict['nexternal'] = nexternal 6493 6494 replace_dict['nsprocs'] = 2*len(matrix_elements) 6495 6496 # Generate dsig definition line 6497 dsig_def_line = "DOUBLE PRECISION " + \ 6498 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 6499 range(len(matrix_elements))]) 6500 replace_dict["dsig_def_line"] = dsig_def_line 6501 6502 # Generate dsig process lines 6503 call_dsig_proc_lines = [] 6504 for iproc in range(len(matrix_elements)): 6505 call_dsig_proc_lines.append(\ 6506 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 6507 {"num": iproc + 1, 6508 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 6509 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 6510 6511 file = open(os.path.join(_file_path, \ 6512 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 6513 file = file % replace_dict 6514 6515 # Write the file 6516 writer.writelines(file)
6517 6518 #=========================================================================== 6519 # write_mirrorprocs 6520 #===========================================================================
6521 - def write_mirrorprocs(self, writer, subproc_group):
6522 """Write the mirrorprocs.inc file determining which processes have 6523 IS mirror process in subprocess group mode.""" 6524 6525 lines = [] 6526 bool_dict = {True: '.true.', False: '.false.'} 6527 matrix_elements = subproc_group.get('matrix_elements') 6528 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 6529 (len(matrix_elements), 6530 ",".join([bool_dict[me.get('has_mirror_process')] for \ 6531 me in matrix_elements]))) 6532 # Write the file 6533 writer.writelines(lines)
6534 6535 #=========================================================================== 6536 # write_configs_file 6537 #===========================================================================
6538 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6539 """Write the configs.inc file with topology information for a 6540 subprocess group. Use the first subprocess with a diagram for each 6541 configuration.""" 6542 6543 matrix_elements = subproc_group.get('matrix_elements') 6544 model = matrix_elements[0].get('processes')[0].get('model') 6545 6546 diagrams = [] 6547 config_numbers = [] 6548 for iconfig, config in enumerate(diagrams_for_config): 6549 # Check if any diagrams correspond to this config 6550 if set(config) == set([0]): 6551 continue 6552 subproc_diags = [] 6553 for s,d in enumerate(config): 6554 if d: 6555 subproc_diags.append(matrix_elements[s].\ 6556 get('diagrams')[d-1]) 6557 else: 6558 subproc_diags.append(None) 6559 diagrams.append(subproc_diags) 6560 config_numbers.append(iconfig + 1) 6561 6562 # Extract number of external particles 6563 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6564 6565 return len(diagrams), \ 6566 self.write_configs_file_from_diagrams(writer, diagrams, 6567 config_numbers, 6568 nexternal, ninitial, 6569 matrix_elements[0],model)
6570 6571 #=========================================================================== 6572 # write_run_configs_file 6573 #===========================================================================
6574 - def write_run_config_file(self, writer):
6575 """Write the run_configs.inc file for MadEvent""" 6576 6577 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 6578 text = open(path).read() % {'chanperjob':'2'} 6579 writer.write(text) 6580 return True
6581 6582 6583 #=========================================================================== 6584 # write_leshouche_file 6585 #===========================================================================
6586 - def write_leshouche_file(self, writer, subproc_group):
6587 """Write the leshouche.inc file for MG4""" 6588 6589 all_lines = [] 6590 6591 for iproc, matrix_element in \ 6592 enumerate(subproc_group.get('matrix_elements')): 6593 all_lines.extend(self.get_leshouche_lines(matrix_element, 6594 iproc)) 6595 6596 # Write the file 6597 writer.writelines(all_lines) 6598 6599 return True
6600