
Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30   
  31   
  32  import aloha 
  33   
  34  import madgraph.core.base_objects as base_objects 
  35  import madgraph.core.color_algebra as color 
  36  import madgraph.core.helas_objects as helas_objects 
  37  import madgraph.iolibs.drawing_eps as draw 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.iolibs.group_subprocs as group_subprocs 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  42  import madgraph.iolibs.template_files as template_files 
  43  import madgraph.iolibs.ufo_expression_parsers as parsers 
  44  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  45  import madgraph.various.diagram_symmetry as diagram_symmetry 
  46  import madgraph.various.misc as misc 
  47  import madgraph.various.banner as banner_mod 
  48  import madgraph.various.process_checks as process_checks 
  49  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  50  import aloha.create_aloha as create_aloha 
  51  import models.import_ufo as import_ufo 
  52  import models.write_param_card as param_writer 
  53  import models.check_param_card as check_param_card 
  54   
  55   
  56  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  57  from madgraph.iolibs.files import cp, ln, mv 
  58   
  59  pjoin = os.path.join 
  60   
  61  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  62  logger = logging.getLogger('madgraph.export_v4') 
  63   
  64  default_compiler= {'fortran': 'gfortran', 
  65                         'f2py': 'f2py'} 
  66  
  67  #=============================================================================== 
  68  # ProcessExporterFortran 
  69  #=============================================================================== 
  70  class ProcessExporterFortran(object): 
  71      """Class to take care of exporting a set of matrix elements to 
  72      Fortran (v4) format.""" 
  73  
  74      default_opt = {'clean': False, 'complex_mass':False, 
  75                     'export_format':'madevent', 'mp': False, 
  76                     'v5_model': True 
  77                     } 
  78  
  79      def __init__(self, mgme_dir = "", dir_path = "", opt=None): 
  80          """Initiate the ProcessExporterFortran with directory information""" 
  81          self.mgme_dir = mgme_dir 
  82          self.dir_path = dir_path 
  83          self.model = None 
  84  
  85          self.opt = dict(self.default_opt) 
  86          if opt: 
  87              self.opt.update(opt) 
  88  
  89          # place holder to pass information to the run_interface 
  90          self.proc_characteristic = banner_mod.ProcCharacteristic() 
  91  
  92  
  93      #=========================================================================== 
  94      # process exporter fortran: switch between grouped and non-grouped output 
  95      #=========================================================================== 
  96      def export_processes(self, matrix_elements, fortran_model): 
  97          """Make the switch between grouped and non-grouped output""" 
  98  
  99          calls = 0 
 100          if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 
 101              for (group_number, me_group) in enumerate(matrix_elements): 
 102                  calls = calls + self.generate_subprocess_directory_v4(\ 
 103                                      me_group, fortran_model, group_number) 
 104          else: 
 105              for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 
 106                  calls = calls + self.generate_subprocess_directory_v4(\ 
 107                                      me, fortran_model, me_number) 
 108  
 109          return calls 
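As a rough usage sketch of this dispatch (not part of the listing; MyExporter, my_matrix_elements and my_fortran_model are hypothetical stand-ins for a concrete exporter subclass and for objects produced by the usual MG5 generation and helas-call-writer steps):

# Illustrative sketch only: drive the grouped/non-grouped dispatch through a
# subclass that actually implements generate_subprocess_directory_v4.
exporter = MyExporter(mgme_dir=MG5DIR, dir_path='PROC_demo', opt={'clean': True})
ncalls = exporter.export_processes(my_matrix_elements, my_fortran_model)
logger.info('Generated %d helas calls' % ncalls)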
 110  
 111  
 112  
 113      #=========================================================================== 
 114      # create the run_card 
 115      #=========================================================================== 
 116      def create_run_card(self, matrix_elements, history): 
 117          """Create the default run_card for the generated processes.""" 
 118  
 119          run_card = banner_mod.RunCard() 
 120  
 121  
 122          default = True 
 123          if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 
 124              processes = [me.get('processes') for megroup in matrix_elements 
 125                                               for me in megroup['matrix_elements']] 
 126          elif matrix_elements: 
 127              processes = [me.get('processes') 
 128                                  for me in matrix_elements['matrix_elements']] 
 129          else: 
 130              default = False 
 131  
 132          if default: 
 133              run_card.create_default_for_process(self.proc_characteristic, 
 134                                                  history, 
 135                                                  processes) 
 136  
 137  
 138          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 
 139          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat')) 
140 141 142 #=========================================================================== 143 # copy the Template in a new directory. 144 #===========================================================================
145 - def copy_v4template(self, modelname):
146 """create the directory run_name as a copy of the MadEvent 147 Template, and clean the directory 148 """ 149 150 #First copy the full template tree if dir_path doesn't exit 151 if not os.path.isdir(self.dir_path): 152 assert self.mgme_dir, \ 153 "No valid MG_ME path given for MG4 run directory creation." 154 logger.info('initialize a new directory: %s' % \ 155 os.path.basename(self.dir_path)) 156 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 157 self.dir_path, True) 158 # distutils.dir_util.copy_tree since dir_path already exists 159 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 160 self.dir_path) 161 # Duplicate run_card and plot_card 162 for card in ['plot_card']: 163 try: 164 shutil.copy(pjoin(self.dir_path, 'Cards', 165 card + '.dat'), 166 pjoin(self.dir_path, 'Cards', 167 card + '_default.dat')) 168 except IOError: 169 logger.warning("Failed to copy " + card + ".dat to default") 170 elif os.getcwd() == os.path.realpath(self.dir_path): 171 logger.info('working in local directory: %s' % \ 172 os.path.realpath(self.dir_path)) 173 # distutils.dir_util.copy_tree since dir_path already exists 174 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 175 self.dir_path) 176 # for name in glob.glob(pjoin(self.mgme_dir, 'Template/LO/*')): 177 # name = os.path.basename(name) 178 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 179 # if os.path.isfile(filename): 180 # files.cp(filename, pjoin(self.dir_path,name)) 181 # elif os.path.isdir(filename): 182 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 183 # distutils.dir_util.copy_tree since dir_path already exists 184 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 185 self.dir_path) 186 # Duplicate run_card and plot_card 187 for card in ['plot_card']: 188 try: 189 shutil.copy(pjoin(self.dir_path, 'Cards', 190 card + '.dat'), 191 pjoin(self.dir_path, 'Cards', 192 card + '_default.dat')) 193 except IOError: 194 logger.warning("Failed to copy " + card + ".dat to default") 195 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 196 assert self.mgme_dir, \ 197 "No valid MG_ME path given for MG4 run directory creation." 198 try: 199 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 200 except IOError: 201 MG5_version = misc.get_pkg_info() 202 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 203 "5." 
+ MG5_version['version']) 204 205 #Ensure that the Template is clean 206 if self.opt['clean']: 207 logger.info('remove old information in %s' % \ 208 os.path.basename(self.dir_path)) 209 if os.environ.has_key('MADGRAPH_BASE'): 210 misc.call([pjoin('bin', 'internal', 'clean_template'), 211 '--web'], cwd=self.dir_path) 212 else: 213 try: 214 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 215 cwd=self.dir_path) 216 except Exception, why: 217 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 218 % (os.path.basename(self.dir_path),why)) 219 220 #Write version info 221 MG_version = misc.get_pkg_info() 222 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 223 MG_version['version']) 224 225 226 # add the makefile in Source directory 227 filename = pjoin(self.dir_path,'Source','makefile') 228 self.write_source_makefile(writers.FileWriter(filename)) 229 230 # add the DiscreteSampler information 231 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 232 pjoin(self.dir_path, 'Source')) 233 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 234 pjoin(self.dir_path, 'Source')) 235 236 # We need to create the correct open_data for the pdf 237 self.write_pdf_opendata()
238 239 240 241 242 #=========================================================================== 243 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 244 #===========================================================================
245 - def write_procdef_mg5(self, file_pos, modelname, process_str):
246 """ write an equivalent of the MG4 proc_card in order that all the Madevent 247 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 248 249 proc_card_template = template_files.mg4_proc_card.mg4_template 250 process_template = template_files.mg4_proc_card.process_template 251 process_text = '' 252 coupling = '' 253 new_process_content = [] 254 255 256 # First find the coupling and suppress the coupling from process_str 257 #But first ensure that coupling are define whithout spaces: 258 process_str = process_str.replace(' =', '=') 259 process_str = process_str.replace('= ', '=') 260 process_str = process_str.replace(',',' , ') 261 #now loop on the element and treat all the coupling 262 for info in process_str.split(): 263 if '=' in info: 264 coupling += info + '\n' 265 else: 266 new_process_content.append(info) 267 # Recombine the process_str (which is the input process_str without coupling 268 #info) 269 process_str = ' '.join(new_process_content) 270 271 #format the SubProcess 272 process_text += process_template.substitute({'process': process_str, \ 273 'coupling': coupling}) 274 275 text = proc_card_template.substitute({'process': process_text, 276 'model': modelname, 277 'multiparticle':''}) 278 ff = open(file_pos, 'w') 279 ff.write(text) 280 ff.close()
281 282 #=========================================================================== 283 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 284 #===========================================================================
 285      def finalize_v4_directory(self, matrix_elements, history = "", makejpg = False, 
 286                                online = False, compiler=default_compiler): 
 287          """Function to finalize v4 directory, for inheritance. 
 288          """ 
 289  
 290          self.create_run_card(matrix_elements, history) 
 291  
 292          pass 
293 294 #=========================================================================== 295 # Create the proc_characteristic file passing information to the run_interface 296 #===========================================================================
 297      def create_proc_charac(self, matrix_elements=None, history= "", **opts): 
 298          """Write the proc_characteristics file used by the run_interface.""" 
 299          self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics')) 
300 301 #=========================================================================== 302 # write_matrix_element_v4 303 #===========================================================================
 304      def write_matrix_element_v4(self): 
 305          """Function to write a matrix.f file, for inheritance. 
 306          """ 
 307          pass 
308 309 #=========================================================================== 310 # write_pdf_opendata 311 #===========================================================================
312 - def write_pdf_opendata(self):
313 """ modify the pdf opendata file, to allow direct access to cluster node 314 repository if configure""" 315 316 if not self.opt["cluster_local_path"]: 317 changer = {"pdf_systemwide": ""} 318 else: 319 to_add = """ 320 tempname='%(path)s'//Tablefile 321 open(IU,file=tempname,status='old',ERR=1) 322 return 323 1 tempname='%(path)s/Pdfdata/'//Tablefile 324 open(IU,file=tempname,status='old',ERR=2) 325 return 326 2 tempname='%(path)s/lhapdf'//Tablefile 327 open(IU,file=tempname,status='old',ERR=3) 328 return 329 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 330 open(IU,file=tempname,status='old',ERR=4) 331 return 332 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 333 open(IU,file=tempname,status='old',ERR=5) 334 return 335 """ % {"path" : self.opt["cluster_local_path"]} 336 337 changer = {"pdf_systemwide": to_add} 338 339 ff = open(pjoin(self.dir_path, "Source", "PDF", "opendata.f"),"w") 340 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 341 ff.write(template % changer) 342 343 # Do the same for lhapdf set 344 if not self.opt["cluster_local_path"]: 345 changer = {"cluster_specific_path": ""} 346 else: 347 to_add=""" 348 LHAPath='%(path)s/PDFsets' 349 Inquire(File=LHAPath, exist=exists) 350 if(exists)return 351 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 352 Inquire(File=LHAPath, exist=exists) 353 if(exists)return 354 LHAPath='%(path)s/../lhapdf/pdfsets/' 355 Inquire(File=LHAPath, exist=exists) 356 if(exists)return 357 LHAPath='./PDFsets' 358 """ % {"path" : self.opt["cluster_local_path"]} 359 changer = {"cluster_specific_path": to_add} 360 361 ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 362 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 363 ff.write(template % changer) 364 365 366 return
367 368 369 370 #=========================================================================== 371 # write_maxparticles_file 372 #===========================================================================
373 - def write_maxparticles_file(self, writer, matrix_elements):
374 """Write the maxparticles.inc file for MadEvent""" 375 376 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 377 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 378 matrix_elements.get('matrix_elements')]) 379 else: 380 maxparticles = max([me.get_nexternal_ninitial()[0] \ 381 for me in matrix_elements]) 382 383 lines = "integer max_particles\n" 384 lines += "parameter(max_particles=%d)" % maxparticles 385 386 # Write the file 387 writer.writelines(lines) 388 389 return True
390 391 392 #=========================================================================== 393 # export the model 394 #===========================================================================
395 - def export_model_files(self, model_path):
396 """Configure the files/link of the process according to the model""" 397 398 # Import the model 399 for file in os.listdir(model_path): 400 if os.path.isfile(pjoin(model_path, file)): 401 shutil.copy2(pjoin(model_path, file), \ 402 pjoin(self.dir_path, 'Source', 'MODEL'))
403 404 418 425 426 #=========================================================================== 427 # export the helas routine 428 #===========================================================================
429 - def export_helas(self, helas_path):
430 """Configure the files/link of the process according to the model""" 431 432 # Import helas routine 433 for filename in os.listdir(helas_path): 434 filepos = pjoin(helas_path, filename) 435 if os.path.isfile(filepos): 436 if filepos.endswith('Makefile.template'): 437 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 438 elif filepos.endswith('Makefile'): 439 pass 440 else: 441 cp(filepos, self.dir_path + '/Source/DHELAS')
442 # following lines do the same but whithout symbolic link 443 # 444 #def export_helas(mgme_dir, dir_path): 445 # 446 # # Copy the HELAS directory 447 # helas_dir = pjoin(mgme_dir, 'HELAS') 448 # for filename in os.listdir(helas_dir): 449 # if os.path.isfile(pjoin(helas_dir, filename)): 450 # shutil.copy2(pjoin(helas_dir, filename), 451 # pjoin(dir_path, 'Source', 'DHELAS')) 452 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 453 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 454 # 455 456 #=========================================================================== 457 # generate_subprocess_directory_v4 458 #===========================================================================
 459      def generate_subprocess_directory_v4(self, matrix_element, 
 460                                           fortran_model, 
 461                                           me_number): 
 462          """Routine to generate a subprocess directory (for inheritance)""" 
 463  
 464          pass 
465 466 #=========================================================================== 467 # get_source_libraries_list 468 #===========================================================================
 469      def get_source_libraries_list(self): 
 470          """ Returns the list of libraries to be compiled when compiling the 
 471          SOURCE directory. It is different for loop_induced processes and 
 472          also depends on the value of the 'output_dependencies' option""" 
 473  
 474          return ['$(LIBDIR)libdhelas.$(libext)', 
 475                  '$(LIBDIR)libpdf.$(libext)', 
 476                  '$(LIBDIR)libmodel.$(libext)', 
 477                  '$(LIBDIR)libcernlib.$(libext)'] 
478 479 #=========================================================================== 480 # write_source_makefile 481 #===========================================================================
 482      def write_source_makefile(self, writer): 
 483          """Write the Source directory makefile for MG4/MadEvent""" 
 484  
 485          path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 
 486          set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 
 487          if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 
 488              model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 
 489  MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 
 490  param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 
 491          else: 
 492              model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 
 493  param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 
 494          text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 
 495          writer.write(text) 
 496  
 497          return True 
498 499 #=========================================================================== 500 # write_nexternal_madspin 501 #===========================================================================
502 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
503 """Write the nexternal_prod.inc file for madspin""" 504 505 replace_dict = {} 506 507 replace_dict['nexternal'] = nexternal 508 replace_dict['ninitial'] = ninitial 509 510 file = """ \ 511 integer nexternal_prod 512 parameter (nexternal_prod=%(nexternal)d) 513 integer nincoming_prod 514 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 515 516 # Write the file 517 writer.writelines(file) 518 519 return True
520 521 #=========================================================================== 522 # write_helamp_madspin 523 #===========================================================================
524 - def write_helamp_madspin(self, writer, ncomb):
525 """Write the helamp.inc file for madspin""" 526 527 replace_dict = {} 528 529 replace_dict['ncomb'] = ncomb 530 531 file = """ \ 532 integer ncomb1 533 parameter (ncomb1=%(ncomb)d) 534 double precision helamp(ncomb1) 535 common /to_helamp/helamp """ % replace_dict 536 537 # Write the file 538 writer.writelines(file) 539 540 return True
541 542 543 #=========================================================================== 544 # write_nexternal_file 545 #===========================================================================
 546      def write_nexternal_file(self, writer, nexternal, ninitial): 
 547          """Write the nexternal.inc file for MG4""" 
 548  
 549          replace_dict = {} 
 550  
 551          replace_dict['nexternal'] = nexternal 
 552          replace_dict['ninitial'] = ninitial 
 553  
 554          file = """ \ 
 555            integer nexternal 
 556            parameter (nexternal=%(nexternal)d) 
 557            integer nincoming 
 558            parameter (nincoming=%(ninitial)d)""" % replace_dict 
 559  
 560          # Write the file 
 561          writer.writelines(file) 
 562  
 563          return True 
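As a minimal sketch (assuming an environment where the module imports resolve and ProcessExporterFortran can be instantiated with its defaults; writers.FileWriter is the same writer class used above for the Source makefile), a 2 -> 3 process yields a four-line include:

# Illustrative sketch only: write nexternal.inc for 5 external legs, 2 incoming.
exporter = ProcessExporterFortran(mgme_dir='', dir_path='.')
exporter.write_nexternal_file(writers.FileWriter('nexternal.inc'), 5, 2)
# nexternal.inc then contains, up to the writer's Fortran formatting:
#       integer nexternal
#       parameter (nexternal=5)
#       integer nincoming
#       parameter (nincoming=2)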
564 565 #=========================================================================== 566 # write_pmass_file 567 #===========================================================================
568 - def write_pmass_file(self, writer, matrix_element):
569 """Write the pmass.inc file for MG4""" 570 571 model = matrix_element.get('processes')[0].get('model') 572 573 lines = [] 574 for wf in matrix_element.get_external_wavefunctions(): 575 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 576 if mass.lower() != "zero": 577 mass = "abs(%s)" % mass 578 579 lines.append("pmass(%d)=%s" % \ 580 (wf.get('number_external'), mass)) 581 582 # Write the file 583 writer.writelines(lines) 584 585 return True
586 587 #=========================================================================== 588 # write_ngraphs_file 589 #===========================================================================
590 - def write_ngraphs_file(self, writer, nconfigs):
591 """Write the ngraphs.inc file for MG4. Needs input from 592 write_configs_file.""" 593 594 file = " integer n_max_cg\n" 595 file = file + "parameter (n_max_cg=%d)" % nconfigs 596 597 # Write the file 598 writer.writelines(file) 599 600 return True
601 602 #=========================================================================== 603 # write_leshouche_file 604 #===========================================================================
605 - def write_leshouche_file(self, writer, matrix_element):
606 """Write the leshouche.inc file for MG4""" 607 608 # Write the file 609 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 610 611 return True
612 613 #=========================================================================== 614 # get_leshouche_lines 615 #===========================================================================
616 - def get_leshouche_lines(self, matrix_element, numproc):
617 """Write the leshouche.inc file for MG4""" 618 619 # Extract number of external particles 620 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 621 622 lines = [] 623 for iproc, proc in enumerate(matrix_element.get('processes')): 624 legs = proc.get_legs_with_decays() 625 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 626 (iproc + 1, numproc+1, nexternal, 627 ",".join([str(l.get('id')) for l in legs]))) 628 if iproc == 0 and numproc == 0: 629 for i in [1, 2]: 630 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 631 (i, nexternal, 632 ",".join([ "%3r" % 0 ] * ninitial + \ 633 [ "%3r" % i ] * (nexternal - ninitial)))) 634 635 # Here goes the color connections corresponding to the JAMPs 636 # Only one output, for the first subproc! 637 if iproc == 0: 638 # If no color basis, just output trivial color flow 639 if not matrix_element.get('color_basis'): 640 for i in [1, 2]: 641 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 642 (i, numproc+1,nexternal, 643 ",".join([ "%3r" % 0 ] * nexternal))) 644 645 else: 646 # First build a color representation dictionnary 647 repr_dict = {} 648 for l in legs: 649 repr_dict[l.get('number')] = \ 650 proc.get('model').get_particle(l.get('id')).get_color()\ 651 * (-1)**(1+l.get('state')) 652 # Get the list of color flows 653 color_flow_list = \ 654 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 655 ninitial) 656 # And output them properly 657 for cf_i, color_flow_dict in enumerate(color_flow_list): 658 for i in [0, 1]: 659 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 660 (i + 1, cf_i + 1, numproc+1, nexternal, 661 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 662 for l in legs]))) 663 664 return lines
665 666 667 668 669 #=========================================================================== 670 # write_maxamps_file 671 #===========================================================================
672 - def write_maxamps_file(self, writer, maxamps, maxflows, 673 maxproc,maxsproc):
674 """Write the maxamps.inc file for MG4.""" 675 676 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 677 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 678 (maxamps, maxflows) 679 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 680 (maxproc, maxsproc) 681 682 # Write the file 683 writer.writelines(file) 684 685 return True
686 687 #=========================================================================== 688 # write_props_file 689 #===========================================================================
690 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
691 """Write the props.inc file for MadEvent. Needs input from 692 write_configs_file.""" 693 694 lines = [] 695 696 particle_dict = matrix_element.get('processes')[0].get('model').\ 697 get('particle_dict') 698 699 for iconf, configs in enumerate(s_and_t_channels): 700 for vertex in configs[0] + configs[1][:-1]: 701 leg = vertex.get('legs')[-1] 702 if leg.get('id') not in particle_dict: 703 # Fake propagator used in multiparticle vertices 704 mass = 'zero' 705 width = 'zero' 706 pow_part = 0 707 else: 708 particle = particle_dict[leg.get('id')] 709 # Get mass 710 if particle.get('mass').lower() == 'zero': 711 mass = particle.get('mass') 712 else: 713 mass = "abs(%s)" % particle.get('mass') 714 # Get width 715 if particle.get('width').lower() == 'zero': 716 width = particle.get('width') 717 else: 718 width = "abs(%s)" % particle.get('width') 719 720 pow_part = 1 + int(particle.is_boson()) 721 722 lines.append("prmass(%d,%d) = %s" % \ 723 (leg.get('number'), iconf + 1, mass)) 724 lines.append("prwidth(%d,%d) = %s" % \ 725 (leg.get('number'), iconf + 1, width)) 726 lines.append("pow(%d,%d) = %d" % \ 727 (leg.get('number'), iconf + 1, pow_part)) 728 729 # Write the file 730 writer.writelines(lines) 731 732 return True
733 734 735 736 737 738 #=========================================================================== 739 # Routines to output UFO models in MG4 format 740 #=========================================================================== 741
742 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 743 wanted_couplings = []):
744 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 745 746 # Make sure aloha is in quadruple precision if needed 747 old_aloha_mp=aloha.mp_precision 748 aloha.mp_precision=self.opt['mp'] 749 750 # create the MODEL 751 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 752 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 753 model_builder.build(wanted_couplings) 754 755 # Backup the loop mode, because it can be changed in what follows. 756 old_loop_mode = aloha.loop_mode 757 758 # Create the aloha model or use the existing one (for loop exporters 759 # this is useful as the aloha model will be used again in the 760 # LoopHelasMatrixElements generated). We do not save the model generated 761 # here if it didn't exist already because it would be a waste of 762 # memory for tree level applications since aloha is only needed at the 763 # time of creating the aloha fortran subroutines. 764 if hasattr(self, 'aloha_model'): 765 aloha_model = self.aloha_model 766 else: 767 aloha_model = create_aloha.AbstractALOHAModel(model.get('name')) 768 aloha_model.add_Lorentz_object(model.get('lorentz')) 769 770 # Compute the subroutines 771 if wanted_lorentz: 772 aloha_model.compute_subset(wanted_lorentz) 773 else: 774 aloha_model.compute_all(save=False) 775 776 # Write them out 777 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 778 aloha_model.write(write_dir, 'Fortran') 779 780 # Revert the original aloha loop mode 781 aloha.loop_mode = old_loop_mode 782 783 #copy Helas Template 784 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 785 if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]): 786 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', write_dir+'/aloha_functions.f') 787 aloha_model.loop_mode = False 788 else: 789 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', write_dir+'/aloha_functions.f') 790 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 791 792 # Make final link in the Process 793 self.make_model_symbolic_link() 794 795 # Re-establish original aloha mode 796 aloha.mp_precision=old_aloha_mp
797 798 #=========================================================================== 799 # Helper functions 800 #===========================================================================
801 - def get_mg5_info_lines(self):
802 """Return info lines for MG5, suitable to place at beginning of 803 Fortran files""" 804 805 info = misc.get_pkg_info() 806 info_lines = "" 807 if info and info.has_key('version') and info.has_key('date'): 808 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 809 (info['version'], info['date']) 810 info_lines = info_lines + \ 811 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 812 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 813 else: 814 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 815 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 816 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 817 818 return info_lines
819
 820      def get_process_info_lines(self, matrix_element): 
 821          """Return info lines describing the processes for this matrix element""" 
 822  
 823          return "\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 
 824                            for process in matrix_element.get('processes')]) 
825 826
827 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
828 """Return the Helicity matrix definition lines for this matrix element""" 829 830 helicity_line_list = [] 831 i = 0 832 for helicities in matrix_element.get_helicity_matrix(): 833 i = i + 1 834 int_list = [i, len(helicities)] 835 int_list.extend(helicities) 836 helicity_line_list.append(\ 837 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 838 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 839 840 return "\n".join(helicity_line_list)
841
842 - def get_ic_line(self, matrix_element):
843 """Return the IC definition line coming after helicities, required by 844 switchmom in madevent""" 845 846 nexternal = matrix_element.get_nexternal_ninitial()[0] 847 int_list = range(1, nexternal + 1) 848 849 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 850 ",".join([str(i) for \ 851 i in int_list]))
852
853 - def set_chosen_SO_index(self, process, squared_orders):
854 """ From the squared order constraints set by the user, this function 855 finds what indices of the squared_orders list the user intends to pick. 856 It returns this as a string of comma-separated successive '.true.' or 857 '.false.' for each index.""" 858 859 user_squared_orders = process.get('squared_orders') 860 split_orders = process.get('split_orders') 861 862 if len(user_squared_orders)==0: 863 return ','.join(['.true.']*len(squared_orders)) 864 865 res = [] 866 for sqsos in squared_orders: 867 is_a_match = True 868 for user_sqso, value in user_squared_orders.items(): 869 if (process.get_squared_order_type(user_sqso) =='==' and \ 870 value!=sqsos[split_orders.index(user_sqso)]) or \ 871 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 872 value<sqsos[split_orders.index(user_sqso)]) or \ 873 (process.get_squared_order_type(user_sqso) == '>' and \ 874 value>=sqsos[split_orders.index(user_sqso)]): 875 is_a_match = False 876 break 877 res.append('.true.' if is_a_match else '.false.') 878 879 return ','.join(res)
880
881 - def get_split_orders_lines(self, orders, array_name, n=5):
882 """ Return the split orders definition as defined in the list orders and 883 for the name of the array 'array_name'. Split rows in chunks of size n.""" 884 885 ret_list = [] 886 for index, order in enumerate(orders): 887 for k in xrange(0, len(order), n): 888 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 889 (array_name,index + 1, k + 1, min(k + n, len(order)), 890 ','.join(["%5r" % i for i in order[k:k + n]]))) 891 return ret_list
892
 893      def format_integer_list(self, list, name, n=5): 
 894          """ Return an initialization of the python list given in argument, following 
 895          the fortran syntax of the DATA keyword assignment and filling an array 
 896          of name 'name'. It splits rows in chunks of size n.""" 
 897  
 898          ret_list = [] 
 899          for k in xrange(0, len(list), n): 
 900              ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 
 901                              (name, k + 1, min(k + n, len(list)), 
 902                               ','.join(["%5r" % i for i in list[k:k + n]]))) 
 903          return ret_list 
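For instance (a sketch assuming an exporter instance as above; the array name ICONF is arbitrary), twelve entries chunked with n=5 come out as three DATA statements:

# Illustrative sketch only: chunked DATA initialization of an integer array.
exporter = ProcessExporterFortran()
for line in exporter.format_integer_list(range(1, 13), 'ICONF', n=5):
    print line
# DATA (ICONF(i),i=  1,  5) /    1,    2,    3,    4,    5/
# DATA (ICONF(i),i=  6, 10) /    6,    7,    8,    9,   10/
# DATA (ICONF(i),i= 11, 12) /   11,   12/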
904
905 - def get_color_data_lines(self, matrix_element, n=6):
906 """Return the color matrix definition lines for this matrix element. Split 907 rows in chunks of size n.""" 908 909 if not matrix_element.get('color_matrix'): 910 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 911 else: 912 ret_list = [] 913 my_cs = color.ColorString() 914 for index, denominator in \ 915 enumerate(matrix_element.get('color_matrix').\ 916 get_line_denominators()): 917 # First write the common denominator for this color matrix line 918 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 919 # Then write the numerators for the matrix elements 920 num_list = matrix_element.get('color_matrix').\ 921 get_line_numerators(index, denominator) 922 923 for k in xrange(0, len(num_list), n): 924 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 925 (index + 1, k + 1, min(k + n, len(num_list)), 926 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 927 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 928 ret_list.append("C %s" % repr(my_cs)) 929 return ret_list
930 931
932 - def get_den_factor_line(self, matrix_element):
933 """Return the denominator factor line for this matrix element""" 934 935 return "DATA IDEN/%2r/" % \ 936 matrix_element.get_denominator_factor()
937
938 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
939 """Return the ICOLAMP matrix, showing which JAMPs contribute to 940 which configs (diagrams).""" 941 942 ret_list = [] 943 944 booldict = {False: ".false.", True: ".true."} 945 946 if not matrix_element.get('color_basis'): 947 # No color, so only one color factor. Simply write a ".true." 948 # for each config (i.e., each diagram with only 3 particle 949 # vertices 950 configs = len(mapconfigs) 951 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 952 (num_matrix_element, configs, 953 ','.join([".true." for i in range(configs)]))) 954 return ret_list 955 956 # There is a color basis - create a list showing which JAMPs have 957 # contributions to which configs 958 959 # Only want to include leading color flows, so find max_Nc 960 color_basis = matrix_element.get('color_basis') 961 962 # We don't want to include the power of Nc's which come from the potential 963 # loop color trace (i.e. in the case of a closed fermion loop for example) 964 # so we subtract it here when computing max_Nc 965 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 966 color_basis.values()],[])) 967 968 # Crate dictionary between diagram number and JAMP number 969 diag_jamp = {} 970 for ijamp, col_basis_elem in \ 971 enumerate(sorted(matrix_element.get('color_basis').keys())): 972 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 973 # Only use color flows with Nc == max_Nc. However, notice that 974 # we don't want to include the Nc power coming from the loop 975 # in this counting. 976 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 977 diag_num = diag_tuple[0] + 1 978 # Add this JAMP number to this diag_num 979 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 980 [ijamp+1] 981 982 colamps = ijamp + 1 983 for iconfig, num_diag in enumerate(mapconfigs): 984 if num_diag == 0: 985 continue 986 987 # List of True or False 988 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 989 # Add line 990 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 991 (iconfig+1, num_matrix_element, colamps, 992 ','.join(["%s" % booldict[b] for b in \ 993 bool_list]))) 994 995 return ret_list
996
997 - def get_amp2_lines(self, matrix_element, config_map = []):
998 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 999 1000 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1001 # Get minimum legs in a vertex 1002 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1003 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1004 minvert = min(vert_list) if vert_list!=[] else 0 1005 1006 ret_lines = [] 1007 if config_map: 1008 # In this case, we need to sum up all amplitudes that have 1009 # identical topologies, as given by the config_map (which 1010 # gives the topology/config for each of the diagrams 1011 diagrams = matrix_element.get('diagrams') 1012 # Combine the diagrams with identical topologies 1013 config_to_diag_dict = {} 1014 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1015 if config_map[idiag] == 0: 1016 continue 1017 try: 1018 config_to_diag_dict[config_map[idiag]].append(idiag) 1019 except KeyError: 1020 config_to_diag_dict[config_map[idiag]] = [idiag] 1021 # Write out the AMP2s summing squares of amplitudes belonging 1022 # to eiher the same diagram or different diagrams with 1023 # identical propagator properties. Note that we need to use 1024 # AMP2 number corresponding to the first diagram number used 1025 # for that AMP2. 1026 for config in sorted(config_to_diag_dict.keys()): 1027 1028 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1029 {"num": (config_to_diag_dict[config][0] + 1)} 1030 1031 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1032 sum([diagrams[idiag].get('amplitudes') for \ 1033 idiag in config_to_diag_dict[config]], [])]) 1034 1035 # Not using \sum |M|^2 anymore since this creates troubles 1036 # when ckm is not diagonal due to the JIM mechanism. 1037 if '+' in amp: 1038 line += "(%s)*dconjg(%s)" % (amp, amp) 1039 else: 1040 line += "%s*dconjg(%s)" % (amp, amp) 1041 ret_lines.append(line) 1042 else: 1043 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1044 # Ignore any diagrams with 4-particle vertices. 1045 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1046 continue 1047 # Now write out the expression for AMP2, meaning the sum of 1048 # squared amplitudes belonging to the same diagram 1049 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1050 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1051 {"num": a.get('number')} for a in \ 1052 diag.get('amplitudes')]) 1053 ret_lines.append(line) 1054 1055 return ret_lines
1056 1057 #=========================================================================== 1058 # Returns the data statements initializing the coeffictients for the JAMP 1059 # decomposition. It is used when the JAMP initialization is decided to be 1060 # done through big arrays containing the projection coefficients. 1061 #===========================================================================
1062 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1063 n=50, Nc_value=3):
1064 """This functions return the lines defining the DATA statement setting 1065 the coefficients building the JAMPS out of the AMPS. Split rows in 1066 bunches of size n. 1067 One can specify the color_basis from which the color amplitudes originates 1068 so that there are commentaries telling what color structure each JAMP 1069 corresponds to.""" 1070 1071 if(not isinstance(color_amplitudes,list) or 1072 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1073 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1074 1075 res_list = [] 1076 my_cs = color.ColorString() 1077 for index, coeff_list in enumerate(color_amplitudes): 1078 # Create the list of the complete numerical coefficient. 1079 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1080 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1081 coefficient in coeff_list] 1082 # Create the list of the numbers of the contributing amplitudes. 1083 # Mutliply by -1 for those which have an imaginary coefficient. 1084 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1085 for coefficient in coeff_list] 1086 # Find the common denominator. 1087 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1088 num_list=[(coefficient*commondenom).numerator \ 1089 for coefficient in coefs_list] 1090 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1091 index+1,len(num_list))) 1092 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1093 index+1,commondenom)) 1094 if color_basis: 1095 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1096 res_list.append("C %s" % repr(my_cs)) 1097 for k in xrange(0, len(num_list), n): 1098 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1099 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1100 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1101 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1102 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1103 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1104 pass 1105 return res_list
1106 1107
1108 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1109 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1110 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1111 defined as a matrix element or directly as a color_amplitudes dictionary. 1112 The split_order_amps specifies the group of amplitudes sharing the same 1113 amplitude orders which should be put in together in a given set of JAMPS. 1114 The split_order_amps is supposed to have the format of the second output 1115 of the function get_split_orders_mapping function in helas_objects.py. 1116 The split_order_names is optional (it should correspond to the process 1117 'split_orders' attribute) and only present to provide comments in the 1118 JAMP definitions in the code.""" 1119 1120 # Let the user call get_JAMP_lines_split_order directly from a 1121 error_msg="Malformed '%s' argument passed to the "+\ 1122 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1123 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1124 color_amplitudes=col_amps.get_color_amplitudes() 1125 elif(isinstance(col_amps,list)): 1126 if(col_amps and isinstance(col_amps[0],list)): 1127 color_amplitudes=col_amps 1128 else: 1129 raise MadGraph5Error, error_msg%'col_amps' 1130 else: 1131 raise MadGraph5Error, error_msg%'col_amps' 1132 1133 # Verify the sanity of the split_order_amps and split_order_names args 1134 if isinstance(split_order_amps,list): 1135 for elem in split_order_amps: 1136 if len(elem)!=2: 1137 raise MadGraph5Error, error_msg%'split_order_amps' 1138 # Check the first element of the two lists to make sure they are 1139 # integers, although in principle they should all be integers. 1140 if not isinstance(elem[0],tuple) or \ 1141 not isinstance(elem[1],tuple) or \ 1142 not isinstance(elem[0][0],int) or \ 1143 not isinstance(elem[1][0],int): 1144 raise MadGraph5Error, error_msg%'split_order_amps' 1145 else: 1146 raise MadGraph5Error, error_msg%'split_order_amps' 1147 1148 if not split_order_names is None: 1149 if isinstance(split_order_names,list): 1150 # Should specify the same number of names as there are elements 1151 # in the key of the split_order_amps. 1152 if len(split_order_names)!=len(split_order_amps[0][0]): 1153 raise MadGraph5Error, error_msg%'split_order_names' 1154 # Check the first element of the list to be a string 1155 if not isinstance(split_order_names[0],str): 1156 raise MadGraph5Error, error_msg%'split_order_names' 1157 else: 1158 raise MadGraph5Error, error_msg%'split_order_names' 1159 1160 # Now scan all contributing orders to be individually computed and 1161 # construct the list of color_amplitudes for JAMP to be constructed 1162 # accordingly. 1163 res_list=[] 1164 for i, amp_order in enumerate(split_order_amps): 1165 col_amps_order = [] 1166 for jamp in color_amplitudes: 1167 col_amps_order.append(filter(lambda col_amp: 1168 col_amp[1] in amp_order[1],jamp)) 1169 if split_order_names: 1170 res_list.append('C JAMPs contributing to orders '+' '.join( 1171 ['%s=%i'%order for order in zip(split_order_names, 1172 amp_order[0])])) 1173 if self.opt['export_format'] in ['madloop_matchbox']: 1174 res_list.extend(self.get_JAMP_lines(col_amps_order, 1175 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1176 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1177 else: 1178 res_list.extend(self.get_JAMP_lines(col_amps_order, 1179 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1180 1181 return res_list
1182 1183
1184 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1185 split=-1):
1186 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1187 defined as a matrix element or directly as a color_amplitudes dictionary, 1188 Jamp_formatLC should be define to allow to add LeadingColor computation 1189 (usefull for MatchBox) 1190 The split argument defines how the JAMP lines should be split in order 1191 not to be too long.""" 1192 1193 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1194 # the color amplitudes lists. 1195 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1196 color_amplitudes=col_amps.get_color_amplitudes() 1197 elif(isinstance(col_amps,list)): 1198 if(col_amps and isinstance(col_amps[0],list)): 1199 color_amplitudes=col_amps 1200 else: 1201 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1202 else: 1203 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1204 1205 1206 res_list = [] 1207 for i, coeff_list in enumerate(color_amplitudes): 1208 # It might happen that coeff_list is empty if this function was 1209 # called from get_JAMP_lines_split_order (i.e. if some color flow 1210 # does not contribute at all for a given order). 1211 # In this case we simply set it to 0. 1212 if coeff_list==[]: 1213 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1214 continue 1215 # Break the JAMP definition into 'n=split' pieces to avoid having 1216 # arbitrarly long lines. 1217 first=True 1218 n = (len(coeff_list)+1 if split<=0 else split) 1219 while coeff_list!=[]: 1220 coefs=coeff_list[:n] 1221 coeff_list=coeff_list[n:] 1222 res = ((JAMP_format+"=") % str(i + 1)) + \ 1223 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1224 1225 first=False 1226 # Optimization: if all contributions to that color basis element have 1227 # the same coefficient (up to a sign), put it in front 1228 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1229 common_factor = False 1230 diff_fracs = list(set(list_fracs)) 1231 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1232 common_factor = True 1233 global_factor = diff_fracs[0] 1234 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1235 1236 # loop for JAMP 1237 for (coefficient, amp_number) in coefs: 1238 if not coefficient: 1239 continue 1240 if common_factor: 1241 res = (res + "%s" + AMP_format) % \ 1242 (self.coeff(coefficient[0], 1243 coefficient[1] / abs(coefficient[1]), 1244 coefficient[2], 1245 coefficient[3]), 1246 str(amp_number)) 1247 else: 1248 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1249 coefficient[1], 1250 coefficient[2], 1251 coefficient[3]), 1252 str(amp_number)) 1253 1254 if common_factor: 1255 res = res + ')' 1256 1257 res_list.append(res) 1258 1259 return res_list
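The expected shape of col_amps can be seen from a small hand-built color_amplitudes list (a sketch; the coefficient tuples (fermion_factor, fraction, is_imaginary, Nc_power) and amplitude numbers are invented). The common fraction 1/3 is factored out in front, while each term keeps its own Nc power:

# Illustrative sketch only: one JAMP built from two AMPs sharing a fraction.
import fractions
col_amps = [[((1, fractions.Fraction(1, 3), False, 1), 1),
             ((-1, fractions.Fraction(1, 3), False, 1), 2)]]
exporter = ProcessExporterFortran()
print exporter.get_JAMP_lines(col_amps)[0]
# JAMP(1)=+1D0/3D0*(+3D0*AMP(1)-3D0*AMP(2))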
1260
1261 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1262 """Generate the PDF lines for the auto_dsig.f file""" 1263 1264 processes = matrix_element.get('processes') 1265 model = processes[0].get('model') 1266 1267 pdf_definition_lines = "" 1268 pdf_data_lines = "" 1269 pdf_lines = "" 1270 1271 if ninitial == 1: 1272 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1273 for i, proc in enumerate(processes): 1274 process_line = proc.base_string() 1275 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1276 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1277 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1278 else: 1279 # Pick out all initial state particles for the two beams 1280 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1281 p in processes]))), 1282 sorted(list(set([p.get_initial_pdg(2) for \ 1283 p in processes])))] 1284 1285 # Prepare all variable names 1286 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1287 sum(initial_states,[])]) 1288 for key,val in pdf_codes.items(): 1289 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1290 1291 # Set conversion from PDG code to number used in PDF calls 1292 pdgtopdf = {21: 0, 22: 7} 1293 1294 # Fill in missing entries of pdgtopdf 1295 for pdg in sum(initial_states,[]): 1296 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 1297 pdgtopdf[pdg] = pdg 1298 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 1299 # If any particle has pdg code 7, we need to use something else 1300 pdgtopdf[pdg] = 6000000 + pdg 1301 1302 # Get PDF variable declarations for all initial states 1303 for i in [0,1]: 1304 pdf_definition_lines += "DOUBLE PRECISION " + \ 1305 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1306 for pdg in \ 1307 initial_states[i]]) + \ 1308 "\n" 1309 1310 # Get PDF data lines for all initial states 1311 for i in [0,1]: 1312 pdf_data_lines += "DATA " + \ 1313 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1314 for pdg in initial_states[i]]) + \ 1315 "/%d*1D0/" % len(initial_states[i]) + \ 1316 "\n" 1317 1318 # Get PDF lines for all different initial states 1319 for i, init_states in enumerate(initial_states): 1320 if subproc_group: 1321 pdf_lines = pdf_lines + \ 1322 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1323 % (i + 1, i + 1) 1324 else: 1325 pdf_lines = pdf_lines + \ 1326 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1327 % (i + 1, i + 1) 1328 1329 for initial_state in init_states: 1330 if initial_state in pdf_codes.keys(): 1331 if subproc_group: 1332 pdf_lines = pdf_lines + \ 1333 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 1334 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1335 (pdf_codes[initial_state], 1336 i + 1, i + 1, pdgtopdf[initial_state], 1337 i + 1, i + 1) 1338 else: 1339 pdf_lines = pdf_lines + \ 1340 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 1341 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1342 (pdf_codes[initial_state], 1343 i + 1, i + 1, pdgtopdf[initial_state], 1344 i + 1, i + 1) 1345 pdf_lines = pdf_lines + "ENDIF\n" 1346 1347 # Add up PDFs for the different initial state particles 1348 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1349 for proc in processes: 1350 process_line = proc.base_string() 1351 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1352 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1353 for ibeam in [1, 2]: 1354 initial_state = proc.get_initial_pdg(ibeam) 1355 if initial_state in pdf_codes.keys(): 1356 pdf_lines = pdf_lines + "%s%d*" % \ 1357 (pdf_codes[initial_state], ibeam) 1358 else: 1359 pdf_lines = pdf_lines + "1d0*" 1360 # Remove last "*" from pdf_lines 1361 pdf_lines = pdf_lines[:-1] + "\n" 1362 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1363 1364 # Remove last line break from the return variables 1365 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1366 1367 #=========================================================================== 1368 # write_props_file 1369 #===========================================================================
1370 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1371 """Write the props.inc file for MadEvent. Needs input from 1372 write_configs_file.""" 1373 1374 lines = [] 1375 1376 particle_dict = matrix_element.get('processes')[0].get('model').\ 1377 get('particle_dict') 1378 1379 for iconf, configs in enumerate(s_and_t_channels): 1380 for vertex in configs[0] + configs[1][:-1]: 1381 leg = vertex.get('legs')[-1] 1382 if leg.get('id') not in particle_dict: 1383 # Fake propagator used in multiparticle vertices 1384 mass = 'zero' 1385 width = 'zero' 1386 pow_part = 0 1387 else: 1388 particle = particle_dict[leg.get('id')] 1389 # Get mass 1390 if particle.get('mass').lower() == 'zero': 1391 mass = particle.get('mass') 1392 else: 1393 mass = "abs(%s)" % particle.get('mass') 1394 # Get width 1395 if particle.get('width').lower() == 'zero': 1396 width = particle.get('width') 1397 else: 1398 width = "abs(%s)" % particle.get('width') 1399 1400 pow_part = 1 + int(particle.is_boson()) 1401 1402 lines.append("prmass(%d,%d) = %s" % \ 1403 (leg.get('number'), iconf + 1, mass)) 1404 lines.append("prwidth(%d,%d) = %s" % \ 1405 (leg.get('number'), iconf + 1, width)) 1406 lines.append("pow(%d,%d) = %d" % \ 1407 (leg.get('number'), iconf + 1, pow_part)) 1408 1409 # Write the file 1410 writer.writelines(lines) 1411 1412 return True
1413 1414 #=========================================================================== 1415 # write_configs_file 1416 #===========================================================================
1417 - def write_configs_file(self, writer, matrix_element):
1418 """Write the configs.inc file for MadEvent""" 1419 1420 # Extract number of external particles 1421 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1422 1423 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1424 mapconfigs = [c[0] for c in configs] 1425 model = matrix_element.get('processes')[0].get('model') 1426 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1427 [[c[1]] for c in configs], 1428 mapconfigs, 1429 nexternal, ninitial, 1430 model)
1431 1432 #=========================================================================== 1433 # write_configs_file_from_diagrams 1434 #===========================================================================
1435 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1436 nexternal, ninitial, model):
1437 """Write the actual configs.inc file. 1438 1439 configs is the diagrams corresponding to configs (each 1440 diagrams is a list of corresponding diagrams for all 1441 subprocesses, with None if there is no corresponding diagrams 1442 for a given process). 1443 mapconfigs gives the diagram number for each config. 1444 1445 For s-channels, we need to output one PDG for each subprocess in 1446 the subprocess group, in order to be able to pick the right 1447 one for multiprocesses.""" 1448 1449 lines = [] 1450 1451 s_and_t_channels = [] 1452 1453 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1454 for config in configs if [d for d in config if d][0].\ 1455 get_vertex_leg_numbers()!=[]] 1456 minvert = min(vert_list) if vert_list!=[] else 0 1457 1458 # Number of subprocesses 1459 nsubprocs = len(configs[0]) 1460 1461 nconfigs = 0 1462 1463 new_pdg = model.get_first_non_pdg() 1464 1465 for iconfig, helas_diags in enumerate(configs): 1466 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1467 [0].get_vertex_leg_numbers()) : 1468 # Only 3-vertices allowed in configs.inc except for vertices 1469 # which originate from a shrunk loop. 1470 continue 1471 nconfigs += 1 1472 1473 # Need s- and t-channels for all subprocesses, including 1474 # those that don't contribute to this config 1475 empty_verts = [] 1476 stchannels = [] 1477 for h in helas_diags: 1478 if h: 1479 # get_s_and_t_channels gives vertices starting from 1480 # final state external particles and working inwards 1481 stchannels.append(h.get('amplitudes')[0].\ 1482 get_s_and_t_channels(ninitial, model, new_pdg)) 1483 else: 1484 stchannels.append((empty_verts, None)) 1485 1486 # For t-channels, just need the first non-empty one 1487 tchannels = [t for s,t in stchannels if t != None][0] 1488 1489 # For s_and_t_channels (to be used later) use only first config 1490 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1491 tchannels]) 1492 1493 # Make sure empty_verts is same length as real vertices 1494 if any([s for s,t in stchannels]): 1495 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1496 1497 # Reorganize s-channel vertices to get a list of all 1498 # subprocesses for each vertex 1499 schannels = zip(*[s for s,t in stchannels]) 1500 else: 1501 schannels = [] 1502 1503 allchannels = schannels 1504 if len(tchannels) > 1: 1505 # Write out tchannels only if there are any non-trivial ones 1506 allchannels = schannels + tchannels 1507 1508 # Write out propagators for s-channel and t-channel vertices 1509 1510 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1511 # Correspondance between the config and the diagram = amp2 1512 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1513 mapconfigs[iconfig])) 1514 1515 for verts in allchannels: 1516 if verts in schannels: 1517 vert = [v for v in verts if v][0] 1518 else: 1519 vert = verts 1520 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1521 last_leg = vert.get('legs')[-1] 1522 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1523 (last_leg.get('number'), nconfigs, len(daughters), 1524 ",".join([str(d) for d in daughters]))) 1525 if verts in schannels: 1526 pdgs = [] 1527 for v in verts: 1528 if v: 1529 pdgs.append(v.get('legs')[-1].get('id')) 1530 else: 1531 pdgs.append(0) 1532 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1533 (last_leg.get('number'), nconfigs, nsubprocs, 1534 ",".join([str(d) for d in pdgs]))) 1535 lines.append("data tprid(%d,%d)/0/" % \ 1536 (last_leg.get('number'), 
nconfigs)) 1537 elif verts in tchannels[:-1]: 1538 lines.append("data tprid(%d,%d)/%d/" % \ 1539 (last_leg.get('number'), nconfigs, 1540 abs(last_leg.get('id')))) 1541 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1542 (last_leg.get('number'), nconfigs, nsubprocs, 1543 ",".join(['0'] * nsubprocs))) 1544 1545 # Write out number of configs 1546 lines.append("# Number of configs") 1547 lines.append("data mapconfig(0)/%d/" % nconfigs) 1548 1549 # Write the file 1550 writer.writelines(lines) 1551 1552 return s_and_t_channels
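# Illustration, as a comment only: for a single subprocess containing one 2->2
# s-channel diagram whose internal propagator is leg -1 (hypothetical PDG 23)
# joining final-state legs 3 and 4, the routine above would emit
#     # Diagram 1
#     data mapconfig(1)/1/
#     data (iforest(i,-1,1),i=1,2)/3,4/
#     data (sprop(i,-1,1),i=1,1)/23/
#     data tprid(-1,1)/0/
#     # Number of configs
#     data mapconfig(0)/1/
# With several subprocesses in the group, sprop carries one PDG per subprocess
# (0 where a subprocess has no matching diagram), which is what allows a single
# configs.inc to serve a whole multiprocess directory.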
1553 1554 #=========================================================================== 1555 # Global helper methods 1556 #=========================================================================== 1557
1558 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1559 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1560 1561 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1562 1563 if total_coeff == 1: 1564 if is_imaginary: 1565 return '+imag1*' 1566 else: 1567 return '+' 1568 elif total_coeff == -1: 1569 if is_imaginary: 1570 return '-imag1*' 1571 else: 1572 return '-' 1573 1574 res_str = '%+iD0' % total_coeff.numerator 1575 1576 if total_coeff.denominator != 1: 1577 # Check if total_coeff is an integer 1578 res_str = res_str + '/%iD0' % total_coeff.denominator 1579 1580 if is_imaginary: 1581 res_str = res_str + '*imag1' 1582 1583 return res_str + '*'
1584 1585
1586 - def set_fortran_compiler(self, default_compiler, force=False):
1587 """Set compiler based on what's available on the system""" 1588 1589 # Check for compiler 1590 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1591 f77_compiler = default_compiler['fortran'] 1592 elif misc.which('gfortran'): 1593 f77_compiler = 'gfortran' 1594 elif misc.which('g77'): 1595 f77_compiler = 'g77' 1596 elif misc.which('f77'): 1597 f77_compiler = 'f77' 1598 elif default_compiler['fortran']: 1599 logger.warning('No Fortran Compiler detected! Please install one') 1600 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1601 else: 1602 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1603 logger.info('Use Fortran compiler ' + f77_compiler) 1604 1605 1606 # Check for compiler. 1. set default. 1607 if default_compiler['f2py']: 1608 f2py_compiler = default_compiler['f2py'] 1609 else: 1610 f2py_compiler = '' 1611 # Try to find the correct one. 1612 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1613 f2py_compiler = default_compiler 1614 elif misc.which('f2py'): 1615 f2py_compiler = 'f2py' 1616 elif sys.version_info[1] == 6: 1617 if misc.which('f2py-2.6'): 1618 f2py_compiler = 'f2py-2.6' 1619 elif misc.which('f2py2.6'): 1620 f2py_compiler = 'f2py2.6' 1621 elif sys.version_info[1] == 7: 1622 if misc.which('f2py-2.7'): 1623 f2py_compiler = 'f2py-2.7' 1624 elif misc.which('f2py2.7'): 1625 f2py_compiler = 'f2py2.7' 1626 1627 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1628 1629 1630 self.replace_make_opt_f_compiler(to_replace) 1631 # Replace also for Template but not for cluster 1632 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1633 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1634 1635 return f77_compiler
1636 1637 # an alias for backward compatibility 1638 set_compiler = set_fortran_compiler 1639 1640
1641 - def set_cpp_compiler(self, default_compiler, force=False):
1642 """Set compiler based on what's available on the system""" 1643 1644 # Check for compiler 1645 if default_compiler and misc.which(default_compiler): 1646 compiler = default_compiler 1647 elif misc.which('g++'): 1648 #check if clang version 1649 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1650 stderr=subprocess.PIPE) 1651 out, _ = p.communicate() 1652 if 'clang' in out and misc.which('clang'): 1653 compiler = 'clang' 1654 else: 1655 compiler = 'g++' 1656 elif misc.which('c++'): 1657 compiler = 'c++' 1658 elif misc.which('clang'): 1659 compiler = 'clang' 1660 elif default_compiler: 1661 logger.warning('No c++ Compiler detected! Please install one') 1662 compiler = default_compiler # maybe misc fail so try with it 1663 else: 1664 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1665 logger.info('Use c++ compiler ' + compiler) 1666 self.replace_make_opt_c_compiler(compiler) 1667 # Replace also for Template but not for cluster 1668 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1669 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1670 1671 return compiler
1672 1673
1674 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1675 """Set FC=compiler in Source/make_opts""" 1676 1677 assert isinstance(compilers, dict) 1678 1679 mod = False #avoid to rewrite the file if not needed 1680 if not root_dir: 1681 root_dir = self.dir_path 1682 1683 compiler= compilers['fortran'] 1684 f2py_compiler = compilers['f2py'] 1685 if not f2py_compiler: 1686 f2py_compiler = 'f2py' 1687 1688 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1689 lines = open(make_opts).read().split('\n') 1690 FC_re = re.compile('^(\s*)(FC|F2PY)\s*=\s*(.+)\s*$') 1691 for iline, line in enumerate(lines): 1692 1693 FC_result = FC_re.match(line) 1694 if FC_result: 1695 if 'FC' == FC_result.group(2): 1696 if compiler != FC_result.group(3): 1697 mod = True 1698 lines[iline] = FC_result.group(1) + "FC=" + compiler 1699 elif 'F2PY' == FC_result.group(2): 1700 if f2py_compiler != FC_result.group(3): 1701 mod = True 1702 lines[iline] = FC_result.group(1) + "F2PY=" + f2py_compiler 1703 if not mod: 1704 return 1705 1706 try: 1707 outfile = open(make_opts, 'w') 1708 except IOError: 1709 if root_dir == self.dir_path: 1710 logger.info('Fail to set compiler. Trying to continue anyway.') 1711 return 1712 outfile.write('\n'.join(lines))
1713 1714
1715 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1716 """Set CXX=compiler in Source/make_opts. 1717 The version is also checked, in order to set some extra flags 1718 if the compiler is clang (on MACOS)""" 1719 1720 1721 p = misc.Popen([compiler, '--version'], stdout=subprocess.PIPE, 1722 stderr=subprocess.PIPE) 1723 output, error = p.communicate() 1724 is_clang = 'LLVM' in output 1725 1726 mod = False #avoid to rewrite the file if not needed 1727 if not root_dir: 1728 root_dir = self.dir_path 1729 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1730 lines = open(make_opts).read().split('\n') 1731 CC_re = re.compile('^(\s*)CXX\s*=\s*(.+)\s*$') 1732 for iline, line in enumerate(lines): 1733 CC_result = CC_re.match(line) 1734 if CC_result: 1735 if compiler != CC_result.group(2): 1736 mod = True 1737 lines[iline] = CC_result.group(1) + "CXX=" + compiler 1738 1739 if is_clang: 1740 CFLAGS_re=re.compile('^(\s*)CFLAGS\s*=\s*(.+)\s*$') 1741 CXXFLAGS_re=re.compile('^(\s*)CXXFLAGS\s*=\s*(.+)\s*$') 1742 flags= '-O -stdlib=libstdc++ -mmacosx-version-min=10.6' 1743 for iline, line in enumerate(lines): 1744 CF_result = CFLAGS_re.match(line) 1745 CXXF_result = CXXFLAGS_re.match(line) 1746 if CF_result: 1747 lines[iline] = CF_result.group(1) + "CFLAGS= " + flags 1748 if CXXF_result: 1749 lines[iline] = CXXF_result.group(1) + "CXXFLAGS= " + flags 1750 if not mod: 1751 return 1752 try: 1753 outfile = open(make_opts, 'w') 1754 except IOError: 1755 if root_dir == self.dir_path: 1756 logger.info('Fail to set compiler. Trying to continue anyway.') 1757 return 1758 outfile.write('\n'.join(lines))
1759
1760 #=============================================================================== 1761 # ProcessExporterFortranSA 1762 #=============================================================================== 1763 -class ProcessExporterFortranSA(ProcessExporterFortran):
1764 """Class to take care of exporting a set of matrix elements to 1765 MadGraph v4 StandAlone format.""" 1766 1767 matrix_template = "matrix_standalone_v4.inc" 1768
1769 - def __init__(self, *args, **opts):
1770 """add the format information compare to standard init""" 1771 1772 if 'format' in opts: 1773 self.format = opts['format'] 1774 del opts['format'] 1775 else: 1776 self.format = 'standalone' 1777 ProcessExporterFortran.__init__(self, *args, **opts)
1778
1779 - def copy_v4template(self, modelname):
1780 """Additional actions needed for setup of Template 1781 """ 1782 1783 #First copy the full template tree if dir_path doesn't exit 1784 if os.path.isdir(self.dir_path): 1785 return 1786 1787 logger.info('initialize a new standalone directory: %s' % \ 1788 os.path.basename(self.dir_path)) 1789 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1790 1791 # Create the directory structure 1792 os.mkdir(self.dir_path) 1793 os.mkdir(pjoin(self.dir_path, 'Source')) 1794 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1795 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1796 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1797 os.mkdir(pjoin(self.dir_path, 'bin')) 1798 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1799 os.mkdir(pjoin(self.dir_path, 'lib')) 1800 os.mkdir(pjoin(self.dir_path, 'Cards')) 1801 1802 # Information at top-level 1803 #Write version info 1804 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1805 try: 1806 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1807 except IOError: 1808 MG5_version = misc.get_pkg_info() 1809 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1810 "5." + MG5_version['version']) 1811 1812 1813 # Add file in SubProcesses 1814 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1815 pjoin(self.dir_path, 'SubProcesses', 'makefile')) 1816 1817 if self.format == 'standalone': 1818 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1819 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1820 1821 # Add file in Source 1822 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1823 pjoin(self.dir_path, 'Source')) 1824 # add the makefile 1825 filename = pjoin(self.dir_path,'Source','makefile') 1826 self.write_source_makefile(writers.FileWriter(filename))
1827 1828 #=========================================================================== 1829 # export model files 1830 #===========================================================================
1831 - def export_model_files(self, model_path):
1832 """export the model dependent files for V4 model""" 1833 1834 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1835 # Add the routine update_as_param in v4 model 1836 # This is a function created in the UFO 1837 text=""" 1838 subroutine update_as_param() 1839 call setpara('param_card.dat',.false.) 1840 return 1841 end 1842 """ 1843 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1844 ff.write(text) 1845 ff.close() 1846 1847 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1848 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1849 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1850 fsock.write(text) 1851 fsock.close() 1852 1853 self.make_model_symbolic_link()
1854 1855 #=========================================================================== 1856 # Make the Helas and Model directories for Standalone directory 1857 #===========================================================================
1858 - def make(self):
1859 """Run make in the DHELAS and MODEL directories, to set up 1860 everything for running standalone 1861 """ 1862 1863 source_dir = pjoin(self.dir_path, "Source") 1864 logger.info("Running make for Helas") 1865 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1866 logger.info("Running make for Model") 1867 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1868 1869 #=========================================================================== 1870 # Create proc_card_mg5.dat for Standalone directory 1871 #===========================================================================
1872 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 1873 online = False, compiler=default_compiler):
1874 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 1875 1876 self.compiler_choice(compiler) 1877 self.make() 1878 1879 # Write command history as proc_card_mg5 1880 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 1881 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 1882 history.write(output_file) 1883 1884 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler) 1885 open(pjoin(self.dir_path,'__init__.py'),'w') 1886 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')
1887 1888
1889 - def compiler_choice(self, compiler):
1890 """ Different daughter classes might want different compilers. 1891 So this function is meant to be overloaded if desired.""" 1892 1893 self.set_compiler(compiler)
1894 1895 #=========================================================================== 1896 # generate_subprocess_directory_v4 1897 #===========================================================================
1898 - def generate_subprocess_directory_v4(self, matrix_element, 1899 fortran_model):
1900 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 1901 including the necessary matrix.f and nexternal.inc files""" 1902 1903 cwd = os.getcwd() 1904 1905 # Create the directory PN_xx_xxxxx in the specified path 1906 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 1907 "P%s" % matrix_element.get('processes')[0].shell_string()) 1908 1909 if self.opt['sa_symmetry']: 1910 # avoid symmetric output 1911 for i,proc in enumerate(matrix_element.get('processes')): 1912 1913 initial = [] #filled in the next line 1914 final = [l.get('id') for l in proc.get('legs')\ 1915 if l.get('state') or initial.append(l.get('id'))] 1916 decay_finals = proc.get_final_ids_after_decay() 1917 decay_finals.sort() 1918 tag = (tuple(initial), tuple(decay_finals)) 1919 legs = proc.get('legs')[:] 1920 leg0 = proc.get('legs')[0] 1921 leg1 = proc.get('legs')[1] 1922 if not leg1.get('state'): 1923 proc.get('legs')[0] = leg1 1924 proc.get('legs')[1] = leg0 1925 flegs = proc.get('legs')[2:] 1926 for perm in itertools.permutations(flegs): 1927 for i,p in enumerate(perm): 1928 proc.get('legs')[i+2] = p 1929 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 1930 "P%s" % proc.shell_string()) 1931 #restore original order 1932 proc.get('legs')[2:] = legs[2:] 1933 if os.path.exists(dirpath2): 1934 proc.get('legs')[:] = legs 1935 return 0 1936 proc.get('legs')[:] = legs 1937 1938 try: 1939 os.mkdir(dirpath) 1940 except os.error as error: 1941 logger.warning(error.strerror + " " + dirpath) 1942 1943 #try: 1944 # os.chdir(dirpath) 1945 #except os.error: 1946 # logger.error('Could not cd to directory %s' % dirpath) 1947 # return 0 1948 1949 logger.info('Creating files in directory %s' % dirpath) 1950 1951 # Extract number of external particles 1952 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1953 1954 # Create the matrix.f file and the nexternal.inc file 1955 if self.opt['export_format']=='standalone_msP': 1956 filename = pjoin(dirpath, 'matrix_prod.f') 1957 else: 1958 filename = pjoin(dirpath, 'matrix.f') 1959 calls = self.write_matrix_element_v4( 1960 writers.FortranWriter(filename), 1961 matrix_element, 1962 fortran_model) 1963 1964 if self.opt['export_format'] == 'standalone_msP': 1965 filename = pjoin(dirpath,'configs_production.inc') 1966 mapconfigs, s_and_t_channels = self.write_configs_file(\ 1967 writers.FortranWriter(filename), 1968 matrix_element) 1969 1970 filename = pjoin(dirpath,'props_production.inc') 1971 self.write_props_file(writers.FortranWriter(filename), 1972 matrix_element, 1973 s_and_t_channels) 1974 1975 filename = pjoin(dirpath,'nexternal_prod.inc') 1976 self.write_nexternal_madspin(writers.FortranWriter(filename), 1977 nexternal, ninitial) 1978 1979 if self.opt['export_format']=='standalone_msF': 1980 filename = pjoin(dirpath, 'helamp.inc') 1981 ncomb=matrix_element.get_helicity_combinations() 1982 self.write_helamp_madspin(writers.FortranWriter(filename), 1983 ncomb) 1984 1985 filename = pjoin(dirpath, 'nexternal.inc') 1986 self.write_nexternal_file(writers.FortranWriter(filename), 1987 nexternal, ninitial) 1988 1989 filename = pjoin(dirpath, 'pmass.inc') 1990 self.write_pmass_file(writers.FortranWriter(filename), 1991 matrix_element) 1992 1993 filename = pjoin(dirpath, 'ngraphs.inc') 1994 self.write_ngraphs_file(writers.FortranWriter(filename), 1995 len(matrix_element.get_all_amplitudes())) 1996 1997 # Generate diagrams 1998 filename = pjoin(dirpath, "matrix.ps") 1999 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2000 get('diagrams'), 2001 
filename, 2002 model=matrix_element.get('processes')[0].\ 2003 get('model'), 2004 amplitude=True) 2005 logger.info("Generating Feynman diagrams for " + \ 2006 matrix_element.get('processes')[0].nice_string()) 2007 plot.draw() 2008 2009 linkfiles = ['check_sa.f', 'coupl.inc', 'makefile'] 2010 2011 for file in linkfiles: 2012 ln('../%s' % file, cwd=dirpath) 2013 2014 # Return to original PWD 2015 #os.chdir(cwd) 2016 2017 if not calls: 2018 calls = 0 2019 return calls
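# Illustration, as a comment only: for a hypothetical process u u~ > t t~ the
# call above populates
#     SubProcesses/P1_uux_ttx/
#         matrix.f (matrix_prod.f for standalone_msP), nexternal.inc, pmass.inc,
#         ngraphs.inc, matrix.ps
#         check_sa.f, coupl.inc, makefile    (symbolic links to ../)
# plus configs_production.inc, props_production.inc and nexternal_prod.inc for
# standalone_msP, and helamp.inc for standalone_msF.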
2020 2021 2022 #=========================================================================== 2023 # write_source_makefile 2024 #===========================================================================
2025 - def write_source_makefile(self, writer):
2026 """Write the nexternal.inc file for MG4""" 2027 2028 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2029 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2030 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2031 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 2032 writer.write(text) 2033 2034 return True
2035 2036 #=========================================================================== 2037 # write_matrix_element_v4 2038 #===========================================================================
2039 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2040 write=True, proc_prefix=''):
2041 """Export a matrix element to a matrix.f file in MG4 standalone format 2042 if write is on False, just return the replace_dict and not write anything.""" 2043 2044 2045 if not matrix_element.get('processes') or \ 2046 not matrix_element.get('diagrams'): 2047 return 0 2048 2049 if not isinstance(writer, writers.FortranWriter): 2050 raise writers.FortranWriter.FortranWriterError(\ 2051 "writer not FortranWriter but %s" % type(writer)) 2052 2053 if not self.opt.has_key('sa_symmetry'): 2054 self.opt['sa_symmetry']=False 2055 2056 # Set lowercase/uppercase Fortran code 2057 writers.FortranWriter.downcase = False 2058 2059 # The proc_id is for MadEvent grouping which is never used in SA. 2060 replace_dict = {'global_variable':'', 'amp2_lines':'', 2061 'proc_prefix':proc_prefix, 'proc_id':''} 2062 2063 # Extract helas calls 2064 helas_calls = fortran_model.get_matrix_element_calls(\ 2065 matrix_element) 2066 2067 replace_dict['helas_calls'] = "\n".join(helas_calls) 2068 2069 # Extract version number and date from VERSION file 2070 info_lines = self.get_mg5_info_lines() 2071 replace_dict['info_lines'] = info_lines 2072 2073 # Extract process info lines 2074 process_lines = self.get_process_info_lines(matrix_element) 2075 replace_dict['process_lines'] = process_lines 2076 2077 # Extract number of external particles 2078 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2079 replace_dict['nexternal'] = nexternal 2080 2081 # Extract ncomb 2082 ncomb = matrix_element.get_helicity_combinations() 2083 replace_dict['ncomb'] = ncomb 2084 2085 # Extract helicity lines 2086 helicity_lines = self.get_helicity_lines(matrix_element) 2087 replace_dict['helicity_lines'] = helicity_lines 2088 2089 # Extract overall denominator 2090 # Averaging initial state color, spin, and identical FS particles 2091 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2092 2093 # Extract ngraphs 2094 ngraphs = matrix_element.get_number_of_amplitudes() 2095 replace_dict['ngraphs'] = ngraphs 2096 2097 # Extract nwavefuncs 2098 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2099 replace_dict['nwavefuncs'] = nwavefuncs 2100 2101 # Extract ncolor 2102 ncolor = max(1, len(matrix_element.get('color_basis'))) 2103 replace_dict['ncolor'] = ncolor 2104 2105 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2106 2107 # Extract color data lines 2108 color_data_lines = self.get_color_data_lines(matrix_element) 2109 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2110 2111 if self.opt['export_format']=='standalone_msP': 2112 # For MadSpin need to return the AMP2 2113 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2114 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2115 replace_dict['global_variable'] = \ 2116 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2117 2118 # JAMP definition, depends on the number of independent split orders 2119 split_orders=matrix_element.get('processes')[0].get('split_orders') 2120 2121 if len(split_orders)==0: 2122 replace_dict['nSplitOrders']='' 2123 # Extract JAMP lines 2124 jamp_lines = self.get_JAMP_lines(matrix_element) 2125 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2126 # set all amplitude order to weight 1 and only one squared order 2127 # contribution which is of course ALL_ORDERS=2. 2128 squared_orders = [(2,),] 2129 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2130 replace_dict['chosen_so_configs'] = '.TRUE.' 
2131 replace_dict['nSqAmpSplitOrders']=1 2132 replace_dict['split_order_str_list']='' 2133 else: 2134 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2135 replace_dict['nAmpSplitOrders']=len(amp_orders) 2136 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2137 replace_dict['nSplitOrders']=len(split_orders) 2138 replace_dict['split_order_str_list']=str(split_orders) 2139 amp_so = self.get_split_orders_lines( 2140 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2141 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2142 replace_dict['ampsplitorders']='\n'.join(amp_so) 2143 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2144 jamp_lines = self.get_JAMP_lines_split_order(\ 2145 matrix_element,amp_orders,split_order_names=split_orders) 2146 2147 # Now setup the array specifying what squared split order is chosen 2148 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2149 matrix_element.get('processes')[0],squared_orders) 2150 2151 # For convenience we also write the driver check_sa_splitOrders.f 2152 # that explicitely writes out the contribution from each squared order. 2153 # The original driver still works and is compiled with 'make' while 2154 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2155 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2156 self.write_check_sa_splitOrders(squared_orders,split_orders, 2157 nexternal,ninitial,proc_prefix,check_sa_writer) 2158 2159 if write: 2160 writers.FortranWriter('nsqso_born.inc').writelines( 2161 """INTEGER NSQSO_BORN 2162 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2163 2164 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2165 2166 matrix_template = self.matrix_template 2167 if self.opt['export_format']=='standalone_msP' : 2168 matrix_template = 'matrix_standalone_msP_v4.inc' 2169 elif self.opt['export_format']=='standalone_msF': 2170 matrix_template = 'matrix_standalone_msF_v4.inc' 2171 elif self.opt['export_format']=='matchbox': 2172 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2173 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2174 2175 if len(split_orders)>0: 2176 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2177 logger.debug("Warning: The export format %s is not "+\ 2178 " available for individual ME evaluation of given coupl. orders."+\ 2179 " Only the total ME will be computed.", self.opt['export_format']) 2180 elif self.opt['export_format'] in ['madloop_matchbox']: 2181 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2182 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2183 else: 2184 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2185 2186 if write: 2187 path = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2188 content = open(path).read() 2189 content = content % replace_dict 2190 # Write the file 2191 writer.writelines(content) 2192 # Add the helper functions. 2193 if len(split_orders)>0: 2194 content = '\n' + open(pjoin(_file_path, \ 2195 'iolibs/template_files/split_orders_helping_functions.inc'))\ 2196 .read()%replace_dict 2197 writer.writelines(content) 2198 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2199 else: 2200 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2201 return replace_dict # for subclass update
2202
2203 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2204 nincoming, proc_prefix, writer):
2205 """ Write out a more advanced version of the check_sa drivers that 2206 individually returns the matrix element for each contributing squared 2207 order.""" 2208 2209 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2210 'template_files', 'check_sa_splitOrders.f')).read() 2211 printout_sq_orders=[] 2212 for i, squared_order in enumerate(squared_orders): 2213 sq_orders=[] 2214 for j, sqo in enumerate(squared_order): 2215 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2216 printout_sq_orders.append(\ 2217 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2218 %(i+1,' '.join(sq_orders),i+1)) 2219 printout_sq_orders='\n'.join(printout_sq_orders) 2220 writer.writelines(check_sa_content%{\ 2221 'printout_sqorders':printout_sq_orders, 2222 'nSplitOrders':len(squared_orders), 2223 'nexternal':nexternal, 2224 'nincoming':nincoming, 2225 'proc_prefix':proc_prefix})
2226
2227 2228 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2229 """class to take care of exporting a set of matrix element for the Matchbox 2230 code in the case of Born only routine""" 2231 2232 default_opt = {'clean': False, 'complex_mass':False, 2233 'export_format':'matchbox', 'mp': False, 2234 'sa_symmetry': True} 2235 2236 #specific template of the born 2237 2238 2239 matrix_template = "matrix_standalone_matchbox.inc" 2240 2241 @staticmethod
2242 - def get_color_string_lines(matrix_element):
2243 """Return the color matrix definition lines for this matrix element. Split 2244 rows in chunks of size n.""" 2245 2246 if not matrix_element.get('color_matrix'): 2247 return "\n".join(["out = 1"]) 2248 2249 #start the real work 2250 color_denominators = matrix_element.get('color_matrix').\ 2251 get_line_denominators() 2252 matrix_strings = [] 2253 my_cs = color.ColorString() 2254 for i_color in xrange(len(color_denominators)): 2255 # Then write the numerators for the matrix elements 2256 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2257 t_str=repr(my_cs) 2258 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2259 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2260 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2261 all_matches = t_match.findall(t_str) 2262 output = {} 2263 arg=[] 2264 for match in all_matches: 2265 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2266 if ctype in ['ColorOne' ]: 2267 continue 2268 if ctype not in ['T', 'Tr' ]: 2269 raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype 2270 tmparg += ['0'] 2271 arg +=tmparg 2272 for j, v in enumerate(arg): 2273 output[(i_color,j)] = v 2274 2275 for key in output: 2276 if matrix_strings == []: 2277 #first entry 2278 matrix_strings.append(""" 2279 if (in1.eq.%s.and.in2.eq.%s)then 2280 out = %s 2281 """ % (key[0], key[1], output[key])) 2282 else: 2283 #not first entry 2284 matrix_strings.append(""" 2285 elseif (in1.eq.%s.and.in2.eq.%s)then 2286 out = %s 2287 """ % (key[0], key[1], output[key])) 2288 if len(matrix_strings): 2289 matrix_strings.append(" else \n out = - 1 \n endif") 2290 else: 2291 return "\n out = - 1 \n " 2292 return "\n".join(matrix_strings)
2293
2294 - def make(self,*args,**opts):
2295 pass
2296
2297 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2298 JAMP_formatLC=None):
2299 2300 """Adding leading color part of the colorflow""" 2301 2302 if not JAMP_formatLC: 2303 JAMP_formatLC= "LN%s" % JAMP_format 2304 2305 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2306 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2307 col_amps=col_amps.get_color_amplitudes() 2308 elif(isinstance(col_amps,list)): 2309 if(col_amps and isinstance(col_amps[0],list)): 2310 col_amps=col_amps 2311 else: 2312 raise MadGraph5Error, error_msg % 'col_amps' 2313 else: 2314 raise MadGraph5Error, error_msg % 'col_amps' 2315 2316 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2317 JAMP_format=JAMP_format, 2318 AMP_format=AMP_format, 2319 split=-1) 2320 2321 2322 # Filter the col_ampls to generate only those without any 1/NC terms 2323 2324 LC_col_amps = [] 2325 for coeff_list in col_amps: 2326 to_add = [] 2327 for (coefficient, amp_number) in coeff_list: 2328 if coefficient[3]==0: 2329 to_add.append( (coefficient, amp_number) ) 2330 LC_col_amps.append(to_add) 2331 2332 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2333 JAMP_format=JAMP_formatLC, 2334 AMP_format=AMP_format, 2335 split=-1) 2336 2337 return text
2338
2339 2340 2341 2342 #=============================================================================== 2343 # ProcessExporterFortranMW 2344 #=============================================================================== 2345 -class ProcessExporterFortranMW(ProcessExporterFortran):
2346 """Class to take care of exporting a set of matrix elements to 2347 MadGraph v4 - MadWeight format.""" 2348 2349 matrix_file="matrix_standalone_v4.inc" 2350
2351 - def copy_v4template(self, modelname):
2352 """Additional actions needed for setup of Template 2353 """ 2354 2355 super(ProcessExporterFortranMW, self).copy_v4template(modelname) 2356 2357 # Add the MW specific file 2358 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2359 pjoin(self.dir_path, 'Source','MadWeight'), True) 2360 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2361 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2362 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2363 pjoin(self.dir_path, 'Source','setrun.f')) 2364 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2365 pjoin(self.dir_path, 'Source','run.inc')) 2366 # File created from Template (Different in some child class) 2367 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2368 self.write_run_config_file(writers.FortranWriter(filename)) 2369 2370 try: 2371 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2372 stdout = os.open(os.devnull, os.O_RDWR), 2373 stderr = os.open(os.devnull, os.O_RDWR), 2374 cwd=self.dir_path) 2375 except OSError: 2376 # Probably madweight already called 2377 pass 2378 2379 # Copy the different python file in the Template 2380 self.copy_python_file() 2381 # create the appropriate cuts.f 2382 self.get_mw_cuts_version() 2383 2384 # add the makefile in Source directory 2385 filename = os.path.join(self.dir_path,'Source','makefile') 2386 self.write_source_makefile(writers.FortranWriter(filename))
2387 2388 2389 2390 2391 #=========================================================================== 2392 # convert_model_to_mg4 2393 #===========================================================================
2394 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 2395 wanted_couplings = []):
2396 2397 super(ProcessExporterFortranMW,self).convert_model_to_mg4(model, 2398 wanted_lorentz, wanted_couplings) 2399 2400 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2401 try: 2402 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2403 except OSError as error: 2404 pass 2405 model_path = model.get('modelpath') 2406 # This is not safe if there is a '##' or '-' in the path. 2407 shutil.copytree(model_path, 2408 pjoin(self.dir_path,'bin','internal','ufomodel'), 2409 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2410 if hasattr(model, 'restrict_card'): 2411 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2412 'restrict_default.dat') 2413 if isinstance(model.restrict_card, check_param_card.ParamCard): 2414 model.restrict_card.write(out_path) 2415 else: 2416 files.cp(model.restrict_card, out_path)
2417 2418 #=========================================================================== 2419 # generate_subprocess_directory_v4 2420 #===========================================================================
2421 - def copy_python_file(self):
2422 """copy the python file require for the Template""" 2423 2424 # madevent interface 2425 cp(_file_path+'/interface/madweight_interface.py', 2426 self.dir_path+'/bin/internal/madweight_interface.py') 2427 cp(_file_path+'/interface/extended_cmd.py', 2428 self.dir_path+'/bin/internal/extended_cmd.py') 2429 cp(_file_path+'/interface/common_run_interface.py', 2430 self.dir_path+'/bin/internal/common_run_interface.py') 2431 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2432 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2433 #cp(_file_path+'/iolibs/save_load_object.py', 2434 # self.dir_path+'/bin/internal/save_load_object.py') 2435 cp(_file_path+'/iolibs/file_writers.py', 2436 self.dir_path+'/bin/internal/file_writers.py') 2437 #model file 2438 cp(_file_path+'../models/check_param_card.py', 2439 self.dir_path+'/bin/internal/check_param_card.py') 2440 2441 #madevent file 2442 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2443 cp(_file_path+'/various/lhe_parser.py', 2444 self.dir_path+'/bin/internal/lhe_parser.py') 2445 2446 cp(_file_path+'/various/banner.py', 2447 self.dir_path+'/bin/internal/banner.py') 2448 cp(_file_path+'/various/shower_card.py', 2449 self.dir_path+'/bin/internal/shower_card.py') 2450 cp(_file_path+'/various/cluster.py', 2451 self.dir_path+'/bin/internal/cluster.py') 2452 2453 # logging configuration 2454 cp(_file_path+'/interface/.mg5_logging.conf', 2455 self.dir_path+'/bin/internal/me5_logging.conf') 2456 cp(_file_path+'/interface/coloring_logging.py', 2457 self.dir_path+'/bin/internal/coloring_logging.py')
2458 2459 2460 #=========================================================================== 2461 # Change the version of cuts.f to the one compatible with MW 2462 #===========================================================================
2463 - def get_mw_cuts_version(self, outpath=None):
2464 """create the appropriate cuts.f 2465 This is based on the one associated to ME output but: 2466 1) No clustering (=> remove initcluster/setclscales) 2467 2) Adding the definition of cut_bw at the file. 2468 """ 2469 2470 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2471 2472 text = StringIO() 2473 #1) remove all dependencies in ickkw >1: 2474 nb_if = 0 2475 for line in template: 2476 if 'if(xqcut.gt.0d0' in line: 2477 nb_if = 1 2478 if nb_if == 0: 2479 text.write(line) 2480 continue 2481 if re.search(r'if\(.*\)\s*then', line): 2482 nb_if += 1 2483 elif 'endif' in line: 2484 nb_if -= 1 2485 2486 #2) add fake cut_bw (have to put the true one later) 2487 text.write(""" 2488 logical function cut_bw(p) 2489 include 'madweight_param.inc' 2490 double precision p(*) 2491 if (bw_cut) then 2492 cut_bw = .true. 2493 else 2494 stop 1 2495 endif 2496 return 2497 end 2498 """) 2499 2500 final = text.getvalue() 2501 #3) remove the call to initcluster: 2502 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2503 template = template.replace('genps.inc', 'maxparticles.inc') 2504 #Now we can write it 2505 if not outpath: 2506 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2507 elif isinstance(outpath, str): 2508 fsock = open(outpath, 'w') 2509 else: 2510 fsock = outpath 2511 fsock.write(template)
2512 2513 2514 2515 #=========================================================================== 2516 # Make the Helas and Model directories for Standalone directory 2517 #===========================================================================
2518 - def make(self):
2519 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2520 everything for running madweight 2521 """ 2522 2523 source_dir = os.path.join(self.dir_path, "Source") 2524 logger.info("Running make for Helas") 2525 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2526 logger.info("Running make for Model") 2527 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2528 logger.info("Running make for PDF") 2529 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2530 logger.info("Running make for CERNLIB") 2531 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2532 logger.info("Running make for GENERIC") 2533 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2534 logger.info("Running make for blocks") 2535 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2536 logger.info("Running make for tools") 2537 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2538 2539 #=========================================================================== 2540 # Create proc_card_mg5.dat for MadWeight directory 2541 #===========================================================================
2542 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 2543 online = False, compiler=default_compiler):
2544 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2545 2546 #proc_charac 2547 self.create_proc_charac() 2548 2549 # Write maxparticles.inc based on max of ME's/subprocess groups 2550 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2551 self.write_maxparticles_file(writers.FortranWriter(filename), 2552 matrix_elements) 2553 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2554 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2555 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2556 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2557 2558 self.set_compiler(compiler) 2559 self.make() 2560 2561 # Write command history as proc_card_mg5 2562 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2563 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2564 history.write(output_file) 2565 2566 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler)
2567 2568 2569 #=========================================================================== 2570 # create the run_card for MW 2571 #===========================================================================
2572 - def create_run_card(self, matrix_elements, history):
2573 """ """ 2574 2575 run_card = banner_mod.RunCard() 2576 2577 # pass to default for MW 2578 run_card["run_tag"] = "\'not_use\'" 2579 run_card["fixed_ren_scale"] = "T" 2580 run_card["fixed_fac_scale"] = "T" 2581 run_card.remove_all_cut() 2582 2583 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2584 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2585 python_template=True) 2586 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2587 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2588 python_template=True)
2589 2590 #=========================================================================== 2591 # export model files 2592 #===========================================================================
2593 - def export_model_files(self, model_path):
2594 """export the model dependent files for V4 model""" 2595 2596 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2597 # Add the routine update_as_param in v4 model 2598 # This is a function created in the UFO 2599 text=""" 2600 subroutine update_as_param() 2601 call setpara('param_card.dat',.false.) 2602 return 2603 end 2604 """ 2605 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2606 ff.write(text) 2607 ff.close() 2608 2609 # Modify setrun.f 2610 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2611 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2612 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2613 fsock.write(text) 2614 fsock.close() 2615 2616 # Modify initialization.f 2617 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2618 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2619 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2620 fsock.write(text) 2621 fsock.close() 2622 2623 2624 self.make_model_symbolic_link()
2625 2626 #=========================================================================== 2627 # generate_subprocess_directory_v4 2628 #===========================================================================
2629 - def generate_subprocess_directory_v4(self, matrix_element, 2630 fortran_model,number):
2631 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2632 including the necessary matrix.f and nexternal.inc files""" 2633 2634 cwd = os.getcwd() 2635 2636 # Create the directory PN_xx_xxxxx in the specified path 2637 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2638 "P%s" % matrix_element.get('processes')[0].shell_string()) 2639 2640 try: 2641 os.mkdir(dirpath) 2642 except os.error as error: 2643 logger.warning(error.strerror + " " + dirpath) 2644 2645 #try: 2646 # os.chdir(dirpath) 2647 #except os.error: 2648 # logger.error('Could not cd to directory %s' % dirpath) 2649 # return 0 2650 2651 logger.info('Creating files in directory %s' % dirpath) 2652 2653 # Extract number of external particles 2654 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2655 2656 # Create the matrix.f file and the nexternal.inc file 2657 filename = pjoin(dirpath,'matrix.f') 2658 calls,ncolor = self.write_matrix_element_v4( 2659 writers.FortranWriter(filename), 2660 matrix_element, 2661 fortran_model) 2662 2663 filename = pjoin(dirpath, 'auto_dsig.f') 2664 self.write_auto_dsig_file(writers.FortranWriter(filename), 2665 matrix_element) 2666 2667 filename = pjoin(dirpath, 'configs.inc') 2668 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2669 writers.FortranWriter(filename), 2670 matrix_element) 2671 2672 filename = pjoin(dirpath, 'nexternal.inc') 2673 self.write_nexternal_file(writers.FortranWriter(filename), 2674 nexternal, ninitial) 2675 2676 filename = pjoin(dirpath, 'leshouche.inc') 2677 self.write_leshouche_file(writers.FortranWriter(filename), 2678 matrix_element) 2679 2680 filename = pjoin(dirpath, 'props.inc') 2681 self.write_props_file(writers.FortranWriter(filename), 2682 matrix_element, 2683 s_and_t_channels) 2684 2685 filename = pjoin(dirpath, 'pmass.inc') 2686 self.write_pmass_file(writers.FortranWriter(filename), 2687 matrix_element) 2688 2689 filename = pjoin(dirpath, 'ngraphs.inc') 2690 self.write_ngraphs_file(writers.FortranWriter(filename), 2691 len(matrix_element.get_all_amplitudes())) 2692 2693 filename = pjoin(dirpath, 'maxamps.inc') 2694 self.write_maxamps_file(writers.FortranWriter(filename), 2695 len(matrix_element.get('diagrams')), 2696 ncolor, 2697 len(matrix_element.get('processes')), 2698 1) 2699 2700 filename = pjoin(dirpath, 'phasespace.inc') 2701 self.write_phasespace_file(writers.FortranWriter(filename), 2702 len(matrix_element.get('diagrams')), 2703 ) 2704 2705 # Generate diagrams 2706 filename = pjoin(dirpath, "matrix.ps") 2707 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2708 get('diagrams'), 2709 filename, 2710 model=matrix_element.get('processes')[0].\ 2711 get('model'), 2712 amplitude='') 2713 logger.info("Generating Feynman diagrams for " + \ 2714 matrix_element.get('processes')[0].nice_string()) 2715 plot.draw() 2716 2717 #import genps.inc and maxconfigs.inc into Subprocesses 2718 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 2719 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 2720 2721 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 2722 2723 for file in linkfiles: 2724 ln('../%s' % file, starting_dir=cwd) 2725 2726 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 2727 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 2728 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 
2729 ln('phasespace.inc', '../', log=True, cwd=dirpath) 2730 # Return to original PWD 2731 #os.chdir(cwd) 2732 2733 if not calls: 2734 calls = 0 2735 return calls
2736 2737 #=========================================================================== 2738 # write_matrix_element_v4 2739 #===========================================================================
2740 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
2741 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 2742 2743 if not matrix_element.get('processes') or \ 2744 not matrix_element.get('diagrams'): 2745 return 0 2746 2747 if not isinstance(writer, writers.FortranWriter): 2748 raise writers.FortranWriter.FortranWriterError(\ 2749 "writer not FortranWriter") 2750 2751 # Set lowercase/uppercase Fortran code 2752 writers.FortranWriter.downcase = False 2753 2754 replace_dict = {} 2755 2756 # Extract version number and date from VERSION file 2757 info_lines = self.get_mg5_info_lines() 2758 replace_dict['info_lines'] = info_lines 2759 2760 # Extract process info lines 2761 process_lines = self.get_process_info_lines(matrix_element) 2762 replace_dict['process_lines'] = process_lines 2763 2764 # Set proc_id 2765 replace_dict['proc_id'] = proc_id 2766 2767 # Extract number of external particles 2768 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2769 replace_dict['nexternal'] = nexternal 2770 2771 # Extract ncomb 2772 ncomb = matrix_element.get_helicity_combinations() 2773 replace_dict['ncomb'] = ncomb 2774 2775 # Extract helicity lines 2776 helicity_lines = self.get_helicity_lines(matrix_element) 2777 replace_dict['helicity_lines'] = helicity_lines 2778 2779 # Extract overall denominator 2780 # Averaging initial state color, spin, and identical FS particles 2781 den_factor_line = self.get_den_factor_line(matrix_element) 2782 replace_dict['den_factor_line'] = den_factor_line 2783 2784 # Extract ngraphs 2785 ngraphs = matrix_element.get_number_of_amplitudes() 2786 replace_dict['ngraphs'] = ngraphs 2787 2788 # Extract nwavefuncs 2789 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2790 replace_dict['nwavefuncs'] = nwavefuncs 2791 2792 # Extract ncolor 2793 ncolor = max(1, len(matrix_element.get('color_basis'))) 2794 replace_dict['ncolor'] = ncolor 2795 2796 # Extract color data lines 2797 color_data_lines = self.get_color_data_lines(matrix_element) 2798 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2799 2800 # Extract helas calls 2801 helas_calls = fortran_model.get_matrix_element_calls(\ 2802 matrix_element) 2803 2804 replace_dict['helas_calls'] = "\n".join(helas_calls) 2805 2806 # Extract JAMP lines 2807 jamp_lines = self.get_JAMP_lines(matrix_element) 2808 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2809 2810 file = open(os.path.join(_file_path, \ 2811 'iolibs/template_files/%s' % self.matrix_file)).read() 2812 file = file % replace_dict 2813 2814 2815 # Write the file 2816 writer.writelines(file) 2817 2818 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor
2819 2820 #=========================================================================== 2821 # write_source_makefile 2822 #===========================================================================
2823 - def write_source_makefile(self, writer):
2824 """Write the nexternal.inc file for madweight""" 2825 2826 2827 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 2828 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 2829 text = open(path).read() % {'libraries': set_of_lib} 2830 writer.write(text) 2831 2832 return True
2833
2834 - def write_phasespace_file(self, writer, nb_diag):
2835 """ """ 2836 2837 template = """ include 'maxparticles.inc' 2838 integer max_branches 2839 parameter (max_branches=max_particles-1) 2840 integer max_configs 2841 parameter (max_configs=%(nb_diag)s) 2842 2843 c channel position 2844 integer config_pos,perm_pos 2845 common /to_config/config_pos,perm_pos 2846 2847 """ 2848 2849 writer.write(template % {'nb_diag': nb_diag})
2850 2851 2852 #=========================================================================== 2853 # write_auto_dsig_file 2854 #===========================================================================
2855 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
2856 """Write the auto_dsig.f file for the differential cross section 2857 calculation, includes pdf call information (MadWeight format)""" 2858 2859 if not matrix_element.get('processes') or \ 2860 not matrix_element.get('diagrams'): 2861 return 0 2862 2863 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2864 2865 if ninitial < 1 or ninitial > 2: 2866 raise writers.FortranWriter.FortranWriterError, \ 2867 """Need ninitial = 1 or 2 to write auto_dsig file""" 2868 2869 replace_dict = {} 2870 2871 # Extract version number and date from VERSION file 2872 info_lines = self.get_mg5_info_lines() 2873 replace_dict['info_lines'] = info_lines 2874 2875 # Extract process info lines 2876 process_lines = self.get_process_info_lines(matrix_element) 2877 replace_dict['process_lines'] = process_lines 2878 2879 # Set proc_id 2880 replace_dict['proc_id'] = proc_id 2881 replace_dict['numproc'] = 1 2882 2883 # Set dsig_line 2884 if ninitial == 1: 2885 # No conversion, since result of decay should be given in GeV 2886 dsig_line = "pd(0)*dsiguu" 2887 else: 2888 # Convert result (in GeV) to pb 2889 dsig_line = "pd(0)*conv*dsiguu" 2890 2891 replace_dict['dsig_line'] = dsig_line 2892 2893 # Extract pdf lines 2894 pdf_vars, pdf_data, pdf_lines = \ 2895 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 2896 replace_dict['pdf_vars'] = pdf_vars 2897 replace_dict['pdf_data'] = pdf_data 2898 replace_dict['pdf_lines'] = pdf_lines 2899 2900 # Lines that differ between subprocess group and regular 2901 if proc_id: 2902 replace_dict['numproc'] = int(proc_id) 2903 replace_dict['passcuts_begin'] = "" 2904 replace_dict['passcuts_end'] = "" 2905 # Set lines for subprocess group version 2906 # Set define_iconfigs_lines 2907 replace_dict['define_subdiag_lines'] = \ 2908 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 2909 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 2910 else: 2911 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 2912 replace_dict['passcuts_end'] = "ENDIF" 2913 replace_dict['define_subdiag_lines'] = "" 2914 2915 file = open(os.path.join(_file_path, \ 2916 'iolibs/template_files/auto_dsig_mw.inc')).read() 2917 2918 file = file % replace_dict 2919 2920 2921 # Write the file 2922 writer.writelines(file)
2923 2924 #=========================================================================== 2925 # write_configs_file 2926 #===========================================================================
2927 - def write_configs_file(self, writer, matrix_element):
2928 """Write the configs.inc file for MadEvent""" 2929 2930 # Extract number of external particles 2931 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2932 2933 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 2934 mapconfigs = [c[0] for c in configs] 2935 model = matrix_element.get('processes')[0].get('model') 2936 return mapconfigs, self.write_configs_file_from_diagrams(writer, 2937 [[c[1]] for c in configs], 2938 mapconfigs, 2939 nexternal, ninitial,matrix_element, model)
2940 2941 #=========================================================================== 2942 # write_run_configs_file 2943 #===========================================================================
2944 - def write_run_config_file(self, writer):
2945 """Write the run_configs.inc file for MadWeight""" 2946 2947 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 2948 text = open(path).read() % {'chanperjob':'5'} 2949 writer.write(text) 2950 return True
2951 2952 #=========================================================================== 2953 # write_configs_file_from_diagrams 2954 #===========================================================================
2955 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 2956 nexternal, ninitial, matrix_element, model):
2957 """Write the actual configs.inc file. 2958 2959 configs is the diagrams corresponding to configs (each 2960 diagrams is a list of corresponding diagrams for all 2961 subprocesses, with None if there is no corresponding diagrams 2962 for a given process). 2963 mapconfigs gives the diagram number for each config. 2964 2965 For s-channels, we need to output one PDG for each subprocess in 2966 the subprocess group, in order to be able to pick the right 2967 one for multiprocesses.""" 2968 2969 lines = [] 2970 2971 particle_dict = matrix_element.get('processes')[0].get('model').\ 2972 get('particle_dict') 2973 2974 s_and_t_channels = [] 2975 2976 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 2977 for config in configs if [d for d in config if d][0].\ 2978 get_vertex_leg_numbers()!=[]] 2979 2980 minvert = min(vert_list) if vert_list!=[] else 0 2981 # Number of subprocesses 2982 nsubprocs = len(configs[0]) 2983 2984 nconfigs = 0 2985 2986 new_pdg = model.get_first_non_pdg() 2987 2988 for iconfig, helas_diags in enumerate(configs): 2989 if any([vert > minvert for vert in 2990 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 2991 # Only 3-vertices allowed in configs.inc 2992 continue 2993 nconfigs += 1 2994 2995 # Need s- and t-channels for all subprocesses, including 2996 # those that don't contribute to this config 2997 empty_verts = [] 2998 stchannels = [] 2999 for h in helas_diags: 3000 if h: 3001 # get_s_and_t_channels gives vertices starting from 3002 # final state external particles and working inwards 3003 stchannels.append(h.get('amplitudes')[0].\ 3004 get_s_and_t_channels(ninitial,model,new_pdg)) 3005 else: 3006 stchannels.append((empty_verts, None)) 3007 3008 # For t-channels, just need the first non-empty one 3009 tchannels = [t for s,t in stchannels if t != None][0] 3010 3011 # For s_and_t_channels (to be used later) use only first config 3012 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 3013 tchannels]) 3014 3015 # Make sure empty_verts is same length as real vertices 3016 if any([s for s,t in stchannels]): 3017 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3018 3019 # Reorganize s-channel vertices to get a list of all 3020 # subprocesses for each vertex 3021 schannels = zip(*[s for s,t in stchannels]) 3022 else: 3023 schannels = [] 3024 3025 allchannels = schannels 3026 if len(tchannels) > 1: 3027 # Write out tchannels only if there are any non-trivial ones 3028 allchannels = schannels + tchannels 3029 3030 # Write out propagators for s-channel and t-channel vertices 3031 3032 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3033 # Correspondance between the config and the diagram = amp2 3034 lines.append("* %d %d " % (nconfigs, 3035 mapconfigs[iconfig])) 3036 3037 for verts in allchannels: 3038 if verts in schannels: 3039 vert = [v for v in verts if v][0] 3040 else: 3041 vert = verts 3042 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3043 last_leg = vert.get('legs')[-1] 3044 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3045 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3046 # (last_leg.get('number'), nconfigs, len(daughters), 3047 # ",".join([str(d) for d in daughters]))) 3048 3049 if last_leg.get('id') == 21 and 21 not in particle_dict: 3050 # Fake propagator used in multiparticle vertices 3051 mass = 'zero' 3052 width = 'zero' 3053 pow_part = 0 3054 else: 3055 if (last_leg.get('id')!=7): 3056 particle = particle_dict[last_leg.get('id')] 
3057 # Get mass 3058 mass = particle.get('mass') 3059 # Get width 3060 width = particle.get('width') 3061 else : # fake propagator used in multiparticle vertices 3062 mass= 'zero' 3063 width= 'zero' 3064 3065 line=line+" "+mass+" "+width+" " 3066 3067 if verts in schannels: 3068 pdgs = [] 3069 for v in verts: 3070 if v: 3071 pdgs.append(v.get('legs')[-1].get('id')) 3072 else: 3073 pdgs.append(0) 3074 lines.append(line+" S "+str(last_leg.get('id'))) 3075 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3076 # (last_leg.get('number'), nconfigs, nsubprocs, 3077 # ",".join([str(d) for d in pdgs]))) 3078 # lines.append("data tprid(%d,%d)/0/" % \ 3079 # (last_leg.get('number'), nconfigs)) 3080 elif verts in tchannels[:-1]: 3081 lines.append(line+" T "+str(last_leg.get('id'))) 3082 # lines.append("data tprid(%d,%d)/%d/" % \ 3083 # (last_leg.get('number'), nconfigs, 3084 # abs(last_leg.get('id')))) 3085 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3086 # (last_leg.get('number'), nconfigs, nsubprocs, 3087 # ",".join(['0'] * nsubprocs))) 3088 3089 # Write out number of configs 3090 # lines.append("# Number of configs") 3091 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3092 lines.append(" * ") # a line with just a star indicates this is the end of file 3093 # Write the file 3094 writer.writelines(lines) 3095 3096 return s_and_t_channels
3097
3098 3099 #=============================================================================== 3100 # ProcessExporterFortranME 3101 #=============================================================================== 3102 -class ProcessExporterFortranME(ProcessExporterFortran):
3103 """Class to take care of exporting a set of matrix elements to 3104 MadEvent format.""" 3105 3106 matrix_file = "matrix_madevent_v4.inc" 3107
3108 - def copy_v4template(self, modelname):
3109 """Additional actions needed for setup of Template 3110 """ 3111 3112 super(ProcessExporterFortranME, self).copy_v4template(modelname) 3113 3114 # File created from Template (Different in some child class) 3115 filename = pjoin(self.dir_path,'Source','run_config.inc') 3116 self.write_run_config_file(writers.FortranWriter(filename)) 3117 3118 # The next file are model dependant (due to SLAH convention) 3119 self.model_name = modelname 3120 # Add the symmetry.f 3121 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3122 self.write_symmetry(writers.FortranWriter(filename)) 3123 # 3124 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3125 self.write_addmothers(writers.FortranWriter(filename)) 3126 # Copy the different python file in the Template 3127 self.copy_python_file()
3128 3129 3130 3131 3132 3133 #=========================================================================== 3134 # generate_subprocess_directory_v4 3135 #===========================================================================
3136 - def copy_python_file(self):
3137 """copy the python file require for the Template""" 3138 3139 # madevent interface 3140 cp(_file_path+'/interface/madevent_interface.py', 3141 self.dir_path+'/bin/internal/madevent_interface.py') 3142 cp(_file_path+'/interface/extended_cmd.py', 3143 self.dir_path+'/bin/internal/extended_cmd.py') 3144 cp(_file_path+'/interface/common_run_interface.py', 3145 self.dir_path+'/bin/internal/common_run_interface.py') 3146 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3147 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3148 cp(_file_path+'/iolibs/save_load_object.py', 3149 self.dir_path+'/bin/internal/save_load_object.py') 3150 cp(_file_path+'/iolibs/file_writers.py', 3151 self.dir_path+'/bin/internal/file_writers.py') 3152 #model file 3153 cp(_file_path+'../models/check_param_card.py', 3154 self.dir_path+'/bin/internal/check_param_card.py') 3155 3156 #copy all the file present in madevent directory 3157 for name in os.listdir(pjoin(_file_path, 'madevent')): 3158 if name not in ['__init__.py'] and name.endswith('.py'): 3159 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3160 3161 #madevent file 3162 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3163 cp(_file_path+'/various/lhe_parser.py', 3164 self.dir_path+'/bin/internal/lhe_parser.py') 3165 cp(_file_path+'/various/banner.py', 3166 self.dir_path+'/bin/internal/banner.py') 3167 cp(_file_path+'/various/cluster.py', 3168 self.dir_path+'/bin/internal/cluster.py') 3169 cp(_file_path+'/madevent/combine_runs.py', 3170 self.dir_path+'/bin/internal/combine_runs.py') 3171 # logging configuration 3172 cp(_file_path+'/interface/.mg5_logging.conf', 3173 self.dir_path+'/bin/internal/me5_logging.conf') 3174 cp(_file_path+'/interface/coloring_logging.py', 3175 self.dir_path+'/bin/internal/coloring_logging.py') 3176 # shower card and FO_analyse_card. 3177 # Although not needed, it is imported by banner.py 3178 cp(_file_path+'/various/shower_card.py', 3179 self.dir_path+'/bin/internal/shower_card.py') 3180 cp(_file_path+'/various/FO_analyse_card.py', 3181 self.dir_path+'/bin/internal/FO_analyse_card.py')
3182 3183
3184 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 3185 wanted_couplings = []):
3186 3187 super(ProcessExporterFortranME,self).convert_model_to_mg4(model, 3188 wanted_lorentz, wanted_couplings) 3189 3190 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3191 try: 3192 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3193 except OSError as error: 3194 pass 3195 model_path = model.get('modelpath') 3196 # This is not safe if there is a '##' or '-' in the path. 3197 shutil.copytree(model_path, 3198 pjoin(self.dir_path,'bin','internal','ufomodel'), 3199 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3200 if hasattr(model, 'restrict_card'): 3201 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3202 'restrict_default.dat') 3203 if isinstance(model.restrict_card, check_param_card.ParamCard): 3204 model.restrict_card.write(out_path) 3205 else: 3206 files.cp(model.restrict_card, out_path)
3207 3208 #=========================================================================== 3209 # export model files 3210 #===========================================================================
3211 - def export_model_files(self, model_path):
3212 """export the model dependent files""" 3213 3214 super(ProcessExporterFortranME,self).export_model_files(model_path) 3215 3216 # Add the routine update_as_param in v4 model 3217 # This is a function created in the UFO 3218 text=""" 3219 subroutine update_as_param() 3220 call setpara('param_card.dat',.false.) 3221 return 3222 end 3223 """ 3224 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3225 ff.write(text) 3226 ff.close() 3227 3228 # Add the symmetry.f 3229 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3230 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3231 3232 # Modify setrun.f 3233 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3234 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3235 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3236 fsock.write(text) 3237 fsock.close() 3238 3239 self.make_model_symbolic_link()
3240 3241 3242 #=========================================================================== 3243 # generate_subprocess_directory_v4 3244 #===========================================================================
3245 - def generate_subprocess_directory_v4(self, matrix_element, 3246 fortran_model, 3247 me_number):
3248 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3249 including the necessary matrix.f and various helper files""" 3250 3251 cwd = os.getcwd() 3252 path = pjoin(self.dir_path, 'SubProcesses') 3253 3254 3255 if not self.model: 3256 self.model = matrix_element.get('processes')[0].get('model') 3257 3258 3259 3260 #os.chdir(path) 3261 # Create the directory PN_xx_xxxxx in the specified path 3262 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3263 try: 3264 os.mkdir(pjoin(path,subprocdir)) 3265 except os.error as error: 3266 logger.warning(error.strerror + " " + subprocdir) 3267 3268 #try: 3269 # os.chdir(subprocdir) 3270 #except os.error: 3271 # logger.error('Could not cd to directory %s' % subprocdir) 3272 # return 0 3273 3274 logger.info('Creating files in directory %s' % subprocdir) 3275 Ppath = pjoin(path, subprocdir) 3276 3277 # Extract number of external particles 3278 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3279 3280 # Add the driver.f 3281 ncomb = matrix_element.get_helicity_combinations() 3282 filename = pjoin(Ppath,'driver.f') 3283 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3284 v5=self.opt['v5_model']) 3285 3286 # Create the matrix.f file, auto_dsig.f file and all inc files 3287 filename = pjoin(Ppath, 'matrix.f') 3288 calls, ncolor = \ 3289 self.write_matrix_element_v4(writers.FortranWriter(filename), 3290 matrix_element, fortran_model, subproc_number = me_number) 3291 3292 filename = pjoin(Ppath, 'auto_dsig.f') 3293 self.write_auto_dsig_file(writers.FortranWriter(filename), 3294 matrix_element) 3295 3296 filename = pjoin(Ppath, 'configs.inc') 3297 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3298 writers.FortranWriter(filename), 3299 matrix_element) 3300 3301 filename = pjoin(Ppath, 'config_nqcd.inc') 3302 self.write_config_nqcd_file(writers.FortranWriter(filename), 3303 nqcd_list) 3304 3305 filename = pjoin(Ppath, 'config_subproc_map.inc') 3306 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3307 s_and_t_channels) 3308 3309 filename = pjoin(Ppath, 'coloramps.inc') 3310 self.write_coloramps_file(writers.FortranWriter(filename), 3311 mapconfigs, 3312 matrix_element) 3313 3314 filename = pjoin(Ppath, 'get_color.f') 3315 self.write_colors_file(writers.FortranWriter(filename), 3316 matrix_element) 3317 3318 filename = pjoin(Ppath, 'decayBW.inc') 3319 self.write_decayBW_file(writers.FortranWriter(filename), 3320 s_and_t_channels) 3321 3322 filename = pjoin(Ppath, 'dname.mg') 3323 self.write_dname_file(writers.FileWriter(filename), 3324 "P"+matrix_element.get('processes')[0].shell_string()) 3325 3326 filename = pjoin(Ppath, 'iproc.dat') 3327 self.write_iproc_file(writers.FortranWriter(filename), 3328 me_number) 3329 3330 filename = pjoin(Ppath, 'leshouche.inc') 3331 self.write_leshouche_file(writers.FortranWriter(filename), 3332 matrix_element) 3333 3334 filename = pjoin(Ppath, 'maxamps.inc') 3335 self.write_maxamps_file(writers.FortranWriter(filename), 3336 len(matrix_element.get('diagrams')), 3337 ncolor, 3338 len(matrix_element.get('processes')), 3339 1) 3340 3341 filename = pjoin(Ppath, 'mg.sym') 3342 self.write_mg_sym_file(writers.FortranWriter(filename), 3343 matrix_element) 3344 3345 filename = pjoin(Ppath, 'ncombs.inc') 3346 self.write_ncombs_file(writers.FortranWriter(filename), 3347 nexternal) 3348 3349 filename = pjoin(Ppath, 'nexternal.inc') 3350 self.write_nexternal_file(writers.FortranWriter(filename), 3351 nexternal, ninitial) 
3352 3353 filename = pjoin(Ppath, 'ngraphs.inc') 3354 self.write_ngraphs_file(writers.FortranWriter(filename), 3355 len(mapconfigs)) 3356 3357 3358 filename = pjoin(Ppath, 'pmass.inc') 3359 self.write_pmass_file(writers.FortranWriter(filename), 3360 matrix_element) 3361 3362 filename = pjoin(Ppath, 'props.inc') 3363 self.write_props_file(writers.FortranWriter(filename), 3364 matrix_element, 3365 s_and_t_channels) 3366 3367 # Find config symmetries and permutations 3368 symmetry, perms, ident_perms = \ 3369 diagram_symmetry.find_symmetry(matrix_element) 3370 3371 filename = pjoin(Ppath, 'symswap.inc') 3372 self.write_symswap_file(writers.FortranWriter(filename), 3373 ident_perms) 3374 3375 filename = pjoin(Ppath, 'symfact_orig.dat') 3376 self.write_symfact_file(open(filename, 'w'), symmetry) 3377 3378 # Generate diagrams 3379 filename = pjoin(Ppath, "matrix.ps") 3380 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3381 get('diagrams'), 3382 filename, 3383 model=matrix_element.get('processes')[0].\ 3384 get('model'), 3385 amplitude=True) 3386 logger.info("Generating Feynman diagrams for " + \ 3387 matrix_element.get('processes')[0].nice_string()) 3388 plot.draw() 3389 3390 self.link_files_in_SubProcess(Ppath) 3391 3392 #import nexternal/leshouche in Source 3393 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3394 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3395 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3396 # Return to SubProcesses dir 3397 #os.chdir(os.path.pardir) 3398 3399 # Add subprocess to subproc.mg 3400 filename = pjoin(path, 'subproc.mg') 3401 files.append_to_file(filename, 3402 self.write_subproc, 3403 subprocdir) 3404 3405 # Return to original dir 3406 #os.chdir(cwd) 3407 3408 # Generate info page 3409 gen_infohtml.make_info_html(self.dir_path) 3410 3411 3412 if not calls: 3413 calls = 0 3414 return calls
3415 3451
3452 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 3453 online = False, compiler=default_compiler):
3454 """Finalize ME v4 directory by creating jpeg diagrams, html 3455 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3456 3457 # indicate that the output type is not grouped 3458 if not isinstance(self, ProcessExporterFortranMEGroup): 3459 self.proc_characteristic['grouped_matrix'] = False 3460 3461 modelname = self.opt['model'] 3462 if modelname == 'mssm' or modelname.startswith('mssm-'): 3463 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3464 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3465 check_param_card.convert_to_mg5card(param_card, mg5_param) 3466 check_param_card.check_valid_param_card(mg5_param) 3467 3468 # Add the combine_events.f modify param_card path/number of @X 3469 filename = pjoin(self.dir_path,'Source','combine_events.f') 3470 try: 3471 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3472 except AttributeError: 3473 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3474 nb_proc = len(set(nb_proc)) 3475 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3476 # Write maxconfigs.inc based on max of ME's/subprocess groups 3477 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3478 self.write_maxconfigs_file(writers.FortranWriter(filename), 3479 matrix_elements) 3480 3481 # Write maxparticles.inc based on max of ME's/subprocess groups 3482 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3483 self.write_maxparticles_file(writers.FortranWriter(filename), 3484 matrix_elements) 3485 3486 # Touch "done" file 3487 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3488 3489 # Check for compiler 3490 self.set_compiler(compiler) 3491 3492 old_pos = os.getcwd() 3493 subpath = pjoin(self.dir_path, 'SubProcesses') 3494 3495 P_dir_list = [proc for proc in os.listdir(subpath) 3496 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3497 3498 devnull = os.open(os.devnull, os.O_RDWR) 3499 # Convert the poscript in jpg files (if authorize) 3500 if makejpg: 3501 try: 3502 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3503 except Exception, error: 3504 pass 3505 logger.info("Generate jpeg diagrams") 3506 for Pdir in P_dir_list: 3507 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3508 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3509 3510 logger.info("Generate web pages") 3511 # Create the WebPage using perl script 3512 3513 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3514 stdout = devnull,cwd=pjoin(self.dir_path)) 3515 3516 #os.chdir(os.path.pardir) 3517 3518 obj = gen_infohtml.make_info_html(self.dir_path) 3519 3520 if online: 3521 nb_channel = obj.rep_rule['nb_gen_diag'] 3522 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3523 #add the information to proc_charac 3524 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3525 3526 # Write command history as proc_card_mg5 3527 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3528 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3529 history.write(output_file) 3530 3531 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3532 stdout = devnull) 3533 3534 #crate the proc_characteristic file 3535 self.create_proc_charac(matrix_elements, history) 3536 3537 # create the run_card 3538 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, history, makejpg, online, compiler) 3539 3540 # Run 
"make" to generate madevent.tar.gz file 3541 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3542 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3543 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3544 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3545 stdout = devnull, cwd=self.dir_path) 3546 3547 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3548 stdout = devnull, cwd=self.dir_path)
3549 3550 3551 3552 3553 3554 3555 #return to the initial dir 3556 #os.chdir(old_pos) 3557 3558 #=========================================================================== 3559 # write_matrix_element_v4 3560 #===========================================================================
3561 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3562 proc_id = "", config_map = [], subproc_number = ""):
3563 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3564 3565 if not matrix_element.get('processes') or \ 3566 not matrix_element.get('diagrams'): 3567 return 0 3568 3569 if not isinstance(writer, writers.FortranWriter): 3570 raise writers.FortranWriter.FortranWriterError(\ 3571 "writer not FortranWriter") 3572 3573 3574 # Set lowercase/uppercase Fortran code 3575 writers.FortranWriter.downcase = False 3576 3577 # The proc prefix is not used for MadEvent output so it can safely be set 3578 # to an empty string. 3579 replace_dict = {'proc_prefix':''} 3580 3581 # Extract helas calls 3582 helas_calls = fortran_model.get_matrix_element_calls(\ 3583 matrix_element) 3584 3585 replace_dict['helas_calls'] = "\n".join(helas_calls) 3586 3587 3588 # Extract version number and date from VERSION file 3589 info_lines = self.get_mg5_info_lines() 3590 replace_dict['info_lines'] = info_lines 3591 3592 # Extract process info lines 3593 process_lines = self.get_process_info_lines(matrix_element) 3594 replace_dict['process_lines'] = process_lines 3595 3596 # Set proc_id 3597 replace_dict['proc_id'] = proc_id 3598 3599 # Extract ncomb 3600 ncomb = matrix_element.get_helicity_combinations() 3601 replace_dict['ncomb'] = ncomb 3602 3603 # Extract helicity lines 3604 helicity_lines = self.get_helicity_lines(matrix_element) 3605 replace_dict['helicity_lines'] = helicity_lines 3606 3607 # Extract IC line 3608 ic_line = self.get_ic_line(matrix_element) 3609 replace_dict['ic_line'] = ic_line 3610 3611 # Extract overall denominator 3612 # Averaging initial state color, spin, and identical FS particles 3613 den_factor_line = self.get_den_factor_line(matrix_element) 3614 replace_dict['den_factor_line'] = den_factor_line 3615 3616 # Extract ngraphs 3617 ngraphs = matrix_element.get_number_of_amplitudes() 3618 replace_dict['ngraphs'] = ngraphs 3619 3620 # Extract ndiags 3621 ndiags = len(matrix_element.get('diagrams')) 3622 replace_dict['ndiags'] = ndiags 3623 3624 # Set define_iconfigs_lines 3625 replace_dict['define_iconfigs_lines'] = \ 3626 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3627 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3628 3629 if proc_id: 3630 # Set lines for subprocess group version 3631 # Set define_iconfigs_lines 3632 replace_dict['define_iconfigs_lines'] += \ 3633 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3634 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3635 # Set set_amp2_line 3636 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3637 proc_id 3638 else: 3639 # Standard running 3640 # Set set_amp2_line 3641 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3642 3643 # Extract nwavefuncs 3644 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3645 replace_dict['nwavefuncs'] = nwavefuncs 3646 3647 # Extract ncolor 3648 ncolor = max(1, len(matrix_element.get('color_basis'))) 3649 replace_dict['ncolor'] = ncolor 3650 3651 # Extract color data lines 3652 color_data_lines = self.get_color_data_lines(matrix_element) 3653 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3654 3655 3656 # Set the size of Wavefunction 3657 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 3658 replace_dict['wavefunctionsize'] = 18 3659 else: 3660 replace_dict['wavefunctionsize'] = 6 3661 3662 # Extract amp2 lines 3663 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 3664 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 3665 3666 # The JAMP definition depends on the splitting order 3667 
split_orders=matrix_element.get('processes')[0].get('split_orders') 3668 if len(split_orders)>0: 3669 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 3670 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 3671 matrix_element.get('processes')[0],squared_orders) 3672 else: 3673 # Consider the output of a dummy order 'ALL_ORDERS' for which we 3674 # set all amplitude order to weight 1 and only one squared order 3675 # contribution which is of course ALL_ORDERS=2. 3676 squared_orders = [(2,),] 3677 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 3678 replace_dict['chosen_so_configs'] = '.TRUE.' 3679 3680 replace_dict['nAmpSplitOrders']=len(amp_orders) 3681 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 3682 replace_dict['split_order_str_list']=str(split_orders) 3683 replace_dict['nSplitOrders']=max(len(split_orders),1) 3684 amp_so = self.get_split_orders_lines( 3685 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 3686 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 3687 replace_dict['ampsplitorders']='\n'.join(amp_so) 3688 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 3689 3690 3691 # Extract JAMP lines 3692 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 3693 jamp_lines = self.get_JAMP_lines_split_order(\ 3694 matrix_element,amp_orders,split_order_names= 3695 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 3696 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3697 3698 file = open(pjoin(_file_path, \ 3699 'iolibs/template_files/%s' % self.matrix_file)).read() 3700 3701 file = file % replace_dict 3702 3703 # Add the split orders helper functions. 3704 file = file + '\n' + open(pjoin(_file_path, \ 3705 'iolibs/template_files/split_orders_helping_functions.inc'))\ 3706 .read()%replace_dict 3707 # Write the file 3708 writer.writelines(file) 3709 3710 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
3711 3712 #=========================================================================== 3713 # write_auto_dsig_file 3714 #===========================================================================
3715 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3716 """Write the auto_dsig.f file for the differential cross section 3717 calculation, includes pdf call information""" 3718 3719 if not matrix_element.get('processes') or \ 3720 not matrix_element.get('diagrams'): 3721 return 0 3722 3723 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3724 self.proc_characteristic['ninitial'] = ninitial 3725 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 3726 3727 if ninitial < 1 or ninitial > 2: 3728 raise writers.FortranWriter.FortranWriterError, \ 3729 """Need ninitial = 1 or 2 to write auto_dsig file""" 3730 3731 replace_dict = {} 3732 3733 # Extract version number and date from VERSION file 3734 info_lines = self.get_mg5_info_lines() 3735 replace_dict['info_lines'] = info_lines 3736 3737 # Extract process info lines 3738 process_lines = self.get_process_info_lines(matrix_element) 3739 replace_dict['process_lines'] = process_lines 3740 3741 # Set proc_id 3742 replace_dict['proc_id'] = proc_id 3743 replace_dict['numproc'] = 1 3744 3745 # Set dsig_line 3746 if ninitial == 1: 3747 # No conversion, since result of decay should be given in GeV 3748 dsig_line = "pd(0)*dsiguu" 3749 else: 3750 # Convert result (in GeV) to pb 3751 dsig_line = "pd(0)*conv*dsiguu" 3752 3753 replace_dict['dsig_line'] = dsig_line 3754 3755 # Extract pdf lines 3756 pdf_vars, pdf_data, pdf_lines = \ 3757 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3758 replace_dict['pdf_vars'] = pdf_vars 3759 replace_dict['pdf_data'] = pdf_data 3760 replace_dict['pdf_lines'] = pdf_lines 3761 3762 # Lines that differ between subprocess group and regular 3763 if proc_id: 3764 replace_dict['numproc'] = int(proc_id) 3765 replace_dict['passcuts_begin'] = "" 3766 replace_dict['passcuts_end'] = "" 3767 # Set lines for subprocess group version 3768 # Set define_iconfigs_lines 3769 replace_dict['define_subdiag_lines'] = \ 3770 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3771 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3772 replace_dict['cutsdone'] = "" 3773 else: 3774 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3775 replace_dict['passcuts_end'] = "ENDIF" 3776 replace_dict['define_subdiag_lines'] = "" 3777 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 3778 3779 if not isinstance(self, ProcessExporterFortranMEGroup): 3780 ncomb=matrix_element.get_helicity_combinations() 3781 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 3782 else: 3783 replace_dict['read_write_good_hel'] = "" 3784 3785 3786 3787 file = open(pjoin(_file_path, \ 3788 'iolibs/template_files/auto_dsig_v4.inc')).read() 3789 file = file % replace_dict 3790 3791 # Write the file 3792 writer.writelines(file, context={'read_write_good_hel':True})
3793 3794 #=========================================================================== 3795 # write_coloramps_file 3796 #===========================================================================
3797 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
3798 """Write the coloramps.inc file for MadEvent""" 3799 3800 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 3801 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 3802 (max(len(matrix_element.get('color_basis').keys()), 1), 3803 len(mapconfigs))) 3804 3805 3806 # Write the file 3807 writer.writelines(lines) 3808 3809 return True
3810 3811 #=========================================================================== 3812 # write_colors_file 3813 #===========================================================================
3814 - def write_colors_file(self, writer, matrix_elements):
3815 """Write the get_color.f file for MadEvent, which returns color 3816 for all particles used in the matrix element.""" 3817 3818 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 3819 matrix_elements = [matrix_elements] 3820 3821 model = matrix_elements[0].get('processes')[0].get('model') 3822 3823 # We need the both particle and antiparticle wf_ids, since the identity 3824 # depends on the direction of the wf. 3825 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3826 for wf in d.get('wavefunctions')],[]) \ 3827 for d in me.get('diagrams')], []) \ 3828 for me in matrix_elements], [])) 3829 3830 leg_ids = set(sum([sum([sum([[l.get('id'), 3831 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 3832 for l in p.get_legs_with_decays()], []) \ 3833 for p in me.get('processes')], []) \ 3834 for me in matrix_elements], [])) 3835 particle_ids = sorted(list(wf_ids.union(leg_ids))) 3836 3837 lines = """function get_color(ipdg) 3838 implicit none 3839 integer get_color, ipdg 3840 3841 if(ipdg.eq.%d)then 3842 get_color=%d 3843 return 3844 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3845 3846 for part_id in particle_ids[1:]: 3847 lines += """else if(ipdg.eq.%d)then 3848 get_color=%d 3849 return 3850 """ % (part_id, model.get_particle(part_id).get_color()) 3851 # Dummy particle for multiparticle vertices with pdg given by 3852 # first code not in the model 3853 lines += """else if(ipdg.eq.%d)then 3854 c This is dummy particle used in multiparticle vertices 3855 get_color=2 3856 return 3857 """ % model.get_first_non_pdg() 3858 lines += """else 3859 write(*,*)'Error: No color given for pdg ',ipdg 3860 get_color=0 3861 return 3862 endif 3863 end 3864 """ 3865 3866 # Write the file 3867 writer.writelines(lines) 3868 3869 return True
3870 3871 #=========================================================================== 3872 # write_config_nqcd_file 3873 #===========================================================================
3874 - def write_config_nqcd_file(self, writer, nqcd_list):
3875 """Write the config_nqcd.inc with the number of QCD couplings 3876 for each config""" 3877 3878 lines = [] 3879 for iconf, n in enumerate(nqcd_list): 3880 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 3881 3882 # Write the file 3883 writer.writelines(lines) 3884 3885 return True
3886 3887 #=========================================================================== 3888 # write_maxconfigs_file 3889 #===========================================================================
3890 - def write_maxconfigs_file(self, writer, matrix_elements):
3891 """Write the maxconfigs.inc file for MadEvent""" 3892 3893 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 3894 maxconfigs = max([me.get_num_configs() for me in \ 3895 matrix_elements.get('matrix_elements')]) 3896 else: 3897 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 3898 3899 lines = "integer lmaxconfigs\n" 3900 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 3901 3902 # Write the file 3903 writer.writelines(lines) 3904 3905 return True
3906 3907 #=========================================================================== 3908 # read_write_good_hel 3909 #===========================================================================
3910 - def read_write_good_hel(self, ncomb):
3911 """return the code to read/write the good_hel common_block""" 3912 3913 convert = {'ncomb' : ncomb} 3914 output = """ 3915 subroutine write_good_hel(stream_id) 3916 implicit none 3917 integer stream_id 3918 INTEGER NCOMB 3919 PARAMETER ( NCOMB=%(ncomb)d) 3920 LOGICAL GOODHEL(NCOMB) 3921 INTEGER NTRY 3922 common/BLOCK_GOODHEL/NTRY,GOODHEL 3923 write(stream_id,*) GOODHEL 3924 return 3925 end 3926 3927 3928 subroutine read_good_hel(stream_id) 3929 implicit none 3930 include 'genps.inc' 3931 integer stream_id 3932 INTEGER NCOMB 3933 PARAMETER ( NCOMB=%(ncomb)d) 3934 LOGICAL GOODHEL(NCOMB) 3935 INTEGER NTRY 3936 common/BLOCK_GOODHEL/NTRY,GOODHEL 3937 read(stream_id,*) GOODHEL 3938 NTRY = MAXTRIES + 1 3939 return 3940 end 3941 3942 subroutine init_good_hel() 3943 implicit none 3944 INTEGER NCOMB 3945 PARAMETER ( NCOMB=%(ncomb)d) 3946 LOGICAL GOODHEL(NCOMB) 3947 INTEGER NTRY 3948 INTEGER I 3949 3950 do i=1,NCOMB 3951 GOODHEL(I) = .false. 3952 enddo 3953 NTRY = 0 3954 end 3955 3956 integer function get_maxsproc() 3957 implicit none 3958 get_maxsproc = 1 3959 return 3960 end 3961 3962 """ % convert 3963 3964 return output
3965 3966 #=========================================================================== 3967 # write_config_subproc_map_file 3968 #===========================================================================
3969 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3970 """Write a dummy config_subproc.inc file for MadEvent""" 3971 3972 lines = [] 3973 3974 for iconfig in range(len(s_and_t_channels)): 3975 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3976 (iconfig + 1)) 3977 3978 # Write the file 3979 writer.writelines(lines) 3980 3981 return True
3982 3983 #=========================================================================== 3984 # write_configs_file 3985 #===========================================================================
3986 - def write_configs_file(self, writer, matrix_element):
3987 """Write the configs.inc file for MadEvent""" 3988 3989 # Extract number of external particles 3990 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3991 3992 model = matrix_element.get('processes')[0].get('model') 3993 configs = [(i+1, d) for (i, d) in \ 3994 enumerate(matrix_element.get('diagrams'))] 3995 mapconfigs = [c[0] for c in configs] 3996 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3997 [[c[1]] for c in configs], 3998 mapconfigs, 3999 nexternal, ninitial, 4000 model)
4001 4002 #=========================================================================== 4003 # write_run_configs_file 4004 #===========================================================================
4005 - def write_run_config_file(self, writer):
4006 """Write the run_configs.inc file for MadEvent""" 4007 4008 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4009 4010 if self.proc_characteristic['loop_induced']: 4011 job_per_chan = 1 4012 else: 4013 job_per_chan = 5 4014 text = open(path).read() % {'chanperjob': job_per_chan} 4015 writer.write(text) 4016 return True
4017 4018 4019 #=========================================================================== 4020 # write_configs_file_from_diagrams 4021 #===========================================================================
4022 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4023 nexternal, ninitial, model):
4024 """Write the actual configs.inc file. 4025 4026 configs is the diagrams corresponding to configs (each 4027 diagrams is a list of corresponding diagrams for all 4028 subprocesses, with None if there is no corresponding diagrams 4029 for a given process). 4030 mapconfigs gives the diagram number for each config. 4031 4032 For s-channels, we need to output one PDG for each subprocess in 4033 the subprocess group, in order to be able to pick the right 4034 one for multiprocesses.""" 4035 4036 lines = [] 4037 4038 s_and_t_channels = [] 4039 4040 nqcd_list = [] 4041 4042 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4043 for config in configs if [d for d in config if d][0].\ 4044 get_vertex_leg_numbers()!=[]] 4045 minvert = min(vert_list) if vert_list!=[] else 0 4046 4047 # Number of subprocesses 4048 nsubprocs = len(configs[0]) 4049 4050 nconfigs = 0 4051 4052 new_pdg = model.get_first_non_pdg() 4053 4054 for iconfig, helas_diags in enumerate(configs): 4055 if any([vert > minvert for vert in 4056 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4057 # Only 3-vertices allowed in configs.inc 4058 continue 4059 nconfigs += 1 4060 4061 # Need s- and t-channels for all subprocesses, including 4062 # those that don't contribute to this config 4063 empty_verts = [] 4064 stchannels = [] 4065 for h in helas_diags: 4066 if h: 4067 # get_s_and_t_channels gives vertices starting from 4068 # final state external particles and working inwards 4069 stchannels.append(h.get('amplitudes')[0].\ 4070 get_s_and_t_channels(ninitial, model, 4071 new_pdg)) 4072 else: 4073 stchannels.append((empty_verts, None)) 4074 4075 # For t-channels, just need the first non-empty one 4076 tchannels = [t for s,t in stchannels if t != None][0] 4077 4078 # For s_and_t_channels (to be used later) use only first config 4079 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4080 tchannels]) 4081 4082 # Make sure empty_verts is same length as real vertices 4083 if any([s for s,t in stchannels]): 4084 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4085 4086 # Reorganize s-channel vertices to get a list of all 4087 # subprocesses for each vertex 4088 schannels = zip(*[s for s,t in stchannels]) 4089 else: 4090 schannels = [] 4091 4092 allchannels = schannels 4093 if len(tchannels) > 1: 4094 # Write out tchannels only if there are any non-trivial ones 4095 allchannels = schannels + tchannels 4096 4097 # Write out propagators for s-channel and t-channel vertices 4098 4099 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4100 # Correspondance between the config and the diagram = amp2 4101 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4102 mapconfigs[iconfig])) 4103 # Number of QCD couplings in this diagram 4104 nqcd = 0 4105 for h in helas_diags: 4106 if h: 4107 try: 4108 nqcd = h.calculate_orders()['QCD'] 4109 except KeyError: 4110 pass 4111 break 4112 else: 4113 continue 4114 4115 nqcd_list.append(nqcd) 4116 4117 for verts in allchannels: 4118 if verts in schannels: 4119 vert = [v for v in verts if v][0] 4120 else: 4121 vert = verts 4122 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4123 last_leg = vert.get('legs')[-1] 4124 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4125 (last_leg.get('number'), nconfigs, len(daughters), 4126 ",".join([str(d) for d in daughters]))) 4127 if verts in schannels: 4128 pdgs = [] 4129 for v in verts: 4130 if v: 4131 pdgs.append(v.get('legs')[-1].get('id')) 4132 else: 4133 pdgs.append(0) 4134 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4135 (last_leg.get('number'), nconfigs, nsubprocs, 4136 ",".join([str(d) for d in pdgs]))) 4137 lines.append("data tprid(%d,%d)/0/" % \ 4138 (last_leg.get('number'), nconfigs)) 4139 elif verts in tchannels[:-1]: 4140 lines.append("data tprid(%d,%d)/%d/" % \ 4141 (last_leg.get('number'), nconfigs, 4142 abs(last_leg.get('id')))) 4143 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4144 (last_leg.get('number'), nconfigs, nsubprocs, 4145 ",".join(['0'] * nsubprocs))) 4146 4147 # Write out number of configs 4148 lines.append("# Number of configs") 4149 lines.append("data mapconfig(0)/%d/" % nconfigs) 4150 4151 # Write the file 4152 writer.writelines(lines) 4153 4154 return s_and_t_channels, nqcd_list
4155 4156 #=========================================================================== 4157 # write_decayBW_file 4158 #===========================================================================
4159 - def write_decayBW_file(self, writer, s_and_t_channels):
4160 """Write the decayBW.inc file for MadEvent""" 4161 4162 lines = [] 4163 4164 booldict = {None: "0", True: "1", False: "2"} 4165 4166 for iconf, config in enumerate(s_and_t_channels): 4167 schannels = config[0] 4168 for vertex in schannels: 4169 # For the resulting leg, pick out whether it comes from 4170 # decay or not, as given by the onshell flag 4171 leg = vertex.get('legs')[-1] 4172 lines.append("data gForceBW(%d,%d)/%s/" % \ 4173 (leg.get('number'), iconf + 1, 4174 booldict[leg.get('onshell')])) 4175 4176 # Write the file 4177 writer.writelines(lines) 4178 4179 return True
4180 4181 #=========================================================================== 4182 # write_dname_file 4183 #===========================================================================
4184 - def write_dname_file(self, writer, dir_name):
4185 """Write the dname.mg file for MG4""" 4186 4187 line = "DIRNAME=%s" % dir_name 4188 4189 # Write the file 4190 writer.write(line + "\n") 4191 4192 return True
4193 4194 #=========================================================================== 4195 # write_driver 4196 #===========================================================================
4197 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4198 """Write the SubProcess/driver.f file for MG4""" 4199 4200 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4201 4202 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4203 card = 'Source/MODEL/MG5_param.dat' 4204 else: 4205 card = 'param_card.dat' 4206 # Requiring each helicity configuration to be probed by 10 points for 4207 # matrix element before using the resulting grid for MC over helicity 4208 # sampling. 4209 # We multiply this by 2 because each grouped subprocess is called at most 4210 # twice for each IMIRROR. 4211 replace_dict = {'param_card_name':card, 4212 'ncomb':ncomb, 4213 'hel_init_points':n_grouped_proc*10*2} 4214 if not v5: 4215 replace_dict['secondparam']=',.true.' 4216 else: 4217 replace_dict['secondparam']='' 4218 4219 text = open(path).read() % replace_dict 4220 4221 writer.write(text) 4222 4223 return True
4224 4225 #=========================================================================== 4226 # write_addmothers 4227 #===========================================================================
4228 - def write_addmothers(self, writer):
4229 """Write the SubProcess/addmothers.f""" 4230 4231 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4232 4233 text = open(path).read() % {'iconfig': 'diag_number'} 4234 writer.write(text) 4235 4236 return True
4237 4238 4239 #=========================================================================== 4240 # write_combine_events 4241 #===========================================================================
4242 - def write_combine_events(self, writer, nb_proc=100):
4243 """Write the SubProcess/driver.f file for MG4""" 4244 4245 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4246 4247 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4248 card = 'Source/MODEL/MG5_param.dat' 4249 else: 4250 card = 'param_card.dat' 4251 4252 #set maxpup (number of @X in the process card) 4253 4254 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4255 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4256 writer.write(text) 4257 4258 return True
4259 4260 4261 #=========================================================================== 4262 # write_symmetry 4263 #===========================================================================
4264 - def write_symmetry(self, writer, v5=True):
4265 """Write the SubProcess/driver.f file for ME""" 4266 4267 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4268 4269 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4270 card = 'Source/MODEL/MG5_param.dat' 4271 else: 4272 card = 'param_card.dat' 4273 text = open(path).read() 4274 4275 if v5: 4276 text = text % {'param_card_name':card, 'setparasecondarg':''} 4277 else: 4278 text = text % {'param_card_name':card, 'setparasecondarg':',.true.'} 4279 writer.write(text) 4280 4281 return True
4282 4283 4284 4285 4286 #=========================================================================== 4287 # write_iproc_file 4288 #===========================================================================
4289 - def write_iproc_file(self, writer, me_number):
4290 """Write the iproc.dat file for MG4""" 4291 line = "%d" % (me_number + 1) 4292 4293 # Write the file 4294 for line_to_write in writer.write_line(line): 4295 writer.write(line_to_write) 4296 return True
4297 4298 #=========================================================================== 4299 # write_mg_sym_file 4300 #===========================================================================
4301 - def write_mg_sym_file(self, writer, matrix_element):
4302 """Write the mg.sym file for MadEvent.""" 4303 4304 lines = [] 4305 4306 # Extract process with all decays included 4307 final_legs = filter(lambda leg: leg.get('state') == True, 4308 matrix_element.get('processes')[0].get_legs_with_decays()) 4309 4310 ninitial = len(filter(lambda leg: leg.get('state') == False, 4311 matrix_element.get('processes')[0].get('legs'))) 4312 4313 identical_indices = {} 4314 4315 # Extract identical particle info 4316 for i, leg in enumerate(final_legs): 4317 if leg.get('id') in identical_indices: 4318 identical_indices[leg.get('id')].append(\ 4319 i + ninitial + 1) 4320 else: 4321 identical_indices[leg.get('id')] = [i + ninitial + 1] 4322 4323 # Remove keys which have only one particle 4324 for key in identical_indices.keys(): 4325 if len(identical_indices[key]) < 2: 4326 del identical_indices[key] 4327 4328 # Write mg.sym file 4329 lines.append(str(len(identical_indices.keys()))) 4330 for key in identical_indices.keys(): 4331 lines.append(str(len(identical_indices[key]))) 4332 for number in identical_indices[key]: 4333 lines.append(str(number)) 4334 4335 # Write the file 4336 writer.writelines(lines) 4337 4338 return True
4339 4340 #=========================================================================== 4341 # write_mg_sym_file 4342 #===========================================================================
4343 - def write_default_mg_sym_file(self, writer):
4344 """Write the mg.sym file for MadEvent.""" 4345 4346 lines = "0" 4347 4348 # Write the file 4349 writer.writelines(lines) 4350 4351 return True
4352 4353 #=========================================================================== 4354 # write_ncombs_file 4355 #===========================================================================
4356 - def write_ncombs_file(self, writer, nexternal):
4357 """Write the ncombs.inc file for MadEvent.""" 4358 4359 # ncomb (used for clustering) is 2^nexternal 4360 file = " integer n_max_cl\n" 4361 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4362 4363 # Write the file 4364 writer.writelines(file) 4365 4366 return True
4367 4368 #=========================================================================== 4369 # write_processes_file 4370 #===========================================================================
4371 - def write_processes_file(self, writer, subproc_group):
4372 """Write the processes.dat file with info about the subprocesses 4373 in this group.""" 4374 4375 lines = [] 4376 4377 for ime, me in \ 4378 enumerate(subproc_group.get('matrix_elements')): 4379 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4380 ",".join(p.base_string() for p in \ 4381 me.get('processes')))) 4382 if me.get('has_mirror_process'): 4383 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4384 for proc in mirror_procs: 4385 legs = copy.copy(proc.get('legs_with_decays')) 4386 legs.insert(0, legs.pop(1)) 4387 proc.set("legs_with_decays", legs) 4388 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4389 mirror_procs)) 4390 else: 4391 lines.append("mirror none") 4392 4393 # Write the file 4394 writer.write("\n".join(lines)) 4395 4396 return True
4397 4398 #=========================================================================== 4399 # write_symswap_file 4400 #===========================================================================
4401 - def write_symswap_file(self, writer, ident_perms):
4402 """Write the file symswap.inc for MG4 by comparing diagrams using 4403 the internal matrix element value functionality.""" 4404 4405 lines = [] 4406 4407 # Write out lines for symswap.inc file (used to permute the 4408 # external leg momenta 4409 for iperm, perm in enumerate(ident_perms): 4410 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4411 (iperm+1, ",".join([str(i+1) for i in perm]))) 4412 lines.append("data nsym/%d/" % len(ident_perms)) 4413 4414 # Write the file 4415 writer.writelines(lines) 4416 4417 return True
4418 4419 #=========================================================================== 4420 # write_symfact_file 4421 #===========================================================================
4422 - def write_symfact_file(self, writer, symmetry):
4423 """Write the files symfact.dat for MG4 by comparing diagrams using 4424 the internal matrix element value functionality.""" 4425 4426 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4427 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4428 # Write out lines for symswap.inc file (used to permute the 4429 # external leg momenta 4430 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4431 # Write the file 4432 writer.write('\n'.join(lines)) 4433 writer.write('\n') 4434 4435 return True
4436 4437 #=========================================================================== 4438 # write_symperms_file 4439 #===========================================================================
4440 - def write_symperms_file(self, writer, perms):
4441 """Write the symperms.inc file for subprocess group, used for 4442 symmetric configurations""" 4443 4444 lines = [] 4445 for iperm, perm in enumerate(perms): 4446 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4447 (iperm+1, ",".join([str(i+1) for i in perm]))) 4448 4449 # Write the file 4450 writer.writelines(lines) 4451 4452 return True
4453 4454 #=========================================================================== 4455 # write_subproc 4456 #===========================================================================
4457 - def write_subproc(self, writer, subprocdir):
4458 """Append this subprocess to the subproc.mg file for MG4""" 4459 4460 # Write line to file 4461 writer.write(subprocdir + "\n") 4462 4463 return True
4464
4465 #=============================================================================== 4466 # ProcessExporterFortranMEGroup 4467 #=============================================================================== 4468 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4469 """Class to take care of exporting a set of matrix elements to 4470 MadEvent subprocess group format.""" 4471 4472 matrix_file = "matrix_madevent_group_v4.inc" 4473 4474 #=========================================================================== 4475 # generate_subprocess_directory_v4 4476 #===========================================================================
4477 - def generate_subprocess_directory_v4(self, subproc_group, 4478 fortran_model, 4479 group_number):
4480 """Generate the Pn directory for a subprocess group in MadEvent, 4481 including the necessary matrix_N.f files, configs.inc and various 4482 other helper files""" 4483 4484 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 4485 "subproc_group object not SubProcessGroup" 4486 4487 if not self.model: 4488 self.model = subproc_group.get('matrix_elements')[0].\ 4489 get('processes')[0].get('model') 4490 4491 cwd = os.getcwd() 4492 path = pjoin(self.dir_path, 'SubProcesses') 4493 4494 os.chdir(path) 4495 pathdir = os.getcwd() 4496 4497 # Create the directory PN in the specified path 4498 subprocdir = "P%d_%s" % (subproc_group.get('number'), 4499 subproc_group.get('name')) 4500 try: 4501 os.mkdir(subprocdir) 4502 except os.error as error: 4503 logger.warning(error.strerror + " " + subprocdir) 4504 4505 try: 4506 os.chdir(subprocdir) 4507 except os.error: 4508 logger.error('Could not cd to directory %s' % subprocdir) 4509 return 0 4510 4511 logger.info('Creating files in directory %s' % subprocdir) 4512 4513 # Create the matrix.f files, auto_dsig.f files and all inc files 4514 # for all subprocesses in the group 4515 4516 maxamps = 0 4517 maxflows = 0 4518 tot_calls = 0 4519 4520 matrix_elements = subproc_group.get('matrix_elements') 4521 4522 # Add the driver.f, all grouped ME's must share the same number of 4523 # helicity configuration 4524 ncomb = matrix_elements[0].get_helicity_combinations() 4525 for me in matrix_elements[1:]: 4526 if ncomb!=me.get_helicity_combinations(): 4527 raise MadGraph5Error, "All grouped processes must share the "+\ 4528 "same number of helicity configurations." 4529 4530 filename = 'driver.f' 4531 self.write_driver(writers.FortranWriter(filename),ncomb, 4532 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 4533 4534 for ime, matrix_element in \ 4535 enumerate(matrix_elements): 4536 filename = 'matrix%d.f' % (ime+1) 4537 calls, ncolor = \ 4538 self.write_matrix_element_v4(writers.FortranWriter(filename), 4539 matrix_element, 4540 fortran_model, 4541 proc_id=str(ime+1), 4542 config_map=subproc_group.get('diagram_maps')[ime], 4543 subproc_number=group_number) 4544 4545 filename = 'auto_dsig%d.f' % (ime+1) 4546 self.write_auto_dsig_file(writers.FortranWriter(filename), 4547 matrix_element, 4548 str(ime+1)) 4549 4550 # Keep track of needed quantities 4551 tot_calls += int(calls) 4552 maxflows = max(maxflows, ncolor) 4553 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 4554 4555 # Draw diagrams 4556 filename = "matrix%d.ps" % (ime+1) 4557 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 4558 get('diagrams'), 4559 filename, 4560 model = \ 4561 matrix_element.get('processes')[0].\ 4562 get('model'), 4563 amplitude=True) 4564 logger.info("Generating Feynman diagrams for " + \ 4565 matrix_element.get('processes')[0].nice_string()) 4566 plot.draw() 4567 4568 # Extract number of external particles 4569 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4570 4571 # Generate a list of diagrams corresponding to each configuration 4572 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 4573 # If a subprocess has no diagrams for this config, the number is 0 4574 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 4575 4576 filename = 'auto_dsig.f' 4577 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 4578 subproc_group) 4579 4580 filename = 'coloramps.inc' 4581 self.write_coloramps_file(writers.FortranWriter(filename), 4582 subproc_diagrams_for_config, 4583 maxflows, 4584 matrix_elements) 4585 4586 filename = 'get_color.f' 4587 self.write_colors_file(writers.FortranWriter(filename), 4588 matrix_elements) 4589 4590 filename = 'config_subproc_map.inc' 4591 self.write_config_subproc_map_file(writers.FortranWriter(filename), 4592 subproc_diagrams_for_config) 4593 4594 filename = 'configs.inc' 4595 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 4596 writers.FortranWriter(filename), 4597 subproc_group, 4598 subproc_diagrams_for_config) 4599 4600 filename = 'config_nqcd.inc' 4601 self.write_config_nqcd_file(writers.FortranWriter(filename), 4602 nqcd_list) 4603 4604 filename = 'decayBW.inc' 4605 self.write_decayBW_file(writers.FortranWriter(filename), 4606 s_and_t_channels) 4607 4608 filename = 'dname.mg' 4609 self.write_dname_file(writers.FortranWriter(filename), 4610 subprocdir) 4611 4612 filename = 'iproc.dat' 4613 self.write_iproc_file(writers.FortranWriter(filename), 4614 group_number) 4615 4616 filename = 'leshouche.inc' 4617 self.write_leshouche_file(writers.FortranWriter(filename), 4618 subproc_group) 4619 4620 filename = 'maxamps.inc' 4621 self.write_maxamps_file(writers.FortranWriter(filename), 4622 maxamps, 4623 maxflows, 4624 max([len(me.get('processes')) for me in \ 4625 matrix_elements]), 4626 len(matrix_elements)) 4627 4628 # Note that mg.sym is not relevant for this case 4629 filename = 'mg.sym' 4630 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 4631 4632 filename = 'mirrorprocs.inc' 4633 self.write_mirrorprocs(writers.FortranWriter(filename), 4634 subproc_group) 4635 4636 filename = 'ncombs.inc' 4637 self.write_ncombs_file(writers.FortranWriter(filename), 4638 nexternal) 4639 4640 filename = 'nexternal.inc' 4641 self.write_nexternal_file(writers.FortranWriter(filename), 4642 nexternal, ninitial) 4643 4644 filename = 'ngraphs.inc' 4645 self.write_ngraphs_file(writers.FortranWriter(filename), 4646 nconfigs) 4647 4648 filename = 'pmass.inc' 4649 self.write_pmass_file(writers.FortranWriter(filename), 4650 matrix_element) 4651 4652 filename = 'props.inc' 4653 self.write_props_file(writers.FortranWriter(filename), 4654 matrix_element, 4655 s_and_t_channels) 4656 4657 filename = 'processes.dat' 4658 files.write_to_file(filename, 4659 self.write_processes_file, 4660 subproc_group) 4661 4662 # Find config symmetries and permutations 4663 symmetry, perms, ident_perms = \ 4664 diagram_symmetry.find_symmetry(subproc_group) 4665 4666 filename = 'symswap.inc' 4667 self.write_symswap_file(writers.FortranWriter(filename), 4668 ident_perms) 4669 4670 filename = 'symfact_orig.dat' 4671 self.write_symfact_file(open(filename, 'w'), symmetry) 4672 4673 filename = 'symperms.inc' 4674 self.write_symperms_file(writers.FortranWriter(filename), 4675 perms) 4676 4677 # Generate jpgs -> pass in make_html 4678 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 4679 4680 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 4681 4682 #import nexternal/leshouch in Source 4683 ln('nexternal.inc', '../../Source', log=False) 4684 ln('leshouche.inc', '../../Source', log=False) 
4685 ln('maxamps.inc', '../../Source', log=False) 4686 4687 # Return to SubProcesses dir) 4688 os.chdir(pathdir) 4689 4690 # Add subprocess to subproc.mg 4691 filename = 'subproc.mg' 4692 files.append_to_file(filename, 4693 self.write_subproc, 4694 subprocdir) 4695 4696 # Return to original dir 4697 os.chdir(cwd) 4698 4699 if not tot_calls: 4700 tot_calls = 0 4701 return tot_calls
4702 4703 #=========================================================================== 4704 # write_super_auto_dsig_file 4705 #===========================================================================
4706 - def write_super_auto_dsig_file(self, writer, subproc_group):
4707 """Write the auto_dsig.f file selecting between the subprocesses 4708 in subprocess group mode""" 4709 4710 replace_dict = {} 4711 4712 # Extract version number and date from VERSION file 4713 info_lines = self.get_mg5_info_lines() 4714 replace_dict['info_lines'] = info_lines 4715 4716 matrix_elements = subproc_group.get('matrix_elements') 4717 4718 # Extract process info lines 4719 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 4720 matrix_elements]) 4721 replace_dict['process_lines'] = process_lines 4722 4723 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 4724 replace_dict['nexternal'] = nexternal 4725 4726 replace_dict['nsprocs'] = 2*len(matrix_elements) 4727 4728 # Generate dsig definition line 4729 dsig_def_line = "DOUBLE PRECISION " + \ 4730 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 4731 range(len(matrix_elements))]) 4732 replace_dict["dsig_def_line"] = dsig_def_line 4733 4734 # Generate dsig process lines 4735 call_dsig_proc_lines = [] 4736 for iproc in range(len(matrix_elements)): 4737 call_dsig_proc_lines.append(\ 4738 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 4739 {"num": iproc + 1, 4740 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 4741 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 4742 4743 ncomb=matrix_elements[0].get_helicity_combinations() 4744 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4745 4746 file = open(pjoin(_file_path, \ 4747 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 4748 file = file % replace_dict 4749 4750 # Write the file 4751 writer.writelines(file)
4752 4753 #=========================================================================== 4754 # write_mirrorprocs 4755 #===========================================================================
4756 - def write_mirrorprocs(self, writer, subproc_group):
4757 """Write the mirrorprocs.inc file determining which processes have 4758 IS mirror process in subprocess group mode.""" 4759 4760 lines = [] 4761 bool_dict = {True: '.true.', False: '.false.'} 4762 matrix_elements = subproc_group.get('matrix_elements') 4763 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 4764 (len(matrix_elements), 4765 ",".join([bool_dict[me.get('has_mirror_process')] for \ 4766 me in matrix_elements]))) 4767 # Write the file 4768 writer.writelines(lines)
4769 4770 #=========================================================================== 4771 # write_addmothers 4772 #===========================================================================
4773 - def write_addmothers(self, writer):
4774 """Write the SubProcess/addmothers.f""" 4775 4776 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4777 4778 text = open(path).read() % {'iconfig': 'lconfig'} 4779 writer.write(text) 4780 4781 return True
4782 4783 4784 #=========================================================================== 4785 # write_coloramps_file 4786 #===========================================================================
4787 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 4788 matrix_elements):
4789 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 4790 4791 # Create a map from subprocess (matrix element) to a list of 4792 # the diagrams corresponding to each config 4793 4794 lines = [] 4795 4796 subproc_to_confdiag = {} 4797 for config in diagrams_for_config: 4798 for subproc, diag in enumerate(config): 4799 try: 4800 subproc_to_confdiag[subproc].append(diag) 4801 except KeyError: 4802 subproc_to_confdiag[subproc] = [diag] 4803 4804 for subproc in sorted(subproc_to_confdiag.keys()): 4805 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 4806 matrix_elements[subproc], 4807 subproc + 1)) 4808 4809 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 4810 (maxflows, 4811 len(diagrams_for_config), 4812 len(matrix_elements))) 4813 4814 # Write the file 4815 writer.writelines(lines) 4816 4817 return True
4818 4819 #=========================================================================== 4820 # write_config_subproc_map_file 4821 #===========================================================================
4822 - def write_config_subproc_map_file(self, writer, config_subproc_map):
4823 """Write the config_subproc_map.inc file for subprocess groups""" 4824 4825 lines = [] 4826 # Output only configs that have some corresponding diagrams 4827 iconfig = 0 4828 for config in config_subproc_map: 4829 if set(config) == set([0]): 4830 continue 4831 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 4832 (iconfig + 1, len(config), 4833 ",".join([str(i) for i in config]))) 4834 iconfig += 1 4835 # Write the file 4836 writer.writelines(lines) 4837 4838 return True
4839 4840 #=========================================================================== 4841 # read_write_good_hel 4842 #===========================================================================
4843 - def read_write_good_hel(self, ncomb):
4844 """return the code to read/write the good_hel common_block""" 4845 4846 convert = {'ncomb' : ncomb} 4847 4848 output = """ 4849 subroutine write_good_hel(stream_id) 4850 implicit none 4851 integer stream_id 4852 INTEGER NCOMB 4853 PARAMETER ( NCOMB=%(ncomb)d) 4854 LOGICAL GOODHEL(NCOMB, 2) 4855 INTEGER NTRY(2) 4856 common/BLOCK_GOODHEL/NTRY,GOODHEL 4857 write(stream_id,*) GOODHEL 4858 return 4859 end 4860 4861 4862 subroutine read_good_hel(stream_id) 4863 implicit none 4864 include 'genps.inc' 4865 integer stream_id 4866 INTEGER NCOMB 4867 PARAMETER ( NCOMB=%(ncomb)d) 4868 LOGICAL GOODHEL(NCOMB, 2) 4869 INTEGER NTRY(2) 4870 common/BLOCK_GOODHEL/NTRY,GOODHEL 4871 read(stream_id,*) GOODHEL 4872 NTRY(1) = MAXTRIES + 1 4873 NTRY(2) = MAXTRIES + 1 4874 return 4875 end 4876 4877 subroutine init_good_hel() 4878 implicit none 4879 INTEGER NCOMB 4880 PARAMETER ( NCOMB=%(ncomb)d) 4881 LOGICAL GOODHEL(NCOMB, 2) 4882 INTEGER NTRY(2) 4883 INTEGER I 4884 4885 do i=1,NCOMB 4886 GOODHEL(I,1) = .false. 4887 GOODHEL(I,2) = .false. 4888 enddo 4889 NTRY(1) = 0 4890 NTRY(2) = 0 4891 end 4892 4893 integer function get_maxsproc() 4894 implicit none 4895 include 'maxamps.inc' 4896 4897 get_maxsproc = maxsproc 4898 return 4899 end 4900 4901 """ % convert 4902 4903 return output
4904 4905 4906 4907 #=========================================================================== 4908 # write_configs_file 4909 #===========================================================================
4910 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
4911 """Write the configs.inc file with topology information for a 4912 subprocess group. Use the first subprocess with a diagram for each 4913 configuration.""" 4914 4915 matrix_elements = subproc_group.get('matrix_elements') 4916 model = matrix_elements[0].get('processes')[0].get('model') 4917 4918 diagrams = [] 4919 config_numbers = [] 4920 for iconfig, config in enumerate(diagrams_for_config): 4921 # Check if any diagrams correspond to this config 4922 if set(config) == set([0]): 4923 continue 4924 subproc_diags = [] 4925 for s,d in enumerate(config): 4926 if d: 4927 subproc_diags.append(matrix_elements[s].\ 4928 get('diagrams')[d-1]) 4929 else: 4930 subproc_diags.append(None) 4931 diagrams.append(subproc_diags) 4932 config_numbers.append(iconfig + 1) 4933 4934 # Extract number of external particles 4935 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 4936 4937 return len(diagrams), \ 4938 self.write_configs_file_from_diagrams(writer, diagrams, 4939 config_numbers, 4940 nexternal, ninitial, 4941 model)
4942 4943 #=========================================================================== 4944 # write_run_configs_file 4945 #===========================================================================
4946 - def write_run_config_file(self, writer):
4947 """Write the run_configs.inc file for MadEvent""" 4948 4949 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4950 if self.proc_characteristic['loop_induced']: 4951 job_per_chan = 1 4952 else: 4953 job_per_chan = 2 4954 text = open(path).read() % {'chanperjob':job_per_chan} 4955 writer.write(text) 4956 return True
4957 4958 4959 #=========================================================================== 4960 # write_leshouche_file 4961 #===========================================================================
4962 - def write_leshouche_file(self, writer, subproc_group):
4963 """Write the leshouche.inc file for MG4""" 4964 4965 all_lines = [] 4966 4967 for iproc, matrix_element in \ 4968 enumerate(subproc_group.get('matrix_elements')): 4969 all_lines.extend(self.get_leshouche_lines(matrix_element, 4970 iproc)) 4971 4972 # Write the file 4973 writer.writelines(all_lines) 4974 4975 return True
4976 4977 4978
4979 - def finalize_v4_directory(self,*args, **opts):
4980 """Finalize the v4 directory and record that this output uses grouped matrix elements.""" 4981 4982 4983 super(ProcessExporterFortranMEGroup, self).finalize_v4_directory(*args, **opts) 4984 # ensure that the grouping information is set to the correct value 4985 self.proc_characteristic['grouped_matrix'] = True
4986 4987 4988 #=============================================================================== 4989 # UFO_model_to_mg4 4990 #=============================================================================== 4991 4992 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
4993 4994 -class UFO_model_to_mg4(object):
4995 """ A converter of the UFO-MG5 Model to the MG4 format """ 4996 4997 # The list below shows the only variables the user is allowed to change by 4998 # himself for each PS point. If he changes any other, then calling 4999 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5000 # correctly account for the change. 5001 PS_dependent_key = ['aS','MU_R'] 5002 mp_complex_format = 'complex*32' 5003 mp_real_format = 'real*16' 5004 # Warning, it is crucial none of the couplings/parameters of the model 5005 # starts with this prefix. I should add a check for this. 5006 # You can change it as the global variable to check_param_card.ParamCard 5007 mp_prefix = check_param_card.ParamCard.mp_prefix 5008
5009 - def __init__(self, model, output_path, opt=None):
5010 """ initialization of the objects """ 5011 5012 self.model = model 5013 self.model_name = model['name'] 5014 self.dir_path = output_path 5015 if opt: 5016 self.opt = opt 5017 else: 5018 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5019 'loop_induced': False} 5020 5021 self.coups_dep = [] # (name, expression, type) 5022 self.coups_indep = [] # (name, expression, type) 5023 self.params_dep = [] # (name, expression, type) 5024 self.params_indep = [] # (name, expression, type) 5025 self.params_ext = [] # external parameter 5026 self.p_to_f = parsers.UFOExpressionParserFortran() 5027 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5028
5029 - def pass_parameter_to_case_insensitive(self):
5030 """modify the parameter if some of them are identical up to the case""" 5031 5032 lower_dict={} 5033 duplicate = set() 5034 keys = self.model['parameters'].keys() 5035 for key in keys: 5036 for param in self.model['parameters'][key]: 5037 lower_name = param.name.lower() 5038 if not lower_name: 5039 continue 5040 try: 5041 lower_dict[lower_name].append(param) 5042 except KeyError,error: 5043 lower_dict[lower_name] = [param] 5044 else: 5045 duplicate.add(lower_name) 5046 logger.debug('%s is define both as lower case and upper case.' 5047 % lower_name) 5048 if not duplicate: 5049 return 5050 5051 re_expr = r'''\b(%s)\b''' 5052 to_change = [] 5053 change={} 5054 for value in duplicate: 5055 for i, var in enumerate(lower_dict[value]): 5056 to_change.append(var.name) 5057 new_name = '%s%s' % (var.name.lower(), 5058 ('__%d'%(i+1) if i>0 else '')) 5059 change[var.name] = new_name 5060 var.name = new_name 5061 5062 # Apply the modification to the map_CTcoup_CTparam of the model 5063 # if it has one (giving for each coupling the CT parameters whcih 5064 # are necessary and which should be exported to the model. 5065 if hasattr(self.model,'map_CTcoup_CTparam'): 5066 for coup, ctparams in self.model.map_CTcoup_CTparam: 5067 for i, ctparam in enumerate(ctparams): 5068 try: 5069 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5070 except KeyError: 5071 pass 5072 5073 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5074 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5075 5076 # change parameters 5077 for key in keys: 5078 if key == ('external',): 5079 continue 5080 for param in self.model['parameters'][key]: 5081 param.expr = rep_pattern.sub(replace, param.expr) 5082 5083 # change couplings 5084 for key in self.model['couplings'].keys(): 5085 for coup in self.model['couplings'][key]: 5086 coup.expr = rep_pattern.sub(replace, coup.expr) 5087 5088 # change mass/width 5089 for part in self.model['particles']: 5090 if str(part.get('mass')) in to_change: 5091 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5092 if str(part.get('width')) in to_change: 5093 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5094
5095 - def refactorize(self, wanted_couplings = []):
5096 """modify the couplings to fit with MG4 convention """ 5097 5098 # Keep only separation in alphaS 5099 keys = self.model['parameters'].keys() 5100 keys.sort(key=len) 5101 for key in keys: 5102 to_add = [o for o in self.model['parameters'][key] if o.name] 5103 5104 if key == ('external',): 5105 self.params_ext += to_add 5106 elif any([(k in key) for k in self.PS_dependent_key]): 5107 self.params_dep += to_add 5108 else: 5109 self.params_indep += to_add 5110 # same for couplings 5111 keys = self.model['couplings'].keys() 5112 keys.sort(key=len) 5113 for key, coup_list in self.model['couplings'].items(): 5114 if any([(k in key) for k in self.PS_dependent_key]): 5115 self.coups_dep += [c for c in coup_list if 5116 (not wanted_couplings or c.name in \ 5117 wanted_couplings)] 5118 else: 5119 self.coups_indep += [c for c in coup_list if 5120 (not wanted_couplings or c.name in \ 5121 wanted_couplings)] 5122 5123 # MG4 use G and not aS as it basic object for alphas related computation 5124 #Pass G in the independant list 5125 if 'G' in self.params_dep: 5126 index = self.params_dep.index('G') 5127 G = self.params_dep.pop(index) 5128 # G.expr = '2*cmath.sqrt(as*pi)' 5129 # self.params_indep.insert(0, self.params_dep.pop(index)) 5130 # No need to add it if not defined 5131 5132 if 'aS' not in self.params_ext: 5133 logger.critical('aS not define as external parameter adding it!') 5134 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5135 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5136 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5137 - def build(self, wanted_couplings = [], full=True):
5138 """modify the couplings to fit with MG4 convention and creates all the 5139 different files""" 5140 5141 self.pass_parameter_to_case_insensitive() 5142 self.refactorize(wanted_couplings) 5143 5144 # write the files 5145 if full: 5146 if wanted_couplings: 5147 # extract the wanted ct parameters 5148 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5149 self.write_all()
5150 5151
5152 - def open(self, name, comment='c', format='default'):
5153 """ Open the file name in the correct directory and with a valid 5154 header.""" 5155 5156 file_path = pjoin(self.dir_path, name) 5157 5158 if format == 'fortran': 5159 fsock = writers.FortranWriter(file_path, 'w') 5160 else: 5161 fsock = open(file_path, 'w') 5162 5163 file.writelines(fsock, comment * 77 + '\n') 5164 file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \ 5165 {'comment': comment + (6 - len(comment)) * ' '}) 5166 file.writelines(fsock, comment * 77 + '\n\n') 5167 return fsock
5168 5169
5170 - def write_all(self):
5171 """ write all the files """ 5172 #write the part related to the external parameter 5173 self.create_ident_card() 5174 self.create_param_read() 5175 5176 #write the definition of the parameter 5177 self.create_input() 5178 self.create_intparam_def(dp=True,mp=False) 5179 if self.opt['mp']: 5180 self.create_intparam_def(dp=False,mp=True) 5181 5182 5183 # definition of the coupling. 5184 self.create_actualize_mp_ext_param_inc() 5185 self.create_coupl_inc() 5186 self.create_write_couplings() 5187 self.create_couplings() 5188 5189 # the makefile 5190 self.create_makeinc() 5191 self.create_param_write() 5192 5193 # The model functions 5194 self.create_model_functions_inc() 5195 self.create_model_functions_def() 5196 5197 # The param_card.dat 5198 self.create_param_card() 5199 5200 5201 # All the standard files 5202 self.copy_standard_file()
5203 5204 ############################################################################ 5205 ## ROUTINE CREATING THE FILES ############################################ 5206 ############################################################################ 5207
5208 - def copy_standard_file(self):
5209 """Copy the standard files for the fortran model.""" 5210 5211 5212 #copy the library files 5213 file_to_link = ['formats.inc','printout.f', \ 5214 'rw_para.f', 'testprog.f'] 5215 5216 for filename in file_to_link: 5217 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5218 self.dir_path) 5219 5220 file = open(os.path.join(MG5DIR,\ 5221 'models/template_files/fortran/rw_para.f')).read() 5222 5223 includes=["include \'coupl.inc\'","include \'input.inc\'", 5224 "include \'model_functions.inc\'"] 5225 if self.opt['mp']: 5226 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5227 # In standalone and madloop we do no use the compiled param card but 5228 # still parse the .dat one so we must load it. 5229 if self.opt['loop_induced']: 5230 #loop induced follow MadEvent way to handle the card. 5231 load_card = '' 5232 lha_read_filename='lha_read.f' 5233 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5234 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5235 lha_read_filename='lha_read_mp.f' 5236 elif self.opt['export_format'].startswith('standalone') or self.opt['export_format'] in ['madweight']\ 5237 or self.opt['export_format'].startswith('matchbox'): 5238 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5239 lha_read_filename='lha_read.f' 5240 else: 5241 load_card = '' 5242 lha_read_filename='lha_read.f' 5243 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5244 os.path.join(self.dir_path,'lha_read.f')) 5245 5246 file=file%{'includes':'\n '.join(includes), 5247 'load_card':load_card} 5248 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5249 writer.writelines(file) 5250 writer.close() 5251 5252 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5253 or self.opt['loop_induced']: 5254 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5255 self.dir_path + '/makefile') 5256 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5257 path = pjoin(self.dir_path, 'makefile') 5258 text = open(path).read() 5259 text = text.replace('madevent','aMCatNLO') 5260 open(path, 'w').writelines(text) 5261 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5262 'madloop','madloop_optimized', 'standalone_rw', 'madweight','matchbox','madloop_matchbox']: 5263 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5264 self.dir_path + '/makefile') 5265 #elif self.opt['export_format'] in []: 5266 #pass 5267 else: 5268 raise MadGraph5Error('Unknown format')
5269
5270 - def create_coupl_inc(self):
5271 """ write coupling.inc """ 5272 5273 fsock = self.open('coupl.inc', format='fortran') 5274 if self.opt['mp']: 5275 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5276 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5277 format='fortran') 5278 5279 # Write header 5280 header = """double precision G 5281 common/strong/ G 5282 5283 double complex gal(2) 5284 common/weak/ gal 5285 5286 double precision MU_R 5287 common/rscale/ MU_R 5288 5289 double precision Nf 5290 parameter(Nf=%d) 5291 """ % self.model.get_nflav() 5292 5293 fsock.writelines(header) 5294 5295 if self.opt['mp']: 5296 header = """%(real_mp_format)s %(mp_prefix)sG 5297 common/MP_strong/ %(mp_prefix)sG 5298 5299 %(complex_mp_format)s %(mp_prefix)sgal(2) 5300 common/MP_weak/ %(mp_prefix)sgal 5301 5302 %(complex_mp_format)s %(mp_prefix)sMU_R 5303 common/MP_rscale/ %(mp_prefix)sMU_R 5304 5305 """ 5306 5307 5308 5309 5310 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5311 'complex_mp_format':self.mp_complex_format, 5312 'mp_prefix':self.mp_prefix}) 5313 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5314 'complex_mp_format':self.mp_complex_format, 5315 'mp_prefix':''}) 5316 5317 # Write the Mass definition/ common block 5318 masses = set() 5319 widths = set() 5320 if self.opt['complex_mass']: 5321 complex_mass = set() 5322 5323 for particle in self.model.get('particles'): 5324 #find masses 5325 one_mass = particle.get('mass') 5326 if one_mass.lower() != 'zero': 5327 masses.add(one_mass) 5328 5329 # find width 5330 one_width = particle.get('width') 5331 if one_width.lower() != 'zero': 5332 widths.add(one_width) 5333 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5334 complex_mass.add('CMASS_%s' % one_mass) 5335 5336 if masses: 5337 fsock.writelines('double precision '+','.join(masses)+'\n') 5338 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5339 if self.opt['mp']: 5340 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5341 ','.join(masses)+'\n') 5342 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5343 ','.join(masses)+'\n\n') 5344 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5345 self.mp_prefix+m for m in masses])+'\n') 5346 mp_fsock.writelines('common/MP_masses/ '+\ 5347 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5348 5349 if widths: 5350 fsock.writelines('double precision '+','.join(widths)+'\n') 5351 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5352 if self.opt['mp']: 5353 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5354 ','.join(widths)+'\n') 5355 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5356 ','.join(widths)+'\n\n') 5357 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5358 self.mp_prefix+w for w in widths])+'\n') 5359 mp_fsock.writelines('common/MP_widths/ '+\ 5360 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5361 5362 # Write the Couplings 5363 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5364 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5365 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5366 if self.opt['mp']: 5367 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5368 ','.join(coupling_list)+'\n') 5369 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5370 ','.join(coupling_list)+'\n\n') 5371 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5372 self.mp_prefix+c for c in coupling_list])+'\n') 5373 mp_fsock.writelines('common/MP_couplings/ '+\ 
5374 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5375 5376 # Write complex mass for complex mass scheme (if activated) 5377 if self.opt['complex_mass'] and complex_mass: 5378 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5379 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5380 if self.opt['mp']: 5381 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5382 ','.join(complex_mass)+'\n') 5383 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5384 ','.join(complex_mass)+'\n\n') 5385 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5386 self.mp_prefix+cm for cm in complex_mass])+'\n') 5387 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5388 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5389
5390 - def create_write_couplings(self):
5391 """ write the file coupl_write.inc """ 5392 5393 fsock = self.open('coupl_write.inc', format='fortran') 5394 5395 fsock.writelines("""write(*,*) ' Couplings of %s' 5396 write(*,*) ' ---------------------------------' 5397 write(*,*) ' '""" % self.model_name) 5398 def format(coupl): 5399 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5400 5401 # Write the Couplings 5402 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5403 fsock.writelines('\n'.join(lines)) 5404 5405
5406 - def create_input(self):
5407 """create input.inc containing the definition of the parameters""" 5408 5409 fsock = self.open('input.inc', format='fortran') 5410 if self.opt['mp']: 5411 mp_fsock = self.open('mp_input.inc', format='fortran') 5412 5413 #find mass/ width since they are already define 5414 already_def = set() 5415 for particle in self.model.get('particles'): 5416 already_def.add(particle.get('mass').lower()) 5417 already_def.add(particle.get('width').lower()) 5418 if self.opt['complex_mass']: 5419 already_def.add('cmass_%s' % particle.get('mass').lower()) 5420 5421 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5422 name.lower() not in already_def 5423 5424 real_parameters = [param.name for param in self.params_dep + 5425 self.params_indep if param.type == 'real' 5426 and is_valid(param.name)] 5427 5428 real_parameters += [param.name for param in self.params_ext 5429 if param.type == 'real'and 5430 is_valid(param.name)] 5431 5432 # check the parameter is a CT parameter or not 5433 # if yes, just use the needed ones 5434 real_parameters = [param for param in real_parameters \ 5435 if self.check_needed_param(param)] 5436 5437 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5438 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5439 if self.opt['mp']: 5440 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5441 self.mp_prefix+p for p in real_parameters])+'\n') 5442 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5443 self.mp_prefix+p for p in real_parameters])+'\n\n') 5444 5445 complex_parameters = [param.name for param in self.params_dep + 5446 self.params_indep if param.type == 'complex' and 5447 is_valid(param.name)] 5448 5449 # check the parameter is a CT parameter or not 5450 # if yes, just use the needed ones 5451 complex_parameters = [param for param in complex_parameters \ 5452 if self.check_needed_param(param)] 5453 5454 if complex_parameters: 5455 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 5456 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5457 if self.opt['mp']: 5458 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5459 self.mp_prefix+p for p in complex_parameters])+'\n') 5460 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5461 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5462
5463 - def check_needed_param(self, param):
5464 """ Returns whether the parameter in argument is needed for this 5465 specific computation or not.""" 5466 5467 # If this is a leading order model or if there was no CT parameter 5468 # employed in this NLO model, one can directly return that the 5469 # parameter is needed since only CTParameters are filtered. 5470 if not hasattr(self, 'allCTparameters') or \ 5471 self.allCTparameters is None or self.usedCTparameters is None or \ 5472 len(self.allCTparameters)==0: 5473 return True 5474 5475 # We must allow the conjugate shorthand for the complex parameter as 5476 # well so we check wether either the parameter name or its name with 5477 # 'conjg__' substituted with '' is present in the list. 5478 # This is acceptable even if some parameter had an original name 5479 # including 'conjg__' in it, because at worst we export a parameter 5480 # was not needed. 5481 param = param.lower() 5482 cjg_param = param.replace('conjg__','',1) 5483 5484 # First make sure it is a CTparameter 5485 if param not in self.allCTparameters and \ 5486 cjg_param not in self.allCTparameters: 5487 return True 5488 5489 # Now check if it is in the list of CTparameters actually used 5490 return (param in self.usedCTparameters or \ 5491 cjg_param in self.usedCTparameters)
5492
5493 - def extract_needed_CTparam(self,wanted_couplings=[]):
5494 """ Extract what are the needed CT parameters given the wanted_couplings""" 5495 5496 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5497 # Setting these lists to none wil disable the filtering in 5498 # check_needed_param 5499 self.allCTparameters = None 5500 self.usedCTparameters = None 5501 return 5502 5503 # All CTparameters appearin in all CT couplings 5504 allCTparameters=self.model.map_CTcoup_CTparam.values() 5505 # Define in this class the list of all CT parameters 5506 self.allCTparameters=list(\ 5507 set(itertools.chain.from_iterable(allCTparameters))) 5508 5509 # All used CT couplings 5510 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5511 allUsedCTCouplings = [coupl for coupl in 5512 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5513 5514 # Now define the list of all CT parameters that are actually used 5515 self.usedCTparameters=list(\ 5516 set(itertools.chain.from_iterable([ 5517 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5518 ]))) 5519 5520 # Now at last, make these list case insensitive 5521 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5522 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5523
5524 - def create_intparam_def(self, dp=True, mp=False):
5525 """ create intparam_definition.inc setting the internal parameters. 5526 Output the double precision and/or the multiple precision parameters 5527 depending on the parameters dp and mp. If mp only, then the file names 5528 get the 'mp_' prefix. 5529 """ 5530 5531 fsock = self.open('%sintparam_definition.inc'% 5532 ('mp_' if mp and not dp else ''), format='fortran') 5533 5534 fsock.write_comments(\ 5535 "Parameters that should not be recomputed event by event.\n") 5536 fsock.writelines("if(readlha) then\n") 5537 if dp: 5538 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 5539 if mp: 5540 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 5541 5542 for param in self.params_indep: 5543 if param.name == 'ZERO': 5544 continue 5545 # check whether the parameter is a CT parameter 5546 # if yes,just used the needed ones 5547 if not self.check_needed_param(param.name): 5548 continue 5549 if dp: 5550 fsock.writelines("%s = %s\n" % (param.name, 5551 self.p_to_f.parse(param.expr))) 5552 if mp: 5553 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5554 self.mp_p_to_f.parse(param.expr))) 5555 5556 fsock.writelines('endif') 5557 5558 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 5559 if dp: 5560 fsock.writelines("aS = G**2/4/pi\n") 5561 if mp: 5562 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 5563 for param in self.params_dep: 5564 # check whether the parameter is a CT parameter 5565 # if yes,just used the needed ones 5566 if not self.check_needed_param(param.name): 5567 continue 5568 if dp: 5569 fsock.writelines("%s = %s\n" % (param.name, 5570 self.p_to_f.parse(param.expr))) 5571 elif mp: 5572 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5573 self.mp_p_to_f.parse(param.expr))) 5574 5575 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 5576 if ('aEWM1',) in self.model['parameters']: 5577 if dp: 5578 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 5579 gal(2) = 1d0 5580 """) 5581 elif mp: 5582 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 5583 %(mp_prefix)sgal(2) = 1d0 5584 """ %{'mp_prefix':self.mp_prefix}) 5585 pass 5586 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 5587 elif ('Gf',) in self.model['parameters']: 5588 if dp: 5589 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf) 5590 gal(2) = 1d0 5591 """) 5592 elif mp: 5593 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 5594 %(mp_prefix)sgal(2) = 1d0 5595 """ %{'mp_prefix':self.mp_prefix}) 5596 pass 5597 else: 5598 if dp: 5599 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 5600 fsock.writelines(""" gal(1) = 1d0 5601 gal(2) = 1d0 5602 """) 5603 elif mp: 5604 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 5605 %(mp_prefix)sgal(2) = 1e0_16 5606 """%{'mp_prefix':self.mp_prefix})
5607 5608
5609 - def create_couplings(self):
5610 """ create couplings.f and all couplingsX.f """ 5611 5612 nb_def_by_file = 25 5613 5614 self.create_couplings_main(nb_def_by_file) 5615 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5616 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5617 5618 for i in range(nb_coup_indep): 5619 # For the independent couplings, we compute the double and multiple 5620 # precision ones together 5621 data = self.coups_indep[nb_def_by_file * i: 5622 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5623 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5624 5625 for i in range(nb_coup_dep): 5626 # For the dependent couplings, we compute the double and multiple 5627 # precision ones in separate subroutines. 5628 data = self.coups_dep[nb_def_by_file * i: 5629 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5630 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5631 dp=True,mp=False) 5632 if self.opt['mp']: 5633 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5634 dp=False,mp=True)
5635 5636
5637 - def create_couplings_main(self, nb_def_by_file=25):
5638 """ create couplings.f """ 5639 5640 fsock = self.open('couplings.f', format='fortran') 5641 5642 fsock.writelines("""subroutine coup() 5643 5644 implicit none 5645 double precision PI, ZERO 5646 logical READLHA 5647 parameter (PI=3.141592653589793d0) 5648 parameter (ZERO=0d0) 5649 include \'model_functions.inc\'""") 5650 if self.opt['mp']: 5651 fsock.writelines("""%s MP__PI, MP__ZERO 5652 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5653 parameter (MP__ZERO=0e0_16) 5654 include \'mp_input.inc\' 5655 include \'mp_coupl.inc\' 5656 """%self.mp_real_format) 5657 fsock.writelines("""include \'input.inc\' 5658 include \'coupl.inc\' 5659 READLHA = .true. 5660 include \'intparam_definition.inc\'""") 5661 if self.opt['mp']: 5662 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 5663 5664 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5665 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5666 5667 fsock.writelines('\n'.join(\ 5668 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 5669 5670 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5671 5672 fsock.writelines('\n'.join(\ 5673 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5674 for i in range(nb_coup_dep)])) 5675 if self.opt['mp']: 5676 fsock.writelines('\n'.join(\ 5677 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5678 for i in range(nb_coup_dep)])) 5679 fsock.writelines('''\n return \n end\n''') 5680 5681 fsock.writelines("""subroutine update_as_param() 5682 5683 implicit none 5684 double precision PI, ZERO 5685 logical READLHA 5686 parameter (PI=3.141592653589793d0) 5687 parameter (ZERO=0d0) 5688 include \'model_functions.inc\'""") 5689 fsock.writelines("""include \'input.inc\' 5690 include \'coupl.inc\' 5691 READLHA = .false.""") 5692 fsock.writelines(""" 5693 include \'intparam_definition.inc\'\n 5694 """) 5695 5696 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5697 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5698 5699 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5700 5701 fsock.writelines('\n'.join(\ 5702 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5703 for i in range(nb_coup_dep)])) 5704 fsock.writelines('''\n return \n end\n''') 5705 5706 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 5707 5708 implicit none 5709 double precision PI 5710 parameter (PI=3.141592653589793d0) 5711 double precision mu_r2, as2 5712 include \'model_functions.inc\'""") 5713 fsock.writelines("""include \'input.inc\' 5714 include \'coupl.inc\'""") 5715 fsock.writelines(""" 5716 MU_R = mu_r2 5717 G = SQRT(4.0d0*PI*AS2) 5718 AS = as2 5719 5720 CALL UPDATE_AS_PARAM() 5721 """) 5722 fsock.writelines('''\n return \n end\n''') 5723 5724 if self.opt['mp']: 5725 fsock.writelines("""subroutine mp_update_as_param() 5726 5727 implicit none 5728 logical READLHA 5729 include \'model_functions.inc\'""") 5730 fsock.writelines("""%s MP__PI, MP__ZERO 5731 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5732 parameter (MP__ZERO=0e0_16) 5733 include \'mp_input.inc\' 5734 include \'mp_coupl.inc\' 5735 """%self.mp_real_format) 5736 fsock.writelines("""include \'input.inc\' 5737 include \'coupl.inc\' 5738 include \'actualize_mp_ext_params.inc\' 5739 READLHA = .false. 
5740 include \'mp_intparam_definition.inc\'\n 5741 """) 5742 5743 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5744 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5745 5746 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5747 5748 fsock.writelines('\n'.join(\ 5749 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5750 for i in range(nb_coup_dep)])) 5751 fsock.writelines('''\n return \n end\n''')
5752
5753 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
5754 """ create couplings[nb_file].f containing information coming from data. 5755 Outputs the computation of the double precision and/or the multiple 5756 precision couplings depending on the parameters dp and mp. 5757 If mp is True and dp is False, then the prefix 'MP_' is appended to the 5758 filename and subroutine name. 5759 """ 5760 5761 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 5762 nb_file), format='fortran') 5763 fsock.writelines("""subroutine %scoup%s() 5764 5765 implicit none 5766 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 5767 if dp: 5768 fsock.writelines(""" 5769 double precision PI, ZERO 5770 parameter (PI=3.141592653589793d0) 5771 parameter (ZERO=0d0) 5772 include 'input.inc' 5773 include 'coupl.inc'""") 5774 if mp: 5775 fsock.writelines("""%s MP__PI, MP__ZERO 5776 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5777 parameter (MP__ZERO=0e0_16) 5778 include \'mp_input.inc\' 5779 include \'mp_coupl.inc\' 5780 """%self.mp_real_format) 5781 5782 for coupling in data: 5783 if dp: 5784 fsock.writelines('%s = %s' % (coupling.name, 5785 self.p_to_f.parse(coupling.expr))) 5786 if mp: 5787 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 5788 self.mp_p_to_f.parse(coupling.expr))) 5789 fsock.writelines('end')
5790
5791 - def create_model_functions_inc(self):
5792 """ Create model_functions.inc which contains the various declarations 5793 of auxiliary functions which might be used in the couplings expressions 5794 """ 5795 5796 additional_fct = [] 5797 # check for functions define in the UFO model 5798 ufo_fct = self.model.get('functions') 5799 if ufo_fct: 5800 for fct in ufo_fct: 5801 # already handle by default 5802 if fct.name not in ["complexconjugate", "re", "im", "sec", 5803 "csc", "asec", "acsc", "theta_function", "cond", 5804 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]: 5805 additional_fct.append(fct.name) 5806 5807 5808 fsock = self.open('model_functions.inc', format='fortran') 5809 fsock.writelines("""double complex cond 5810 double complex condif 5811 double complex reglog 5812 double complex reglogp 5813 double complex reglogm 5814 double complex recms 5815 double complex arg 5816 %s 5817 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 5818 5819 5820 if self.opt['mp']: 5821 fsock.writelines("""%(complex_mp_format)s mp_cond 5822 %(complex_mp_format)s mp_condif 5823 %(complex_mp_format)s mp_reglog 5824 %(complex_mp_format)s mp_reglogp 5825 %(complex_mp_format)s mp_reglogm 5826 %(complex_mp_format)s mp_recms 5827 %(complex_mp_format)s mp_arg 5828 %(additional)s 5829 """ %\ 5830 {"additional": "\n".join([" %s %s" % (self.mp_complex_format, i) for i in additional_fct]), 5831 'complex_mp_format':self.mp_complex_format 5832 })
5833
5834 - def create_model_functions_def(self):
5835 """ Create model_functions.f which contains the various definitions 5836 of auxiliary functions which might be used in the couplings expressions 5837 Add the functions.f functions for formfactors support 5838 """ 5839 5840 fsock = self.open('model_functions.f', format='fortran') 5841 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 5842 implicit none 5843 double complex condition,truecase,falsecase 5844 if(condition.eq.(0.0d0,0.0d0)) then 5845 cond=truecase 5846 else 5847 cond=falsecase 5848 endif 5849 end 5850 5851 double complex function condif(condition,truecase,falsecase) 5852 implicit none 5853 logical condition 5854 double complex truecase,falsecase 5855 if(condition) then 5856 condif=truecase 5857 else 5858 condif=falsecase 5859 endif 5860 end 5861 5862 double complex function recms(condition,expr) 5863 implicit none 5864 logical condition 5865 double complex expr 5866 if(condition)then 5867 recms=expr 5868 else 5869 recms=dcmplx(dble(expr)) 5870 endif 5871 end 5872 5873 double complex function reglog(arg) 5874 implicit none 5875 double complex TWOPII 5876 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5877 double complex arg 5878 if(arg.eq.(0.0d0,0.0d0)) then 5879 reglog=(0.0d0,0.0d0) 5880 else 5881 reglog=log(arg) 5882 endif 5883 end 5884 5885 double complex function reglogp(arg) 5886 implicit none 5887 double complex TWOPII 5888 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5889 double complex arg 5890 if(arg.eq.(0.0d0,0.0d0))then 5891 reglogp=(0.0d0,0.0d0) 5892 else 5893 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 5894 reglogp=log(arg) + TWOPII 5895 else 5896 reglogp=log(arg) 5897 endif 5898 endif 5899 end 5900 5901 double complex function reglogm(arg) 5902 implicit none 5903 double complex TWOPII 5904 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5905 double complex arg 5906 if(arg.eq.(0.0d0,0.0d0))then 5907 reglogm=(0.0d0,0.0d0) 5908 else 5909 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 5910 reglogm=log(arg) - TWOPII 5911 else 5912 reglogm=log(arg) 5913 endif 5914 endif 5915 end 5916 5917 double complex function arg(comnum) 5918 implicit none 5919 double complex comnum 5920 double complex iim 5921 iim = (0.0d0,1.0d0) 5922 if(comnum.eq.(0.0d0,0.0d0)) then 5923 arg=(0.0d0,0.0d0) 5924 else 5925 arg=log(comnum/abs(comnum))/iim 5926 endif 5927 end""") 5928 if self.opt['mp']: 5929 fsock.writelines(""" 5930 5931 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 5932 implicit none 5933 %(complex_mp_format)s condition,truecase,falsecase 5934 if(condition.eq.(0.0e0_16,0.0e0_16)) then 5935 mp_cond=truecase 5936 else 5937 mp_cond=falsecase 5938 endif 5939 end 5940 5941 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 5942 implicit none 5943 logical condition 5944 %(complex_mp_format)s truecase,falsecase 5945 if(condition) then 5946 mp_condif=truecase 5947 else 5948 mp_condif=falsecase 5949 endif 5950 end 5951 5952 %(complex_mp_format)s function mp_recms(condition,expr) 5953 implicit none 5954 logical condition 5955 %(complex_mp_format)s expr 5956 if(condition)then 5957 mp_recms=expr 5958 else 5959 mp_recms=cmplx(real(expr),kind=16) 5960 endif 5961 end 5962 5963 %(complex_mp_format)s function mp_reglog(arg) 5964 implicit none 5965 %(complex_mp_format)s TWOPII 5966 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5967 %(complex_mp_format)s arg 5968 if(arg.eq.(0.0e0_16,0.0e0_16)) then 5969 mp_reglog=(0.0e0_16,0.0e0_16) 
5970 else 5971 mp_reglog=log(arg) 5972 endif 5973 end 5974 5975 %(complex_mp_format)s function mp_reglogp(arg) 5976 implicit none 5977 %(complex_mp_format)s TWOPII 5978 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5979 %(complex_mp_format)s arg 5980 if(arg.eq.(0.0e0_16,0.0e0_16))then 5981 mp_reglogp=(0.0e0_16,0.0e0_16) 5982 else 5983 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 5984 mp_reglogp=log(arg) + TWOPII 5985 else 5986 mp_reglogp=log(arg) 5987 endif 5988 endif 5989 end 5990 5991 %(complex_mp_format)s function mp_reglogm(arg) 5992 implicit none 5993 %(complex_mp_format)s TWOPII 5994 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5995 %(complex_mp_format)s arg 5996 if(arg.eq.(0.0e0_16,0.0e0_16))then 5997 mp_reglogm=(0.0e0_16,0.0e0_16) 5998 else 5999 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6000 mp_reglogm=log(arg) - TWOPII 6001 else 6002 mp_reglogm=log(arg) 6003 endif 6004 endif 6005 end 6006 6007 %(complex_mp_format)s function mp_arg(comnum) 6008 implicit none 6009 %(complex_mp_format)s comnum 6010 %(complex_mp_format)s imm 6011 imm = (0.0e0_16,1.0e0_16) 6012 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6013 mp_arg=(0.0e0_16,0.0e0_16) 6014 else 6015 mp_arg=log(comnum/abs(comnum))/imm 6016 endif 6017 end"""%{'complex_mp_format':self.mp_complex_format}) 6018 6019 6020 #check for the file functions.f 6021 model_path = self.model.get('modelpath') 6022 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6023 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6024 input = pjoin(model_path,'Fortran','functions.f') 6025 file.writelines(fsock, open(input).read()) 6026 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6027 6028 # check for functions define in the UFO model 6029 ufo_fct = self.model.get('functions') 6030 if ufo_fct: 6031 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6032 for fct in ufo_fct: 6033 # already handle by default 6034 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6035 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg"]: 6036 ufo_fct_template = """ 6037 double complex function %(name)s(%(args)s) 6038 implicit none 6039 double complex %(args)s 6040 %(name)s = %(fct)s 6041 6042 return 6043 end 6044 """ 6045 text = ufo_fct_template % { 6046 'name': fct.name, 6047 'args': ", ".join(fct.arguments), 6048 'fct': self.p_to_f.parse(fct.expr) 6049 } 6050 fsock.writelines(text) 6051 if self.opt['mp']: 6052 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6053 for fct in ufo_fct: 6054 # already handle by default 6055 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6056 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg"]: 6057 ufo_fct_template = """ 6058 %(complex_mp_format)s function mp__%(name)s(mp__%(args)s) 6059 implicit none 6060 %(complex_mp_format)s mp__%(args)s 6061 mp__%(name)s = %(fct)s 6062 6063 return 6064 end 6065 """ 6066 text = ufo_fct_template % { 6067 'name': fct.name, 6068 'args': ", mp__".join(fct.arguments), 6069 'fct': self.mp_p_to_f.parse(fct.expr), 6070 'complex_mp_format': self.mp_complex_format 6071 } 6072 fsock.writelines(text) 6073 6074 6075 6076 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6077 6078 6079
6080 - def create_makeinc(self):
6081 """create makeinc.inc containing the file to compile """ 6082 6083 fsock = self.open('makeinc.inc', comment='#') 6084 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6085 text += ' model_functions.o ' 6086 6087 nb_coup_indep = 1 + len(self.coups_dep) // 25 6088 nb_coup_dep = 1 + len(self.coups_indep) // 25 6089 couplings_files=['couplings%s.o' % (i+1) \ 6090 for i in range(nb_coup_dep + nb_coup_indep) ] 6091 if self.opt['mp']: 6092 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6093 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6094 text += ' '.join(couplings_files) 6095 fsock.writelines(text)
6096
6097 - def create_param_write(self):
6098 """ create param_write """ 6099 6100 fsock = self.open('param_write.inc', format='fortran') 6101 6102 fsock.writelines("""write(*,*) ' External Params' 6103 write(*,*) ' ---------------------------------' 6104 write(*,*) ' '""") 6105 def format(name): 6106 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6107 6108 # Write the external parameter 6109 lines = [format(param.name) for param in self.params_ext] 6110 fsock.writelines('\n'.join(lines)) 6111 6112 fsock.writelines("""write(*,*) ' Internal Params' 6113 write(*,*) ' ---------------------------------' 6114 write(*,*) ' '""") 6115 lines = [format(data.name) for data in self.params_indep 6116 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6117 fsock.writelines('\n'.join(lines)) 6118 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6119 write(*,*) ' ----------------------------------------' 6120 write(*,*) ' '""") 6121 lines = [format(data.name) for data in self.params_dep \ 6122 if self.check_needed_param(data.name)] 6123 6124 fsock.writelines('\n'.join(lines)) 6125 6126 6127
6128 - def create_ident_card(self):
6129 """ create the ident_card.dat """ 6130 6131 def format(parameter): 6132 """return the line for the ident_card corresponding to this parameter""" 6133 colum = [parameter.lhablock.lower()] + \ 6134 [str(value) for value in parameter.lhacode] + \ 6135 [parameter.name] 6136 if not parameter.name: 6137 return '' 6138 return ' '.join(colum)+'\n'
6139 6140 fsock = self.open('ident_card.dat') 6141 6142 external_param = [format(param) for param in self.params_ext] 6143 fsock.writelines('\n'.join(external_param)) 6144
6145 - def create_actualize_mp_ext_param_inc(self):
6146 """ create the actualize_mp_ext_params.inc code """ 6147 6148 # In principle one should actualize all external, but for now, it is 6149 # hardcoded that only AS and MU_R can by dynamically changed by the user 6150 # so that we only update those ones. 6151 # Of course, to be on the safe side, one could decide to update all 6152 # external parameters. 6153 update_params_list=[p for p in self.params_ext if p.name in 6154 self.PS_dependent_key] 6155 6156 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6157 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6158 for param in update_params_list] 6159 # When read_lha is false, it is G which is taken in input and not AS, so 6160 # this is what should be reset here too. 6161 if 'aS' in [param.name for param in update_params_list]: 6162 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6163 6164 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6165 fsock.writelines('\n'.join(res_strings))
6166
6167 - def create_param_read(self):
6168 """create param_read""" 6169 6170 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6171 or self.opt['loop_induced']: 6172 fsock = self.open('param_read.inc', format='fortran') 6173 fsock.writelines(' include \'../param_card.inc\'') 6174 return 6175 6176 def format_line(parameter): 6177 """return the line for the ident_card corresponding to this 6178 parameter""" 6179 template = \ 6180 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6181 % {'name': parameter.name, 6182 'value': self.p_to_f.parse(str(parameter.value.real))} 6183 if self.opt['mp']: 6184 template = template+ \ 6185 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6186 "%(mp_prefix)s%(name)s,%(value)s)") \ 6187 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6188 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6189 return template 6190 6191 fsock = self.open('param_read.inc', format='fortran') 6192 res_strings = [format_line(param) \ 6193 for param in self.params_ext] 6194 6195 # Correct width sign for Majorana particles (where the width 6196 # and mass need to have the same sign) 6197 for particle in self.model.get('particles'): 6198 if particle.is_fermion() and particle.get('self_antipart') and \ 6199 particle.get('width').lower() != 'zero': 6200 6201 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6202 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6203 if self.opt['mp']: 6204 res_strings.append(\ 6205 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6206 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6207 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6208 6209 fsock.writelines('\n'.join(res_strings)) 6210 6211 6212 @staticmethod
6213 - def create_param_card_static(model, output_path, rule_card_path=False, 6214 mssm_convert=True):
6215 """ create the param_card.dat for a givent model --static method-- """ 6216 #1. Check if a default param_card is present: 6217 done = False 6218 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6219 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6220 model_path = model.get('modelpath') 6221 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6222 done = True 6223 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6224 output_path) 6225 if not done: 6226 param_writer.ParamCardWriter(model, output_path) 6227 6228 if rule_card_path: 6229 if hasattr(model, 'rule_card'): 6230 model.rule_card.write_file(rule_card_path) 6231 6232 if mssm_convert: 6233 model_name = model.get('name') 6234 # IF MSSM convert the card to SLAH1 6235 if model_name == 'mssm' or model_name.startswith('mssm-'): 6236 import models.check_param_card as translator 6237 # Check the format of the param_card for Pythia and make it correct 6238 if rule_card_path: 6239 translator.make_valid_param_card(output_path, rule_card_path) 6240 translator.convert_to_slha1(output_path)
6241
6242 - def create_param_card(self):
6243 """ create the param_card.dat """ 6244 6245 self.create_param_card_static(self.model, 6246 output_path=pjoin(self.dir_path, 'param_card.dat'), 6247 rule_card_path=pjoin(self.dir_path, 'param_card_rule.dat'), 6248 mssm_convert=True)
6249
6250 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True):
6251 """ Determine which Export_v4 class is required. cmd is the command 6252 interface containing all potential usefull information. 6253 The output_type argument specifies from which context the output 6254 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 6255 and 'default' for tree-level outputs.""" 6256 6257 opt = cmd.options 6258 6259 # First treat the MadLoop5 standalone case 6260 MadLoop_SA_options = {'clean': not noclean, 6261 'complex_mass':cmd.options['complex_mass_scheme'], 6262 'export_format':'madloop', 6263 'mp':True, 6264 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 6265 'cuttools_dir': cmd._cuttools_dir, 6266 'iregi_dir':cmd._iregi_dir, 6267 'pjfry_dir':cmd.options["pjfry"], 6268 'golem_dir':cmd.options["golem"], 6269 'fortran_compiler':cmd.options['fortran_compiler'], 6270 'f2py_compiler':cmd.options['f2py_compiler'], 6271 'output_dependencies':cmd.options['output_dependencies'], 6272 'SubProc_prefix':'P', 6273 'compute_color_flows':cmd.options['loop_color_flows'], 6274 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '' 6275 } 6276 6277 if output_type.startswith('madloop'): 6278 import madgraph.loop.loop_exporters as loop_exporters 6279 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 6280 ExporterClass=None 6281 if not cmd.options['loop_optimized_output']: 6282 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 6283 else: 6284 if output_type == "madloop": 6285 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 6286 MadLoop_SA_options['export_format'] = 'madloop_optimized' 6287 elif output_type == "madloop_matchbox": 6288 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 6289 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 6290 else: 6291 raise Exception, "output_type not recognize %s" % output_type 6292 return ExporterClass(cmd._mgme_dir, cmd._export_dir, MadLoop_SA_options) 6293 else: 6294 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 6295 ' in %s'%str(cmd._mgme_dir)) 6296 6297 # Then treat the aMC@NLO output 6298 elif output_type=='amcatnlo': 6299 import madgraph.iolibs.export_fks as export_fks 6300 ExporterClass=None 6301 amcatnlo_options = dict(opt) 6302 amcatnlo_options.update(MadLoop_SA_options) 6303 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 6304 if not cmd.options['loop_optimized_output']: 6305 logger.info("Writing out the aMC@NLO code") 6306 ExporterClass = export_fks.ProcessExporterFortranFKS 6307 amcatnlo_options['export_format']='FKS5_default' 6308 else: 6309 logger.info("Writing out the aMC@NLO code, using optimized Loops") 6310 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 6311 amcatnlo_options['export_format']='FKS5_optimized' 6312 return ExporterClass(cmd._mgme_dir, cmd._export_dir, amcatnlo_options) 6313 6314 # Then the default tree-level output 6315 elif output_type=='default': 6316 assert group_subprocesses in [True, False] 6317 6318 opt = dict(opt) 6319 opt.update({'clean': not noclean, 6320 'complex_mass': cmd.options['complex_mass_scheme'], 6321 'export_format':cmd._export_format, 6322 'mp': False, 6323 'sa_symmetry':False, 6324 'model': cmd._curr_model.get('name'), 6325 'v5_model': False if cmd._model_v4_path else True }) 6326 6327 format = cmd._export_format #shortcut 6328 6329 if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']: 6330 opt['sa_symmetry'] = True 6331 6332 loop_induced_opt = dict(opt) 6333 
loop_induced_opt.update(MadLoop_SA_options) 6334 loop_induced_opt['export_format'] = 'madloop_optimized' 6335 loop_induced_opt['SubProc_prefix'] = 'PV' 6336 # For loop_induced output with MadEvent, we must have access to the 6337 # color flows. 6338 loop_induced_opt['compute_color_flows'] = True 6339 for key in opt: 6340 if key not in loop_induced_opt: 6341 loop_induced_opt[key] = opt[key] 6342 6343 if format == 'matrix' or format.startswith('standalone'): 6344 return ProcessExporterFortranSA(cmd._mgme_dir, cmd._export_dir, opt, 6345 format=format) 6346 6347 elif format in ['madevent'] and group_subprocesses: 6348 if isinstance(cmd._curr_amps[0], 6349 loop_diagram_generation.LoopAmplitude): 6350 import madgraph.loop.loop_exporters as loop_exporters 6351 return loop_exporters.LoopInducedExporterMEGroup(cmd._mgme_dir, 6352 cmd._export_dir,loop_induced_opt) 6353 else: 6354 return ProcessExporterFortranMEGroup(cmd._mgme_dir, 6355 cmd._export_dir,opt) 6356 elif format in ['madevent']: 6357 if isinstance(cmd._curr_amps[0], 6358 loop_diagram_generation.LoopAmplitude): 6359 import madgraph.loop.loop_exporters as loop_exporters 6360 return loop_exporters.LoopInducedExporterMENoGroup(cmd._mgme_dir, 6361 cmd._export_dir,loop_induced_opt) 6362 else: 6363 return ProcessExporterFortranME(cmd._mgme_dir, 6364 cmd._export_dir,opt) 6365 elif format in ['matchbox']: 6366 return ProcessExporterFortranMatchBox(cmd._mgme_dir, cmd._export_dir,opt) 6367 elif cmd._export_format in ['madweight'] and group_subprocesses: 6368 6369 return ProcessExporterFortranMWGroup(cmd._mgme_dir, cmd._export_dir, 6370 opt) 6371 elif cmd._export_format in ['madweight']: 6372 return ProcessExporterFortranMW(cmd._mgme_dir, cmd._export_dir, opt) 6373 else: 6374 raise Exception, 'Wrong export_v4 format' 6375 else: 6376 raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
6377
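# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the original module.
# It summarizes how ExportV4Factory above dispatches on its arguments; the
# `cmd` object is assumed to be the MG5 command interface that provides the
# attributes used above (cmd.options, cmd._mgme_dir, cmd._export_dir, ...).
#
#   exporter = ExportV4Factory(cmd, noclean=False, output_type='default')
#
#   output_type 'madloop*'  -> loop_exporters.LoopProcess*ExporterFortranSA
#                              (or ...FortranMatchBox for 'madloop_matchbox')
#   output_type 'amcatnlo'  -> export_fks.Process(Optimized)ExporterFortranFKS
#   output_type 'default'   -> one of the tree-level exporters, chosen from
#                              cmd._export_format ('standalone*', 'madevent',
#                              'matchbox', 'madweight') and group_subprocesses.
# ---------------------------------------------------------------------------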
6378 6379 6380 6381 #=============================================================================== 6382 # ProcessExporterFortranMWGroup 6383 #=============================================================================== 6384 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
6385 """Class to take care of exporting a set of matrix elements to 6386 MadEvent subprocess group format.""" 6387 6388 matrix_file = "matrix_madweight_group_v4.inc" 6389 6390 #=========================================================================== 6391 # generate_subprocess_directory_v4 6392 #===========================================================================
6393 - def generate_subprocess_directory_v4(self, subproc_group, 6394 fortran_model, 6395 group_number):
6396 """Generate the Pn directory for a subprocess group in MadEvent, 6397 including the necessary matrix_N.f files, configs.inc and various 6398 other helper files""" 6399 6400 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 6401 raise base_objects.PhysicsObject.PhysicsObjectError,\ 6402 "subproc_group object not SubProcessGroup" 6403 6404 if not self.model: 6405 self.model = subproc_group.get('matrix_elements')[0].\ 6406 get('processes')[0].get('model') 6407 6408 pathdir = os.path.join(self.dir_path, 'SubProcesses') 6409 6410 # Create the directory PN in the specified path 6411 subprocdir = "P%d_%s" % (subproc_group.get('number'), 6412 subproc_group.get('name')) 6413 try: 6414 os.mkdir(pjoin(pathdir, subprocdir)) 6415 except os.error as error: 6416 logger.warning(error.strerror + " " + subprocdir) 6417 6418 6419 logger.info('Creating files in directory %s' % subprocdir) 6420 Ppath = pjoin(pathdir, subprocdir) 6421 6422 # Create the matrix.f files, auto_dsig.f files and all inc files 6423 # for all subprocesses in the group 6424 6425 maxamps = 0 6426 maxflows = 0 6427 tot_calls = 0 6428 6429 matrix_elements = subproc_group.get('matrix_elements') 6430 6431 for ime, matrix_element in \ 6432 enumerate(matrix_elements): 6433 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 6434 calls, ncolor = \ 6435 self.write_matrix_element_v4(writers.FortranWriter(filename), 6436 matrix_element, 6437 fortran_model, 6438 str(ime+1), 6439 subproc_group.get('diagram_maps')[\ 6440 ime]) 6441 6442 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 6443 self.write_auto_dsig_file(writers.FortranWriter(filename), 6444 matrix_element, 6445 str(ime+1)) 6446 6447 # Keep track of needed quantities 6448 tot_calls += int(calls) 6449 maxflows = max(maxflows, ncolor) 6450 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 6451 6452 # Draw diagrams 6453 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 6454 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 6455 get('diagrams'), 6456 filename, 6457 model = \ 6458 matrix_element.get('processes')[0].\ 6459 get('model'), 6460 amplitude=True) 6461 logger.info("Generating Feynman diagrams for " + \ 6462 matrix_element.get('processes')[0].nice_string()) 6463 plot.draw() 6464 6465 # Extract number of external particles 6466 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 6467 6468 # Generate a list of diagrams corresponding to each configuration 6469 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 6470 # If a subprocess has no diagrams for this config, the number is 0 6471 6472 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 6473 6474 filename = pjoin(Ppath, 'auto_dsig.f') 6475 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 6476 subproc_group) 6477 6478 filename = pjoin(Ppath,'configs.inc') 6479 nconfigs, s_and_t_channels = self.write_configs_file(\ 6480 writers.FortranWriter(filename), 6481 subproc_group, 6482 subproc_diagrams_for_config) 6483 6484 filename = pjoin(Ppath, 'leshouche.inc') 6485 self.write_leshouche_file(writers.FortranWriter(filename), 6486 subproc_group) 6487 6488 filename = pjoin(Ppath, 'phasespace.inc') 6489 self.write_phasespace_file(writers.FortranWriter(filename), 6490 nconfigs) 6491 6492 6493 filename = pjoin(Ppath, 'maxamps.inc') 6494 self.write_maxamps_file(writers.FortranWriter(filename), 6495 maxamps, 6496 maxflows, 6497 max([len(me.get('processes')) for me in \ 6498 matrix_elements]), 6499 len(matrix_elements)) 6500 6501 filename = pjoin(Ppath, 'mirrorprocs.inc') 6502 self.write_mirrorprocs(writers.FortranWriter(filename), 6503 subproc_group) 6504 6505 filename = pjoin(Ppath, 'nexternal.inc') 6506 self.write_nexternal_file(writers.FortranWriter(filename), 6507 nexternal, ninitial) 6508 6509 filename = pjoin(Ppath, 'pmass.inc') 6510 self.write_pmass_file(writers.FortranWriter(filename), 6511 matrix_element) 6512 6513 filename = pjoin(Ppath, 'props.inc') 6514 self.write_props_file(writers.FortranWriter(filename), 6515 matrix_element, 6516 s_and_t_channels) 6517 6518 # filename = pjoin(Ppath, 'processes.dat') 6519 # files.write_to_file(filename, 6520 # self.write_processes_file, 6521 # subproc_group) 6522 6523 # Generate jpgs -> pass in make_html 6524 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 6525 6526 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 6527 6528 for file in linkfiles: 6529 ln('../%s' % file, cwd=Ppath) 6530 6531 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 6532 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 6533 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 6534 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 6535 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 6536 ln('phasespace.inc', '../', log=True, cwd=Ppath) 6537 if not tot_calls: 6538 tot_calls = 0 6539 return tot_calls
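# Editor's illustrative sketch -- not part of the original module. It mirrors
# the directory naming and bookkeeping of generate_subprocess_directory_v4
# above; the group number and name below are hypothetical examples.
example_group_number, example_group_name = 1, 'qq_ttx'
example_subprocdir = "P%d_%s" % (example_group_number, example_group_name)
assert example_subprocdir == "P1_qq_ttx"
# The real method creates SubProcesses/P1_qq_ttx/ and fills it with
# matrix<N>.f, auto_dsig<N>.f, configs.inc, leshouche.inc, phasespace.inc,
# maxamps.inc, mirrorprocs.inc, nexternal.inc, pmass.inc and props.inc,
# then symlinks the shared files listed in `linkfiles`.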
6540 6541 #=========================================================================== 6542 # write_super_auto_dsig_file 6543 #===========================================================================
6544 - def write_super_auto_dsig_file(self, writer, subproc_group):
6545 """Write the auto_dsig.f file selecting between the subprocesses 6546 in subprocess group mode""" 6547 6548 replace_dict = {} 6549 6550 # Extract version number and date from VERSION file 6551 info_lines = self.get_mg5_info_lines() 6552 replace_dict['info_lines'] = info_lines 6553 6554 matrix_elements = subproc_group.get('matrix_elements') 6555 6556 # Extract process info lines 6557 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 6558 matrix_elements]) 6559 replace_dict['process_lines'] = process_lines 6560 6561 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 6562 replace_dict['nexternal'] = nexternal 6563 6564 replace_dict['nsprocs'] = 2*len(matrix_elements) 6565 6566 # Generate dsig definition line 6567 dsig_def_line = "DOUBLE PRECISION " + \ 6568 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 6569 range(len(matrix_elements))]) 6570 replace_dict["dsig_def_line"] = dsig_def_line 6571 6572 # Generate dsig process lines 6573 call_dsig_proc_lines = [] 6574 for iproc in range(len(matrix_elements)): 6575 call_dsig_proc_lines.append(\ 6576 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 6577 {"num": iproc + 1, 6578 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 6579 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 6580 6581 file = open(os.path.join(_file_path, \ 6582 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 6583 file = file % replace_dict 6584 6585 # Write the file 6586 writer.writelines(file)
6587 6588 #=========================================================================== 6589 # write_mirrorprocs 6590 #===========================================================================
6591 - def write_mirrorprocs(self, writer, subproc_group):
6592 """Write the mirrorprocs.inc file determining which processes have 6593 IS mirror process in subprocess group mode.""" 6594 6595 lines = [] 6596 bool_dict = {True: '.true.', False: '.false.'} 6597 matrix_elements = subproc_group.get('matrix_elements') 6598 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 6599 (len(matrix_elements), 6600 ",".join([bool_dict[me.get('has_mirror_process')] for \ 6601 me in matrix_elements]))) 6602 # Write the file 6603 writer.writelines(lines)
6604 6605 #=========================================================================== 6606 # write_configs_file 6607 #===========================================================================
6608 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6609 """Write the configs.inc file with topology information for a 6610 subprocess group. Use the first subprocess with a diagram for each 6611 configuration.""" 6612 6613 matrix_elements = subproc_group.get('matrix_elements') 6614 model = matrix_elements[0].get('processes')[0].get('model') 6615 6616 diagrams = [] 6617 config_numbers = [] 6618 for iconfig, config in enumerate(diagrams_for_config): 6619 # Check if any diagrams correspond to this config 6620 if set(config) == set([0]): 6621 continue 6622 subproc_diags = [] 6623 for s,d in enumerate(config): 6624 if d: 6625 subproc_diags.append(matrix_elements[s].\ 6626 get('diagrams')[d-1]) 6627 else: 6628 subproc_diags.append(None) 6629 diagrams.append(subproc_diags) 6630 config_numbers.append(iconfig + 1) 6631 6632 # Extract number of external particles 6633 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6634 6635 return len(diagrams), \ 6636 self.write_configs_file_from_diagrams(writer, diagrams, 6637 config_numbers, 6638 nexternal, ninitial, 6639 matrix_elements[0],model)
6640 6641 #=========================================================================== 6642 # write_run_configs_file 6643 #===========================================================================
6644 - def write_run_config_file(self, writer):
6645 """Write the run_configs.inc file for MadEvent""" 6646 6647 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 6648 text = open(path).read() % {'chanperjob':'2'} 6649 writer.write(text) 6650 return True
6651 6652 6653 #=========================================================================== 6654 # write_leshouche_file 6655 #===========================================================================
6656 - def write_leshouche_file(self, writer, subproc_group):
6657 """Write the leshouche.inc file for MG4""" 6658 6659 all_lines = [] 6660 6661 for iproc, matrix_element in \ 6662 enumerate(subproc_group.get('matrix_elements')): 6663 all_lines.extend(self.get_leshouche_lines(matrix_element, 6664 iproc)) 6665 6666 # Write the file 6667 writer.writelines(all_lines) 6668 6669 return True
6670