
Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from distutils import dir_util 
  18  import glob 
  19  import logging 
  20  import os 
  21  import re 
  22  import shutil 
  23  import subprocess 
  24  import string 
  25  import copy 
  26  import platform 
  27   
  28  import madgraph.core.color_algebra as color 
  29  import madgraph.core.helas_objects as helas_objects 
  30  import madgraph.core.base_objects as base_objects 
  31  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  32  import madgraph.fks.fks_base as fks 
  33  import madgraph.fks.fks_common as fks_common 
  34  import madgraph.iolibs.drawing_eps as draw 
  35  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  36  import madgraph.iolibs.files as files 
  37  import madgraph.various.misc as misc 
  38  import madgraph.iolibs.file_writers as writers 
  39  import madgraph.iolibs.template_files as template_files 
  40  import madgraph.iolibs.ufo_expression_parsers as parsers 
  41  import madgraph.iolibs.export_v4 as export_v4 
  42  import madgraph.loop.loop_exporters as loop_exporters 
  43  import madgraph.various.q_polynomial as q_polynomial 
  44  import madgraph.various.banner as banner_mod 
  45   
  46  import aloha.create_aloha as create_aloha 
  47   
  48  import models.write_param_card as write_param_card 
  49  import models.check_param_card as check_param_card 
  50  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  51  from madgraph.iolibs.files import cp, ln, mv 
  52   
  53  pjoin = os.path.join 
  54   
  55  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  56  logger = logging.getLogger('madgraph.export_fks') 
  57   
  58   
  59  def make_jpeg_async(args):
  60      Pdir = args[0]
  61      old_pos = args[1]
  62      dir_path = args[2]
  63  
  64      devnull = os.open(os.devnull, os.O_RDWR)
  65  
  66      os.chdir(Pdir)
  67      subprocess.call([os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
  68                      stdout = devnull)
  69      os.chdir(os.path.pardir)
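make_jpeg_async packs its three inputs into a single tuple, which is the calling convention of multiprocessing.Pool.map; the driver below is only a hedged sketch of how such a helper might be dispatched in parallel (the helper name run_all_jpeg and the pool size are assumptions, not part of this module).

# Hypothetical driver (not part of export_fks.py): run gen_jpeg-pl over several
# P* directories in parallel worker processes.
import multiprocessing
import os

def run_all_jpeg(p_dirs, dir_path, nb_workers=4):
    old_pos = os.getcwd()
    args_list = [(pdir, old_pos, dir_path) for pdir in p_dirs]
    pool = multiprocessing.Pool(processes=nb_workers)
    try:
        pool.map(make_jpeg_async, args_list)
    finally:
        pool.close()
        pool.join()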
  70  
  71  
  72  #=================================================================================
  73  # Class for the (non-optimized) loop process
  74  #=================================================================================
  75  class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA):
  76      """Class to take care of exporting a set of matrix elements to
  77      Fortran (v4) format."""
  78  
  79      #===============================================================================
  80      # copy the Template in a new directory.
  81      #===============================================================================
  82      def copy_fkstemplate(self):
83 """create the directory run_name as a copy of the MadEvent 84 Template, and clean the directory 85 For now it is just the same as copy_v4template, but it will be modified 86 """ 87 88 mgme_dir = self.mgme_dir 89 dir_path = self.dir_path 90 clean =self.opt['clean'] 91 92 #First copy the full template tree if dir_path doesn't exit 93 if not os.path.isdir(dir_path): 94 if not mgme_dir: 95 raise MadGraph5Error, \ 96 "No valid MG_ME path given for MG4 run directory creation." 97 logger.info('initialize a new directory: %s' % \ 98 os.path.basename(dir_path)) 99 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 100 # distutils.dir_util.copy_tree since dir_path already exists 101 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path) 102 # Copy plot_card 103 for card in ['plot_card']: 104 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 105 try: 106 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 107 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 108 except IOError: 109 logger.warning("Failed to move " + card + ".dat to default") 110 111 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 112 if not mgme_dir: 113 raise MadGraph5Error, \ 114 "No valid MG_ME path given for MG4 run directory creation." 115 try: 116 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 117 except IOError: 118 MG5_version = misc.get_pkg_info() 119 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 120 "5." + MG5_version['version']) 121 122 #Ensure that the Template is clean 123 if clean: 124 logger.info('remove old information in %s' % os.path.basename(dir_path)) 125 if os.environ.has_key('MADGRAPH_BASE'): 126 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 127 '--web'],cwd=dir_path) 128 else: 129 try: 130 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 131 cwd=dir_path) 132 except Exception, why: 133 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 134 % (os.path.basename(dir_path),why)) 135 #Write version info 136 MG_version = misc.get_pkg_info() 137 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 138 MG_version['version']) 139 140 # We must link the CutTools to the Library folder of the active Template 141 self.link_CutTools(dir_path) 142 143 link_tir_libs=[] 144 tir_libs=[] 145 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 146 dirpath = os.path.join(self.dir_path, 'SubProcesses') 147 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop') 148 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 149 link_tir_libs,tir_libs) 150 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 151 filename = pjoin(self.dir_path, 'Source','make_opts') 152 calls = self.write_make_opts(writers.MakefileWriter(filename), 153 link_tir_libs,tir_libs) 154 155 # Duplicate run_card and FO_analyse_card 156 for card in ['FO_analyse_card', 'shower_card']: 157 try: 158 shutil.copy(pjoin(self.dir_path, 'Cards', 159 card + '.dat'), 160 pjoin(self.dir_path, 'Cards', 161 card + '_default.dat')) 162 except IOError: 163 logger.warning("Failed to copy " + card + ".dat to default") 164 165 cwd = os.getcwd() 166 dirpath = os.path.join(self.dir_path, 'SubProcesses') 167 try: 168 os.chdir(dirpath) 169 except os.error: 170 logger.error('Could not cd to directory %s' % dirpath) 171 return 0 172 173 # We add here the user-friendly MadLoop option setter. 
174 cpfiles= ["SubProcesses/MadLoopParamReader.f", 175 "Cards/MadLoopParams.dat", 176 "SubProcesses/MadLoopParams.inc"] 177 178 for file in cpfiles: 179 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 180 os.path.join(self.dir_path, file)) 181 182 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'), 183 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat')) 184 185 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 186 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 187 'Cards', 'MadLoopParams.dat')) 188 # write the output file 189 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 190 "MadLoopParams.dat")) 191 192 # We need minimal editing of MadLoopCommons.f 193 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 194 "SubProcesses","MadLoopCommons.inc")).read() 195 writer = writers.FortranWriter(os.path.join(self.dir_path, 196 "SubProcesses","MadLoopCommons.f")) 197 writer.writelines(MadLoopCommon%{ 198 'print_banner_commands':self.MadLoop_banner}, 199 context={'collier_available':False}) 200 writer.close() 201 202 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 203 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 204 writers.FortranWriter('cts_mpc.h')) 205 206 207 # Finally make sure to turn off MC over Hel for the default mode. 208 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat') 209 FKS_card_file = open(FKS_card_path,'r') 210 FKS_card = FKS_card_file.read() 211 FKS_card_file.close() 212 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+", 213 "#NHelForMCoverHels\n-1", FKS_card) 214 FKS_card_file = open(FKS_card_path,'w') 215 FKS_card_file.write(FKS_card) 216 FKS_card_file.close() 217 218 # Return to original PWD 219 os.chdir(cwd) 220 # Copy the different python files in the Template 221 self.copy_python_files() 222 223 # We need to create the correct open_data for the pdf 224 self.write_pdf_opendata()
 225  
 226      # I put it here, not in the optimized one, because I want to use the same
 227      # makefile_loop.inc. Also, we overload this function (i.e. it is already
 228      # defined in LoopProcessExporterFortranSA) because the path of the template
 229      # makefile is different.
 230      def write_makefile_TIR(self, writer, link_tir_libs, tir_libs, tir_include=[]):
 231          """Create the file makefile_loop which links to the TIR libraries."""
 232  
 233          file = open(os.path.join(self.mgme_dir, 'Template', 'NLO',
 234                      'SubProcesses', 'makefile_loop.inc')).read()
 235          replace_dict = {}
 236          replace_dict['link_tir_libs'] = ' '.join(link_tir_libs)
 237          replace_dict['tir_libs'] = ' '.join(tir_libs)
 238          replace_dict['dotf'] = '%.f'
 239          replace_dict['doto'] = '%.o'
 240          replace_dict['tir_include'] = ' '.join(tir_include)
 241          file = file % replace_dict
 242          if writer:
 243              writer.writelines(file)
 244          else:
 245              return file
 246  
 247      # I put it here, not in the optimized one, because I want to use the same make_opts.inc
 248      def write_make_opts(self, writer, link_tir_libs, tir_libs):
 249          """Create the file make_opts which links to the TIR libraries."""
 250          file = open(os.path.join(self.mgme_dir, 'Template', 'NLO',
 251                      'Source', 'make_opts.inc')).read()
 252          replace_dict = {}
 253          replace_dict['link_tir_libs'] = ' '.join(link_tir_libs)
 254          replace_dict['tir_libs'] = ' '.join(tir_libs)
 255          replace_dict['dotf'] = '%.f'
 256          replace_dict['doto'] = '%.o'
 257          file = file % replace_dict
 258          if writer:
 259              writer.writelines(file)
 260          else:
 261              return file
 262  
 263      #===========================================================================
 264      # copy_python_files
 265      #===========================================================================
 266      def copy_python_files(self):
 267          """copy python files required for the Template"""
 268  
 269          files_to_copy = [ \
 270              pjoin('interface', 'amcatnlo_run_interface.py'),
 271              pjoin('interface', 'extended_cmd.py'),
 272              pjoin('interface', 'common_run_interface.py'),
 273              pjoin('interface', 'coloring_logging.py'),
 274              pjoin('various', 'misc.py'),
 275              pjoin('various', 'shower_card.py'),
 276              pjoin('various', 'FO_analyse_card.py'),
 277              pjoin('various', 'histograms.py'),
 278              pjoin('various', 'banner.py'),
 279              pjoin('various', 'cluster.py'),
 280              pjoin('various', 'systematics.py'),
 281              pjoin('various', 'lhe_parser.py'),
 282              pjoin('madevent', 'sum_html.py'),
 283              pjoin('madevent', 'gen_crossxhtml.py'),
 284              pjoin('iolibs', 'files.py'),
 285              pjoin('iolibs', 'save_load_object.py'),
 286              pjoin('iolibs', 'file_writers.py'),
 287              pjoin('..', 'models', 'check_param_card.py'),
 288              pjoin('__init__.py')
 289              ]
 290          cp(_file_path + '/interface/.mg5_logging.conf',
 291             self.dir_path + '/bin/internal/me5_logging.conf')
 292  
 293          for cp_file in files_to_copy:
 294              cp(pjoin(_file_path, cp_file),
 295                 pjoin(self.dir_path, 'bin', 'internal', os.path.basename(cp_file)))
 296  
 297      def convert_model(self, model, wanted_lorentz = [],
 298                        wanted_couplings = []):
 299  
 300          super(ProcessExporterFortranFKS, self).convert_model(model,
 301                                              wanted_lorentz, wanted_couplings)
 302  
 303          IGNORE_PATTERNS = ('*.pyc', '*.dat', '*.py~')
 304          try:
 305              shutil.rmtree(pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'))
 306          except OSError as error:
 307              pass
 308          model_path = model.get('modelpath')
 309          shutil.copytree(model_path,
 310                          pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'),
 311                          ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
 312          if hasattr(model, 'restrict_card'):
 313              out_path = pjoin(self.dir_path, 'bin', 'internal', 'ufomodel',
 314                               'restrict_default.dat')
 315              if isinstance(model.restrict_card, check_param_card.ParamCard):
 316                  model.restrict_card.write(out_path)
 317              else:
 318                  files.cp(model.restrict_card, out_path)
 319  
 320  
 321  
 322      #===========================================================================
 323      # write_maxparticles_file
 324      #===========================================================================
 325      def write_maxparticles_file(self, writer, maxparticles):
 326          """Write the maxparticles.inc file for MadEvent"""
 327  
 328          lines = "integer max_particles, max_branch\n"
 329          lines += "parameter (max_particles=%d) \n" % maxparticles
 330          lines += "parameter (max_branch=max_particles-1)"
 331  
 332          # Write the file
 333          writer.writelines(lines)
 334  
 335          return True
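As a quick check of the format produced, here is the include content this writer assembles for an assumed maxparticles value of 8 (standalone sketch reusing the string template above).

# Illustration only (not part of export_fks.py):
maxparticles = 8
lines = "integer max_particles, max_branch\n"
lines += "parameter (max_particles=%d) \n" % maxparticles
lines += "parameter (max_branch=max_particles-1)"
print(lines)
# integer max_particles, max_branch
# parameter (max_particles=8)
# parameter (max_branch=max_particles-1)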
 336  
 337  
 338      #===========================================================================
 339      # write_maxconfigs_file
 340      #===========================================================================
 341      def write_maxconfigs_file(self, writer, maxconfigs):
 342          """Write the maxconfigs.inc file for MadEvent"""
 343  
 344          lines = "integer lmaxconfigs\n"
 345          lines += "parameter (lmaxconfigs=%d)" % maxconfigs
 346  
 347          # Write the file
 348          writer.writelines(lines)
 349  
 350          return True
 351  
 352  
 353      #===============================================================================
 354      # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat)
 355      #===============================================================================
 356      def write_procdef_mg5(self, file_pos, modelname, process_str):
 357          """Write an equivalent of the MG4 proc_card so that all the MadEvent4
 358          Perl scripts keep working properly for a pure MG5 run."""
 359  
 360          proc_card_template = template_files.mg4_proc_card.mg4_template
 361          process_template = template_files.mg4_proc_card.process_template
 362          process_text = ''
 363          coupling = ''
 364          new_process_content = []
 365  
 366          # First find the couplings and remove them from process_str.
 367          # But first ensure that couplings are defined without spaces:
 368          process_str = process_str.replace(' =', '=')
 369          process_str = process_str.replace('= ', '=')
 370          process_str = process_str.replace(',', ' , ')
 371          # now loop over the elements and treat all the couplings
 372          for info in process_str.split():
 373              if '=' in info:
 374                  coupling += info + '\n'
 375              else:
 376                  new_process_content.append(info)
 377          # Recombine the process_str (which is the input process_str without
 378          # coupling info)
 379          process_str = ' '.join(new_process_content)
 380  
 381          # format the SubProcess
 382          process_text += process_template.substitute({'process': process_str, \
 383                                                       'coupling': coupling})
 384  
 385          text = proc_card_template.substitute({'process': process_text,
 386                                                'model': modelname,
 387                                                'multiparticle': ''})
 388          ff = open(file_pos, 'w')
 389          ff.write(text)
 390          ff.close()
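The coupling-splitting loop above can be illustrated on a made-up process string (the sample string below is an example, not taken from a real run):

# Illustration only (not part of export_fks.py):
process_str = "p p > t t~ QED=0 QCD=2"
process_str = process_str.replace(' =', '=').replace('= ', '=').replace(',', ' , ')
coupling = ''
new_process_content = []
for info in process_str.split():
    if '=' in info:
        coupling += info + '\n'
    else:
        new_process_content.append(info)
print(' '.join(new_process_content))   # p p > t t~
print(coupling)                         # QED=0 and QCD=2, one per line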
 391  
 392  
 393      #===============================================================================
 394      # write an initial-states map, useful for the fast PDF NLO interface
 395      #===============================================================================
 396      def write_init_map(self, file_pos, initial_states):
 397          """Write an initial-state process map. Each possible PDF
 398          combination gets a unique identifier."""
 399  
 400          text = ''
 401          for i, e in enumerate(initial_states):
 402              text = text + str(i + 1) + ' ' + str(len(e))
 403              for t in e:
 404                  text = text + ' '
 405                  try:
 406                      for p in t:
 407                          text = text + ' ' + str(p)
 408                  except TypeError:
 409                      text = text + ' ' + str(t)
 410              text = text + '\n'
 411  
 412          ff = open(file_pos, 'w')
 413          ff.write(text)
 414          ff.close()
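The text layout written by write_init_map can be seen from a standalone sketch with a made-up initial_states structure (each entry is taken here to be a list of PDG-pair tuples sharing the same PDF combination; that interpretation follows the docstring, but the sample values are invented):

# Illustration only (not part of export_fks.py):
initial_states = [[(2, -2), (4, -4)], [(21, 21)]]
text = ''
for i, e in enumerate(initial_states):
    text = text + str(i + 1) + ' ' + str(len(e))
    for t in e:
        text = text + ' '
        try:
            for p in t:
                text = text + ' ' + str(p)
        except TypeError:
            text = text + ' ' + str(t)
    text = text + '\n'
print(text)
# 1 2  2 -2  4 -4
# 2 1  21 21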
 415  
 416      def get_ME_identifier(self, matrix_element, *args, **opts):
 417          """A function returning a string uniquely identifying the matrix
 418          element given in argument, so that it can be used as a prefix to all
 419          MadLoop5 subroutines and common blocks related to it. This allows
 420          several processes to be compiled into one library, as requested by the
 421          BLHA (Binoth Les Houches Accord) guidelines. The MadFKS design
 422          requires that there is no process prefix."""
 423  
 424          return ''
 425  
 426      #===============================================================================
 427      # write_coef_specs
 428      #===============================================================================
 429      def write_coef_specs_file(self, virt_me_list):
 430          """writes the coef_specs.inc in the DHELAS folder. Should not be called
 431          in the non-optimized mode"""
 432          raise fks_common.FKSProcessError(
 433              "write_coef_specs should be called only in the loop-optimized mode")
 434  
 435  
 436      #===============================================================================
 437      # generate_directories_fks
 438      #===============================================================================
 439      def generate_directories_fks(self, matrix_element, fortran_model, me_number,
 440                                   me_ntot, path=os.getcwd(), OLP='MadLoop'):
441 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 442 including the necessary matrix.f and various helper files""" 443 proc = matrix_element.born_matrix_element['processes'][0] 444 445 if not self.model: 446 self.model = matrix_element.get('processes')[0].get('model') 447 448 cwd = os.getcwd() 449 try: 450 os.chdir(path) 451 except OSError, error: 452 error_msg = "The directory %s should exist in order to be able " % path + \ 453 "to \"export\" in it. If you see this error message by " + \ 454 "typing the command \"export\" please consider to use " + \ 455 "instead the command \"output\". " 456 raise MadGraph5Error, error_msg 457 458 calls = 0 459 460 self.fksdirs = [] 461 #first make and cd the direcrory corresponding to the born process: 462 borndir = "P%s" % \ 463 (matrix_element.get('processes')[0].shell_string()) 464 os.mkdir(borndir) 465 os.chdir(borndir) 466 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 467 468 ## write the files corresponding to the born process in the P* directory 469 self.generate_born_fks_files(matrix_element, 470 fortran_model, me_number, path) 471 472 # With NJET you want to generate the order file per subprocess and most 473 # likely also generate it for each subproc. 474 if OLP=='NJET': 475 filename = 'OLE_order.lh' 476 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP) 477 478 if matrix_element.virt_matrix_element: 479 calls += self.generate_virt_directory( \ 480 matrix_element.virt_matrix_element, \ 481 fortran_model, \ 482 os.path.join(path, borndir)) 483 484 #write the infortions for the different real emission processes 485 486 self.write_real_matrix_elements(matrix_element, fortran_model) 487 488 self.write_pdf_calls(matrix_element, fortran_model) 489 490 filename = 'nFKSconfigs.inc' 491 self.write_nfksconfigs_file(writers.FortranWriter(filename), 492 matrix_element, 493 fortran_model) 494 495 filename = 'iproc.dat' 496 self.write_iproc_file(writers.FortranWriter(filename), 497 me_number) 498 499 filename = 'fks_info.inc' 500 self.write_fks_info_file(writers.FortranWriter(filename), 501 matrix_element, 502 fortran_model) 503 504 filename = 'leshouche_info.dat' 505 nfksconfs,maxproc,maxflow,nexternal=\ 506 self.write_leshouche_info_file(filename,matrix_element) 507 508 # if no corrections are generated ([LOonly] mode), get 509 # these variables from the born 510 if nfksconfs == maxproc == maxflow == 0: 511 nfksconfs = 1 512 (dummylines, maxproc, maxflow) = self.get_leshouche_lines( 513 matrix_element.born_matrix_element, 1) 514 515 filename = 'leshouche_decl.inc' 516 self.write_leshouche_info_declarations( 517 writers.FortranWriter(filename), 518 nfksconfs,maxproc,maxflow,nexternal, 519 fortran_model) 520 521 filename = 'configs_and_props_info.dat' 522 nconfigs,max_leg_number,nfksconfs=self.write_configs_and_props_info_file( 523 filename, 524 matrix_element) 525 526 filename = 'configs_and_props_decl.inc' 527 self.write_configs_and_props_info_declarations( 528 writers.FortranWriter(filename), 529 nconfigs,max_leg_number,nfksconfs, 530 fortran_model) 531 532 filename = 'real_from_born_configs.inc' 533 self.write_real_from_born_configs( 534 writers.FortranWriter(filename), 535 matrix_element, 536 fortran_model) 537 538 filename = 'ngraphs.inc' 539 self.write_ngraphs_file(writers.FortranWriter(filename), 540 nconfigs) 541 542 #write the wrappers 543 filename = 'real_me_chooser.f' 544 self.write_real_me_wrapper(writers.FortranWriter(filename), 545 matrix_element, 
546 fortran_model) 547 548 filename = 'parton_lum_chooser.f' 549 self.write_pdf_wrapper(writers.FortranWriter(filename), 550 matrix_element, 551 fortran_model) 552 553 filename = 'get_color.f' 554 self.write_colors_file(writers.FortranWriter(filename), 555 matrix_element) 556 557 filename = 'nexternal.inc' 558 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 559 self.write_nexternal_file(writers.FortranWriter(filename), 560 nexternal, ninitial) 561 self.proc_characteristic['ninitial'] = ninitial 562 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 563 564 filename = 'pmass.inc' 565 try: 566 self.write_pmass_file(writers.FortranWriter(filename), 567 matrix_element.real_processes[0].matrix_element) 568 except IndexError: 569 self.write_pmass_file(writers.FortranWriter(filename), 570 matrix_element.born_matrix_element) 571 572 #draw the diagrams 573 self.draw_feynman_diagrams(matrix_element) 574 575 linkfiles = ['BinothLHADummy.f', 576 'check_poles.f', 577 'MCmasses_HERWIG6.inc', 578 'MCmasses_HERWIGPP.inc', 579 'MCmasses_PYTHIA6Q.inc', 580 'MCmasses_PYTHIA6PT.inc', 581 'MCmasses_PYTHIA8.inc', 582 'add_write_info.f', 583 'coupl.inc', 584 'cuts.f', 585 'FKS_params.dat', 586 'initial_states_map.dat', 587 'OLE_order.olc', 588 'FKSParams.inc', 589 'FKSParamReader.f', 590 'cuts.inc', 591 'unlops.inc', 592 'pythia_unlops.f', 593 'driver_mintMC.f', 594 'driver_mintFO.f', 595 'appl_interface.cc', 596 'appl_interface_dummy.f', 597 'appl_common.inc', 598 'reweight_appl.inc', 599 'fastjetfortran_madfks_core.cc', 600 'fastjetfortran_madfks_full.cc', 601 'fjcore.cc', 602 'fastjet_wrapper.f', 603 'fjcore.hh', 604 'fks_Sij.f', 605 'fks_powers.inc', 606 'fks_singular.f', 607 'veto_xsec.f', 608 'veto_xsec.inc', 609 'c_weight.inc', 610 'fks_inc_chooser.f', 611 'leshouche_inc_chooser.f', 612 'configs_and_props_inc_chooser.f', 613 'genps.inc', 614 'genps_fks.f', 615 'boostwdir2.f', 616 'madfks_mcatnlo.inc', 617 'open_output_files.f', 618 'open_output_files_dummy.f', 619 'HwU_dummy.f', 620 'madfks_plot.f', 621 'analysis_dummy.f', 622 'analysis_lhe.f', 623 'mint-integrator2.f', 624 'MC_integer.f', 625 'mint.inc', 626 'montecarlocounter.f', 627 'q_es.inc', 628 'recluster.cc', 629 'Boosts.h', 630 'reweight.inc', 631 'reweight0.inc', 632 'reweight1.inc', 633 'reweightNLO.inc', 634 'reweight_all.inc', 635 'reweight_events.f', 636 'reweight_xsec.f', 637 'reweight_xsec_events.f', 638 'reweight_xsec_events_pdf_dummy.f', 639 'iproc_map.f', 640 'run.inc', 641 'run_card.inc', 642 'setcuts.f', 643 'setscales.f', 644 'symmetry_fks_test_MC.f', 645 'symmetry_fks_test_ME.f', 646 'symmetry_fks_test_Sij.f', 647 'symmetry_fks_v3.f', 648 'trapfpe.c', 649 'vegas2.for', 650 'write_ajob.f', 651 'handling_lhe_events.f', 652 'write_event.f', 653 'fill_MC_mshell.f', 654 'maxparticles.inc', 655 'message.inc', 656 'initcluster.f', 657 'cluster.inc', 658 'cluster.f', 659 'reweight.f', 660 'randinit', 661 'sudakov.inc', 662 'maxconfigs.inc', 663 'timing_variables.inc'] 664 665 for file in linkfiles: 666 ln('../' + file , '.') 667 os.system("ln -s ../../Cards/param_card.dat .") 668 669 #copy the makefile 670 os.system("ln -s ../makefile_fks_dir ./makefile") 671 if matrix_element.virt_matrix_element: 672 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 673 elif OLP!='MadLoop': 674 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 675 else: 676 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 677 678 679 #import nexternal/leshouches in Source 680 ln('nexternal.inc', '../../Source', 
log=False) 681 ln('born_leshouche.inc', '../../Source', log=False) 682 683 684 # Return to SubProcesses dir 685 os.chdir(os.path.pardir) 686 # Add subprocess to subproc.mg 687 filename = 'subproc.mg' 688 files.append_to_file(filename, 689 self.write_subproc, 690 borndir) 691 692 693 os.chdir(cwd) 694 # Generate info page 695 gen_infohtml.make_info_html_nlo(self.dir_path) 696 697 698 return calls
 699  
 700      #===========================================================================
 701      # create the run_card
 702      #===========================================================================
 703      def create_run_card(self, processes, history):
 704          """Create the NLO run_card and its default copy in the Cards directory."""
 705  
 706          run_card = banner_mod.RunCardNLO()
 707  
 708          run_card.create_default_for_process(self.proc_characteristic,
 709                                              history,
 710                                              processes)
 711  
 712          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
 713          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
 714  
 715  
 716      def pass_information_from_cmd(self, cmd):
 717          """pass information from the command interface to the exporter.
 718          Please do not modify any object of the interface from the exporter.
 719          """
 720          self.proc_defs = cmd._curr_proc_defs
 721          if hasattr(cmd, 'born_processes'):
 722              self.born_processes = cmd.born_processes
 723          else:
 724              self.born_processes = []
 725          return
 726  
 727      def finalize(self, matrix_elements, history, mg5options, flaglist):
728 """Finalize FKS directory by creating jpeg diagrams, html 729 pages,proc_card_mg5.dat and madevent.tar.gz and create the MA5 card if 730 necessary.""" 731 732 devnull = os.open(os.devnull, os.O_RDWR) 733 try: 734 res = misc.call([self.options['lhapdf'], '--version'], \ 735 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 736 except Exception: 737 res = 1 738 if res != 0: 739 logger.info('The value for lhapdf in the current configuration does not ' + \ 740 'correspond to a valid executable.\nPlease set it correctly either in ' + \ 741 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \ 742 'and regenrate the process. \nTo avoid regeneration, edit the ' + \ 743 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \ 744 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n') 745 746 compiler_dict = {'fortran': mg5options['fortran_compiler'], 747 'cpp': mg5options['cpp_compiler'], 748 'f2py': mg5options['f2py_compiler']} 749 750 if 'nojpeg' in flaglist: 751 makejpg = False 752 else: 753 makejpg = True 754 output_dependencies = mg5options['output_dependencies'] 755 756 757 self.proc_characteristic['grouped_matrix'] = False 758 self.create_proc_charac() 759 760 self.create_run_card(matrix_elements.get_processes(), history) 761 # modelname = self.model.get('name') 762 # if modelname == 'mssm' or modelname.startswith('mssm-'): 763 # param_card = os.path.join(self.dir_path, 'Cards','param_card.dat') 764 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 765 # check_param_card.convert_to_mg5card(param_card, mg5_param) 766 # check_param_card.check_valid_param_card(mg5_param) 767 768 # # write the model functions get_mass/width_from_id 769 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f') 770 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc') 771 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model) 772 773 # # Write maxconfigs.inc based on max of ME's/subprocess groups 774 775 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 776 self.write_maxconfigs_file(writers.FortranWriter(filename), 777 matrix_elements.get_max_configs()) 778 779 # # Write maxparticles.inc based on max of ME's/subprocess groups 780 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 781 self.write_maxparticles_file(writers.FortranWriter(filename), 782 matrix_elements.get_max_particles()) 783 784 # Touch "done" file 785 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 786 787 # Check for compiler 788 fcompiler_chosen = self.set_fortran_compiler(compiler_dict) 789 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 790 791 old_pos = os.getcwd() 792 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 793 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 794 proc[0] == 'P'] 795 796 devnull = os.open(os.devnull, os.O_RDWR) 797 # Convert the poscript in jpg files (if authorize) 798 if makejpg: 799 logger.info("Generate jpeg diagrams") 800 for Pdir in P_dir_list: 801 os.chdir(Pdir) 802 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 803 stdout = devnull) 804 os.chdir(os.path.pardir) 805 # 806 logger.info("Generate web pages") 807 # Create the WebPage using perl script 808 809 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 810 stdout = devnull) 811 812 os.chdir(os.path.pardir) 813 # 814 # obj = 
gen_infohtml.make_info_html(self.dir_path) 815 # [mv(name, './HTML/') for name in os.listdir('.') if \ 816 # (name.endswith('.html') or name.endswith('.jpg')) and \ 817 # name != 'index.html'] 818 # if online: 819 # nb_channel = obj.rep_rule['nb_gen_diag'] 820 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 821 822 # Write command history as proc_card_mg5 823 if os.path.isdir('Cards'): 824 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 825 history.write(output_file) 826 827 # Duplicate run_card and FO_analyse_card 828 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 829 try: 830 shutil.copy(pjoin(self.dir_path, 'Cards', 831 card + '.dat'), 832 pjoin(self.dir_path, 'Cards', 833 card + '_default.dat')) 834 except IOError: 835 logger.warning("Failed to copy " + card + ".dat to default") 836 837 838 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 839 stdout = devnull) 840 841 # Run "make" to generate madevent.tar.gz file 842 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 843 if os.path.exists('amcatnlo.tar.gz'): 844 os.remove('amcatnlo.tar.gz') 845 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 846 stdout = devnull) 847 # 848 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 849 stdout = devnull) 850 851 #return to the initial dir 852 os.chdir(old_pos) 853 854 # Setup stdHep 855 # Find the correct fortran compiler 856 base_compiler= ['FC=g77','FC=gfortran'] 857 858 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 859 860 if output_dependencies == 'external': 861 # check if stdhep has to be compiled (only the first time) 862 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 863 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')): 864 if 'FC' not in os.environ or not os.environ['FC']: 865 path = os.path.join(StdHep_path, 'src', 'make_opts') 866 text = open(path).read() 867 for base in base_compiler: 868 text = text.replace(base,'FC=%s' % fcompiler_chosen) 869 open(path, 'w').writelines(text) 870 871 logger.info('Compiling StdHEP. 
This has to be done only once.') 872 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 873 logger.info('Done.') 874 #then link the libraries in the exported dir 875 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 876 pjoin(self.dir_path, 'MCatNLO', 'lib')) 877 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 878 pjoin(self.dir_path, 'MCatNLO', 'lib')) 879 880 elif output_dependencies == 'internal': 881 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 882 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 883 # Create the links to the lib folder 884 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 885 for file in linkfiles: 886 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 887 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 888 if 'FC' not in os.environ or not os.environ['FC']: 889 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 890 text = open(path).read() 891 for base in base_compiler: 892 text = text.replace(base,'FC=%s' % fcompiler_chosen) 893 open(path, 'w').writelines(text) 894 # To avoid compiler version conflicts, we force a clean here 895 misc.compile(['clean'],cwd = StdHEP_internal_path) 896 897 elif output_dependencies == 'environment_paths': 898 # Here the user chose to define the dependencies path in one of 899 # his environmental paths 900 libStdHep = misc.which_lib('libstdhep.a') 901 libFmcfio = misc.which_lib('libFmcfio.a') 902 if not libStdHep is None and not libFmcfio is None: 903 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 904 os.path.dirname(libStdHep)) 905 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 906 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 907 else: 908 raise InvalidCmd("Could not find the location of the files"+\ 909 " libstdhep.a and libFmcfio.a in you environment paths.") 910 911 else: 912 raise MadGraph5Error, 'output_dependencies option %s not recognized'\ 913 %output_dependencies 914 915 # Create the default MadAnalysis5 cards 916 if 'madanalysis5_path' in self.opt and not \ 917 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 918 # When using 919 processes = sum([me.get('processes') if not isinstance(me, str) else [] \ 920 for me in matrix_elements.get('matrix_elements')],[]) 921 922 # Try getting the processes from the generation info directly if no ME are 923 # available (as it is the case for parallel generation 924 if len(processes)==0: 925 processes = self.born_processes 926 if len(processes)==0: 927 logger.warning( 928 """MG5aMC could not provide to Madanalysis5 the list of processes generated. 929 As a result, the default card will not be tailored to the process generated. 930 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""") 931 # For now, simply assign all processes to each proc_defs. 932 # That shouldn't really affect the default analysis card created by MA5 933 self.create_default_madanalysis5_cards( 934 history, self.proc_defs, [processes,]*len(self.proc_defs), 935 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 936 levels =['hadron'])
 937  
 938      def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
 939          """Writes the real_from_born_configs.inc file that contains
 940          the mapping to go from a given born configuration (as used
 941          e.g. in the multi-channel phase-space integration) to the
 942          corresponding real-emission diagram, i.e. the real-emission
 943          diagram in which the combined particle ij is split into i_fks
 944          and j_fks."""
 945          lines = []
 946          lines2 = []
 947          max_links = 0
 948          born_me = matrix_element.born_matrix_element
 949          for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
 950              iFKS = iFKS + 1
 951              links = conf['fks_info']['rb_links']
 952              max_links = max(max_links, len(links))
 953              for i, diags in enumerate(links):
 954                  if not i == diags['born_conf']:
 955                      print links
 956                      raise MadGraph5Error, "born_conf should be canonically ordered"
 957              real_configs = ', '.join(['%d' % int(diags['real_conf'] + 1) for diags in links])
 958              lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
 959                           % (iFKS, len(links), real_configs))
 960  
 961          lines2.append("integer irfbc")
 962          lines2.append("integer real_from_born_conf(%d,%d)" \
 963                        % (max_links, len(matrix_element.get_fks_info_list())))
 964          # Write the file
 965          writer.writelines(lines2 + lines)
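The routine emits one Fortran data statement per FKS configuration; a standalone sketch with made-up born-to-real links shows the exact formatting:

# Illustration only (not part of export_fks.py):
iFKS = 1
links = [{'born_conf': 0, 'real_conf': 4},
         {'born_conf': 1, 'real_conf': 7},
         {'born_conf': 2, 'real_conf': 9}]
real_configs = ', '.join(['%d' % int(d['real_conf'] + 1) for d in links])
print("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/"
      % (iFKS, len(links), real_configs))
# data (real_from_born_conf(irfbc,1),irfbc=1,3) /5, 8, 10/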
 966  
 967  
 968      #===============================================================================
 969      # write_get_mass_width_file
 970      #===============================================================================
 971      # test written
 972      def write_get_mass_width_file(self, writer, makeinc, model):
973 """Write the get_mass_width_file.f file for MG4. 974 Also update the makeinc.inc file 975 """ 976 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero'] 977 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero'] 978 979 iflines_mass = '' 980 iflines_width = '' 981 982 for i, part in enumerate(mass_particles): 983 if i == 0: 984 ifstring = 'if' 985 else: 986 ifstring = 'else if' 987 if part['self_antipart']: 988 iflines_mass += '%s (id.eq.%d) then\n' % \ 989 (ifstring, part.get_pdg_code()) 990 else: 991 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 992 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 993 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass') 994 995 for i, part in enumerate(width_particles): 996 if i == 0: 997 ifstring = 'if' 998 else: 999 ifstring = 'else if' 1000 if part['self_antipart']: 1001 iflines_width += '%s (id.eq.%d) then\n' % \ 1002 (ifstring, part.get_pdg_code()) 1003 else: 1004 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 1005 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 1006 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width') 1007 1008 replace_dict = {'iflines_mass' : iflines_mass, 1009 'iflines_width' : iflines_width} 1010 1011 file = open(os.path.join(_file_path, \ 1012 'iolibs/template_files/get_mass_width_fcts.inc')).read() 1013 file = file % replace_dict 1014 1015 # Write the file 1016 writer.writelines(file) 1017 1018 # update the makeinc 1019 makeinc_content = open(makeinc).read() 1020 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ') 1021 open(makeinc, 'w').write(makeinc_content) 1022 1023 return
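The if-ladders assembled by write_get_mass_width_file follow the pattern below; this is a reduced sketch using plain (pdg, anti_pdg, self_antipart, mass_name) tuples instead of the model's particle objects, with invented values:

# Illustration only (not part of export_fks.py):
particles = [(6, -6, False, 'MT'), (23, 23, True, 'MZ')]
iflines_mass = ''
for i, (pdg, anti, self_anti, mass) in enumerate(particles):
    ifstring = 'if' if i == 0 else 'else if'
    if self_anti:
        iflines_mass += '%s (id.eq.%d) then\n' % (ifstring, pdg)
    else:
        iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % (ifstring, pdg, anti)
    iflines_mass += 'get_mass_from_id=abs(%s)\n' % mass
print(iflines_mass)
# if (id.eq.6.or.id.eq.-6) then
# get_mass_from_id=abs(MT)
# else if (id.eq.23) then
# get_mass_from_id=abs(MZ)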
1024  
1025  
1026      def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model):
1027          """writes the declarations for the variables relevant for configs_and_props
1028          """
1029          lines = []
1030          lines.append("integer ifr,lmaxconfigs_used,max_branch_used")
1031          lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig)
1032          lines.append("parameter (max_branch_used =%4d)" % -max_leg_number)
1033          lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs)
1034          lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1035          lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1036          lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1037          lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1038          lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1039          lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1040  
1041          writer.writelines(lines)
1042  
1043  
1044      def write_configs_and_props_info_file(self, filename, matrix_element):
1045 """writes the configs_and_props_info.inc file that cointains 1046 all the (real-emission) configurations (IFOREST) as well as 1047 the masses and widths of intermediate particles""" 1048 lines = [] 1049 lines.append("# C -> MAPCONFIG_D") 1050 lines.append("# F/D -> IFOREST_D") 1051 lines.append("# S -> SPROP_D") 1052 lines.append("# T -> TPRID_D") 1053 lines.append("# M -> PMASS_D/PWIDTH_D") 1054 lines.append("# P -> POW_D") 1055 lines2 = [] 1056 nconfs = len(matrix_element.get_fks_info_list()) 1057 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1058 1059 max_iconfig=0 1060 max_leg_number=0 1061 1062 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 1063 iFKS=iFKS+1 1064 iconfig = 0 1065 s_and_t_channels = [] 1066 mapconfigs = [] 1067 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element 1068 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams') 1069 model = fks_matrix_element.get('base_amplitude').get('process').get('model') 1070 minvert = min([max([len(vert.get('legs')) for vert in \ 1071 diag.get('vertices')]) for diag in base_diagrams]) 1072 1073 lines.append("# ") 1074 lines.append("# nFKSprocess %d" % iFKS) 1075 for idiag, diag in enumerate(base_diagrams): 1076 if any([len(vert.get('legs')) > minvert for vert in 1077 diag.get('vertices')]): 1078 # Only 3-vertices allowed in configs.inc 1079 continue 1080 iconfig = iconfig + 1 1081 helas_diag = fks_matrix_element.get('diagrams')[idiag] 1082 mapconfigs.append(helas_diag.get('number')) 1083 lines.append("# Diagram %d for nFKSprocess %d" % \ 1084 (helas_diag.get('number'),iFKS)) 1085 # Correspondance between the config and the amplitudes 1086 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1087 helas_diag.get('number'))) 1088 1089 # Need to reorganize the topology so that we start with all 1090 # final state external particles and work our way inwards 1091 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1092 get_s_and_t_channels(ninitial, model, 990) 1093 1094 s_and_t_channels.append([schannels, tchannels]) 1095 1096 # Write out propagators for s-channel and t-channel vertices 1097 allchannels = schannels 1098 if len(tchannels) > 1: 1099 # Write out tchannels only if there are any non-trivial ones 1100 allchannels = schannels + tchannels 1101 1102 for vert in allchannels: 1103 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1104 last_leg = vert.get('legs')[-1] 1105 lines.append("F %4d %4d %4d %4d" % \ 1106 (iFKS,last_leg.get('number'), iconfig, len(daughters))) 1107 for d in daughters: 1108 lines.append("D %4d" % d) 1109 if vert in schannels: 1110 lines.append("S %4d %4d %4d %10d" % \ 1111 (iFKS,last_leg.get('number'), iconfig, 1112 last_leg.get('id'))) 1113 elif vert in tchannels[:-1]: 1114 lines.append("T %4d %4d %4d %10d" % \ 1115 (iFKS,last_leg.get('number'), iconfig, 1116 abs(last_leg.get('id')))) 1117 1118 # update what the array sizes (mapconfig,iforest,etc) will be 1119 max_leg_number = min(max_leg_number,last_leg.get('number')) 1120 max_iconfig = max(max_iconfig,iconfig) 1121 1122 # Write out number of configs 1123 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1124 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1125 1126 # write the props.inc information 1127 lines2.append("# ") 1128 particle_dict = fks_matrix_element.get('processes')[0].get('model').\ 1129 get('particle_dict') 1130 1131 for iconf, configs in enumerate(s_and_t_channels): 1132 for vertex in configs[0] + configs[1][:-1]: 1133 leg = 
vertex.get('legs')[-1] 1134 if leg.get('id') not in particle_dict: 1135 # Fake propagator used in multiparticle vertices 1136 pow_part = 0 1137 else: 1138 particle = particle_dict[leg.get('id')] 1139 1140 pow_part = 1 + int(particle.is_boson()) 1141 1142 lines2.append("M %4d %4d %4d %10d " % \ 1143 (iFKS,leg.get('number'), iconf + 1, leg.get('id'))) 1144 lines2.append("P %4d %4d %4d %4d " % \ 1145 (iFKS,leg.get('number'), iconf + 1, pow_part)) 1146 1147 # Write the file 1148 open(filename,'w').write('\n'.join(lines+lines2)) 1149 1150 return max_iconfig, max_leg_number, nconfs
1151  
1152  
1153      def write_leshouche_info_declarations(self, writer, nfksconfs,
1154                                            maxproc, maxflow, nexternal, fortran_model):
1155          """writes the declarations for the variables relevant for leshouche_info
1156          """
1157          lines = []
1158          lines.append('integer maxproc_used, maxflow_used')
1159          lines.append('parameter (maxproc_used = %d)' % maxproc)
1160          lines.append('parameter (maxflow_used = %d)' % maxflow)
1161          lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal))
1162          lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal))
1163          lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal))
1164          lines.append('integer niprocs_d(%d)' % (nfksconfs))
1165  
1166          writer.writelines(lines)
1167  
1168  
1169      def write_leshouche_info_file(self, filename, matrix_element):
1170          """writes the leshouche_info.inc file which contains
1171          the LHA information for all the real emission processes
1172          """
1173          lines = []
1174          lines.append("# I -> IDUP_D")
1175          lines.append("# M -> MOTHUP_D")
1176          lines.append("# C -> ICOLUP_D")
1177          nfksconfs = len(matrix_element.get_fks_info_list())
1178          (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1179  
1180          maxproc = 0
1181          maxflow = 0
1182          for i, conf in enumerate(matrix_element.get_fks_info_list()):
1183              # for i, real in enumerate(matrix_element.real_processes):
1184              (newlines, nprocs, nflows) = self.get_leshouche_lines(
1185                  matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1)
1186              lines.extend(newlines)
1187              maxproc = max(maxproc, nprocs)
1188              maxflow = max(maxflow, nflows)
1189  
1190          # Write the file
1191          open(filename, 'w').write('\n'.join(lines))
1192  
1193          return nfksconfs, maxproc, maxflow, nexternal
1194  
1195  
1196      def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
1197 """writes the wrapper which allows to chose among the different real matrix elements""" 1198 1199 file = \ 1200 """double precision function dlum() 1201 implicit none 1202 include 'timing_variables.inc' 1203 integer nfksprocess 1204 common/c_nfksprocess/nfksprocess 1205 call cpu_time(tbefore) 1206 """ 1207 if matrix_element.real_processes: 1208 for n, info in enumerate(matrix_element.get_fks_info_list()): 1209 file += \ 1210 """if (nfksprocess.eq.%(n)d) then 1211 call dlum_%(n_me)d(dlum) 1212 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1213 file += \ 1214 """ 1215 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess 1216 stop 1217 endif 1218 call cpu_time(tAfter) 1219 tPDF = tPDF + (tAfter-tBefore) 1220 return 1221 end 1222 """ 1223 else: 1224 file+= \ 1225 """call dlum_0(dlum) 1226 call cpu_time(tAfter) 1227 tPDF = tPDF + (tAfter-tBefore) 1228 return 1229 end 1230 """ 1231 1232 # Write the file 1233 writer.writelines(file) 1234 return 0
1235  
1236  
1237      def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
1238 """writes the wrapper which allows to chose among the different real matrix elements""" 1239 1240 file = \ 1241 """subroutine smatrix_real(p, wgt) 1242 implicit none 1243 include 'nexternal.inc' 1244 double precision p(0:3, nexternal) 1245 double precision wgt 1246 integer nfksprocess 1247 common/c_nfksprocess/nfksprocess 1248 """ 1249 for n, info in enumerate(matrix_element.get_fks_info_list()): 1250 file += \ 1251 """if (nfksprocess.eq.%(n)d) then 1252 call smatrix_%(n_me)d(p, wgt) 1253 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1254 1255 if matrix_element.real_processes: 1256 file += \ 1257 """ 1258 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess 1259 stop 1260 endif 1261 return 1262 end 1263 """ 1264 else: 1265 file += \ 1266 """ 1267 wgt=0d0 1268 return 1269 end 1270 """ 1271 # Write the file 1272 writer.writelines(file) 1273 return 0
1274  
1275  
1276      def draw_feynman_diagrams(self, matrix_element):
1277          """Create the ps files containing the Feynman diagrams for the born process,
1278          as well as for all the real emission processes"""
1279  
1280          filename = 'born.ps'
1281          plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\
1282                                            get('base_amplitude').get('diagrams'),
1283                                            filename,
1284                                            model=matrix_element.born_matrix_element.\
1285                                            get('processes')[0].get('model'),
1286                                            amplitude=True, diagram_type='born')
1287          plot.draw()
1288  
1289          for n, fksreal in enumerate(matrix_element.real_processes):
1290              filename = 'matrix_%d.ps' % (n + 1)
1291              plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\
1292                                                get('base_amplitude').get('diagrams'),
1293                                                filename,
1294                                                model=fksreal.matrix_element.\
1295                                                get('processes')[0].get('model'),
1296                                                amplitude=True, diagram_type='real')
1297              plot.draw()
1298  
1299  
1300      def write_real_matrix_elements(self, matrix_element, fortran_model):
1301          """writes the matrix_i.f files which contain the real matrix elements"""
1302  
1303  
1304  
1305          for n, fksreal in enumerate(matrix_element.real_processes):
1306              filename = 'matrix_%d.f' % (n + 1)
1307              self.write_matrix_element_fks(writers.FortranWriter(filename),
1308                                            fksreal.matrix_element, n + 1,
1309                                            fortran_model)
1310  
1311      def write_pdf_calls(self, matrix_element, fortran_model):
1312          """writes the parton_lum_i.f files which contain the parton luminosities
1313          for the real emission processes. If no real emission exists, write the one for the born"""
1314  
1315          if matrix_element.real_processes:
1316              for n, fksreal in enumerate(matrix_element.real_processes):
1317                  filename = 'parton_lum_%d.f' % (n + 1)
1318                  self.write_pdf_file(writers.FortranWriter(filename),
1319                                      fksreal.matrix_element, n + 1,
1320                                      fortran_model)
1321          else:
1322              filename = 'parton_lum_0.f'
1323              self.write_pdf_file(writers.FortranWriter(filename),
1324                                  matrix_element.born_matrix_element, 0,
1325                                  fortran_model)
1326  
1327  
1328      def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
1329 """generates the files needed for the born amplitude in the P* directory, which will 1330 be needed by the P* directories""" 1331 pathdir = os.getcwd() 1332 1333 filename = 'born.f' 1334 calls_born, ncolor_born = \ 1335 self.write_born_fks(writers.FortranWriter(filename),\ 1336 matrix_element, 1337 fortran_model) 1338 1339 filename = 'born_hel.f' 1340 self.write_born_hel(writers.FortranWriter(filename),\ 1341 matrix_element, 1342 fortran_model) 1343 1344 1345 filename = 'born_conf.inc' 1346 nconfigs, mapconfigs, s_and_t_channels = \ 1347 self.write_configs_file( 1348 writers.FortranWriter(filename), 1349 matrix_element.born_matrix_element, 1350 fortran_model) 1351 1352 filename = 'born_props.inc' 1353 self.write_props_file(writers.FortranWriter(filename), 1354 matrix_element.born_matrix_element, 1355 fortran_model, 1356 s_and_t_channels) 1357 1358 filename = 'born_decayBW.inc' 1359 self.write_decayBW_file(writers.FortranWriter(filename), 1360 s_and_t_channels) 1361 1362 filename = 'born_leshouche.inc' 1363 nflows = self.write_leshouche_file(writers.FortranWriter(filename), 1364 matrix_element.born_matrix_element, 1365 fortran_model) 1366 1367 filename = 'born_nhel.inc' 1368 self.write_born_nhel_file(writers.FortranWriter(filename), 1369 matrix_element.born_matrix_element, nflows, 1370 fortran_model, 1371 ncolor_born) 1372 1373 filename = 'born_ngraphs.inc' 1374 self.write_ngraphs_file(writers.FortranWriter(filename), 1375 matrix_element.born_matrix_element.get_number_of_amplitudes()) 1376 1377 filename = 'ncombs.inc' 1378 self.write_ncombs_file(writers.FortranWriter(filename), 1379 matrix_element.born_matrix_element, 1380 fortran_model) 1381 1382 filename = 'born_maxamps.inc' 1383 maxamps = len(matrix_element.get('diagrams')) 1384 maxflows = ncolor_born 1385 self.write_maxamps_file(writers.FortranWriter(filename), 1386 maxamps, 1387 maxflows, 1388 max([len(matrix_element.get('processes')) for me in \ 1389 matrix_element.born_matrix_element]),1) 1390 1391 filename = 'config_subproc_map.inc' 1392 self.write_config_subproc_map_file(writers.FortranWriter(filename), 1393 s_and_t_channels) 1394 1395 filename = 'coloramps.inc' 1396 self.write_coloramps_file(writers.FortranWriter(filename), 1397 mapconfigs, 1398 matrix_element.born_matrix_element, 1399 fortran_model) 1400 1401 #write the sborn_sf.f and the b_sf_files 1402 filename = ['sborn_sf.f', 'sborn_sf_dum.f'] 1403 for i, links in enumerate([matrix_element.color_links, []]): 1404 self.write_sborn_sf(writers.FortranWriter(filename[i]), 1405 links, 1406 fortran_model) 1407 self.color_link_files = [] 1408 for i in range(len(matrix_element.color_links)): 1409 filename = 'b_sf_%3.3d.f' % (i + 1) 1410 self.color_link_files.append(filename) 1411 self.write_b_sf_fks(writers.FortranWriter(filename), 1412 matrix_element, i, 1413 fortran_model)
1414  
1415  
1416      def generate_virtuals_from_OLP(self, process_list, export_path, OLP):
1417 """Generates the library for computing the loop matrix elements 1418 necessary for this process using the OLP specified.""" 1419 1420 # Start by writing the BLHA order file 1421 virtual_path = pjoin(export_path,'OLP_virtuals') 1422 if not os.path.exists(virtual_path): 1423 os.makedirs(virtual_path) 1424 filename = os.path.join(virtual_path,'OLE_order.lh') 1425 self.write_lh_order(filename, process_list, OLP) 1426 1427 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\ 1428 'Please check the virt_generation.log file in %s.'\ 1429 %str(pjoin(virtual_path,'virt_generation.log')) 1430 1431 # Perform some tasks specific to certain OLP's 1432 if OLP=='GoSam': 1433 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1434 'GoSam','makevirt'),pjoin(virtual_path,'makevirt')) 1435 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1436 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc')) 1437 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path) 1438 # Now generate the process 1439 logger.info('Generating the loop matrix elements with %s...'%OLP) 1440 virt_generation_log = \ 1441 open(pjoin(virtual_path,'virt_generation.log'), 'w') 1442 retcode = subprocess.call(['./makevirt'],cwd=virtual_path, 1443 stdout=virt_generation_log, stderr=virt_generation_log) 1444 virt_generation_log.close() 1445 # Check what extension is used for the share libraries on this system 1446 possible_other_extensions = ['so','dylib'] 1447 shared_lib_ext='so' 1448 for ext in possible_other_extensions: 1449 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib', 1450 'libgolem_olp.'+ext)): 1451 shared_lib_ext = ext 1452 1453 # Now check that everything got correctly generated 1454 files_to_check = ['olp_module.mod',str(pjoin('lib', 1455 'libgolem_olp.'+shared_lib_ext))] 1456 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path, 1457 'Virtuals',f)) for f in files_to_check]): 1458 raise fks_common.FKSProcessError(fail_msg) 1459 # link the library to the lib folder 1460 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext), 1461 pjoin(export_path,'lib')) 1462 1463 # Specify in make_opts the right library necessitated by the OLP 1464 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read() 1465 make_opts=open(pjoin(export_path,'Source','make_opts'),'w') 1466 if OLP=='GoSam': 1467 if platform.system().lower()=='darwin': 1468 # On mac the -rpath is not supported and the path of the dynamic 1469 # library is automatically wired in the executable 1470 make_opts_content=make_opts_content.replace('libOLP=', 1471 'libOLP=-Wl,-lgolem_olp') 1472 else: 1473 # On other platforms the option , -rpath= path to libgolem.so is necessary 1474 # Using a relative path is not ideal because the file libgolem.so is not 1475 # copied on the worker nodes. 1476 # make_opts_content=make_opts_content.replace('libOLP=', 1477 # 'libOLP=-Wl,-rpath=../$(LIBDIR) -lgolem_olp') 1478 # Using the absolute path is working in the case where the disk of the 1479 # front end machine is mounted on all worker nodes as well. 
1480 make_opts_content=make_opts_content.replace('libOLP=', 1481 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp') 1482 1483 1484 make_opts.write(make_opts_content) 1485 make_opts.close() 1486 1487 # A priori this is generic to all OLP's 1488 1489 # Parse the contract file returned and propagate the process label to 1490 # the include of the BinothLHA.f file 1491 proc_to_label = self.parse_contract_file( 1492 pjoin(virtual_path,'OLE_order.olc')) 1493 1494 self.write_BinothLHA_inc(process_list,proc_to_label,\ 1495 pjoin(export_path,'SubProcesses')) 1496 1497 # Link the contract file to within the SubProcess directory 1498 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1499  
1500      def write_BinothLHA_inc(self, processes, proc_to_label, SubProcPath):
1501          """Write the file Binoth_proc.inc in each SubProcess directory so as
1502          to provide the right process_label to use in the OLP call to get the
1503          loop matrix element evaluation. The proc_to_label is a dictionary in
1504          the format returned by the function parse_contract_file."""
1505  
1506          for proc in processes:
1507              name = "P%s" % proc.shell_string()
1508              proc_pdgs = (tuple([leg.get('id') for leg in proc.get('legs') if \
1509                                  not leg.get('state')]),
1510                           tuple([leg.get('id') for leg in proc.get('legs') if \
1511                                  leg.get('state')]))
1512              incFile = open(pjoin(SubProcPath, name, 'Binoth_proc.inc'), 'w')
1513              try:
1514                  incFile.write(
1515  """ INTEGER PROC_LABEL
1516     PARAMETER (PROC_LABEL=%d)""" % (proc_to_label[proc_pdgs]))
1517              except KeyError:
1518                  raise fks_common.FKSProcessError('Could not find the target' + \
1519                      ' process %s > %s in ' % (str(proc_pdgs[0]), str(proc_pdgs[1])) + \
1520                      'the proc_to_label argument in write_BinothLHA_inc.')
1521              incFile.close()
1522
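To illustrate the lookup performed above, here is a minimal sketch of how a process is matched to its OLP label and turned into the two-line include. The PDG codes and the label value are hypothetical; in the real flow the mapping comes from parse_contract_file.

    # Hypothetical mapping as would be returned by parse_contract_file:
    # g g -> t t~ was assigned process label 1 by the OLP.
    proc_to_label = {((21, 21), (6, -6)): 1}
    proc_pdgs = ((21, 21), (6, -6))
    inc_text = (" INTEGER PROC_LABEL\n"
                " PARAMETER (PROC_LABEL=%d)" % proc_to_label[proc_pdgs])
    # inc_text is the content written to P<shell_string>/Binoth_proc.inc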
1523 - def parse_contract_file(self, contract_file_path):
1524 """ Parses the BLHA contract file, make sure all parameters could be 1525 understood by the OLP and return a mapping of the processes (characterized 1526 by the pdg's of the initial and final state particles) to their process 1527 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 1528 """ 1529 1530 proc_def_to_label = {} 1531 1532 if not os.path.exists(contract_file_path): 1533 raise fks_common.FKSProcessError('Could not find the contract file'+\ 1534 ' OLE_order.olc in %s.'%str(contract_file_path)) 1535 1536 comment_re=re.compile(r"^\s*#") 1537 proc_def_re=re.compile( 1538 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+ 1539 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 1540 line_OK_re=re.compile(r"^.*\|\s*OK") 1541 for line in file(contract_file_path): 1542 # Ignore comments 1543 if not comment_re.match(line) is None: 1544 continue 1545 # Check if it is a proc definition line 1546 proc_def = proc_def_re.match(line) 1547 if not proc_def is None: 1548 if int(proc_def.group('proc_class'))!=1: 1549 raise fks_common.FKSProcessError( 1550 'aMCatNLO can only handle loop processes generated by the OLP which have only '+\ 1551 ' process class attribute. Found %s instead in: \n%s'\ 1552 %(proc_def.group('proc_class'),line)) 1553 in_pdgs=tuple([int(in_pdg) for in_pdg in \ 1554 proc_def.group('in_pdgs').split()]) 1555 out_pdgs=tuple([int(out_pdg) for out_pdg in \ 1556 proc_def.group('out_pdgs').split()]) 1557 proc_def_to_label[(in_pdgs,out_pdgs)]=\ 1558 int(proc_def.group('proc_label')) 1559 continue 1560 # For the other types of line, just make sure they end with | OK 1561 if line_OK_re.match(line) is None: 1562 raise fks_common.FKSProcessError( 1563 'The OLP could not process the following line: \n%s'%line) 1564 1565 return proc_def_to_label
1566 1567
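For orientation, a minimal sketch of the process-definition lines that the regular expressions above accept. The contract-file line itself is made up for illustration; the pattern is the same proc_def_re used in parse_contract_file.

    import re

    proc_def_re = re.compile(
        r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"
        r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$")

    line = "21 21 -> 6 -6 | 1 3"          # hypothetical OLE_order.olc entry
    proc_def = proc_def_re.match(line)
    in_pdgs = tuple(int(p) for p in proc_def.group('in_pdgs').split())
    out_pdgs = tuple(int(p) for p in proc_def.group('out_pdgs').split())
    # proc_def_to_label would then contain {((21, 21), (6, -6)): 3}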
1568 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1569 """writes the V**** directory inside the P**** directories specified in 1570 dir_name""" 1571 1572 cwd = os.getcwd() 1573 1574 matrix_element = loop_matrix_element 1575 1576 # Create the MadLoop5_resources directory if not already existing 1577 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 1578 try: 1579 os.mkdir(dirpath) 1580 except os.error as error: 1581 logger.warning(error.strerror + " " + dirpath) 1582 1583 # Create the directory PN_xx_xxxxx in the specified path 1584 name = "V%s" % matrix_element.get('processes')[0].shell_string() 1585 dirpath = os.path.join(dir_name, name) 1586 1587 try: 1588 os.mkdir(dirpath) 1589 except os.error as error: 1590 logger.warning(error.strerror + " " + dirpath) 1591 1592 try: 1593 os.chdir(dirpath) 1594 except os.error: 1595 logger.error('Could not cd to directory %s' % dirpath) 1596 return 0 1597 1598 logger.info('Creating files in directory %s' % name) 1599 1600 # Extract number of external particles 1601 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1602 1603 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 1604 # The born matrix element, if needed 1605 filename = 'born_matrix.f' 1606 calls = self.write_bornmatrix( 1607 writers.FortranWriter(filename), 1608 matrix_element, 1609 fortran_model) 1610 1611 filename = 'nexternal.inc' 1612 self.write_nexternal_file(writers.FortranWriter(filename), 1613 nexternal, ninitial) 1614 1615 filename = 'pmass.inc' 1616 self.write_pmass_file(writers.FortranWriter(filename), 1617 matrix_element) 1618 1619 filename = 'ngraphs.inc' 1620 self.write_ngraphs_file(writers.FortranWriter(filename), 1621 len(matrix_element.get_all_amplitudes())) 1622 1623 filename = "loop_matrix.ps" 1624 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 1625 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 1626 filename, 1627 model=matrix_element.get('processes')[0].get('model'), 1628 amplitude='') 1629 logger.info("Drawing loop Feynman diagrams for " + \ 1630 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1631 plot.draw() 1632 1633 filename = "born_matrix.ps" 1634 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1635 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\ 1636 get('model'),amplitude='') 1637 logger.info("Generating born Feynman diagrams for " + \ 1638 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1639 plot.draw() 1640 1641 # We also need to write the overall maximum quantities for this group 1642 # of processes in 'global_specs.inc'. 
In aMCatNLO, there is always 1643 # only one process, so this is trivial 1644 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc')) 1645 open('unique_id.inc','w').write( 1646 """ integer UNIQUE_ID 1647 parameter(UNIQUE_ID=1)""") 1648 1649 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 1650 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 1651 'MadLoopCommons.f','MadLoopParams.inc'] 1652 1653 # We should move to MadLoop5_resources directory from the SubProcesses 1654 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 1655 pjoin('..','MadLoop5_resources')) 1656 1657 for file in linkfiles: 1658 ln('../../%s' % file) 1659 1660 os.system("ln -s ../../makefile_loop makefile") 1661 1662 linkfiles = ['mpmodule.mod'] 1663 1664 for file in linkfiles: 1665 ln('../../../lib/%s' % file) 1666 1667 linkfiles = ['coef_specs.inc'] 1668 1669 for file in linkfiles: 1670 ln('../../../Source/DHELAS/%s' % file) 1671 1672 # Return to original PWD 1673 os.chdir(cwd) 1674 1675 if not calls: 1676 calls = 0 1677 return calls
1678
1679 - def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
1680 """computes the QED/QCD orders from the knowledge of the n of ext particles 1681 and of the weighted orders""" 1682 # n vertices = nexternal - 2 =QED + QCD 1683 # weighted = 2*QED + QCD 1684 QED = weighted - nexternal + 2 1685 QCD = weighted - 2 * QED 1686 return QED, QCD
1687 1688 1689 1690 #=============================================================================== 1691 # write_lh_order 1692 #=============================================================================== 1693 #test written
1694 - def write_lh_order(self, filename, process_list, OLP='MadLoop'):
1695 """Creates the OLE_order.lh file. This function should be edited according 1696 to the OLP which is used. For now it is generic.""" 1697 1698 1699 if len(process_list)==0: 1700 raise fks_common.FKSProcessError('No matrix elements provided to '+\ 1701 'the function write_lh_order.') 1702 return 1703 1704 # We assume the orders to be common to all Subprocesses 1705 1706 orders = process_list[0].get('orders') 1707 if 'QED' in orders.keys() and 'QCD' in orders.keys(): 1708 QED=orders['QED'] 1709 QCD=orders['QCD'] 1710 elif 'QED' in orders.keys(): 1711 QED=orders['QED'] 1712 QCD=0 1713 elif 'QCD' in orders.keys(): 1714 QED=0 1715 QCD=orders['QCD'] 1716 else: 1717 QED, QCD = self.get_qed_qcd_orders_from_weighted(\ 1718 len(process_list[0].get('legs')), 1719 orders['WEIGHTED']) 1720 1721 replace_dict = {} 1722 replace_dict['mesq'] = 'CHaveraged' 1723 replace_dict['corr'] = ' '.join(process_list[0].\ 1724 get('perturbation_couplings')) 1725 replace_dict['irreg'] = 'CDR' 1726 replace_dict['aspow'] = QCD 1727 replace_dict['aepow'] = QED 1728 replace_dict['modelfile'] = './param_card.dat' 1729 replace_dict['params'] = 'alpha_s' 1730 proc_lines=[] 1731 for proc in process_list: 1732 proc_lines.append('%s -> %s' % \ 1733 (' '.join(str(l['id']) for l in proc['legs'] if not l['state']), 1734 ' '.join(str(l['id']) for l in proc['legs'] if l['state']))) 1735 replace_dict['pdgs'] = '\n'.join(proc_lines) 1736 replace_dict['symfin'] = 'Yes' 1737 content = \ 1738 "#OLE_order written by MadGraph5_aMC@NLO\n\ 1739 \n\ 1740 MatrixElementSquareType %(mesq)s\n\ 1741 CorrectionType %(corr)s\n\ 1742 IRregularisation %(irreg)s\n\ 1743 AlphasPower %(aspow)d\n\ 1744 AlphaPower %(aepow)d\n\ 1745 NJetSymmetrizeFinal %(symfin)s\n\ 1746 ModelFile %(modelfile)s\n\ 1747 Parameters %(params)s\n\ 1748 \n\ 1749 # process\n\ 1750 %(pdgs)s\n\ 1751 " % replace_dict 1752 1753 file = open(filename, 'w') 1754 file.write(content) 1755 file.close 1756 return
1757 1758 1759 #=============================================================================== 1760 # write_born_fks 1761 #=============================================================================== 1762 # test written
1763 - def write_born_fks(self, writer, fksborn, fortran_model):
1764 """Export a matrix element to a born.f file in MadFKS format""" 1765 1766 matrix_element = fksborn.born_matrix_element 1767 1768 if not matrix_element.get('processes') or \ 1769 not matrix_element.get('diagrams'): 1770 return 0 1771 1772 if not isinstance(writer, writers.FortranWriter): 1773 raise writers.FortranWriter.FortranWriterError(\ 1774 "writer not FortranWriter") 1775 # Set lowercase/uppercase Fortran code 1776 writers.FortranWriter.downcase = False 1777 1778 replace_dict = {} 1779 1780 # Extract version number and date from VERSION file 1781 info_lines = self.get_mg5_info_lines() 1782 replace_dict['info_lines'] = info_lines 1783 1784 # Extract process info lines 1785 process_lines = self.get_process_info_lines(matrix_element) 1786 replace_dict['process_lines'] = process_lines 1787 1788 1789 # Extract ncomb 1790 ncomb = matrix_element.get_helicity_combinations() 1791 replace_dict['ncomb'] = ncomb 1792 1793 # Extract helicity lines 1794 helicity_lines = self.get_helicity_lines(matrix_element) 1795 replace_dict['helicity_lines'] = helicity_lines 1796 1797 # Extract IC line 1798 ic_line = self.get_ic_line(matrix_element) 1799 replace_dict['ic_line'] = ic_line 1800 1801 # Extract overall denominator 1802 # Averaging initial state color, spin, and identical FS particles 1803 #den_factor_line = get_den_factor_line(matrix_element) 1804 1805 # Extract ngraphs 1806 ngraphs = matrix_element.get_number_of_amplitudes() 1807 replace_dict['ngraphs'] = ngraphs 1808 1809 # Extract nwavefuncs 1810 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1811 replace_dict['nwavefuncs'] = nwavefuncs 1812 1813 # Extract ncolor 1814 ncolor = max(1, len(matrix_element.get('color_basis'))) 1815 replace_dict['ncolor'] = ncolor 1816 1817 # Extract color data lines 1818 color_data_lines = self.get_color_data_lines(matrix_element) 1819 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1820 1821 # Extract helas calls 1822 helas_calls = fortran_model.get_matrix_element_calls(\ 1823 matrix_element) 1824 replace_dict['helas_calls'] = "\n".join(helas_calls) 1825 1826 # Extract amp2 lines 1827 amp2_lines = self.get_amp2_lines(matrix_element) 1828 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1829 1830 # Extract JAMP lines 1831 jamp_lines = self.get_JAMP_lines(matrix_element) 1832 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1833 1834 # Set the size of Wavefunction 1835 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 1836 replace_dict['wavefunctionsize'] = 20 1837 else: 1838 replace_dict['wavefunctionsize'] = 8 1839 1840 # Extract glu_ij_lines 1841 ij_lines = self.get_ij_lines(fksborn) 1842 replace_dict['ij_lines'] = '\n'.join(ij_lines) 1843 1844 # Extract den_factor_lines 1845 den_factor_lines = self.get_den_factor_lines(fksborn) 1846 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1847 1848 # Extract the number of FKS process 1849 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1) 1850 1851 file = open(os.path.join(_file_path, \ 1852 'iolibs/template_files/born_fks.inc')).read() 1853 file = file % replace_dict 1854 1855 # Write the file 1856 writer.writelines(file) 1857 1858 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
1859 1860
1861 - def write_born_hel(self, writer, fksborn, fortran_model):
1862 """Export a matrix element to a born_hel.f file in MadFKS format""" 1863 1864 matrix_element = fksborn.born_matrix_element 1865 1866 if not matrix_element.get('processes') or \ 1867 not matrix_element.get('diagrams'): 1868 return 0 1869 1870 if not isinstance(writer, writers.FortranWriter): 1871 raise writers.FortranWriter.FortranWriterError(\ 1872 "writer not FortranWriter") 1873 # Set lowercase/uppercase Fortran code 1874 writers.FortranWriter.downcase = False 1875 1876 replace_dict = {} 1877 1878 # Extract version number and date from VERSION file 1879 info_lines = self.get_mg5_info_lines() 1880 replace_dict['info_lines'] = info_lines 1881 1882 # Extract process info lines 1883 process_lines = self.get_process_info_lines(matrix_element) 1884 replace_dict['process_lines'] = process_lines 1885 1886 1887 # Extract ncomb 1888 ncomb = matrix_element.get_helicity_combinations() 1889 replace_dict['ncomb'] = ncomb 1890 1891 # Extract helicity lines 1892 helicity_lines = self.get_helicity_lines(matrix_element) 1893 replace_dict['helicity_lines'] = helicity_lines 1894 1895 # Extract IC line 1896 ic_line = self.get_ic_line(matrix_element) 1897 replace_dict['ic_line'] = ic_line 1898 1899 # Extract overall denominator 1900 # Averaging initial state color, spin, and identical FS particles 1901 #den_factor_line = get_den_factor_line(matrix_element) 1902 1903 # Extract ngraphs 1904 ngraphs = matrix_element.get_number_of_amplitudes() 1905 replace_dict['ngraphs'] = ngraphs 1906 1907 # Extract nwavefuncs 1908 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1909 replace_dict['nwavefuncs'] = nwavefuncs 1910 1911 # Extract ncolor 1912 ncolor = max(1, len(matrix_element.get('color_basis'))) 1913 replace_dict['ncolor'] = ncolor 1914 1915 # Extract color data lines 1916 color_data_lines = self.get_color_data_lines(matrix_element) 1917 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1918 1919 # Extract amp2 lines 1920 amp2_lines = self.get_amp2_lines(matrix_element) 1921 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1922 1923 # Extract JAMP lines 1924 jamp_lines = self.get_JAMP_lines(matrix_element) 1925 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1926 1927 # Extract den_factor_lines 1928 den_factor_lines = self.get_den_factor_lines(fksborn) 1929 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1930 1931 # Extract the number of FKS process 1932 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1933 1934 file = open(os.path.join(_file_path, \ 1935 'iolibs/template_files/born_fks_hel.inc')).read() 1936 file = file % replace_dict 1937 1938 # Write the file 1939 writer.writelines(file) 1940 1941 return
1942 1943 1944 #=============================================================================== 1945 # write_born_sf_fks 1946 #=============================================================================== 1947 #test written
1948 - def write_sborn_sf(self, writer, color_links, fortran_model):
1949 """Creates the sborn_sf.f file, containing the calls to the different 1950 color linked borns""" 1951 1952 replace_dict = {} 1953 nborns = len(color_links) 1954 ifkss = [] 1955 iborns = [] 1956 mms = [] 1957 nns = [] 1958 iflines = "\n" 1959 1960 #header for the sborn_sf.f file 1961 file = """subroutine sborn_sf(p_born,m,n,wgt) 1962 implicit none 1963 include "nexternal.inc" 1964 double precision p_born(0:3,nexternal-1),wgt 1965 double complex wgt1(2) 1966 integer m,n \n""" 1967 1968 if nborns > 0: 1969 1970 for i, c_link in enumerate(color_links): 1971 iborn = i+1 1972 1973 iff = {True : 'if', False : 'elseif'}[i==0] 1974 1975 m, n = c_link['link'] 1976 1977 if m != n: 1978 iflines += \ 1979 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1980 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\ 1981 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1982 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1983 else: 1984 iflines += \ 1985 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1986 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\ 1987 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1988 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1989 1990 1991 file += iflines + \ 1992 """else 1993 wgt = 0d0 1994 endif 1995 1996 return 1997 end""" 1998 elif nborns == 0: 1999 #write a dummy file 2000 file+=""" 2001 c This is a dummy function because 2002 c this subdir has no soft singularities 2003 wgt = 0d0 2004 2005 return 2006 end""" 2007 # Write the end of the file 2008 2009 writer.writelines(file)
2010 2011 2012 #=============================================================================== 2013 # write_b_sf_fks 2014 #=============================================================================== 2015 #test written
2016 - def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
2017 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format""" 2018 2019 matrix_element = copy.copy(fksborn.born_matrix_element) 2020 2021 if not matrix_element.get('processes') or \ 2022 not matrix_element.get('diagrams'): 2023 return 0 2024 2025 if not isinstance(writer, writers.FortranWriter): 2026 raise writers.FortranWriter.FortranWriterError(\ 2027 "writer not FortranWriter") 2028 # Set lowercase/uppercase Fortran code 2029 writers.FortranWriter.downcase = False 2030 2031 iborn = i + 1 2032 link = fksborn.color_links[i] 2033 2034 replace_dict = {} 2035 2036 replace_dict['iborn'] = iborn 2037 2038 # Extract version number and date from VERSION file 2039 info_lines = self.get_mg5_info_lines() 2040 replace_dict['info_lines'] = info_lines 2041 2042 # Extract process info lines 2043 process_lines = self.get_process_info_lines(matrix_element) 2044 replace_dict['process_lines'] = process_lines + \ 2045 "\nc spectators: %d %d \n" % tuple(link['link']) 2046 2047 # Extract ncomb 2048 ncomb = matrix_element.get_helicity_combinations() 2049 replace_dict['ncomb'] = ncomb 2050 2051 # Extract helicity lines 2052 helicity_lines = self.get_helicity_lines(matrix_element) 2053 replace_dict['helicity_lines'] = helicity_lines 2054 2055 # Extract IC line 2056 ic_line = self.get_ic_line(matrix_element) 2057 replace_dict['ic_line'] = ic_line 2058 2059 # Extract den_factor_lines 2060 den_factor_lines = self.get_den_factor_lines(fksborn) 2061 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 2062 2063 # Extract ngraphs 2064 ngraphs = matrix_element.get_number_of_amplitudes() 2065 replace_dict['ngraphs'] = ngraphs 2066 2067 # Extract nwavefuncs 2068 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2069 replace_dict['nwavefuncs'] = nwavefuncs 2070 2071 # Extract ncolor 2072 ncolor1 = max(1, len(link['orig_basis'])) 2073 replace_dict['ncolor1'] = ncolor1 2074 ncolor2 = max(1, len(link['link_basis'])) 2075 replace_dict['ncolor2'] = ncolor2 2076 2077 # Extract color data lines 2078 color_data_lines = self.get_color_data_lines_from_color_matrix(\ 2079 link['link_matrix']) 2080 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2081 2082 # Extract amp2 lines 2083 amp2_lines = self.get_amp2_lines(matrix_element) 2084 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2085 2086 # Extract JAMP lines 2087 jamp_lines = self.get_JAMP_lines(matrix_element) 2088 new_jamp_lines = [] 2089 for line in jamp_lines: 2090 line = string.replace(line, 'JAMP', 'JAMP1') 2091 new_jamp_lines.append(line) 2092 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines) 2093 2094 matrix_element.set('color_basis', link['link_basis'] ) 2095 jamp_lines = self.get_JAMP_lines(matrix_element) 2096 new_jamp_lines = [] 2097 for line in jamp_lines: 2098 line = string.replace(line, 'JAMP', 'JAMP2') 2099 new_jamp_lines.append(line) 2100 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines) 2101 2102 2103 # Extract the number of FKS process 2104 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2105 2106 file = open(os.path.join(_file_path, \ 2107 'iolibs/template_files/b_sf_xxx_fks.inc')).read() 2108 file = file % replace_dict 2109 2110 # Write the file 2111 writer.writelines(file) 2112 2113 return 0 , ncolor1
2114 2115 2116 #=============================================================================== 2117 # write_born_nhel_file 2118 #=============================================================================== 2119 #test written
2120 - def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
2121 """Write the born_nhel.inc file for MG4.""" 2122 2123 ncomb = matrix_element.get_helicity_combinations() 2124 file = " integer max_bhel, max_bcol \n" 2125 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \ 2126 (ncomb, nflows) 2127 2128 # Write the file 2129 writer.writelines(file) 2130 2131 return True
2132 2133 #=============================================================================== 2134 # write_fks_info_file 2135 #===============================================================================
2136 - def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
2137 """Writes the content of nFKSconfigs.inc, which just gives the 2138 total FKS dirs as a parameter. 2139 nFKSconfigs is always >=1 (use a fake configuration for LOonly)""" 2140 replace_dict = {} 2141 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1) 2142 content = \ 2143 """ INTEGER FKS_CONFIGS 2144 PARAMETER (FKS_CONFIGS=%(nconfs)d) 2145 2146 """ % replace_dict 2147 2148 writer.writelines(content)
2149 2150 2151 #=============================================================================== 2152 # write_fks_info_file 2153 #===============================================================================
2154 - def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
2155 """Writes the content of fks_info.inc, which lists the informations on the 2156 possible splittings of the born ME. 2157 nconfs is always >=1 (use a fake configuration for LOonly). 2158 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and 2159 the last colored particle as j_fks.""" 2160 2161 replace_dict = {} 2162 fks_info_list = fksborn.get_fks_info_list() 2163 replace_dict['nconfs'] = max(len(fks_info_list), 1) 2164 2165 # this is for processes with 'real' or 'all' as NLO mode 2166 if len(fks_info_list) > 0: 2167 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \ 2168 for info in fks_info_list]) 2169 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \ 2170 for info in fks_info_list]) 2171 2172 col_lines = [] 2173 pdg_lines = [] 2174 charge_lines = [] 2175 fks_j_from_i_lines = [] 2176 for i, info in enumerate(fks_info_list): 2177 col_lines.append( \ 2178 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2179 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) )) 2180 pdg_lines.append( \ 2181 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2182 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs']))) 2183 charge_lines.append(\ 2184 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\ 2185 % (i + 1, ', '.join('%19.15fd0' % charg\ 2186 for charg in fksborn.real_processes[info['n_me']-1].charges) )) 2187 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\ 2188 i + 1)) 2189 else: 2190 # this is for 'LOonly', generate a fake FKS configuration with 2191 # - i_fks = nexternal, pdg type = -21 and color =8 2192 # - j_fks = the last colored particle 2193 bornproc = fksborn.born_matrix_element.get('processes')[0] 2194 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21] 2195 colors = [l.get('color') for l in bornproc.get('legs')] + [8] 2196 charges = [0.] 
* len(colors) 2197 2198 fks_i = len(colors) 2199 # use the first colored particle if it exists, or 2200 # just the first 2201 fks_j=1 2202 for cpos, col in enumerate(colors[:-1]): 2203 if col != 1: 2204 fks_j = cpos+1 2205 2206 fks_i_values = str(fks_i) 2207 fks_j_values = str(fks_j) 2208 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2209 % ', '.join([str(col) for col in colors])] 2210 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2211 % ', '.join([str(pdg) for pdg in pdgs])] 2212 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2213 % ', '.join('%19.15fd0' % charg for charg in charges)] 2214 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \ 2215 % (fks_i, fks_j)] 2216 2217 2218 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values 2219 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values 2220 replace_dict['col_lines'] = '\n'.join(col_lines) 2221 replace_dict['pdg_lines'] = '\n'.join(pdg_lines) 2222 replace_dict['charge_lines'] = '\n'.join(charge_lines) 2223 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines) 2224 2225 content = \ 2226 """ INTEGER IPOS, JPOS 2227 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d) 2228 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL) 2229 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL) 2230 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL) 2231 2232 %(fks_i_line)s 2233 %(fks_j_line)s 2234 2235 %(fks_j_from_i_lines)s 2236 2237 C 2238 C Particle type: 2239 C octet = 8, triplet = 3, singlet = 1 2240 %(col_lines)s 2241 2242 C 2243 C Particle type according to PDG: 2244 C 2245 %(pdg_lines)s 2246 2247 C 2248 C Particle charge: 2249 C charge is set 0. with QCD corrections, which is irrelevant 2250 %(charge_lines)s 2251 """ % replace_dict 2252 if not isinstance(writer, writers.FortranWriter): 2253 raise writers.FortranWriter.FortranWriterError(\ 2254 "writer not FortranWriter") 2255 # Set lowercase/uppercase Fortran code 2256 writers.FortranWriter.downcase = False 2257 2258 writer.writelines(content) 2259 2260 return True
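As a worked illustration of the LOonly branch above, consider a hypothetical born process u u~ > e+ e- (the process and its PDG codes are chosen purely as an example):

    pdgs    = [2, -2, -11, 11] + [-21]        # append the 'antigluon' as fake i_fks
    colors  = [3, -3, 1, 1] + [8]
    charges = [0.] * len(colors)

    fks_i = len(colors)                       # 5, the fake extra parton
    fks_j = 1
    for cpos, col in enumerate(colors[:-1]):  # last colored particle of the born
        if col != 1:
            fks_j = cpos + 1
    # fks_i = 5, fks_j = 2 (the u~); the DATA statements are filled accordingly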
2261 2262 2263 #=============================================================================== 2264 # write_matrix_element_fks 2265 #=============================================================================== 2266 #test written
2267 - def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
2268 """Export a matrix element to a matrix.f file in MG4 madevent format""" 2269 2270 if not matrix_element.get('processes') or \ 2271 not matrix_element.get('diagrams'): 2272 return 0,0 2273 2274 if not isinstance(writer, writers.FortranWriter): 2275 raise writers.FortranWriter.FortranWriterError(\ 2276 "writer not FortranWriter") 2277 # Set lowercase/uppercase Fortran code 2278 writers.FortranWriter.downcase = False 2279 2280 replace_dict = {} 2281 replace_dict['N_me'] = n 2282 2283 # Extract version number and date from VERSION file 2284 info_lines = self.get_mg5_info_lines() 2285 replace_dict['info_lines'] = info_lines 2286 2287 # Extract process info lines 2288 process_lines = self.get_process_info_lines(matrix_element) 2289 replace_dict['process_lines'] = process_lines 2290 2291 # Extract ncomb 2292 ncomb = matrix_element.get_helicity_combinations() 2293 replace_dict['ncomb'] = ncomb 2294 2295 # Extract helicity lines 2296 helicity_lines = self.get_helicity_lines(matrix_element) 2297 replace_dict['helicity_lines'] = helicity_lines 2298 2299 # Extract IC line 2300 ic_line = self.get_ic_line(matrix_element) 2301 replace_dict['ic_line'] = ic_line 2302 2303 # Extract overall denominator 2304 # Averaging initial state color, spin, and identical FS particles 2305 den_factor_line = self.get_den_factor_line(matrix_element) 2306 replace_dict['den_factor_line'] = den_factor_line 2307 2308 # Extract ngraphs 2309 ngraphs = matrix_element.get_number_of_amplitudes() 2310 replace_dict['ngraphs'] = ngraphs 2311 2312 # Extract ncolor 2313 ncolor = max(1, len(matrix_element.get('color_basis'))) 2314 replace_dict['ncolor'] = ncolor 2315 2316 # Extract color data lines 2317 color_data_lines = self.get_color_data_lines(matrix_element) 2318 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2319 2320 # Extract helas calls 2321 helas_calls = fortran_model.get_matrix_element_calls(\ 2322 matrix_element) 2323 replace_dict['helas_calls'] = "\n".join(helas_calls) 2324 2325 # Extract nwavefuncs (important to place after get_matrix_element_calls 2326 # so that 'me_id' is set) 2327 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2328 replace_dict['nwavefuncs'] = nwavefuncs 2329 2330 # Extract amp2 lines 2331 amp2_lines = self.get_amp2_lines(matrix_element) 2332 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2333 2334 # Set the size of Wavefunction 2335 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2336 replace_dict['wavefunctionsize'] = 20 2337 else: 2338 replace_dict['wavefunctionsize'] = 8 2339 2340 # Extract JAMP lines 2341 jamp_lines = self.get_JAMP_lines(matrix_element) 2342 2343 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2344 2345 realfile = open(os.path.join(_file_path, \ 2346 'iolibs/template_files/realmatrix_fks.inc')).read() 2347 2348 realfile = realfile % replace_dict 2349 2350 # Write the file 2351 writer.writelines(realfile) 2352 2353 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2354 2355 2356 #=============================================================================== 2357 # write_pdf_file 2358 #===============================================================================
2359 - def write_pdf_file(self, writer, matrix_element, n, fortran_model):
2360 #test written 2361 """Write the auto_dsig.f file for MadFKS, which contains 2362 pdf call information""" 2363 2364 if not matrix_element.get('processes') or \ 2365 not matrix_element.get('diagrams'): 2366 return 0 2367 2368 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2369 2370 if ninitial < 1 or ninitial > 2: 2371 raise writers.FortranWriter.FortranWriterError, \ 2372 """Need ninitial = 1 or 2 to write auto_dsig file""" 2373 2374 replace_dict = {} 2375 2376 replace_dict['N_me'] = n 2377 2378 # Extract version number and date from VERSION file 2379 info_lines = self.get_mg5_info_lines() 2380 replace_dict['info_lines'] = info_lines 2381 2382 # Extract process info lines 2383 process_lines = self.get_process_info_lines(matrix_element) 2384 replace_dict['process_lines'] = process_lines 2385 2386 pdf_vars, pdf_data, pdf_lines = \ 2387 self.get_pdf_lines_mir(matrix_element, ninitial, False, False) 2388 replace_dict['pdf_vars'] = pdf_vars 2389 replace_dict['pdf_data'] = pdf_data 2390 replace_dict['pdf_lines'] = pdf_lines 2391 2392 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \ 2393 self.get_pdf_lines_mir(matrix_element, ninitial, False, True) 2394 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr 2395 2396 file = open(os.path.join(_file_path, \ 2397 'iolibs/template_files/parton_lum_n_fks.inc')).read() 2398 file = file % replace_dict 2399 2400 # Write the file 2401 writer.writelines(file)
2402 2403 2404 2405 #=============================================================================== 2406 # write_coloramps_file 2407 #=============================================================================== 2408 #test written
2409 - def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
2410 """Write the coloramps.inc file for MadEvent""" 2411 2412 lines = [] 2413 lines.append( "logical icolamp(%d,%d,1)" % \ 2414 (max(len(matrix_element.get('color_basis').keys()), 1), 2415 len(mapconfigs))) 2416 2417 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1) 2418 2419 # Write the file 2420 writer.writelines(lines) 2421 2422 return True
2423 2424 2425 #=============================================================================== 2426 # write_leshouche_file 2427 #=============================================================================== 2428 #test written
2429 - def write_leshouche_file(self, writer, matrix_element, fortran_model):
2430 """Write the leshouche.inc file for MG4""" 2431 2432 # Extract number of external particles 2433 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2434 2435 lines = [] 2436 for iproc, proc in enumerate(matrix_element.get('processes')): 2437 legs = proc.get_legs_with_decays() 2438 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \ 2439 (iproc + 1, nexternal, 2440 ",".join([str(l.get('id')) for l in legs]))) 2441 for i in [1, 2]: 2442 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2443 (i, iproc + 1, nexternal, 2444 ",".join([ "%3r" % 0 ] * ninitial + \ 2445 [ "%3r" % i ] * (nexternal - ninitial)))) 2446 2447 # Here goes the color connections corresponding to the JAMPs 2448 # Only one output, for the first subproc! 2449 if iproc == 0: 2450 # If no color basis, just output trivial color flow 2451 if not matrix_element.get('color_basis'): 2452 for i in [1, 2]: 2453 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \ 2454 (i, nexternal, 2455 ",".join([ "%3r" % 0 ] * nexternal))) 2456 color_flow_list = [] 2457 2458 else: 2459 # First build a color representation dictionnary 2460 repr_dict = {} 2461 for l in legs: 2462 repr_dict[l.get('number')] = \ 2463 proc.get('model').get_particle(l.get('id')).get_color()\ 2464 * (-1)**(1+l.get('state')) 2465 # Get the list of color flows 2466 color_flow_list = \ 2467 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2468 ninitial) 2469 # And output them properly 2470 for cf_i, color_flow_dict in enumerate(color_flow_list): 2471 for i in [0, 1]: 2472 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2473 (i + 1, cf_i + 1, nexternal, 2474 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2475 for l in legs]))) 2476 2477 # Write the file 2478 writer.writelines(lines) 2479 2480 return len(color_flow_list)
2481 2482 2483 #=============================================================================== 2484 # write_configs_file 2485 #=============================================================================== 2486 #test_written
2487 - def write_configs_file(self, writer, matrix_element, fortran_model):
2488 """Write the configs.inc file for MadEvent""" 2489 2490 # Extract number of external particles 2491 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2492 lines = [] 2493 2494 iconfig = 0 2495 2496 s_and_t_channels = [] 2497 mapconfigs = [] 2498 2499 model = matrix_element.get('processes')[0].get('model') 2500 # new_pdg = model.get_first_non_pdg() 2501 2502 base_diagrams = matrix_element.get('base_amplitude').get('diagrams') 2503 model = matrix_element.get('base_amplitude').get('process').get('model') 2504 minvert = min([max([len(vert.get('legs')) for vert in \ 2505 diag.get('vertices')]) for diag in base_diagrams]) 2506 2507 for idiag, diag in enumerate(base_diagrams): 2508 if any([len(vert.get('legs')) > minvert for vert in 2509 diag.get('vertices')]): 2510 # Only 3-vertices allowed in configs.inc 2511 continue 2512 iconfig = iconfig + 1 2513 helas_diag = matrix_element.get('diagrams')[idiag] 2514 mapconfigs.append(helas_diag.get('number')) 2515 lines.append("# Diagram %d, Amplitude %d" % \ 2516 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number'])) 2517 # Correspondance between the config and the amplitudes 2518 lines.append("data mapconfig(%4d)/%4d/" % (iconfig, 2519 helas_diag.get('amplitudes')[0]['number'])) 2520 2521 # Need to reorganize the topology so that we start with all 2522 # final state external particles and work our way inwards 2523 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 2524 get_s_and_t_channels(ninitial, model, 990) 2525 2526 s_and_t_channels.append([schannels, tchannels]) 2527 2528 # Write out propagators for s-channel and t-channel vertices 2529 allchannels = schannels 2530 if len(tchannels) > 1: 2531 # Write out tchannels only if there are any non-trivial ones 2532 allchannels = schannels + tchannels 2533 2534 for vert in allchannels: 2535 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2536 last_leg = vert.get('legs')[-1] 2537 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \ 2538 (last_leg.get('number'), iconfig, len(daughters), 2539 ",".join(["%3d" % d for d in daughters]))) 2540 if vert in schannels: 2541 lines.append("data sprop(%4d,%4d)/%8d/" % \ 2542 (last_leg.get('number'), iconfig, 2543 last_leg.get('id'))) 2544 elif vert in tchannels[:-1]: 2545 lines.append("data tprid(%4d,%4d)/%8d/" % \ 2546 (last_leg.get('number'), iconfig, 2547 abs(last_leg.get('id')))) 2548 2549 # Write out number of configs 2550 lines.append("# Number of configs") 2551 lines.append("data mapconfig(0)/%4d/" % iconfig) 2552 2553 # Write the file 2554 writer.writelines(lines) 2555 2556 return iconfig, mapconfigs, s_and_t_channels
2557 2558 2559 #=============================================================================== 2560 # write_decayBW_file 2561 #=============================================================================== 2562 #test written
2563 - def write_decayBW_file(self, writer, s_and_t_channels):
2564 """Write the decayBW.inc file for MadEvent""" 2565 2566 lines = [] 2567 2568 booldict = {False: ".false.", True: ".false."} 2569 ####Changed by MZ 2011-11-23!!!! 2570 2571 for iconf, config in enumerate(s_and_t_channels): 2572 schannels = config[0] 2573 for vertex in schannels: 2574 # For the resulting leg, pick out whether it comes from 2575 # decay or not, as given by the from_group flag 2576 leg = vertex.get('legs')[-1] 2577 lines.append("data gForceBW(%d,%d)/%s/" % \ 2578 (leg.get('number'), iconf + 1, 2579 booldict[leg.get('from_group')])) 2580 2581 # Write the file 2582 writer.writelines(lines) 2583 2584 return True
2585 2586 2587 #=============================================================================== 2588 # write_dname_file 2589 #===============================================================================
2590 - def write_dname_file(self, writer, matrix_element, fortran_model):
2591 """Write the dname.mg file for MG4""" 2592 2593 line = "DIRNAME=P%s" % \ 2594 matrix_element.get('processes')[0].shell_string() 2595 2596 # Write the file 2597 writer.write(line + "\n") 2598 2599 return True
2600 2601 2602 #=============================================================================== 2603 # write_iproc_file 2604 #===============================================================================
2605 - def write_iproc_file(self, writer, me_number):
2606 """Write the iproc.dat file for MG4""" 2607 2608 line = "%d" % (me_number + 1) 2609 2610 # Write the file 2611 for line_to_write in writer.write_line(line): 2612 writer.write(line_to_write) 2613 return True
2614 2615 2616 #=============================================================================== 2617 # Helper functions 2618 #=============================================================================== 2619 2620 2621 #=============================================================================== 2622 # get_fks_j_from_i_lines 2623 #=============================================================================== 2624
2625 - def get_fks_j_from_i_lines(self, me, i = 0): #test written
2626 """generate the lines for fks.inc describing initializating the 2627 fks_j_from_i array""" 2628 lines = [] 2629 if not me.isfinite: 2630 for ii, js in me.fks_j_from_i.items(): 2631 if js: 2632 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2633 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js]))) 2634 else: 2635 lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2636 % (2, 1, 1, '1')) 2637 lines.append('') 2638 2639 return lines 2640 2641 2642 #=============================================================================== 2643 # get_leshouche_lines 2644 #===============================================================================
2645 - def get_leshouche_lines(self, matrix_element, ime):
2646 #test written 2647 """Write the leshouche.inc file for MG4""" 2648 2649 # Extract number of external particles 2650 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2651 2652 lines = [] 2653 for iproc, proc in enumerate(matrix_element.get('processes')): 2654 legs = proc.get_legs_with_decays() 2655 lines.append("I %4d %4d %s" % \ 2656 (ime, iproc + 1, 2657 " ".join([str(l.get('id')) for l in legs]))) 2658 for i in [1, 2]: 2659 lines.append("M %4d %4d %4d %s" % \ 2660 (ime, i, iproc + 1, 2661 " ".join([ "%3d" % 0 ] * ninitial + \ 2662 [ "%3d" % i ] * (nexternal - ninitial)))) 2663 2664 # Here goes the color connections corresponding to the JAMPs 2665 # Only one output, for the first subproc! 2666 if iproc == 0: 2667 # If no color basis, just output trivial color flow 2668 if not matrix_element.get('color_basis'): 2669 for i in [1, 2]: 2670 lines.append("C %4d %4d 1 %s" % \ 2671 (ime, i, 2672 " ".join([ "%3d" % 0 ] * nexternal))) 2673 color_flow_list = [] 2674 nflow = 1 2675 2676 else: 2677 # First build a color representation dictionnary 2678 repr_dict = {} 2679 for l in legs: 2680 repr_dict[l.get('number')] = \ 2681 proc.get('model').get_particle(l.get('id')).get_color()\ 2682 * (-1)**(1+l.get('state')) 2683 # Get the list of color flows 2684 color_flow_list = \ 2685 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2686 ninitial) 2687 # And output them properly 2688 for cf_i, color_flow_dict in enumerate(color_flow_list): 2689 for i in [0, 1]: 2690 lines.append("C %4d %4d %4d %s" % \ 2691 (ime, i + 1, cf_i + 1, 2692 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2693 for l in legs]))) 2694 2695 nflow = len(color_flow_list) 2696 2697 nproc = len(matrix_element.get('processes')) 2698 2699 return lines, nproc, nflow
2700 2701 2702 #=============================================================================== 2703 # get_den_factor_lines 2704 #===============================================================================
2705 - def get_den_factor_lines(self, fks_born):
2706 """returns the lines with the information on the denominator keeping care 2707 of the identical particle factors in the various real emissions""" 2708 2709 lines = [] 2710 info_list = fks_born.get_fks_info_list() 2711 if info_list: 2712 # if the reals have been generated, fill with the corresponding average factor 2713 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 2714 lines.append('DATA IDEN_VALUES /' + \ 2715 ', '.join(['%d' % ( 2716 fks_born.born_matrix_element.get_denominator_factor() ) \ 2717 for info in info_list]) + '/') 2718 else: 2719 # otherwise use the born 2720 lines.append('INTEGER IDEN_VALUES(1)') 2721 lines.append('DATA IDEN_VALUES / %d /' \ 2722 % fks_born.born_matrix_element.get_denominator_factor()) 2723 2724 return lines
2725 2726 2727 #=============================================================================== 2728 # get_ij_lines 2729 #===============================================================================
2730 - def get_ij_lines(self, fks_born):
2731 """returns the lines with the information on the particle number of the born 2732 that splits""" 2733 info_list = fks_born.get_fks_info_list() 2734 lines = [] 2735 if info_list: 2736 # if the reals have been generated, fill with the corresponding value of ij 2737 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 2738 lines.append('DATA IJ_VALUES /' + \ 2739 ', '.join(['%d' % info['fks_info']['ij'] for info in info_list]) + '/') 2740 else: 2741 #otherwise just put the first leg 2742 lines.append('INTEGER IJ_VALUES(1)') 2743 lines.append('DATA IJ_VALUES / 1 /') 2744 2745 return lines
2746 2747
2748 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\ 2749 mirror = False): #test written
2750 """Generate the PDF lines for the auto_dsig.f file""" 2751 2752 processes = matrix_element.get('processes') 2753 model = processes[0].get('model') 2754 2755 pdf_definition_lines = "" 2756 pdf_data_lines = "" 2757 pdf_lines = "" 2758 2759 if ninitial == 1: 2760 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 2761 for i, proc in enumerate(processes): 2762 process_line = proc.base_string() 2763 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2764 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n" 2765 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 2766 else: 2767 # Pick out all initial state particles for the two beams 2768 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 2769 p in processes]))), 2770 sorted(list(set([p.get_initial_pdg(2) for \ 2771 p in processes])))] 2772 2773 # Prepare all variable names 2774 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 2775 sum(initial_states,[])]) 2776 for key,val in pdf_codes.items(): 2777 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 2778 2779 # Set conversion from PDG code to number used in PDF calls 2780 pdgtopdf = {21: 0, 22: 7} 2781 # Fill in missing entries of pdgtopdf 2782 for pdg in sum(initial_states,[]): 2783 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 2784 pdgtopdf[pdg] = pdg 2785 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 2786 # If any particle has pdg code 7, we need to use something else 2787 pdgtopdf[pdg] = 6000000 + pdg 2788 2789 # Get PDF variable declarations for all initial states 2790 for i in [0,1]: 2791 pdf_definition_lines += "DOUBLE PRECISION " + \ 2792 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2793 for pdg in \ 2794 initial_states[i]]) + \ 2795 "\n" 2796 2797 # Get PDF data lines for all initial states 2798 for i in [0,1]: 2799 pdf_data_lines += "DATA " + \ 2800 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2801 for pdg in initial_states[i]]) + \ 2802 "/%d*1D0/" % len(initial_states[i]) + \ 2803 "\n" 2804 2805 # Get PDF values for the different initial states 2806 for i, init_states in enumerate(initial_states): 2807 if not mirror: 2808 ibeam = i + 1 2809 else: 2810 ibeam = 2 - i 2811 if subproc_group: 2812 pdf_lines = pdf_lines + \ 2813 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 2814 % (ibeam, ibeam) 2815 else: 2816 pdf_lines = pdf_lines + \ 2817 "IF (ABS(LPP(%d)) .GE. 
1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 2818 % (ibeam, ibeam) 2819 2820 for initial_state in init_states: 2821 if initial_state in pdf_codes.keys(): 2822 if subproc_group: 2823 if abs(pdgtopdf[initial_state]) <= 7: 2824 pdf_lines = pdf_lines + \ 2825 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 2826 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 2827 (pdf_codes[initial_state], 2828 i + 1, ibeam, pdgtopdf[initial_state], 2829 ibeam, ibeam) 2830 else: 2831 # setting other partons flavours outside quark, gluon, photon to be 0d0 2832 pdf_lines = pdf_lines + \ 2833 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2834 "%s%d=0d0\n") % \ 2835 (pdf_codes[initial_state],i + 1) 2836 else: 2837 if abs(pdgtopdf[initial_state]) <= 7: 2838 pdf_lines = pdf_lines + \ 2839 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 2840 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 2841 (pdf_codes[initial_state], 2842 i + 1, ibeam, pdgtopdf[initial_state], 2843 ibeam, ibeam) 2844 else: 2845 # setting other partons flavours outside quark, gluon, photon to be 0d0 2846 pdf_lines = pdf_lines + \ 2847 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2848 "%s%d=0d0\n") % \ 2849 (pdf_codes[initial_state],i + 1) 2850 2851 pdf_lines = pdf_lines + "ENDIF\n" 2852 2853 # Add up PDFs for the different initial state particles 2854 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 2855 for proc in processes: 2856 process_line = proc.base_string() 2857 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2858 pdf_lines = pdf_lines + "\nPD(IPROC) = " 2859 for ibeam in [1, 2]: 2860 initial_state = proc.get_initial_pdg(ibeam) 2861 if initial_state in pdf_codes.keys(): 2862 pdf_lines = pdf_lines + "%s%d*" % \ 2863 (pdf_codes[initial_state], ibeam) 2864 else: 2865 pdf_lines = pdf_lines + "1d0*" 2866 # Remove last "*" from pdf_lines 2867 pdf_lines = pdf_lines[:-1] + "\n" 2868 2869 # Remove last line break from pdf_lines 2870 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1] 2871 2872 2873 #test written
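A small sketch of how the PDG-to-PDF-code dictionary above gets filled, using a hypothetical set of initial-state PDG codes (the clash on code 7 is contrived just to show the 6000000 offset):

    initial_pdgs = [21, 22, 2, -2, 7]   # hypothetical initial-state PDGs
    pdgtopdf = {21: 0, 22: 7}
    for pdg in initial_pdgs:
        if pdg not in pdgtopdf and pdg not in pdgtopdf.values():
            pdgtopdf[pdg] = pdg
        elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
            # a particle whose code is already taken (here 7) gets shifted
            pdgtopdf[pdg] = 6000000 + pdg
    # pdgtopdf -> {21: 0, 22: 7, 2: 2, -2: -2, 7: 6000007}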
2874 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
2875 """Return the color matrix definition lines for the given color_matrix. Split 2876 rows in chunks of size n.""" 2877 2878 if not color_matrix: 2879 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 2880 else: 2881 ret_list = [] 2882 my_cs = color.ColorString() 2883 for index, denominator in \ 2884 enumerate(color_matrix.get_line_denominators()): 2885 # First write the common denominator for this color matrix line 2886 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 2887 # Then write the numerators for the matrix elements 2888 num_list = color_matrix.get_line_numerators(index, denominator) 2889 for k in xrange(0, len(num_list), n): 2890 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 2891 (index + 1, k + 1, min(k + n, len(num_list)), 2892 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 2893 2894 return ret_list
2895 2896 #=========================================================================== 2897 # write_maxamps_file 2898 #===========================================================================
2899 - def write_maxamps_file(self, writer, maxamps, maxflows, 2900 maxproc,maxsproc):
2901 """Write the maxamps.inc file for MG4.""" 2902 2903 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 2904 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 2905 (maxamps, maxflows) 2906 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 2907 (maxproc, maxsproc) 2908 2909 # Write the file 2910 writer.writelines(file) 2911 2912 return True
2913 2914 #=============================================================================== 2915 # write_ncombs_file 2916 #===============================================================================
2917 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
2918  # #test written 2919          """Write the ncombs.inc file for MadEvent.""" 2920   2921          # Extract number of external particles 2922          (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2923   2924          # n_max_cl (used for clustering) is 2^(nexternal+1) 2925          file = "       integer    n_max_cl\n" 2926          file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1)) 2927   2928          # Write the file 2929          writer.writelines(file) 2930   2931          return True
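A quick check of the parameter written above, for an illustrative multiplicity:

    nexternal = 5                      # illustrative multiplicity
    n_max_cl = 2 ** (nexternal + 1)    # 64, the value written to ncombs.inc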
2932 2933 #=========================================================================== 2934 # write_config_subproc_map_file 2935 #===========================================================================
2936 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
2937 """Write a dummy config_subproc.inc file for MadEvent""" 2938 2939 lines = [] 2940 2941 for iconfig in range(len(s_and_t_channels)): 2942 lines.append("DATA CONFSUB(1,%d)/1/" % \ 2943 (iconfig + 1)) 2944 2945 # Write the file 2946 writer.writelines(lines) 2947 2948 return True
2949 2950 #=========================================================================== 2951 # write_colors_file 2952 #===========================================================================
2953 - def write_colors_file(self, writer, matrix_element):
2954 """Write the get_color.f file for MadEvent, which returns color 2955 for all particles used in the matrix element.""" 2956 2957 try: 2958 matrix_elements=matrix_element.real_processes[0].matrix_element 2959 except IndexError: 2960 matrix_elements=[matrix_element.born_matrix_element] 2961 2962 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 2963 matrix_elements = [matrix_elements] 2964 2965 model = matrix_elements[0].get('processes')[0].get('model') 2966 2967 # We need the both particle and antiparticle wf_ids, since the identity 2968 # depends on the direction of the wf. 2969 # loop on the real emissions 2970 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2971 for wf in d.get('wavefunctions')],[]) \ 2972 for d in me.get('diagrams')],[]) \ 2973 for me in [real_proc.matrix_element]],[])\ 2974 for real_proc in matrix_element.real_processes],[])) 2975 # and also on the born 2976 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2977 for wf in d.get('wavefunctions')],[]) \ 2978 for d in matrix_element.born_matrix_element.get('diagrams')],[]))) 2979 2980 # loop on the real emissions 2981 leg_ids = set(sum([sum([sum([[l.get('id') for l in \ 2982 p.get_legs_with_decays()] for p in \ 2983 me.get('processes')], []) for me in \ 2984 [real_proc.matrix_element]], []) for real_proc in \ 2985 matrix_element.real_processes],[])) 2986 # and also on the born 2987 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \ 2988 p.get_legs_with_decays()] for p in \ 2989 matrix_element.born_matrix_element.get('processes')], []))) 2990 particle_ids = sorted(list(wf_ids.union(leg_ids))) 2991 2992 lines = """function get_color(ipdg) 2993 implicit none 2994 integer get_color, ipdg 2995 2996 if(ipdg.eq.%d)then 2997 get_color=%d 2998 return 2999 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3000 3001 for part_id in particle_ids[1:]: 3002 lines += """else if(ipdg.eq.%d)then 3003 get_color=%d 3004 return 3005 """ % (part_id, model.get_particle(part_id).get_color()) 3006 # Dummy particle for multiparticle vertices with pdg given by 3007 # first code not in the model 3008 lines += """else if(ipdg.eq.%d)then 3009 c This is dummy particle used in multiparticle vertices 3010 get_color=2 3011 return 3012 """ % model.get_first_non_pdg() 3013 lines += """else 3014 write(*,*)'Error: No color given for pdg ',ipdg 3015 get_color=0 3016 return 3017 endif 3018 end 3019 """ 3020 3021 # Write the file 3022 writer.writelines(lines) 3023 3024 return True
3025 3026 #=============================================================================== 3027 # write_props_file 3028 #=============================================================================== 3029 #test_written
3030 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3031 """Write the props.inc file for MadEvent. Needs input from 3032 write_configs_file. With respect to the parent routine, it has some 3033 more specific formats that allow the props.inc file to be read by the 3034 link program""" 3035 3036 lines = [] 3037 3038 particle_dict = matrix_element.get('processes')[0].get('model').\ 3039 get('particle_dict') 3040 3041 for iconf, configs in enumerate(s_and_t_channels): 3042 for vertex in configs[0] + configs[1][:-1]: 3043 leg = vertex.get('legs')[-1] 3044 if leg.get('id') not in particle_dict: 3045 # Fake propagator used in multiparticle vertices 3046 mass = 'zero' 3047 width = 'zero' 3048 pow_part = 0 3049 else: 3050 particle = particle_dict[leg.get('id')] 3051 # Get mass 3052 if particle.get('mass').lower() == 'zero': 3053 mass = particle.get('mass') 3054 else: 3055 mass = "abs(%s)" % particle.get('mass') 3056 # Get width 3057 if particle.get('width').lower() == 'zero': 3058 width = particle.get('width') 3059 else: 3060 width = "abs(%s)" % particle.get('width') 3061 3062 pow_part = 1 + int(particle.is_boson()) 3063 3064 lines.append("pmass(%3d,%4d) = %s" % \ 3065 (leg.get('number'), iconf + 1, mass)) 3066 lines.append("pwidth(%3d,%4d) = %s" % \ 3067 (leg.get('number'), iconf + 1, width)) 3068 lines.append("pow(%3d,%4d) = %d" % \ 3069 (leg.get('number'), iconf + 1, pow_part)) 3070 3071 # Write the file 3072 writer.writelines(lines) 3073 3074 return True
3075 3076 3077 #=========================================================================== 3078 # write_subproc 3079 #===========================================================================
3080 - def write_subproc(self, writer, subprocdir):
3081 """Append this subprocess to the subproc.mg file for MG4""" 3082 3083 # Write line to file 3084 writer.write(subprocdir + "\n") 3085 3086 return True
3087 3088 3089 3090 3091 3092 #================================================================================= 3093 # Class for using the optimized Loop process 3094 #=================================================================================
3095 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 3096 ProcessExporterFortranFKS):
3097 """Class to take care of exporting a set of matrix elements to 3098 Fortran (v4) format.""" 3099 3100
3101 - def finalize(self, *args, **opts):
3103 #export_v4.ProcessExporterFortranSA.finalize(self, *args, **opts) 3104 3105 #=============================================================================== 3106 # copy the Template in a new directory. 3107 #===============================================================================
3108 - def copy_fkstemplate(self):
3109 """create the directory run_name as a copy of the MadEvent 3110 Template, and clean the directory 3111 For now it is just the same as copy_v4template, but it will be modified 3112 """ 3113 mgme_dir = self.mgme_dir 3114 dir_path = self.dir_path 3115 clean =self.opt['clean'] 3116 3117 #First copy the full template tree if dir_path doesn't exit 3118 if not os.path.isdir(dir_path): 3119 if not mgme_dir: 3120 raise MadGraph5Error, \ 3121 "No valid MG_ME path given for MG4 run directory creation." 3122 logger.info('initialize a new directory: %s' % \ 3123 os.path.basename(dir_path)) 3124 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 3125 # distutils.dir_util.copy_tree since dir_path already exists 3126 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 3127 dir_path) 3128 # Copy plot_card 3129 for card in ['plot_card']: 3130 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 3131 try: 3132 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 3133 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 3134 except IOError: 3135 logger.warning("Failed to copy " + card + ".dat to default") 3136 3137 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 3138 if not mgme_dir: 3139 raise MadGraph5Error, \ 3140 "No valid MG_ME path given for MG4 run directory creation." 3141 try: 3142 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 3143 except IOError: 3144 MG5_version = misc.get_pkg_info() 3145 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 3146 "5." + MG5_version['version']) 3147 3148 #Ensure that the Template is clean 3149 if clean: 3150 logger.info('remove old information in %s' % os.path.basename(dir_path)) 3151 if os.environ.has_key('MADGRAPH_BASE'): 3152 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 3153 '--web'], cwd=dir_path) 3154 else: 3155 try: 3156 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 3157 cwd=dir_path) 3158 except Exception, why: 3159 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 3160 % (os.path.basename(dir_path),why)) 3161 #Write version info 3162 MG_version = misc.get_pkg_info() 3163 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 3164 MG_version['version']) 3165 3166 # We must link the CutTools to the Library folder of the active Template 3167 self.link_CutTools(dir_path) 3168 # We must link the TIR to the Library folder of the active Template 3169 link_tir_libs=[] 3170 tir_libs=[] 3171 tir_include=[] 3172 for tir in self.all_tir: 3173 tir_dir="%s_dir"%tir 3174 libpath=getattr(self,tir_dir) 3175 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'), 3176 libpath,"lib%s.a"%tir,tir_name=tir) 3177 setattr(self,tir_dir,libpath) 3178 if libpath != "": 3179 if tir in ['pjfry','ninja','golem', 'samurai','collier']: 3180 # We should link dynamically when possible, so we use the original 3181 # location of these libraries. 3182 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir)) 3183 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir)) 3184 # For Ninja, we must also link against OneLoop. 3185 if tir in ['ninja']: 3186 if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext)) 3187 for ext in ['a','dylib','so']): 3188 raise MadGraph5Error( 3189 "The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. 
Please place a symlink to it there."%libpath) 3190 link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo')) 3191 tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo')) 3192 # We must add the corresponding includes for these TIR 3193 if tir in ['golem','samurai','ninja','collier']: 3194 trg_path = pjoin(os.path.dirname(libpath),'include') 3195 if os.path.isdir(trg_path): 3196 to_include = misc.find_includes_path(trg_path, 3197 self.include_names[tir]) 3198 else: 3199 to_include = None 3200 # Special possible location for collier 3201 if to_include is None and tir=='collier': 3202 to_include = misc.find_includes_path( 3203 pjoin(libpath,'modules'),self.include_names[tir]) 3204 if to_include is None: 3205 logger.error( 3206 'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+ 3207 'Generation carries on but you will need to edit the include path by hand in the makefiles.') 3208 to_include = '<Not_found_define_it_yourself>' 3209 tir_include.append('-I %s'%to_include) 3210 else: 3211 link_tir_libs.append('-l%s'%tir) 3212 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir) 3213 3214 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 3215 cwd = os.getcwd() 3216 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3217 try: 3218 os.chdir(dirpath) 3219 except os.error: 3220 logger.error('Could not cd to directory %s' % dirpath) 3221 return 0 3222 filename = 'makefile_loop' 3223 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 3224 link_tir_libs,tir_libs,tir_include=tir_include) 3225 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 3226 dirpath = os.path.join(self.dir_path, 'Source') 3227 try: 3228 os.chdir(dirpath) 3229 except os.error: 3230 logger.error('Could not cd to directory %s' % dirpath) 3231 return 0 3232 filename = 'make_opts' 3233 calls = self.write_make_opts(writers.MakefileWriter(filename), 3234 link_tir_libs,tir_libs) 3235 # Return to original PWD 3236 os.chdir(cwd) 3237 3238 cwd = os.getcwd() 3239 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3240 try: 3241 os.chdir(dirpath) 3242 except os.error: 3243 logger.error('Could not cd to directory %s' % dirpath) 3244 return 0 3245 3246 # We add here the user-friendly MadLoop option setter. 
        cpfiles = ["SubProcesses/MadLoopParamReader.f",
                   "Cards/MadLoopParams.dat",
                   "SubProcesses/MadLoopParams.inc"]

        for file in cpfiles:
            shutil.copy(os.path.join(self.loop_dir, 'StandAlone/', file),
                        os.path.join(self.dir_path, file))

        shutil.copy(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat'),
                    pjoin(self.dir_path, 'Cards', 'MadLoopParams_default.dat'))

        if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
            self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
                                                  'Cards', 'MadLoopParams.dat'))
            # write the output file
            self.MadLoopparam.write(pjoin(self.dir_path, "SubProcesses",
                                          "MadLoopParams.dat"))

        # We need minimal editing of MadLoopCommons.f
        MadLoopCommon = open(os.path.join(self.loop_dir, 'StandAlone',
                             "SubProcesses", "MadLoopCommons.inc")).read()
        writer = writers.FortranWriter(os.path.join(self.dir_path,
                                       "SubProcesses", "MadLoopCommons.f"))
        writer.writelines(MadLoopCommon % {
            'print_banner_commands': self.MadLoop_banner},
            context={'collier_available': self.tir_available_dict['collier']})
        writer.close()

        # link the files from the MODEL
        model_path = self.dir_path + '/Source/MODEL/'
        # Note that for the [real=] mode, these files are not present
        if os.path.isfile(os.path.join(model_path, 'mp_coupl.inc')):
            ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses')
        if os.path.isfile(os.path.join(model_path, 'mp_coupl_same_name.inc')):
            ln(model_path + '/mp_coupl_same_name.inc', \
               self.dir_path + '/SubProcesses')

        # Write the cts_mpc.h and cts_mprec.h files imported from CutTools
        self.write_mp_files(writers.FortranWriter('cts_mprec.h'),
                            writers.FortranWriter('cts_mpc.h'))

        self.copy_python_files()

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()

        # Return to original PWD
        os.chdir(cwd)
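
        # A minimal illustration of the flags assembled by the TIR loop above,
        # assuming a hypothetical COLLIER installation whose library sits under
        # /opt/collier/lib (the dynamically linked case):
        #     link_tir_libs entry : '-L/opt/collier/lib/ -lcollier'
        #     tir_libs entry      : '/opt/collier/lib/libcollier.$(libext)'
        #     tir_include entry   : '-I /opt/collier/include'
        # When no library path is found, the static fallback is used instead:
        #     link_tir_libs entry : '-lcollier'
        #     tir_libs entry      : '$(LIBDIR)libcollier.$(libext)'
        # These lists are exactly what write_makefile_TIR and write_make_opts
        # receive above.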

    def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
3301 """writes the V**** directory inside the P**** directories specified in 3302 dir_name""" 3303 3304 cwd = os.getcwd() 3305 3306 matrix_element = loop_matrix_element 3307 3308 # Create the MadLoop5_resources directory if not already existing 3309 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 3310 try: 3311 os.mkdir(dirpath) 3312 except os.error as error: 3313 logger.warning(error.strerror + " " + dirpath) 3314 3315 # Create the directory PN_xx_xxxxx in the specified path 3316 name = "V%s" % matrix_element.get('processes')[0].shell_string() 3317 dirpath = os.path.join(dir_name, name) 3318 3319 try: 3320 os.mkdir(dirpath) 3321 except os.error as error: 3322 logger.warning(error.strerror + " " + dirpath) 3323 3324 try: 3325 os.chdir(dirpath) 3326 except os.error: 3327 logger.error('Could not cd to directory %s' % dirpath) 3328 return 0 3329 3330 logger.info('Creating files in directory %s' % name) 3331 3332 # Extract number of external particles 3333 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3334 3335 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 3336 3337 # We need a link to coefs.inc from DHELAS 3338 ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'), 3339 abspath=False, cwd=None) 3340 3341 # The born matrix element, if needed 3342 filename = 'born_matrix.f' 3343 calls = self.write_bornmatrix( 3344 writers.FortranWriter(filename), 3345 matrix_element, 3346 fortran_model) 3347 3348 filename = 'nexternal.inc' 3349 self.write_nexternal_file(writers.FortranWriter(filename), 3350 nexternal, ninitial) 3351 3352 filename = 'pmass.inc' 3353 self.write_pmass_file(writers.FortranWriter(filename), 3354 matrix_element) 3355 3356 filename = 'ngraphs.inc' 3357 self.write_ngraphs_file(writers.FortranWriter(filename), 3358 len(matrix_element.get_all_amplitudes())) 3359 3360 filename = "loop_matrix.ps" 3361 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""") 3362 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 3363 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 3364 filename, 3365 model=matrix_element.get('processes')[0].get('model'), 3366 amplitude='') 3367 logger.info("Drawing loop Feynman diagrams for " + \ 3368 matrix_element.get('processes')[0].nice_string(\ 3369 print_weighted=False)) 3370 plot.draw() 3371 3372 filename = "born_matrix.ps" 3373 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3374 get('born_diagrams'), 3375 filename, 3376 model=matrix_element.get('processes')[0].\ 3377 get('model'), 3378 amplitude='') 3379 logger.info("Generating born Feynman diagrams for " + \ 3380 matrix_element.get('processes')[0].nice_string(\ 3381 print_weighted=False)) 3382 plot.draw() 3383 3384 # We also need to write the overall maximum quantities for this group 3385 # of processes in 'global_specs.inc'. 
        # only one process, so this is trivial.
        self.write_global_specs(matrix_element, output_path=pjoin(dirpath, 'global_specs.inc'))

        open('unique_id.inc', 'w').write(
"""      integer UNIQUE_ID
      parameter(UNIQUE_ID=1)""")

        linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
                     'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
                     'MadLoopParams.inc', 'MadLoopCommons.f']

        for file in linkfiles:
            ln('../../%s' % file)

        os.system("ln -s ../../makefile_loop makefile")

        # Link MadLoopParams.dat from the SubProcesses directory into the
        # MadLoop5_resources directory
        ln(pjoin(os.path.pardir, os.path.pardir, 'MadLoopParams.dat'),
           pjoin('..', 'MadLoop5_resources'))

        linkfiles = ['mpmodule.mod']

        for file in linkfiles:
            ln('../../../lib/%s' % file)

        linkfiles = ['coef_specs.inc']

        for file in linkfiles:
            ln('../../../Source/DHELAS/%s' % file)

        # Return to original PWD
        os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
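
    # Summary comment (illustrative, derived from the calls above): after this
    # method runs, the V* directory holds the generated born_matrix.f,
    # nexternal.inc, pmass.inc, ngraphs.inc, global_specs.inc, unique_id.inc
    # and the two .ps diagram files, together with symlinks to coupl.inc, the
    # mp_* coupling includes, the CutTools multiple-precision headers, the
    # MadLoop parameter files, mpmodule.mod, coef_specs.inc, and a 'makefile'
    # link pointing to ../../makefile_loop.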

    #===============================================================================
    # write_coef_specs
    #===============================================================================
    def write_coef_specs_file(self, max_loop_vertex_ranks):
3428 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 3429 non-optimized mode""" 3430 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 3431 3432 replace_dict = {} 3433 replace_dict['max_lwf_size'] = 4 3434 replace_dict['vertex_max_coefs'] = max(\ 3435 [q_polynomial.get_number_of_coefs_for_rank(n) 3436 for n in max_loop_vertex_ranks]) 3437 IncWriter=writers.FortranWriter(filename,'w') 3438 IncWriter.writelines("""INTEGER MAXLWFSIZE 3439 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 3440 INTEGER VERTEXMAXCOEFS 3441 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 3442 % replace_dict) 3443 IncWriter.close()