
Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from distutils import dir_util 
  18  import glob 
  19  import logging 
  20  import os 
  21  import re 
  22  import shutil 
  23  import subprocess 
  24  import string 
  25  import copy 
  26  import platform 
  27   
  28  import madgraph.core.color_algebra as color 
  29  import madgraph.core.helas_objects as helas_objects 
  30  import madgraph.core.base_objects as base_objects 
  31  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  32  import madgraph.fks.fks_base as fks 
  33  import madgraph.fks.fks_common as fks_common 
  34  import madgraph.iolibs.drawing_eps as draw 
  35  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  36  import madgraph.iolibs.files as files 
  37  import madgraph.various.misc as misc 
  38  import madgraph.iolibs.file_writers as writers 
  39  import madgraph.iolibs.template_files as template_files 
  40  import madgraph.iolibs.ufo_expression_parsers as parsers 
  41  import madgraph.iolibs.export_v4 as export_v4 
  42  import madgraph.loop.loop_exporters as loop_exporters 
  43  import madgraph.various.q_polynomial as q_polynomial 
  44  import madgraph.various.banner as banner_mod 
  45   
  46  import aloha.create_aloha as create_aloha 
  47   
  48  import models.write_param_card as write_param_card 
  49  import models.check_param_card as check_param_card 
  50  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  51  from madgraph.iolibs.files import cp, ln, mv 
  52   
  53  pjoin = os.path.join 
  54   
  55  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  56  logger = logging.getLogger('madgraph.export_fks') 
  57   
  58   
def make_jpeg_async(args):
    Pdir = args[0]
    old_pos = args[1]
    dir_path = args[2]

    devnull = os.open(os.devnull, os.O_RDWR)

    os.chdir(Pdir)
    subprocess.call([os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                    stdout=devnull)
    os.chdir(os.path.pardir)
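The three values packed into the single args tuple suggest this helper is meant to be mapped over several P* subprocess directories at once. The sketch below is illustrative only and not part of the module; the P_dir_list and dir_path names are hypothetical placeholders.

# Illustrative sketch (not part of export_fks): dispatching make_jpeg_async
# over a list of P* directories with a multiprocessing pool.
#
#     import multiprocessing
#     old_pos = os.getcwd()
#     pool = multiprocessing.Pool()
#     pool.map(make_jpeg_async, [(Pdir, old_pos, dir_path) for Pdir in P_dir_list])
#     pool.close()
#     pool.join()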


#===============================================================================
# Class used for the (non-optimized) Loop process
#===============================================================================
class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    #===========================================================================
    # copy the Template in a new directory.
    #===========================================================================
82 - def copy_fkstemplate(self):
83 """create the directory run_name as a copy of the MadEvent 84 Template, and clean the directory 85 For now it is just the same as copy_v4template, but it will be modified 86 """ 87 88 mgme_dir = self.mgme_dir 89 dir_path = self.dir_path 90 clean =self.opt['clean'] 91 92 #First copy the full template tree if dir_path doesn't exit 93 if not os.path.isdir(dir_path): 94 if not mgme_dir: 95 raise MadGraph5Error, \ 96 "No valid MG_ME path given for MG4 run directory creation." 97 logger.info('initialize a new directory: %s' % \ 98 os.path.basename(dir_path)) 99 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 100 # distutils.dir_util.copy_tree since dir_path already exists 101 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path) 102 # Copy plot_card 103 for card in ['plot_card']: 104 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 105 try: 106 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 107 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 108 except IOError: 109 logger.warning("Failed to move " + card + ".dat to default") 110 111 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 112 if not mgme_dir: 113 raise MadGraph5Error, \ 114 "No valid MG_ME path given for MG4 run directory creation." 115 try: 116 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 117 except IOError: 118 MG5_version = misc.get_pkg_info() 119 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 120 "5." + MG5_version['version']) 121 122 #Ensure that the Template is clean 123 if clean: 124 logger.info('remove old information in %s' % os.path.basename(dir_path)) 125 if os.environ.has_key('MADGRAPH_BASE'): 126 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 127 '--web'],cwd=dir_path) 128 else: 129 try: 130 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 131 cwd=dir_path) 132 except Exception, why: 133 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 134 % (os.path.basename(dir_path),why)) 135 #Write version info 136 MG_version = misc.get_pkg_info() 137 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 138 MG_version['version']) 139 140 # We must link the CutTools to the Library folder of the active Template 141 self.link_CutTools(dir_path) 142 143 link_tir_libs=[] 144 tir_libs=[] 145 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 146 dirpath = os.path.join(self.dir_path, 'SubProcesses') 147 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop') 148 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 149 link_tir_libs,tir_libs) 150 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 151 filename = pjoin(self.dir_path, 'Source','make_opts') 152 calls = self.write_make_opts(writers.MakefileWriter(filename), 153 link_tir_libs,tir_libs) 154 155 # Duplicate run_card and FO_analyse_card 156 for card in ['FO_analyse_card', 'shower_card']: 157 try: 158 shutil.copy(pjoin(self.dir_path, 'Cards', 159 card + '.dat'), 160 pjoin(self.dir_path, 'Cards', 161 card + '_default.dat')) 162 except IOError: 163 logger.warning("Failed to copy " + card + ".dat to default") 164 165 cwd = os.getcwd() 166 dirpath = os.path.join(self.dir_path, 'SubProcesses') 167 try: 168 os.chdir(dirpath) 169 except os.error: 170 logger.error('Could not cd to directory %s' % dirpath) 171 return 0 172 173 # We add here the user-friendly MadLoop option setter. 
174 cpfiles= ["SubProcesses/MadLoopParamReader.f", 175 "Cards/MadLoopParams.dat", 176 "SubProcesses/MadLoopParams.inc"] 177 178 for file in cpfiles: 179 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 180 os.path.join(self.dir_path, file)) 181 182 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'), 183 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat')) 184 185 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 186 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 187 'Cards', 'MadLoopParams.dat')) 188 # write the output file 189 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 190 "MadLoopParams.dat")) 191 192 # We need minimal editing of MadLoopCommons.f 193 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 194 "SubProcesses","MadLoopCommons.inc")).read() 195 writer = writers.FortranWriter(os.path.join(self.dir_path, 196 "SubProcesses","MadLoopCommons.f")) 197 writer.writelines(MadLoopCommon%{ 198 'print_banner_commands':self.MadLoop_banner}, 199 context={'collier_available':False}) 200 writer.close() 201 202 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 203 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 204 writers.FortranWriter('cts_mpc.h')) 205 206 207 # Finally make sure to turn off MC over Hel for the default mode. 208 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat') 209 FKS_card_file = open(FKS_card_path,'r') 210 FKS_card = FKS_card_file.read() 211 FKS_card_file.close() 212 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+", 213 "#NHelForMCoverHels\n-1", FKS_card) 214 FKS_card_file = open(FKS_card_path,'w') 215 FKS_card_file.write(FKS_card) 216 FKS_card_file.close() 217 218 # Return to original PWD 219 os.chdir(cwd) 220 # Copy the different python files in the Template 221 self.copy_python_files() 222 223 # We need to create the correct open_data for the pdf 224 self.write_pdf_opendata()

    # This is defined here and not in the optimized exporter because the same
    # makefile_loop.inc is used. We also overload this function (it is already
    # defined in LoopProcessExporterFortranSA) because the path of the template
    # makefile is different.
    def write_makefile_TIR(self, writer, link_tir_libs, tir_libs, tir_include=[]):
        """ Create the file makefile_loop which links to the TIR libraries."""

        file = open(os.path.join(self.mgme_dir, 'Template', 'NLO',
                                 'SubProcesses', 'makefile_loop.inc')).read()
        replace_dict = {}
        replace_dict['link_tir_libs'] = ' '.join(link_tir_libs)
        replace_dict['tir_libs'] = ' '.join(tir_libs)
        replace_dict['dotf'] = '%.f'
        replace_dict['doto'] = '%.o'
        replace_dict['tir_include'] = ' '.join(tir_include)
        file = file % replace_dict
        if writer:
            writer.writelines(file)
        else:
            return file

    # This is defined here and not in the optimized exporter because the same
    # make_opts.inc is used.
    def write_make_opts(self, writer, link_tir_libs, tir_libs):
        """ Create the file make_opts which links to the TIR libraries."""
        file = open(os.path.join(self.mgme_dir, 'Template', 'NLO',
                                 'Source', 'make_opts.inc')).read()
        replace_dict = {}
        replace_dict['link_tir_libs'] = ' '.join(link_tir_libs)
        replace_dict['tir_libs'] = ' '.join(tir_libs)
        replace_dict['dotf'] = '%.f'
        replace_dict['doto'] = '%.o'
        file = file % replace_dict
        if writer:
            writer.writelines(file)
        else:
            return file

    #===========================================================================
    # copy_python_files
    #===========================================================================
    def copy_python_files(self):
        """copy python files required for the Template"""

        files_to_copy = [
                 pjoin('interface', 'amcatnlo_run_interface.py'),
                 pjoin('interface', 'extended_cmd.py'),
                 pjoin('interface', 'common_run_interface.py'),
                 pjoin('interface', 'coloring_logging.py'),
                 pjoin('various', 'misc.py'),
                 pjoin('various', 'shower_card.py'),
                 pjoin('various', 'FO_analyse_card.py'),
                 pjoin('various', 'histograms.py'),
                 pjoin('various', 'banner.py'),
                 pjoin('various', 'cluster.py'),
                 pjoin('various', 'systematics.py'),
                 pjoin('various', 'lhe_parser.py'),
                 pjoin('madevent', 'sum_html.py'),
                 pjoin('madevent', 'gen_crossxhtml.py'),
                 pjoin('iolibs', 'files.py'),
                 pjoin('iolibs', 'save_load_object.py'),
                 pjoin('iolibs', 'file_writers.py'),
                 pjoin('..', 'models', 'check_param_card.py'),
                 pjoin('__init__.py')
                 ]
        cp(_file_path + '/interface/.mg5_logging.conf',
           self.dir_path + '/bin/internal/me5_logging.conf')

        for cp_file in files_to_copy:
            cp(pjoin(_file_path, cp_file),
               pjoin(self.dir_path, 'bin', 'internal', os.path.basename(cp_file)))

    def convert_model(self, model, wanted_lorentz=[],
                      wanted_couplings=[]):

        super(ProcessExporterFortranFKS, self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc', '*.dat', '*.py~')
        try:
            shutil.rmtree(pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'))
        except OSError as error:
            pass
        model_path = model.get('modelpath')
        shutil.copytree(model_path,
                        pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal', 'ufomodel',
                             'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)


    #===========================================================================
    # write_maxparticles_file
    #===========================================================================
    def write_maxparticles_file(self, writer, maxparticles):
        """Write the maxparticles.inc file for MadEvent"""

        lines = "integer max_particles, max_branch\n"
        lines += "parameter (max_particles=%d) \n" % maxparticles
        lines += "parameter (max_branch=max_particles-1)"

        # Write the file
        writer.writelines(lines)

        return True
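For reference, the three lines assembled above produce an include file of this shape (the value 5 below is only an example):

# Example of a generated maxparticles.inc (illustrative value):
#     integer max_particles, max_branch
#     parameter (max_particles=5)
#     parameter (max_branch=max_particles-1)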


    #===========================================================================
    # write_maxconfigs_file
    #===========================================================================
    def write_maxconfigs_file(self, writer, maxconfigs):
        """Write the maxconfigs.inc file for MadEvent"""

        lines = "integer lmaxconfigs\n"
        lines += "parameter (lmaxconfigs=%d)" % maxconfigs

        # Write the file
        writer.writelines(lines)

        return True


    #===========================================================================
    # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat)
    #===========================================================================
    def write_procdef_mg5(self, file_pos, modelname, process_str):
        """Write an equivalent of the MG4 proc_card so that all the MadEvent4
        Perl scripts keep working properly for a pure MG5 run."""

        proc_card_template = template_files.mg4_proc_card.mg4_template
        process_template = template_files.mg4_proc_card.process_template
        process_text = ''
        coupling = ''
        new_process_content = []

        # First find the couplings and remove them from process_str,
        # but make sure beforehand that couplings are defined without spaces:
        process_str = process_str.replace(' =', '=')
        process_str = process_str.replace('= ', '=')
        process_str = process_str.replace(',', ' , ')
        # now loop over the elements and treat all the couplings
        for info in process_str.split():
            if '=' in info:
                coupling += info + '\n'
            else:
                new_process_content.append(info)
        # Recombine process_str (i.e. the input process_str without the
        # coupling info)
        process_str = ' '.join(new_process_content)

        # format the SubProcess
        process_text += process_template.substitute({'process': process_str,
                                                     'coupling': coupling})

        text = proc_card_template.substitute({'process': process_text,
                                              'model': modelname,
                                              'multiparticle': ''})
        ff = open(file_pos, 'w')
        ff.write(text)
        ff.close()
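The loop above simply separates whitespace-delimited tokens containing '=' (the coupling orders) from the rest of the process definition. A standalone sketch of that behaviour, with a hypothetical input string:

# Illustrative sketch (not part of the module) of the coupling-stripping logic:
#     process_str = "p p > t t~ QED=0 QCD=2"
#     coupling = ''
#     new_process_content = []
#     for info in process_str.replace(' =', '=').replace('= ', '=').split():
#         if '=' in info:
#             coupling += info + '\n'
#         else:
#             new_process_content.append(info)
#     # new_process_content -> ['p', 'p', '>', 't', 't~']
#     # coupling            -> 'QED=0\nQCD=2\n'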


    #===========================================================================
    # write an initial-states map, useful for the fast PDF NLO interface
    #===========================================================================
    def write_init_map(self, file_pos, initial_states):
        """ Write an initial state process map. Each possible PDF
        combination gets a unique identifier."""

        text = ''
        for i, e in enumerate(initial_states):
            text = text + str(i + 1) + ' ' + str(len(e))
            for t in e:
                text = text + ' '
                try:
                    for p in t:
                        text = text + ' ' + str(p)
                except TypeError:
                    text = text + ' ' + str(t)
            text = text + '\n'

        ff = open(file_pos, 'w')
        ff.write(text)
        ff.close()
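Each line written above carries the map index, the number of PDF combinations for that entry, and then the PDG codes themselves. A hypothetical two-entry file could look like:

# Illustrative content of the generated initial-states map (hypothetical PDG codes):
#     1 2  21 21  2 -2
#     2 1  2 21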

    def get_ME_identifier(self, matrix_element, *args, **opts):
        """ Return a string uniquely identifying the matrix element given in
        argument, so that it can be used as a prefix for all MadLoop5
        subroutines and common blocks related to it. This allows several
        processes to be compiled into one library, as requested by the BLHA
        (Binoth Les Houches Accord) guidelines. The MadFKS design requires
        that there is no process prefix, hence the empty string."""

        return ''

    #===========================================================================
    # write_coef_specs
    #===========================================================================
    def write_coef_specs_file(self, virt_me_list):
        """writes the coef_specs.inc in the DHELAS folder. Should not be called
        in the non-optimized mode"""
        raise fks_common.FKSProcessError(
            "write_coef_specs should be called only in the loop-optimized mode")
434 435 436 #=============================================================================== 437 # generate_directories_fks 438 #===============================================================================
439 - def generate_directories_fks(self, matrix_element, fortran_model, me_number, 440 me_ntot, path=os.getcwd(),OLP='MadLoop'):
441 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 442 including the necessary matrix.f and various helper files""" 443 proc = matrix_element.born_matrix_element['processes'][0] 444 445 if not self.model: 446 self.model = matrix_element.get('processes')[0].get('model') 447 448 cwd = os.getcwd() 449 try: 450 os.chdir(path) 451 except OSError, error: 452 error_msg = "The directory %s should exist in order to be able " % path + \ 453 "to \"export\" in it. If you see this error message by " + \ 454 "typing the command \"export\" please consider to use " + \ 455 "instead the command \"output\". " 456 raise MadGraph5Error, error_msg 457 458 calls = 0 459 460 self.fksdirs = [] 461 #first make and cd the direcrory corresponding to the born process: 462 borndir = "P%s" % \ 463 (matrix_element.get('processes')[0].shell_string()) 464 os.mkdir(borndir) 465 os.chdir(borndir) 466 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 467 468 ## write the files corresponding to the born process in the P* directory 469 self.generate_born_fks_files(matrix_element, 470 fortran_model, me_number, path) 471 472 # With NJET you want to generate the order file per subprocess and most 473 # likely also generate it for each subproc. 474 if OLP=='NJET': 475 filename = 'OLE_order.lh' 476 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP) 477 478 if matrix_element.virt_matrix_element: 479 calls += self.generate_virt_directory( \ 480 matrix_element.virt_matrix_element, \ 481 fortran_model, \ 482 os.path.join(path, borndir)) 483 484 #write the infortions for the different real emission processes 485 486 self.write_real_matrix_elements(matrix_element, fortran_model) 487 488 self.write_pdf_calls(matrix_element, fortran_model) 489 490 filename = 'nFKSconfigs.inc' 491 self.write_nfksconfigs_file(writers.FortranWriter(filename), 492 matrix_element, 493 fortran_model) 494 495 filename = 'iproc.dat' 496 self.write_iproc_file(writers.FortranWriter(filename), 497 me_number) 498 499 filename = 'fks_info.inc' 500 self.write_fks_info_file(writers.FortranWriter(filename), 501 matrix_element, 502 fortran_model) 503 504 filename = 'leshouche_info.dat' 505 nfksconfs,maxproc,maxflow,nexternal=\ 506 self.write_leshouche_info_file(filename,matrix_element) 507 508 # if no corrections are generated ([LOonly] mode), get 509 # these variables from the born 510 if nfksconfs == maxproc == maxflow == 0: 511 nfksconfs = 1 512 (dummylines, maxproc, maxflow) = self.get_leshouche_lines( 513 matrix_element.born_matrix_element, 1) 514 515 filename = 'leshouche_decl.inc' 516 self.write_leshouche_info_declarations( 517 writers.FortranWriter(filename), 518 nfksconfs,maxproc,maxflow,nexternal, 519 fortran_model) 520 521 filename = 'configs_and_props_info.dat' 522 nconfigs,max_leg_number,nfksconfs=self.write_configs_and_props_info_file( 523 filename, 524 matrix_element) 525 526 filename = 'configs_and_props_decl.inc' 527 self.write_configs_and_props_info_declarations( 528 writers.FortranWriter(filename), 529 nconfigs,max_leg_number,nfksconfs, 530 fortran_model) 531 532 filename = 'real_from_born_configs.inc' 533 self.write_real_from_born_configs( 534 writers.FortranWriter(filename), 535 matrix_element, 536 fortran_model) 537 538 filename = 'ngraphs.inc' 539 self.write_ngraphs_file(writers.FortranWriter(filename), 540 nconfigs) 541 542 #write the wrappers 543 filename = 'real_me_chooser.f' 544 self.write_real_me_wrapper(writers.FortranWriter(filename), 545 matrix_element, 
546 fortran_model) 547 548 filename = 'parton_lum_chooser.f' 549 self.write_pdf_wrapper(writers.FortranWriter(filename), 550 matrix_element, 551 fortran_model) 552 553 filename = 'get_color.f' 554 self.write_colors_file(writers.FortranWriter(filename), 555 matrix_element) 556 557 filename = 'nexternal.inc' 558 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 559 self.write_nexternal_file(writers.FortranWriter(filename), 560 nexternal, ninitial) 561 self.proc_characteristic['ninitial'] = ninitial 562 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 563 564 filename = 'pmass.inc' 565 try: 566 self.write_pmass_file(writers.FortranWriter(filename), 567 matrix_element.real_processes[0].matrix_element) 568 except IndexError: 569 self.write_pmass_file(writers.FortranWriter(filename), 570 matrix_element.born_matrix_element) 571 572 #draw the diagrams 573 self.draw_feynman_diagrams(matrix_element) 574 575 linkfiles = ['BinothLHADummy.f', 576 'check_poles.f', 577 'MCmasses_HERWIG6.inc', 578 'MCmasses_HERWIGPP.inc', 579 'MCmasses_PYTHIA6Q.inc', 580 'MCmasses_PYTHIA6PT.inc', 581 'MCmasses_PYTHIA8.inc', 582 'add_write_info.f', 583 'coupl.inc', 584 'cuts.f', 585 'FKS_params.dat', 586 'initial_states_map.dat', 587 'OLE_order.olc', 588 'FKSParams.inc', 589 'FKSParamReader.f', 590 'cuts.inc', 591 'unlops.inc', 592 'pythia_unlops.f', 593 'driver_mintMC.f', 594 'driver_mintFO.f', 595 'driver_vegas.f', 596 'appl_interface.cc', 597 'appl_interface_dummy.f', 598 'appl_common.inc', 599 'reweight_appl.inc', 600 'driver_reweight.f', 601 'fastjetfortran_madfks_core.cc', 602 'fastjetfortran_madfks_full.cc', 603 'fjcore.cc', 604 'fastjet_wrapper.f', 605 'fjcore.hh', 606 'fks_Sij.f', 607 'fks_powers.inc', 608 'fks_singular.f', 609 'veto_xsec.f', 610 'veto_xsec.inc', 611 'c_weight.inc', 612 'fks_inc_chooser.f', 613 'leshouche_inc_chooser.f', 614 'configs_and_props_inc_chooser.f', 615 'genps.inc', 616 'genps_fks.f', 617 'boostwdir2.f', 618 'madfks_mcatnlo.inc', 619 'open_output_files.f', 620 'open_output_files_dummy.f', 621 'HwU_dummy.f', 622 'madfks_plot.f', 623 'analysis_dummy.f', 624 'mint-integrator2.f', 625 'MC_integer.f', 626 'mint.inc', 627 'montecarlocounter.f', 628 'q_es.inc', 629 'recluster.cc', 630 'Boosts.h', 631 'reweight.inc', 632 'reweight0.inc', 633 'reweight1.inc', 634 'reweightNLO.inc', 635 'reweight_all.inc', 636 'reweight_events.f', 637 'reweight_xsec.f', 638 'reweight_xsec_events.f', 639 'reweight_xsec_events_pdf_dummy.f', 640 'iproc_map.f', 641 'run.inc', 642 'run_card.inc', 643 'setcuts.f', 644 'setscales.f', 645 'symmetry_fks_test_MC.f', 646 'symmetry_fks_test_ME.f', 647 'symmetry_fks_test_Sij.f', 648 'symmetry_fks_v3.f', 649 'trapfpe.c', 650 'vegas2.for', 651 'write_ajob.f', 652 'handling_lhe_events.f', 653 'write_event.f', 654 'fill_MC_mshell.f', 655 'maxparticles.inc', 656 'message.inc', 657 'initcluster.f', 658 'cluster.inc', 659 'cluster.f', 660 'reweight.f', 661 'randinit', 662 'sudakov.inc', 663 'maxconfigs.inc', 664 'timing_variables.inc'] 665 666 for file in linkfiles: 667 ln('../' + file , '.') 668 os.system("ln -s ../../Cards/param_card.dat .") 669 670 #copy the makefile 671 os.system("ln -s ../makefile_fks_dir ./makefile") 672 if matrix_element.virt_matrix_element: 673 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 674 elif OLP!='MadLoop': 675 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 676 else: 677 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 678 679 680 #import nexternal/leshouches in Source 681 
ln('nexternal.inc', '../../Source', log=False) 682 ln('born_leshouche.inc', '../../Source', log=False) 683 684 685 # Return to SubProcesses dir 686 os.chdir(os.path.pardir) 687 # Add subprocess to subproc.mg 688 filename = 'subproc.mg' 689 files.append_to_file(filename, 690 self.write_subproc, 691 borndir) 692 693 694 os.chdir(cwd) 695 # Generate info page 696 gen_infohtml.make_info_html_nlo(self.dir_path) 697 698 699 return calls

    #===========================================================================
    # create the run_card
    #===========================================================================
    def create_run_card(self, processes, history):
        """Create the default and current NLO run_card in the Cards directory."""

        run_card = banner_mod.RunCardNLO()

        run_card.create_default_for_process(self.proc_characteristic,
                                            history,
                                            processes)

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))


    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        self.proc_defs = cmd._curr_proc_defs
        if hasattr(cmd, 'born_processes'):
            self.born_processes = cmd.born_processes
        else:
            self.born_processes = []
        return
727
728 - def finalize(self, matrix_elements, history, mg5options, flaglist):
729 """Finalize FKS directory by creating jpeg diagrams, html 730 pages,proc_card_mg5.dat and madevent.tar.gz and create the MA5 card if 731 necessary.""" 732 733 devnull = os.open(os.devnull, os.O_RDWR) 734 try: 735 res = misc.call([self.options['lhapdf'], '--version'], \ 736 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 737 except Exception: 738 res = 1 739 if res != 0: 740 logger.info('The value for lhapdf in the current configuration does not ' + \ 741 'correspond to a valid executable.\nPlease set it correctly either in ' + \ 742 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \ 743 'and regenrate the process. \nTo avoid regeneration, edit the ' + \ 744 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \ 745 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n') 746 747 compiler_dict = {'fortran': mg5options['fortran_compiler'], 748 'cpp': mg5options['cpp_compiler'], 749 'f2py': mg5options['f2py_compiler']} 750 751 if 'nojpeg' in flaglist: 752 makejpg = False 753 else: 754 makejpg = True 755 output_dependencies = mg5options['output_dependencies'] 756 757 758 self.proc_characteristic['grouped_matrix'] = False 759 self.create_proc_charac() 760 761 self.create_run_card(matrix_elements.get_processes(), history) 762 # modelname = self.model.get('name') 763 # if modelname == 'mssm' or modelname.startswith('mssm-'): 764 # param_card = os.path.join(self.dir_path, 'Cards','param_card.dat') 765 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 766 # check_param_card.convert_to_mg5card(param_card, mg5_param) 767 # check_param_card.check_valid_param_card(mg5_param) 768 769 # # write the model functions get_mass/width_from_id 770 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f') 771 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc') 772 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model) 773 774 # # Write maxconfigs.inc based on max of ME's/subprocess groups 775 776 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 777 self.write_maxconfigs_file(writers.FortranWriter(filename), 778 matrix_elements.get_max_configs()) 779 780 # # Write maxparticles.inc based on max of ME's/subprocess groups 781 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 782 self.write_maxparticles_file(writers.FortranWriter(filename), 783 matrix_elements.get_max_particles()) 784 785 # Touch "done" file 786 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 787 788 # Check for compiler 789 fcompiler_chosen = self.set_fortran_compiler(compiler_dict) 790 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 791 792 old_pos = os.getcwd() 793 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 794 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 795 proc[0] == 'P'] 796 797 devnull = os.open(os.devnull, os.O_RDWR) 798 # Convert the poscript in jpg files (if authorize) 799 if makejpg: 800 logger.info("Generate jpeg diagrams") 801 for Pdir in P_dir_list: 802 os.chdir(Pdir) 803 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 804 stdout = devnull) 805 os.chdir(os.path.pardir) 806 # 807 logger.info("Generate web pages") 808 # Create the WebPage using perl script 809 810 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 811 stdout = devnull) 812 813 os.chdir(os.path.pardir) 814 # 815 # obj = 
gen_infohtml.make_info_html(self.dir_path) 816 # [mv(name, './HTML/') for name in os.listdir('.') if \ 817 # (name.endswith('.html') or name.endswith('.jpg')) and \ 818 # name != 'index.html'] 819 # if online: 820 # nb_channel = obj.rep_rule['nb_gen_diag'] 821 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 822 823 # Write command history as proc_card_mg5 824 if os.path.isdir('Cards'): 825 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 826 history.write(output_file) 827 828 # Duplicate run_card and FO_analyse_card 829 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 830 try: 831 shutil.copy(pjoin(self.dir_path, 'Cards', 832 card + '.dat'), 833 pjoin(self.dir_path, 'Cards', 834 card + '_default.dat')) 835 except IOError: 836 logger.warning("Failed to copy " + card + ".dat to default") 837 838 839 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 840 stdout = devnull) 841 842 # Run "make" to generate madevent.tar.gz file 843 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 844 if os.path.exists('amcatnlo.tar.gz'): 845 os.remove('amcatnlo.tar.gz') 846 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 847 stdout = devnull) 848 # 849 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 850 stdout = devnull) 851 852 #return to the initial dir 853 os.chdir(old_pos) 854 855 # Setup stdHep 856 # Find the correct fortran compiler 857 base_compiler= ['FC=g77','FC=gfortran'] 858 859 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 860 861 if output_dependencies == 'external': 862 # check if stdhep has to be compiled (only the first time) 863 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 864 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')): 865 if 'FC' not in os.environ or not os.environ['FC']: 866 path = os.path.join(StdHep_path, 'src', 'make_opts') 867 text = open(path).read() 868 for base in base_compiler: 869 text = text.replace(base,'FC=%s' % fcompiler_chosen) 870 open(path, 'w').writelines(text) 871 872 logger.info('Compiling StdHEP. 
This has to be done only once.') 873 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 874 logger.info('Done.') 875 #then link the libraries in the exported dir 876 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 877 pjoin(self.dir_path, 'MCatNLO', 'lib')) 878 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 879 pjoin(self.dir_path, 'MCatNLO', 'lib')) 880 881 elif output_dependencies == 'internal': 882 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 883 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 884 # Create the links to the lib folder 885 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 886 for file in linkfiles: 887 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 888 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 889 if 'FC' not in os.environ or not os.environ['FC']: 890 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 891 text = open(path).read() 892 for base in base_compiler: 893 text = text.replace(base,'FC=%s' % fcompiler_chosen) 894 open(path, 'w').writelines(text) 895 # To avoid compiler version conflicts, we force a clean here 896 misc.compile(['clean'],cwd = StdHEP_internal_path) 897 898 elif output_dependencies == 'environment_paths': 899 # Here the user chose to define the dependencies path in one of 900 # his environmental paths 901 libStdHep = misc.which_lib('libstdhep.a') 902 libFmcfio = misc.which_lib('libFmcfio.a') 903 if not libStdHep is None and not libFmcfio is None: 904 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 905 os.path.dirname(libStdHep)) 906 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 907 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 908 else: 909 raise InvalidCmd("Could not find the location of the files"+\ 910 " libstdhep.a and libFmcfio.a in you environment paths.") 911 912 else: 913 raise MadGraph5Error, 'output_dependencies option %s not recognized'\ 914 %output_dependencies 915 916 # Create the default MadAnalysis5 cards 917 if 'madanalysis5_path' in self.opt and not \ 918 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 919 # When using 920 processes = sum([me.get('processes') if not isinstance(me, str) else [] \ 921 for me in matrix_elements.get('matrix_elements')],[]) 922 923 # Try getting the processes from the generation info directly if no ME are 924 # available (as it is the case for parallel generation 925 if len(processes)==0: 926 processes = self.born_processes 927 if len(processes)==0: 928 logger.warning( 929 """MG5aMC could not provide to Madanalysis5 the list of processes generated. 930 As a result, the default card will not be tailored to the process generated. 931 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""") 932 # For now, simply assign all processes to each proc_defs. 933 # That shouldn't really affect the default analysis card created by MA5 934 self.create_default_madanalysis5_cards( 935 history, self.proc_defs, [processes,]*len(self.proc_defs), 936 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 937 levels =['hadron'])

    def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
        """Writes the real_from_born_configs.inc file that contains the mapping
        from a given born configuration (as used e.g. in the multi-channel
        phase-space integration) to the corresponding real-emission diagram,
        i.e. the real-emission diagram in which the combined ij is split into
        i_fks and j_fks."""
        lines = []
        lines2 = []
        max_links = 0
        born_me = matrix_element.born_matrix_element
        for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
            iFKS = iFKS + 1
            links = conf['fks_info']['rb_links']
            max_links = max(max_links, len(links))
            for i, diags in enumerate(links):
                if not i == diags['born_conf']:
                    print links
                    raise MadGraph5Error, "born_conf should be canonically ordered"
            real_configs = ', '.join(['%d' % int(diags['real_conf'] + 1) for diags in links])
            lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
                         % (iFKS, len(links), real_configs))

        lines2.append("integer irfbc")
        lines2.append("integer real_from_born_conf(%d,%d)" \
                      % (max_links, len(matrix_element.get_fks_info_list())))
        # Write the file
        writer.writelines(lines2 + lines)
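The declarations and data statements collected above end up in real_from_born_configs.inc in a form similar to the following (dimensions and diagram numbers are hypothetical):

# Illustrative real_from_born_configs.inc for two FKS configurations with
# three born configurations each (hypothetical numbers):
#     integer irfbc
#     integer real_from_born_conf(3,2)
#     data (real_from_born_conf(irfbc,1),irfbc=1,3) /2, 4, 5/
#     data (real_from_born_conf(irfbc,2),irfbc=1,3) /1, 3, 6/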
967 968 969 #=============================================================================== 970 # write_get_mass_width_file 971 #=============================================================================== 972 #test written
973 - def write_get_mass_width_file(self, writer, makeinc, model):
974 """Write the get_mass_width_file.f file for MG4. 975 Also update the makeinc.inc file 976 """ 977 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero'] 978 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero'] 979 980 iflines_mass = '' 981 iflines_width = '' 982 983 for i, part in enumerate(mass_particles): 984 if i == 0: 985 ifstring = 'if' 986 else: 987 ifstring = 'else if' 988 if part['self_antipart']: 989 iflines_mass += '%s (id.eq.%d) then\n' % \ 990 (ifstring, part.get_pdg_code()) 991 else: 992 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 993 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 994 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass') 995 996 for i, part in enumerate(width_particles): 997 if i == 0: 998 ifstring = 'if' 999 else: 1000 ifstring = 'else if' 1001 if part['self_antipart']: 1002 iflines_width += '%s (id.eq.%d) then\n' % \ 1003 (ifstring, part.get_pdg_code()) 1004 else: 1005 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 1006 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 1007 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width') 1008 1009 replace_dict = {'iflines_mass' : iflines_mass, 1010 'iflines_width' : iflines_width} 1011 1012 file = open(os.path.join(_file_path, \ 1013 'iolibs/template_files/get_mass_width_fcts.inc')).read() 1014 file = file % replace_dict 1015 1016 # Write the file 1017 writer.writelines(file) 1018 1019 # update the makeinc 1020 makeinc_content = open(makeinc).read() 1021 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ') 1022 open(makeinc, 'w').write(makeinc_content) 1023 1024 return


    def write_configs_and_props_info_declarations(self, writer, max_iconfig,
                                      max_leg_number, nfksconfs, fortran_model):
        """writes the declarations for the variables relevant for
        configs_and_props
        """
        lines = []
        lines.append("integer ifr,lmaxconfigs_used,max_branch_used")
        lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig)
        lines.append("parameter (max_branch_used =%4d)" % -max_leg_number)
        lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs)
        lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
        lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
        lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
        lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
        lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
        lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)

        writer.writelines(lines)
1043 1044
1045 - def write_configs_and_props_info_file(self, filename, matrix_element):
1046 """writes the configs_and_props_info.inc file that cointains 1047 all the (real-emission) configurations (IFOREST) as well as 1048 the masses and widths of intermediate particles""" 1049 lines = [] 1050 lines.append("# C -> MAPCONFIG_D") 1051 lines.append("# F/D -> IFOREST_D") 1052 lines.append("# S -> SPROP_D") 1053 lines.append("# T -> TPRID_D") 1054 lines.append("# M -> PMASS_D/PWIDTH_D") 1055 lines.append("# P -> POW_D") 1056 lines2 = [] 1057 nconfs = len(matrix_element.get_fks_info_list()) 1058 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1059 1060 max_iconfig=0 1061 max_leg_number=0 1062 1063 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 1064 iFKS=iFKS+1 1065 iconfig = 0 1066 s_and_t_channels = [] 1067 mapconfigs = [] 1068 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element 1069 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams') 1070 model = fks_matrix_element.get('base_amplitude').get('process').get('model') 1071 minvert = min([max([len(vert.get('legs')) for vert in \ 1072 diag.get('vertices')]) for diag in base_diagrams]) 1073 1074 lines.append("# ") 1075 lines.append("# nFKSprocess %d" % iFKS) 1076 for idiag, diag in enumerate(base_diagrams): 1077 if any([len(vert.get('legs')) > minvert for vert in 1078 diag.get('vertices')]): 1079 # Only 3-vertices allowed in configs.inc 1080 continue 1081 iconfig = iconfig + 1 1082 helas_diag = fks_matrix_element.get('diagrams')[idiag] 1083 mapconfigs.append(helas_diag.get('number')) 1084 lines.append("# Diagram %d for nFKSprocess %d" % \ 1085 (helas_diag.get('number'),iFKS)) 1086 # Correspondance between the config and the amplitudes 1087 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1088 helas_diag.get('number'))) 1089 1090 # Need to reorganize the topology so that we start with all 1091 # final state external particles and work our way inwards 1092 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1093 get_s_and_t_channels(ninitial, model, 990) 1094 1095 s_and_t_channels.append([schannels, tchannels]) 1096 1097 # Write out propagators for s-channel and t-channel vertices 1098 allchannels = schannels 1099 if len(tchannels) > 1: 1100 # Write out tchannels only if there are any non-trivial ones 1101 allchannels = schannels + tchannels 1102 1103 for vert in allchannels: 1104 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1105 last_leg = vert.get('legs')[-1] 1106 lines.append("F %4d %4d %4d %4d" % \ 1107 (iFKS,last_leg.get('number'), iconfig, len(daughters))) 1108 for d in daughters: 1109 lines.append("D %4d" % d) 1110 if vert in schannels: 1111 lines.append("S %4d %4d %4d %10d" % \ 1112 (iFKS,last_leg.get('number'), iconfig, 1113 last_leg.get('id'))) 1114 elif vert in tchannels[:-1]: 1115 lines.append("T %4d %4d %4d %10d" % \ 1116 (iFKS,last_leg.get('number'), iconfig, 1117 abs(last_leg.get('id')))) 1118 1119 # update what the array sizes (mapconfig,iforest,etc) will be 1120 max_leg_number = min(max_leg_number,last_leg.get('number')) 1121 max_iconfig = max(max_iconfig,iconfig) 1122 1123 # Write out number of configs 1124 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1125 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1126 1127 # write the props.inc information 1128 lines2.append("# ") 1129 particle_dict = fks_matrix_element.get('processes')[0].get('model').\ 1130 get('particle_dict') 1131 1132 for iconf, configs in enumerate(s_and_t_channels): 1133 for vertex in configs[0] + configs[1][:-1]: 1134 leg = 
vertex.get('legs')[-1] 1135 if leg.get('id') not in particle_dict: 1136 # Fake propagator used in multiparticle vertices 1137 pow_part = 0 1138 else: 1139 particle = particle_dict[leg.get('id')] 1140 1141 pow_part = 1 + int(particle.is_boson()) 1142 1143 lines2.append("M %4d %4d %4d %10d " % \ 1144 (iFKS,leg.get('number'), iconf + 1, leg.get('id'))) 1145 lines2.append("P %4d %4d %4d %4d " % \ 1146 (iFKS,leg.get('number'), iconf + 1, pow_part)) 1147 1148 # Write the file 1149 open(filename,'w').write('\n'.join(lines+lines2)) 1150 1151 return max_iconfig, max_leg_number, nconfs


    def write_leshouche_info_declarations(self, writer, nfksconfs,
                                          maxproc, maxflow, nexternal, fortran_model):
        """writes the declarations for the variables relevant for leshouche_info
        """
        lines = []
        lines.append('integer maxproc_used, maxflow_used')
        lines.append('parameter (maxproc_used = %d)' % maxproc)
        lines.append('parameter (maxflow_used = %d)' % maxflow)
        lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal))
        lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal))
        lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal))
        lines.append('integer niprocs_d(%d)' % (nfksconfs))

        writer.writelines(lines)


    def write_leshouche_info_file(self, filename, matrix_element):
        """writes the leshouche_info.dat file which contains
        the LHA information for all the real emission processes
        """
        lines = []
        lines.append("# I -> IDUP_D")
        lines.append("# M -> MOTHUP_D")
        lines.append("# C -> ICOLUP_D")
        nfksconfs = len(matrix_element.get_fks_info_list())
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        maxproc = 0
        maxflow = 0
        for i, conf in enumerate(matrix_element.get_fks_info_list()):
        # for i, real in enumerate(matrix_element.real_processes):
            (newlines, nprocs, nflows) = self.get_leshouche_lines(
                matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1)
            lines.extend(newlines)
            maxproc = max(maxproc, nprocs)
            maxflow = max(maxflow, nflows)

        # Write the file
        open(filename, 'w').write('\n'.join(lines))

        return nfksconfs, maxproc, maxflow, nexternal


    def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
        """writes the wrapper which allows to choose among the different
        parton luminosities (dlum_*) according to nFKSprocess"""

        file = \
"""double precision function dlum()
implicit none
include 'timing_variables.inc'
integer nfksprocess
common/c_nfksprocess/nfksprocess
call cpu_time(tbefore)
"""
        if matrix_element.real_processes:
            for n, info in enumerate(matrix_element.get_fks_info_list()):
                file += \
"""if (nfksprocess.eq.%(n)d) then
call dlum_%(n_me)d(dlum)
else""" % {'n': n + 1, 'n_me': info['n_me']}
            file += \
"""
write(*,*) 'ERROR: invalid n in dlum :', nfksprocess
stop
endif
call cpu_time(tAfter)
tPDF = tPDF + (tAfter-tBefore)
return
end
"""
        else:
            file += \
"""call dlum_0(dlum)
call cpu_time(tAfter)
tPDF = tPDF + (tAfter-tBefore)
return
end
"""

        # Write the file
        writer.writelines(file)
        return 0
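Concretely, for two real-emission processes the strings concatenated above yield a dispatcher of roughly this shape, before FortranWriter reformats it (the n_me indices 1 and 2 are hypothetical):

# Illustrative dlum wrapper produced by write_pdf_wrapper:
#     double precision function dlum()
#     implicit none
#     include 'timing_variables.inc'
#     integer nfksprocess
#     common/c_nfksprocess/nfksprocess
#     call cpu_time(tbefore)
#     if (nfksprocess.eq.1) then
#     call dlum_1(dlum)
#     elseif (nfksprocess.eq.2) then
#     call dlum_2(dlum)
#     else
#     write(*,*) 'ERROR: invalid n in dlum :', nfksprocess
#     stop
#     endif
#     call cpu_time(tAfter)
#     tPDF = tPDF + (tAfter-tBefore)
#     return
#     end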


    def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
        """writes the wrapper which allows to choose among the different real matrix elements"""

        file = \
"""subroutine smatrix_real(p, wgt)
implicit none
include 'nexternal.inc'
double precision p(0:3, nexternal)
double precision wgt
integer nfksprocess
common/c_nfksprocess/nfksprocess
"""
        for n, info in enumerate(matrix_element.get_fks_info_list()):
            file += \
"""if (nfksprocess.eq.%(n)d) then
call smatrix_%(n_me)d(p, wgt)
else""" % {'n': n + 1, 'n_me': info['n_me']}

        if matrix_element.real_processes:
            file += \
"""
write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess
stop
endif
return
end
"""
        else:
            file += \
"""
wgt=0d0
return
end
"""
        # Write the file
        writer.writelines(file)
        return 0


    def draw_feynman_diagrams(self, matrix_element):
        """Create the ps files containing the feynman diagrams for the born process,
        as well as for all the real emission processes"""

        filename = 'born.ps'
        plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\
                                get('base_amplitude').get('diagrams'),
                                filename,
                                model=matrix_element.born_matrix_element.\
                                     get('processes')[0].get('model'),
                                amplitude=True, diagram_type='born')
        plot.draw()

        for n, fksreal in enumerate(matrix_element.real_processes):
            filename = 'matrix_%d.ps' % (n + 1)
            plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\
                                    get('base_amplitude').get('diagrams'),
                                    filename,
                                    model=fksreal.matrix_element.\
                                         get('processes')[0].get('model'),
                                    amplitude=True, diagram_type='real')
            plot.draw()


    def write_real_matrix_elements(self, matrix_element, fortran_model):
        """writes the matrix_i.f files which contain the real matrix elements"""

        for n, fksreal in enumerate(matrix_element.real_processes):
            filename = 'matrix_%d.f' % (n + 1)
            self.write_matrix_element_fks(writers.FortranWriter(filename),
                                          fksreal.matrix_element, n + 1,
                                          fortran_model)

    def write_pdf_calls(self, matrix_element, fortran_model):
        """writes the parton_lum_i.f files which contain the parton luminosities.
        If no real emission exists, write the one for the born"""

        if matrix_element.real_processes:
            for n, fksreal in enumerate(matrix_element.real_processes):
                filename = 'parton_lum_%d.f' % (n + 1)
                self.write_pdf_file(writers.FortranWriter(filename),
                                    fksreal.matrix_element, n + 1,
                                    fortran_model)
        else:
            filename = 'parton_lum_0.f'
            self.write_pdf_file(writers.FortranWriter(filename),
                                matrix_element.born_matrix_element, 0,
                                fortran_model)
1327 1328
1329 - def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
1330 """generates the files needed for the born amplitude in the P* directory, which will 1331 be needed by the P* directories""" 1332 pathdir = os.getcwd() 1333 1334 filename = 'born.f' 1335 calls_born, ncolor_born = \ 1336 self.write_born_fks(writers.FortranWriter(filename),\ 1337 matrix_element, 1338 fortran_model) 1339 1340 filename = 'born_hel.f' 1341 self.write_born_hel(writers.FortranWriter(filename),\ 1342 matrix_element, 1343 fortran_model) 1344 1345 1346 filename = 'born_conf.inc' 1347 nconfigs, mapconfigs, s_and_t_channels = \ 1348 self.write_configs_file( 1349 writers.FortranWriter(filename), 1350 matrix_element.born_matrix_element, 1351 fortran_model) 1352 1353 filename = 'born_props.inc' 1354 self.write_props_file(writers.FortranWriter(filename), 1355 matrix_element.born_matrix_element, 1356 fortran_model, 1357 s_and_t_channels) 1358 1359 filename = 'born_decayBW.inc' 1360 self.write_decayBW_file(writers.FortranWriter(filename), 1361 s_and_t_channels) 1362 1363 filename = 'born_leshouche.inc' 1364 nflows = self.write_leshouche_file(writers.FortranWriter(filename), 1365 matrix_element.born_matrix_element, 1366 fortran_model) 1367 1368 filename = 'born_nhel.inc' 1369 self.write_born_nhel_file(writers.FortranWriter(filename), 1370 matrix_element.born_matrix_element, nflows, 1371 fortran_model, 1372 ncolor_born) 1373 1374 filename = 'born_ngraphs.inc' 1375 self.write_ngraphs_file(writers.FortranWriter(filename), 1376 matrix_element.born_matrix_element.get_number_of_amplitudes()) 1377 1378 filename = 'ncombs.inc' 1379 self.write_ncombs_file(writers.FortranWriter(filename), 1380 matrix_element.born_matrix_element, 1381 fortran_model) 1382 1383 filename = 'born_maxamps.inc' 1384 maxamps = len(matrix_element.get('diagrams')) 1385 maxflows = ncolor_born 1386 self.write_maxamps_file(writers.FortranWriter(filename), 1387 maxamps, 1388 maxflows, 1389 max([len(matrix_element.get('processes')) for me in \ 1390 matrix_element.born_matrix_element]),1) 1391 1392 filename = 'config_subproc_map.inc' 1393 self.write_config_subproc_map_file(writers.FortranWriter(filename), 1394 s_and_t_channels) 1395 1396 filename = 'coloramps.inc' 1397 self.write_coloramps_file(writers.FortranWriter(filename), 1398 mapconfigs, 1399 matrix_element.born_matrix_element, 1400 fortran_model) 1401 1402 #write the sborn_sf.f and the b_sf_files 1403 filename = ['sborn_sf.f', 'sborn_sf_dum.f'] 1404 for i, links in enumerate([matrix_element.color_links, []]): 1405 self.write_sborn_sf(writers.FortranWriter(filename[i]), 1406 links, 1407 fortran_model) 1408 self.color_link_files = [] 1409 for i in range(len(matrix_element.color_links)): 1410 filename = 'b_sf_%3.3d.f' % (i + 1) 1411 self.color_link_files.append(filename) 1412 self.write_b_sf_fks(writers.FortranWriter(filename), 1413 matrix_element, i, 1414 fortran_model)
1415 1416
1417 - def generate_virtuals_from_OLP(self,process_list,export_path, OLP):
1418 """Generates the library for computing the loop matrix elements 1419 necessary for this process using the OLP specified.""" 1420 1421 # Start by writing the BLHA order file 1422 virtual_path = pjoin(export_path,'OLP_virtuals') 1423 if not os.path.exists(virtual_path): 1424 os.makedirs(virtual_path) 1425 filename = os.path.join(virtual_path,'OLE_order.lh') 1426 self.write_lh_order(filename, process_list, OLP) 1427 1428 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\ 1429 'Please check the virt_generation.log file in %s.'\ 1430 %str(pjoin(virtual_path,'virt_generation.log')) 1431 1432 # Perform some tasks specific to certain OLP's 1433 if OLP=='GoSam': 1434 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1435 'GoSam','makevirt'),pjoin(virtual_path,'makevirt')) 1436 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1437 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc')) 1438 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path) 1439 # Now generate the process 1440 logger.info('Generating the loop matrix elements with %s...'%OLP) 1441 virt_generation_log = \ 1442 open(pjoin(virtual_path,'virt_generation.log'), 'w') 1443 retcode = subprocess.call(['./makevirt'],cwd=virtual_path, 1444 stdout=virt_generation_log, stderr=virt_generation_log) 1445 virt_generation_log.close() 1446 # Check what extension is used for the share libraries on this system 1447 possible_other_extensions = ['so','dylib'] 1448 shared_lib_ext='so' 1449 for ext in possible_other_extensions: 1450 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib', 1451 'libgolem_olp.'+ext)): 1452 shared_lib_ext = ext 1453 1454 # Now check that everything got correctly generated 1455 files_to_check = ['olp_module.mod',str(pjoin('lib', 1456 'libgolem_olp.'+shared_lib_ext))] 1457 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path, 1458 'Virtuals',f)) for f in files_to_check]): 1459 raise fks_common.FKSProcessError(fail_msg) 1460 # link the library to the lib folder 1461 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext), 1462 pjoin(export_path,'lib')) 1463 1464 # Specify in make_opts the right library necessitated by the OLP 1465 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read() 1466 make_opts=open(pjoin(export_path,'Source','make_opts'),'w') 1467 if OLP=='GoSam': 1468 if platform.system().lower()=='darwin': 1469 # On mac the -rpath is not supported and the path of the dynamic 1470 # library is automatically wired in the executable 1471 make_opts_content=make_opts_content.replace('libOLP=', 1472 'libOLP=-Wl,-lgolem_olp') 1473 else: 1474 # On other platforms the option , -rpath= path to libgolem.so is necessary 1475 # Using a relative path is not ideal because the file libgolem.so is not 1476 # copied on the worker nodes. 1477 # make_opts_content=make_opts_content.replace('libOLP=', 1478 # 'libOLP=-Wl,-rpath=../$(LIBDIR) -lgolem_olp') 1479 # Using the absolute path is working in the case where the disk of the 1480 # front end machine is mounted on all worker nodes as well. 
1481 make_opts_content=make_opts_content.replace('libOLP=', 1482 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp') 1483 1484 1485 make_opts.write(make_opts_content) 1486 make_opts.close() 1487 1488 # A priori this is generic to all OLP's 1489 1490 # Parse the contract file returned and propagate the process label to 1491 # the include of the BinothLHA.f file 1492 proc_to_label = self.parse_contract_file( 1493 pjoin(virtual_path,'OLE_order.olc')) 1494 1495 self.write_BinothLHA_inc(process_list,proc_to_label,\ 1496 pjoin(export_path,'SubProcesses')) 1497 1498 # Link the contract file to within the SubProcess directory 1499 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))

    def write_BinothLHA_inc(self, processes, proc_to_label, SubProcPath):
        """ Write the file Binoth_proc.inc in each SubProcess directory so as
        to provide the right process_label to use in the OLP call to get the
        loop matrix element evaluation. proc_to_label is a dictionary in the
        format of the one returned by the function parse_contract_file."""

        for proc in processes:
            name = "P%s" % proc.shell_string()
            proc_pdgs = (tuple([leg.get('id') for leg in proc.get('legs') if \
                                                        not leg.get('state')]),
                         tuple([leg.get('id') for leg in proc.get('legs') if \
                                                            leg.get('state')]))
            incFile = open(pjoin(SubProcPath, name, 'Binoth_proc.inc'), 'w')
            try:
                incFile.write(
"""      INTEGER PROC_LABEL
      PARAMETER (PROC_LABEL=%d)""" % (proc_to_label[proc_pdgs]))
            except KeyError:
                raise fks_common.FKSProcessError('Could not find the target' + \
                    ' process %s > %s in' % (str(proc_pdgs[0]), str(proc_pdgs[1])) + \
                    ' the proc_to_label argument in write_BinothLHA_inc.')
            incFile.close()
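Each P* directory thus receives a two-line Binoth_proc.inc of the following form (the label value 3 is only an example):

# Illustrative Binoth_proc.inc written above:
#     INTEGER PROC_LABEL
#     PARAMETER (PROC_LABEL=3)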
1523
1524 - def parse_contract_file(self, contract_file_path):
1525          """ Parse the BLHA contract file, make sure all parameters could be
1526          understood by the OLP and return a mapping of the processes (characterized
1527          by the pdg's of the initial and final state particles) to their process
1528          label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}.
1529          """
1530
1531          proc_def_to_label = {}
1532
1533          if not os.path.exists(contract_file_path):
1534              raise fks_common.FKSProcessError('Could not find the contract file'+\
1535                              ' OLE_order.olc in %s.'%str(contract_file_path))
1536
1537          comment_re=re.compile(r"^\s*#")
1538          proc_def_re=re.compile(
1539              r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+
1540              r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$")
1541          line_OK_re=re.compile(r"^.*\|\s*OK")
1542          for line in file(contract_file_path):
1543              # Ignore comments
1544              if not comment_re.match(line) is None:
1545                  continue
1546              # Check if it is a proc definition line
1547              proc_def = proc_def_re.match(line)
1548              if not proc_def is None:
1549                  if int(proc_def.group('proc_class'))!=1:
1550                      raise fks_common.FKSProcessError(
1551  'aMCatNLO can only handle loop processes generated by the OLP which have a'+\
1552  ' process class attribute equal to 1. Found %s instead in: \n%s'\
1553                                     %(proc_def.group('proc_class'),line))
1554                  in_pdgs=tuple([int(in_pdg) for in_pdg in \
1555                                           proc_def.group('in_pdgs').split()])
1556                  out_pdgs=tuple([int(out_pdg) for out_pdg in \
1557                                          proc_def.group('out_pdgs').split()])
1558                  proc_def_to_label[(in_pdgs,out_pdgs)]=\
1559                                          int(proc_def.group('proc_label'))
1560                  continue
1561              # For the other types of line, just make sure they end with | OK
1562              if line_OK_re.match(line) is None:
1563                  raise fks_common.FKSProcessError(
1564                  'The OLP could not process the following line: \n%s'%line)
1565
1566          return proc_def_to_label
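    # Illustrative example (hypothetical, for clarity): a process-definition
    # line of OLE_order.olc matched by proc_def_re above looks like
    # "21 21 -> 6 -6 | 1 3" (g g -> t t~, process class 1, process label 3).
    # The snippet below only exercises the same regular expression.
    #
    #     import re
    #     proc_def_re = re.compile(
    #         r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"
    #         r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$")
    #     match = proc_def_re.match("21 21 -> 6 -6 | 1 3")
    #     assert match is not None
    #     assert int(match.group('proc_label')) == 3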
1567 1568
1569 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1570 """writes the V**** directory inside the P**** directories specified in 1571 dir_name""" 1572 1573 cwd = os.getcwd() 1574 1575 matrix_element = loop_matrix_element 1576 1577 # Create the MadLoop5_resources directory if not already existing 1578 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 1579 try: 1580 os.mkdir(dirpath) 1581 except os.error as error: 1582 logger.warning(error.strerror + " " + dirpath) 1583 1584 # Create the directory PN_xx_xxxxx in the specified path 1585 name = "V%s" % matrix_element.get('processes')[0].shell_string() 1586 dirpath = os.path.join(dir_name, name) 1587 1588 try: 1589 os.mkdir(dirpath) 1590 except os.error as error: 1591 logger.warning(error.strerror + " " + dirpath) 1592 1593 try: 1594 os.chdir(dirpath) 1595 except os.error: 1596 logger.error('Could not cd to directory %s' % dirpath) 1597 return 0 1598 1599 logger.info('Creating files in directory %s' % name) 1600 1601 # Extract number of external particles 1602 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1603 1604 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 1605 # The born matrix element, if needed 1606 filename = 'born_matrix.f' 1607 calls = self.write_bornmatrix( 1608 writers.FortranWriter(filename), 1609 matrix_element, 1610 fortran_model) 1611 1612 filename = 'nexternal.inc' 1613 self.write_nexternal_file(writers.FortranWriter(filename), 1614 nexternal, ninitial) 1615 1616 filename = 'pmass.inc' 1617 self.write_pmass_file(writers.FortranWriter(filename), 1618 matrix_element) 1619 1620 filename = 'ngraphs.inc' 1621 self.write_ngraphs_file(writers.FortranWriter(filename), 1622 len(matrix_element.get_all_amplitudes())) 1623 1624 filename = "loop_matrix.ps" 1625 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 1626 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 1627 filename, 1628 model=matrix_element.get('processes')[0].get('model'), 1629 amplitude='') 1630 logger.info("Drawing loop Feynman diagrams for " + \ 1631 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1632 plot.draw() 1633 1634 filename = "born_matrix.ps" 1635 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1636 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\ 1637 get('model'),amplitude='') 1638 logger.info("Generating born Feynman diagrams for " + \ 1639 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1640 plot.draw() 1641 1642 # We also need to write the overall maximum quantities for this group 1643 # of processes in 'global_specs.inc'. 
In aMCatNLO, there is always 1644 # only one process, so this is trivial 1645 self.write_global_specs(matrix_element) 1646 open('unique_id.inc','w').write( 1647 """ integer UNIQUE_ID 1648 parameter(UNIQUE_ID=1)""") 1649 1650 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 1651 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 1652 'MadLoopCommons.f','MadLoopParams.inc','global_specs.inc'] 1653 1654 # We should move to MadLoop5_resources directory from the SubProcesses 1655 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 1656 pjoin('..','MadLoop5_resources')) 1657 1658 for file in linkfiles: 1659 ln('../../%s' % file) 1660 1661 os.system("ln -s ../../makefile_loop makefile") 1662 1663 linkfiles = ['mpmodule.mod'] 1664 1665 for file in linkfiles: 1666 ln('../../../lib/%s' % file) 1667 1668 linkfiles = ['coef_specs.inc'] 1669 1670 for file in linkfiles: 1671 ln('../../../Source/DHELAS/%s' % file) 1672 1673 # Return to original PWD 1674 os.chdir(cwd) 1675 1676 if not calls: 1677 calls = 0 1678 return calls
1679
1680 - def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
1681          """computes the QED/QCD orders from the number of external particles
1682          and from the weighted orders"""
1683          # n vertices = nexternal - 2 = QED + QCD
1684          # weighted = 2*QED + QCD
1685          QED = weighted - nexternal + 2
1686          QCD = weighted - 2 * QED
1687          return QED, QCD
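    # Worked example (hypothetical, for clarity): for a 2 -> 3 process
    # (nexternal = 5) generated with WEIGHTED = 4, the relations above give
    # QED = 4 - 5 + 2 = 1 and QCD = 4 - 2*1 = 2, i.e.
    #
    #     >>> exporter.get_qed_qcd_orders_from_weighted(5, 4)
    #     (1, 2)
    #
    # where 'exporter' is assumed to be an instance of this class.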
1688 1689 1690 1691 #=============================================================================== 1692 # write_lh_order 1693 #=============================================================================== 1694 #test written
1695 - def write_lh_order(self, filename, process_list, OLP='MadLoop'):
1696          """Creates the OLE_order.lh file. This function should be edited according
1697          to the OLP which is used. For now it is generic."""
1698
1699
1700          if len(process_list)==0:
1701              raise fks_common.FKSProcessError('No matrix elements provided to '+\
1702                                               'the function write_lh_order.')
1703              return
1704
1705          # We assume the orders to be common to all Subprocesses
1706
1707          orders = process_list[0].get('orders')
1708          if 'QED' in orders.keys() and 'QCD' in orders.keys():
1709              QED=orders['QED']
1710              QCD=orders['QCD']
1711          elif 'QED' in orders.keys():
1712              QED=orders['QED']
1713              QCD=0
1714          elif 'QCD' in orders.keys():
1715              QED=0
1716              QCD=orders['QCD']
1717          else:
1718              QED, QCD = self.get_qed_qcd_orders_from_weighted(\
1719                                      len(process_list[0].get('legs')),
1720                                      orders['WEIGHTED'])
1721
1722          replace_dict = {}
1723          replace_dict['mesq'] = 'CHaveraged'
1724          replace_dict['corr'] = ' '.join(process_list[0].\
1725                                          get('perturbation_couplings'))
1726          replace_dict['irreg'] = 'CDR'
1727          replace_dict['aspow'] = QCD
1728          replace_dict['aepow'] = QED
1729          replace_dict['modelfile'] = './param_card.dat'
1730          replace_dict['params'] = 'alpha_s'
1731          proc_lines=[]
1732          for proc in process_list:
1733              proc_lines.append('%s -> %s' % \
1734                  (' '.join(str(l['id']) for l in proc['legs'] if not l['state']),
1735                   ' '.join(str(l['id']) for l in proc['legs'] if l['state'])))
1736          replace_dict['pdgs'] = '\n'.join(proc_lines)
1737          replace_dict['symfin'] = 'Yes'
1738          content = \
1739  "#OLE_order written by MadGraph5_aMC@NLO\n\
1740  \n\
1741  MatrixElementSquareType %(mesq)s\n\
1742  CorrectionType %(corr)s\n\
1743  IRregularisation %(irreg)s\n\
1744  AlphasPower %(aspow)d\n\
1745  AlphaPower %(aepow)d\n\
1746  NJetSymmetrizeFinal %(symfin)s\n\
1747  ModelFile %(modelfile)s\n\
1748  Parameters %(params)s\n\
1749  \n\
1750  # process\n\
1751  %(pdgs)s\n\
1752  " % replace_dict
1753
1754          file = open(filename, 'w')
1755          file.write(content)
1756          file.close()
1757          return
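    # Illustrative example (hypothetical, for clarity): for a single process
    # u u~ > t t~ the loop above builds the process line "2 -2 -> 6 -6", so the
    # end of the generated OLE_order.lh would read
    #
    #     # process
    #     2 -2 -> 6 -6
    #
    # with AlphasPower and AlphaPower set to the QCD and QED orders found above.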
1758 1759 1760 #=============================================================================== 1761 # write_born_fks 1762 #=============================================================================== 1763 # test written
1764 - def write_born_fks(self, writer, fksborn, fortran_model):
1765 """Export a matrix element to a born.f file in MadFKS format""" 1766 1767 matrix_element = fksborn.born_matrix_element 1768 1769 if not matrix_element.get('processes') or \ 1770 not matrix_element.get('diagrams'): 1771 return 0 1772 1773 if not isinstance(writer, writers.FortranWriter): 1774 raise writers.FortranWriter.FortranWriterError(\ 1775 "writer not FortranWriter") 1776 # Set lowercase/uppercase Fortran code 1777 writers.FortranWriter.downcase = False 1778 1779 replace_dict = {} 1780 1781 # Extract version number and date from VERSION file 1782 info_lines = self.get_mg5_info_lines() 1783 replace_dict['info_lines'] = info_lines 1784 1785 # Extract process info lines 1786 process_lines = self.get_process_info_lines(matrix_element) 1787 replace_dict['process_lines'] = process_lines 1788 1789 1790 # Extract ncomb 1791 ncomb = matrix_element.get_helicity_combinations() 1792 replace_dict['ncomb'] = ncomb 1793 1794 # Extract helicity lines 1795 helicity_lines = self.get_helicity_lines(matrix_element) 1796 replace_dict['helicity_lines'] = helicity_lines 1797 1798 # Extract IC line 1799 ic_line = self.get_ic_line(matrix_element) 1800 replace_dict['ic_line'] = ic_line 1801 1802 # Extract overall denominator 1803 # Averaging initial state color, spin, and identical FS particles 1804 #den_factor_line = get_den_factor_line(matrix_element) 1805 1806 # Extract ngraphs 1807 ngraphs = matrix_element.get_number_of_amplitudes() 1808 replace_dict['ngraphs'] = ngraphs 1809 1810 # Extract nwavefuncs 1811 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1812 replace_dict['nwavefuncs'] = nwavefuncs 1813 1814 # Extract ncolor 1815 ncolor = max(1, len(matrix_element.get('color_basis'))) 1816 replace_dict['ncolor'] = ncolor 1817 1818 # Extract color data lines 1819 color_data_lines = self.get_color_data_lines(matrix_element) 1820 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1821 1822 # Extract helas calls 1823 helas_calls = fortran_model.get_matrix_element_calls(\ 1824 matrix_element) 1825 replace_dict['helas_calls'] = "\n".join(helas_calls) 1826 1827 # Extract amp2 lines 1828 amp2_lines = self.get_amp2_lines(matrix_element) 1829 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1830 1831 # Extract JAMP lines 1832 jamp_lines = self.get_JAMP_lines(matrix_element) 1833 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1834 1835 # Set the size of Wavefunction 1836 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 1837 replace_dict['wavefunctionsize'] = 20 1838 else: 1839 replace_dict['wavefunctionsize'] = 8 1840 1841 # Extract glu_ij_lines 1842 ij_lines = self.get_ij_lines(fksborn) 1843 replace_dict['ij_lines'] = '\n'.join(ij_lines) 1844 1845 # Extract den_factor_lines 1846 den_factor_lines = self.get_den_factor_lines(fksborn) 1847 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1848 1849 # Extract the number of FKS process 1850 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1) 1851 1852 file = open(os.path.join(_file_path, \ 1853 'iolibs/template_files/born_fks.inc')).read() 1854 file = file % replace_dict 1855 1856 # Write the file 1857 writer.writelines(file) 1858 1859 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
1860 1861
1862 - def write_born_hel(self, writer, fksborn, fortran_model):
1863 """Export a matrix element to a born_hel.f file in MadFKS format""" 1864 1865 matrix_element = fksborn.born_matrix_element 1866 1867 if not matrix_element.get('processes') or \ 1868 not matrix_element.get('diagrams'): 1869 return 0 1870 1871 if not isinstance(writer, writers.FortranWriter): 1872 raise writers.FortranWriter.FortranWriterError(\ 1873 "writer not FortranWriter") 1874 # Set lowercase/uppercase Fortran code 1875 writers.FortranWriter.downcase = False 1876 1877 replace_dict = {} 1878 1879 # Extract version number and date from VERSION file 1880 info_lines = self.get_mg5_info_lines() 1881 replace_dict['info_lines'] = info_lines 1882 1883 # Extract process info lines 1884 process_lines = self.get_process_info_lines(matrix_element) 1885 replace_dict['process_lines'] = process_lines 1886 1887 1888 # Extract ncomb 1889 ncomb = matrix_element.get_helicity_combinations() 1890 replace_dict['ncomb'] = ncomb 1891 1892 # Extract helicity lines 1893 helicity_lines = self.get_helicity_lines(matrix_element) 1894 replace_dict['helicity_lines'] = helicity_lines 1895 1896 # Extract IC line 1897 ic_line = self.get_ic_line(matrix_element) 1898 replace_dict['ic_line'] = ic_line 1899 1900 # Extract overall denominator 1901 # Averaging initial state color, spin, and identical FS particles 1902 #den_factor_line = get_den_factor_line(matrix_element) 1903 1904 # Extract ngraphs 1905 ngraphs = matrix_element.get_number_of_amplitudes() 1906 replace_dict['ngraphs'] = ngraphs 1907 1908 # Extract nwavefuncs 1909 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1910 replace_dict['nwavefuncs'] = nwavefuncs 1911 1912 # Extract ncolor 1913 ncolor = max(1, len(matrix_element.get('color_basis'))) 1914 replace_dict['ncolor'] = ncolor 1915 1916 # Extract color data lines 1917 color_data_lines = self.get_color_data_lines(matrix_element) 1918 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1919 1920 # Extract amp2 lines 1921 amp2_lines = self.get_amp2_lines(matrix_element) 1922 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1923 1924 # Extract JAMP lines 1925 jamp_lines = self.get_JAMP_lines(matrix_element) 1926 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1927 1928 # Extract den_factor_lines 1929 den_factor_lines = self.get_den_factor_lines(fksborn) 1930 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1931 1932 # Extract the number of FKS process 1933 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1934 1935 file = open(os.path.join(_file_path, \ 1936 'iolibs/template_files/born_fks_hel.inc')).read() 1937 file = file % replace_dict 1938 1939 # Write the file 1940 writer.writelines(file) 1941 1942 return
1943 1944 1945 #=============================================================================== 1946 # write_born_sf_fks 1947 #=============================================================================== 1948 #test written
1949 - def write_sborn_sf(self, writer, color_links, fortran_model):
1950 """Creates the sborn_sf.f file, containing the calls to the different 1951 color linked borns""" 1952 1953 replace_dict = {} 1954 nborns = len(color_links) 1955 ifkss = [] 1956 iborns = [] 1957 mms = [] 1958 nns = [] 1959 iflines = "\n" 1960 1961 #header for the sborn_sf.f file 1962 file = """subroutine sborn_sf(p_born,m,n,wgt) 1963 implicit none 1964 include "nexternal.inc" 1965 double precision p_born(0:3,nexternal-1),wgt 1966 double complex wgt1(2) 1967 integer m,n \n""" 1968 1969 if nborns > 0: 1970 1971 for i, c_link in enumerate(color_links): 1972 iborn = i+1 1973 1974 iff = {True : 'if', False : 'elseif'}[i==0] 1975 1976 m, n = c_link['link'] 1977 1978 if m != n: 1979 iflines += \ 1980 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1981 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\ 1982 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1983 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1984 else: 1985 iflines += \ 1986 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1987 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\ 1988 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1989 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1990 1991 1992 file += iflines + \ 1993 """else 1994 wgt = 0d0 1995 endif 1996 1997 return 1998 end""" 1999 elif nborns == 0: 2000 #write a dummy file 2001 file+=""" 2002 c This is a dummy function because 2003 c this subdir has no soft singularities 2004 wgt = 0d0 2005 2006 return 2007 end""" 2008 # Write the end of the file 2009 2010 writer.writelines(file)
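    # Illustrative example (hypothetical, for clarity): with two color links,
    # say (1,2) and (3,3), the dispatch written to sborn_sf.f above would be
    #
    #     c b_sf_001 links partons 1 and 2
    #     if ((m.eq.1 .and. n.eq.2).or.(m.eq.2 .and. n.eq.1)) then
    #     call sb_sf_001(p_born,wgt)
    #     c b_sf_002 links partons 3 and 3
    #     elseif (m.eq.3 .and. n.eq.3) then
    #     call sb_sf_002(p_born,wgt)
    #     else
    #     wgt = 0d0
    #     endif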
2011 2012 2013 #=============================================================================== 2014 # write_b_sf_fks 2015 #=============================================================================== 2016 #test written
2017 - def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
2018 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format""" 2019 2020 matrix_element = copy.copy(fksborn.born_matrix_element) 2021 2022 if not matrix_element.get('processes') or \ 2023 not matrix_element.get('diagrams'): 2024 return 0 2025 2026 if not isinstance(writer, writers.FortranWriter): 2027 raise writers.FortranWriter.FortranWriterError(\ 2028 "writer not FortranWriter") 2029 # Set lowercase/uppercase Fortran code 2030 writers.FortranWriter.downcase = False 2031 2032 iborn = i + 1 2033 link = fksborn.color_links[i] 2034 2035 replace_dict = {} 2036 2037 replace_dict['iborn'] = iborn 2038 2039 # Extract version number and date from VERSION file 2040 info_lines = self.get_mg5_info_lines() 2041 replace_dict['info_lines'] = info_lines 2042 2043 # Extract process info lines 2044 process_lines = self.get_process_info_lines(matrix_element) 2045 replace_dict['process_lines'] = process_lines + \ 2046 "\nc spectators: %d %d \n" % tuple(link['link']) 2047 2048 # Extract ncomb 2049 ncomb = matrix_element.get_helicity_combinations() 2050 replace_dict['ncomb'] = ncomb 2051 2052 # Extract helicity lines 2053 helicity_lines = self.get_helicity_lines(matrix_element) 2054 replace_dict['helicity_lines'] = helicity_lines 2055 2056 # Extract IC line 2057 ic_line = self.get_ic_line(matrix_element) 2058 replace_dict['ic_line'] = ic_line 2059 2060 # Extract den_factor_lines 2061 den_factor_lines = self.get_den_factor_lines(fksborn) 2062 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 2063 2064 # Extract ngraphs 2065 ngraphs = matrix_element.get_number_of_amplitudes() 2066 replace_dict['ngraphs'] = ngraphs 2067 2068 # Extract nwavefuncs 2069 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2070 replace_dict['nwavefuncs'] = nwavefuncs 2071 2072 # Extract ncolor 2073 ncolor1 = max(1, len(link['orig_basis'])) 2074 replace_dict['ncolor1'] = ncolor1 2075 ncolor2 = max(1, len(link['link_basis'])) 2076 replace_dict['ncolor2'] = ncolor2 2077 2078 # Extract color data lines 2079 color_data_lines = self.get_color_data_lines_from_color_matrix(\ 2080 link['link_matrix']) 2081 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2082 2083 # Extract amp2 lines 2084 amp2_lines = self.get_amp2_lines(matrix_element) 2085 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2086 2087 # Extract JAMP lines 2088 jamp_lines = self.get_JAMP_lines(matrix_element) 2089 new_jamp_lines = [] 2090 for line in jamp_lines: 2091 line = string.replace(line, 'JAMP', 'JAMP1') 2092 new_jamp_lines.append(line) 2093 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines) 2094 2095 matrix_element.set('color_basis', link['link_basis'] ) 2096 jamp_lines = self.get_JAMP_lines(matrix_element) 2097 new_jamp_lines = [] 2098 for line in jamp_lines: 2099 line = string.replace(line, 'JAMP', 'JAMP2') 2100 new_jamp_lines.append(line) 2101 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines) 2102 2103 2104 # Extract the number of FKS process 2105 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2106 2107 file = open(os.path.join(_file_path, \ 2108 'iolibs/template_files/b_sf_xxx_fks.inc')).read() 2109 file = file % replace_dict 2110 2111 # Write the file 2112 writer.writelines(file) 2113 2114 return 0 , ncolor1
2115 2116 2117 #=============================================================================== 2118 # write_born_nhel_file 2119 #=============================================================================== 2120 #test written
2121 - def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
2122 """Write the born_nhel.inc file for MG4.""" 2123 2124 ncomb = matrix_element.get_helicity_combinations() 2125 file = " integer max_bhel, max_bcol \n" 2126 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \ 2127 (ncomb, nflows) 2128 2129 # Write the file 2130 writer.writelines(file) 2131 2132 return True
2133 2134 #=============================================================================== 2135 # write_fks_info_file 2136 #===============================================================================
2137 - def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
2138          """Writes the content of nFKSconfigs.inc, which just gives the
2139          total number of FKS directories as a parameter.
2140          nFKSconfigs is always >=1 (use a fake configuration for LOonly)"""
2141          replace_dict = {}
2142          replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1)
2143          content = \
2144  """ INTEGER FKS_CONFIGS
2145  PARAMETER (FKS_CONFIGS=%(nconfs)d)
2146
2147  """ % replace_dict
2148
2149          writer.writelines(content)
2150 2151 2152 #=============================================================================== 2153 # write_fks_info_file 2154 #===============================================================================
2155 - def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
2156 """Writes the content of fks_info.inc, which lists the informations on the 2157 possible splittings of the born ME. 2158 nconfs is always >=1 (use a fake configuration for LOonly). 2159 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and 2160 the last colored particle as j_fks.""" 2161 2162 replace_dict = {} 2163 fks_info_list = fksborn.get_fks_info_list() 2164 replace_dict['nconfs'] = max(len(fks_info_list), 1) 2165 2166 # this is for processes with 'real' or 'all' as NLO mode 2167 if len(fks_info_list) > 0: 2168 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \ 2169 for info in fks_info_list]) 2170 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \ 2171 for info in fks_info_list]) 2172 2173 col_lines = [] 2174 pdg_lines = [] 2175 charge_lines = [] 2176 fks_j_from_i_lines = [] 2177 for i, info in enumerate(fks_info_list): 2178 col_lines.append( \ 2179 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2180 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) )) 2181 pdg_lines.append( \ 2182 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2183 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs']))) 2184 charge_lines.append(\ 2185 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\ 2186 % (i + 1, ', '.join('%19.15fd0' % charg\ 2187 for charg in fksborn.real_processes[info['n_me']-1].charges) )) 2188 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\ 2189 i + 1)) 2190 else: 2191 # this is for 'LOonly', generate a fake FKS configuration with 2192 # - i_fks = nexternal, pdg type = -21 and color =8 2193 # - j_fks = the last colored particle 2194 bornproc = fksborn.born_matrix_element.get('processes')[0] 2195 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21] 2196 colors = [l.get('color') for l in bornproc.get('legs')] + [8] 2197 charges = [0.] 
* len(colors) 2198 2199 fks_i = len(colors) 2200 # use the first colored particle if it exists, or 2201 # just the first 2202 fks_j=1 2203 for cpos, col in enumerate(colors[:-1]): 2204 if col != 1: 2205 fks_j = cpos+1 2206 2207 fks_i_values = str(fks_i) 2208 fks_j_values = str(fks_j) 2209 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2210 % ', '.join([str(col) for col in colors])] 2211 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2212 % ', '.join([str(pdg) for pdg in pdgs])] 2213 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2214 % ', '.join('%19.15fd0' % charg for charg in charges)] 2215 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \ 2216 % (fks_i, fks_j)] 2217 2218 2219 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values 2220 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values 2221 replace_dict['col_lines'] = '\n'.join(col_lines) 2222 replace_dict['pdg_lines'] = '\n'.join(pdg_lines) 2223 replace_dict['charge_lines'] = '\n'.join(charge_lines) 2224 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines) 2225 2226 content = \ 2227 """ INTEGER IPOS, JPOS 2228 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d) 2229 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL) 2230 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL) 2231 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL) 2232 2233 %(fks_i_line)s 2234 %(fks_j_line)s 2235 2236 %(fks_j_from_i_lines)s 2237 2238 C 2239 C Particle type: 2240 C octet = 8, triplet = 3, singlet = 1 2241 %(col_lines)s 2242 2243 C 2244 C Particle type according to PDG: 2245 C 2246 %(pdg_lines)s 2247 2248 C 2249 C Particle charge: 2250 C charge is set 0. with QCD corrections, which is irrelevant 2251 %(charge_lines)s 2252 """ % replace_dict 2253 if not isinstance(writer, writers.FortranWriter): 2254 raise writers.FortranWriter.FortranWriterError(\ 2255 "writer not FortranWriter") 2256 # Set lowercase/uppercase Fortran code 2257 writers.FortranWriter.downcase = False 2258 2259 writer.writelines(content) 2260 2261 return True
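    # Illustrative example (hypothetical, for clarity): for an LOonly process
    # u u~ > e+ e- the fake-configuration branch above would build
    #
    #     pdgs   = [2, -2, -11, 11, -21]    (the 'antigluon' appended as i_fks)
    #     colors = [3, -3, 1, 1, 8]         (only the test col != 1 matters here)
    #
    # so the loop over colors[:-1] keeps the last colored leg, giving
    # fks_j = 2 (the u~ beam), while fks_i = len(colors) = 5.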
2262 2263 2264 #=============================================================================== 2265 # write_matrix_element_fks 2266 #=============================================================================== 2267 #test written
2268 - def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
2269 """Export a matrix element to a matrix.f file in MG4 madevent format""" 2270 2271 if not matrix_element.get('processes') or \ 2272 not matrix_element.get('diagrams'): 2273 return 0,0 2274 2275 if not isinstance(writer, writers.FortranWriter): 2276 raise writers.FortranWriter.FortranWriterError(\ 2277 "writer not FortranWriter") 2278 # Set lowercase/uppercase Fortran code 2279 writers.FortranWriter.downcase = False 2280 2281 replace_dict = {} 2282 replace_dict['N_me'] = n 2283 2284 # Extract version number and date from VERSION file 2285 info_lines = self.get_mg5_info_lines() 2286 replace_dict['info_lines'] = info_lines 2287 2288 # Extract process info lines 2289 process_lines = self.get_process_info_lines(matrix_element) 2290 replace_dict['process_lines'] = process_lines 2291 2292 # Extract ncomb 2293 ncomb = matrix_element.get_helicity_combinations() 2294 replace_dict['ncomb'] = ncomb 2295 2296 # Extract helicity lines 2297 helicity_lines = self.get_helicity_lines(matrix_element) 2298 replace_dict['helicity_lines'] = helicity_lines 2299 2300 # Extract IC line 2301 ic_line = self.get_ic_line(matrix_element) 2302 replace_dict['ic_line'] = ic_line 2303 2304 # Extract overall denominator 2305 # Averaging initial state color, spin, and identical FS particles 2306 den_factor_line = self.get_den_factor_line(matrix_element) 2307 replace_dict['den_factor_line'] = den_factor_line 2308 2309 # Extract ngraphs 2310 ngraphs = matrix_element.get_number_of_amplitudes() 2311 replace_dict['ngraphs'] = ngraphs 2312 2313 # Extract ncolor 2314 ncolor = max(1, len(matrix_element.get('color_basis'))) 2315 replace_dict['ncolor'] = ncolor 2316 2317 # Extract color data lines 2318 color_data_lines = self.get_color_data_lines(matrix_element) 2319 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2320 2321 # Extract helas calls 2322 helas_calls = fortran_model.get_matrix_element_calls(\ 2323 matrix_element) 2324 replace_dict['helas_calls'] = "\n".join(helas_calls) 2325 2326 # Extract nwavefuncs (important to place after get_matrix_element_calls 2327 # so that 'me_id' is set) 2328 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2329 replace_dict['nwavefuncs'] = nwavefuncs 2330 2331 # Extract amp2 lines 2332 amp2_lines = self.get_amp2_lines(matrix_element) 2333 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2334 2335 # Set the size of Wavefunction 2336 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2337 replace_dict['wavefunctionsize'] = 20 2338 else: 2339 replace_dict['wavefunctionsize'] = 8 2340 2341 # Extract JAMP lines 2342 jamp_lines = self.get_JAMP_lines(matrix_element) 2343 2344 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2345 2346 realfile = open(os.path.join(_file_path, \ 2347 'iolibs/template_files/realmatrix_fks.inc')).read() 2348 2349 realfile = realfile % replace_dict 2350 2351 # Write the file 2352 writer.writelines(realfile) 2353 2354 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2355 2356 2357 #=============================================================================== 2358 # write_pdf_file 2359 #===============================================================================
2360 - def write_pdf_file(self, writer, matrix_element, n, fortran_model):
2361 #test written 2362 """Write the auto_dsig.f file for MadFKS, which contains 2363 pdf call information""" 2364 2365 if not matrix_element.get('processes') or \ 2366 not matrix_element.get('diagrams'): 2367 return 0 2368 2369 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2370 2371 if ninitial < 1 or ninitial > 2: 2372 raise writers.FortranWriter.FortranWriterError, \ 2373 """Need ninitial = 1 or 2 to write auto_dsig file""" 2374 2375 replace_dict = {} 2376 2377 replace_dict['N_me'] = n 2378 2379 # Extract version number and date from VERSION file 2380 info_lines = self.get_mg5_info_lines() 2381 replace_dict['info_lines'] = info_lines 2382 2383 # Extract process info lines 2384 process_lines = self.get_process_info_lines(matrix_element) 2385 replace_dict['process_lines'] = process_lines 2386 2387 pdf_vars, pdf_data, pdf_lines = \ 2388 self.get_pdf_lines_mir(matrix_element, ninitial, False, False) 2389 replace_dict['pdf_vars'] = pdf_vars 2390 replace_dict['pdf_data'] = pdf_data 2391 replace_dict['pdf_lines'] = pdf_lines 2392 2393 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \ 2394 self.get_pdf_lines_mir(matrix_element, ninitial, False, True) 2395 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr 2396 2397 file = open(os.path.join(_file_path, \ 2398 'iolibs/template_files/parton_lum_n_fks.inc')).read() 2399 file = file % replace_dict 2400 2401 # Write the file 2402 writer.writelines(file)
2403 2404 2405 2406 #=============================================================================== 2407 # write_coloramps_file 2408 #=============================================================================== 2409 #test written
2410 - def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
2411 """Write the coloramps.inc file for MadEvent""" 2412 2413 lines = [] 2414 lines.append( "logical icolamp(%d,%d,1)" % \ 2415 (max(len(matrix_element.get('color_basis').keys()), 1), 2416 len(mapconfigs))) 2417 2418 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1) 2419 2420 # Write the file 2421 writer.writelines(lines) 2422 2423 return True
2424 2425 2426 #=============================================================================== 2427 # write_leshouche_file 2428 #=============================================================================== 2429 #test written
2430 - def write_leshouche_file(self, writer, matrix_element, fortran_model):
2431 """Write the leshouche.inc file for MG4""" 2432 2433 # Extract number of external particles 2434 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2435 2436 lines = [] 2437 for iproc, proc in enumerate(matrix_element.get('processes')): 2438 legs = proc.get_legs_with_decays() 2439 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \ 2440 (iproc + 1, nexternal, 2441 ",".join([str(l.get('id')) for l in legs]))) 2442 for i in [1, 2]: 2443 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2444 (i, iproc + 1, nexternal, 2445 ",".join([ "%3r" % 0 ] * ninitial + \ 2446 [ "%3r" % i ] * (nexternal - ninitial)))) 2447 2448 # Here goes the color connections corresponding to the JAMPs 2449 # Only one output, for the first subproc! 2450 if iproc == 0: 2451 # If no color basis, just output trivial color flow 2452 if not matrix_element.get('color_basis'): 2453 for i in [1, 2]: 2454 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \ 2455 (i, nexternal, 2456 ",".join([ "%3r" % 0 ] * nexternal))) 2457 color_flow_list = [] 2458 2459 else: 2460 # First build a color representation dictionnary 2461 repr_dict = {} 2462 for l in legs: 2463 repr_dict[l.get('number')] = \ 2464 proc.get('model').get_particle(l.get('id')).get_color()\ 2465 * (-1)**(1+l.get('state')) 2466 # Get the list of color flows 2467 color_flow_list = \ 2468 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2469 ninitial) 2470 # And output them properly 2471 for cf_i, color_flow_dict in enumerate(color_flow_list): 2472 for i in [0, 1]: 2473 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2474 (i + 1, cf_i + 1, nexternal, 2475 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2476 for l in legs]))) 2477 2478 # Write the file 2479 writer.writelines(lines) 2480 2481 return len(color_flow_list)
2482 2483 2484 #=============================================================================== 2485 # write_configs_file 2486 #=============================================================================== 2487 #test_written
2488 - def write_configs_file(self, writer, matrix_element, fortran_model):
2489 """Write the configs.inc file for MadEvent""" 2490 2491 # Extract number of external particles 2492 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2493 lines = [] 2494 2495 iconfig = 0 2496 2497 s_and_t_channels = [] 2498 mapconfigs = [] 2499 2500 model = matrix_element.get('processes')[0].get('model') 2501 # new_pdg = model.get_first_non_pdg() 2502 2503 base_diagrams = matrix_element.get('base_amplitude').get('diagrams') 2504 model = matrix_element.get('base_amplitude').get('process').get('model') 2505 minvert = min([max([len(vert.get('legs')) for vert in \ 2506 diag.get('vertices')]) for diag in base_diagrams]) 2507 2508 for idiag, diag in enumerate(base_diagrams): 2509 if any([len(vert.get('legs')) > minvert for vert in 2510 diag.get('vertices')]): 2511 # Only 3-vertices allowed in configs.inc 2512 continue 2513 iconfig = iconfig + 1 2514 helas_diag = matrix_element.get('diagrams')[idiag] 2515 mapconfigs.append(helas_diag.get('number')) 2516 lines.append("# Diagram %d, Amplitude %d" % \ 2517 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number'])) 2518 # Correspondance between the config and the amplitudes 2519 lines.append("data mapconfig(%4d)/%4d/" % (iconfig, 2520 helas_diag.get('amplitudes')[0]['number'])) 2521 2522 # Need to reorganize the topology so that we start with all 2523 # final state external particles and work our way inwards 2524 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 2525 get_s_and_t_channels(ninitial, model, 990) 2526 2527 s_and_t_channels.append([schannels, tchannels]) 2528 2529 # Write out propagators for s-channel and t-channel vertices 2530 allchannels = schannels 2531 if len(tchannels) > 1: 2532 # Write out tchannels only if there are any non-trivial ones 2533 allchannels = schannels + tchannels 2534 2535 for vert in allchannels: 2536 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2537 last_leg = vert.get('legs')[-1] 2538 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \ 2539 (last_leg.get('number'), iconfig, len(daughters), 2540 ",".join(["%3d" % d for d in daughters]))) 2541 if vert in schannels: 2542 lines.append("data sprop(%4d,%4d)/%8d/" % \ 2543 (last_leg.get('number'), iconfig, 2544 last_leg.get('id'))) 2545 elif vert in tchannels[:-1]: 2546 lines.append("data tprid(%4d,%4d)/%8d/" % \ 2547 (last_leg.get('number'), iconfig, 2548 abs(last_leg.get('id')))) 2549 2550 # Write out number of configs 2551 lines.append("# Number of configs") 2552 lines.append("data mapconfig(0)/%4d/" % iconfig) 2553 2554 # Write the file 2555 writer.writelines(lines) 2556 2557 return iconfig, mapconfigs, s_and_t_channels
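    # Illustrative example (hypothetical, for clarity): for a configuration
    # whose only s-channel vertex merges legs 3 and 4 into internal leg 5 with
    # pdg code 23 (a Z boson), the lines appended above would look like
    #
    #     data mapconfig(   1)/   1/
    #     data (iforest(i,  5,   1),i=1,2)/  3,  4/
    #     data sprop(   5,   1)/      23/
    #
    # (field widths as produced by the %3d/%4d/%8d formats).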
2558 2559 2560 #=============================================================================== 2561 # write_decayBW_file 2562 #=============================================================================== 2563 #test written
2564 - def write_decayBW_file(self, writer, s_and_t_channels):
2565 """Write the decayBW.inc file for MadEvent""" 2566 2567 lines = [] 2568 2569 booldict = {False: ".false.", True: ".false."} 2570 ####Changed by MZ 2011-11-23!!!! 2571 2572 for iconf, config in enumerate(s_and_t_channels): 2573 schannels = config[0] 2574 for vertex in schannels: 2575 # For the resulting leg, pick out whether it comes from 2576 # decay or not, as given by the from_group flag 2577 leg = vertex.get('legs')[-1] 2578 lines.append("data gForceBW(%d,%d)/%s/" % \ 2579 (leg.get('number'), iconf + 1, 2580 booldict[leg.get('from_group')])) 2581 2582 # Write the file 2583 writer.writelines(lines) 2584 2585 return True
2586 2587 2588 #=============================================================================== 2589 # write_dname_file 2590 #===============================================================================
2591 - def write_dname_file(self, writer, matrix_element, fortran_model):
2592 """Write the dname.mg file for MG4""" 2593 2594 line = "DIRNAME=P%s" % \ 2595 matrix_element.get('processes')[0].shell_string() 2596 2597 # Write the file 2598 writer.write(line + "\n") 2599 2600 return True
2601 2602 2603 #=============================================================================== 2604 # write_iproc_file 2605 #===============================================================================
2606 - def write_iproc_file(self, writer, me_number):
2607 """Write the iproc.dat file for MG4""" 2608 2609 line = "%d" % (me_number + 1) 2610 2611 # Write the file 2612 for line_to_write in writer.write_line(line): 2613 writer.write(line_to_write) 2614 return True
2615 2616 2617 #=============================================================================== 2618 # Helper functions 2619 #=============================================================================== 2620 2621 2622 #=============================================================================== 2623 # get_fks_j_from_i_lines 2624 #=============================================================================== 2625
2626 - def get_fks_j_from_i_lines(self, me, i = 0): #test written
2627          """generate the lines for fks.inc describing the initialization of the
2628          fks_j_from_i array"""
2629          lines = []
2630          if not me.isfinite:
2631              for ii, js in me.fks_j_from_i.items():
2632                  if js:
2633                      lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
2634                       % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js])))
2635          else:
2636              lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \
2637                       % (2, 1, 1, '1'))
2638          lines.append('')
2639
2640          return lines
2641
2642
2643  #===============================================================================
2644  # get_leshouche_lines
2645  #===============================================================================
2646 - def get_leshouche_lines(self, matrix_element, ime):
2647 #test written 2648 """Write the leshouche.inc file for MG4""" 2649 2650 # Extract number of external particles 2651 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2652 2653 lines = [] 2654 for iproc, proc in enumerate(matrix_element.get('processes')): 2655 legs = proc.get_legs_with_decays() 2656 lines.append("I %4d %4d %s" % \ 2657 (ime, iproc + 1, 2658 " ".join([str(l.get('id')) for l in legs]))) 2659 for i in [1, 2]: 2660 lines.append("M %4d %4d %4d %s" % \ 2661 (ime, i, iproc + 1, 2662 " ".join([ "%3d" % 0 ] * ninitial + \ 2663 [ "%3d" % i ] * (nexternal - ninitial)))) 2664 2665 # Here goes the color connections corresponding to the JAMPs 2666 # Only one output, for the first subproc! 2667 if iproc == 0: 2668 # If no color basis, just output trivial color flow 2669 if not matrix_element.get('color_basis'): 2670 for i in [1, 2]: 2671 lines.append("C %4d %4d 1 %s" % \ 2672 (ime, i, 2673 " ".join([ "%3d" % 0 ] * nexternal))) 2674 color_flow_list = [] 2675 nflow = 1 2676 2677 else: 2678 # First build a color representation dictionnary 2679 repr_dict = {} 2680 for l in legs: 2681 repr_dict[l.get('number')] = \ 2682 proc.get('model').get_particle(l.get('id')).get_color()\ 2683 * (-1)**(1+l.get('state')) 2684 # Get the list of color flows 2685 color_flow_list = \ 2686 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2687 ninitial) 2688 # And output them properly 2689 for cf_i, color_flow_dict in enumerate(color_flow_list): 2690 for i in [0, 1]: 2691 lines.append("C %4d %4d %4d %s" % \ 2692 (ime, i + 1, cf_i + 1, 2693 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2694 for l in legs]))) 2695 2696 nflow = len(color_flow_list) 2697 2698 nproc = len(matrix_element.get('processes')) 2699 2700 return lines, nproc, nflow
2701 2702 2703 #=============================================================================== 2704 # get_den_factor_lines 2705 #===============================================================================
2706 - def get_den_factor_lines(self, fks_born):
2707          """returns the lines with the information on the denominator, taking into
2708          account the identical-particle factors in the various real emissions"""
2709
2710          lines = []
2711          info_list = fks_born.get_fks_info_list()
2712          if info_list:
2713              # if the reals have been generated, fill with the corresponding average factor
2714              lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list))
2715              lines.append('DATA IDEN_VALUES /' + \
2716                           ', '.join(['%d' % (
2717                              fks_born.born_matrix_element.get_denominator_factor() ) \
2718                              for info in info_list]) + '/')
2719          else:
2720              # otherwise use the born
2721              lines.append('INTEGER IDEN_VALUES(1)')
2722              lines.append('DATA IDEN_VALUES / %d /' \
2723                      % fks_born.born_matrix_element.get_denominator_factor())
2724
2725          return lines
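    # Illustrative example (hypothetical, for clarity): for a Born matrix
    # element with denominator factor 36 and two FKS configurations, the lines
    # returned above would be
    #
    #     INTEGER IDEN_VALUES(2)
    #     DATA IDEN_VALUES /36, 36/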
2726 2727 2728 #=============================================================================== 2729 # get_ij_lines 2730 #===============================================================================
2731 - def get_ij_lines(self, fks_born):
2732 """returns the lines with the information on the particle number of the born 2733 that splits""" 2734 info_list = fks_born.get_fks_info_list() 2735 lines = [] 2736 if info_list: 2737 # if the reals have been generated, fill with the corresponding value of ij 2738 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 2739 lines.append('DATA IJ_VALUES /' + \ 2740 ', '.join(['%d' % info['fks_info']['ij'] for info in info_list]) + '/') 2741 else: 2742 #otherwise just put the first leg 2743 lines.append('INTEGER IJ_VALUES(1)') 2744 lines.append('DATA IJ_VALUES / 1 /') 2745 2746 return lines
2747 2748
2749 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\ 2750 mirror = False): #test written
2751 """Generate the PDF lines for the auto_dsig.f file""" 2752 2753 processes = matrix_element.get('processes') 2754 model = processes[0].get('model') 2755 2756 pdf_definition_lines = "" 2757 pdf_data_lines = "" 2758 pdf_lines = "" 2759 2760 if ninitial == 1: 2761 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 2762 for i, proc in enumerate(processes): 2763 process_line = proc.base_string() 2764 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2765 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n" 2766 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 2767 else: 2768 # Pick out all initial state particles for the two beams 2769 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 2770 p in processes]))), 2771 sorted(list(set([p.get_initial_pdg(2) for \ 2772 p in processes])))] 2773 2774 # Prepare all variable names 2775 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 2776 sum(initial_states,[])]) 2777 for key,val in pdf_codes.items(): 2778 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 2779 2780 # Set conversion from PDG code to number used in PDF calls 2781 pdgtopdf = {21: 0, 22: 7} 2782 # Fill in missing entries of pdgtopdf 2783 for pdg in sum(initial_states,[]): 2784 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 2785 pdgtopdf[pdg] = pdg 2786 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 2787 # If any particle has pdg code 7, we need to use something else 2788 pdgtopdf[pdg] = 6000000 + pdg 2789 2790 # Get PDF variable declarations for all initial states 2791 for i in [0,1]: 2792 pdf_definition_lines += "DOUBLE PRECISION " + \ 2793 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2794 for pdg in \ 2795 initial_states[i]]) + \ 2796 "\n" 2797 2798 # Get PDF data lines for all initial states 2799 for i in [0,1]: 2800 pdf_data_lines += "DATA " + \ 2801 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2802 for pdg in initial_states[i]]) + \ 2803 "/%d*1D0/" % len(initial_states[i]) + \ 2804 "\n" 2805 2806 # Get PDF values for the different initial states 2807 for i, init_states in enumerate(initial_states): 2808 if not mirror: 2809 ibeam = i + 1 2810 else: 2811 ibeam = 2 - i 2812 if subproc_group: 2813 pdf_lines = pdf_lines + \ 2814 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 2815 % (ibeam, ibeam) 2816 else: 2817 pdf_lines = pdf_lines + \ 2818 "IF (ABS(LPP(%d)) .GE. 
1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 2819 % (ibeam, ibeam) 2820 2821 for initial_state in init_states: 2822 if initial_state in pdf_codes.keys(): 2823 if subproc_group: 2824 if abs(pdgtopdf[initial_state]) <= 7: 2825 pdf_lines = pdf_lines + \ 2826 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 2827 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 2828 (pdf_codes[initial_state], 2829 i + 1, ibeam, pdgtopdf[initial_state], 2830 ibeam, ibeam) 2831 else: 2832 # setting other partons flavours outside quark, gluon, photon to be 0d0 2833 pdf_lines = pdf_lines + \ 2834 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2835 "%s%d=0d0\n") % \ 2836 (pdf_codes[initial_state],i + 1) 2837 else: 2838 if abs(pdgtopdf[initial_state]) <= 7: 2839 pdf_lines = pdf_lines + \ 2840 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 2841 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 2842 (pdf_codes[initial_state], 2843 i + 1, ibeam, pdgtopdf[initial_state], 2844 ibeam, ibeam) 2845 else: 2846 # setting other partons flavours outside quark, gluon, photon to be 0d0 2847 pdf_lines = pdf_lines + \ 2848 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2849 "%s%d=0d0\n") % \ 2850 (pdf_codes[initial_state],i + 1) 2851 2852 pdf_lines = pdf_lines + "ENDIF\n" 2853 2854 # Add up PDFs for the different initial state particles 2855 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 2856 for proc in processes: 2857 process_line = proc.base_string() 2858 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2859 pdf_lines = pdf_lines + "\nPD(IPROC) = " 2860 for ibeam in [1, 2]: 2861 initial_state = proc.get_initial_pdg(ibeam) 2862 if initial_state in pdf_codes.keys(): 2863 pdf_lines = pdf_lines + "%s%d*" % \ 2864 (pdf_codes[initial_state], ibeam) 2865 else: 2866 pdf_lines = pdf_lines + "1d0*" 2867 # Remove last "*" from pdf_lines 2868 pdf_lines = pdf_lines[:-1] + "\n" 2869 2870 # Remove last line break from pdf_lines 2871 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1] 2872 2873 2874 #test written
2875 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
2876 """Return the color matrix definition lines for the given color_matrix. Split 2877 rows in chunks of size n.""" 2878 2879 if not color_matrix: 2880 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 2881 else: 2882 ret_list = [] 2883 my_cs = color.ColorString() 2884 for index, denominator in \ 2885 enumerate(color_matrix.get_line_denominators()): 2886 # First write the common denominator for this color matrix line 2887 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 2888 # Then write the numerators for the matrix elements 2889 num_list = color_matrix.get_line_numerators(index, denominator) 2890 for k in xrange(0, len(num_list), n): 2891 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 2892 (index + 1, k + 1, min(k + n, len(num_list)), 2893 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 2894 2895 return ret_list
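    # Illustrative example (hypothetical, for clarity): the chunking of one
    # color-matrix row into DATA statements of at most n entries can be
    # reproduced with the stand-alone sketch below, where num_list stands for
    # the numerators of a single row (here 8 of them, so one row of 6 and one
    # row of 2 entries are printed).
    #
    #     num_list = [7, -2, -2, 7, 16, 16, -2, 7]
    #     n = 6
    #     for k in xrange(0, len(num_list), n):
    #         print "DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
    #             (1, k + 1, min(k + n, len(num_list)),
    #              ','.join(["%5r" % i for i in num_list[k:k + n]]))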
2896 2897 #=========================================================================== 2898 # write_maxamps_file 2899 #===========================================================================
2900 - def write_maxamps_file(self, writer, maxamps, maxflows, 2901 maxproc,maxsproc):
2902 """Write the maxamps.inc file for MG4.""" 2903 2904 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 2905 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 2906 (maxamps, maxflows) 2907 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 2908 (maxproc, maxsproc) 2909 2910 # Write the file 2911 writer.writelines(file) 2912 2913 return True
2914 2915 #=============================================================================== 2916 # write_ncombs_file 2917 #===============================================================================
2918 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
2919      # #test written
2920          """Write the ncombs.inc file for MadEvent."""
2921
2922          # Extract number of external particles
2923          (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2924
2925          # n_max_cl (used for clustering) is 2*2^(nexternal) = 2^(nexternal+1)
2926          file = " integer n_max_cl\n"
2927          file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1))
2928
2929          # Write the file
2930          writer.writelines(file)
2931
2932          return True
2933 2934 #=========================================================================== 2935 # write_config_subproc_map_file 2936 #===========================================================================
2937 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
2938 """Write a dummy config_subproc.inc file for MadEvent""" 2939 2940 lines = [] 2941 2942 for iconfig in range(len(s_and_t_channels)): 2943 lines.append("DATA CONFSUB(1,%d)/1/" % \ 2944 (iconfig + 1)) 2945 2946 # Write the file 2947 writer.writelines(lines) 2948 2949 return True
2950 2951 #=========================================================================== 2952 # write_colors_file 2953 #===========================================================================
2954 - def write_colors_file(self, writer, matrix_element):
2955 """Write the get_color.f file for MadEvent, which returns color 2956 for all particles used in the matrix element.""" 2957 2958 try: 2959 matrix_elements=matrix_element.real_processes[0].matrix_element 2960 except IndexError: 2961 matrix_elements=[matrix_element.born_matrix_element] 2962 2963 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 2964 matrix_elements = [matrix_elements] 2965 2966 model = matrix_elements[0].get('processes')[0].get('model') 2967 2968 # We need the both particle and antiparticle wf_ids, since the identity 2969 # depends on the direction of the wf. 2970 # loop on the real emissions 2971 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2972 for wf in d.get('wavefunctions')],[]) \ 2973 for d in me.get('diagrams')],[]) \ 2974 for me in [real_proc.matrix_element]],[])\ 2975 for real_proc in matrix_element.real_processes],[])) 2976 # and also on the born 2977 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2978 for wf in d.get('wavefunctions')],[]) \ 2979 for d in matrix_element.born_matrix_element.get('diagrams')],[]))) 2980 2981 # loop on the real emissions 2982 leg_ids = set(sum([sum([sum([[l.get('id') for l in \ 2983 p.get_legs_with_decays()] for p in \ 2984 me.get('processes')], []) for me in \ 2985 [real_proc.matrix_element]], []) for real_proc in \ 2986 matrix_element.real_processes],[])) 2987 # and also on the born 2988 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \ 2989 p.get_legs_with_decays()] for p in \ 2990 matrix_element.born_matrix_element.get('processes')], []))) 2991 particle_ids = sorted(list(wf_ids.union(leg_ids))) 2992 2993 lines = """function get_color(ipdg) 2994 implicit none 2995 integer get_color, ipdg 2996 2997 if(ipdg.eq.%d)then 2998 get_color=%d 2999 return 3000 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3001 3002 for part_id in particle_ids[1:]: 3003 lines += """else if(ipdg.eq.%d)then 3004 get_color=%d 3005 return 3006 """ % (part_id, model.get_particle(part_id).get_color()) 3007 # Dummy particle for multiparticle vertices with pdg given by 3008 # first code not in the model 3009 lines += """else if(ipdg.eq.%d)then 3010 c This is dummy particle used in multiparticle vertices 3011 get_color=2 3012 return 3013 """ % model.get_first_non_pdg() 3014 lines += """else 3015 write(*,*)'Error: No color given for pdg ',ipdg 3016 get_color=0 3017 return 3018 endif 3019 end 3020 """ 3021 3022 # Write the file 3023 writer.writelines(lines) 3024 3025 return True
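    # Illustrative example (hypothetical, for clarity): if the only particle
    # ids collected above were 1 (down quark) and 21 (gluon), the generated
    # get_color.f would essentially read
    #
    #     function get_color(ipdg)
    #     implicit none
    #     integer get_color, ipdg
    #
    #     if(ipdg.eq.1)then
    #     get_color=3
    #     return
    #     else if(ipdg.eq.21)then
    #     get_color=8
    #     return
    #     ...
    #     else
    #     write(*,*)'Error: No color given for pdg ',ipdg
    #     get_color=0
    #     return
    #     endif
    #     end
    #
    # with the '...' standing for the dummy-particle branch written above.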
3026 3027 #=============================================================================== 3028 # write_props_file 3029 #=============================================================================== 3030 #test_written
3031 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3032 """Write the props.inc file for MadEvent. Needs input from 3033 write_configs_file. With respect to the parent routine, it has some 3034 more specific formats that allow the props.inc file to be read by the 3035 link program""" 3036 3037 lines = [] 3038 3039 particle_dict = matrix_element.get('processes')[0].get('model').\ 3040 get('particle_dict') 3041 3042 for iconf, configs in enumerate(s_and_t_channels): 3043 for vertex in configs[0] + configs[1][:-1]: 3044 leg = vertex.get('legs')[-1] 3045 if leg.get('id') not in particle_dict: 3046 # Fake propagator used in multiparticle vertices 3047 mass = 'zero' 3048 width = 'zero' 3049 pow_part = 0 3050 else: 3051 particle = particle_dict[leg.get('id')] 3052 # Get mass 3053 if particle.get('mass').lower() == 'zero': 3054 mass = particle.get('mass') 3055 else: 3056 mass = "abs(%s)" % particle.get('mass') 3057 # Get width 3058 if particle.get('width').lower() == 'zero': 3059 width = particle.get('width') 3060 else: 3061 width = "abs(%s)" % particle.get('width') 3062 3063 pow_part = 1 + int(particle.is_boson()) 3064 3065 lines.append("pmass(%3d,%4d) = %s" % \ 3066 (leg.get('number'), iconf + 1, mass)) 3067 lines.append("pwidth(%3d,%4d) = %s" % \ 3068 (leg.get('number'), iconf + 1, width)) 3069 lines.append("pow(%3d,%4d) = %d" % \ 3070 (leg.get('number'), iconf + 1, pow_part)) 3071 3072 # Write the file 3073 writer.writelines(lines) 3074 3075 return True
3076 3077 3078 #=========================================================================== 3079 # write_subproc 3080 #===========================================================================
3081 - def write_subproc(self, writer, subprocdir):
3082 """Append this subprocess to the subproc.mg file for MG4""" 3083 3084 # Write line to file 3085 writer.write(subprocdir + "\n") 3086 3087 return True
3088 3089 3090 3091 3092 3093 #================================================================================= 3094 # Class for using the optimized Loop process 3095 #=================================================================================
3096 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 3097 ProcessExporterFortranFKS):
3098 """Class to take care of exporting a set of matrix elements to 3099 Fortran (v4) format.""" 3100 3101
3102 - def finalize(self, *args, **opts):
3104 #export_v4.ProcessExporterFortranSA.finalize(self, *args, **opts) 3105 3106 #=============================================================================== 3107 # copy the Template in a new directory. 3108 #===============================================================================
3109 - def copy_fkstemplate(self):
3110 """create the directory run_name as a copy of the MadEvent 3111 Template, and clean the directory 3112 For now it is just the same as copy_v4template, but it will be modified 3113 """ 3114 mgme_dir = self.mgme_dir 3115 dir_path = self.dir_path 3116 clean =self.opt['clean'] 3117 3118 #First copy the full template tree if dir_path doesn't exit 3119 if not os.path.isdir(dir_path): 3120 if not mgme_dir: 3121 raise MadGraph5Error, \ 3122 "No valid MG_ME path given for MG4 run directory creation." 3123 logger.info('initialize a new directory: %s' % \ 3124 os.path.basename(dir_path)) 3125 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 3126 # distutils.dir_util.copy_tree since dir_path already exists 3127 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 3128 dir_path) 3129 # Copy plot_card 3130 for card in ['plot_card']: 3131 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 3132 try: 3133 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 3134 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 3135 except IOError: 3136 logger.warning("Failed to copy " + card + ".dat to default") 3137 3138 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 3139 if not mgme_dir: 3140 raise MadGraph5Error, \ 3141 "No valid MG_ME path given for MG4 run directory creation." 3142 try: 3143 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 3144 except IOError: 3145 MG5_version = misc.get_pkg_info() 3146 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 3147 "5." + MG5_version['version']) 3148 3149 #Ensure that the Template is clean 3150 if clean: 3151 logger.info('remove old information in %s' % os.path.basename(dir_path)) 3152 if os.environ.has_key('MADGRAPH_BASE'): 3153 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 3154 '--web'], cwd=dir_path) 3155 else: 3156 try: 3157 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 3158 cwd=dir_path) 3159 except Exception, why: 3160 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 3161 % (os.path.basename(dir_path),why)) 3162 #Write version info 3163 MG_version = misc.get_pkg_info() 3164 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 3165 MG_version['version']) 3166 3167 # We must link the CutTools to the Library folder of the active Template 3168 self.link_CutTools(dir_path) 3169 # We must link the TIR to the Library folder of the active Template 3170 link_tir_libs=[] 3171 tir_libs=[] 3172 tir_include=[] 3173 for tir in self.all_tir: 3174 tir_dir="%s_dir"%tir 3175 libpath=getattr(self,tir_dir) 3176 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'), 3177 libpath,"lib%s.a"%tir,tir_name=tir) 3178 setattr(self,tir_dir,libpath) 3179 if libpath != "": 3180 if tir in ['pjfry','ninja','golem', 'samurai','collier']: 3181 # We should link dynamically when possible, so we use the original 3182 # location of these libraries. 3183 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir)) 3184 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir)) 3185 # For Ninja, we must also link against OneLoop. 3186 if tir in ['ninja']: 3187 if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext)) 3188 for ext in ['a','dylib','so']): 3189 raise MadGraph5Error( 3190 "The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. 
Please place a symlink to it there."%libpath) 3191 link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo')) 3192 tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo')) 3193 # We must add the corresponding includes for these TIR 3194 if tir in ['golem','samurai','ninja','collier']: 3195 trg_path = pjoin(os.path.dirname(libpath),'include') 3196 if os.path.isdir(trg_path): 3197 to_include = misc.find_includes_path(trg_path, 3198 self.include_names[tir]) 3199 else: 3200 to_include = None 3201 # Special possible location for collier 3202 if to_include is None and tir=='collier': 3203 to_include = misc.find_includes_path( 3204 pjoin(libpath,'modules'),self.include_names[tir]) 3205 if to_include is None: 3206 logger.error( 3207 'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+ 3208 'Generation carries on but you will need to edit the include path by hand in the makefiles.') 3209 to_include = '<Not_found_define_it_yourself>' 3210 tir_include.append('-I %s'%to_include) 3211 else: 3212 link_tir_libs.append('-l%s'%tir) 3213 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir) 3214 3215 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 3216 cwd = os.getcwd() 3217 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3218 try: 3219 os.chdir(dirpath) 3220 except os.error: 3221 logger.error('Could not cd to directory %s' % dirpath) 3222 return 0 3223 filename = 'makefile_loop' 3224 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 3225 link_tir_libs,tir_libs,tir_include=tir_include) 3226 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 3227 dirpath = os.path.join(self.dir_path, 'Source') 3228 try: 3229 os.chdir(dirpath) 3230 except os.error: 3231 logger.error('Could not cd to directory %s' % dirpath) 3232 return 0 3233 filename = 'make_opts' 3234 calls = self.write_make_opts(writers.MakefileWriter(filename), 3235 link_tir_libs,tir_libs) 3236 # Return to original PWD 3237 os.chdir(cwd) 3238 3239 cwd = os.getcwd() 3240 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3241 try: 3242 os.chdir(dirpath) 3243 except os.error: 3244 logger.error('Could not cd to directory %s' % dirpath) 3245 return 0 3246 3247 # We add here the user-friendly MadLoop option setter. 
3248 cpfiles= ["SubProcesses/MadLoopParamReader.f", 3249 "Cards/MadLoopParams.dat", 3250 "SubProcesses/MadLoopParams.inc"] 3251 3252 for file in cpfiles: 3253 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 3254 os.path.join(self.dir_path, file)) 3255 3256 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'), 3257 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat')) 3258 3259 3260 3261 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 3262 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 3263 'Cards', 'MadLoopParams.dat')) 3264 # write the output file 3265 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 3266 "MadLoopParams.dat")) 3267 3268 # We need minimal editing of MadLoopCommons.f 3269 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 3270 "SubProcesses","MadLoopCommons.inc")).read() 3271 writer = writers.FortranWriter(os.path.join(self.dir_path, 3272 "SubProcesses","MadLoopCommons.f")) 3273 writer.writelines(MadLoopCommon%{ 3274 'print_banner_commands':self.MadLoop_banner}, 3275 context={'collier_available':self.tir_available_dict['collier']}) 3276 writer.close() 3277 3278 # link the files from the MODEL 3279 model_path = self.dir_path + '/Source/MODEL/' 3280 # Note that for the [real=] mode, these files are not present 3281 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')): 3282 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses') 3283 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')): 3284 ln(model_path + '/mp_coupl_same_name.inc', \ 3285 self.dir_path + '/SubProcesses') 3286 3287 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 3288 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 3289 writers.FortranWriter('cts_mpc.h'),) 3290 3291 self.copy_python_files() 3292 3293 3294 # We need to create the correct open_data for the pdf 3295 self.write_pdf_opendata() 3296 3297 3298 # Return to original PWD 3299 os.chdir(cwd)
3300
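# ------------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): copy_fkstemplate above
# assembles, for every tensor-integral-reduction (TIR) library, a list of linker
# flags and a list of makefile dependencies, preferring a dynamic link against
# the library's own installation path and otherwise pointing at the static copy
# in $(LIBDIR). The standalone helper below is a minimal rendition of that
# selection logic; its name and the example paths mentioned in the trailing
# comment are hypothetical.
# ------------------------------------------------------------------------------
def _sketch_tir_link_flags(tir_paths):
    """Return (link_flags, dependencies) for a dict {tir_name: install_path_or_''},
    mimicking the flag construction performed in copy_fkstemplate."""
    dynamic_tirs = ['pjfry', 'ninja', 'golem', 'samurai', 'collier']
    link_flags, dependencies = [], []
    for tir, libpath in tir_paths.items():
        if not libpath:
            # reduction library not available: nothing to link
            continue
        if tir in dynamic_tirs:
            # link dynamically against the original installation
            link_flags.append('-L%s/ -l%s' % (libpath, tir))
            dependencies.append('%s/lib%s.$(libext)' % (libpath, tir))
        else:
            # library was copied into the local lib/ directory of the process
            link_flags.append('-l%s' % tir)
            dependencies.append('$(LIBDIR)lib%s.$(libext)' % tir)
    return link_flags, dependencies

# For example, _sketch_tir_link_flags({'ninja': '/usr/local/lib', 'iregi': '/tmp/iregi'})
# would yield ['-L/usr/local/lib/ -lninja', '-liregi'] together with the two
# matching dependency entries (the paths here are only illustrative).
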
    def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
3302 """writes the V**** directory inside the P**** directories specified in 3303 dir_name""" 3304 3305 cwd = os.getcwd() 3306 3307 matrix_element = loop_matrix_element 3308 3309 # Create the MadLoop5_resources directory if not already existing 3310 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 3311 try: 3312 os.mkdir(dirpath) 3313 except os.error as error: 3314 logger.warning(error.strerror + " " + dirpath) 3315 3316 # Create the directory PN_xx_xxxxx in the specified path 3317 name = "V%s" % matrix_element.get('processes')[0].shell_string() 3318 dirpath = os.path.join(dir_name, name) 3319 3320 try: 3321 os.mkdir(dirpath) 3322 except os.error as error: 3323 logger.warning(error.strerror + " " + dirpath) 3324 3325 try: 3326 os.chdir(dirpath) 3327 except os.error: 3328 logger.error('Could not cd to directory %s' % dirpath) 3329 return 0 3330 3331 logger.info('Creating files in directory %s' % name) 3332 3333 # Extract number of external particles 3334 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3335 3336 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 3337 3338 # We need a link to coefs.inc from DHELAS 3339 ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'), 3340 abspath=False, cwd=None) 3341 3342 # The born matrix element, if needed 3343 filename = 'born_matrix.f' 3344 calls = self.write_bornmatrix( 3345 writers.FortranWriter(filename), 3346 matrix_element, 3347 fortran_model) 3348 3349 filename = 'nexternal.inc' 3350 self.write_nexternal_file(writers.FortranWriter(filename), 3351 nexternal, ninitial) 3352 3353 filename = 'pmass.inc' 3354 self.write_pmass_file(writers.FortranWriter(filename), 3355 matrix_element) 3356 3357 filename = 'ngraphs.inc' 3358 self.write_ngraphs_file(writers.FortranWriter(filename), 3359 len(matrix_element.get_all_amplitudes())) 3360 3361 filename = "loop_matrix.ps" 3362 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""") 3363 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 3364 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 3365 filename, 3366 model=matrix_element.get('processes')[0].get('model'), 3367 amplitude='') 3368 logger.info("Drawing loop Feynman diagrams for " + \ 3369 matrix_element.get('processes')[0].nice_string(\ 3370 print_weighted=False)) 3371 plot.draw() 3372 3373 filename = "born_matrix.ps" 3374 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3375 get('born_diagrams'), 3376 filename, 3377 model=matrix_element.get('processes')[0].\ 3378 get('model'), 3379 amplitude='') 3380 logger.info("Generating born Feynman diagrams for " + \ 3381 matrix_element.get('processes')[0].nice_string(\ 3382 print_weighted=False)) 3383 plot.draw() 3384 3385 # We also need to write the overall maximum quantities for this group 3386 # of processes in 'global_specs.inc'. 
In aMCatNLO, there is always 3387 # only one process, so this is trivial 3388 self.write_global_specs(matrix_element) 3389 open('unique_id.inc','w').write( 3390 """ integer UNIQUE_ID 3391 parameter(UNIQUE_ID=1)""") 3392 3393 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 3394 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 3395 'MadLoopParams.inc','MadLoopCommons.f','global_specs.inc'] 3396 3397 for file in linkfiles: 3398 ln('../../%s' % file) 3399 3400 os.system("ln -s ../../makefile_loop makefile") 3401 3402 # We should move to MadLoop5_resources directory from the SubProcesses 3403 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 3404 pjoin('..','MadLoop5_resources')) 3405 3406 linkfiles = ['mpmodule.mod'] 3407 3408 for file in linkfiles: 3409 ln('../../../lib/%s' % file) 3410 3411 linkfiles = ['coef_specs.inc'] 3412 3413 for file in linkfiles: 3414 ln('../../../Source/DHELAS/%s' % file) 3415 3416 # Return to original PWD 3417 os.chdir(cwd) 3418 3419 if not calls: 3420 calls = 0 3421 return calls
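
# ------------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): generate_virt_directory
# is intended to be called once per P**** subprocess directory; it creates the
# corresponding V**** directory (named after the shell string of the first
# process) inside it, links the shared MadLoop inputs from two levels up, and
# returns the number of helas calls written (0 if the directory could not be
# entered). The driver below only illustrates that calling pattern; the
# exporter, matrix elements and directory list passed in are placeholders.
# ------------------------------------------------------------------------------
def _sketch_generate_all_virt_dirs(exporter, loop_matrix_elements, fortran_model,
                                   subproc_dirs):
    """Hypothetical driver: one V**** directory per P**** directory."""
    total_calls = 0
    for loop_me, p_dir in zip(loop_matrix_elements, subproc_dirs):
        total_calls += exporter.generate_virt_directory(loop_me, fortran_model, p_dir)
    return total_calls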

    #===============================================================================
    # write_coef_specs
    #===============================================================================
    def write_coef_specs_file(self, max_loop_vertex_ranks):
3428 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 3429 non-optimized mode""" 3430 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 3431 3432 replace_dict = {} 3433 replace_dict['max_lwf_size'] = 4 3434 replace_dict['vertex_max_coefs'] = max(\ 3435 [q_polynomial.get_number_of_coefs_for_rank(n) 3436 for n in max_loop_vertex_ranks]) 3437 IncWriter=writers.FortranWriter(filename,'w') 3438 IncWriter.writelines("""INTEGER MAXLWFSIZE 3439 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 3440 INTEGER VERTEXMAXCOEFS 3441 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 3442 % replace_dict) 3443 IncWriter.close()
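
# ------------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): write_coef_specs_file
# sizes VERTEXMAXCOEFS from the largest loop-vertex rank through
# q_polynomial.get_number_of_coefs_for_rank, which is imported at the top of
# this module. The helper name and the default ranks below are hypothetical
# example values.
# ------------------------------------------------------------------------------
def _sketch_vertex_max_coefs(max_loop_vertex_ranks=(1, 2, 3)):
    """Return the coefficient count that would be written to coef_specs.inc
    for the given loop-vertex ranks."""
    return max(q_polynomial.get_number_of_coefs_for_rank(n)
               for n in max_loop_vertex_ranks)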