
Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from distutils import dir_util 
  18  import glob 
  19  import logging 
  20  import os 
  21  import re 
  22  import shutil 
  23  import subprocess 
  24  import string 
  25  import copy 
  26  import platform 
  27   
  28  import madgraph.core.color_algebra as color 
  29  import madgraph.core.helas_objects as helas_objects 
  30  import madgraph.core.base_objects as base_objects 
  31  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  32  import madgraph.fks.fks_base as fks 
  33  import madgraph.fks.fks_common as fks_common 
  34  import madgraph.iolibs.drawing_eps as draw 
  35  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  36  import madgraph.iolibs.files as files 
  37  import madgraph.various.misc as misc 
  38  import madgraph.iolibs.file_writers as writers 
  39  import madgraph.iolibs.template_files as template_files 
  40  import madgraph.iolibs.ufo_expression_parsers as parsers 
  41  import madgraph.iolibs.export_v4 as export_v4 
  42  import madgraph.loop.loop_exporters as loop_exporters 
  43  import madgraph.various.q_polynomial as q_polynomial 
  44  import madgraph.various.banner as banner_mod 
  45   
  46  import aloha.create_aloha as create_aloha 
  47   
  48  import models.write_param_card as write_param_card 
  49  import models.check_param_card as check_param_card 
  50  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  51  from madgraph.iolibs.files import cp, ln, mv 
  52   
  53  pjoin = os.path.join 
  54   
  55  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  56  logger = logging.getLogger('madgraph.export_fks') 
  57   
#=================================================================================
# Class for the export of the (non-optimized) loop process
#=================================================================================
class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    #===========================================================================
    # copy the Template in a new directory.
    #===========================================================================
    def copy_fkstemplate(self):
69 """create the directory run_name as a copy of the MadEvent 70 Template, and clean the directory 71 For now it is just the same as copy_v4template, but it will be modified 72 """ 73 mgme_dir = self.mgme_dir 74 dir_path = self.dir_path 75 clean =self.opt['clean'] 76 77 78 #First copy the full template tree if dir_path doesn't exit 79 if not os.path.isdir(dir_path): 80 if not mgme_dir: 81 raise MadGraph5Error, \ 82 "No valid MG_ME path given for MG4 run directory creation." 83 logger.info('initialize a new directory: %s' % \ 84 os.path.basename(dir_path)) 85 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 86 # distutils.dir_util.copy_tree since dir_path already exists 87 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 88 dir_path) 89 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 90 if not mgme_dir: 91 raise MadGraph5Error, \ 92 "No valid MG_ME path given for MG4 run directory creation." 93 try: 94 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 95 except IOError: 96 MG5_version = misc.get_pkg_info() 97 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 98 "5." + MG5_version['version']) 99 100 #Ensure that the Template is clean 101 if clean: 102 logger.info('remove old information in %s' % os.path.basename(dir_path)) 103 if os.environ.has_key('MADGRAPH_BASE'): 104 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 105 '--web'],cwd=dir_path) 106 else: 107 try: 108 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 109 cwd=dir_path) 110 except Exception, why: 111 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 112 % (os.path.basename(dir_path),why)) 113 #Write version info 114 MG_version = misc.get_pkg_info() 115 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 116 MG_version['version']) 117 118 # We must link the CutTools to the Library folder of the active Template 119 self.link_CutTools(dir_path) 120 121 link_tir_libs=[] 122 tir_libs=[] 123 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 124 dirpath = os.path.join(self.dir_path, 'SubProcesses') 125 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop') 126 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 127 link_tir_libs,tir_libs) 128 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 129 filename = pjoin(self.dir_path, 'Source','make_opts') 130 calls = self.write_make_opts(writers.MakefileWriter(filename), 131 link_tir_libs,tir_libs) 132 133 # Duplicate run_card and FO_analyse_card 134 for card in ['FO_analyse_card', 'shower_card']: 135 try: 136 shutil.copy(pjoin(self.dir_path, 'Cards', 137 card + '.dat'), 138 pjoin(self.dir_path, 'Cards', 139 card + '_default.dat')) 140 except IOError: 141 logger.warning("Failed to copy " + card + ".dat to default") 142 143 cwd = os.getcwd() 144 dirpath = os.path.join(self.dir_path, 'SubProcesses') 145 try: 146 os.chdir(dirpath) 147 except os.error: 148 logger.error('Could not cd to directory %s' % dirpath) 149 return 0 150 151 # We add here the user-friendly MadLoop option setter. 
152 cpfiles= ["SubProcesses/MadLoopParamReader.f", 153 "Cards/MadLoopParams.dat", 154 "SubProcesses/MadLoopParams.inc"] 155 156 for file in cpfiles: 157 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 158 os.path.join(self.dir_path, file)) 159 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 160 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 161 'Cards', 'MadLoopParams.dat')) 162 # write the output file 163 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 164 "MadLoopParams.dat")) 165 166 # We need minimal editing of MadLoopCommons.f 167 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 168 "SubProcesses","MadLoopCommons.inc")).read() 169 writer = writers.FortranWriter(os.path.join(self.dir_path, 170 "SubProcesses","MadLoopCommons.f")) 171 writer.writelines(MadLoopCommon%{ 172 'print_banner_commands':self.MadLoop_banner}) 173 writer.close() 174 175 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 176 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 177 writers.FortranWriter('cts_mpc.h')) 178 179 180 # Finally make sure to turn off MC over Hel for the default mode. 181 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat') 182 FKS_card_file = open(FKS_card_path,'r') 183 FKS_card = FKS_card_file.read() 184 FKS_card_file.close() 185 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+", 186 "#NHelForMCoverHels\n-1", FKS_card) 187 FKS_card_file = open(FKS_card_path,'w') 188 FKS_card_file.write(FKS_card) 189 FKS_card_file.close() 190 191 # Return to original PWD 192 os.chdir(cwd) 193 # Copy the different python files in the Template 194 self.copy_python_files() 195 196 # We need to create the correct open_data for the pdf 197 self.write_pdf_opendata()
    # This method lives here rather than in the optimized exporter so that the
    # same makefile_loop.inc template can be reused.  It also overloads the
    # version already defined in LoopProcessExporterFortranSA, because the path
    # of the template makefile is different.
    def write_makefile_TIR(self, writer, link_tir_libs, tir_libs, tir_include=[]):
        """ Create the file makefile_loop which links to the TIR libraries."""

        file = open(os.path.join(self.mgme_dir, 'Template', 'NLO',
                                 'SubProcesses', 'makefile_loop.inc')).read()
        replace_dict = {}
        replace_dict['link_tir_libs'] = ' '.join(link_tir_libs)
        replace_dict['tir_libs'] = ' '.join(tir_libs)
        replace_dict['dotf'] = '%.f'
        replace_dict['doto'] = '%.o'
        replace_dict['tir_include'] = ' '.join(tir_include)
        file = file % replace_dict
        if writer:
            writer.writelines(file)
        else:
            return file
    # Like write_makefile_TIR, this lives here rather than in the optimized
    # exporter so that the same make_opts.inc template can be reused.
    def write_make_opts(self, writer, link_tir_libs, tir_libs):
        """ Create the file make_opts which links to the TIR libraries."""

        file = open(os.path.join(self.mgme_dir, 'Template', 'NLO',
                                 'Source', 'make_opts.inc')).read()
        replace_dict = {}
        replace_dict['link_tir_libs'] = ' '.join(link_tir_libs)
        replace_dict['tir_libs'] = ' '.join(tir_libs)
        replace_dict['dotf'] = '%.f'
        replace_dict['doto'] = '%.o'
        file = file % replace_dict
        if writer:
            writer.writelines(file)
        else:
            return file
235 236 #=========================================================================== 237 # copy_python_files 238 #===========================================================================
239 - def copy_python_files(self):
240 """copy python files required for the Template""" 241 242 files_to_copy = [ \ 243 pjoin('interface','amcatnlo_run_interface.py'), 244 pjoin('interface','extended_cmd.py'), 245 pjoin('interface','common_run_interface.py'), 246 pjoin('interface','coloring_logging.py'), 247 pjoin('various','misc.py'), 248 pjoin('various','shower_card.py'), 249 pjoin('various','FO_analyse_card.py'), 250 pjoin('various','histograms.py'), 251 pjoin('various','banner.py'), 252 pjoin('various','cluster.py'), 253 pjoin('various','lhe_parser.py'), 254 pjoin('madevent','sum_html.py'), 255 pjoin('madevent','gen_crossxhtml.py'), 256 pjoin('iolibs','files.py'), 257 pjoin('iolibs','save_load_object.py'), 258 pjoin('iolibs','file_writers.py'), 259 pjoin('..','models','check_param_card.py'), 260 pjoin('__init__.py') 261 ] 262 cp(_file_path+'/interface/.mg5_logging.conf', 263 self.dir_path+'/bin/internal/me5_logging.conf') 264 265 for cp_file in files_to_copy: 266 cp(pjoin(_file_path,cp_file), 267 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file)))
268
269 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 270 wanted_couplings = []):
271 272 super(ProcessExporterFortranFKS,self).convert_model_to_mg4(model, 273 wanted_lorentz, wanted_couplings) 274 275 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 276 try: 277 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 278 except OSError as error: 279 pass 280 model_path = model.get('modelpath') 281 shutil.copytree(model_path, 282 pjoin(self.dir_path,'bin','internal','ufomodel'), 283 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 284 if hasattr(model, 'restrict_card'): 285 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 286 'restrict_default.dat') 287 if isinstance(model.restrict_card, check_param_card.ParamCard): 288 model.restrict_card.write(out_path) 289 else: 290 files.cp(model.restrict_card, out_path)
291 292 293 294 #=========================================================================== 295 # write_maxparticles_file 296 #===========================================================================
297 - def write_maxparticles_file(self, writer, matrix_elements):
298 """Write the maxparticles.inc file for MadEvent""" 299 300 maxparticles = max([me.get_nexternal_ninitial()[0] \ 301 for me in matrix_elements['matrix_elements']]) 302 303 lines = "integer max_particles, max_branch\n" 304 lines += "parameter (max_particles=%d) \n" % maxparticles 305 lines += "parameter (max_branch=max_particles-1)" 306 307 # Write the file 308 writer.writelines(lines) 309 310 return True
311 312 313 #=========================================================================== 314 # write_maxconfigs_file 315 #===========================================================================
316 - def write_maxconfigs_file(self, writer, matrix_elements):
317 """Write the maxconfigs.inc file for MadEvent""" 318 319 try: 320 maxconfigs = max([me.get_num_configs() \ 321 for me in matrix_elements['real_matrix_elements']]) 322 except ValueError: 323 maxconfigs = max([me.born_matrix_element.get_num_configs() \ 324 for me in matrix_elements['matrix_elements']]) 325 326 lines = "integer lmaxconfigs\n" 327 lines += "parameter (lmaxconfigs=%d)" % maxconfigs 328 329 # Write the file 330 writer.writelines(lines) 331 332 return True
333 334 335 #=============================================================================== 336 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 337 #===============================================================================
338 - def write_procdef_mg5(self, file_pos, modelname, process_str):
339 """ write an equivalent of the MG4 proc_card in order that all the Madevent 340 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 341 342 proc_card_template = template_files.mg4_proc_card.mg4_template 343 process_template = template_files.mg4_proc_card.process_template 344 process_text = '' 345 coupling = '' 346 new_process_content = [] 347 348 # First find the coupling and suppress the coupling from process_str 349 #But first ensure that coupling are define whithout spaces: 350 process_str = process_str.replace(' =', '=') 351 process_str = process_str.replace('= ', '=') 352 process_str = process_str.replace(',',' , ') 353 #now loop on the element and treat all the coupling 354 for info in process_str.split(): 355 if '=' in info: 356 coupling += info + '\n' 357 else: 358 new_process_content.append(info) 359 # Recombine the process_str (which is the input process_str without coupling 360 #info) 361 process_str = ' '.join(new_process_content) 362 363 #format the SubProcess 364 process_text += process_template.substitute({'process': process_str, \ 365 'coupling': coupling}) 366 367 text = proc_card_template.substitute({'process': process_text, 368 'model': modelname, 369 'multiparticle':''}) 370 ff = open(file_pos, 'w') 371 ff.write(text) 372 ff.close()
373 374 375 #=============================================================================== 376 # write a initial states map, useful for the fast PDF NLO interface 377 #===============================================================================
    def write_init_map(self, file_pos, initial_states):
        """ Write an initial-state process map. Each possible PDF
        combination gets a unique identifier."""

        text = ''
        for i, e in enumerate(initial_states):
            text = text + str(i + 1) + ' ' + str(len(e))
            for t in e:
                text = text + ' '
                for p in t:
                    text = text + ' ' + str(p)
            text = text + '\n'

        ff = open(file_pos, 'w')
        ff.write(text)
        ff.close()
394
    def get_ME_identifier(self, matrix_element, *args, **opts):
        """ A function returning a string uniquely identifying the matrix
        element given in argument, to be used as a prefix to all MadLoop5
        subroutines and common blocks related to it. This makes it possible to
        compile several processes into one library, as requested by the BLHA
        (Binoth Les Houches Accord) guidelines. The MadFKS design requires
        that there is no process prefix, hence the empty string."""

        return ''
404 405 #=============================================================================== 406 # write_coef_specs 407 #===============================================================================
    def write_coef_specs_file(self, virt_me_list):
        """writes the coef_specs.inc in the DHELAS folder. Should not be called
        in the non-optimized mode"""
        raise fks_common.FKSProcessError(
                "write_coef_specs should be called only in the loop-optimized mode")
413 414 415 #=============================================================================== 416 # generate_directories_fks 417 #===============================================================================
418 - def generate_directories_fks(self, matrix_element, fortran_model, me_number, 419 me_ntot, path=os.getcwd(),OLP='MadLoop'):
420 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 421 including the necessary matrix.f and various helper files""" 422 proc = matrix_element.born_matrix_element['processes'][0] 423 424 if not self.model: 425 self.model = matrix_element.get('processes')[0].get('model') 426 427 cwd = os.getcwd() 428 try: 429 os.chdir(path) 430 except OSError, error: 431 error_msg = "The directory %s should exist in order to be able " % path + \ 432 "to \"export\" in it. If you see this error message by " + \ 433 "typing the command \"export\" please consider to use " + \ 434 "instead the command \"output\". " 435 raise MadGraph5Error, error_msg 436 437 calls = 0 438 439 self.fksdirs = [] 440 #first make and cd the direcrory corresponding to the born process: 441 borndir = "P%s" % \ 442 (matrix_element.get('processes')[0].shell_string()) 443 os.mkdir(borndir) 444 os.chdir(borndir) 445 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 446 447 ## write the files corresponding to the born process in the P* directory 448 self.generate_born_fks_files(matrix_element, 449 fortran_model, me_number, path) 450 451 # With NJET you want to generate the order file per subprocess and most 452 # likely also generate it for each subproc. 453 if OLP=='NJET': 454 filename = 'OLE_order.lh' 455 self.write_lh_order(filename, matrix_element, OLP) 456 457 if matrix_element.virt_matrix_element: 458 calls += self.generate_virt_directory( \ 459 matrix_element.virt_matrix_element, \ 460 fortran_model, \ 461 os.path.join(path, borndir)) 462 463 #write the infortions for the different real emission processes 464 465 self.write_real_matrix_elements(matrix_element, fortran_model) 466 467 self.write_pdf_calls(matrix_element, fortran_model) 468 469 filename = 'nFKSconfigs.inc' 470 self.write_nfksconfigs_file(writers.FortranWriter(filename), 471 matrix_element, 472 fortran_model) 473 474 filename = 'iproc.dat' 475 self.write_iproc_file(writers.FortranWriter(filename), 476 me_number) 477 478 filename = 'fks_info.inc' 479 self.write_fks_info_file(writers.FortranWriter(filename), 480 matrix_element, 481 fortran_model) 482 483 filename = 'leshouche_info.dat' 484 nfksconfs,maxproc,maxflow,nexternal=\ 485 self.write_leshouche_info_file(filename,matrix_element) 486 487 # if no corrections are generated ([LOonly] mode), get 488 # these variables from the born 489 if nfksconfs == maxproc == maxflow == 0: 490 nfksconfs = 1 491 (dummylines, maxproc, maxflow) = self.get_leshouche_lines( 492 matrix_element.born_matrix_element, 1) 493 494 filename = 'leshouche_decl.inc' 495 self.write_leshouche_info_declarations( 496 writers.FortranWriter(filename), 497 nfksconfs,maxproc,maxflow,nexternal, 498 fortran_model) 499 500 filename = 'configs_and_props_info.dat' 501 nconfigs,max_leg_number,nfksconfs=self.write_configs_and_props_info_file( 502 filename, 503 matrix_element) 504 505 filename = 'configs_and_props_decl.inc' 506 self.write_configs_and_props_info_declarations( 507 writers.FortranWriter(filename), 508 nconfigs,max_leg_number,nfksconfs, 509 fortran_model) 510 511 filename = 'real_from_born_configs.inc' 512 self.write_real_from_born_configs( 513 writers.FortranWriter(filename), 514 matrix_element, 515 fortran_model) 516 517 filename = 'ngraphs.inc' 518 self.write_ngraphs_file(writers.FortranWriter(filename), 519 nconfigs) 520 521 #write the wrappers 522 filename = 'real_me_chooser.f' 523 self.write_real_me_wrapper(writers.FortranWriter(filename), 524 matrix_element, 525 fortran_model) 526 527 filename = 
'parton_lum_chooser.f' 528 self.write_pdf_wrapper(writers.FortranWriter(filename), 529 matrix_element, 530 fortran_model) 531 532 filename = 'get_color.f' 533 self.write_colors_file(writers.FortranWriter(filename), 534 matrix_element) 535 536 filename = 'nexternal.inc' 537 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 538 self.write_nexternal_file(writers.FortranWriter(filename), 539 nexternal, ninitial) 540 541 filename = 'pmass.inc' 542 try: 543 self.write_pmass_file(writers.FortranWriter(filename), 544 matrix_element.real_processes[0].matrix_element) 545 except IndexError: 546 self.write_pmass_file(writers.FortranWriter(filename), 547 matrix_element.born_matrix_element) 548 549 #draw the diagrams 550 self.draw_feynman_diagrams(matrix_element) 551 552 linkfiles = ['BinothLHADummy.f', 553 'check_poles.f', 554 'MCmasses_HERWIG6.inc', 555 'MCmasses_HERWIGPP.inc', 556 'MCmasses_PYTHIA6Q.inc', 557 'MCmasses_PYTHIA6PT.inc', 558 'MCmasses_PYTHIA8.inc', 559 'add_write_info.f', 560 'coupl.inc', 561 'cuts.f', 562 'FKS_params.dat', 563 'initial_states_map.dat', 564 'OLE_order.olc', 565 'FKSParams.inc', 566 'FKSParamReader.f', 567 'cuts.inc', 568 'unlops.inc', 569 'pythia_unlops.f', 570 'driver_mintMC.f', 571 'driver_mintFO.f', 572 'driver_vegas.f', 573 'appl_interface.cc', 574 'appl_interface_dummy.f', 575 'appl_common.inc', 576 'reweight_appl.inc', 577 'driver_reweight.f', 578 'fastjetfortran_madfks_core.cc', 579 'fastjetfortran_madfks_full.cc', 580 'fjcore.cc', 581 'fastjet_wrapper.f', 582 'fjcore.hh', 583 'fks_Sij.f', 584 'fks_powers.inc', 585 'fks_singular.f', 586 'veto_xsec.f', 587 'veto_xsec.inc', 588 'c_weight.inc', 589 'fks_inc_chooser.f', 590 'leshouche_inc_chooser.f', 591 'configs_and_props_inc_chooser.f', 592 'genps.inc', 593 'genps_fks.f', 594 'boostwdir2.f', 595 'madfks_mcatnlo.inc', 596 'open_output_files.f', 597 'open_output_files_dummy.f', 598 'HwU_dummy.f', 599 'madfks_plot.f', 600 'analysis_dummy.f', 601 'mint-integrator2.f', 602 'MC_integer.f', 603 'mint.inc', 604 'montecarlocounter.f', 605 'q_es.inc', 606 'recluster.cc', 607 'Boosts.h', 608 'reweight.inc', 609 'reweight0.inc', 610 'reweight1.inc', 611 'reweightNLO.inc', 612 'reweight_all.inc', 613 'reweight_events.f', 614 'reweight_xsec.f', 615 'reweight_xsec_events.f', 616 'reweight_xsec_events_pdf_dummy.f', 617 'iproc_map.f', 618 'run.inc', 619 'run_card.inc', 620 'setcuts.f', 621 'setscales.f', 622 'symmetry_fks_test_MC.f', 623 'symmetry_fks_test_ME.f', 624 'symmetry_fks_test_Sij.f', 625 'symmetry_fks_v3.f', 626 'trapfpe.c', 627 'vegas2.for', 628 'write_ajob.f', 629 'handling_lhe_events.f', 630 'write_event.f', 631 'fill_MC_mshell.f', 632 'maxparticles.inc', 633 'message.inc', 634 'initcluster.f', 635 'cluster.inc', 636 'cluster.f', 637 'reweight.f', 638 'randinit', 639 'sudakov.inc', 640 'maxconfigs.inc', 641 'timing_variables.inc'] 642 643 for file in linkfiles: 644 ln('../' + file , '.') 645 os.system("ln -s ../../Cards/param_card.dat .") 646 647 #copy the makefile 648 os.system("ln -s ../makefile_fks_dir ./makefile") 649 if matrix_element.virt_matrix_element: 650 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 651 elif OLP!='MadLoop': 652 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 653 else: 654 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 655 656 657 #import nexternal/leshouches in Source 658 ln('nexternal.inc', '../../Source', log=False) 659 ln('born_leshouche.inc', '../../Source', log=False) 660 661 662 # Return to SubProcesses dir 663 os.chdir(os.path.pardir) 664 # Add subprocess to 
subproc.mg 665 filename = 'subproc.mg' 666 files.append_to_file(filename, 667 self.write_subproc, 668 borndir) 669 670 671 os.chdir(cwd) 672 # Generate info page 673 gen_infohtml.make_info_html_nlo(self.dir_path) 674 675 676 return calls
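    # For orientation: each P* directory filled by the method above ends up
    # containing born.f and the born_*.inc includes, one matrix_<i>.f /
    # parton_lum_<i>.f pair per real-emission process, the data files
    # fks_info.inc, leshouche_info.dat and configs_and_props_info.dat, the
    # wrappers real_me_chooser.f and parton_lum_chooser.f, symbolic links to
    # the common files listed in 'linkfiles' above and, when a virtual matrix
    # element is present, a V* subdirectory with the MadLoop code.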
677 678 #=========================================================================== 679 # create the run_card 680 #===========================================================================
681 - def create_run_card(self, matrix_elements, history):
682 """ """ 683 684 run_card = banner_mod.RunCardNLO() 685 686 processes = [me.get('processes') 687 for me in matrix_elements['matrix_elements']] 688 689 run_card.create_default_for_process(self.proc_characteristic, 690 history, 691 processes) 692 693 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 694 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
695 696
697 - def finalize_fks_directory(self, matrix_elements, history, makejpg = False, 698 online = False, 699 compiler_dict={'fortran': 'gfortran', 'cpp': 'g++'}, 700 output_dependencies = 'external', MG5DIR = None):
701 """Finalize FKS directory by creating jpeg diagrams, html 702 pages,proc_card_mg5.dat and madevent.tar.gz.""" 703 704 self.proc_characteristic['grouped_matrix'] = False 705 706 self.create_run_card(matrix_elements, history) 707 # modelname = self.model.get('name') 708 # if modelname == 'mssm' or modelname.startswith('mssm-'): 709 # param_card = os.path.join(self.dir_path, 'Cards','param_card.dat') 710 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 711 # check_param_card.convert_to_mg5card(param_card, mg5_param) 712 # check_param_card.check_valid_param_card(mg5_param) 713 714 # # write the model functions get_mass/width_from_id 715 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f') 716 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc') 717 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model) 718 719 # # Write maxconfigs.inc based on max of ME's/subprocess groups 720 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 721 self.write_maxconfigs_file(writers.FortranWriter(filename), 722 matrix_elements) 723 724 # # Write maxparticles.inc based on max of ME's/subprocess groups 725 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 726 self.write_maxparticles_file(writers.FortranWriter(filename), 727 matrix_elements) 728 729 # Touch "done" file 730 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 731 732 # Check for compiler 733 fcompiler_chosen = self.set_fortran_compiler(compiler_dict) 734 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 735 736 old_pos = os.getcwd() 737 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 738 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 739 proc[0] == 'P'] 740 741 devnull = os.open(os.devnull, os.O_RDWR) 742 # Convert the poscript in jpg files (if authorize) 743 if makejpg: 744 logger.info("Generate jpeg diagrams") 745 for Pdir in P_dir_list: 746 os.chdir(Pdir) 747 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 748 stdout = devnull) 749 os.chdir(os.path.pardir) 750 # 751 logger.info("Generate web pages") 752 # Create the WebPage using perl script 753 754 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 755 stdout = devnull) 756 757 os.chdir(os.path.pardir) 758 # 759 # obj = gen_infohtml.make_info_html(self.dir_path) 760 # [mv(name, './HTML/') for name in os.listdir('.') if \ 761 # (name.endswith('.html') or name.endswith('.jpg')) and \ 762 # name != 'index.html'] 763 # if online: 764 # nb_channel = obj.rep_rule['nb_gen_diag'] 765 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 766 767 # Write command history as proc_card_mg5 768 if os.path.isdir('Cards'): 769 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 770 history.write(output_file) 771 772 # Duplicate run_card and FO_analyse_card 773 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 774 try: 775 shutil.copy(pjoin(self.dir_path, 'Cards', 776 card + '.dat'), 777 pjoin(self.dir_path, 'Cards', 778 card + '_default.dat')) 779 except IOError: 780 logger.warning("Failed to copy " + card + ".dat to default") 781 782 783 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 784 stdout = devnull) 785 786 # Run "make" to generate madevent.tar.gz file 787 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 788 if 
os.path.exists('amcatnlo.tar.gz'): 789 os.remove('amcatnlo.tar.gz') 790 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 791 stdout = devnull) 792 # 793 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 794 stdout = devnull) 795 796 #return to the initial dir 797 os.chdir(old_pos) 798 799 # Setup stdHep 800 # Find the correct fortran compiler 801 base_compiler= ['FC=g77','FC=gfortran'] 802 803 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 804 805 if output_dependencies == 'external': 806 # check if stdhep has to be compiled (only the first time) 807 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 808 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')): 809 if 'FC' not in os.environ or not os.environ['FC']: 810 path = os.path.join(StdHep_path, 'src', 'make_opts') 811 text = open(path).read() 812 for base in base_compiler: 813 text = text.replace(base,'FC=%s' % fcompiler_chosen) 814 open(path, 'w').writelines(text) 815 816 logger.info('Compiling StdHEP. This has to be done only once.') 817 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 818 logger.info('Done.') 819 #then link the libraries in the exported dir 820 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 821 pjoin(self.dir_path, 'MCatNLO', 'lib')) 822 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 823 pjoin(self.dir_path, 'MCatNLO', 'lib')) 824 825 elif output_dependencies == 'internal': 826 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 827 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 828 # Create the links to the lib folder 829 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 830 for file in linkfiles: 831 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 832 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 833 if 'FC' not in os.environ or not os.environ['FC']: 834 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 835 text = open(path).read() 836 for base in base_compiler: 837 text = text.replace(base,'FC=%s' % fcompiler_chosen) 838 open(path, 'w').writelines(text) 839 # To avoid compiler version conflicts, we force a clean here 840 misc.compile(['clean'],cwd = StdHEP_internal_path) 841 842 elif output_dependencies == 'environment_paths': 843 # Here the user chose to define the dependencies path in one of 844 # his environmental paths 845 libStdHep = misc.which_lib('libstdhep.a') 846 libFmcfio = misc.which_lib('libFmcfio.a') 847 if not libStdHep is None and not libFmcfio is None: 848 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 849 os.path.dirname(libStdHep)) 850 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 851 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 852 else: 853 raise InvalidCmd("Could not find the location of the files"+\ 854 " libstdhep.a and libFmcfio.a in you environment paths.") 855 856 else: 857 raise MadGraph5Error, 'output_dependencies option %s not recognized'\ 858 %output_dependencies
859 860
    def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
        """Writes the real_from_born_configs.inc file, which contains the
        mapping from a given born configuration (used e.g. in the
        multi-channel phase-space integration) to the corresponding
        real-emission diagram, i.e. the real-emission diagram in which the
        combined parton ij is split into i_fks and j_fks."""
        lines = []
        lines2 = []
        max_links = 0
        born_me = matrix_element.born_matrix_element
        for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
            iFKS = iFKS + 1
            links = conf['fks_info']['rb_links']
            max_links = max(max_links, len(links))
            for i, diags in enumerate(links):
                if not i == diags['born_conf']:
                    print links
                    raise MadGraph5Error, "born_conf should be canonically ordered"
            real_configs = ', '.join(['%d' % int(diags['real_conf'] + 1) for diags in links])
            lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
                             % (iFKS, len(links), real_configs))

        lines2.append("integer irfbc")
        lines2.append("integer real_from_born_conf(%d,%d)" \
                          % (max_links, len(matrix_element.get_fks_info_list())))
        # Write the file
        writer.writelines(lines2 + lines)
889 890 891 #=============================================================================== 892 # write_get_mass_width_file 893 #=============================================================================== 894 #test written
895 - def write_get_mass_width_file(self, writer, makeinc, model):
896 """Write the get_mass_width_file.f file for MG4. 897 Also update the makeinc.inc file 898 """ 899 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero'] 900 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero'] 901 902 iflines_mass = '' 903 iflines_width = '' 904 905 for i, part in enumerate(mass_particles): 906 if i == 0: 907 ifstring = 'if' 908 else: 909 ifstring = 'else if' 910 if part['self_antipart']: 911 iflines_mass += '%s (id.eq.%d) then\n' % \ 912 (ifstring, part.get_pdg_code()) 913 else: 914 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 915 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 916 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass') 917 918 for i, part in enumerate(width_particles): 919 if i == 0: 920 ifstring = 'if' 921 else: 922 ifstring = 'else if' 923 if part['self_antipart']: 924 iflines_width += '%s (id.eq.%d) then\n' % \ 925 (ifstring, part.get_pdg_code()) 926 else: 927 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 928 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 929 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width') 930 931 replace_dict = {'iflines_mass' : iflines_mass, 932 'iflines_width' : iflines_width} 933 934 file = open(os.path.join(_file_path, \ 935 'iolibs/template_files/get_mass_width_fcts.inc')).read() 936 file = file % replace_dict 937 938 # Write the file 939 writer.writelines(file) 940 941 # update the makeinc 942 makeinc_content = open(makeinc).read() 943 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ') 944 open(makeinc, 'w').write(makeinc_content) 945 946 return
947 948
949 - def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model):
950 """writes the declarations for the variables relevant for configs_and_props 951 """ 952 lines = [] 953 lines.append("integer ifr,lmaxconfigs_used,max_branch_used") 954 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig) 955 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number) 956 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs) 957 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 958 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 959 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 960 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 961 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 962 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 963 964 writer.writelines(lines)
965 966
967 - def write_configs_and_props_info_file(self, filename, matrix_element):
968 """writes the configs_and_props_info.inc file that cointains 969 all the (real-emission) configurations (IFOREST) as well as 970 the masses and widths of intermediate particles""" 971 lines = [] 972 lines.append("# C -> MAPCONFIG_D") 973 lines.append("# F/D -> IFOREST_D") 974 lines.append("# S -> SPROP_D") 975 lines.append("# T -> TPRID_D") 976 lines.append("# M -> PMASS_D/PWIDTH_D") 977 lines.append("# P -> POW_D") 978 lines2 = [] 979 nconfs = len(matrix_element.get_fks_info_list()) 980 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 981 982 max_iconfig=0 983 max_leg_number=0 984 985 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 986 iFKS=iFKS+1 987 iconfig = 0 988 s_and_t_channels = [] 989 mapconfigs = [] 990 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element 991 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams') 992 model = fks_matrix_element.get('base_amplitude').get('process').get('model') 993 minvert = min([max([len(vert.get('legs')) for vert in \ 994 diag.get('vertices')]) for diag in base_diagrams]) 995 996 lines.append("# ") 997 lines.append("# nFKSprocess %d" % iFKS) 998 for idiag, diag in enumerate(base_diagrams): 999 if any([len(vert.get('legs')) > minvert for vert in 1000 diag.get('vertices')]): 1001 # Only 3-vertices allowed in configs.inc 1002 continue 1003 iconfig = iconfig + 1 1004 helas_diag = fks_matrix_element.get('diagrams')[idiag] 1005 mapconfigs.append(helas_diag.get('number')) 1006 lines.append("# Diagram %d for nFKSprocess %d" % \ 1007 (helas_diag.get('number'),iFKS)) 1008 # Correspondance between the config and the amplitudes 1009 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1010 helas_diag.get('number'))) 1011 1012 # Need to reorganize the topology so that we start with all 1013 # final state external particles and work our way inwards 1014 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1015 get_s_and_t_channels(ninitial, model, 990) 1016 1017 s_and_t_channels.append([schannels, tchannels]) 1018 1019 # Write out propagators for s-channel and t-channel vertices 1020 allchannels = schannels 1021 if len(tchannels) > 1: 1022 # Write out tchannels only if there are any non-trivial ones 1023 allchannels = schannels + tchannels 1024 1025 for vert in allchannels: 1026 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1027 last_leg = vert.get('legs')[-1] 1028 lines.append("F %4d %4d %4d %4d" % \ 1029 (iFKS,last_leg.get('number'), iconfig, len(daughters))) 1030 for d in daughters: 1031 lines.append("D %4d" % d) 1032 if vert in schannels: 1033 lines.append("S %4d %4d %4d %10d" % \ 1034 (iFKS,last_leg.get('number'), iconfig, 1035 last_leg.get('id'))) 1036 elif vert in tchannels[:-1]: 1037 lines.append("T %4d %4d %4d %10d" % \ 1038 (iFKS,last_leg.get('number'), iconfig, 1039 abs(last_leg.get('id')))) 1040 1041 # update what the array sizes (mapconfig,iforest,etc) will be 1042 max_leg_number = min(max_leg_number,last_leg.get('number')) 1043 max_iconfig = max(max_iconfig,iconfig) 1044 1045 # Write out number of configs 1046 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1047 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1048 1049 # write the props.inc information 1050 lines2.append("# ") 1051 particle_dict = fks_matrix_element.get('processes')[0].get('model').\ 1052 get('particle_dict') 1053 1054 for iconf, configs in enumerate(s_and_t_channels): 1055 for vertex in configs[0] + configs[1][:-1]: 1056 leg = vertex.get('legs')[-1] 1057 if 
leg.get('id') not in particle_dict: 1058 # Fake propagator used in multiparticle vertices 1059 pow_part = 0 1060 else: 1061 particle = particle_dict[leg.get('id')] 1062 1063 pow_part = 1 + int(particle.is_boson()) 1064 1065 lines2.append("M %4d %4d %4d %10d " % \ 1066 (iFKS,leg.get('number'), iconf + 1, leg.get('id'))) 1067 lines2.append("P %4d %4d %4d %4d " % \ 1068 (iFKS,leg.get('number'), iconf + 1, pow_part)) 1069 1070 # Write the file 1071 open(filename,'w').write('\n'.join(lines+lines2)) 1072 1073 return max_iconfig, max_leg_number, nconfs
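    # Illustrative lines of configs_and_props_info.dat (hypothetical values),
    # following the legend written at the top of the file: a 'C' line maps
    # config 1 of nFKSprocess 1 onto diagram 1, an 'F' line declares internal
    # leg -1 of that config with 2 daughters listed on 'D' lines, and an 'S'
    # line records the s-channel propagator id:
    #     C    1    1    1
    #     F    1   -1    1    2
    #     D    3
    #     D    4
    #     S    1   -1    1         23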
1074 1075
1076 - def write_leshouche_info_declarations(self, writer, nfksconfs, 1077 maxproc, maxflow, nexternal, fortran_model):
1078 """writes the declarations for the variables relevant for leshouche_info 1079 """ 1080 lines = [] 1081 lines.append('integer maxproc_used, maxflow_used') 1082 lines.append('parameter (maxproc_used = %d)' % maxproc) 1083 lines.append('parameter (maxflow_used = %d)' % maxflow) 1084 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal)) 1085 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal)) 1086 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal)) 1087 1088 writer.writelines(lines)
1089 1090
1091 - def write_leshouche_info_file(self, filename, matrix_element):
1092 """writes the leshouche_info.inc file which contains 1093 the LHA informations for all the real emission processes 1094 """ 1095 lines = [] 1096 lines.append("# I -> IDUP_D") 1097 lines.append("# M -> MOTHUP_D") 1098 lines.append("# C -> ICOLUP_D") 1099 nfksconfs = len(matrix_element.get_fks_info_list()) 1100 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1101 1102 maxproc = 0 1103 maxflow = 0 1104 for i, conf in enumerate(matrix_element.get_fks_info_list()): 1105 # for i, real in enumerate(matrix_element.real_processes): 1106 (newlines, nprocs, nflows) = self.get_leshouche_lines( 1107 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1) 1108 lines.extend(newlines) 1109 maxproc = max(maxproc, nprocs) 1110 maxflow = max(maxflow, nflows) 1111 1112 # Write the file 1113 open(filename,'w').write('\n'.join(lines)) 1114 1115 return nfksconfs, maxproc, maxflow, nexternal
1116 1117
1118 - def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
1119 """writes the wrapper which allows to chose among the different real matrix elements""" 1120 1121 file = \ 1122 """double precision function dlum() 1123 implicit none 1124 include 'timing_variables.inc' 1125 integer nfksprocess 1126 common/c_nfksprocess/nfksprocess 1127 call cpu_time(tbefore) 1128 """ 1129 if matrix_element.real_processes: 1130 for n, info in enumerate(matrix_element.get_fks_info_list()): 1131 file += \ 1132 """if (nfksprocess.eq.%(n)d) then 1133 call dlum_%(n_me)d(dlum) 1134 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1135 file += \ 1136 """ 1137 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess 1138 stop 1139 endif 1140 call cpu_time(tAfter) 1141 tPDF = tPDF + (tAfter-tBefore) 1142 return 1143 end 1144 """ 1145 else: 1146 file+= \ 1147 """call dlum_0(dlum) 1148 call cpu_time(tAfter) 1149 tPDF = tPDF + (tAfter-tBefore) 1150 return 1151 end 1152 """ 1153 1154 # Write the file 1155 writer.writelines(file) 1156 return 0
1157 1158
1159 - def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
1160 """writes the wrapper which allows to chose among the different real matrix elements""" 1161 1162 file = \ 1163 """subroutine smatrix_real(p, wgt) 1164 implicit none 1165 include 'nexternal.inc' 1166 double precision p(0:3, nexternal) 1167 double precision wgt 1168 integer nfksprocess 1169 common/c_nfksprocess/nfksprocess 1170 """ 1171 for n, info in enumerate(matrix_element.get_fks_info_list()): 1172 file += \ 1173 """if (nfksprocess.eq.%(n)d) then 1174 call smatrix_%(n_me)d(p, wgt) 1175 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1176 1177 if matrix_element.real_processes: 1178 file += \ 1179 """ 1180 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess 1181 stop 1182 endif 1183 return 1184 end 1185 """ 1186 else: 1187 file += \ 1188 """ 1189 wgt=0d0 1190 return 1191 end 1192 """ 1193 # Write the file 1194 writer.writelines(file) 1195 return 0
1196 1197
1198 - def draw_feynman_diagrams(self, matrix_element):
1199 """Create the ps files containing the feynman diagrams for the born process, 1200 as well as for all the real emission processes""" 1201 1202 filename = 'born.ps' 1203 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\ 1204 get('base_amplitude').get('diagrams'), 1205 filename, 1206 model=matrix_element.born_matrix_element.\ 1207 get('processes')[0].get('model'), 1208 amplitude=True, diagram_type='born') 1209 plot.draw() 1210 1211 for n, fksreal in enumerate(matrix_element.real_processes): 1212 filename = 'matrix_%d.ps' % (n + 1) 1213 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\ 1214 get('base_amplitude').get('diagrams'), 1215 filename, 1216 model=fksreal.matrix_element.\ 1217 get('processes')[0].get('model'), 1218 amplitude=True, diagram_type='real') 1219 plot.draw()
1220 1221
1222 - def write_real_matrix_elements(self, matrix_element, fortran_model):
1223 """writes the matrix_i.f files which contain the real matrix elements""" 1224 1225 for n, fksreal in enumerate(matrix_element.real_processes): 1226 filename = 'matrix_%d.f' % (n + 1) 1227 self.write_matrix_element_fks(writers.FortranWriter(filename), 1228 fksreal.matrix_element, n + 1, 1229 fortran_model)
1230
    def write_pdf_calls(self, matrix_element, fortran_model):
        """writes the parton_lum_i.f files which contain the parton luminosity
        calls for the real-emission processes. If no real-emission process
        exists, write the one for the born"""

        if matrix_element.real_processes:
            for n, fksreal in enumerate(matrix_element.real_processes):
                filename = 'parton_lum_%d.f' % (n + 1)
                self.write_pdf_file(writers.FortranWriter(filename),
                                    fksreal.matrix_element, n + 1,
                                    fortran_model)
        else:
            filename = 'parton_lum_0.f'
            self.write_pdf_file(writers.FortranWriter(filename),
                                matrix_element.born_matrix_element, 0,
                                fortran_model)
1246 1247
1248 - def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
1249 """generates the files needed for the born amplitude in the P* directory, which will 1250 be needed by the P* directories""" 1251 pathdir = os.getcwd() 1252 1253 filename = 'born.f' 1254 calls_born, ncolor_born = \ 1255 self.write_born_fks(writers.FortranWriter(filename),\ 1256 matrix_element, 1257 fortran_model) 1258 1259 filename = 'born_hel.f' 1260 self.write_born_hel(writers.FortranWriter(filename),\ 1261 matrix_element, 1262 fortran_model) 1263 1264 1265 filename = 'born_conf.inc' 1266 nconfigs, mapconfigs, s_and_t_channels = \ 1267 self.write_configs_file( 1268 writers.FortranWriter(filename), 1269 matrix_element.born_matrix_element, 1270 fortran_model) 1271 1272 filename = 'born_props.inc' 1273 self.write_props_file(writers.FortranWriter(filename), 1274 matrix_element.born_matrix_element, 1275 fortran_model, 1276 s_and_t_channels) 1277 1278 filename = 'born_decayBW.inc' 1279 self.write_decayBW_file(writers.FortranWriter(filename), 1280 s_and_t_channels) 1281 1282 filename = 'born_leshouche.inc' 1283 nflows = self.write_leshouche_file(writers.FortranWriter(filename), 1284 matrix_element.born_matrix_element, 1285 fortran_model) 1286 1287 filename = 'born_nhel.inc' 1288 self.write_born_nhel_file(writers.FortranWriter(filename), 1289 matrix_element.born_matrix_element, nflows, 1290 fortran_model, 1291 ncolor_born) 1292 1293 filename = 'born_ngraphs.inc' 1294 self.write_ngraphs_file(writers.FortranWriter(filename), 1295 matrix_element.born_matrix_element.get_number_of_amplitudes()) 1296 1297 filename = 'ncombs.inc' 1298 self.write_ncombs_file(writers.FortranWriter(filename), 1299 matrix_element.born_matrix_element, 1300 fortran_model) 1301 1302 filename = 'born_maxamps.inc' 1303 maxamps = len(matrix_element.get('diagrams')) 1304 maxflows = ncolor_born 1305 self.write_maxamps_file(writers.FortranWriter(filename), 1306 maxamps, 1307 maxflows, 1308 max([len(matrix_element.get('processes')) for me in \ 1309 matrix_element.born_matrix_element]),1) 1310 1311 filename = 'config_subproc_map.inc' 1312 self.write_config_subproc_map_file(writers.FortranWriter(filename), 1313 s_and_t_channels) 1314 1315 filename = 'coloramps.inc' 1316 self.write_coloramps_file(writers.FortranWriter(filename), 1317 mapconfigs, 1318 matrix_element.born_matrix_element, 1319 fortran_model) 1320 1321 #write the sborn_sf.f and the b_sf_files 1322 filename = ['sborn_sf.f', 'sborn_sf_dum.f'] 1323 for i, links in enumerate([matrix_element.color_links, []]): 1324 self.write_sborn_sf(writers.FortranWriter(filename[i]), 1325 links, 1326 fortran_model) 1327 self.color_link_files = [] 1328 for i in range(len(matrix_element.color_links)): 1329 filename = 'b_sf_%3.3d.f' % (i + 1) 1330 self.color_link_files.append(filename) 1331 self.write_b_sf_fks(writers.FortranWriter(filename), 1332 matrix_element, i, 1333 fortran_model)
1334
1335 - def generate_virtuals_from_OLP(self,FKSHMultiproc,export_path, OLP):
1336 """Generates the library for computing the loop matrix elements 1337 necessary for this process using the OLP specified.""" 1338 1339 # Start by writing the BLHA order file 1340 virtual_path = pjoin(export_path,'OLP_virtuals') 1341 if not os.path.exists(virtual_path): 1342 os.makedirs(virtual_path) 1343 filename = os.path.join(virtual_path,'OLE_order.lh') 1344 self.write_lh_order(filename, FKSHMultiproc.get('matrix_elements'),OLP) 1345 1346 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\ 1347 'Please check the virt_generation.log file in %s.'\ 1348 %str(pjoin(virtual_path,'virt_generation.log')) 1349 1350 # Perform some tasks specific to certain OLP's 1351 if OLP=='GoSam': 1352 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1353 'GoSam','makevirt'),pjoin(virtual_path,'makevirt')) 1354 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1355 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc')) 1356 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path) 1357 # Now generate the process 1358 logger.info('Generating the loop matrix elements with %s...'%OLP) 1359 virt_generation_log = \ 1360 open(pjoin(virtual_path,'virt_generation.log'), 'w') 1361 retcode = subprocess.call(['./makevirt'],cwd=virtual_path, 1362 stdout=virt_generation_log, stderr=virt_generation_log) 1363 virt_generation_log.close() 1364 # Check what extension is used for the share libraries on this system 1365 possible_other_extensions = ['so','dylib'] 1366 shared_lib_ext='so' 1367 for ext in possible_other_extensions: 1368 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib', 1369 'libgolem_olp.'+ext)): 1370 shared_lib_ext = ext 1371 1372 # Now check that everything got correctly generated 1373 files_to_check = ['olp_module.mod',str(pjoin('lib', 1374 'libgolem_olp.'+shared_lib_ext))] 1375 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path, 1376 'Virtuals',f)) for f in files_to_check]): 1377 raise fks_common.FKSProcessError(fail_msg) 1378 # link the library to the lib folder 1379 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext), 1380 pjoin(export_path,'lib')) 1381 1382 # Specify in make_opts the right library necessitated by the OLP 1383 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read() 1384 make_opts=open(pjoin(export_path,'Source','make_opts'),'w') 1385 if OLP=='GoSam': 1386 if platform.system().lower()=='darwin': 1387 # On mac the -rpath is not supported and the path of the dynamic 1388 # library is automatically wired in the executable 1389 make_opts_content=make_opts_content.replace('libOLP=', 1390 'libOLP=-Wl,-lgolem_olp') 1391 else: 1392 # On other platforms the option , -rpath= path to libgolem.so is necessary 1393 # Using a relative path is not ideal because the file libgolem.so is not 1394 # copied on the worker nodes. 1395 # make_opts_content=make_opts_content.replace('libOLP=', 1396 # 'libOLP=-Wl,-rpath=../$(LIBDIR) -lgolem_olp') 1397 # Using the absolute path is working in the case where the disk of the 1398 # front end machine is mounted on all worker nodes as well. 
1399 make_opts_content=make_opts_content.replace('libOLP=', 1400 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp') 1401 1402 1403 make_opts.write(make_opts_content) 1404 make_opts.close() 1405 1406 # A priori this is generic to all OLP's 1407 1408 # Parse the contract file returned and propagate the process label to 1409 # the include of the BinothLHA.f file 1410 proc_to_label = self.parse_contract_file( 1411 pjoin(virtual_path,'OLE_order.olc')) 1412 1413 self.write_BinothLHA_inc(FKSHMultiproc,proc_to_label,\ 1414 pjoin(export_path,'SubProcesses')) 1415 1416 # Link the contract file to within the SubProcess directory 1417 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1418
    def write_BinothLHA_inc(self, FKSHMultiproc, proc_to_label, SubProcPath):
        """ Write the file Binoth_proc.inc in each SubProcess directory so as
        to provide the right process_label to use in the OLP call to get the
        loop matrix element evaluation. proc_to_label is a dictionary in the
        format returned by the function parse_contract_file."""

        for matrix_element in FKSHMultiproc.get('matrix_elements'):
            proc = matrix_element.get('processes')[0]
            name = "P%s" % proc.shell_string()
            proc_pdgs = (tuple([leg.get('id') for leg in proc.get('legs') if \
                                not leg.get('state')]),
                         tuple([leg.get('id') for leg in proc.get('legs') if \
                                leg.get('state')]))
            incFile = open(pjoin(SubProcPath, name, 'Binoth_proc.inc'), 'w')
            try:
                incFile.write(
"""    INTEGER PROC_LABEL
    PARAMETER (PROC_LABEL=%d)""" % (proc_to_label[proc_pdgs]))
            except KeyError:
                raise fks_common.FKSProcessError('Could not find the target' + \
                    ' process %s > %s in' % (str(proc_pdgs[0]), str(proc_pdgs[1])) + \
                    ' the proc_to_label argument in write_BinothLHA_inc.')
            incFile.close()
1442
1443 - def parse_contract_file(self, contract_file_path):
1444 """ Parses the BLHA contract file, make sure all parameters could be 1445 understood by the OLP and return a mapping of the processes (characterized 1446 by the pdg's of the initial and final state particles) to their process 1447 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 1448 """ 1449 1450 proc_def_to_label = {} 1451 1452 if not os.path.exists(contract_file_path): 1453 raise fks_common.FKSProcessError('Could not find the contract file'+\ 1454 ' OLE_order.olc in %s.'%str(contract_file_path)) 1455 1456 comment_re=re.compile(r"^\s*#") 1457 proc_def_re=re.compile( 1458 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+ 1459 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 1460 line_OK_re=re.compile(r"^.*\|\s*OK") 1461 for line in file(contract_file_path): 1462 # Ignore comments 1463 if not comment_re.match(line) is None: 1464 continue 1465 # Check if it is a proc definition line 1466 proc_def = proc_def_re.match(line) 1467 if not proc_def is None: 1468 if int(proc_def.group('proc_class'))!=1: 1469 raise fks_common.FKSProcessError( 1470 'aMCatNLO can only handle loop processes generated by the OLP which have only '+\ 1471 ' process class attribute. Found %s instead in: \n%s'\ 1472 %(proc_def.group('proc_class'),line)) 1473 in_pdgs=tuple([int(in_pdg) for in_pdg in \ 1474 proc_def.group('in_pdgs').split()]) 1475 out_pdgs=tuple([int(out_pdg) for out_pdg in \ 1476 proc_def.group('out_pdgs').split()]) 1477 proc_def_to_label[(in_pdgs,out_pdgs)]=\ 1478 int(proc_def.group('proc_label')) 1479 continue 1480 # For the other types of line, just make sure they end with | OK 1481 if line_OK_re.match(line) is None: 1482 raise fks_common.FKSProcessError( 1483 'The OLP could not process the following line: \n%s'%line) 1484 1485 return proc_def_to_label
1486 1487
1488 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1489 """writes the V**** directory inside the P**** directories specified in 1490 dir_name""" 1491 1492 cwd = os.getcwd() 1493 1494 matrix_element = loop_matrix_element 1495 1496 # Create the MadLoop5_resources directory if not already existing 1497 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 1498 try: 1499 os.mkdir(dirpath) 1500 except os.error as error: 1501 logger.warning(error.strerror + " " + dirpath) 1502 1503 # Create the directory PN_xx_xxxxx in the specified path 1504 name = "V%s" % matrix_element.get('processes')[0].shell_string() 1505 dirpath = os.path.join(dir_name, name) 1506 1507 try: 1508 os.mkdir(dirpath) 1509 except os.error as error: 1510 logger.warning(error.strerror + " " + dirpath) 1511 1512 try: 1513 os.chdir(dirpath) 1514 except os.error: 1515 logger.error('Could not cd to directory %s' % dirpath) 1516 return 0 1517 1518 logger.info('Creating files in directory %s' % name) 1519 1520 # Extract number of external particles 1521 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1522 1523 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 1524 # The born matrix element, if needed 1525 filename = 'born_matrix.f' 1526 calls = self.write_bornmatrix( 1527 writers.FortranWriter(filename), 1528 matrix_element, 1529 fortran_model) 1530 1531 filename = 'nexternal.inc' 1532 self.write_nexternal_file(writers.FortranWriter(filename), 1533 nexternal, ninitial) 1534 1535 filename = 'pmass.inc' 1536 self.write_pmass_file(writers.FortranWriter(filename), 1537 matrix_element) 1538 1539 filename = 'ngraphs.inc' 1540 self.write_ngraphs_file(writers.FortranWriter(filename), 1541 len(matrix_element.get_all_amplitudes())) 1542 1543 filename = "loop_matrix.ps" 1544 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 1545 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 1546 filename, 1547 model=matrix_element.get('processes')[0].get('model'), 1548 amplitude='') 1549 logger.info("Drawing loop Feynman diagrams for " + \ 1550 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1551 plot.draw() 1552 1553 filename = "born_matrix.ps" 1554 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1555 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\ 1556 get('model'),amplitude='') 1557 logger.info("Generating born Feynman diagrams for " + \ 1558 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1559 plot.draw() 1560 1561 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 1562 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 1563 'MadLoopCommons.f','MadLoopParams.inc'] 1564 1565 # We should move to MadLoop5_resources directory from the SubProcesses 1566 1567 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 1568 pjoin('..','MadLoop5_resources')) 1569 1570 for file in linkfiles: 1571 ln('../../%s' % file) 1572 1573 os.system("ln -s ../../makefile_loop makefile") 1574 1575 linkfiles = ['mpmodule.mod'] 1576 1577 for file in linkfiles: 1578 ln('../../../lib/%s' % file) 1579 1580 # Return to original PWD 1581 os.chdir(cwd) 1582 1583 if not calls: 1584 calls = 0 1585 return calls
1586
1587 - def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
 1588  """Computes the QED and QCD coupling orders from the number of external 1589 particles and the weighted order.""" 1590 # n vertices = nexternal - 2 = QED + QCD 1591 # weighted = 2*QED + QCD 1592 QED = weighted - nexternal + 2 1593 QCD = weighted - 2 * QED 1594 return QED, QCD
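A quick numerical check of the two relations quoted in the comments, for a hypothetical 2 -> 3 Born process (nexternal = 5) with WEIGHTED = 3:

    # vertices = nexternal - 2 = 3 = QED + QCD   and   weighted = 2*QED + QCD = 3
    QED = 3 - 5 + 2       # = 0
    QCD = 3 - 2 * QED     # = 3
    # so get_qed_qcd_orders_from_weighted(5, 3) returns (0, 3), i.e. a purely QCD Born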
1595 1596 1597 1598 #=============================================================================== 1599 # write_lh_order 1600 #=============================================================================== 1601 #test written
1602 - def write_lh_order(self, filename, matrix_elements, OLP='MadLoop'):
1603 """Creates the OLE_order.lh file. This function should be edited according 1604 to the OLP which is used. For now it is generic.""" 1605 1606 if isinstance(matrix_elements,fks_helas_objects.FKSHelasProcess): 1607 fksborns=fks_helas_objects.FKSHelasProcessList([matrix_elements]) 1608 elif isinstance(matrix_elements,fks_helas_objects.FKSHelasProcessList): 1609 fksborns= matrix_elements 1610 else: 1611 raise fks_common.FKSProcessError('Wrong type of argument for '+\ 1612 'matrix_elements in function write_lh_order.') 1613 1614 if len(fksborns)==0: 1615 raise fks_common.FKSProcessError('No matrix elements provided to '+\ 1616 'the function write_lh_order.') 1617 return 1618 1619 # We assume the orders to be common to all Subprocesses 1620 1621 orders = fksborns[0].orders 1622 if 'QED' in orders.keys() and 'QCD' in orders.keys(): 1623 QED=orders['QED'] 1624 QCD=orders['QCD'] 1625 elif 'QED' in orders.keys(): 1626 QED=orders['QED'] 1627 QCD=0 1628 elif 'QCD' in orders.keys(): 1629 QED=0 1630 QCD=orders['QCD'] 1631 else: 1632 QED, QCD = self.get_qed_qcd_orders_from_weighted(\ 1633 fksborns[0].get_nexternal_ninitial()[0]-1, # -1 is because the function returns nexternal of the real emission 1634 orders['WEIGHTED']) 1635 1636 replace_dict = {} 1637 replace_dict['mesq'] = 'CHaveraged' 1638 replace_dict['corr'] = ' '.join(matrix_elements[0].get('processes')[0].\ 1639 get('perturbation_couplings')) 1640 replace_dict['irreg'] = 'CDR' 1641 replace_dict['aspow'] = QCD 1642 replace_dict['aepow'] = QED 1643 replace_dict['modelfile'] = './param_card.dat' 1644 replace_dict['params'] = 'alpha_s' 1645 proc_lines=[] 1646 for fksborn in fksborns: 1647 proc_lines.append(fksborn.get_lh_pdg_string()) 1648 replace_dict['pdgs'] = '\n'.join(proc_lines) 1649 replace_dict['symfin'] = 'Yes' 1650 content = \ 1651 "#OLE_order written by MadGraph5_aMC@NLO\n\ 1652 \n\ 1653 MatrixElementSquareType %(mesq)s\n\ 1654 CorrectionType %(corr)s\n\ 1655 IRregularisation %(irreg)s\n\ 1656 AlphasPower %(aspow)d\n\ 1657 AlphaPower %(aepow)d\n\ 1658 NJetSymmetrizeFinal %(symfin)s\n\ 1659 ModelFile %(modelfile)s\n\ 1660 Parameters %(params)s\n\ 1661 \n\ 1662 # process\n\ 1663 %(pdgs)s\n\ 1664 " % replace_dict 1665 1666 file = open(filename, 'w') 1667 file.write(content) 1668 file.close 1669 return
1670 1671 1672 #=============================================================================== 1673 # write_born_fks 1674 #=============================================================================== 1675 # test written
1676 - def write_born_fks(self, writer, fksborn, fortran_model):
1677 """Export a matrix element to a born.f file in MadFKS format""" 1678 1679 matrix_element = fksborn.born_matrix_element 1680 1681 if not matrix_element.get('processes') or \ 1682 not matrix_element.get('diagrams'): 1683 return 0 1684 1685 if not isinstance(writer, writers.FortranWriter): 1686 raise writers.FortranWriter.FortranWriterError(\ 1687 "writer not FortranWriter") 1688 # Set lowercase/uppercase Fortran code 1689 writers.FortranWriter.downcase = False 1690 1691 replace_dict = {} 1692 1693 # Extract version number and date from VERSION file 1694 info_lines = self.get_mg5_info_lines() 1695 replace_dict['info_lines'] = info_lines 1696 1697 # Extract process info lines 1698 process_lines = self.get_process_info_lines(matrix_element) 1699 replace_dict['process_lines'] = process_lines 1700 1701 1702 # Extract ncomb 1703 ncomb = matrix_element.get_helicity_combinations() 1704 replace_dict['ncomb'] = ncomb 1705 1706 # Extract helicity lines 1707 helicity_lines = self.get_helicity_lines(matrix_element) 1708 replace_dict['helicity_lines'] = helicity_lines 1709 1710 # Extract IC line 1711 ic_line = self.get_ic_line(matrix_element) 1712 replace_dict['ic_line'] = ic_line 1713 1714 # Extract overall denominator 1715 # Averaging initial state color, spin, and identical FS particles 1716 #den_factor_line = get_den_factor_line(matrix_element) 1717 1718 # Extract ngraphs 1719 ngraphs = matrix_element.get_number_of_amplitudes() 1720 replace_dict['ngraphs'] = ngraphs 1721 1722 # Extract nwavefuncs 1723 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1724 replace_dict['nwavefuncs'] = nwavefuncs 1725 1726 # Extract ncolor 1727 ncolor = max(1, len(matrix_element.get('color_basis'))) 1728 replace_dict['ncolor'] = ncolor 1729 1730 # Extract color data lines 1731 color_data_lines = self.get_color_data_lines(matrix_element) 1732 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1733 1734 # Extract helas calls 1735 helas_calls = fortran_model.get_matrix_element_calls(\ 1736 matrix_element) 1737 replace_dict['helas_calls'] = "\n".join(helas_calls) 1738 1739 # Extract amp2 lines 1740 amp2_lines = self.get_amp2_lines(matrix_element) 1741 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1742 1743 # Extract JAMP lines 1744 jamp_lines = self.get_JAMP_lines(matrix_element) 1745 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1746 1747 # Set the size of Wavefunction 1748 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 1749 replace_dict['wavefunctionsize'] = 20 1750 else: 1751 replace_dict['wavefunctionsize'] = 8 1752 1753 # Extract glu_ij_lines 1754 ij_lines = self.get_ij_lines(fksborn) 1755 replace_dict['ij_lines'] = '\n'.join(ij_lines) 1756 1757 # Extract den_factor_lines 1758 den_factor_lines = self.get_den_factor_lines(fksborn) 1759 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1760 1761 # Extract the number of FKS process 1762 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1) 1763 1764 file = open(os.path.join(_file_path, \ 1765 'iolibs/template_files/born_fks.inc')).read() 1766 file = file % replace_dict 1767 1768 # Write the file 1769 writer.writelines(file) 1770 1771 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
1772 1773
1774 - def write_born_hel(self, writer, fksborn, fortran_model):
1775 """Export a matrix element to a born_hel.f file in MadFKS format""" 1776 1777 matrix_element = fksborn.born_matrix_element 1778 1779 if not matrix_element.get('processes') or \ 1780 not matrix_element.get('diagrams'): 1781 return 0 1782 1783 if not isinstance(writer, writers.FortranWriter): 1784 raise writers.FortranWriter.FortranWriterError(\ 1785 "writer not FortranWriter") 1786 # Set lowercase/uppercase Fortran code 1787 writers.FortranWriter.downcase = False 1788 1789 replace_dict = {} 1790 1791 # Extract version number and date from VERSION file 1792 info_lines = self.get_mg5_info_lines() 1793 replace_dict['info_lines'] = info_lines 1794 1795 # Extract process info lines 1796 process_lines = self.get_process_info_lines(matrix_element) 1797 replace_dict['process_lines'] = process_lines 1798 1799 1800 # Extract ncomb 1801 ncomb = matrix_element.get_helicity_combinations() 1802 replace_dict['ncomb'] = ncomb 1803 1804 # Extract helicity lines 1805 helicity_lines = self.get_helicity_lines(matrix_element) 1806 replace_dict['helicity_lines'] = helicity_lines 1807 1808 # Extract IC line 1809 ic_line = self.get_ic_line(matrix_element) 1810 replace_dict['ic_line'] = ic_line 1811 1812 # Extract overall denominator 1813 # Averaging initial state color, spin, and identical FS particles 1814 #den_factor_line = get_den_factor_line(matrix_element) 1815 1816 # Extract ngraphs 1817 ngraphs = matrix_element.get_number_of_amplitudes() 1818 replace_dict['ngraphs'] = ngraphs 1819 1820 # Extract nwavefuncs 1821 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1822 replace_dict['nwavefuncs'] = nwavefuncs 1823 1824 # Extract ncolor 1825 ncolor = max(1, len(matrix_element.get('color_basis'))) 1826 replace_dict['ncolor'] = ncolor 1827 1828 # Extract color data lines 1829 color_data_lines = self.get_color_data_lines(matrix_element) 1830 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1831 1832 # Extract amp2 lines 1833 amp2_lines = self.get_amp2_lines(matrix_element) 1834 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1835 1836 # Extract JAMP lines 1837 jamp_lines = self.get_JAMP_lines(matrix_element) 1838 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1839 1840 # Extract den_factor_lines 1841 den_factor_lines = self.get_den_factor_lines(fksborn) 1842 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1843 1844 # Extract the number of FKS process 1845 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1846 1847 file = open(os.path.join(_file_path, \ 1848 'iolibs/template_files/born_fks_hel.inc')).read() 1849 file = file % replace_dict 1850 1851 # Write the file 1852 writer.writelines(file) 1853 1854 return
1855 1856 1857 #=============================================================================== 1858 # write_born_sf_fks 1859 #=============================================================================== 1860 #test written
1861 - def write_sborn_sf(self, writer, color_links, fortran_model):
1862 """Creates the sborn_sf.f file, containing the calls to the different 1863 color linked borns""" 1864 1865 replace_dict = {} 1866 nborns = len(color_links) 1867 ifkss = [] 1868 iborns = [] 1869 mms = [] 1870 nns = [] 1871 iflines = "\n" 1872 1873 #header for the sborn_sf.f file 1874 file = """subroutine sborn_sf(p_born,m,n,wgt) 1875 implicit none 1876 include "nexternal.inc" 1877 double precision p_born(0:3,nexternal-1),wgt 1878 double complex wgt1(2) 1879 integer m,n \n""" 1880 1881 if nborns > 0: 1882 1883 for i, c_link in enumerate(color_links): 1884 iborn = i+1 1885 1886 iff = {True : 'if', False : 'elseif'}[i==0] 1887 1888 m, n = c_link['link'] 1889 1890 if m != n: 1891 iflines += \ 1892 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1893 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\ 1894 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1895 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1896 else: 1897 iflines += \ 1898 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1899 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\ 1900 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1901 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1902 1903 1904 file += iflines + \ 1905 """else 1906 wgt = 0d0 1907 endif 1908 1909 return 1910 end""" 1911 elif nborns == 0: 1912 #write a dummy file 1913 file+=""" 1914 c This is a dummy function because 1915 c this subdir has no soft singularities 1916 wgt = 0d0 1917 1918 return 1919 end""" 1920 # Write the end of the file 1921 1922 writer.writelines(file)
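For two hypothetical colour links, say (1,2) and (1,1), the subroutine assembled above would read schematically as follows (a sketch of the generated Fortran, not actual output):

    subroutine sborn_sf(p_born,m,n,wgt)
    implicit none
    include "nexternal.inc"
    double precision p_born(0:3,nexternal-1),wgt
    double complex wgt1(2)
    integer m,n
    c b_sf_001 links partons 1 and 2
    if ((m.eq.1 .and. n.eq.2).or.(m.eq.2 .and. n.eq.1)) then
    call sb_sf_001(p_born,wgt)
    c b_sf_002 links partons 1 and 1
    elseif (m.eq.1 .and. n.eq.1) then
    call sb_sf_002(p_born,wgt)
    else
    wgt = 0d0
    endif
    return
    end

When there are no colour links at all, the dummy version simply sets wgt = 0d0.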
1923 1924 1925 #=============================================================================== 1926 # write_b_sf_fks 1927 #=============================================================================== 1928 #test written
1929 - def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
1930 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format""" 1931 1932 matrix_element = copy.copy(fksborn.born_matrix_element) 1933 1934 if not matrix_element.get('processes') or \ 1935 not matrix_element.get('diagrams'): 1936 return 0 1937 1938 if not isinstance(writer, writers.FortranWriter): 1939 raise writers.FortranWriter.FortranWriterError(\ 1940 "writer not FortranWriter") 1941 # Set lowercase/uppercase Fortran code 1942 writers.FortranWriter.downcase = False 1943 1944 iborn = i + 1 1945 link = fksborn.color_links[i] 1946 1947 replace_dict = {} 1948 1949 replace_dict['iborn'] = iborn 1950 1951 # Extract version number and date from VERSION file 1952 info_lines = self.get_mg5_info_lines() 1953 replace_dict['info_lines'] = info_lines 1954 1955 # Extract process info lines 1956 process_lines = self.get_process_info_lines(matrix_element) 1957 replace_dict['process_lines'] = process_lines + \ 1958 "\nc spectators: %d %d \n" % tuple(link['link']) 1959 1960 # Extract ncomb 1961 ncomb = matrix_element.get_helicity_combinations() 1962 replace_dict['ncomb'] = ncomb 1963 1964 # Extract helicity lines 1965 helicity_lines = self.get_helicity_lines(matrix_element) 1966 replace_dict['helicity_lines'] = helicity_lines 1967 1968 # Extract IC line 1969 ic_line = self.get_ic_line(matrix_element) 1970 replace_dict['ic_line'] = ic_line 1971 1972 # Extract den_factor_lines 1973 den_factor_lines = self.get_den_factor_lines(fksborn) 1974 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1975 1976 # Extract ngraphs 1977 ngraphs = matrix_element.get_number_of_amplitudes() 1978 replace_dict['ngraphs'] = ngraphs 1979 1980 # Extract nwavefuncs 1981 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1982 replace_dict['nwavefuncs'] = nwavefuncs 1983 1984 # Extract ncolor 1985 ncolor1 = max(1, len(link['orig_basis'])) 1986 replace_dict['ncolor1'] = ncolor1 1987 ncolor2 = max(1, len(link['link_basis'])) 1988 replace_dict['ncolor2'] = ncolor2 1989 1990 # Extract color data lines 1991 color_data_lines = self.get_color_data_lines_from_color_matrix(\ 1992 link['link_matrix']) 1993 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1994 1995 # Extract amp2 lines 1996 amp2_lines = self.get_amp2_lines(matrix_element) 1997 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1998 1999 # Extract JAMP lines 2000 jamp_lines = self.get_JAMP_lines(matrix_element) 2001 new_jamp_lines = [] 2002 for line in jamp_lines: 2003 line = string.replace(line, 'JAMP', 'JAMP1') 2004 new_jamp_lines.append(line) 2005 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines) 2006 2007 matrix_element.set('color_basis', link['link_basis'] ) 2008 jamp_lines = self.get_JAMP_lines(matrix_element) 2009 new_jamp_lines = [] 2010 for line in jamp_lines: 2011 line = string.replace(line, 'JAMP', 'JAMP2') 2012 new_jamp_lines.append(line) 2013 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines) 2014 2015 2016 # Extract the number of FKS process 2017 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2018 2019 file = open(os.path.join(_file_path, \ 2020 'iolibs/template_files/b_sf_xxx_fks.inc')).read() 2021 file = file % replace_dict 2022 2023 # Write the file 2024 writer.writelines(file) 2025 2026 return 0 , ncolor1
2027 2028 2029 #=============================================================================== 2030 # write_born_nhel_file 2031 #=============================================================================== 2032 #test written
2033 - def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
2034 """Write the born_nhel.inc file for MG4.""" 2035 2036 ncomb = matrix_element.get_helicity_combinations() 2037 file = " integer max_bhel, max_bcol \n" 2038 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \ 2039 (ncomb, nflows) 2040 2041 # Write the file 2042 writer.writelines(file) 2043 2044 return True
2045 2046 #=============================================================================== 2047 # write_fks_info_file 2048 #===============================================================================
2049 - def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
 2050  """Writes the content of nFKSconfigs.inc, which just gives the 2051 total number of FKS directories as a parameter. 2052 nFKSconfigs is always >=1 (a fake configuration is used for LOonly)""" 2053 replace_dict = {} 2054 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1) 2055 content = \ 2056 """ INTEGER FKS_CONFIGS 2057 PARAMETER (FKS_CONFIGS=%(nconfs)d) 2058 2059 """ % replace_dict 2060 2061 writer.writelines(content)
2062 2063 2064 #=============================================================================== 2065 # write_fks_info_file 2066 #===============================================================================
2067 - def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
2068 """Writes the content of fks_info.inc, which lists the informations on the 2069 possible splittings of the born ME. 2070 nconfs is always >=1 (use a fake configuration for LOonly). 2071 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and 2072 the last colored particle as j_fks.""" 2073 2074 replace_dict = {} 2075 fks_info_list = fksborn.get_fks_info_list() 2076 replace_dict['nconfs'] = max(len(fks_info_list), 1) 2077 2078 # this is for processes with 'real' or 'all' as NLO mode 2079 if len(fks_info_list) > 0: 2080 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \ 2081 for info in fks_info_list]) 2082 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \ 2083 for info in fks_info_list]) 2084 2085 col_lines = [] 2086 pdg_lines = [] 2087 charge_lines = [] 2088 fks_j_from_i_lines = [] 2089 for i, info in enumerate(fks_info_list): 2090 col_lines.append( \ 2091 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2092 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) )) 2093 pdg_lines.append( \ 2094 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2095 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs']))) 2096 charge_lines.append(\ 2097 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\ 2098 % (i + 1, ', '.join('%19.15fd0' % charg\ 2099 for charg in fksborn.real_processes[info['n_me']-1].charges) )) 2100 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\ 2101 i + 1)) 2102 else: 2103 # this is for 'LOonly', generate a fake FKS configuration with 2104 # - i_fks = nexternal, pdg type = -21 and color =8 2105 # - j_fks = the last colored particle 2106 bornproc = fksborn.born_matrix_element.get('processes')[0] 2107 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21] 2108 colors = [l.get('color') for l in bornproc.get('legs')] + [8] 2109 charges = [0.] 
* len(colors) 2110 2111 fks_i = len(colors) 2112 for cpos, col in enumerate(colors[:-1]): 2113 if col != 1: 2114 fks_j = cpos+1 2115 2116 fks_i_values = str(fks_i) 2117 fks_j_values = str(fks_j) 2118 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2119 % ', '.join([str(col) for col in colors])] 2120 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2121 % ', '.join([str(pdg) for pdg in pdgs])] 2122 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2123 % ', '.join('%19.15fd0' % charg for charg in charges)] 2124 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \ 2125 % (fks_i, fks_j)] 2126 2127 2128 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values 2129 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values 2130 replace_dict['col_lines'] = '\n'.join(col_lines) 2131 replace_dict['pdg_lines'] = '\n'.join(pdg_lines) 2132 replace_dict['charge_lines'] = '\n'.join(charge_lines) 2133 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines) 2134 2135 content = \ 2136 """ INTEGER IPOS, JPOS 2137 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d) 2138 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL) 2139 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL) 2140 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL) 2141 2142 %(fks_i_line)s 2143 %(fks_j_line)s 2144 2145 %(fks_j_from_i_lines)s 2146 2147 C 2148 C Particle type: 2149 C octet = 8, triplet = 3, singlet = 1 2150 %(col_lines)s 2151 2152 C 2153 C Particle type according to PDG: 2154 C 2155 %(pdg_lines)s 2156 2157 C 2158 C Particle charge: 2159 C charge is set 0. with QCD corrections, which is irrelevant 2160 %(charge_lines)s 2161 """ % replace_dict 2162 if not isinstance(writer, writers.FortranWriter): 2163 raise writers.FortranWriter.FortranWriterError(\ 2164 "writer not FortranWriter") 2165 # Set lowercase/uppercase Fortran code 2166 writers.FortranWriter.downcase = False 2167 2168 writer.writelines(content) 2169 2170 return True
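A minimal sketch of the LOonly branch above, with hypothetical leg colours (the logic is the same as in the code: i_fks is the appended antigluon and j_fks the last coloured Born leg):

    colors = [3, -3, 1, 1] + [8]    # hypothetical Born u u~ > e+ e- plus the fake antigluon
    fks_i = len(colors)             # 5, the antigluon itself
    for cpos, col in enumerate(colors[:-1]):
        if col != 1:
            fks_j = cpos + 1        # ends up 2, the last coloured Born leg
    # the single data statement then reads:
    # DATA (FKS_J_FROM_I_D(1, 5, JPOS), JPOS = 0, 1) / 1, 2 /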
2171 2172 2173 #=============================================================================== 2174 # write_matrix_element_fks 2175 #=============================================================================== 2176 #test written
2177 - def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
2178 """Export a matrix element to a matrix.f file in MG4 madevent format""" 2179 2180 if not matrix_element.get('processes') or \ 2181 not matrix_element.get('diagrams'): 2182 return 0,0 2183 2184 if not isinstance(writer, writers.FortranWriter): 2185 raise writers.FortranWriter.FortranWriterError(\ 2186 "writer not FortranWriter") 2187 # Set lowercase/uppercase Fortran code 2188 writers.FortranWriter.downcase = False 2189 2190 replace_dict = {} 2191 replace_dict['N_me'] = n 2192 2193 # Extract version number and date from VERSION file 2194 info_lines = self.get_mg5_info_lines() 2195 replace_dict['info_lines'] = info_lines 2196 2197 # Extract process info lines 2198 process_lines = self.get_process_info_lines(matrix_element) 2199 replace_dict['process_lines'] = process_lines 2200 2201 # Extract ncomb 2202 ncomb = matrix_element.get_helicity_combinations() 2203 replace_dict['ncomb'] = ncomb 2204 2205 # Extract helicity lines 2206 helicity_lines = self.get_helicity_lines(matrix_element) 2207 replace_dict['helicity_lines'] = helicity_lines 2208 2209 # Extract IC line 2210 ic_line = self.get_ic_line(matrix_element) 2211 replace_dict['ic_line'] = ic_line 2212 2213 # Extract overall denominator 2214 # Averaging initial state color, spin, and identical FS particles 2215 den_factor_line = self.get_den_factor_line(matrix_element) 2216 replace_dict['den_factor_line'] = den_factor_line 2217 2218 # Extract ngraphs 2219 ngraphs = matrix_element.get_number_of_amplitudes() 2220 replace_dict['ngraphs'] = ngraphs 2221 2222 # Extract ncolor 2223 ncolor = max(1, len(matrix_element.get('color_basis'))) 2224 replace_dict['ncolor'] = ncolor 2225 2226 # Extract color data lines 2227 color_data_lines = self.get_color_data_lines(matrix_element) 2228 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2229 2230 # Extract helas calls 2231 helas_calls = fortran_model.get_matrix_element_calls(\ 2232 matrix_element) 2233 replace_dict['helas_calls'] = "\n".join(helas_calls) 2234 2235 # Extract nwavefuncs (important to place after get_matrix_element_calls 2236 # so that 'me_id' is set) 2237 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2238 replace_dict['nwavefuncs'] = nwavefuncs 2239 2240 # Extract amp2 lines 2241 amp2_lines = self.get_amp2_lines(matrix_element) 2242 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2243 2244 # Set the size of Wavefunction 2245 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2246 replace_dict['wavefunctionsize'] = 20 2247 else: 2248 replace_dict['wavefunctionsize'] = 8 2249 2250 # Extract JAMP lines 2251 jamp_lines = self.get_JAMP_lines(matrix_element) 2252 2253 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2254 2255 realfile = open(os.path.join(_file_path, \ 2256 'iolibs/template_files/realmatrix_fks.inc')).read() 2257 2258 realfile = realfile % replace_dict 2259 2260 # Write the file 2261 writer.writelines(realfile) 2262 2263 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2264 2265 2266 #=============================================================================== 2267 # write_pdf_file 2268 #===============================================================================
2269 - def write_pdf_file(self, writer, matrix_element, n, fortran_model):
2270 #test written 2271 """Write the auto_dsig.f file for MadFKS, which contains 2272 pdf call information""" 2273 2274 if not matrix_element.get('processes') or \ 2275 not matrix_element.get('diagrams'): 2276 return 0 2277 2278 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2279 2280 if ninitial < 1 or ninitial > 2: 2281 raise writers.FortranWriter.FortranWriterError, \ 2282 """Need ninitial = 1 or 2 to write auto_dsig file""" 2283 2284 replace_dict = {} 2285 2286 replace_dict['N_me'] = n 2287 2288 # Extract version number and date from VERSION file 2289 info_lines = self.get_mg5_info_lines() 2290 replace_dict['info_lines'] = info_lines 2291 2292 # Extract process info lines 2293 process_lines = self.get_process_info_lines(matrix_element) 2294 replace_dict['process_lines'] = process_lines 2295 2296 pdf_vars, pdf_data, pdf_lines = \ 2297 self.get_pdf_lines_mir(matrix_element, ninitial, False, False) 2298 replace_dict['pdf_vars'] = pdf_vars 2299 replace_dict['pdf_data'] = pdf_data 2300 replace_dict['pdf_lines'] = pdf_lines 2301 2302 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \ 2303 self.get_pdf_lines_mir(matrix_element, ninitial, False, True) 2304 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr 2305 2306 file = open(os.path.join(_file_path, \ 2307 'iolibs/template_files/parton_lum_n_fks.inc')).read() 2308 file = file % replace_dict 2309 2310 # Write the file 2311 writer.writelines(file)
2312 2313 2314 2315 #=============================================================================== 2316 # write_coloramps_file 2317 #=============================================================================== 2318 #test written
2319 - def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
2320 """Write the coloramps.inc file for MadEvent""" 2321 2322 lines = [] 2323 lines.append( "logical icolamp(%d,%d,1)" % \ 2324 (max(len(matrix_element.get('color_basis').keys()), 1), 2325 len(mapconfigs))) 2326 2327 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1) 2328 2329 # Write the file 2330 writer.writelines(lines) 2331 2332 return True
2333 2334 2335 #=============================================================================== 2336 # write_leshouche_file 2337 #=============================================================================== 2338 #test written
2339 - def write_leshouche_file(self, writer, matrix_element, fortran_model):
2340 """Write the leshouche.inc file for MG4""" 2341 2342 # Extract number of external particles 2343 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2344 2345 lines = [] 2346 for iproc, proc in enumerate(matrix_element.get('processes')): 2347 legs = proc.get_legs_with_decays() 2348 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \ 2349 (iproc + 1, nexternal, 2350 ",".join([str(l.get('id')) for l in legs]))) 2351 for i in [1, 2]: 2352 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2353 (i, iproc + 1, nexternal, 2354 ",".join([ "%3r" % 0 ] * ninitial + \ 2355 [ "%3r" % i ] * (nexternal - ninitial)))) 2356 2357 # Here goes the color connections corresponding to the JAMPs 2358 # Only one output, for the first subproc! 2359 if iproc == 0: 2360 # If no color basis, just output trivial color flow 2361 if not matrix_element.get('color_basis'): 2362 for i in [1, 2]: 2363 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \ 2364 (i, nexternal, 2365 ",".join([ "%3r" % 0 ] * nexternal))) 2366 color_flow_list = [] 2367 2368 else: 2369 # First build a color representation dictionnary 2370 repr_dict = {} 2371 for l in legs: 2372 repr_dict[l.get('number')] = \ 2373 proc.get('model').get_particle(l.get('id')).get_color()\ 2374 * (-1)**(1+l.get('state')) 2375 # Get the list of color flows 2376 color_flow_list = \ 2377 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2378 ninitial) 2379 # And output them properly 2380 for cf_i, color_flow_dict in enumerate(color_flow_list): 2381 for i in [0, 1]: 2382 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2383 (i + 1, cf_i + 1, nexternal, 2384 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2385 for l in legs]))) 2386 2387 # Write the file 2388 writer.writelines(lines) 2389 2390 return len(color_flow_list)
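For a hypothetical single-subprocess g g > t t~ (nexternal = 4, ninitial = 2), the IDUP and MOTHUP lines produced above would look schematically like:

    DATA (IDUP(i,1),i=1,4)/21,21,6,-6/
    DATA (MOTHUP(1,i,  1),i=1, 4)/  0,  0,  1,  1/
    DATA (MOTHUP(2,i,  1),i=1, 4)/  0,  0,  2,  2/

followed by one pair of ICOLUP DATA statements per colour flow returned by color_flow_decomposition; the routine returns the number of such flows.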
2391 2392 2393 #=============================================================================== 2394 # write_configs_file 2395 #=============================================================================== 2396 #test_written
2397 - def write_configs_file(self, writer, matrix_element, fortran_model):
2398 """Write the configs.inc file for MadEvent""" 2399 2400 # Extract number of external particles 2401 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2402 lines = [] 2403 2404 iconfig = 0 2405 2406 s_and_t_channels = [] 2407 mapconfigs = [] 2408 2409 model = matrix_element.get('processes')[0].get('model') 2410 # new_pdg = model.get_first_non_pdg() 2411 2412 base_diagrams = matrix_element.get('base_amplitude').get('diagrams') 2413 model = matrix_element.get('base_amplitude').get('process').get('model') 2414 minvert = min([max([len(vert.get('legs')) for vert in \ 2415 diag.get('vertices')]) for diag in base_diagrams]) 2416 2417 for idiag, diag in enumerate(base_diagrams): 2418 if any([len(vert.get('legs')) > minvert for vert in 2419 diag.get('vertices')]): 2420 # Only 3-vertices allowed in configs.inc 2421 continue 2422 iconfig = iconfig + 1 2423 helas_diag = matrix_element.get('diagrams')[idiag] 2424 mapconfigs.append(helas_diag.get('number')) 2425 lines.append("# Diagram %d, Amplitude %d" % \ 2426 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number'])) 2427 # Correspondance between the config and the amplitudes 2428 lines.append("data mapconfig(%4d)/%4d/" % (iconfig, 2429 helas_diag.get('amplitudes')[0]['number'])) 2430 2431 # Need to reorganize the topology so that we start with all 2432 # final state external particles and work our way inwards 2433 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 2434 get_s_and_t_channels(ninitial, model, 990) 2435 2436 s_and_t_channels.append([schannels, tchannels]) 2437 2438 # Write out propagators for s-channel and t-channel vertices 2439 allchannels = schannels 2440 if len(tchannels) > 1: 2441 # Write out tchannels only if there are any non-trivial ones 2442 allchannels = schannels + tchannels 2443 2444 for vert in allchannels: 2445 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2446 last_leg = vert.get('legs')[-1] 2447 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \ 2448 (last_leg.get('number'), iconfig, len(daughters), 2449 ",".join(["%3d" % d for d in daughters]))) 2450 if vert in schannels: 2451 lines.append("data sprop(%4d,%4d)/%8d/" % \ 2452 (last_leg.get('number'), iconfig, 2453 last_leg.get('id'))) 2454 elif vert in tchannels[:-1]: 2455 lines.append("data tprid(%4d,%4d)/%8d/" % \ 2456 (last_leg.get('number'), iconfig, 2457 abs(last_leg.get('id')))) 2458 2459 # Write out number of configs 2460 lines.append("# Number of configs") 2461 lines.append("data mapconfig(0)/%4d/" % iconfig) 2462 2463 # Write the file 2464 writer.writelines(lines) 2465 2466 return iconfig, mapconfigs, s_and_t_channels
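A minimal sketch of the vertex-multiplicity filter applied above, with hypothetical diagrams described only by the number of legs of each of their vertices:

    # each inner list holds len(vert.get('legs')) for the vertices of one base diagram
    diagram_vertex_sizes = [[3, 3, 3], [3, 4]]
    minvert = min([max(sizes) for sizes in diagram_vertex_sizes])    # = 3
    kept = [sizes for sizes in diagram_vertex_sizes
            if not any([s > minvert for s in sizes])]                # only [3, 3, 3] survives
    # i.e. only topologies made of 3-point vertices are written to configs.inc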
2467 2468 2469 #=============================================================================== 2470 # write_decayBW_file 2471 #=============================================================================== 2472 #test written
2473 - def write_decayBW_file(self, writer, s_and_t_channels):
 2474  """Write the decayBW.inc file for MadEvent""" 2475 2476 lines = [] 2477 2478 booldict = {False: ".false.", True: ".false."} 2479 #### Changed by MZ 2011-11-23: both entries deliberately map to .false., so Breit-Wigner forcing is never requested 2480 2481 for iconf, config in enumerate(s_and_t_channels): 2482 schannels = config[0] 2483 for vertex in schannels: 2484 # For the resulting leg, pick out whether it comes from 2485 # decay or not, as given by the from_group flag 2486 leg = vertex.get('legs')[-1] 2487 lines.append("data gForceBW(%d,%d)/%s/" % \ 2488 (leg.get('number'), iconf + 1, 2489 booldict[leg.get('from_group')])) 2490 2491 # Write the file 2492 writer.writelines(lines) 2493 2494 return True
2495 2496 2497 #=============================================================================== 2498 # write_dname_file 2499 #===============================================================================
2500 - def write_dname_file(self, writer, matrix_element, fortran_model):
2501 """Write the dname.mg file for MG4""" 2502 2503 line = "DIRNAME=P%s" % \ 2504 matrix_element.get('processes')[0].shell_string() 2505 2506 # Write the file 2507 writer.write(line + "\n") 2508 2509 return True
2510 2511 2512 #=============================================================================== 2513 # write_iproc_file 2514 #===============================================================================
2515 - def write_iproc_file(self, writer, me_number):
2516 """Write the iproc.dat file for MG4""" 2517 2518 line = "%d" % (me_number + 1) 2519 2520 # Write the file 2521 for line_to_write in writer.write_line(line): 2522 writer.write(line_to_write) 2523 return True
2524 2525 2526 #=============================================================================== 2527 # Helper functions 2528 #=============================================================================== 2529 2530 2531 #=============================================================================== 2532 # get_fks_j_from_i_lines 2533 #=============================================================================== 2534
2535 - def get_fks_j_from_i_lines(self, me, i = 0): #test written
 2536  """Generate the lines for fks.inc that initialize the 2537 fks_j_from_i array""" 2538 lines = [] 2539 if not me.isfinite: 2540 for ii, js in me.fks_j_from_i.items(): 2541 if js: 2542 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2543 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js]))) 2544 else: 2545 lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2546 % (2, 1, 1, '1')) 2547 lines.append('') 2548 2549 return lines 2550 2551 2552 #=============================================================================== 2553 # get_leshouche_lines 2554 #===============================================================================
2555 - def get_leshouche_lines(self, matrix_element, ime):
2556 #test written 2557 """Write the leshouche.inc file for MG4""" 2558 2559 # Extract number of external particles 2560 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2561 2562 lines = [] 2563 for iproc, proc in enumerate(matrix_element.get('processes')): 2564 legs = proc.get_legs_with_decays() 2565 lines.append("I %4d %4d %s" % \ 2566 (ime, iproc + 1, 2567 " ".join([str(l.get('id')) for l in legs]))) 2568 for i in [1, 2]: 2569 lines.append("M %4d %4d %4d %s" % \ 2570 (ime, i, iproc + 1, 2571 " ".join([ "%3d" % 0 ] * ninitial + \ 2572 [ "%3d" % i ] * (nexternal - ninitial)))) 2573 2574 # Here goes the color connections corresponding to the JAMPs 2575 # Only one output, for the first subproc! 2576 if iproc == 0: 2577 # If no color basis, just output trivial color flow 2578 if not matrix_element.get('color_basis'): 2579 for i in [1, 2]: 2580 lines.append("C %4d %4d 1 %s" % \ 2581 (ime, i, 2582 " ".join([ "%3d" % 0 ] * nexternal))) 2583 color_flow_list = [] 2584 nflow = 1 2585 2586 else: 2587 # First build a color representation dictionnary 2588 repr_dict = {} 2589 for l in legs: 2590 repr_dict[l.get('number')] = \ 2591 proc.get('model').get_particle(l.get('id')).get_color()\ 2592 * (-1)**(1+l.get('state')) 2593 # Get the list of color flows 2594 color_flow_list = \ 2595 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2596 ninitial) 2597 # And output them properly 2598 for cf_i, color_flow_dict in enumerate(color_flow_list): 2599 for i in [0, 1]: 2600 lines.append("C %4d %4d %4d %s" % \ 2601 (ime, i + 1, cf_i + 1, 2602 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2603 for l in legs]))) 2604 2605 nflow = len(color_flow_list) 2606 2607 nproc = len(matrix_element.get('processes')) 2608 2609 return lines, nproc, nflow
2610 2611 2612 #=============================================================================== 2613 # get_den_factor_lines 2614 #===============================================================================
2615 - def get_den_factor_lines(self, fks_born):
 2616  """Returns the lines with the information on the denominator, taking care 2617 of the identical particle factors in the various real emissions""" 2618 2619 lines = [] 2620 info_list = fks_born.get_fks_info_list() 2621 if info_list: 2622 # if the reals have been generated, fill with the corresponding average factor 2623 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 2624 lines.append('DATA IDEN_VALUES /' + \ 2625 ', '.join(['%d' % ( 2626 fks_born.born_matrix_element.get_denominator_factor() / \ 2627 fks_born.born_matrix_element['identical_particle_factor'] * \ 2628 fks_born.real_processes[info['n_me'] - 1].matrix_element['identical_particle_factor'] ) \ 2629 for info in info_list]) + '/') 2630 else: 2631 # otherwise use the born 2632 lines.append('INTEGER IDEN_VALUES(1)') 2633 lines.append('DATA IDEN_VALUES / %d /' \ 2634 % fks_born.born_matrix_element.get_denominator_factor()) 2635 2636 return lines
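Schematically, with hypothetical numbers, the entry written for each real-emission configuration is the Born denominator rescaled by the ratio of identical-particle symmetry factors:

    den_born = 512          # hypothetical Born denominator (spin/colour average times S_born)
    S_born, S_real = 2, 6   # e.g. two identical gluons at Born level, three in the real emission
    iden_value = den_born / S_born * S_real    # = 1536, the number stored in IDEN_VALUES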
2637 2638 2639 #=============================================================================== 2640 # get_ij_lines 2641 #===============================================================================
2642 - def get_ij_lines(self, fks_born):
2643 """returns the lines with the information on the particle number of the born 2644 that splits""" 2645 info_list = fks_born.get_fks_info_list() 2646 lines = [] 2647 if info_list: 2648 # if the reals have been generated, fill with the corresponding value of ij 2649 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 2650 lines.append('DATA IJ_VALUES /' + \ 2651 ', '.join(['%d' % info['fks_info']['ij'] for info in info_list]) + '/') 2652 else: 2653 #otherwise just put the first leg 2654 lines.append('INTEGER IJ_VALUES(1)') 2655 lines.append('DATA IJ_VALUES / 1 /') 2656 2657 return lines
2658 2659
2660 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\ 2661 mirror = False): #test written
2662 """Generate the PDF lines for the auto_dsig.f file""" 2663 2664 processes = matrix_element.get('processes') 2665 model = processes[0].get('model') 2666 2667 pdf_definition_lines = "" 2668 pdf_data_lines = "" 2669 pdf_lines = "" 2670 2671 if ninitial == 1: 2672 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 2673 for i, proc in enumerate(processes): 2674 process_line = proc.base_string() 2675 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2676 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n" 2677 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 2678 else: 2679 # Pick out all initial state particles for the two beams 2680 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 2681 p in processes]))), 2682 sorted(list(set([p.get_initial_pdg(2) for \ 2683 p in processes])))] 2684 2685 # Prepare all variable names 2686 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 2687 sum(initial_states,[])]) 2688 for key,val in pdf_codes.items(): 2689 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 2690 2691 # Set conversion from PDG code to number used in PDF calls 2692 pdgtopdf = {21: 0, 22: 7} 2693 # Fill in missing entries of pdgtopdf 2694 for pdg in sum(initial_states,[]): 2695 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 2696 pdgtopdf[pdg] = pdg 2697 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 2698 # If any particle has pdg code 7, we need to use something else 2699 pdgtopdf[pdg] = 6000000 + pdg 2700 2701 # Get PDF variable declarations for all initial states 2702 for i in [0,1]: 2703 pdf_definition_lines += "DOUBLE PRECISION " + \ 2704 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2705 for pdg in \ 2706 initial_states[i]]) + \ 2707 "\n" 2708 2709 # Get PDF data lines for all initial states 2710 for i in [0,1]: 2711 pdf_data_lines += "DATA " + \ 2712 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2713 for pdg in initial_states[i]]) + \ 2714 "/%d*1D0/" % len(initial_states[i]) + \ 2715 "\n" 2716 2717 # Get PDF values for the different initial states 2718 for i, init_states in enumerate(initial_states): 2719 if not mirror: 2720 ibeam = i + 1 2721 else: 2722 ibeam = 2 - i 2723 if subproc_group: 2724 pdf_lines = pdf_lines + \ 2725 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 2726 % (ibeam, ibeam) 2727 else: 2728 pdf_lines = pdf_lines + \ 2729 "IF (ABS(LPP(%d)) .GE. 
1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 2730 % (ibeam, ibeam) 2731 2732 for initial_state in init_states: 2733 if initial_state in pdf_codes.keys(): 2734 if subproc_group: 2735 if abs(pdgtopdf[initial_state]) <= 7: 2736 pdf_lines = pdf_lines + \ 2737 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 2738 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 2739 (pdf_codes[initial_state], 2740 i + 1, ibeam, pdgtopdf[initial_state], 2741 ibeam, ibeam) 2742 else: 2743 # setting other partons flavours outside quark, gluon, photon to be 0d0 2744 pdf_lines = pdf_lines + \ 2745 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2746 "%s%d=0d0\n") % \ 2747 (pdf_codes[initial_state],i + 1) 2748 else: 2749 if abs(pdgtopdf[initial_state]) <= 7: 2750 pdf_lines = pdf_lines + \ 2751 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 2752 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 2753 (pdf_codes[initial_state], 2754 i + 1, ibeam, pdgtopdf[initial_state], 2755 ibeam, ibeam) 2756 else: 2757 # setting other partons flavours outside quark, gluon, photon to be 0d0 2758 pdf_lines = pdf_lines + \ 2759 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2760 "%s%d=0d0\n") % \ 2761 (pdf_codes[initial_state],i + 1) 2762 2763 pdf_lines = pdf_lines + "ENDIF\n" 2764 2765 # Add up PDFs for the different initial state particles 2766 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 2767 for proc in processes: 2768 process_line = proc.base_string() 2769 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2770 pdf_lines = pdf_lines + "\nPD(IPROC) = " 2771 for ibeam in [1, 2]: 2772 initial_state = proc.get_initial_pdg(ibeam) 2773 if initial_state in pdf_codes.keys(): 2774 pdf_lines = pdf_lines + "%s%d*" % \ 2775 (pdf_codes[initial_state], ibeam) 2776 else: 2777 pdf_lines = pdf_lines + "1d0*" 2778 # Remove last "*" from pdf_lines 2779 pdf_lines = pdf_lines[:-1] + "\n" 2780 2781 # Remove last line break from pdf_lines 2782 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1] 2783 2784 2785 #test written
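A minimal sketch of how the PDG-to-PDF-code dictionary above is filled, with hypothetical initial-state PDG codes; note the special treatment when a particle already carries code 7, which is reserved here for the photon:

    pdgtopdf = {21: 0, 22: 7}
    for pdg in [1, -1, 7, 21]:                        # hypothetical initial states
        if pdg not in pdgtopdf and pdg not in pdgtopdf.values():
            pdgtopdf[pdg] = pdg                       # quarks keep their PDG code
        elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
            pdgtopdf[pdg] = 6000000 + pdg             # avoid clashing with the photon's code
    # pdgtopdf -> {21: 0, 22: 7, 1: 1, -1: -1, 7: 6000007}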
2786 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
2787 """Return the color matrix definition lines for the given color_matrix. Split 2788 rows in chunks of size n.""" 2789 2790 if not color_matrix: 2791 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 2792 else: 2793 ret_list = [] 2794 my_cs = color.ColorString() 2795 for index, denominator in \ 2796 enumerate(color_matrix.get_line_denominators()): 2797 # First write the common denominator for this color matrix line 2798 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 2799 # Then write the numerators for the matrix elements 2800 num_list = color_matrix.get_line_numerators(index, denominator) 2801 for k in xrange(0, len(num_list), n): 2802 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 2803 (index + 1, k + 1, min(k + n, len(num_list)), 2804 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 2805 2806 return ret_list
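For instance, a hypothetical 2x2 colour matrix whose rows have common denominator 3 and numerators (16, -2) and (-2, 16) would give DATA lines of the form:

    DATA Denom(1)/3/
    DATA (CF(i,  1),i=  1,  2) /   16,   -2/
    DATA Denom(2)/3/
    DATA (CF(i,  2),i=  1,  2) /   -2,   16/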
2807 2808 #=========================================================================== 2809 # write_maxamps_file 2810 #===========================================================================
2811 - def write_maxamps_file(self, writer, maxamps, maxflows, 2812 maxproc,maxsproc):
2813 """Write the maxamps.inc file for MG4.""" 2814 2815 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 2816 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 2817 (maxamps, maxflows) 2818 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 2819 (maxproc, maxsproc) 2820 2821 # Write the file 2822 writer.writelines(file) 2823 2824 return True
2825 2826 #=============================================================================== 2827 # write_ncombs_file 2828 #===============================================================================
2829 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
 2830  # #test written 2831 """Write the ncombs.inc file for MadEvent.""" 2832 2833 # Extract number of external particles 2834 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2835 2836 # ncomb (used for clustering) is 2^nexternal; n_max_cl is set to 2^(nexternal+1) 2837 file = " integer n_max_cl\n" 2838 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1)) 2839 2840 # Write the file 2841 writer.writelines(file) 2842 2843 return True
2844 2845 #=========================================================================== 2846 # write_config_subproc_map_file 2847 #===========================================================================
2848 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
2849 """Write a dummy config_subproc.inc file for MadEvent""" 2850 2851 lines = [] 2852 2853 for iconfig in range(len(s_and_t_channels)): 2854 lines.append("DATA CONFSUB(1,%d)/1/" % \ 2855 (iconfig + 1)) 2856 2857 # Write the file 2858 writer.writelines(lines) 2859 2860 return True
2861 2862 #=========================================================================== 2863 # write_colors_file 2864 #===========================================================================
2865 - def write_colors_file(self, writer, matrix_element):
2866 """Write the get_color.f file for MadEvent, which returns color 2867 for all particles used in the matrix element.""" 2868 2869 try: 2870 matrix_elements=matrix_element.real_processes[0].matrix_element 2871 except IndexError: 2872 matrix_elements=[matrix_element.born_matrix_element] 2873 2874 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 2875 matrix_elements = [matrix_elements] 2876 2877 model = matrix_elements[0].get('processes')[0].get('model') 2878 2879 # We need the both particle and antiparticle wf_ids, since the identity 2880 # depends on the direction of the wf. 2881 # loop on the real emissions 2882 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2883 for wf in d.get('wavefunctions')],[]) \ 2884 for d in me.get('diagrams')],[]) \ 2885 for me in [real_proc.matrix_element]],[])\ 2886 for real_proc in matrix_element.real_processes],[])) 2887 # and also on the born 2888 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2889 for wf in d.get('wavefunctions')],[]) \ 2890 for d in matrix_element.born_matrix_element.get('diagrams')],[]))) 2891 2892 # loop on the real emissions 2893 leg_ids = set(sum([sum([sum([[l.get('id') for l in \ 2894 p.get_legs_with_decays()] for p in \ 2895 me.get('processes')], []) for me in \ 2896 [real_proc.matrix_element]], []) for real_proc in \ 2897 matrix_element.real_processes],[])) 2898 # and also on the born 2899 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \ 2900 p.get_legs_with_decays()] for p in \ 2901 matrix_element.born_matrix_element.get('processes')], []))) 2902 particle_ids = sorted(list(wf_ids.union(leg_ids))) 2903 2904 lines = """function get_color(ipdg) 2905 implicit none 2906 integer get_color, ipdg 2907 2908 if(ipdg.eq.%d)then 2909 get_color=%d 2910 return 2911 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 2912 2913 for part_id in particle_ids[1:]: 2914 lines += """else if(ipdg.eq.%d)then 2915 get_color=%d 2916 return 2917 """ % (part_id, model.get_particle(part_id).get_color()) 2918 # Dummy particle for multiparticle vertices with pdg given by 2919 # first code not in the model 2920 lines += """else if(ipdg.eq.%d)then 2921 c This is dummy particle used in multiparticle vertices 2922 get_color=2 2923 return 2924 """ % model.get_first_non_pdg() 2925 lines += """else 2926 write(*,*)'Error: No color given for pdg ',ipdg 2927 get_color=0 2928 return 2929 endif 2930 end 2931 """ 2932 2933 # Write the file 2934 writer.writelines(lines) 2935 2936 return True
2937 2938 #=============================================================================== 2939 # write_props_file 2940 #=============================================================================== 2941 #test_written
2942 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
2943 """Write the props.inc file for MadEvent. Needs input from 2944 write_configs_file. With respect to the parent routine, it has some 2945 more specific formats that allow the props.inc file to be read by the 2946 link program""" 2947 2948 lines = [] 2949 2950 particle_dict = matrix_element.get('processes')[0].get('model').\ 2951 get('particle_dict') 2952 2953 for iconf, configs in enumerate(s_and_t_channels): 2954 for vertex in configs[0] + configs[1][:-1]: 2955 leg = vertex.get('legs')[-1] 2956 if leg.get('id') not in particle_dict: 2957 # Fake propagator used in multiparticle vertices 2958 mass = 'zero' 2959 width = 'zero' 2960 pow_part = 0 2961 else: 2962 particle = particle_dict[leg.get('id')] 2963 # Get mass 2964 if particle.get('mass').lower() == 'zero': 2965 mass = particle.get('mass') 2966 else: 2967 mass = "abs(%s)" % particle.get('mass') 2968 # Get width 2969 if particle.get('width').lower() == 'zero': 2970 width = particle.get('width') 2971 else: 2972 width = "abs(%s)" % particle.get('width') 2973 2974 pow_part = 1 + int(particle.is_boson()) 2975 2976 lines.append("pmass(%3d,%4d) = %s" % \ 2977 (leg.get('number'), iconf + 1, mass)) 2978 lines.append("pwidth(%3d,%4d) = %s" % \ 2979 (leg.get('number'), iconf + 1, width)) 2980 lines.append("pow(%3d,%4d) = %d" % \ 2981 (leg.get('number'), iconf + 1, pow_part)) 2982 2983 # Write the file 2984 writer.writelines(lines) 2985 2986 return True
2987 2988 2989 #=========================================================================== 2990 # write_subproc 2991 #===========================================================================
2992 - def write_subproc(self, writer, subprocdir):
2993 """Append this subprocess to the subproc.mg file for MG4""" 2994 2995 # Write line to file 2996 writer.write(subprocdir + "\n") 2997 2998 return True
2999 3000 3001 3002 3003 3004 #================================================================================= 3005 # Class for using the optimized Loop process 3006 #=================================================================================
3007 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 3008 ProcessExporterFortranFKS):
3009 """Class to take care of exporting a set of matrix elements to 3010 Fortran (v4) format.""" 3011 3012 #=============================================================================== 3013 # copy the Template in a new directory. 3014 #===============================================================================
3015 - def copy_fkstemplate(self):
3016 """create the directory run_name as a copy of the MadEvent 3017 Template, and clean the directory 3018 For now it is just the same as copy_v4template, but it will be modified 3019 """ 3020 mgme_dir = self.mgme_dir 3021 dir_path = self.dir_path 3022 clean =self.opt['clean'] 3023 3024 #First copy the full template tree if dir_path doesn't exit 3025 if not os.path.isdir(dir_path): 3026 if not mgme_dir: 3027 raise MadGraph5Error, \ 3028 "No valid MG_ME path given for MG4 run directory creation." 3029 logger.info('initialize a new directory: %s' % \ 3030 os.path.basename(dir_path)) 3031 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 3032 # distutils.dir_util.copy_tree since dir_path already exists 3033 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 3034 dir_path) 3035 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 3036 if not mgme_dir: 3037 raise MadGraph5Error, \ 3038 "No valid MG_ME path given for MG4 run directory creation." 3039 try: 3040 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 3041 except IOError: 3042 MG5_version = misc.get_pkg_info() 3043 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 3044 "5." + MG5_version['version']) 3045 3046 #Ensure that the Template is clean 3047 if clean: 3048 logger.info('remove old information in %s' % os.path.basename(dir_path)) 3049 if os.environ.has_key('MADGRAPH_BASE'): 3050 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 3051 '--web'], cwd=dir_path) 3052 else: 3053 try: 3054 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 3055 cwd=dir_path) 3056 except Exception, why: 3057 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 3058 % (os.path.basename(dir_path),why)) 3059 #Write version info 3060 MG_version = misc.get_pkg_info() 3061 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 3062 MG_version['version']) 3063 3064 # We must link the CutTools to the Library folder of the active Template 3065 self.link_CutTools(dir_path) 3066 # We must link the TIR to the Library folder of the active Template 3067 link_tir_libs=[] 3068 tir_libs=[] 3069 tir_include=[] 3070 # special for PJFry++/Golem95 3071 link_pjfry_lib="" 3072 pjfry_lib="" 3073 for tir in self.all_tir: 3074 tir_dir="%s_dir"%tir 3075 libpath=getattr(self,tir_dir) 3076 libname="lib%s.a"%tir 3077 tir_name=tir 3078 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'), 3079 libpath,libname,tir_name=tir_name) 3080 setattr(self,tir_dir,libpath) 3081 if libpath != "": 3082 if tir in ['pjfry','golem']: 3083 # Apparently it is necessary to link against the original 3084 # location of the pjfry/golem library, so it needs a special treatment. 
3085 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir)) 3086 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir)) 3087 if tir=='golem': 3088 trg_path = pjoin(os.path.dirname(libpath),'include') 3089 golem_include = misc.find_includes_path(trg_path,'.mod') 3090 if golem_include is None: 3091 logger.error( 3092 'Could not find the include directory for golem, looking in %s.\n' % str(trg_path)+ 3093 'Generation carries on but you will need to edit the include path by hand in the makefiles.') 3094 golem_include = '<Not_found_define_it_yourself>' 3095 tir_include.append('-I %s'%golem_include) 3096 else: 3097 link_tir_libs.append('-l%s'%tir) 3098 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir) 3099 3100 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 3101 cwd = os.getcwd() 3102 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3103 try: 3104 os.chdir(dirpath) 3105 except os.error: 3106 logger.error('Could not cd to directory %s' % dirpath) 3107 return 0 3108 filename = 'makefile_loop' 3109 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 3110 link_tir_libs,tir_libs,tir_include=tir_include) 3111 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 3112 dirpath = os.path.join(self.dir_path, 'Source') 3113 try: 3114 os.chdir(dirpath) 3115 except os.error: 3116 logger.error('Could not cd to directory %s' % dirpath) 3117 return 0 3118 filename = 'make_opts' 3119 calls = self.write_make_opts(writers.MakefileWriter(filename), 3120 link_tir_libs,tir_libs) 3121 # Return to original PWD 3122 os.chdir(cwd) 3123 3124 cwd = os.getcwd() 3125 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3126 try: 3127 os.chdir(dirpath) 3128 except os.error: 3129 logger.error('Could not cd to directory %s' % dirpath) 3130 return 0 3131 3132 # We add here the user-friendly MadLoop option setter. 
3133 cpfiles= ["SubProcesses/MadLoopParamReader.f", 3134 "Cards/MadLoopParams.dat", 3135 "SubProcesses/MadLoopParams.inc"] 3136 3137 for file in cpfiles: 3138 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 3139 os.path.join(self.dir_path, file)) 3140 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 3141 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 3142 'Cards', 'MadLoopParams.dat')) 3143 # write the output file 3144 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 3145 "MadLoopParams.dat")) 3146 3147 # We need minimal editing of MadLoopCommons.f 3148 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 3149 "SubProcesses","MadLoopCommons.inc")).read() 3150 writer = writers.FortranWriter(os.path.join(self.dir_path, 3151 "SubProcesses","MadLoopCommons.f")) 3152 writer.writelines(MadLoopCommon%{ 3153 'print_banner_commands':self.MadLoop_banner}) 3154 writer.close() 3155 3156 # link the files from the MODEL 3157 model_path = self.dir_path + '/Source/MODEL/' 3158 # Note that for the [real=] mode, these files are not present 3159 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')): 3160 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses') 3161 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')): 3162 ln(model_path + '/mp_coupl_same_name.inc', \ 3163 self.dir_path + '/SubProcesses') 3164 3165 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 3166 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 3167 writers.FortranWriter('cts_mpc.h'),) 3168 3169 self.copy_python_files() 3170 3171 3172 # We need to create the correct open_data for the pdf 3173 self.write_pdf_opendata() 3174 3175 3176 # Return to original PWD 3177 os.chdir(cwd)
3178
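To make the makefile bookkeeping above concrete, here is a minimal sketch (not part of the module) of the three lists that the TIR loop near the top of the routine assembles before they are handed to write_makefile_TIR and write_make_opts. The install paths and the choice of golem are illustrative assumptions only; the real values depend on which TIR libraries are configured.

    # Illustrative sketch only -- the paths and the 'golem' choice are assumptions.
    libpath = '/opt/golem95/lib'            # hypothetical TIR install location
    golem_include = '/opt/golem95/include'  # hypothetical directory holding the .mod files

    link_tir_libs = ['-L%s/ -l%s' % (libpath, 'golem')]          # '-L/opt/golem95/lib/ -lgolem'
    tir_libs      = ['%s/lib%s.$(libext)' % (libpath, 'golem')]  # '/opt/golem95/lib/libgolem.$(libext)'
    tir_include   = ['-I %s' % golem_include]                    # '-I /opt/golem95/include'

For TIR codes whose static library is instead placed in the local lib/ directory, the else branch above appends the plain '-l%s' flag and the '$(LIBDIR)lib%s.$(libext)' target.
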
3179 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
3180 """writes the V**** directory inside the P**** directories specified in 3181 dir_name""" 3182 3183 cwd = os.getcwd() 3184 3185 matrix_element = loop_matrix_element 3186 3187 # Create the MadLoop5_resources directory if not already existing 3188 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 3189 try: 3190 os.mkdir(dirpath) 3191 except os.error as error: 3192 logger.warning(error.strerror + " " + dirpath) 3193 3194 # Create the directory PN_xx_xxxxx in the specified path 3195 name = "V%s" % matrix_element.get('processes')[0].shell_string() 3196 dirpath = os.path.join(dir_name, name) 3197 3198 try: 3199 os.mkdir(dirpath) 3200 except os.error as error: 3201 logger.warning(error.strerror + " " + dirpath) 3202 3203 try: 3204 os.chdir(dirpath) 3205 except os.error: 3206 logger.error('Could not cd to directory %s' % dirpath) 3207 return 0 3208 3209 logger.info('Creating files in directory %s' % name) 3210 3211 # Extract number of external particles 3212 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3213 3214 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 3215 3216 # The born matrix element, if needed 3217 filename = 'born_matrix.f' 3218 calls = self.write_bornmatrix( 3219 writers.FortranWriter(filename), 3220 matrix_element, 3221 fortran_model) 3222 3223 filename = 'nexternal.inc' 3224 self.write_nexternal_file(writers.FortranWriter(filename), 3225 nexternal, ninitial) 3226 3227 filename = 'pmass.inc' 3228 self.write_pmass_file(writers.FortranWriter(filename), 3229 matrix_element) 3230 3231 filename = 'ngraphs.inc' 3232 self.write_ngraphs_file(writers.FortranWriter(filename), 3233 len(matrix_element.get_all_amplitudes())) 3234 3235 filename = "loop_matrix.ps" 3236 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""") 3237 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 3238 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 3239 filename, 3240 model=matrix_element.get('processes')[0].get('model'), 3241 amplitude='') 3242 logger.info("Drawing loop Feynman diagrams for " + \ 3243 matrix_element.get('processes')[0].nice_string(\ 3244 print_weighted=False)) 3245 plot.draw() 3246 3247 filename = "born_matrix.ps" 3248 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3249 get('born_diagrams'), 3250 filename, 3251 model=matrix_element.get('processes')[0].\ 3252 get('model'), 3253 amplitude='') 3254 logger.info("Generating born Feynman diagrams for " + \ 3255 matrix_element.get('processes')[0].nice_string(\ 3256 print_weighted=False)) 3257 plot.draw() 3258 3259 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 3260 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 3261 'MadLoopParams.inc','MadLoopCommons.f'] 3262 3263 for file in linkfiles: 3264 ln('../../%s' % file) 3265 3266 3267 os.system("ln -s ../../makefile_loop makefile") 3268 3269 # We should move to MadLoop5_resources directory from the SubProcesses 3270 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 3271 pjoin('..','MadLoop5_resources')) 3272 3273 linkfiles = ['mpmodule.mod'] 3274 3275 for file in linkfiles: 3276 ln('../../../lib/%s' % file) 3277 3278 # Return to original PWD 3279 os.chdir(cwd) 3280 3281 if not calls: 3282 calls = 0 3283 return calls
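
As a rough orientation, the routine above leaves behind a structure of the following shape inside each P* subprocess directory. The names are illustrative only (the V* directory is named after the shell string of the first process), and the precise set of loop matrix-element files depends on what write_loop_matrix_element_v4 writes.

    P0_.../                              (the dir_name argument; name hypothetical)
        MadLoop5_resources/
            MadLoopParams.dat            (link to the copy kept under SubProcesses)
        V0_.../                          ('V' + shell string of the first process)
            files written by write_loop_matrix_element_v4
            born_matrix.f   nexternal.inc   pmass.inc   ngraphs.inc
            loop_matrix.ps  born_matrix.ps
            coupl.inc, mp_coupl.inc, mp_coupl_same_name.inc, cts_mprec.h,
            cts_mpc.h, MadLoopParamReader.f, MadLoopParams.inc,
            MadLoopCommons.f             (links two directory levels up)
            makefile -> ../../makefile_loop
            mpmodule.mod -> ../../../lib/mpmodule.mod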
3284 3285 3286 #=============================================================================== 3287 # write_coef_specs 3288 #===============================================================================
3289 - def write_coef_specs_file(self, virt_me_list):
3290 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 3291 non-optimized mode""" 3292 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 3293 3294 replace_dict = {} 3295 replace_dict['max_lwf_size'] = 4 3296 3297 max_loop_vertex_ranks = [me.get_max_loop_vertex_rank() for me in virt_me_list] 3298 replace_dict['vertex_max_coefs'] = max(\ 3299 [q_polynomial.get_number_of_coefs_for_rank(n) 3300 for n in max_loop_vertex_ranks]) 3301 3302 IncWriter=writers.FortranWriter(filename,'w') 3303 IncWriter.writelines("""INTEGER MAXLWFSIZE 3304 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 3305 INTEGER VERTEXMAXCOEFS 3306 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 3307 % replace_dict) 3308 IncWriter.close()
3309
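For reference, with max_lwf_size fixed to 4 as above, the generated coef_specs.inc would read roughly as follows once FortranWriter has laid it out in fixed form. The value 15 used for VERTEXMAXCOEFS is only a placeholder; the actual number is whatever q_polynomial.get_number_of_coefs_for_rank returns for the largest loop-vertex rank found in virt_me_list.

          INTEGER MAXLWFSIZE
          PARAMETER (MAXLWFSIZE=4)
          INTEGER VERTEXMAXCOEFS
          PARAMETER (VERTEXMAXCOEFS=15)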