Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from distutils import dir_util 
  18  import glob 
  19  import logging 
  20  import os 
  21  import re 
  22  import shutil 
  23  import subprocess 
  24  import string 
  25  import copy 
  26  import platform 
  27   
  28  import madgraph.core.color_algebra as color 
  29  import madgraph.core.helas_objects as helas_objects 
  30  import madgraph.core.base_objects as base_objects 
  31  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  32  import madgraph.fks.fks_base as fks 
  33  import madgraph.fks.fks_common as fks_common 
  34  import madgraph.iolibs.drawing_eps as draw 
  35  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  36  import madgraph.iolibs.files as files 
  37  import madgraph.various.misc as misc 
  38  import madgraph.iolibs.file_writers as writers 
  39  import madgraph.iolibs.template_files as template_files 
  40  import madgraph.iolibs.ufo_expression_parsers as parsers 
  41  import madgraph.iolibs.export_v4 as export_v4 
  42  import madgraph.loop.loop_exporters as loop_exporters 
  43  import madgraph.various.q_polynomial as q_polynomial 
  44  import madgraph.various.banner as banner_mod 
  45   
  46  import aloha.create_aloha as create_aloha 
  47   
  48  import models.write_param_card as write_param_card 
  49  import models.check_param_card as check_param_card 
  50  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  51  from madgraph.iolibs.files import cp, ln, mv 
  52   
  53  pjoin = os.path.join 
  54   
  55  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  56  logger = logging.getLogger('madgraph.export_fks') 
  57   
  58  #================================================================================= 
   59  # Class used for the (non-optimized) loop process 
  60  #================================================================================= 
  61  class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA): 
  62      """Class to take care of exporting a set of matrix elements to 
  63      Fortran (v4) format.""" 
  64 
  65      #=============================================================================== 
  66      # copy the Template in a new directory. 
  67      #=============================================================================== 
68 - def copy_fkstemplate(self):
69 """create the directory run_name as a copy of the MadEvent 70 Template, and clean the directory 71 For now it is just the same as copy_v4template, but it will be modified 72 """ 73 mgme_dir = self.mgme_dir 74 dir_path = self.dir_path 75 clean =self.opt['clean'] 76 77 78 #First copy the full template tree if dir_path doesn't exit 79 if not os.path.isdir(dir_path): 80 if not mgme_dir: 81 raise MadGraph5Error, \ 82 "No valid MG_ME path given for MG4 run directory creation." 83 logger.info('initialize a new directory: %s' % \ 84 os.path.basename(dir_path)) 85 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 86 # distutils.dir_util.copy_tree since dir_path already exists 87 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 88 dir_path) 89 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 90 if not mgme_dir: 91 raise MadGraph5Error, \ 92 "No valid MG_ME path given for MG4 run directory creation." 93 try: 94 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 95 except IOError: 96 MG5_version = misc.get_pkg_info() 97 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 98 "5." + MG5_version['version']) 99 100 #Ensure that the Template is clean 101 if clean: 102 logger.info('remove old information in %s' % os.path.basename(dir_path)) 103 if os.environ.has_key('MADGRAPH_BASE'): 104 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 105 '--web'],cwd=dir_path) 106 else: 107 try: 108 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 109 cwd=dir_path) 110 except Exception, why: 111 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 112 % (os.path.basename(dir_path),why)) 113 #Write version info 114 MG_version = misc.get_pkg_info() 115 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 116 MG_version['version']) 117 118 # We must link the CutTools to the Library folder of the active Template 119 self.link_CutTools(dir_path) 120 121 link_tir_libs=[] 122 tir_libs=[] 123 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 124 dirpath = os.path.join(self.dir_path, 'SubProcesses') 125 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop') 126 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 127 link_tir_libs,tir_libs) 128 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 129 filename = pjoin(self.dir_path, 'Source','make_opts') 130 calls = self.write_make_opts(writers.MakefileWriter(filename), 131 link_tir_libs,tir_libs) 132 133 # Duplicate run_card and FO_analyse_card 134 for card in ['FO_analyse_card', 'shower_card']: 135 try: 136 shutil.copy(pjoin(self.dir_path, 'Cards', 137 card + '.dat'), 138 pjoin(self.dir_path, 'Cards', 139 card + '_default.dat')) 140 except IOError: 141 logger.warning("Failed to copy " + card + ".dat to default") 142 143 cwd = os.getcwd() 144 dirpath = os.path.join(self.dir_path, 'SubProcesses') 145 try: 146 os.chdir(dirpath) 147 except os.error: 148 logger.error('Could not cd to directory %s' % dirpath) 149 return 0 150 151 # We add here the user-friendly MadLoop option setter. 
152 cpfiles= ["SubProcesses/MadLoopParamReader.f", 153 "Cards/MadLoopParams.dat", 154 "SubProcesses/MadLoopParams.inc"] 155 156 for file in cpfiles: 157 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 158 os.path.join(self.dir_path, file)) 159 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 160 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 161 'Cards', 'MadLoopParams.dat')) 162 # write the output file 163 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 164 "MadLoopParams.dat")) 165 166 # We need minimal editing of MadLoopCommons.f 167 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 168 "SubProcesses","MadLoopCommons.inc")).read() 169 writer = writers.FortranWriter(os.path.join(self.dir_path, 170 "SubProcesses","MadLoopCommons.f")) 171 writer.writelines(MadLoopCommon%{ 172 'print_banner_commands':self.MadLoop_banner}) 173 writer.close() 174 175 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 176 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 177 writers.FortranWriter('cts_mpc.h')) 178 179 180 # Finally make sure to turn off MC over Hel for the default mode. 181 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat') 182 FKS_card_file = open(FKS_card_path,'r') 183 FKS_card = FKS_card_file.read() 184 FKS_card_file.close() 185 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+", 186 "#NHelForMCoverHels\n-1", FKS_card) 187 FKS_card_file = open(FKS_card_path,'w') 188 FKS_card_file.write(FKS_card) 189 FKS_card_file.close() 190 191 # Return to original PWD 192 os.chdir(cwd) 193 # Copy the different python files in the Template 194 self.copy_python_files() 195 196 # We need to create the correct open_data for the pdf 197 self.write_pdf_opendata()
 198 
 199      # Kept here rather than in the optimized exporter because the same makefile_loop.inc is used. 
 200      # Also, we overload this function (i.e. it is already defined in 
 201      # LoopProcessExporterFortranSA) because the path of the template makefile 
 202      # is different. 
 203      def write_makefile_TIR(self, writer, link_tir_libs, tir_libs, tir_include=[]): 
 204          """Create the file makefile_loop which links to the TIR libraries.""" 
 205 
 206          file = open(os.path.join(self.mgme_dir, 'Template', 'NLO', 
 207                                   'SubProcesses', 'makefile_loop.inc')).read() 
 208          replace_dict = {} 
 209          replace_dict['link_tir_libs'] = ' '.join(link_tir_libs) 
 210          replace_dict['tir_libs'] = ' '.join(tir_libs) 
 211          replace_dict['dotf'] = '%.f' 
 212          replace_dict['doto'] = '%.o' 
 213          replace_dict['tir_include'] = ' '.join(tir_include) 
 214          file = file % replace_dict 
 215          if writer: 
 216              writer.writelines(file) 
 217          else: 
 218              return file 
 219 
 220      # Kept here rather than in the optimized exporter because the same make_opts.inc is used. 
 221      def write_make_opts(self, writer, link_tir_libs, tir_libs): 
 222          """Create the file make_opts which links to the TIR libraries.""" 
 223          file = open(os.path.join(self.mgme_dir, 'Template', 'NLO', 
 224                                   'Source', 'make_opts.inc')).read() 
 225          replace_dict = {} 
 226          replace_dict['link_tir_libs'] = ' '.join(link_tir_libs) 
 227          replace_dict['tir_libs'] = ' '.join(tir_libs) 
 228          replace_dict['dotf'] = '%.f' 
 229          replace_dict['doto'] = '%.o' 
 230          file = file % replace_dict 
 231          if writer: 
 232              writer.writelines(file) 
 233          else: 
 234              return file 
 235 
 236      #=========================================================================== 
 237      # copy_python_files 
 238      #=========================================================================== 
 239      def copy_python_files(self): 
 240          """copy python files required for the Template""" 
 241 
 242          files_to_copy = [ 
 243              pjoin('interface', 'amcatnlo_run_interface.py'), 
 244              pjoin('interface', 'extended_cmd.py'), 
 245              pjoin('interface', 'common_run_interface.py'), 
 246              pjoin('interface', 'coloring_logging.py'), 
 247              pjoin('various', 'misc.py'), 
 248              pjoin('various', 'shower_card.py'), 
 249              pjoin('various', 'FO_analyse_card.py'), 
 250              pjoin('various', 'histograms.py'), 
 251              pjoin('various', 'banner.py'), 
 252              pjoin('various', 'cluster.py'), 
 253              pjoin('various', 'lhe_parser.py'), 
 254              pjoin('madevent', 'sum_html.py'), 
 255              pjoin('madevent', 'gen_crossxhtml.py'), 
 256              pjoin('iolibs', 'files.py'), 
 257              pjoin('iolibs', 'save_load_object.py'), 
 258              pjoin('iolibs', 'file_writers.py'), 
 259              pjoin('..', 'models', 'check_param_card.py'), 
 260              pjoin('__init__.py') 
 261          ] 
 262          cp(_file_path + '/interface/.mg5_logging.conf', 
 263             self.dir_path + '/bin/internal/me5_logging.conf') 
 264 
 265          for cp_file in files_to_copy: 
 266              cp(pjoin(_file_path, cp_file), 
 267                 pjoin(self.dir_path, 'bin', 'internal', os.path.basename(cp_file))) 
268
269 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 270 wanted_couplings = []):
271 272 super(ProcessExporterFortranFKS,self).convert_model_to_mg4(model, 273 wanted_lorentz, wanted_couplings) 274 275 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 276 try: 277 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 278 except OSError as error: 279 pass 280 model_path = model.get('modelpath') 281 shutil.copytree(model_path, 282 pjoin(self.dir_path,'bin','internal','ufomodel'), 283 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 284 if hasattr(model, 'restrict_card'): 285 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 286 'restrict_default.dat') 287 if isinstance(model.restrict_card, check_param_card.ParamCard): 288 model.restrict_card.write(out_path) 289 else: 290 files.cp(model.restrict_card, out_path)
 291 
 292 
 293 
 294      #=========================================================================== 
 295      # write_maxparticles_file 
 296      #=========================================================================== 
 297      def write_maxparticles_file(self, writer, matrix_elements): 
 298          """Write the maxparticles.inc file for MadEvent""" 
 299 
 300          maxparticles = max([me.get_nexternal_ninitial()[0] \ 
 301                              for me in matrix_elements['matrix_elements']]) 
 302 
 303          lines = "integer max_particles, max_branch\n" 
 304          lines += "parameter (max_particles=%d) \n" % maxparticles 
 305          lines += "parameter (max_branch=max_particles-1)" 
 306 
 307          # Write the file 
 308          writer.writelines(lines) 
 309 
 310          return True 
 311 
 312 
 313      #=========================================================================== 
 314      # write_maxconfigs_file 
 315      #=========================================================================== 
 316      def write_maxconfigs_file(self, writer, matrix_elements): 
 317          """Write the maxconfigs.inc file for MadEvent""" 
 318 
 319          try: 
 320              maxconfigs = max([me.get_num_configs() \ 
 321                                for me in matrix_elements['real_matrix_elements']]) 
 322          except ValueError: 
 323              maxconfigs = max([me.born_matrix_element.get_num_configs() \ 
 324                                for me in matrix_elements['matrix_elements']]) 
 325 
 326          lines = "integer lmaxconfigs\n" 
 327          lines += "parameter (lmaxconfigs=%d)" % maxconfigs 
 328 
 329          # Write the file 
 330          writer.writelines(lines) 
 331 
 332          return True 
 333 
 334 
 335      #=============================================================================== 
 336      # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 
 337      #=============================================================================== 
 338      def write_procdef_mg5(self, file_pos, modelname, process_str): 
 339          """Write an equivalent of the MG4 proc_card so that the MadEvent4 
 340          Perl scripts keep working properly for a pure MG5 run.""" 
 341 
 342          proc_card_template = template_files.mg4_proc_card.mg4_template 
 343          process_template = template_files.mg4_proc_card.process_template 
 344          process_text = '' 
 345          coupling = '' 
 346          new_process_content = [] 
 347 
 348          # First find the couplings and remove them from process_str. 
 349          # Make sure the couplings are defined without spaces: 
 350          process_str = process_str.replace(' =', '=') 
 351          process_str = process_str.replace('= ', '=') 
 352          process_str = process_str.replace(',', ' , ') 
 353          # Now loop over the elements and treat all the couplings 
 354          for info in process_str.split(): 
 355              if '=' in info: 
 356                  coupling += info + '\n' 
 357              else: 
 358                  new_process_content.append(info) 
 359          # Recombine the process_str (which is the input process_str without 
 360          # the coupling info) 
 361          process_str = ' '.join(new_process_content) 
 362 
 363          # Format the SubProcess 
 364          process_text += process_template.substitute({'process': process_str, \ 
 365                                                       'coupling': coupling}) 
 366 
 367          text = proc_card_template.substitute({'process': process_text, 
 368                                                'model': modelname, 
 369                                                'multiparticle': ''}) 
 370          ff = open(file_pos, 'w') 
 371          ff.write(text) 
 372          ff.close() 
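A minimal usage sketch; the exporter instance, output path and process string below are illustrative placeholders, not values taken from this module:

    exporter.write_procdef_mg5(pjoin(dir_path, 'Cards', 'proc_card.dat'),
                               'sm', 'p p > t t~ QED=0 QCD=2')
    # Tokens containing '=' are collected into the coupling block
    # ("QED=0\nQCD=2"); the remaining tokens give the process "p p > t t~".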
 373 
 374 
 375      #=============================================================================== 
 376      # write an initial-state map, useful for the fast PDF NLO interface 
 377      #=============================================================================== 
 378      def write_init_map(self, file_pos, initial_states): 
 379          """Write an initial-state process map. Each possible PDF 
 380          combination gets a unique identifier.""" 
 381 
 382          text = '' 
 383          for i, e in enumerate(initial_states): 
 384              text = text + str(i + 1) + ' ' + str(len(e)) 
 385              for t in e: 
 386                  text = text + ' ' 
 387                  try: 
 388                      for p in t: 
 389                          text = text + ' ' + str(p) 
 390                  except TypeError: 
 391                      text = text + ' ' + str(t) 
 392              text = text + '\n' 
 393 
 394          ff = open(file_pos, 'w') 
 395          ff.write(text) 
 396          ff.close() 
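A sketch of the resulting map file, for a hypothetical list of PDF combinations:

    # initial_states = [[(21, 21)], [(1, -1), (2, -2)]]   (hypothetical input)
    # produces the two lines
    #   1 1  21 21
    #   2 2  1 -1  2 -2
    # i.e. "<map id> <number of combinations>" followed by the PDG codes of each combination.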
397
 398      def get_ME_identifier(self, matrix_element, *args, **opts): 
 399          """A function returning a string uniquely identifying the matrix 
 400          element given in argument, so that it can be used as a prefix to all 
 401          MadLoop5 subroutines and common blocks related to it. This allows 
 402          several processes to be compiled into one library, as requested by the 
 403          BLHA (Binoth Les Houches Accord) guidelines. The MadFKS design 
 404          requires that there is no process prefix.""" 
 405 
 406          return '' 
 407 
 408      #=============================================================================== 
 409      # write_coef_specs 
 410      #=============================================================================== 
 411      def write_coef_specs_file(self, virt_me_list): 
 412          """writes the coef_specs.inc in the DHELAS folder. Should not be called in the 
 413          non-optimized mode""" 
 414          raise fks_common.FKSProcessError( 
 415              "write_coef_specs should be called only in the loop-optimized mode") 
416 417 418 #=============================================================================== 419 # generate_directories_fks 420 #===============================================================================
421 - def generate_directories_fks(self, matrix_element, fortran_model, me_number, 422 me_ntot, path=os.getcwd(),OLP='MadLoop'):
423 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 424 including the necessary matrix.f and various helper files""" 425 proc = matrix_element.born_matrix_element['processes'][0] 426 427 if not self.model: 428 self.model = matrix_element.get('processes')[0].get('model') 429 430 cwd = os.getcwd() 431 try: 432 os.chdir(path) 433 except OSError, error: 434 error_msg = "The directory %s should exist in order to be able " % path + \ 435 "to \"export\" in it. If you see this error message by " + \ 436 "typing the command \"export\" please consider to use " + \ 437 "instead the command \"output\". " 438 raise MadGraph5Error, error_msg 439 440 calls = 0 441 442 self.fksdirs = [] 443 #first make and cd the direcrory corresponding to the born process: 444 borndir = "P%s" % \ 445 (matrix_element.get('processes')[0].shell_string()) 446 os.mkdir(borndir) 447 os.chdir(borndir) 448 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 449 450 ## write the files corresponding to the born process in the P* directory 451 self.generate_born_fks_files(matrix_element, 452 fortran_model, me_number, path) 453 454 # With NJET you want to generate the order file per subprocess and most 455 # likely also generate it for each subproc. 456 if OLP=='NJET': 457 filename = 'OLE_order.lh' 458 self.write_lh_order(filename, matrix_element, OLP) 459 460 if matrix_element.virt_matrix_element: 461 calls += self.generate_virt_directory( \ 462 matrix_element.virt_matrix_element, \ 463 fortran_model, \ 464 os.path.join(path, borndir)) 465 466 #write the infortions for the different real emission processes 467 468 self.write_real_matrix_elements(matrix_element, fortran_model) 469 470 self.write_pdf_calls(matrix_element, fortran_model) 471 472 filename = 'nFKSconfigs.inc' 473 self.write_nfksconfigs_file(writers.FortranWriter(filename), 474 matrix_element, 475 fortran_model) 476 477 filename = 'iproc.dat' 478 self.write_iproc_file(writers.FortranWriter(filename), 479 me_number) 480 481 filename = 'fks_info.inc' 482 self.write_fks_info_file(writers.FortranWriter(filename), 483 matrix_element, 484 fortran_model) 485 486 filename = 'leshouche_info.dat' 487 nfksconfs,maxproc,maxflow,nexternal=\ 488 self.write_leshouche_info_file(filename,matrix_element) 489 490 # if no corrections are generated ([LOonly] mode), get 491 # these variables from the born 492 if nfksconfs == maxproc == maxflow == 0: 493 nfksconfs = 1 494 (dummylines, maxproc, maxflow) = self.get_leshouche_lines( 495 matrix_element.born_matrix_element, 1) 496 497 filename = 'leshouche_decl.inc' 498 self.write_leshouche_info_declarations( 499 writers.FortranWriter(filename), 500 nfksconfs,maxproc,maxflow,nexternal, 501 fortran_model) 502 503 filename = 'configs_and_props_info.dat' 504 nconfigs,max_leg_number,nfksconfs=self.write_configs_and_props_info_file( 505 filename, 506 matrix_element) 507 508 filename = 'configs_and_props_decl.inc' 509 self.write_configs_and_props_info_declarations( 510 writers.FortranWriter(filename), 511 nconfigs,max_leg_number,nfksconfs, 512 fortran_model) 513 514 filename = 'real_from_born_configs.inc' 515 self.write_real_from_born_configs( 516 writers.FortranWriter(filename), 517 matrix_element, 518 fortran_model) 519 520 filename = 'ngraphs.inc' 521 self.write_ngraphs_file(writers.FortranWriter(filename), 522 nconfigs) 523 524 #write the wrappers 525 filename = 'real_me_chooser.f' 526 self.write_real_me_wrapper(writers.FortranWriter(filename), 527 matrix_element, 528 fortran_model) 529 530 filename = 
'parton_lum_chooser.f' 531 self.write_pdf_wrapper(writers.FortranWriter(filename), 532 matrix_element, 533 fortran_model) 534 535 filename = 'get_color.f' 536 self.write_colors_file(writers.FortranWriter(filename), 537 matrix_element) 538 539 filename = 'nexternal.inc' 540 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 541 self.write_nexternal_file(writers.FortranWriter(filename), 542 nexternal, ninitial) 543 self.proc_characteristic['ninitial'] = ninitial 544 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 545 546 filename = 'pmass.inc' 547 try: 548 self.write_pmass_file(writers.FortranWriter(filename), 549 matrix_element.real_processes[0].matrix_element) 550 except IndexError: 551 self.write_pmass_file(writers.FortranWriter(filename), 552 matrix_element.born_matrix_element) 553 554 #draw the diagrams 555 self.draw_feynman_diagrams(matrix_element) 556 557 linkfiles = ['BinothLHADummy.f', 558 'check_poles.f', 559 'MCmasses_HERWIG6.inc', 560 'MCmasses_HERWIGPP.inc', 561 'MCmasses_PYTHIA6Q.inc', 562 'MCmasses_PYTHIA6PT.inc', 563 'MCmasses_PYTHIA8.inc', 564 'add_write_info.f', 565 'coupl.inc', 566 'cuts.f', 567 'FKS_params.dat', 568 'initial_states_map.dat', 569 'OLE_order.olc', 570 'FKSParams.inc', 571 'FKSParamReader.f', 572 'cuts.inc', 573 'unlops.inc', 574 'pythia_unlops.f', 575 'driver_mintMC.f', 576 'driver_mintFO.f', 577 'driver_vegas.f', 578 'appl_interface.cc', 579 'appl_interface_dummy.f', 580 'appl_common.inc', 581 'reweight_appl.inc', 582 'driver_reweight.f', 583 'fastjetfortran_madfks_core.cc', 584 'fastjetfortran_madfks_full.cc', 585 'fjcore.cc', 586 'fastjet_wrapper.f', 587 'fjcore.hh', 588 'fks_Sij.f', 589 'fks_powers.inc', 590 'fks_singular.f', 591 'veto_xsec.f', 592 'veto_xsec.inc', 593 'c_weight.inc', 594 'fks_inc_chooser.f', 595 'leshouche_inc_chooser.f', 596 'configs_and_props_inc_chooser.f', 597 'genps.inc', 598 'genps_fks.f', 599 'boostwdir2.f', 600 'madfks_mcatnlo.inc', 601 'open_output_files.f', 602 'open_output_files_dummy.f', 603 'HwU_dummy.f', 604 'madfks_plot.f', 605 'analysis_dummy.f', 606 'mint-integrator2.f', 607 'MC_integer.f', 608 'mint.inc', 609 'montecarlocounter.f', 610 'q_es.inc', 611 'recluster.cc', 612 'Boosts.h', 613 'reweight.inc', 614 'reweight0.inc', 615 'reweight1.inc', 616 'reweightNLO.inc', 617 'reweight_all.inc', 618 'reweight_events.f', 619 'reweight_xsec.f', 620 'reweight_xsec_events.f', 621 'reweight_xsec_events_pdf_dummy.f', 622 'iproc_map.f', 623 'run.inc', 624 'run_card.inc', 625 'setcuts.f', 626 'setscales.f', 627 'symmetry_fks_test_MC.f', 628 'symmetry_fks_test_ME.f', 629 'symmetry_fks_test_Sij.f', 630 'symmetry_fks_v3.f', 631 'trapfpe.c', 632 'vegas2.for', 633 'write_ajob.f', 634 'handling_lhe_events.f', 635 'write_event.f', 636 'fill_MC_mshell.f', 637 'maxparticles.inc', 638 'message.inc', 639 'initcluster.f', 640 'cluster.inc', 641 'cluster.f', 642 'reweight.f', 643 'randinit', 644 'sudakov.inc', 645 'maxconfigs.inc', 646 'timing_variables.inc'] 647 648 for file in linkfiles: 649 ln('../' + file , '.') 650 os.system("ln -s ../../Cards/param_card.dat .") 651 652 #copy the makefile 653 os.system("ln -s ../makefile_fks_dir ./makefile") 654 if matrix_element.virt_matrix_element: 655 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 656 elif OLP!='MadLoop': 657 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 658 else: 659 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 660 661 662 #import nexternal/leshouches in Source 663 ln('nexternal.inc', '../../Source', log=False) 
664 ln('born_leshouche.inc', '../../Source', log=False) 665 666 667 # Return to SubProcesses dir 668 os.chdir(os.path.pardir) 669 # Add subprocess to subproc.mg 670 filename = 'subproc.mg' 671 files.append_to_file(filename, 672 self.write_subproc, 673 borndir) 674 675 676 os.chdir(cwd) 677 # Generate info page 678 gen_infohtml.make_info_html_nlo(self.dir_path) 679 680 681 return calls
 682 
 683      #=========================================================================== 
 684      # create the run_card 
 685      #=========================================================================== 
 686      def create_run_card(self, matrix_elements, history): 
 687          """Create the run_card (and its default copy) in the Cards directory.""" 
 688 
 689          run_card = banner_mod.RunCardNLO() 
 690 
 691          processes = [me.get('processes') 
 692                       for me in matrix_elements['matrix_elements']] 
 693 
 694          run_card.create_default_for_process(self.proc_characteristic, 
 695                                              history, 
 696                                              processes) 
 697 
 698          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 
 699          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat')) 
700 701
702 - def finalize_fks_directory(self, matrix_elements, history, makejpg = False, 703 online = False, 704 compiler_dict={'fortran': 'gfortran', 'cpp': 'g++'}, 705 output_dependencies = 'external', MG5DIR = None):
706 """Finalize FKS directory by creating jpeg diagrams, html 707 pages,proc_card_mg5.dat and madevent.tar.gz.""" 708 709 self.proc_characteristic['grouped_matrix'] = False 710 self.create_proc_charac() 711 712 self.create_run_card(matrix_elements, history) 713 # modelname = self.model.get('name') 714 # if modelname == 'mssm' or modelname.startswith('mssm-'): 715 # param_card = os.path.join(self.dir_path, 'Cards','param_card.dat') 716 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 717 # check_param_card.convert_to_mg5card(param_card, mg5_param) 718 # check_param_card.check_valid_param_card(mg5_param) 719 720 # # write the model functions get_mass/width_from_id 721 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f') 722 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc') 723 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model) 724 725 # # Write maxconfigs.inc based on max of ME's/subprocess groups 726 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 727 self.write_maxconfigs_file(writers.FortranWriter(filename), 728 matrix_elements) 729 730 # # Write maxparticles.inc based on max of ME's/subprocess groups 731 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 732 self.write_maxparticles_file(writers.FortranWriter(filename), 733 matrix_elements) 734 735 # Touch "done" file 736 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 737 738 # Check for compiler 739 fcompiler_chosen = self.set_fortran_compiler(compiler_dict) 740 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 741 742 old_pos = os.getcwd() 743 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 744 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 745 proc[0] == 'P'] 746 747 devnull = os.open(os.devnull, os.O_RDWR) 748 # Convert the poscript in jpg files (if authorize) 749 if makejpg: 750 logger.info("Generate jpeg diagrams") 751 for Pdir in P_dir_list: 752 os.chdir(Pdir) 753 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 754 stdout = devnull) 755 os.chdir(os.path.pardir) 756 # 757 logger.info("Generate web pages") 758 # Create the WebPage using perl script 759 760 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 761 stdout = devnull) 762 763 os.chdir(os.path.pardir) 764 # 765 # obj = gen_infohtml.make_info_html(self.dir_path) 766 # [mv(name, './HTML/') for name in os.listdir('.') if \ 767 # (name.endswith('.html') or name.endswith('.jpg')) and \ 768 # name != 'index.html'] 769 # if online: 770 # nb_channel = obj.rep_rule['nb_gen_diag'] 771 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 772 773 # Write command history as proc_card_mg5 774 if os.path.isdir('Cards'): 775 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 776 history.write(output_file) 777 778 # Duplicate run_card and FO_analyse_card 779 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 780 try: 781 shutil.copy(pjoin(self.dir_path, 'Cards', 782 card + '.dat'), 783 pjoin(self.dir_path, 'Cards', 784 card + '_default.dat')) 785 except IOError: 786 logger.warning("Failed to copy " + card + ".dat to default") 787 788 789 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 790 stdout = devnull) 791 792 # Run "make" to generate madevent.tar.gz file 793 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 794 if 
os.path.exists('amcatnlo.tar.gz'): 795 os.remove('amcatnlo.tar.gz') 796 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 797 stdout = devnull) 798 # 799 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 800 stdout = devnull) 801 802 #return to the initial dir 803 os.chdir(old_pos) 804 805 # Setup stdHep 806 # Find the correct fortran compiler 807 base_compiler= ['FC=g77','FC=gfortran'] 808 809 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 810 811 if output_dependencies == 'external': 812 # check if stdhep has to be compiled (only the first time) 813 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 814 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')): 815 if 'FC' not in os.environ or not os.environ['FC']: 816 path = os.path.join(StdHep_path, 'src', 'make_opts') 817 text = open(path).read() 818 for base in base_compiler: 819 text = text.replace(base,'FC=%s' % fcompiler_chosen) 820 open(path, 'w').writelines(text) 821 822 logger.info('Compiling StdHEP. This has to be done only once.') 823 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 824 logger.info('Done.') 825 #then link the libraries in the exported dir 826 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 827 pjoin(self.dir_path, 'MCatNLO', 'lib')) 828 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 829 pjoin(self.dir_path, 'MCatNLO', 'lib')) 830 831 elif output_dependencies == 'internal': 832 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 833 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 834 # Create the links to the lib folder 835 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 836 for file in linkfiles: 837 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 838 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 839 if 'FC' not in os.environ or not os.environ['FC']: 840 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 841 text = open(path).read() 842 for base in base_compiler: 843 text = text.replace(base,'FC=%s' % fcompiler_chosen) 844 open(path, 'w').writelines(text) 845 # To avoid compiler version conflicts, we force a clean here 846 misc.compile(['clean'],cwd = StdHEP_internal_path) 847 848 elif output_dependencies == 'environment_paths': 849 # Here the user chose to define the dependencies path in one of 850 # his environmental paths 851 libStdHep = misc.which_lib('libstdhep.a') 852 libFmcfio = misc.which_lib('libFmcfio.a') 853 if not libStdHep is None and not libFmcfio is None: 854 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 855 os.path.dirname(libStdHep)) 856 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 857 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 858 else: 859 raise InvalidCmd("Could not find the location of the files"+\ 860 " libstdhep.a and libFmcfio.a in you environment paths.") 861 862 else: 863 raise MadGraph5Error, 'output_dependencies option %s not recognized'\ 864 %output_dependencies
865 866
867 - def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
868 """Writes the real_from_born_configs.inc file that contains 869 the mapping to go for a given born configuration (that is used 870 e.g. in the multi-channel phase-space integration to the 871 corresponding real-emission diagram, i.e. the real emission 872 diagram in which the combined ij is split in i_fks and 873 j_fks.""" 874 lines=[] 875 lines2=[] 876 max_links=0 877 born_me=matrix_element.born_matrix_element 878 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 879 iFKS=iFKS+1 880 links=conf['fks_info']['rb_links'] 881 max_links=max(max_links,len(links)) 882 for i,diags in enumerate(links): 883 if not i == diags['born_conf']: 884 print links 885 raise MadGraph5Error, "born_conf should be canonically ordered" 886 real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links]) 887 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \ 888 % (iFKS,len(links),real_configs)) 889 890 lines2.append("integer irfbc") 891 lines2.append("integer real_from_born_conf(%d,%d)" \ 892 % (max_links,len(matrix_element.get_fks_info_list()))) 893 # Write the file 894 writer.writelines(lines2+lines)
895 896 897 #=============================================================================== 898 # write_get_mass_width_file 899 #=============================================================================== 900 #test written
901 - def write_get_mass_width_file(self, writer, makeinc, model):
902 """Write the get_mass_width_file.f file for MG4. 903 Also update the makeinc.inc file 904 """ 905 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero'] 906 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero'] 907 908 iflines_mass = '' 909 iflines_width = '' 910 911 for i, part in enumerate(mass_particles): 912 if i == 0: 913 ifstring = 'if' 914 else: 915 ifstring = 'else if' 916 if part['self_antipart']: 917 iflines_mass += '%s (id.eq.%d) then\n' % \ 918 (ifstring, part.get_pdg_code()) 919 else: 920 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 921 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 922 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass') 923 924 for i, part in enumerate(width_particles): 925 if i == 0: 926 ifstring = 'if' 927 else: 928 ifstring = 'else if' 929 if part['self_antipart']: 930 iflines_width += '%s (id.eq.%d) then\n' % \ 931 (ifstring, part.get_pdg_code()) 932 else: 933 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 934 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 935 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width') 936 937 replace_dict = {'iflines_mass' : iflines_mass, 938 'iflines_width' : iflines_width} 939 940 file = open(os.path.join(_file_path, \ 941 'iolibs/template_files/get_mass_width_fcts.inc')).read() 942 file = file % replace_dict 943 944 # Write the file 945 writer.writelines(file) 946 947 # update the makeinc 948 makeinc_content = open(makeinc).read() 949 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ') 950 open(makeinc, 'w').write(makeinc_content) 951 952 return
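For orientation, a sketch of the Fortran branches this generates; the PDG code 6 and the mass parameter name MT are model-dependent examples, not values fixed by this module:

    # For a particle with pdg code 6, antiparticle -6 and mass parameter 'MT',
    # iflines_mass would contain
    #   if (id.eq.6.or.id.eq.-6) then
    #   get_mass_from_id=abs(MT)
    # with 'else if' used for every particle after the first one.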
953 954
955 - def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model):
956 """writes the declarations for the variables relevant for configs_and_props 957 """ 958 lines = [] 959 lines.append("integer ifr,lmaxconfigs_used,max_branch_used") 960 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig) 961 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number) 962 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs) 963 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 964 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 965 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 966 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 967 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 968 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 969 970 writer.writelines(lines)
971 972
973 - def write_configs_and_props_info_file(self, filename, matrix_element):
974 """writes the configs_and_props_info.inc file that cointains 975 all the (real-emission) configurations (IFOREST) as well as 976 the masses and widths of intermediate particles""" 977 lines = [] 978 lines.append("# C -> MAPCONFIG_D") 979 lines.append("# F/D -> IFOREST_D") 980 lines.append("# S -> SPROP_D") 981 lines.append("# T -> TPRID_D") 982 lines.append("# M -> PMASS_D/PWIDTH_D") 983 lines.append("# P -> POW_D") 984 lines2 = [] 985 nconfs = len(matrix_element.get_fks_info_list()) 986 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 987 988 max_iconfig=0 989 max_leg_number=0 990 991 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 992 iFKS=iFKS+1 993 iconfig = 0 994 s_and_t_channels = [] 995 mapconfigs = [] 996 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element 997 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams') 998 model = fks_matrix_element.get('base_amplitude').get('process').get('model') 999 minvert = min([max([len(vert.get('legs')) for vert in \ 1000 diag.get('vertices')]) for diag in base_diagrams]) 1001 1002 lines.append("# ") 1003 lines.append("# nFKSprocess %d" % iFKS) 1004 for idiag, diag in enumerate(base_diagrams): 1005 if any([len(vert.get('legs')) > minvert for vert in 1006 diag.get('vertices')]): 1007 # Only 3-vertices allowed in configs.inc 1008 continue 1009 iconfig = iconfig + 1 1010 helas_diag = fks_matrix_element.get('diagrams')[idiag] 1011 mapconfigs.append(helas_diag.get('number')) 1012 lines.append("# Diagram %d for nFKSprocess %d" % \ 1013 (helas_diag.get('number'),iFKS)) 1014 # Correspondance between the config and the amplitudes 1015 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1016 helas_diag.get('number'))) 1017 1018 # Need to reorganize the topology so that we start with all 1019 # final state external particles and work our way inwards 1020 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1021 get_s_and_t_channels(ninitial, model, 990) 1022 1023 s_and_t_channels.append([schannels, tchannels]) 1024 1025 # Write out propagators for s-channel and t-channel vertices 1026 allchannels = schannels 1027 if len(tchannels) > 1: 1028 # Write out tchannels only if there are any non-trivial ones 1029 allchannels = schannels + tchannels 1030 1031 for vert in allchannels: 1032 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1033 last_leg = vert.get('legs')[-1] 1034 lines.append("F %4d %4d %4d %4d" % \ 1035 (iFKS,last_leg.get('number'), iconfig, len(daughters))) 1036 for d in daughters: 1037 lines.append("D %4d" % d) 1038 if vert in schannels: 1039 lines.append("S %4d %4d %4d %10d" % \ 1040 (iFKS,last_leg.get('number'), iconfig, 1041 last_leg.get('id'))) 1042 elif vert in tchannels[:-1]: 1043 lines.append("T %4d %4d %4d %10d" % \ 1044 (iFKS,last_leg.get('number'), iconfig, 1045 abs(last_leg.get('id')))) 1046 1047 # update what the array sizes (mapconfig,iforest,etc) will be 1048 max_leg_number = min(max_leg_number,last_leg.get('number')) 1049 max_iconfig = max(max_iconfig,iconfig) 1050 1051 # Write out number of configs 1052 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1053 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1054 1055 # write the props.inc information 1056 lines2.append("# ") 1057 particle_dict = fks_matrix_element.get('processes')[0].get('model').\ 1058 get('particle_dict') 1059 1060 for iconf, configs in enumerate(s_and_t_channels): 1061 for vertex in configs[0] + configs[1][:-1]: 1062 leg = vertex.get('legs')[-1] 1063 if 
leg.get('id') not in particle_dict: 1064 # Fake propagator used in multiparticle vertices 1065 pow_part = 0 1066 else: 1067 particle = particle_dict[leg.get('id')] 1068 1069 pow_part = 1 + int(particle.is_boson()) 1070 1071 lines2.append("M %4d %4d %4d %10d " % \ 1072 (iFKS,leg.get('number'), iconf + 1, leg.get('id'))) 1073 lines2.append("P %4d %4d %4d %4d " % \ 1074 (iFKS,leg.get('number'), iconf + 1, pow_part)) 1075 1076 # Write the file 1077 open(filename,'w').write('\n'.join(lines+lines2)) 1078 1079 return max_iconfig, max_leg_number, nconfs
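A sketch of the record format written above (all numerical values are illustrative):

    # C    1    1    1             config 1 of nFKSprocess 1 maps to amplitude 1
    # F    1   -1    1    2        internal leg -1 of config 1 has 2 daughters
    # D    4
    # D    5
    # S    1   -1    1         23  s-channel propagator with PDG id 23
    # M    1   -1    1         23  mass/width entry for the same propagator
    # P    1   -1    1    2        propagator power (2 for bosons, 1 for fermions)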
1080 1081
1082      def write_leshouche_info_declarations(self, writer, nfksconfs, 
1083                                            maxproc, maxflow, nexternal, fortran_model): 
1084          """writes the declarations for the variables relevant for leshouche_info 
1085          """ 
1086          lines = [] 
1087          lines.append('integer maxproc_used, maxflow_used') 
1088          lines.append('parameter (maxproc_used = %d)' % maxproc) 
1089          lines.append('parameter (maxflow_used = %d)' % maxflow) 
1090          lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal)) 
1091          lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal)) 
1092          lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal)) 
1093 
1094          writer.writelines(lines) 
1095 1096
1097 - def write_leshouche_info_file(self, filename, matrix_element):
1098 """writes the leshouche_info.inc file which contains 1099 the LHA informations for all the real emission processes 1100 """ 1101 lines = [] 1102 lines.append("# I -> IDUP_D") 1103 lines.append("# M -> MOTHUP_D") 1104 lines.append("# C -> ICOLUP_D") 1105 nfksconfs = len(matrix_element.get_fks_info_list()) 1106 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1107 1108 maxproc = 0 1109 maxflow = 0 1110 for i, conf in enumerate(matrix_element.get_fks_info_list()): 1111 # for i, real in enumerate(matrix_element.real_processes): 1112 (newlines, nprocs, nflows) = self.get_leshouche_lines( 1113 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1) 1114 lines.extend(newlines) 1115 maxproc = max(maxproc, nprocs) 1116 maxflow = max(maxflow, nflows) 1117 1118 # Write the file 1119 open(filename,'w').write('\n'.join(lines)) 1120 1121 return nfksconfs, maxproc, maxflow, nexternal
1122 1123
1124 - def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
1125 """writes the wrapper which allows to chose among the different real matrix elements""" 1126 1127 file = \ 1128 """double precision function dlum() 1129 implicit none 1130 include 'timing_variables.inc' 1131 integer nfksprocess 1132 common/c_nfksprocess/nfksprocess 1133 call cpu_time(tbefore) 1134 """ 1135 if matrix_element.real_processes: 1136 for n, info in enumerate(matrix_element.get_fks_info_list()): 1137 file += \ 1138 """if (nfksprocess.eq.%(n)d) then 1139 call dlum_%(n_me)d(dlum) 1140 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1141 file += \ 1142 """ 1143 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess 1144 stop 1145 endif 1146 call cpu_time(tAfter) 1147 tPDF = tPDF + (tAfter-tBefore) 1148 return 1149 end 1150 """ 1151 else: 1152 file+= \ 1153 """call dlum_0(dlum) 1154 call cpu_time(tAfter) 1155 tPDF = tPDF + (tAfter-tBefore) 1156 return 1157 end 1158 """ 1159 1160 # Write the file 1161 writer.writelines(file) 1162 return 0
1163 1164
1165 - def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
1166 """writes the wrapper which allows to chose among the different real matrix elements""" 1167 1168 file = \ 1169 """subroutine smatrix_real(p, wgt) 1170 implicit none 1171 include 'nexternal.inc' 1172 double precision p(0:3, nexternal) 1173 double precision wgt 1174 integer nfksprocess 1175 common/c_nfksprocess/nfksprocess 1176 """ 1177 for n, info in enumerate(matrix_element.get_fks_info_list()): 1178 file += \ 1179 """if (nfksprocess.eq.%(n)d) then 1180 call smatrix_%(n_me)d(p, wgt) 1181 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1182 1183 if matrix_element.real_processes: 1184 file += \ 1185 """ 1186 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess 1187 stop 1188 endif 1189 return 1190 end 1191 """ 1192 else: 1193 file += \ 1194 """ 1195 wgt=0d0 1196 return 1197 end 1198 """ 1199 # Write the file 1200 writer.writelines(file) 1201 return 0
1202 1203
1204 - def draw_feynman_diagrams(self, matrix_element):
1205 """Create the ps files containing the feynman diagrams for the born process, 1206 as well as for all the real emission processes""" 1207 1208 filename = 'born.ps' 1209 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\ 1210 get('base_amplitude').get('diagrams'), 1211 filename, 1212 model=matrix_element.born_matrix_element.\ 1213 get('processes')[0].get('model'), 1214 amplitude=True, diagram_type='born') 1215 plot.draw() 1216 1217 for n, fksreal in enumerate(matrix_element.real_processes): 1218 filename = 'matrix_%d.ps' % (n + 1) 1219 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\ 1220 get('base_amplitude').get('diagrams'), 1221 filename, 1222 model=fksreal.matrix_element.\ 1223 get('processes')[0].get('model'), 1224 amplitude=True, diagram_type='real') 1225 plot.draw()
1226 1227
1228      def write_real_matrix_elements(self, matrix_element, fortran_model): 
1229          """writes the matrix_i.f files which contain the real matrix elements""" 
1230 
1231 
1232 
1233          for n, fksreal in enumerate(matrix_element.real_processes): 
1234              filename = 'matrix_%d.f' % (n + 1) 
1235              self.write_matrix_element_fks(writers.FortranWriter(filename), 
1236                                            fksreal.matrix_element, n + 1, 
1237                                            fortran_model) 
1238
1239      def write_pdf_calls(self, matrix_element, fortran_model): 
1240          """writes the parton_lum_i.f files which contain the parton luminosities 
1241          for the real emission processes. If no real emission exists, write the one for the born""" 
1242 
1243          if matrix_element.real_processes: 
1244              for n, fksreal in enumerate(matrix_element.real_processes): 
1245                  filename = 'parton_lum_%d.f' % (n + 1) 
1246                  self.write_pdf_file(writers.FortranWriter(filename), 
1247                                      fksreal.matrix_element, n + 1, 
1248                                      fortran_model) 
1249          else: 
1250              filename = 'parton_lum_0.f' 
1251              self.write_pdf_file(writers.FortranWriter(filename), 
1252                                  matrix_element.born_matrix_element, 0, 
1253                                  fortran_model) 
1254 1255
1256 - def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
1257 """generates the files needed for the born amplitude in the P* directory, which will 1258 be needed by the P* directories""" 1259 pathdir = os.getcwd() 1260 1261 filename = 'born.f' 1262 calls_born, ncolor_born = \ 1263 self.write_born_fks(writers.FortranWriter(filename),\ 1264 matrix_element, 1265 fortran_model) 1266 1267 filename = 'born_hel.f' 1268 self.write_born_hel(writers.FortranWriter(filename),\ 1269 matrix_element, 1270 fortran_model) 1271 1272 1273 filename = 'born_conf.inc' 1274 nconfigs, mapconfigs, s_and_t_channels = \ 1275 self.write_configs_file( 1276 writers.FortranWriter(filename), 1277 matrix_element.born_matrix_element, 1278 fortran_model) 1279 1280 filename = 'born_props.inc' 1281 self.write_props_file(writers.FortranWriter(filename), 1282 matrix_element.born_matrix_element, 1283 fortran_model, 1284 s_and_t_channels) 1285 1286 filename = 'born_decayBW.inc' 1287 self.write_decayBW_file(writers.FortranWriter(filename), 1288 s_and_t_channels) 1289 1290 filename = 'born_leshouche.inc' 1291 nflows = self.write_leshouche_file(writers.FortranWriter(filename), 1292 matrix_element.born_matrix_element, 1293 fortran_model) 1294 1295 filename = 'born_nhel.inc' 1296 self.write_born_nhel_file(writers.FortranWriter(filename), 1297 matrix_element.born_matrix_element, nflows, 1298 fortran_model, 1299 ncolor_born) 1300 1301 filename = 'born_ngraphs.inc' 1302 self.write_ngraphs_file(writers.FortranWriter(filename), 1303 matrix_element.born_matrix_element.get_number_of_amplitudes()) 1304 1305 filename = 'ncombs.inc' 1306 self.write_ncombs_file(writers.FortranWriter(filename), 1307 matrix_element.born_matrix_element, 1308 fortran_model) 1309 1310 filename = 'born_maxamps.inc' 1311 maxamps = len(matrix_element.get('diagrams')) 1312 maxflows = ncolor_born 1313 self.write_maxamps_file(writers.FortranWriter(filename), 1314 maxamps, 1315 maxflows, 1316 max([len(matrix_element.get('processes')) for me in \ 1317 matrix_element.born_matrix_element]),1) 1318 1319 filename = 'config_subproc_map.inc' 1320 self.write_config_subproc_map_file(writers.FortranWriter(filename), 1321 s_and_t_channels) 1322 1323 filename = 'coloramps.inc' 1324 self.write_coloramps_file(writers.FortranWriter(filename), 1325 mapconfigs, 1326 matrix_element.born_matrix_element, 1327 fortran_model) 1328 1329 #write the sborn_sf.f and the b_sf_files 1330 filename = ['sborn_sf.f', 'sborn_sf_dum.f'] 1331 for i, links in enumerate([matrix_element.color_links, []]): 1332 self.write_sborn_sf(writers.FortranWriter(filename[i]), 1333 links, 1334 fortran_model) 1335 self.color_link_files = [] 1336 for i in range(len(matrix_element.color_links)): 1337 filename = 'b_sf_%3.3d.f' % (i + 1) 1338 self.color_link_files.append(filename) 1339 self.write_b_sf_fks(writers.FortranWriter(filename), 1340 matrix_element, i, 1341 fortran_model)
1342
1343 - def generate_virtuals_from_OLP(self,FKSHMultiproc,export_path, OLP):
1344 """Generates the library for computing the loop matrix elements 1345 necessary for this process using the OLP specified.""" 1346 1347 # Start by writing the BLHA order file 1348 virtual_path = pjoin(export_path,'OLP_virtuals') 1349 if not os.path.exists(virtual_path): 1350 os.makedirs(virtual_path) 1351 filename = os.path.join(virtual_path,'OLE_order.lh') 1352 self.write_lh_order(filename, FKSHMultiproc.get('matrix_elements'),OLP) 1353 1354 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\ 1355 'Please check the virt_generation.log file in %s.'\ 1356 %str(pjoin(virtual_path,'virt_generation.log')) 1357 1358 # Perform some tasks specific to certain OLP's 1359 if OLP=='GoSam': 1360 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1361 'GoSam','makevirt'),pjoin(virtual_path,'makevirt')) 1362 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1363 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc')) 1364 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path) 1365 # Now generate the process 1366 logger.info('Generating the loop matrix elements with %s...'%OLP) 1367 virt_generation_log = \ 1368 open(pjoin(virtual_path,'virt_generation.log'), 'w') 1369 retcode = subprocess.call(['./makevirt'],cwd=virtual_path, 1370 stdout=virt_generation_log, stderr=virt_generation_log) 1371 virt_generation_log.close() 1372 # Check what extension is used for the share libraries on this system 1373 possible_other_extensions = ['so','dylib'] 1374 shared_lib_ext='so' 1375 for ext in possible_other_extensions: 1376 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib', 1377 'libgolem_olp.'+ext)): 1378 shared_lib_ext = ext 1379 1380 # Now check that everything got correctly generated 1381 files_to_check = ['olp_module.mod',str(pjoin('lib', 1382 'libgolem_olp.'+shared_lib_ext))] 1383 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path, 1384 'Virtuals',f)) for f in files_to_check]): 1385 raise fks_common.FKSProcessError(fail_msg) 1386 # link the library to the lib folder 1387 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext), 1388 pjoin(export_path,'lib')) 1389 1390 # Specify in make_opts the right library necessitated by the OLP 1391 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read() 1392 make_opts=open(pjoin(export_path,'Source','make_opts'),'w') 1393 if OLP=='GoSam': 1394 if platform.system().lower()=='darwin': 1395 # On mac the -rpath is not supported and the path of the dynamic 1396 # library is automatically wired in the executable 1397 make_opts_content=make_opts_content.replace('libOLP=', 1398 'libOLP=-Wl,-lgolem_olp') 1399 else: 1400 # On other platforms the option , -rpath= path to libgolem.so is necessary 1401 # Using a relative path is not ideal because the file libgolem.so is not 1402 # copied on the worker nodes. 1403 # make_opts_content=make_opts_content.replace('libOLP=', 1404 # 'libOLP=-Wl,-rpath=../$(LIBDIR) -lgolem_olp') 1405 # Using the absolute path is working in the case where the disk of the 1406 # front end machine is mounted on all worker nodes as well. 
1407 make_opts_content=make_opts_content.replace('libOLP=', 1408 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp') 1409 1410 1411 make_opts.write(make_opts_content) 1412 make_opts.close() 1413 1414 # A priori this is generic to all OLP's 1415 1416 # Parse the contract file returned and propagate the process label to 1417 # the include of the BinothLHA.f file 1418 proc_to_label = self.parse_contract_file( 1419 pjoin(virtual_path,'OLE_order.olc')) 1420 1421 self.write_BinothLHA_inc(FKSHMultiproc,proc_to_label,\ 1422 pjoin(export_path,'SubProcesses')) 1423 1424 # Link the contract file to within the SubProcess directory 1425 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1426
1427 - def write_BinothLHA_inc(self, FKSHMultiproc, proc_to_label, SubProcPath):
1428 """ Write the file Binoth_proc.inc in each SubProcess directory so as 1429 to provide the right process_label to use in the OLP call to get the 1430 loop matrix element evaluation. The proc_to_label is the dictionary of 1431 the format of the one returned by the function parse_contract_file.""" 1432 1433 for matrix_element in FKSHMultiproc.get('matrix_elements'): 1434 proc = matrix_element.get('processes')[0] 1435 name = "P%s"%proc.shell_string() 1436 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \ 1437 not leg.get('state')]), 1438 tuple([leg.get('id') for leg in proc.get('legs') if \ 1439 leg.get('state')])) 1440 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w') 1441 try: 1442 incFile.write( 1443 """ INTEGER PROC_LABEL 1444 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs])) 1445 except KeyError: 1446 raise fks_common.FKSProcessError('Could not found the target'+\ 1447 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\ 1448 ' the proc_to_label argument in write_BinothLHA_inc.') 1449 incFile.close()
1450
1451 - def parse_contract_file(self, contract_file_path):
1452 """ Parses the BLHA contract file, make sure all parameters could be 1453 understood by the OLP and return a mapping of the processes (characterized 1454 by the pdg's of the initial and final state particles) to their process 1455 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 1456 """ 1457 1458 proc_def_to_label = {} 1459 1460 if not os.path.exists(contract_file_path): 1461 raise fks_common.FKSProcessError('Could not find the contract file'+\ 1462 ' OLE_order.olc in %s.'%str(contract_file_path)) 1463 1464 comment_re=re.compile(r"^\s*#") 1465 proc_def_re=re.compile( 1466 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+ 1467 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 1468 line_OK_re=re.compile(r"^.*\|\s*OK") 1469 for line in file(contract_file_path): 1470 # Ignore comments 1471 if not comment_re.match(line) is None: 1472 continue 1473 # Check if it is a proc definition line 1474 proc_def = proc_def_re.match(line) 1475 if not proc_def is None: 1476 if int(proc_def.group('proc_class'))!=1: 1477 raise fks_common.FKSProcessError( 1478 'aMCatNLO can only handle loop processes generated by the OLP which have only '+\ 1479 ' process class attribute. Found %s instead in: \n%s'\ 1480 %(proc_def.group('proc_class'),line)) 1481 in_pdgs=tuple([int(in_pdg) for in_pdg in \ 1482 proc_def.group('in_pdgs').split()]) 1483 out_pdgs=tuple([int(out_pdg) for out_pdg in \ 1484 proc_def.group('out_pdgs').split()]) 1485 proc_def_to_label[(in_pdgs,out_pdgs)]=\ 1486 int(proc_def.group('proc_label')) 1487 continue 1488 # For the other types of line, just make sure they end with | OK 1489 if line_OK_re.match(line) is None: 1490 raise fks_common.FKSProcessError( 1491 'The OLP could not process the following line: \n%s'%line) 1492 1493 return proc_def_to_label
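A sketch of the kind of contract line this accepts and the mapping entry it yields; the PDG codes and the label are illustrative:

    #   21 21 -> 6 -6 | 1 23
    # matches with proc_class = 1 and proc_label = 23 and adds the entry
    #   {((21, 21), (6, -6)): 23}
    # Any other non-comment line has to carry the "| OK" flag to be accepted.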
1494 1495
1496 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1497 """writes the V**** directory inside the P**** directories specified in 1498 dir_name""" 1499 1500 cwd = os.getcwd() 1501 1502 matrix_element = loop_matrix_element 1503 1504 # Create the MadLoop5_resources directory if not already existing 1505 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 1506 try: 1507 os.mkdir(dirpath) 1508 except os.error as error: 1509 logger.warning(error.strerror + " " + dirpath) 1510 1511 # Create the directory PN_xx_xxxxx in the specified path 1512 name = "V%s" % matrix_element.get('processes')[0].shell_string() 1513 dirpath = os.path.join(dir_name, name) 1514 1515 try: 1516 os.mkdir(dirpath) 1517 except os.error as error: 1518 logger.warning(error.strerror + " " + dirpath) 1519 1520 try: 1521 os.chdir(dirpath) 1522 except os.error: 1523 logger.error('Could not cd to directory %s' % dirpath) 1524 return 0 1525 1526 logger.info('Creating files in directory %s' % name) 1527 1528 # Extract number of external particles 1529 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1530 1531 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 1532 # The born matrix element, if needed 1533 filename = 'born_matrix.f' 1534 calls = self.write_bornmatrix( 1535 writers.FortranWriter(filename), 1536 matrix_element, 1537 fortran_model) 1538 1539 filename = 'nexternal.inc' 1540 self.write_nexternal_file(writers.FortranWriter(filename), 1541 nexternal, ninitial) 1542 1543 filename = 'pmass.inc' 1544 self.write_pmass_file(writers.FortranWriter(filename), 1545 matrix_element) 1546 1547 filename = 'ngraphs.inc' 1548 self.write_ngraphs_file(writers.FortranWriter(filename), 1549 len(matrix_element.get_all_amplitudes())) 1550 1551 filename = "loop_matrix.ps" 1552 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 1553 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 1554 filename, 1555 model=matrix_element.get('processes')[0].get('model'), 1556 amplitude='') 1557 logger.info("Drawing loop Feynman diagrams for " + \ 1558 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1559 plot.draw() 1560 1561 filename = "born_matrix.ps" 1562 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1563 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\ 1564 get('model'),amplitude='') 1565 logger.info("Generating born Feynman diagrams for " + \ 1566 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1567 plot.draw() 1568 1569 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 1570 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 1571 'MadLoopCommons.f','MadLoopParams.inc'] 1572 1573 # We should move to MadLoop5_resources directory from the SubProcesses 1574 1575 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 1576 pjoin('..','MadLoop5_resources')) 1577 1578 for file in linkfiles: 1579 ln('../../%s' % file) 1580 1581 os.system("ln -s ../../makefile_loop makefile") 1582 1583 linkfiles = ['mpmodule.mod'] 1584 1585 for file in linkfiles: 1586 ln('../../../lib/%s' % file) 1587 1588 # Return to original PWD 1589 os.chdir(cwd) 1590 1591 if not calls: 1592 calls = 0 1593 return calls
1594
1595 - def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
1596 """computes the QED/QCD orders from the knowledge of the n of ext particles 1597 and of the weighted orders""" 1598 # n vertices = nexternal - 2 =QED + QCD 1599 # weighted = 2*QED + QCD 1600 QED = weighted - nexternal + 2 1601 QCD = weighted - 2 * QED 1602 return QED, QCD
1603 1604 1605 1606 #=============================================================================== 1607 # write_lh_order 1608 #=============================================================================== 1609 #test written
1610 - def write_lh_order(self, filename, matrix_elements, OLP='MadLoop'):
1611 """Creates the OLE_order.lh file. This function should be edited according 1612 to the OLP which is used. For now it is generic.""" 1613 1614 if isinstance(matrix_elements,fks_helas_objects.FKSHelasProcess): 1615 fksborns=fks_helas_objects.FKSHelasProcessList([matrix_elements]) 1616 elif isinstance(matrix_elements,fks_helas_objects.FKSHelasProcessList): 1617 fksborns= matrix_elements 1618 else: 1619 raise fks_common.FKSProcessError('Wrong type of argument for '+\ 1620 'matrix_elements in function write_lh_order.') 1621 1622 if len(fksborns)==0: 1623 raise fks_common.FKSProcessError('No matrix elements provided to '+\ 1624 'the function write_lh_order.') 1625 return 1626 1627 # We assume the orders to be common to all Subprocesses 1628 1629 orders = fksborns[0].orders 1630 if 'QED' in orders.keys() and 'QCD' in orders.keys(): 1631 QED=orders['QED'] 1632 QCD=orders['QCD'] 1633 elif 'QED' in orders.keys(): 1634 QED=orders['QED'] 1635 QCD=0 1636 elif 'QCD' in orders.keys(): 1637 QED=0 1638 QCD=orders['QCD'] 1639 else: 1640 QED, QCD = self.get_qed_qcd_orders_from_weighted(\ 1641 fksborns[0].get_nexternal_ninitial()[0]-1, # -1 is because the function returns nexternal of the real emission 1642 orders['WEIGHTED']) 1643 1644 replace_dict = {} 1645 replace_dict['mesq'] = 'CHaveraged' 1646 replace_dict['corr'] = ' '.join(matrix_elements[0].get('processes')[0].\ 1647 get('perturbation_couplings')) 1648 replace_dict['irreg'] = 'CDR' 1649 replace_dict['aspow'] = QCD 1650 replace_dict['aepow'] = QED 1651 replace_dict['modelfile'] = './param_card.dat' 1652 replace_dict['params'] = 'alpha_s' 1653 proc_lines=[] 1654 for fksborn in fksborns: 1655 proc_lines.append(fksborn.get_lh_pdg_string()) 1656 replace_dict['pdgs'] = '\n'.join(proc_lines) 1657 replace_dict['symfin'] = 'Yes' 1658 content = \ 1659 "#OLE_order written by MadGraph5_aMC@NLO\n\ 1660 \n\ 1661 MatrixElementSquareType %(mesq)s\n\ 1662 CorrectionType %(corr)s\n\ 1663 IRregularisation %(irreg)s\n\ 1664 AlphasPower %(aspow)d\n\ 1665 AlphaPower %(aepow)d\n\ 1666 NJetSymmetrizeFinal %(symfin)s\n\ 1667 ModelFile %(modelfile)s\n\ 1668 Parameters %(params)s\n\ 1669 \n\ 1670 # process\n\ 1671 %(pdgs)s\n\ 1672 " % replace_dict 1673 1674 file = open(filename, 'w') 1675 file.write(content) 1676 file.close 1677 return
1678 1679 1680 #=============================================================================== 1681 # write_born_fks 1682 #=============================================================================== 1683 # test written
1684 - def write_born_fks(self, writer, fksborn, fortran_model):
1685 """Export a matrix element to a born.f file in MadFKS format""" 1686 1687 matrix_element = fksborn.born_matrix_element 1688 1689 if not matrix_element.get('processes') or \ 1690 not matrix_element.get('diagrams'): 1691 return 0 1692 1693 if not isinstance(writer, writers.FortranWriter): 1694 raise writers.FortranWriter.FortranWriterError(\ 1695 "writer not FortranWriter") 1696 # Set lowercase/uppercase Fortran code 1697 writers.FortranWriter.downcase = False 1698 1699 replace_dict = {} 1700 1701 # Extract version number and date from VERSION file 1702 info_lines = self.get_mg5_info_lines() 1703 replace_dict['info_lines'] = info_lines 1704 1705 # Extract process info lines 1706 process_lines = self.get_process_info_lines(matrix_element) 1707 replace_dict['process_lines'] = process_lines 1708 1709 1710 # Extract ncomb 1711 ncomb = matrix_element.get_helicity_combinations() 1712 replace_dict['ncomb'] = ncomb 1713 1714 # Extract helicity lines 1715 helicity_lines = self.get_helicity_lines(matrix_element) 1716 replace_dict['helicity_lines'] = helicity_lines 1717 1718 # Extract IC line 1719 ic_line = self.get_ic_line(matrix_element) 1720 replace_dict['ic_line'] = ic_line 1721 1722 # Extract overall denominator 1723 # Averaging initial state color, spin, and identical FS particles 1724 #den_factor_line = get_den_factor_line(matrix_element) 1725 1726 # Extract ngraphs 1727 ngraphs = matrix_element.get_number_of_amplitudes() 1728 replace_dict['ngraphs'] = ngraphs 1729 1730 # Extract nwavefuncs 1731 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1732 replace_dict['nwavefuncs'] = nwavefuncs 1733 1734 # Extract ncolor 1735 ncolor = max(1, len(matrix_element.get('color_basis'))) 1736 replace_dict['ncolor'] = ncolor 1737 1738 # Extract color data lines 1739 color_data_lines = self.get_color_data_lines(matrix_element) 1740 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1741 1742 # Extract helas calls 1743 helas_calls = fortran_model.get_matrix_element_calls(\ 1744 matrix_element) 1745 replace_dict['helas_calls'] = "\n".join(helas_calls) 1746 1747 # Extract amp2 lines 1748 amp2_lines = self.get_amp2_lines(matrix_element) 1749 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1750 1751 # Extract JAMP lines 1752 jamp_lines = self.get_JAMP_lines(matrix_element) 1753 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1754 1755 # Set the size of Wavefunction 1756 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 1757 replace_dict['wavefunctionsize'] = 20 1758 else: 1759 replace_dict['wavefunctionsize'] = 8 1760 1761 # Extract glu_ij_lines 1762 ij_lines = self.get_ij_lines(fksborn) 1763 replace_dict['ij_lines'] = '\n'.join(ij_lines) 1764 1765 # Extract den_factor_lines 1766 den_factor_lines = self.get_den_factor_lines(fksborn) 1767 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1768 1769 # Extract the number of FKS process 1770 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1) 1771 1772 file = open(os.path.join(_file_path, \ 1773 'iolibs/template_files/born_fks.inc')).read() 1774 file = file % replace_dict 1775 1776 # Write the file 1777 writer.writelines(file) 1778 1779 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
1780 1781
1782 - def write_born_hel(self, writer, fksborn, fortran_model):
1783 """Export a matrix element to a born_hel.f file in MadFKS format""" 1784 1785 matrix_element = fksborn.born_matrix_element 1786 1787 if not matrix_element.get('processes') or \ 1788 not matrix_element.get('diagrams'): 1789 return 0 1790 1791 if not isinstance(writer, writers.FortranWriter): 1792 raise writers.FortranWriter.FortranWriterError(\ 1793 "writer not FortranWriter") 1794 # Set lowercase/uppercase Fortran code 1795 writers.FortranWriter.downcase = False 1796 1797 replace_dict = {} 1798 1799 # Extract version number and date from VERSION file 1800 info_lines = self.get_mg5_info_lines() 1801 replace_dict['info_lines'] = info_lines 1802 1803 # Extract process info lines 1804 process_lines = self.get_process_info_lines(matrix_element) 1805 replace_dict['process_lines'] = process_lines 1806 1807 1808 # Extract ncomb 1809 ncomb = matrix_element.get_helicity_combinations() 1810 replace_dict['ncomb'] = ncomb 1811 1812 # Extract helicity lines 1813 helicity_lines = self.get_helicity_lines(matrix_element) 1814 replace_dict['helicity_lines'] = helicity_lines 1815 1816 # Extract IC line 1817 ic_line = self.get_ic_line(matrix_element) 1818 replace_dict['ic_line'] = ic_line 1819 1820 # Extract overall denominator 1821 # Averaging initial state color, spin, and identical FS particles 1822 #den_factor_line = get_den_factor_line(matrix_element) 1823 1824 # Extract ngraphs 1825 ngraphs = matrix_element.get_number_of_amplitudes() 1826 replace_dict['ngraphs'] = ngraphs 1827 1828 # Extract nwavefuncs 1829 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1830 replace_dict['nwavefuncs'] = nwavefuncs 1831 1832 # Extract ncolor 1833 ncolor = max(1, len(matrix_element.get('color_basis'))) 1834 replace_dict['ncolor'] = ncolor 1835 1836 # Extract color data lines 1837 color_data_lines = self.get_color_data_lines(matrix_element) 1838 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1839 1840 # Extract amp2 lines 1841 amp2_lines = self.get_amp2_lines(matrix_element) 1842 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1843 1844 # Extract JAMP lines 1845 jamp_lines = self.get_JAMP_lines(matrix_element) 1846 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1847 1848 # Extract den_factor_lines 1849 den_factor_lines = self.get_den_factor_lines(fksborn) 1850 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1851 1852 # Extract the number of FKS process 1853 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1854 1855 file = open(os.path.join(_file_path, \ 1856 'iolibs/template_files/born_fks_hel.inc')).read() 1857 file = file % replace_dict 1858 1859 # Write the file 1860 writer.writelines(file) 1861 1862 return
1863 1864 1865 #=============================================================================== 1866 # write_born_sf_fks 1867 #=============================================================================== 1868 #test written
1869 - def write_sborn_sf(self, writer, color_links, fortran_model):
1870 """Creates the sborn_sf.f file, containing the calls to the different 1871 color linked borns""" 1872 1873 replace_dict = {} 1874 nborns = len(color_links) 1875 ifkss = [] 1876 iborns = [] 1877 mms = [] 1878 nns = [] 1879 iflines = "\n" 1880 1881 #header for the sborn_sf.f file 1882 file = """subroutine sborn_sf(p_born,m,n,wgt) 1883 implicit none 1884 include "nexternal.inc" 1885 double precision p_born(0:3,nexternal-1),wgt 1886 double complex wgt1(2) 1887 integer m,n \n""" 1888 1889 if nborns > 0: 1890 1891 for i, c_link in enumerate(color_links): 1892 iborn = i+1 1893 1894 iff = {True : 'if', False : 'elseif'}[i==0] 1895 1896 m, n = c_link['link'] 1897 1898 if m != n: 1899 iflines += \ 1900 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1901 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\ 1902 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1903 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1904 else: 1905 iflines += \ 1906 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1907 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\ 1908 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1909 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1910 1911 1912 file += iflines + \ 1913 """else 1914 wgt = 0d0 1915 endif 1916 1917 return 1918 end""" 1919 elif nborns == 0: 1920 #write a dummy file 1921 file+=""" 1922 c This is a dummy function because 1923 c this subdir has no soft singularities 1924 wgt = 0d0 1925 1926 return 1927 end""" 1928 # Write the end of the file 1929 1930 writer.writelines(file)
1931 1932 1933 #=============================================================================== 1934 # write_b_sf_fks 1935 #=============================================================================== 1936 #test written
1937 - def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
1938 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format""" 1939 1940 matrix_element = copy.copy(fksborn.born_matrix_element) 1941 1942 if not matrix_element.get('processes') or \ 1943 not matrix_element.get('diagrams'): 1944 return 0 1945 1946 if not isinstance(writer, writers.FortranWriter): 1947 raise writers.FortranWriter.FortranWriterError(\ 1948 "writer not FortranWriter") 1949 # Set lowercase/uppercase Fortran code 1950 writers.FortranWriter.downcase = False 1951 1952 iborn = i + 1 1953 link = fksborn.color_links[i] 1954 1955 replace_dict = {} 1956 1957 replace_dict['iborn'] = iborn 1958 1959 # Extract version number and date from VERSION file 1960 info_lines = self.get_mg5_info_lines() 1961 replace_dict['info_lines'] = info_lines 1962 1963 # Extract process info lines 1964 process_lines = self.get_process_info_lines(matrix_element) 1965 replace_dict['process_lines'] = process_lines + \ 1966 "\nc spectators: %d %d \n" % tuple(link['link']) 1967 1968 # Extract ncomb 1969 ncomb = matrix_element.get_helicity_combinations() 1970 replace_dict['ncomb'] = ncomb 1971 1972 # Extract helicity lines 1973 helicity_lines = self.get_helicity_lines(matrix_element) 1974 replace_dict['helicity_lines'] = helicity_lines 1975 1976 # Extract IC line 1977 ic_line = self.get_ic_line(matrix_element) 1978 replace_dict['ic_line'] = ic_line 1979 1980 # Extract den_factor_lines 1981 den_factor_lines = self.get_den_factor_lines(fksborn) 1982 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1983 1984 # Extract ngraphs 1985 ngraphs = matrix_element.get_number_of_amplitudes() 1986 replace_dict['ngraphs'] = ngraphs 1987 1988 # Extract nwavefuncs 1989 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1990 replace_dict['nwavefuncs'] = nwavefuncs 1991 1992 # Extract ncolor 1993 ncolor1 = max(1, len(link['orig_basis'])) 1994 replace_dict['ncolor1'] = ncolor1 1995 ncolor2 = max(1, len(link['link_basis'])) 1996 replace_dict['ncolor2'] = ncolor2 1997 1998 # Extract color data lines 1999 color_data_lines = self.get_color_data_lines_from_color_matrix(\ 2000 link['link_matrix']) 2001 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2002 2003 # Extract amp2 lines 2004 amp2_lines = self.get_amp2_lines(matrix_element) 2005 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2006 2007 # Extract JAMP lines 2008 jamp_lines = self.get_JAMP_lines(matrix_element) 2009 new_jamp_lines = [] 2010 for line in jamp_lines: 2011 line = string.replace(line, 'JAMP', 'JAMP1') 2012 new_jamp_lines.append(line) 2013 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines) 2014 2015 matrix_element.set('color_basis', link['link_basis'] ) 2016 jamp_lines = self.get_JAMP_lines(matrix_element) 2017 new_jamp_lines = [] 2018 for line in jamp_lines: 2019 line = string.replace(line, 'JAMP', 'JAMP2') 2020 new_jamp_lines.append(line) 2021 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines) 2022 2023 2024 # Extract the number of FKS process 2025 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2026 2027 file = open(os.path.join(_file_path, \ 2028 'iolibs/template_files/b_sf_xxx_fks.inc')).read() 2029 file = file % replace_dict 2030 2031 # Write the file 2032 writer.writelines(file) 2033 2034 return 0 , ncolor1
2035 2036 2037 #=============================================================================== 2038 # write_born_nhel_file 2039 #=============================================================================== 2040 #test written
2041 - def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
2042 """Write the born_nhel.inc file for MG4.""" 2043 2044 ncomb = matrix_element.get_helicity_combinations() 2045 file = " integer max_bhel, max_bcol \n" 2046 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \ 2047 (ncomb, nflows) 2048 2049 # Write the file 2050 writer.writelines(file) 2051 2052 return True
2053 2054 #=============================================================================== 2055 # write_fks_info_file 2056 #===============================================================================
2057 - def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
2058 """Writes the content of nFKSconfigs.inc, which just gives the 2059 total FKS dirs as a parameter. 2060 nFKSconfigs is always >=1 (use a fake configuration for LOonly)""" 2061 replace_dict = {} 2062 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1) 2063 content = \ 2064 """ INTEGER FKS_CONFIGS 2065 PARAMETER (FKS_CONFIGS=%(nconfs)d) 2066 2067 """ % replace_dict 2068 2069 writer.writelines(content)
2070 2071 2072 #=============================================================================== 2073 # write_fks_info_file 2074 #===============================================================================
2075 - def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
2076 """Writes the content of fks_info.inc, which lists the informations on the 2077 possible splittings of the born ME. 2078 nconfs is always >=1 (use a fake configuration for LOonly). 2079 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and 2080 the last colored particle as j_fks.""" 2081 2082 replace_dict = {} 2083 fks_info_list = fksborn.get_fks_info_list() 2084 replace_dict['nconfs'] = max(len(fks_info_list), 1) 2085 2086 # this is for processes with 'real' or 'all' as NLO mode 2087 if len(fks_info_list) > 0: 2088 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \ 2089 for info in fks_info_list]) 2090 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \ 2091 for info in fks_info_list]) 2092 2093 col_lines = [] 2094 pdg_lines = [] 2095 charge_lines = [] 2096 fks_j_from_i_lines = [] 2097 for i, info in enumerate(fks_info_list): 2098 col_lines.append( \ 2099 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2100 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) )) 2101 pdg_lines.append( \ 2102 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2103 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs']))) 2104 charge_lines.append(\ 2105 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\ 2106 % (i + 1, ', '.join('%19.15fd0' % charg\ 2107 for charg in fksborn.real_processes[info['n_me']-1].charges) )) 2108 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\ 2109 i + 1)) 2110 else: 2111 # this is for 'LOonly', generate a fake FKS configuration with 2112 # - i_fks = nexternal, pdg type = -21 and color =8 2113 # - j_fks = the last colored particle 2114 bornproc = fksborn.born_matrix_element.get('processes')[0] 2115 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21] 2116 colors = [l.get('color') for l in bornproc.get('legs')] + [8] 2117 charges = [0.] 
* len(colors) 2118 2119 fks_i = len(colors) 2120 # use the first colored particle if it exists, or 2121 # just the first 2122 fks_j=1 2123 for cpos, col in enumerate(colors[:-1]): 2124 if col != 1: 2125 fks_j = cpos+1 2126 2127 fks_i_values = str(fks_i) 2128 fks_j_values = str(fks_j) 2129 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2130 % ', '.join([str(col) for col in colors])] 2131 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2132 % ', '.join([str(pdg) for pdg in pdgs])] 2133 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2134 % ', '.join('%19.15fd0' % charg for charg in charges)] 2135 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \ 2136 % (fks_i, fks_j)] 2137 2138 2139 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values 2140 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values 2141 replace_dict['col_lines'] = '\n'.join(col_lines) 2142 replace_dict['pdg_lines'] = '\n'.join(pdg_lines) 2143 replace_dict['charge_lines'] = '\n'.join(charge_lines) 2144 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines) 2145 2146 content = \ 2147 """ INTEGER IPOS, JPOS 2148 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d) 2149 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL) 2150 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL) 2151 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL) 2152 2153 %(fks_i_line)s 2154 %(fks_j_line)s 2155 2156 %(fks_j_from_i_lines)s 2157 2158 C 2159 C Particle type: 2160 C octet = 8, triplet = 3, singlet = 1 2161 %(col_lines)s 2162 2163 C 2164 C Particle type according to PDG: 2165 C 2166 %(pdg_lines)s 2167 2168 C 2169 C Particle charge: 2170 C charge is set 0. with QCD corrections, which is irrelevant 2171 %(charge_lines)s 2172 """ % replace_dict 2173 if not isinstance(writer, writers.FortranWriter): 2174 raise writers.FortranWriter.FortranWriterError(\ 2175 "writer not FortranWriter") 2176 # Set lowercase/uppercase Fortran code 2177 writers.FortranWriter.downcase = False 2178 2179 writer.writelines(content) 2180 2181 return True
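# Illustrative sketch (not part of the original module) of the LOonly branch
# above: the fake configuration appends an 'antigluon' (pdg -21, color 8) as
# i_fks and picks as j_fks the last colored particle of the born (or leg 1 if
# none is colored).  With a hypothetical born u u~ > e+ e- one would get
#
#   colors = [3, -3, 1, 1] + [8]      # born legs plus the fake antigluon
#   fks_i = len(colors)               # = 5, the appended antigluon
#   fks_j = 1
#   for cpos, col in enumerate(colors[:-1]):
#       if col != 1:
#           fks_j = cpos + 1          # ends at 2, the u~ (last colored leg)
#
# so the single FKS configuration would use i_fks = 5 and j_fks = 2.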
2182 2183 2184 #=============================================================================== 2185 # write_matrix_element_fks 2186 #=============================================================================== 2187 #test written
2188 - def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
2189 """Export a matrix element to a matrix.f file in MG4 madevent format""" 2190 2191 if not matrix_element.get('processes') or \ 2192 not matrix_element.get('diagrams'): 2193 return 0,0 2194 2195 if not isinstance(writer, writers.FortranWriter): 2196 raise writers.FortranWriter.FortranWriterError(\ 2197 "writer not FortranWriter") 2198 # Set lowercase/uppercase Fortran code 2199 writers.FortranWriter.downcase = False 2200 2201 replace_dict = {} 2202 replace_dict['N_me'] = n 2203 2204 # Extract version number and date from VERSION file 2205 info_lines = self.get_mg5_info_lines() 2206 replace_dict['info_lines'] = info_lines 2207 2208 # Extract process info lines 2209 process_lines = self.get_process_info_lines(matrix_element) 2210 replace_dict['process_lines'] = process_lines 2211 2212 # Extract ncomb 2213 ncomb = matrix_element.get_helicity_combinations() 2214 replace_dict['ncomb'] = ncomb 2215 2216 # Extract helicity lines 2217 helicity_lines = self.get_helicity_lines(matrix_element) 2218 replace_dict['helicity_lines'] = helicity_lines 2219 2220 # Extract IC line 2221 ic_line = self.get_ic_line(matrix_element) 2222 replace_dict['ic_line'] = ic_line 2223 2224 # Extract overall denominator 2225 # Averaging initial state color, spin, and identical FS particles 2226 den_factor_line = self.get_den_factor_line(matrix_element) 2227 replace_dict['den_factor_line'] = den_factor_line 2228 2229 # Extract ngraphs 2230 ngraphs = matrix_element.get_number_of_amplitudes() 2231 replace_dict['ngraphs'] = ngraphs 2232 2233 # Extract ncolor 2234 ncolor = max(1, len(matrix_element.get('color_basis'))) 2235 replace_dict['ncolor'] = ncolor 2236 2237 # Extract color data lines 2238 color_data_lines = self.get_color_data_lines(matrix_element) 2239 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2240 2241 # Extract helas calls 2242 helas_calls = fortran_model.get_matrix_element_calls(\ 2243 matrix_element) 2244 replace_dict['helas_calls'] = "\n".join(helas_calls) 2245 2246 # Extract nwavefuncs (important to place after get_matrix_element_calls 2247 # so that 'me_id' is set) 2248 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2249 replace_dict['nwavefuncs'] = nwavefuncs 2250 2251 # Extract amp2 lines 2252 amp2_lines = self.get_amp2_lines(matrix_element) 2253 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2254 2255 # Set the size of Wavefunction 2256 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2257 replace_dict['wavefunctionsize'] = 20 2258 else: 2259 replace_dict['wavefunctionsize'] = 8 2260 2261 # Extract JAMP lines 2262 jamp_lines = self.get_JAMP_lines(matrix_element) 2263 2264 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2265 2266 realfile = open(os.path.join(_file_path, \ 2267 'iolibs/template_files/realmatrix_fks.inc')).read() 2268 2269 realfile = realfile % replace_dict 2270 2271 # Write the file 2272 writer.writelines(realfile) 2273 2274 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2275 2276 2277 #=============================================================================== 2278 # write_pdf_file 2279 #===============================================================================
2280 - def write_pdf_file(self, writer, matrix_element, n, fortran_model):
2281 #test written 2282 """Write the auto_dsig.f file for MadFKS, which contains 2283 pdf call information""" 2284 2285 if not matrix_element.get('processes') or \ 2286 not matrix_element.get('diagrams'): 2287 return 0 2288 2289 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2290 2291 if ninitial < 1 or ninitial > 2: 2292 raise writers.FortranWriter.FortranWriterError, \ 2293 """Need ninitial = 1 or 2 to write auto_dsig file""" 2294 2295 replace_dict = {} 2296 2297 replace_dict['N_me'] = n 2298 2299 # Extract version number and date from VERSION file 2300 info_lines = self.get_mg5_info_lines() 2301 replace_dict['info_lines'] = info_lines 2302 2303 # Extract process info lines 2304 process_lines = self.get_process_info_lines(matrix_element) 2305 replace_dict['process_lines'] = process_lines 2306 2307 pdf_vars, pdf_data, pdf_lines = \ 2308 self.get_pdf_lines_mir(matrix_element, ninitial, False, False) 2309 replace_dict['pdf_vars'] = pdf_vars 2310 replace_dict['pdf_data'] = pdf_data 2311 replace_dict['pdf_lines'] = pdf_lines 2312 2313 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \ 2314 self.get_pdf_lines_mir(matrix_element, ninitial, False, True) 2315 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr 2316 2317 file = open(os.path.join(_file_path, \ 2318 'iolibs/template_files/parton_lum_n_fks.inc')).read() 2319 file = file % replace_dict 2320 2321 # Write the file 2322 writer.writelines(file)
2323 2324 2325 2326 #=============================================================================== 2327 # write_coloramps_file 2328 #=============================================================================== 2329 #test written
2330 - def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
2331 """Write the coloramps.inc file for MadEvent""" 2332 2333 lines = [] 2334 lines.append( "logical icolamp(%d,%d,1)" % \ 2335 (max(len(matrix_element.get('color_basis').keys()), 1), 2336 len(mapconfigs))) 2337 2338 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1) 2339 2340 # Write the file 2341 writer.writelines(lines) 2342 2343 return True
2344 2345 2346 #=============================================================================== 2347 # write_leshouche_file 2348 #=============================================================================== 2349 #test written
2350 - def write_leshouche_file(self, writer, matrix_element, fortran_model):
2351 """Write the leshouche.inc file for MG4""" 2352 2353 # Extract number of external particles 2354 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2355 2356 lines = [] 2357 for iproc, proc in enumerate(matrix_element.get('processes')): 2358 legs = proc.get_legs_with_decays() 2359 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \ 2360 (iproc + 1, nexternal, 2361 ",".join([str(l.get('id')) for l in legs]))) 2362 for i in [1, 2]: 2363 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2364 (i, iproc + 1, nexternal, 2365 ",".join([ "%3r" % 0 ] * ninitial + \ 2366 [ "%3r" % i ] * (nexternal - ninitial)))) 2367 2368 # Here goes the color connections corresponding to the JAMPs 2369 # Only one output, for the first subproc! 2370 if iproc == 0: 2371 # If no color basis, just output trivial color flow 2372 if not matrix_element.get('color_basis'): 2373 for i in [1, 2]: 2374 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \ 2375 (i, nexternal, 2376 ",".join([ "%3r" % 0 ] * nexternal))) 2377 color_flow_list = [] 2378 2379 else: 2380 # First build a color representation dictionnary 2381 repr_dict = {} 2382 for l in legs: 2383 repr_dict[l.get('number')] = \ 2384 proc.get('model').get_particle(l.get('id')).get_color()\ 2385 * (-1)**(1+l.get('state')) 2386 # Get the list of color flows 2387 color_flow_list = \ 2388 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2389 ninitial) 2390 # And output them properly 2391 for cf_i, color_flow_dict in enumerate(color_flow_list): 2392 for i in [0, 1]: 2393 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2394 (i + 1, cf_i + 1, nexternal, 2395 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2396 for l in legs]))) 2397 2398 # Write the file 2399 writer.writelines(lines) 2400 2401 return len(color_flow_list)
2402 2403 2404 #=============================================================================== 2405 # write_configs_file 2406 #=============================================================================== 2407 #test_written
2408 - def write_configs_file(self, writer, matrix_element, fortran_model):
2409 """Write the configs.inc file for MadEvent""" 2410 2411 # Extract number of external particles 2412 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2413 lines = [] 2414 2415 iconfig = 0 2416 2417 s_and_t_channels = [] 2418 mapconfigs = [] 2419 2420 model = matrix_element.get('processes')[0].get('model') 2421 # new_pdg = model.get_first_non_pdg() 2422 2423 base_diagrams = matrix_element.get('base_amplitude').get('diagrams') 2424 model = matrix_element.get('base_amplitude').get('process').get('model') 2425 minvert = min([max([len(vert.get('legs')) for vert in \ 2426 diag.get('vertices')]) for diag in base_diagrams]) 2427 2428 for idiag, diag in enumerate(base_diagrams): 2429 if any([len(vert.get('legs')) > minvert for vert in 2430 diag.get('vertices')]): 2431 # Only 3-vertices allowed in configs.inc 2432 continue 2433 iconfig = iconfig + 1 2434 helas_diag = matrix_element.get('diagrams')[idiag] 2435 mapconfigs.append(helas_diag.get('number')) 2436 lines.append("# Diagram %d, Amplitude %d" % \ 2437 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number'])) 2438 # Correspondance between the config and the amplitudes 2439 lines.append("data mapconfig(%4d)/%4d/" % (iconfig, 2440 helas_diag.get('amplitudes')[0]['number'])) 2441 2442 # Need to reorganize the topology so that we start with all 2443 # final state external particles and work our way inwards 2444 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 2445 get_s_and_t_channels(ninitial, model, 990) 2446 2447 s_and_t_channels.append([schannels, tchannels]) 2448 2449 # Write out propagators for s-channel and t-channel vertices 2450 allchannels = schannels 2451 if len(tchannels) > 1: 2452 # Write out tchannels only if there are any non-trivial ones 2453 allchannels = schannels + tchannels 2454 2455 for vert in allchannels: 2456 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2457 last_leg = vert.get('legs')[-1] 2458 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \ 2459 (last_leg.get('number'), iconfig, len(daughters), 2460 ",".join(["%3d" % d for d in daughters]))) 2461 if vert in schannels: 2462 lines.append("data sprop(%4d,%4d)/%8d/" % \ 2463 (last_leg.get('number'), iconfig, 2464 last_leg.get('id'))) 2465 elif vert in tchannels[:-1]: 2466 lines.append("data tprid(%4d,%4d)/%8d/" % \ 2467 (last_leg.get('number'), iconfig, 2468 abs(last_leg.get('id')))) 2469 2470 # Write out number of configs 2471 lines.append("# Number of configs") 2472 lines.append("data mapconfig(0)/%4d/" % iconfig) 2473 2474 # Write the file 2475 writer.writelines(lines) 2476 2477 return iconfig, mapconfigs, s_and_t_channels
2478 2479 2480 #=============================================================================== 2481 # write_decayBW_file 2482 #=============================================================================== 2483 #test written
2484 - def write_decayBW_file(self, writer, s_and_t_channels):
2485 """Write the decayBW.inc file for MadEvent""" 2486 2487 lines = [] 2488 2489 booldict = {False: ".false.", True: ".false."} 2490 ####Changed by MZ 2011-11-23!!!! 2491 2492 for iconf, config in enumerate(s_and_t_channels): 2493 schannels = config[0] 2494 for vertex in schannels: 2495 # For the resulting leg, pick out whether it comes from 2496 # decay or not, as given by the from_group flag 2497 leg = vertex.get('legs')[-1] 2498 lines.append("data gForceBW(%d,%d)/%s/" % \ 2499 (leg.get('number'), iconf + 1, 2500 booldict[leg.get('from_group')])) 2501 2502 # Write the file 2503 writer.writelines(lines) 2504 2505 return True
2506 2507 2508 #=============================================================================== 2509 # write_dname_file 2510 #===============================================================================
2511 - def write_dname_file(self, writer, matrix_element, fortran_model):
2512 """Write the dname.mg file for MG4""" 2513 2514 line = "DIRNAME=P%s" % \ 2515 matrix_element.get('processes')[0].shell_string() 2516 2517 # Write the file 2518 writer.write(line + "\n") 2519 2520 return True
2521 2522 2523 #=============================================================================== 2524 # write_iproc_file 2525 #===============================================================================
2526 - def write_iproc_file(self, writer, me_number):
2527 """Write the iproc.dat file for MG4""" 2528 2529 line = "%d" % (me_number + 1) 2530 2531 # Write the file 2532 for line_to_write in writer.write_line(line): 2533 writer.write(line_to_write) 2534 return True
2535 2536 2537 #=============================================================================== 2538 # Helper functions 2539 #=============================================================================== 2540 2541 2542 #=============================================================================== 2543 # get_fks_j_from_i_lines 2544 #=============================================================================== 2545
2546 - def get_fks_j_from_i_lines(self, me, i = 0): #test written
2547 """generate the lines for fks.inc describing initializating the 2548 fks_j_from_i array""" 2549 lines = [] 2550 if not me.isfinite: 2551 for ii, js in me.fks_j_from_i.items(): 2552 if js: 2553 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2554 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js]))) 2555 else: 2556 lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2557 % (2, 1, 1, '1')) 2558 lines.append('') 2559 2560 return lines 2561 2562 2563 #=============================================================================== 2564 # get_leshouche_lines 2565 #===============================================================================
2566 - def get_leshouche_lines(self, matrix_element, ime):
2567 #test written 2568 """Write the leshouche.inc file for MG4""" 2569 2570 # Extract number of external particles 2571 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2572 2573 lines = [] 2574 for iproc, proc in enumerate(matrix_element.get('processes')): 2575 legs = proc.get_legs_with_decays() 2576 lines.append("I %4d %4d %s" % \ 2577 (ime, iproc + 1, 2578 " ".join([str(l.get('id')) for l in legs]))) 2579 for i in [1, 2]: 2580 lines.append("M %4d %4d %4d %s" % \ 2581 (ime, i, iproc + 1, 2582 " ".join([ "%3d" % 0 ] * ninitial + \ 2583 [ "%3d" % i ] * (nexternal - ninitial)))) 2584 2585 # Here goes the color connections corresponding to the JAMPs 2586 # Only one output, for the first subproc! 2587 if iproc == 0: 2588 # If no color basis, just output trivial color flow 2589 if not matrix_element.get('color_basis'): 2590 for i in [1, 2]: 2591 lines.append("C %4d %4d 1 %s" % \ 2592 (ime, i, 2593 " ".join([ "%3d" % 0 ] * nexternal))) 2594 color_flow_list = [] 2595 nflow = 1 2596 2597 else: 2598 # First build a color representation dictionnary 2599 repr_dict = {} 2600 for l in legs: 2601 repr_dict[l.get('number')] = \ 2602 proc.get('model').get_particle(l.get('id')).get_color()\ 2603 * (-1)**(1+l.get('state')) 2604 # Get the list of color flows 2605 color_flow_list = \ 2606 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2607 ninitial) 2608 # And output them properly 2609 for cf_i, color_flow_dict in enumerate(color_flow_list): 2610 for i in [0, 1]: 2611 lines.append("C %4d %4d %4d %s" % \ 2612 (ime, i + 1, cf_i + 1, 2613 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2614 for l in legs]))) 2615 2616 nflow = len(color_flow_list) 2617 2618 nproc = len(matrix_element.get('processes')) 2619 2620 return lines, nproc, nflow
2621 2622 2623 #=============================================================================== 2624 # get_den_factor_lines 2625 #===============================================================================
2626 - def get_den_factor_lines(self, fks_born):
2627 """returns the lines with the information on the denominator keeping care 2628 of the identical particle factors in the various real emissions""" 2629 2630 lines = [] 2631 info_list = fks_born.get_fks_info_list() 2632 if info_list: 2633 # if the reals have been generated, fill with the corresponding average factor 2634 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 2635 lines.append('DATA IDEN_VALUES /' + \ 2636 ', '.join(['%d' % ( 2637 fks_born.born_matrix_element.get_denominator_factor() / \ 2638 fks_born.born_matrix_element['identical_particle_factor'] * \ 2639 fks_born.real_processes[info['n_me'] - 1].matrix_element['identical_particle_factor'] ) \ 2640 for info in info_list]) + '/') 2641 else: 2642 # otherwise use the born 2643 lines.append('INTEGER IDEN_VALUES(1)') 2644 lines.append('DATA IDEN_VALUES / %d /' \ 2645 % fks_born.born_matrix_element.get_denominator_factor()) 2646 2647 return lines
2648 2649 2650 #=============================================================================== 2651 # get_ij_lines 2652 #===============================================================================
2653 - def get_ij_lines(self, fks_born):
2654 """returns the lines with the information on the particle number of the born 2655 that splits""" 2656 info_list = fks_born.get_fks_info_list() 2657 lines = [] 2658 if info_list: 2659 # if the reals have been generated, fill with the corresponding value of ij 2660 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 2661 lines.append('DATA IJ_VALUES /' + \ 2662 ', '.join(['%d' % info['fks_info']['ij'] for info in info_list]) + '/') 2663 else: 2664 #otherwise just put the first leg 2665 lines.append('INTEGER IJ_VALUES(1)') 2666 lines.append('DATA IJ_VALUES / 1 /') 2667 2668 return lines
2669 2670
2671 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\ 2672 mirror = False): #test written
2673 """Generate the PDF lines for the auto_dsig.f file""" 2674 2675 processes = matrix_element.get('processes') 2676 model = processes[0].get('model') 2677 2678 pdf_definition_lines = "" 2679 pdf_data_lines = "" 2680 pdf_lines = "" 2681 2682 if ninitial == 1: 2683 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 2684 for i, proc in enumerate(processes): 2685 process_line = proc.base_string() 2686 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2687 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n" 2688 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 2689 else: 2690 # Pick out all initial state particles for the two beams 2691 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 2692 p in processes]))), 2693 sorted(list(set([p.get_initial_pdg(2) for \ 2694 p in processes])))] 2695 2696 # Prepare all variable names 2697 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 2698 sum(initial_states,[])]) 2699 for key,val in pdf_codes.items(): 2700 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 2701 2702 # Set conversion from PDG code to number used in PDF calls 2703 pdgtopdf = {21: 0, 22: 7} 2704 # Fill in missing entries of pdgtopdf 2705 for pdg in sum(initial_states,[]): 2706 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 2707 pdgtopdf[pdg] = pdg 2708 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 2709 # If any particle has pdg code 7, we need to use something else 2710 pdgtopdf[pdg] = 6000000 + pdg 2711 2712 # Get PDF variable declarations for all initial states 2713 for i in [0,1]: 2714 pdf_definition_lines += "DOUBLE PRECISION " + \ 2715 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2716 for pdg in \ 2717 initial_states[i]]) + \ 2718 "\n" 2719 2720 # Get PDF data lines for all initial states 2721 for i in [0,1]: 2722 pdf_data_lines += "DATA " + \ 2723 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2724 for pdg in initial_states[i]]) + \ 2725 "/%d*1D0/" % len(initial_states[i]) + \ 2726 "\n" 2727 2728 # Get PDF values for the different initial states 2729 for i, init_states in enumerate(initial_states): 2730 if not mirror: 2731 ibeam = i + 1 2732 else: 2733 ibeam = 2 - i 2734 if subproc_group: 2735 pdf_lines = pdf_lines + \ 2736 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 2737 % (ibeam, ibeam) 2738 else: 2739 pdf_lines = pdf_lines + \ 2740 "IF (ABS(LPP(%d)) .GE. 
1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 2741 % (ibeam, ibeam) 2742 2743 for initial_state in init_states: 2744 if initial_state in pdf_codes.keys(): 2745 if subproc_group: 2746 if abs(pdgtopdf[initial_state]) <= 7: 2747 pdf_lines = pdf_lines + \ 2748 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 2749 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 2750 (pdf_codes[initial_state], 2751 i + 1, ibeam, pdgtopdf[initial_state], 2752 ibeam, ibeam) 2753 else: 2754 # setting other partons flavours outside quark, gluon, photon to be 0d0 2755 pdf_lines = pdf_lines + \ 2756 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2757 "%s%d=0d0\n") % \ 2758 (pdf_codes[initial_state],i + 1) 2759 else: 2760 if abs(pdgtopdf[initial_state]) <= 7: 2761 pdf_lines = pdf_lines + \ 2762 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 2763 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 2764 (pdf_codes[initial_state], 2765 i + 1, ibeam, pdgtopdf[initial_state], 2766 ibeam, ibeam) 2767 else: 2768 # setting other partons flavours outside quark, gluon, photon to be 0d0 2769 pdf_lines = pdf_lines + \ 2770 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2771 "%s%d=0d0\n") % \ 2772 (pdf_codes[initial_state],i + 1) 2773 2774 pdf_lines = pdf_lines + "ENDIF\n" 2775 2776 # Add up PDFs for the different initial state particles 2777 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 2778 for proc in processes: 2779 process_line = proc.base_string() 2780 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2781 pdf_lines = pdf_lines + "\nPD(IPROC) = " 2782 for ibeam in [1, 2]: 2783 initial_state = proc.get_initial_pdg(ibeam) 2784 if initial_state in pdf_codes.keys(): 2785 pdf_lines = pdf_lines + "%s%d*" % \ 2786 (pdf_codes[initial_state], ibeam) 2787 else: 2788 pdf_lines = pdf_lines + "1d0*" 2789 # Remove last "*" from pdf_lines 2790 pdf_lines = pdf_lines[:-1] + "\n" 2791 2792 # Remove last line break from pdf_lines 2793 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1] 2794 2795 2796 #test written
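# Illustrative sketch (not part of the original module) of the PDG-to-PDF code
# mapping used above: gluons map to 0, photons to 7, other partons keep their
# PDG code, and a genuine (hypothetical) PDG code that collides with an
# already-used PDF code is moved out of the way.
#
#   pdgtopdf = {21: 0, 22: 7}
#   for pdg in [21, 2, 1, 7]:
#       if pdg not in pdgtopdf and pdg not in pdgtopdf.values():
#           pdgtopdf[pdg] = pdg
#       elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
#           pdgtopdf[pdg] = 6000000 + pdg
#   # pdgtopdf -> {21: 0, 22: 7, 2: 2, 1: 1, 7: 6000007}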
2797 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
2798 """Return the color matrix definition lines for the given color_matrix. Split 2799 rows in chunks of size n.""" 2800 2801 if not color_matrix: 2802 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 2803 else: 2804 ret_list = [] 2805 my_cs = color.ColorString() 2806 for index, denominator in \ 2807 enumerate(color_matrix.get_line_denominators()): 2808 # First write the common denominator for this color matrix line 2809 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 2810 # Then write the numerators for the matrix elements 2811 num_list = color_matrix.get_line_numerators(index, denominator) 2812 for k in xrange(0, len(num_list), n): 2813 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 2814 (index + 1, k + 1, min(k + n, len(num_list)), 2815 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 2816 2817 return ret_list
2818 2819 #=========================================================================== 2820 # write_maxamps_file 2821 #===========================================================================
2822 - def write_maxamps_file(self, writer, maxamps, maxflows, 2823 maxproc,maxsproc):
2824 """Write the maxamps.inc file for MG4.""" 2825 2826 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 2827 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 2828 (maxamps, maxflows) 2829 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 2830 (maxproc, maxsproc) 2831 2832 # Write the file 2833 writer.writelines(file) 2834 2835 return True
2836 2837 #=============================================================================== 2838 # write_ncombs_file 2839 #===============================================================================
2840 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
2841 # #test written 2842 """Write the ncombs.inc file for MadEvent.""" 2843 2844 # Extract number of external particles 2845 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2846 2847 # n_max_cl (used for clustering) is 2^(nexternal+1) 2848 file = " integer n_max_cl\n" 2849 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1)) 2850 2851 # Write the file 2852 writer.writelines(file) 2853 2854 return True
2855 2856 #=========================================================================== 2857 # write_config_subproc_map_file 2858 #===========================================================================
2859 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
2860 """Write a dummy config_subproc.inc file for MadEvent""" 2861 2862 lines = [] 2863 2864 for iconfig in range(len(s_and_t_channels)): 2865 lines.append("DATA CONFSUB(1,%d)/1/" % \ 2866 (iconfig + 1)) 2867 2868 # Write the file 2869 writer.writelines(lines) 2870 2871 return True
2872 2873 #=========================================================================== 2874 # write_colors_file 2875 #===========================================================================
2876 - def write_colors_file(self, writer, matrix_element):
2877 """Write the get_color.f file for MadEvent, which returns color 2878 for all particles used in the matrix element.""" 2879 2880 try: 2881 matrix_elements=matrix_element.real_processes[0].matrix_element 2882 except IndexError: 2883 matrix_elements=[matrix_element.born_matrix_element] 2884 2885 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 2886 matrix_elements = [matrix_elements] 2887 2888 model = matrix_elements[0].get('processes')[0].get('model') 2889 2890 # We need the both particle and antiparticle wf_ids, since the identity 2891 # depends on the direction of the wf. 2892 # loop on the real emissions 2893 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2894 for wf in d.get('wavefunctions')],[]) \ 2895 for d in me.get('diagrams')],[]) \ 2896 for me in [real_proc.matrix_element]],[])\ 2897 for real_proc in matrix_element.real_processes],[])) 2898 # and also on the born 2899 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2900 for wf in d.get('wavefunctions')],[]) \ 2901 for d in matrix_element.born_matrix_element.get('diagrams')],[]))) 2902 2903 # loop on the real emissions 2904 leg_ids = set(sum([sum([sum([[l.get('id') for l in \ 2905 p.get_legs_with_decays()] for p in \ 2906 me.get('processes')], []) for me in \ 2907 [real_proc.matrix_element]], []) for real_proc in \ 2908 matrix_element.real_processes],[])) 2909 # and also on the born 2910 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \ 2911 p.get_legs_with_decays()] for p in \ 2912 matrix_element.born_matrix_element.get('processes')], []))) 2913 particle_ids = sorted(list(wf_ids.union(leg_ids))) 2914 2915 lines = """function get_color(ipdg) 2916 implicit none 2917 integer get_color, ipdg 2918 2919 if(ipdg.eq.%d)then 2920 get_color=%d 2921 return 2922 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 2923 2924 for part_id in particle_ids[1:]: 2925 lines += """else if(ipdg.eq.%d)then 2926 get_color=%d 2927 return 2928 """ % (part_id, model.get_particle(part_id).get_color()) 2929 # Dummy particle for multiparticle vertices with pdg given by 2930 # first code not in the model 2931 lines += """else if(ipdg.eq.%d)then 2932 c This is dummy particle used in multiparticle vertices 2933 get_color=2 2934 return 2935 """ % model.get_first_non_pdg() 2936 lines += """else 2937 write(*,*)'Error: No color given for pdg ',ipdg 2938 get_color=0 2939 return 2940 endif 2941 end 2942 """ 2943 2944 # Write the file 2945 writer.writelines(lines) 2946 2947 return True
2948 2949 #=============================================================================== 2950 # write_props_file 2951 #=============================================================================== 2952 #test_written
2953 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
2954 """Write the props.inc file for MadEvent. Needs input from 2955 write_configs_file. With respect to the parent routine, it has some 2956 more specific formats that allow the props.inc file to be read by the 2957 link program""" 2958 2959 lines = [] 2960 2961 particle_dict = matrix_element.get('processes')[0].get('model').\ 2962 get('particle_dict') 2963 2964 for iconf, configs in enumerate(s_and_t_channels): 2965 for vertex in configs[0] + configs[1][:-1]: 2966 leg = vertex.get('legs')[-1] 2967 if leg.get('id') not in particle_dict: 2968 # Fake propagator used in multiparticle vertices 2969 mass = 'zero' 2970 width = 'zero' 2971 pow_part = 0 2972 else: 2973 particle = particle_dict[leg.get('id')] 2974 # Get mass 2975 if particle.get('mass').lower() == 'zero': 2976 mass = particle.get('mass') 2977 else: 2978 mass = "abs(%s)" % particle.get('mass') 2979 # Get width 2980 if particle.get('width').lower() == 'zero': 2981 width = particle.get('width') 2982 else: 2983 width = "abs(%s)" % particle.get('width') 2984 2985 pow_part = 1 + int(particle.is_boson()) 2986 2987 lines.append("pmass(%3d,%4d) = %s" % \ 2988 (leg.get('number'), iconf + 1, mass)) 2989 lines.append("pwidth(%3d,%4d) = %s" % \ 2990 (leg.get('number'), iconf + 1, width)) 2991 lines.append("pow(%3d,%4d) = %d" % \ 2992 (leg.get('number'), iconf + 1, pow_part)) 2993 2994 # Write the file 2995 writer.writelines(lines) 2996 2997 return True
2998 2999 3000 #=========================================================================== 3001 # write_subproc 3002 #===========================================================================
3003 - def write_subproc(self, writer, subprocdir):
3004 """Append this subprocess to the subproc.mg file for MG4""" 3005 3006 # Write line to file 3007 writer.write(subprocdir + "\n") 3008 3009 return True
3010 3011 3012 3013 3014 3015 #================================================================================= 3016 # Class for using the optimized Loop process 3017 #=================================================================================
3018 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 3019 ProcessExporterFortranFKS):
3020 """Class to take care of exporting a set of matrix elements to 3021 Fortran (v4) format.""" 3022 3023 #=============================================================================== 3024 # copy the Template in a new directory. 3025 #===============================================================================
3026 - def copy_fkstemplate(self):
3027 """create the directory run_name as a copy of the MadEvent 3028 Template, and clean the directory 3029 For now it is just the same as copy_v4template, but it will be modified 3030 """ 3031 mgme_dir = self.mgme_dir 3032 dir_path = self.dir_path 3033 clean =self.opt['clean'] 3034 3035 #First copy the full template tree if dir_path doesn't exit 3036 if not os.path.isdir(dir_path): 3037 if not mgme_dir: 3038 raise MadGraph5Error, \ 3039 "No valid MG_ME path given for MG4 run directory creation." 3040 logger.info('initialize a new directory: %s' % \ 3041 os.path.basename(dir_path)) 3042 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 3043 # distutils.dir_util.copy_tree since dir_path already exists 3044 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 3045 dir_path) 3046 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 3047 if not mgme_dir: 3048 raise MadGraph5Error, \ 3049 "No valid MG_ME path given for MG4 run directory creation." 3050 try: 3051 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 3052 except IOError: 3053 MG5_version = misc.get_pkg_info() 3054 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 3055 "5." + MG5_version['version']) 3056 3057 #Ensure that the Template is clean 3058 if clean: 3059 logger.info('remove old information in %s' % os.path.basename(dir_path)) 3060 if os.environ.has_key('MADGRAPH_BASE'): 3061 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 3062 '--web'], cwd=dir_path) 3063 else: 3064 try: 3065 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 3066 cwd=dir_path) 3067 except Exception, why: 3068 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 3069 % (os.path.basename(dir_path),why)) 3070 #Write version info 3071 MG_version = misc.get_pkg_info() 3072 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 3073 MG_version['version']) 3074 3075 # We must link the CutTools to the Library folder of the active Template 3076 self.link_CutTools(dir_path) 3077 # We must link the TIR to the Library folder of the active Template 3078 link_tir_libs=[] 3079 tir_libs=[] 3080 tir_include=[] 3081 # special for PJFry++/Golem95 3082 link_pjfry_lib="" 3083 pjfry_lib="" 3084 for tir in self.all_tir: 3085 tir_dir="%s_dir"%tir 3086 libpath=getattr(self,tir_dir) 3087 libname="lib%s.a"%tir 3088 tir_name=tir 3089 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'), 3090 libpath,libname,tir_name=tir_name) 3091 setattr(self,tir_dir,libpath) 3092 if libpath != "": 3093 if tir in ['pjfry','golem']: 3094 # Apparently it is necessary to link against the original 3095 # location of the pjfry/golem library, so it needs a special treatment. 
3096 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir)) 3097 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir)) 3098 if tir=='golem': 3099 trg_path = pjoin(os.path.dirname(libpath),'include') 3100 golem_include = misc.find_includes_path(trg_path,'.mod') 3101 if golem_include is None: 3102 logger.error( 3103 'Could not find the include directory for golem, looking in %s.\n' % str(trg_path)+ 3104 'Generation carries on but you will need to edit the include path by hand in the makefiles.') 3105 golem_include = '<Not_found_define_it_yourself>' 3106 tir_include.append('-I %s'%golem_include) 3107 else: 3108 link_tir_libs.append('-l%s'%tir) 3109 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir) 3110 3111 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 3112 cwd = os.getcwd() 3113 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3114 try: 3115 os.chdir(dirpath) 3116 except os.error: 3117 logger.error('Could not cd to directory %s' % dirpath) 3118 return 0 3119 filename = 'makefile_loop' 3120 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 3121 link_tir_libs,tir_libs,tir_include=tir_include) 3122 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 3123 dirpath = os.path.join(self.dir_path, 'Source') 3124 try: 3125 os.chdir(dirpath) 3126 except os.error: 3127 logger.error('Could not cd to directory %s' % dirpath) 3128 return 0 3129 filename = 'make_opts' 3130 calls = self.write_make_opts(writers.MakefileWriter(filename), 3131 link_tir_libs,tir_libs) 3132 # Return to original PWD 3133 os.chdir(cwd) 3134 3135 cwd = os.getcwd() 3136 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3137 try: 3138 os.chdir(dirpath) 3139 except os.error: 3140 logger.error('Could not cd to directory %s' % dirpath) 3141 return 0 3142 3143 # We add here the user-friendly MadLoop option setter. 
3144 cpfiles= ["SubProcesses/MadLoopParamReader.f", 3145 "Cards/MadLoopParams.dat", 3146 "SubProcesses/MadLoopParams.inc"] 3147 3148 for file in cpfiles: 3149 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 3150 os.path.join(self.dir_path, file)) 3151 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 3152 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 3153 'Cards', 'MadLoopParams.dat')) 3154 # write the output file 3155 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 3156 "MadLoopParams.dat")) 3157 3158 # We need minimal editing of MadLoopCommons.f 3159 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 3160 "SubProcesses","MadLoopCommons.inc")).read() 3161 writer = writers.FortranWriter(os.path.join(self.dir_path, 3162 "SubProcesses","MadLoopCommons.f")) 3163 writer.writelines(MadLoopCommon%{ 3164 'print_banner_commands':self.MadLoop_banner}) 3165 writer.close() 3166 3167 # link the files from the MODEL 3168 model_path = self.dir_path + '/Source/MODEL/' 3169 # Note that for the [real=] mode, these files are not present 3170 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')): 3171 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses') 3172 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')): 3173 ln(model_path + '/mp_coupl_same_name.inc', \ 3174 self.dir_path + '/SubProcesses') 3175 3176 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 3177 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 3178 writers.FortranWriter('cts_mpc.h'),) 3179 3180 self.copy_python_files() 3181 3182 3183 # We need to create the correct open_data for the pdf 3184 self.write_pdf_opendata() 3185 3186 3187 # Return to original PWD 3188 os.chdir(cwd)
3189
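# ----------------------------------------------------------------------------
# Editor's illustration (not part of export_fks.py): a minimal, self-contained
# sketch of the TIR link/library strings assembled near the top of the method
# above.  The reducer name 'golem' and the path '/opt/golem95/lib' are purely
# hypothetical, and the sketch skips the '.mod' lookup that the real code does
# through misc.find_includes_path; '$(libext)' and '$(LIBDIR)' are make
# variables expanded later by the generated makefiles.
import os.path


def sketch_tir_flags(tir, libpath):
    """Mimic, under the assumptions above, the flag construction for one TIR."""
    if libpath:
        # Library found in its own installation directory.
        link_flag = '-L%s/ -l%s' % (libpath, tir)
        lib_target = '%s/lib%s.$(libext)' % (libpath, tir)
        include_flag = ('-I %s' % os.path.join(os.path.dirname(libpath), 'include')
                        if tir == 'golem' else None)
    else:
        # Library expected in the local lib/ directory of the process folder.
        link_flag = '-l%s' % tir
        lib_target = '$(LIBDIR)lib%s.$(libext)' % tir
        include_flag = None
    return link_flag, lib_target, include_flag

# sketch_tir_flags('golem', '/opt/golem95/lib') ->
#   ('-L/opt/golem95/lib/ -lgolem',
#    '/opt/golem95/lib/libgolem.$(libext)',
#    '-I /opt/golem95/include')
# Strings of this form are what write_makefile_TIR and write_make_opts receive.
# ----------------------------------------------------------------------------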
3190 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
3191 """writes the V**** directory inside the P**** directories specified in 3192 dir_name""" 3193 3194 cwd = os.getcwd() 3195 3196 matrix_element = loop_matrix_element 3197 3198 # Create the MadLoop5_resources directory if not already existing 3199 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 3200 try: 3201 os.mkdir(dirpath) 3202 except os.error as error: 3203 logger.warning(error.strerror + " " + dirpath) 3204 3205 # Create the directory PN_xx_xxxxx in the specified path 3206 name = "V%s" % matrix_element.get('processes')[0].shell_string() 3207 dirpath = os.path.join(dir_name, name) 3208 3209 try: 3210 os.mkdir(dirpath) 3211 except os.error as error: 3212 logger.warning(error.strerror + " " + dirpath) 3213 3214 try: 3215 os.chdir(dirpath) 3216 except os.error: 3217 logger.error('Could not cd to directory %s' % dirpath) 3218 return 0 3219 3220 logger.info('Creating files in directory %s' % name) 3221 3222 # Extract number of external particles 3223 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3224 3225 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 3226 3227 # The born matrix element, if needed 3228 filename = 'born_matrix.f' 3229 calls = self.write_bornmatrix( 3230 writers.FortranWriter(filename), 3231 matrix_element, 3232 fortran_model) 3233 3234 filename = 'nexternal.inc' 3235 self.write_nexternal_file(writers.FortranWriter(filename), 3236 nexternal, ninitial) 3237 3238 filename = 'pmass.inc' 3239 self.write_pmass_file(writers.FortranWriter(filename), 3240 matrix_element) 3241 3242 filename = 'ngraphs.inc' 3243 self.write_ngraphs_file(writers.FortranWriter(filename), 3244 len(matrix_element.get_all_amplitudes())) 3245 3246 filename = "loop_matrix.ps" 3247 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""") 3248 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 3249 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 3250 filename, 3251 model=matrix_element.get('processes')[0].get('model'), 3252 amplitude='') 3253 logger.info("Drawing loop Feynman diagrams for " + \ 3254 matrix_element.get('processes')[0].nice_string(\ 3255 print_weighted=False)) 3256 plot.draw() 3257 3258 filename = "born_matrix.ps" 3259 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3260 get('born_diagrams'), 3261 filename, 3262 model=matrix_element.get('processes')[0].\ 3263 get('model'), 3264 amplitude='') 3265 logger.info("Generating born Feynman diagrams for " + \ 3266 matrix_element.get('processes')[0].nice_string(\ 3267 print_weighted=False)) 3268 plot.draw() 3269 3270 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 3271 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 3272 'MadLoopParams.inc','MadLoopCommons.f'] 3273 3274 for file in linkfiles: 3275 ln('../../%s' % file) 3276 3277 3278 os.system("ln -s ../../makefile_loop makefile") 3279 3280 # We should move to MadLoop5_resources directory from the SubProcesses 3281 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 3282 pjoin('..','MadLoop5_resources')) 3283 3284 linkfiles = ['mpmodule.mod'] 3285 3286 for file in linkfiles: 3287 ln('../../../lib/%s' % file) 3288 3289 # Return to original PWD 3290 os.chdir(cwd) 3291 3292 if not calls: 3293 calls = 0 3294 return calls
3295  
3296  
3297      #=============================================================================== 
3298      # write_coef_specs 
3299      #=============================================================================== 
3300 - def write_coef_specs_file(self, virt_me_list):
3301 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 3302 non-optimized mode""" 3303 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 3304 3305 replace_dict = {} 3306 replace_dict['max_lwf_size'] = 4 3307 3308 max_loop_vertex_ranks = [me.get_max_loop_vertex_rank() for me in virt_me_list] 3309 replace_dict['vertex_max_coefs'] = max(\ 3310 [q_polynomial.get_number_of_coefs_for_rank(n) 3311 for n in max_loop_vertex_ranks]) 3312 3313 IncWriter=writers.FortranWriter(filename,'w') 3314 IncWriter.writelines("""INTEGER MAXLWFSIZE 3315 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 3316 INTEGER VERTEXMAXCOEFS 3317 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 3318 % replace_dict) 3319 IncWriter.close()
3320