
Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from distutils import dir_util 
  18  import glob 
  19  import logging 
  20  import os 
  21  import re 
  22  import shutil 
  23  import subprocess 
  24  import string 
  25  import copy 
  26   
  27  import madgraph.core.color_algebra as color 
  28  import madgraph.core.helas_objects as helas_objects 
  29  import madgraph.core.base_objects as base_objects 
  30  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  31  import madgraph.fks.fks_base as fks 
  32  import madgraph.fks.fks_common as fks_common 
  33  import madgraph.iolibs.drawing_eps as draw 
  34  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  35  import madgraph.iolibs.files as files 
  36  import madgraph.various.misc as misc 
  37  import madgraph.iolibs.file_writers as writers 
  38  import madgraph.iolibs.template_files as template_files 
  39  import madgraph.iolibs.ufo_expression_parsers as parsers 
  40  import madgraph.iolibs.export_v4 as export_v4 
  41  import madgraph.loop.loop_exporters as loop_exporters 
  42  import madgraph.various.q_polynomial as q_polynomial 
  43  import madgraph.various.banner as banner_mod 
  44   
  45  import aloha.create_aloha as create_aloha 
  46   
  47  import models.write_param_card as write_param_card 
  48  import models.check_param_card as check_param_card 
  49  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  50  from madgraph.iolibs.files import cp, ln, mv 
  51   
  52  pjoin = os.path.join 
  53   
  54  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  55  logger = logging.getLogger('madgraph.export_fks') 
  56   
   57  #================================================================================= 
   58  # Class for the export of the (non-optimized) Loop process 
   59  #================================================================================= 
   60  class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA): 
   61      """Class to take care of exporting a set of matrix elements to 
   62      Fortran (v4) format.""" 
   63   
   64      #=========================================================================== 
   65      # copy the Template in a new directory. 
   66      #=========================================================================== 
   67      def copy_fkstemplate(self): 
68 """create the directory run_name as a copy of the MadEvent 69 Template, and clean the directory 70 For now it is just the same as copy_v4template, but it will be modified 71 """ 72 mgme_dir = self.mgme_dir 73 dir_path = self.dir_path 74 clean =self.opt['clean'] 75 76 77 #First copy the full template tree if dir_path doesn't exit 78 if not os.path.isdir(dir_path): 79 if not mgme_dir: 80 raise MadGraph5Error, \ 81 "No valid MG_ME path given for MG4 run directory creation." 82 logger.info('initialize a new directory: %s' % \ 83 os.path.basename(dir_path)) 84 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 85 # distutils.dir_util.copy_tree since dir_path already exists 86 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 87 dir_path) 88 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 89 if not mgme_dir: 90 raise MadGraph5Error, \ 91 "No valid MG_ME path given for MG4 run directory creation." 92 try: 93 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 94 except IOError: 95 MG5_version = misc.get_pkg_info() 96 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 97 "5." + MG5_version['version']) 98 99 #Ensure that the Template is clean 100 if clean: 101 logger.info('remove old information in %s' % os.path.basename(dir_path)) 102 if os.environ.has_key('MADGRAPH_BASE'): 103 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 104 '--web'],cwd=dir_path) 105 else: 106 try: 107 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 108 cwd=dir_path) 109 except Exception, why: 110 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 111 % (os.path.basename(dir_path),why)) 112 #Write version info 113 MG_version = misc.get_pkg_info() 114 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 115 MG_version['version']) 116 117 # We must link the CutTools to the Library folder of the active Template 118 self.link_CutTools(dir_path) 119 120 link_tir_libs=[] 121 tir_libs=[] 122 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 123 dirpath = os.path.join(self.dir_path, 'SubProcesses') 124 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop') 125 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 126 link_tir_libs,tir_libs) 127 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 128 filename = pjoin(self.dir_path, 'Source','make_opts') 129 calls = self.write_make_opts(writers.MakefileWriter(filename), 130 link_tir_libs,tir_libs) 131 132 # Duplicate run_card and FO_analyse_card 133 for card in ['FO_analyse_card', 'shower_card']: 134 try: 135 shutil.copy(pjoin(self.dir_path, 'Cards', 136 card + '.dat'), 137 pjoin(self.dir_path, 'Cards', 138 card + '_default.dat')) 139 except IOError: 140 logger.warning("Failed to copy " + card + ".dat to default") 141 142 cwd = os.getcwd() 143 dirpath = os.path.join(self.dir_path, 'SubProcesses') 144 try: 145 os.chdir(dirpath) 146 except os.error: 147 logger.error('Could not cd to directory %s' % dirpath) 148 return 0 149 150 # We add here the user-friendly MadLoop option setter. 
151 cpfiles= ["SubProcesses/MadLoopParamReader.f", 152 "Cards/MadLoopParams.dat", 153 "SubProcesses/MadLoopParams.inc"] 154 155 for file in cpfiles: 156 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 157 os.path.join(self.dir_path, file)) 158 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 159 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 160 'Cards', 'MadLoopParams.dat')) 161 # write the output file 162 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 163 "MadLoopParams.dat")) 164 165 # We need minimal editing of MadLoopCommons.f 166 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 167 "SubProcesses","MadLoopCommons.inc")).read() 168 writer = writers.FortranWriter(os.path.join(self.dir_path, 169 "SubProcesses","MadLoopCommons.f")) 170 writer.writelines(MadLoopCommon%{ 171 'print_banner_commands':self.MadLoop_banner}) 172 writer.close() 173 174 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 175 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 176 writers.FortranWriter('cts_mpc.h')) 177 178 179 # Finally make sure to turn off MC over Hel for the default mode. 180 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat') 181 FKS_card_file = open(FKS_card_path,'r') 182 FKS_card = FKS_card_file.read() 183 FKS_card_file.close() 184 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+", 185 "#NHelForMCoverHels\n-1", FKS_card) 186 FKS_card_file = open(FKS_card_path,'w') 187 FKS_card_file.write(FKS_card) 188 FKS_card_file.close() 189 190 # Return to original PWD 191 os.chdir(cwd) 192 # Copy the different python files in the Template 193 self.copy_python_files() 194 195 # We need to create the correct open_data for the pdf 196 self.write_pdf_opendata()
  197   
  198      # This is put here, and not in the optimized exporter, so that the same 
  199      # makefile_loop.inc can be used. Also, we overload this function (i.e. it is 
  200      # already defined in LoopProcessExporterFortranSA) because the path of the 
  201      # template makefile is different. 
  202      def write_makefile_TIR(self, writer, link_tir_libs, tir_libs, tir_include=[]): 
  203          """Create the file makefile_loop which links to the TIR libraries.""" 
  204   
  205          file = open(os.path.join(self.mgme_dir, 'Template', 'NLO', 
  206                                   'SubProcesses', 'makefile_loop.inc')).read() 
  207          replace_dict = {} 
  208          replace_dict['link_tir_libs'] = ' '.join(link_tir_libs) 
  209          replace_dict['tir_libs'] = ' '.join(tir_libs) 
  210          replace_dict['dotf'] = '%.f' 
  211          replace_dict['doto'] = '%.o' 
  212          replace_dict['tir_include'] = ' '.join(tir_include) 
  213          file = file % replace_dict 
  214          if writer: 
  215              writer.writelines(file) 
  216          else: 
  217              return file 
  218   
  219      # This is put here, and not in the optimized exporter, so that the same make_opts.inc can be used. 
  220      def write_make_opts(self, writer, link_tir_libs, tir_libs): 
  221          """Create the file make_opts which links to the TIR libraries.""" 
  222          file = open(os.path.join(self.mgme_dir, 'Template', 'NLO', 
  223                                   'Source', 'make_opts.inc')).read() 
  224          replace_dict = {} 
  225          replace_dict['link_tir_libs'] = ' '.join(link_tir_libs) 
  226          replace_dict['tir_libs'] = ' '.join(tir_libs) 
  227          replace_dict['dotf'] = '%.f' 
  228          replace_dict['doto'] = '%.o' 
  229          file = file % replace_dict 
  230          if writer: 
  231              writer.writelines(file) 
  232          else: 
  233              return file 
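Both writers above fill a makefile template by Python %-substitution; the 'dotf' and 'doto' entries exist only to re-insert the literal pattern-rule tokens %.f and %.o after the substitution. The stand-alone sketch below illustrates the mechanism with an invented template string (it is not the actual makefile_loop.inc or make_opts.inc shipped in Template/NLO):

# Illustration only: a made-up stand-in for the Template makefile snippet.
template = (
    "LINKLIBS = %(link_tir_libs)s\n"
    "LIBS     = %(tir_libs)s\n"
    "\n"
    "%(doto)s: %(dotf)s\n"
    "\t$(FC) $(FFLAGS) -c -o $@ $<\n"
)

replace_dict = {
    'link_tir_libs': '-L../lib/ -lcts',   # invented example values
    'tir_libs': '../lib/libcts.a',
    'dotf': '%.f',   # re-inserted as a literal '%.f' pattern token
    'doto': '%.o',   # re-inserted as a literal '%.o' pattern token
}

# After substitution the pattern rule reads "%.o: %.f", as in a real makefile.
print(template % replace_dict)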
  234   
  235      #=========================================================================== 
  236      # copy_python_files 
  237      #=========================================================================== 
  238      def copy_python_files(self): 
  239          """copy python files required for the Template""" 
  240   
  241          files_to_copy = [ 
  242              pjoin('interface', 'amcatnlo_run_interface.py'), 
  243              pjoin('interface', 'extended_cmd.py'), 
  244              pjoin('interface', 'common_run_interface.py'), 
  245              pjoin('interface', 'coloring_logging.py'), 
  246              pjoin('various', 'misc.py'), 
  247              pjoin('various', 'shower_card.py'), 
  248              pjoin('various', 'FO_analyse_card.py'), 
  249              pjoin('various', 'histograms.py'), 
  250              pjoin('various', 'banner.py'), 
  251              pjoin('various', 'cluster.py'), 
  252              pjoin('various', 'lhe_parser.py'), 
  253              pjoin('madevent', 'sum_html.py'), 
  254              pjoin('madevent', 'gen_crossxhtml.py'), 
  255              pjoin('iolibs', 'files.py'), 
  256              pjoin('iolibs', 'save_load_object.py'), 
  257              pjoin('iolibs', 'file_writers.py'), 
  258              pjoin('..', 'models', 'check_param_card.py'), 
  259              pjoin('__init__.py') 
  260              ] 
  261          cp(_file_path + '/interface/.mg5_logging.conf', 
  262             self.dir_path + '/bin/internal/me5_logging.conf') 
  263   
  264          for cp_file in files_to_copy: 
  265              cp(pjoin(_file_path, cp_file), 
  266                 pjoin(self.dir_path, 'bin', 'internal', os.path.basename(cp_file))) 
267
268 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 269 wanted_couplings = []):
270 271 super(ProcessExporterFortranFKS,self).convert_model_to_mg4(model, 272 wanted_lorentz, wanted_couplings) 273 274 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 275 try: 276 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 277 except OSError as error: 278 pass 279 model_path = model.get('modelpath') 280 shutil.copytree(model_path, 281 pjoin(self.dir_path,'bin','internal','ufomodel'), 282 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 283 if hasattr(model, 'restrict_card'): 284 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 285 'restrict_default.dat') 286 if isinstance(model.restrict_card, check_param_card.ParamCard): 287 model.restrict_card.write(out_path) 288 else: 289 files.cp(model.restrict_card, out_path)
290 291 292 293 #=========================================================================== 294 # write_maxparticles_file 295 #===========================================================================
  296      def write_maxparticles_file(self, writer, matrix_elements): 
  297          """Write the maxparticles.inc file for MadEvent""" 
  298   
  299          maxparticles = max([me.get_nexternal_ninitial()[0] \
  300                              for me in matrix_elements['matrix_elements']]) 
  301   
  302          lines = "integer max_particles, max_branch\n" 
  303          lines += "parameter (max_particles=%d) \n" % maxparticles 
  304          lines += "parameter (max_branch=max_particles-1)" 
  305   
  306          # Write the file 
  307          writer.writelines(lines) 
  308   
  309          return True 
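As an illustration (not part of the module source), the snippet below mimics the logic of write_maxparticles_file with stub matrix elements; the _StubME class and the particle counts are invented, and the real code writes through a FortranWriter instead of printing:

class _StubME(object):
    """Hypothetical stand-in exposing only get_nexternal_ninitial()."""
    def __init__(self, nexternal, ninitial):
        self.n = (nexternal, ninitial)
    def get_nexternal_ninitial(self):
        return self.n

# invented example: two matrix elements with 4 and 5 external legs
matrix_elements = {'matrix_elements': [_StubME(4, 2), _StubME(5, 2)]}
maxparticles = max([me.get_nexternal_ninitial()[0]
                    for me in matrix_elements['matrix_elements']])

lines = "integer max_particles, max_branch\n"
lines += "parameter (max_particles=%d) \n" % maxparticles
lines += "parameter (max_branch=max_particles-1)"
print(lines)   # parameter (max_particles=5), etc.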
  310   
  311   
  312      #=========================================================================== 
  313      # write_maxconfigs_file 
  314      #=========================================================================== 
  315      def write_maxconfigs_file(self, writer, matrix_elements): 
  316          """Write the maxconfigs.inc file for MadEvent""" 
  317   
  318          try: 
  319              maxconfigs = max([me.get_num_configs() \
  320                                for me in matrix_elements['real_matrix_elements']]) 
  321          except ValueError: 
  322              maxconfigs = max([me.born_matrix_element.get_num_configs() \
  323                                for me in matrix_elements['matrix_elements']]) 
  324   
  325          lines = "integer lmaxconfigs\n" 
  326          lines += "parameter (lmaxconfigs=%d)" % maxconfigs 
  327   
  328          # Write the file 
  329          writer.writelines(lines) 
  330   
  331          return True 
  332   
  333   
  334      #=========================================================================== 
  335      # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 
  336      #=========================================================================== 
  337      def write_procdef_mg5(self, file_pos, modelname, process_str): 
  338          """Write an equivalent of the MG4 proc_card so that all the MadEvent4 
  339          Perl scripts keep working properly for a pure MG5 run.""" 
  340   
  341          proc_card_template = template_files.mg4_proc_card.mg4_template 
  342          process_template = template_files.mg4_proc_card.process_template 
  343          process_text = '' 
  344          coupling = '' 
  345          new_process_content = [] 
  346   
  347          # First find the couplings and remove them from process_str, 
  348          # but first make sure the couplings are defined without spaces: 
  349          process_str = process_str.replace(' =', '=') 
  350          process_str = process_str.replace('= ', '=') 
  351          process_str = process_str.replace(',', ' , ') 
  352          # now loop over the elements and treat all the couplings 
  353          for info in process_str.split(): 
  354              if '=' in info: 
  355                  coupling += info + '\n' 
  356              else: 
  357                  new_process_content.append(info) 
  358          # Recombine process_str (i.e. the input process_str without the 
  359          # coupling info) 
  360          process_str = ' '.join(new_process_content) 
  361   
  362          # format the SubProcess 
  363          process_text += process_template.substitute({'process': process_str, 
  364                                                       'coupling': coupling}) 
  365   
  366          text = proc_card_template.substitute({'process': process_text, 
  367                                                'model': modelname, 
  368                                                'multiparticle': ''}) 
  369          ff = open(file_pos, 'w') 
  370          ff.write(text) 
  371          ff.close() 
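The coupling-stripping loop above can be exercised in isolation; the process string in this sketch is just an invented example input:

process_str = "p p > e+ e- QED=2 QCD=0"   # example input, not from the code

# same normalisation and split as in write_procdef_mg5
process_str = process_str.replace(' =', '=').replace('= ', '=').replace(',', ' , ')
coupling = ''
new_process_content = []
for info in process_str.split():
    if '=' in info:
        coupling += info + '\n'
    else:
        new_process_content.append(info)

print(' '.join(new_process_content))   # p p > e+ e-
print(coupling)                        # QED=2 and QCD=0, one per line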
  372   
  373   
  374      #=========================================================================== 
  375      # write an initial-states map, useful for the fast PDF NLO interface 
  376      #=========================================================================== 
  377      def write_init_map(self, file_pos, initial_states): 
  378          """Write an initial-state process map. Each possible PDF 
  379          combination gets a unique identifier.""" 
  380   
  381          text = '' 
  382          for i, e in enumerate(initial_states): 
  383              text = text + str(i + 1) + ' ' + str(len(e)) 
  384              for t in e: 
  385                  text = text + ' ' 
  386                  for p in t: 
  387                      text = text + ' ' + str(p) 
  388              text = text + '\n' 
  389   
  390          ff = open(file_pos, 'w') 
  391          ff.write(text) 
  392          ff.close() 
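For reference, each line written by write_init_map contains the entry index, the number of PDF combinations, and then the PDG codes of each initial-state pair. A stand-alone sketch with an invented initial_states list:

# invented example: entry 1 has two possible PDF combinations, entry 2 has one
initial_states = [[(21, 21), (2, -2)], [(2, 21)]]

text = ''
for i, e in enumerate(initial_states):
    text = text + str(i + 1) + ' ' + str(len(e))
    for t in e:
        text = text + ' '
        for p in t:
            text = text + ' ' + str(p)
    text = text + '\n'

print(text)
# 1 2  21 21  2 -2
# 2 1  2 21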
393
  394      def get_ME_identifier(self, matrix_element, *args, **opts): 
  395          """A function returning a string uniquely identifying the matrix 
  396          element given in argument, so that it can be used as a prefix for all 
  397          MadLoop5 subroutines and common blocks related to it. This allows 
  398          several processes to be compiled into one library, as requested by the 
  399          BLHA (Binoth Les Houches Accord) guidelines. The MadFKS design 
  400          necessitates that there is no process prefix.""" 
  401   
  402          return '' 
  403   
  404      #=========================================================================== 
  405      # write_coef_specs 
  406      #=========================================================================== 
  407      def write_coef_specs_file(self, virt_me_list): 
  408          """writes the coef_specs.inc in the DHELAS folder. Should not be called 
  409          in the non-optimized mode""" 
  410          raise fks_common.FKSProcessError( 
  411              "write_coef_specs should be called only in the loop-optimized mode") 
412 413 414 #=============================================================================== 415 # generate_directories_fks 416 #===============================================================================
417 - def generate_directories_fks(self, matrix_element, fortran_model, me_number, 418 me_ntot, path=os.getcwd(),OLP='MadLoop'):
419 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 420 including the necessary matrix.f and various helper files""" 421 proc = matrix_element.born_matrix_element['processes'][0] 422 423 if not self.model: 424 self.model = matrix_element.get('processes')[0].get('model') 425 426 cwd = os.getcwd() 427 try: 428 os.chdir(path) 429 except OSError, error: 430 error_msg = "The directory %s should exist in order to be able " % path + \ 431 "to \"export\" in it. If you see this error message by " + \ 432 "typing the command \"export\" please consider to use " + \ 433 "instead the command \"output\". " 434 raise MadGraph5Error, error_msg 435 436 calls = 0 437 438 self.fksdirs = [] 439 #first make and cd the direcrory corresponding to the born process: 440 borndir = "P%s" % \ 441 (matrix_element.get('processes')[0].shell_string()) 442 os.mkdir(borndir) 443 os.chdir(borndir) 444 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 445 446 ## write the files corresponding to the born process in the P* directory 447 self.generate_born_fks_files(matrix_element, 448 fortran_model, me_number, path) 449 450 # With NJET you want to generate the order file per subprocess and most 451 # likely also generate it for each subproc. 452 if OLP=='NJET': 453 filename = 'OLE_order.lh' 454 self.write_lh_order(filename, matrix_element, OLP) 455 456 if matrix_element.virt_matrix_element: 457 calls += self.generate_virt_directory( \ 458 matrix_element.virt_matrix_element, \ 459 fortran_model, \ 460 os.path.join(path, borndir)) 461 462 #write the infortions for the different real emission processes 463 464 self.write_real_matrix_elements(matrix_element, fortran_model) 465 466 self.write_pdf_calls(matrix_element, fortran_model) 467 468 filename = 'nFKSconfigs.inc' 469 self.write_nfksconfigs_file(writers.FortranWriter(filename), 470 matrix_element, 471 fortran_model) 472 473 filename = 'iproc.dat' 474 self.write_iproc_file(writers.FortranWriter(filename), 475 me_number) 476 477 filename = 'fks_info.inc' 478 self.write_fks_info_file(writers.FortranWriter(filename), 479 matrix_element, 480 fortran_model) 481 482 filename = 'leshouche_info.dat' 483 nfksconfs,maxproc,maxflow,nexternal=\ 484 self.write_leshouche_info_file(filename,matrix_element) 485 486 # if no corrections are generated ([LOonly] mode), get 487 # these variables from the born 488 if nfksconfs == maxproc == maxflow == 0: 489 nfksconfs = 1 490 (dummylines, maxproc, maxflow) = self.get_leshouche_lines( 491 matrix_element.born_matrix_element, 1) 492 493 filename = 'leshouche_decl.inc' 494 self.write_leshouche_info_declarations( 495 writers.FortranWriter(filename), 496 nfksconfs,maxproc,maxflow,nexternal, 497 fortran_model) 498 499 filename = 'configs_and_props_info.dat' 500 nconfigs,max_leg_number,nfksconfs=self.write_configs_and_props_info_file( 501 filename, 502 matrix_element) 503 504 filename = 'configs_and_props_decl.inc' 505 self.write_configs_and_props_info_declarations( 506 writers.FortranWriter(filename), 507 nconfigs,max_leg_number,nfksconfs, 508 fortran_model) 509 510 filename = 'real_from_born_configs.inc' 511 self.write_real_from_born_configs( 512 writers.FortranWriter(filename), 513 matrix_element, 514 fortran_model) 515 516 filename = 'ngraphs.inc' 517 self.write_ngraphs_file(writers.FortranWriter(filename), 518 nconfigs) 519 520 #write the wrappers 521 filename = 'real_me_chooser.f' 522 self.write_real_me_wrapper(writers.FortranWriter(filename), 523 matrix_element, 524 fortran_model) 525 526 filename = 
'parton_lum_chooser.f' 527 self.write_pdf_wrapper(writers.FortranWriter(filename), 528 matrix_element, 529 fortran_model) 530 531 filename = 'get_color.f' 532 self.write_colors_file(writers.FortranWriter(filename), 533 matrix_element) 534 535 filename = 'nexternal.inc' 536 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 537 self.write_nexternal_file(writers.FortranWriter(filename), 538 nexternal, ninitial) 539 540 filename = 'pmass.inc' 541 try: 542 self.write_pmass_file(writers.FortranWriter(filename), 543 matrix_element.real_processes[0].matrix_element) 544 except IndexError: 545 self.write_pmass_file(writers.FortranWriter(filename), 546 matrix_element.born_matrix_element) 547 548 #draw the diagrams 549 self.draw_feynman_diagrams(matrix_element) 550 551 linkfiles = ['BinothLHADummy.f', 552 'check_poles.f', 553 'MCmasses_HERWIG6.inc', 554 'MCmasses_HERWIGPP.inc', 555 'MCmasses_PYTHIA6Q.inc', 556 'MCmasses_PYTHIA6PT.inc', 557 'MCmasses_PYTHIA8.inc', 558 'add_write_info.f', 559 'coupl.inc', 560 'cuts.f', 561 'FKS_params.dat', 562 'initial_states_map.dat', 563 'OLE_order.olc', 564 'FKSParams.inc', 565 'FKSParamReader.f', 566 'cuts.inc', 567 'unlops.inc', 568 'pythia_unlops.f', 569 'driver_mintMC.f', 570 'driver_mintFO.f', 571 'driver_vegas.f', 572 'appl_interface.cc', 573 'appl_interface_dummy.f', 574 'appl_common.inc', 575 'reweight_appl.inc', 576 'driver_reweight.f', 577 'fastjetfortran_madfks_core.cc', 578 'fastjetfortran_madfks_full.cc', 579 'fjcore.cc', 580 'fastjet_wrapper.f', 581 'fjcore.hh', 582 'fks_Sij.f', 583 'fks_powers.inc', 584 'fks_singular.f', 585 'veto_xsec.f', 586 'veto_xsec.inc', 587 'c_weight.inc', 588 'fks_inc_chooser.f', 589 'leshouche_inc_chooser.f', 590 'configs_and_props_inc_chooser.f', 591 'genps.inc', 592 'genps_fks.f', 593 'boostwdir2.f', 594 'madfks_mcatnlo.inc', 595 'open_output_files.f', 596 'open_output_files_dummy.f', 597 'HwU_dummy.f', 598 'madfks_plot.f', 599 'analysis_dummy.f', 600 'mint-integrator2.f', 601 'MC_integer.f', 602 'mint.inc', 603 'montecarlocounter.f', 604 'q_es.inc', 605 'recluster.cc', 606 'Boosts.h', 607 'reweight.inc', 608 'reweight0.inc', 609 'reweight1.inc', 610 'reweightNLO.inc', 611 'reweight_all.inc', 612 'reweight_events.f', 613 'reweight_xsec.f', 614 'reweight_xsec_events.f', 615 'reweight_xsec_events_pdf_dummy.f', 616 'iproc_map.f', 617 'run.inc', 618 'run_card.inc', 619 'setcuts.f', 620 'setscales.f', 621 'symmetry_fks_test_MC.f', 622 'symmetry_fks_test_ME.f', 623 'symmetry_fks_test_Sij.f', 624 'symmetry_fks_v3.f', 625 'trapfpe.c', 626 'vegas2.for', 627 'write_ajob.f', 628 'handling_lhe_events.f', 629 'write_event.f', 630 'fill_MC_mshell.f', 631 'maxparticles.inc', 632 'message.inc', 633 'initcluster.f', 634 'cluster.inc', 635 'cluster.f', 636 'reweight.f', 637 'randinit', 638 'sudakov.inc', 639 'maxconfigs.inc', 640 'timing_variables.inc'] 641 642 for file in linkfiles: 643 ln('../' + file , '.') 644 os.system("ln -s ../../Cards/param_card.dat .") 645 646 #copy the makefile 647 os.system("ln -s ../makefile_fks_dir ./makefile") 648 if matrix_element.virt_matrix_element: 649 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 650 elif OLP!='MadLoop': 651 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 652 else: 653 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 654 655 656 #import nexternal/leshouches in Source 657 ln('nexternal.inc', '../../Source', log=False) 658 ln('born_leshouche.inc', '../../Source', log=False) 659 660 661 # Return to SubProcesses dir 662 os.chdir(os.path.pardir) 663 # Add subprocess to 
subproc.mg 664 filename = 'subproc.mg' 665 files.append_to_file(filename, 666 self.write_subproc, 667 borndir) 668 669 670 os.chdir(cwd) 671 # Generate info page 672 gen_infohtml.make_info_html_nlo(self.dir_path) 673 674 675 return calls
676 677 #=========================================================================== 678 # create the run_card 679 #===========================================================================
680 - def create_run_card(self, matrix_elements, history):
681 """ """ 682 683 run_card = banner_mod.RunCardNLO() 684 685 processes = [me.get('processes') 686 for me in matrix_elements['matrix_elements']] 687 688 run_card.create_default_for_process(self.proc_characteristic, 689 history, 690 processes) 691 692 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 693 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
694 695
696 - def finalize_fks_directory(self, matrix_elements, history, makejpg = False, 697 online = False, 698 compiler_dict={'fortran': 'gfortran', 'cpp': 'g++'}, 699 output_dependencies = 'external', MG5DIR = None):
700 """Finalize FKS directory by creating jpeg diagrams, html 701 pages,proc_card_mg5.dat and madevent.tar.gz.""" 702 703 self.proc_characteristic['grouped_matrix'] = False 704 705 self.create_run_card(matrix_elements, history) 706 # modelname = self.model.get('name') 707 # if modelname == 'mssm' or modelname.startswith('mssm-'): 708 # param_card = os.path.join(self.dir_path, 'Cards','param_card.dat') 709 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 710 # check_param_card.convert_to_mg5card(param_card, mg5_param) 711 # check_param_card.check_valid_param_card(mg5_param) 712 713 # # write the model functions get_mass/width_from_id 714 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f') 715 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc') 716 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model) 717 718 # # Write maxconfigs.inc based on max of ME's/subprocess groups 719 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 720 self.write_maxconfigs_file(writers.FortranWriter(filename), 721 matrix_elements) 722 723 # # Write maxparticles.inc based on max of ME's/subprocess groups 724 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 725 self.write_maxparticles_file(writers.FortranWriter(filename), 726 matrix_elements) 727 728 # Touch "done" file 729 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 730 731 # Check for compiler 732 fcompiler_chosen = self.set_fortran_compiler(compiler_dict['fortran']) 733 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 734 735 old_pos = os.getcwd() 736 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 737 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 738 proc[0] == 'P'] 739 740 devnull = os.open(os.devnull, os.O_RDWR) 741 # Convert the poscript in jpg files (if authorize) 742 if makejpg: 743 logger.info("Generate jpeg diagrams") 744 for Pdir in P_dir_list: 745 os.chdir(Pdir) 746 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 747 stdout = devnull) 748 os.chdir(os.path.pardir) 749 # 750 logger.info("Generate web pages") 751 # Create the WebPage using perl script 752 753 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 754 stdout = devnull) 755 756 os.chdir(os.path.pardir) 757 # 758 # obj = gen_infohtml.make_info_html(self.dir_path) 759 # [mv(name, './HTML/') for name in os.listdir('.') if \ 760 # (name.endswith('.html') or name.endswith('.jpg')) and \ 761 # name != 'index.html'] 762 # if online: 763 # nb_channel = obj.rep_rule['nb_gen_diag'] 764 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 765 766 # Write command history as proc_card_mg5 767 if os.path.isdir('Cards'): 768 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 769 history.write(output_file) 770 771 # Duplicate run_card and FO_analyse_card 772 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 773 try: 774 shutil.copy(pjoin(self.dir_path, 'Cards', 775 card + '.dat'), 776 pjoin(self.dir_path, 'Cards', 777 card + '_default.dat')) 778 except IOError: 779 logger.warning("Failed to copy " + card + ".dat to default") 780 781 782 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 783 stdout = devnull) 784 785 # Run "make" to generate madevent.tar.gz file 786 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 787 if 
os.path.exists('amcatnlo.tar.gz'): 788 os.remove('amcatnlo.tar.gz') 789 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 790 stdout = devnull) 791 # 792 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 793 stdout = devnull) 794 795 #return to the initial dir 796 os.chdir(old_pos) 797 798 # Setup stdHep 799 # Find the correct fortran compiler 800 base_compiler= ['FC=g77','FC=gfortran'] 801 802 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 803 804 if output_dependencies == 'external': 805 # check if stdhep has to be compiled (only the first time) 806 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 807 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')): 808 if 'FC' not in os.environ or not os.environ['FC']: 809 path = os.path.join(StdHep_path, 'src', 'make_opts') 810 text = open(path).read() 811 for base in base_compiler: 812 text = text.replace(base,'FC=%s' % fcompiler_chosen) 813 open(path, 'w').writelines(text) 814 815 logger.info('Compiling StdHEP. This has to be done only once.') 816 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 817 logger.info('Done.') 818 #then link the libraries in the exported dir 819 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 820 pjoin(self.dir_path, 'MCatNLO', 'lib')) 821 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 822 pjoin(self.dir_path, 'MCatNLO', 'lib')) 823 824 elif output_dependencies == 'internal': 825 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 826 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 827 # Create the links to the lib folder 828 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 829 for file in linkfiles: 830 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 831 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 832 if 'FC' not in os.environ or not os.environ['FC']: 833 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 834 text = open(path).read() 835 for base in base_compiler: 836 text = text.replace(base,'FC=%s' % fcompiler_chosen) 837 open(path, 'w').writelines(text) 838 # To avoid compiler version conflicts, we force a clean here 839 misc.compile(['clean'],cwd = StdHEP_internal_path) 840 841 elif output_dependencies == 'environment_paths': 842 # Here the user chose to define the dependencies path in one of 843 # his environmental paths 844 libStdHep = misc.which_lib('libstdhep.a') 845 libFmcfio = misc.which_lib('libFmcfio.a') 846 if not libStdHep is None and not libFmcfio is None: 847 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 848 os.path.dirname(libStdHep)) 849 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 850 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 851 else: 852 raise InvalidCmd("Could not find the location of the files"+\ 853 " libstdhep.a and libFmcfio.a in you environment paths.") 854 855 else: 856 raise MadGraph5Error, 'output_dependencies option %s not recognized'\ 857 %output_dependencies
858 859
860 - def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
861 """Writes the real_from_born_configs.inc file that contains 862 the mapping to go for a given born configuration (that is used 863 e.g. in the multi-channel phase-space integration to the 864 corresponding real-emission diagram, i.e. the real emission 865 diagram in which the combined ij is split in i_fks and 866 j_fks.""" 867 lines=[] 868 lines2=[] 869 max_links=0 870 born_me=matrix_element.born_matrix_element 871 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 872 iFKS=iFKS+1 873 links=conf['fks_info']['rb_links'] 874 max_links=max(max_links,len(links)) 875 for i,diags in enumerate(links): 876 if not i == diags['born_conf']: 877 print links 878 raise MadGraph5Error, "born_conf should be canonically ordered" 879 real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links]) 880 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \ 881 % (iFKS,len(links),real_configs)) 882 883 lines2.append("integer irfbc") 884 lines2.append("integer real_from_born_conf(%d,%d)" \ 885 % (max_links,len(matrix_element.get_fks_info_list()))) 886 # Write the file 887 writer.writelines(lines2+lines)
888 889 890 #=============================================================================== 891 # write_get_mass_width_file 892 #=============================================================================== 893 #test written
894 - def write_get_mass_width_file(self, writer, makeinc, model):
895 """Write the get_mass_width_file.f file for MG4. 896 Also update the makeinc.inc file 897 """ 898 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero'] 899 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero'] 900 901 iflines_mass = '' 902 iflines_width = '' 903 904 for i, part in enumerate(mass_particles): 905 if i == 0: 906 ifstring = 'if' 907 else: 908 ifstring = 'else if' 909 if part['self_antipart']: 910 iflines_mass += '%s (id.eq.%d) then\n' % \ 911 (ifstring, part.get_pdg_code()) 912 else: 913 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 914 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 915 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass') 916 917 for i, part in enumerate(width_particles): 918 if i == 0: 919 ifstring = 'if' 920 else: 921 ifstring = 'else if' 922 if part['self_antipart']: 923 iflines_width += '%s (id.eq.%d) then\n' % \ 924 (ifstring, part.get_pdg_code()) 925 else: 926 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 927 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 928 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width') 929 930 replace_dict = {'iflines_mass' : iflines_mass, 931 'iflines_width' : iflines_width} 932 933 file = open(os.path.join(_file_path, \ 934 'iolibs/template_files/get_mass_width_fcts.inc')).read() 935 file = file % replace_dict 936 937 # Write the file 938 writer.writelines(file) 939 940 # update the makeinc 941 makeinc_content = open(makeinc).read() 942 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ') 943 open(makeinc, 'w').write(makeinc_content) 944 945 return
946 947
948 - def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model):
949 """writes the declarations for the variables relevant for configs_and_props 950 """ 951 lines = [] 952 lines.append("integer ifr,lmaxconfigs_used,max_branch_used") 953 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig) 954 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number) 955 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs) 956 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 957 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 958 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 959 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 960 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 961 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 962 963 writer.writelines(lines)
964 965
966 - def write_configs_and_props_info_file(self, filename, matrix_element):
967 """writes the configs_and_props_info.inc file that cointains 968 all the (real-emission) configurations (IFOREST) as well as 969 the masses and widths of intermediate particles""" 970 lines = [] 971 lines.append("# C -> MAPCONFIG_D") 972 lines.append("# F/D -> IFOREST_D") 973 lines.append("# S -> SPROP_D") 974 lines.append("# T -> TPRID_D") 975 lines.append("# M -> PMASS_D/PWIDTH_D") 976 lines.append("# P -> POW_D") 977 lines2 = [] 978 nconfs = len(matrix_element.get_fks_info_list()) 979 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 980 981 max_iconfig=0 982 max_leg_number=0 983 984 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 985 iFKS=iFKS+1 986 iconfig = 0 987 s_and_t_channels = [] 988 mapconfigs = [] 989 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element 990 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams') 991 model = fks_matrix_element.get('base_amplitude').get('process').get('model') 992 minvert = min([max([len(vert.get('legs')) for vert in \ 993 diag.get('vertices')]) for diag in base_diagrams]) 994 995 lines.append("# ") 996 lines.append("# nFKSprocess %d" % iFKS) 997 for idiag, diag in enumerate(base_diagrams): 998 if any([len(vert.get('legs')) > minvert for vert in 999 diag.get('vertices')]): 1000 # Only 3-vertices allowed in configs.inc 1001 continue 1002 iconfig = iconfig + 1 1003 helas_diag = fks_matrix_element.get('diagrams')[idiag] 1004 mapconfigs.append(helas_diag.get('number')) 1005 lines.append("# Diagram %d for nFKSprocess %d" % \ 1006 (helas_diag.get('number'),iFKS)) 1007 # Correspondance between the config and the amplitudes 1008 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1009 helas_diag.get('number'))) 1010 1011 # Need to reorganize the topology so that we start with all 1012 # final state external particles and work our way inwards 1013 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1014 get_s_and_t_channels(ninitial, model, 990) 1015 1016 s_and_t_channels.append([schannels, tchannels]) 1017 1018 # Write out propagators for s-channel and t-channel vertices 1019 allchannels = schannels 1020 if len(tchannels) > 1: 1021 # Write out tchannels only if there are any non-trivial ones 1022 allchannels = schannels + tchannels 1023 1024 for vert in allchannels: 1025 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1026 last_leg = vert.get('legs')[-1] 1027 lines.append("F %4d %4d %4d %4d" % \ 1028 (iFKS,last_leg.get('number'), iconfig, len(daughters))) 1029 for d in daughters: 1030 lines.append("D %4d" % d) 1031 if vert in schannels: 1032 lines.append("S %4d %4d %4d %10d" % \ 1033 (iFKS,last_leg.get('number'), iconfig, 1034 last_leg.get('id'))) 1035 elif vert in tchannels[:-1]: 1036 lines.append("T %4d %4d %4d %10d" % \ 1037 (iFKS,last_leg.get('number'), iconfig, 1038 abs(last_leg.get('id')))) 1039 1040 # update what the array sizes (mapconfig,iforest,etc) will be 1041 max_leg_number = min(max_leg_number,last_leg.get('number')) 1042 max_iconfig = max(max_iconfig,iconfig) 1043 1044 # Write out number of configs 1045 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1046 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1047 1048 # write the props.inc information 1049 lines2.append("# ") 1050 particle_dict = fks_matrix_element.get('processes')[0].get('model').\ 1051 get('particle_dict') 1052 1053 for iconf, configs in enumerate(s_and_t_channels): 1054 for vertex in configs[0] + configs[1][:-1]: 1055 leg = vertex.get('legs')[-1] 1056 if 
leg.get('id') not in particle_dict: 1057 # Fake propagator used in multiparticle vertices 1058 pow_part = 0 1059 else: 1060 particle = particle_dict[leg.get('id')] 1061 1062 pow_part = 1 + int(particle.is_boson()) 1063 1064 lines2.append("M %4d %4d %4d %10d " % \ 1065 (iFKS,leg.get('number'), iconf + 1, leg.get('id'))) 1066 lines2.append("P %4d %4d %4d %4d " % \ 1067 (iFKS,leg.get('number'), iconf + 1, pow_part)) 1068 1069 # Write the file 1070 open(filename,'w').write('\n'.join(lines+lines2)) 1071 1072 return max_iconfig, max_leg_number, nconfs
1073 1074
 1075      def write_leshouche_info_declarations(self, writer, nfksconfs, 
 1076                                  maxproc, maxflow, nexternal, fortran_model): 
 1077          """writes the declarations for the variables relevant for leshouche_info 
 1078          """ 
 1079          lines = [] 
 1080          lines.append('integer maxproc_used, maxflow_used') 
 1081          lines.append('parameter (maxproc_used = %d)' % maxproc) 
 1082          lines.append('parameter (maxflow_used = %d)' % maxflow) 
 1083          lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal)) 
 1084          lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal)) 
 1085          lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal)) 
 1086   
 1087          writer.writelines(lines) 
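To make the array shapes concrete, this sketch (an illustration, not part of the module) reproduces the lines that would be written for, say, 3 FKS configurations, 2 processes, 4 colour flows and 5 external legs; in the real flow these numbers come from write_leshouche_info_file:

nfksconfs, maxproc, maxflow, nexternal = 3, 2, 4, 5   # invented example values

lines = []
lines.append('integer maxproc_used, maxflow_used')
lines.append('parameter (maxproc_used = %d)' % maxproc)
lines.append('parameter (maxflow_used = %d)' % maxflow)
lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal))
lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal))
lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal))
print('\n'.join(lines))
# e.g. "integer idup_d(3,5,maxproc_used)" and so on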
1088 1089
1090 - def write_leshouche_info_file(self, filename, matrix_element):
1091 """writes the leshouche_info.inc file which contains 1092 the LHA informations for all the real emission processes 1093 """ 1094 lines = [] 1095 lines.append("# I -> IDUP_D") 1096 lines.append("# M -> MOTHUP_D") 1097 lines.append("# C -> ICOLUP_D") 1098 nfksconfs = len(matrix_element.get_fks_info_list()) 1099 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1100 1101 maxproc = 0 1102 maxflow = 0 1103 for i, conf in enumerate(matrix_element.get_fks_info_list()): 1104 # for i, real in enumerate(matrix_element.real_processes): 1105 (newlines, nprocs, nflows) = self.get_leshouche_lines( 1106 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1) 1107 lines.extend(newlines) 1108 maxproc = max(maxproc, nprocs) 1109 maxflow = max(maxflow, nflows) 1110 1111 # Write the file 1112 open(filename,'w').write('\n'.join(lines)) 1113 1114 return nfksconfs, maxproc, maxflow, nexternal
1115 1116
1117 - def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
1118 """writes the wrapper which allows to chose among the different real matrix elements""" 1119 1120 file = \ 1121 """double precision function dlum() 1122 implicit none 1123 include 'timing_variables.inc' 1124 integer nfksprocess 1125 common/c_nfksprocess/nfksprocess 1126 call cpu_time(tbefore) 1127 """ 1128 if matrix_element.real_processes: 1129 for n, info in enumerate(matrix_element.get_fks_info_list()): 1130 file += \ 1131 """if (nfksprocess.eq.%(n)d) then 1132 call dlum_%(n_me)d(dlum) 1133 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1134 file += \ 1135 """ 1136 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess 1137 stop 1138 endif 1139 call cpu_time(tAfter) 1140 tPDF = tPDF + (tAfter-tBefore) 1141 return 1142 end 1143 """ 1144 else: 1145 file+= \ 1146 """call dlum_0(dlum) 1147 call cpu_time(tAfter) 1148 tPDF = tPDF + (tAfter-tBefore) 1149 return 1150 end 1151 """ 1152 1153 # Write the file 1154 writer.writelines(file) 1155 return 0
1156 1157
1158 - def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
1159 """writes the wrapper which allows to chose among the different real matrix elements""" 1160 1161 file = \ 1162 """subroutine smatrix_real(p, wgt) 1163 implicit none 1164 include 'nexternal.inc' 1165 double precision p(0:3, nexternal) 1166 double precision wgt 1167 integer nfksprocess 1168 common/c_nfksprocess/nfksprocess 1169 """ 1170 for n, info in enumerate(matrix_element.get_fks_info_list()): 1171 file += \ 1172 """if (nfksprocess.eq.%(n)d) then 1173 call smatrix_%(n_me)d(p, wgt) 1174 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1175 1176 if matrix_element.real_processes: 1177 file += \ 1178 """ 1179 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess 1180 stop 1181 endif 1182 return 1183 end 1184 """ 1185 else: 1186 file += \ 1187 """ 1188 wgt=0d0 1189 return 1190 end 1191 """ 1192 # Write the file 1193 writer.writelines(file) 1194 return 0
1195 1196
1197 - def draw_feynman_diagrams(self, matrix_element):
1198 """Create the ps files containing the feynman diagrams for the born process, 1199 as well as for all the real emission processes""" 1200 1201 filename = 'born.ps' 1202 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\ 1203 get('base_amplitude').get('diagrams'), 1204 filename, 1205 model=matrix_element.born_matrix_element.\ 1206 get('processes')[0].get('model'), 1207 amplitude=True, diagram_type='born') 1208 plot.draw() 1209 1210 for n, fksreal in enumerate(matrix_element.real_processes): 1211 filename = 'matrix_%d.ps' % (n + 1) 1212 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\ 1213 get('base_amplitude').get('diagrams'), 1214 filename, 1215 model=fksreal.matrix_element.\ 1216 get('processes')[0].get('model'), 1217 amplitude=True, diagram_type='real') 1218 plot.draw()
1219 1220
1221 - def write_real_matrix_elements(self, matrix_element, fortran_model):
1222 """writes the matrix_i.f files which contain the real matrix elements""" 1223 1224 for n, fksreal in enumerate(matrix_element.real_processes): 1225 filename = 'matrix_%d.f' % (n + 1) 1226 self.write_matrix_element_fks(writers.FortranWriter(filename), 1227 fksreal.matrix_element, n + 1, 1228 fortran_model)
1229
1230 - def write_pdf_calls(self, matrix_element, fortran_model):
1231 """writes the parton_lum_i.f files which contain the real matrix elements. 1232 If no real emission existst, write the one for the born""" 1233 1234 if matrix_element.real_processes: 1235 for n, fksreal in enumerate(matrix_element.real_processes): 1236 filename = 'parton_lum_%d.f' % (n + 1) 1237 self.write_pdf_file(writers.FortranWriter(filename), 1238 fksreal.matrix_element, n + 1, 1239 fortran_model) 1240 else: 1241 filename = 'parton_lum_0.f' 1242 self.write_pdf_file(writers.FortranWriter(filename), 1243 matrix_element.born_matrix_element, 0, 1244 fortran_model)
1245 1246
1247 - def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
1248 """generates the files needed for the born amplitude in the P* directory, which will 1249 be needed by the P* directories""" 1250 pathdir = os.getcwd() 1251 1252 filename = 'born.f' 1253 calls_born, ncolor_born = \ 1254 self.write_born_fks(writers.FortranWriter(filename),\ 1255 matrix_element, 1256 fortran_model) 1257 1258 filename = 'born_hel.f' 1259 self.write_born_hel(writers.FortranWriter(filename),\ 1260 matrix_element, 1261 fortran_model) 1262 1263 1264 filename = 'born_conf.inc' 1265 nconfigs, mapconfigs, s_and_t_channels = \ 1266 self.write_configs_file( 1267 writers.FortranWriter(filename), 1268 matrix_element.born_matrix_element, 1269 fortran_model) 1270 1271 filename = 'born_props.inc' 1272 self.write_props_file(writers.FortranWriter(filename), 1273 matrix_element.born_matrix_element, 1274 fortran_model, 1275 s_and_t_channels) 1276 1277 filename = 'born_decayBW.inc' 1278 self.write_decayBW_file(writers.FortranWriter(filename), 1279 s_and_t_channels) 1280 1281 filename = 'born_leshouche.inc' 1282 nflows = self.write_leshouche_file(writers.FortranWriter(filename), 1283 matrix_element.born_matrix_element, 1284 fortran_model) 1285 1286 filename = 'born_nhel.inc' 1287 self.write_born_nhel_file(writers.FortranWriter(filename), 1288 matrix_element.born_matrix_element, nflows, 1289 fortran_model, 1290 ncolor_born) 1291 1292 filename = 'born_ngraphs.inc' 1293 self.write_ngraphs_file(writers.FortranWriter(filename), 1294 matrix_element.born_matrix_element.get_number_of_amplitudes()) 1295 1296 filename = 'ncombs.inc' 1297 self.write_ncombs_file(writers.FortranWriter(filename), 1298 matrix_element.born_matrix_element, 1299 fortran_model) 1300 1301 filename = 'born_maxamps.inc' 1302 maxamps = len(matrix_element.get('diagrams')) 1303 maxflows = ncolor_born 1304 self.write_maxamps_file(writers.FortranWriter(filename), 1305 maxamps, 1306 maxflows, 1307 max([len(matrix_element.get('processes')) for me in \ 1308 matrix_element.born_matrix_element]),1) 1309 1310 filename = 'config_subproc_map.inc' 1311 self.write_config_subproc_map_file(writers.FortranWriter(filename), 1312 s_and_t_channels) 1313 1314 filename = 'coloramps.inc' 1315 self.write_coloramps_file(writers.FortranWriter(filename), 1316 mapconfigs, 1317 matrix_element.born_matrix_element, 1318 fortran_model) 1319 1320 #write the sborn_sf.f and the b_sf_files 1321 filename = ['sborn_sf.f', 'sborn_sf_dum.f'] 1322 for i, links in enumerate([matrix_element.color_links, []]): 1323 self.write_sborn_sf(writers.FortranWriter(filename[i]), 1324 links, 1325 fortran_model) 1326 self.color_link_files = [] 1327 for i in range(len(matrix_element.color_links)): 1328 filename = 'b_sf_%3.3d.f' % (i + 1) 1329 self.color_link_files.append(filename) 1330 self.write_b_sf_fks(writers.FortranWriter(filename), 1331 matrix_element, i, 1332 fortran_model)
1333
1334 - def generate_virtuals_from_OLP(self,FKSHMultiproc,export_path, OLP):
1335 """Generates the library for computing the loop matrix elements 1336 necessary for this process using the OLP specified.""" 1337 1338 # Start by writing the BLHA order file 1339 virtual_path = pjoin(export_path,'OLP_virtuals') 1340 if not os.path.exists(virtual_path): 1341 os.makedirs(virtual_path) 1342 filename = os.path.join(virtual_path,'OLE_order.lh') 1343 self.write_lh_order(filename, FKSHMultiproc.get('matrix_elements'),OLP) 1344 1345 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\ 1346 'Please check the virt_generation.log file in %s.'\ 1347 %str(pjoin(virtual_path,'virt_generation.log')) 1348 1349 # Perform some tasks specific to certain OLP's 1350 if OLP=='GoSam': 1351 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1352 'GoSam','makevirt'),pjoin(virtual_path,'makevirt')) 1353 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1354 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc')) 1355 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path) 1356 # Now generate the process 1357 logger.info('Generating the loop matrix elements with %s...'%OLP) 1358 virt_generation_log = \ 1359 open(pjoin(virtual_path,'virt_generation.log'), 'w') 1360 retcode = subprocess.call(['./makevirt'],cwd=virtual_path, 1361 stdout=virt_generation_log, stderr=virt_generation_log) 1362 virt_generation_log.close() 1363 # Check what extension is used for the share libraries on this system 1364 possible_other_extensions = ['so','dylib'] 1365 shared_lib_ext='so' 1366 for ext in possible_other_extensions: 1367 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib', 1368 'libgolem_olp.'+ext)): 1369 shared_lib_ext = ext 1370 1371 # Now check that everything got correctly generated 1372 files_to_check = ['olp_module.mod',str(pjoin('lib', 1373 'libgolem_olp.'+shared_lib_ext))] 1374 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path, 1375 'Virtuals',f)) for f in files_to_check]): 1376 raise fks_common.FKSProcessError(fail_msg) 1377 # link the library to the lib folder 1378 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext), 1379 pjoin(export_path,'lib')) 1380 1381 # Specify in make_opts the right library necessitated by the OLP 1382 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read() 1383 make_opts=open(pjoin(export_path,'Source','make_opts'),'w') 1384 if OLP=='GoSam': 1385 # apparently -rpath=../$(LIBDIR) is not necessary. 1386 #make_opts_content=make_opts_content.replace('libOLP=', 1387 # 'libOLP=-Wl,-rpath=../$(LIBDIR),-lgolem_olp') 1388 make_opts_content=make_opts_content.replace('libOLP=', 1389 'libOLP=-Wl,-lgolem_olp') 1390 make_opts.write(make_opts_content) 1391 make_opts.close() 1392 1393 # A priori this is generic to all OLP's 1394 1395 # Parse the contract file returned and propagate the process label to 1396 # the include of the BinothLHA.f file 1397 proc_to_label = self.parse_contract_file( 1398 pjoin(virtual_path,'OLE_order.olc')) 1399 1400 self.write_BinothLHA_inc(FKSHMultiproc,proc_to_label,\ 1401 pjoin(export_path,'SubProcesses')) 1402 1403 # Link the contract file to within the SubProcess directory 1404 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1405
1406 - def write_BinothLHA_inc(self, FKSHMultiproc, proc_to_label, SubProcPath):
1407 """ Write the file Binoth_proc.inc in each SubProcess directory so as 1408 to provide the right process_label to use in the OLP call to get the 1409 loop matrix element evaluation. The proc_to_label is the dictionary of 1410 the format of the one returned by the function parse_contract_file.""" 1411 1412 for matrix_element in FKSHMultiproc.get('matrix_elements'): 1413 proc = matrix_element.get('processes')[0] 1414 name = "P%s"%proc.shell_string() 1415 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \ 1416 not leg.get('state')]), 1417 tuple([leg.get('id') for leg in proc.get('legs') if \ 1418 leg.get('state')])) 1419 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w') 1420 try: 1421 incFile.write( 1422 """ INTEGER PROC_LABEL 1423 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs])) 1424 except KeyError: 1425 raise fks_common.FKSProcessError('Could not found the target'+\ 1426 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\ 1427 ' the proc_to_label argument in write_BinothLHA_inc.') 1428 incFile.close()
1429
 1430      def parse_contract_file(self, contract_file_path): 
 1431          """Parse the BLHA contract file, make sure all parameters could be 
 1432          understood by the OLP, and return a mapping of the processes (characterized 
 1433          by the pdg's of the initial and final state particles) to their process 
 1434          label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 
 1435          """ 
 1436   
 1437          proc_def_to_label = {} 
 1438   
 1439          if not os.path.exists(contract_file_path): 
 1440              raise fks_common.FKSProcessError('Could not find the contract file' + \
 1441                  ' OLE_order.olc in %s.' % str(contract_file_path)) 
 1442   
 1443          comment_re = re.compile(r"^\s*#") 
 1444          proc_def_re = re.compile( 
 1445              r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|" + 
 1446              r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 
 1447          line_OK_re = re.compile(r"^.*\|\s*OK") 
 1448          for line in file(contract_file_path): 
 1449              # Ignore comments 
 1450              if not comment_re.match(line) is None: 
 1451                  continue 
 1452              # Check if it is a proc definition line 
 1453              proc_def = proc_def_re.match(line) 
 1454              if not proc_def is None: 
 1455                  if int(proc_def.group('proc_class')) != 1: 
 1456                      raise fks_common.FKSProcessError( 
 1457                          'aMCatNLO can only handle loop processes generated by the OLP which have only' + \
 1458                          ' one process class attribute. Found %s instead in: \n%s' \
 1459                          % (proc_def.group('proc_class'), line)) 
 1460                  in_pdgs = tuple([int(in_pdg) for in_pdg in \
 1461                                   proc_def.group('in_pdgs').split()]) 
 1462                  out_pdgs = tuple([int(out_pdg) for out_pdg in \
 1463                                    proc_def.group('out_pdgs').split()]) 
 1464                  proc_def_to_label[(in_pdgs, out_pdgs)] = \
 1465                      int(proc_def.group('proc_label')) 
 1466                  continue 
 1467              # For the other types of line, just make sure they end with | OK 
 1468              if line_OK_re.match(line) is None: 
 1469                  raise fks_common.FKSProcessError( 
 1470                      'The OLP could not process the following line: \n%s' % line) 
 1471   
 1472          return proc_def_to_label 
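A stand-alone sketch of the process-definition lines this parser accepts; the contract-file excerpt is invented for illustration and only the regular expression above is reused:

import re

proc_def_re = re.compile(
    r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|" +
    r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$")

# invented excerpt of an OLE_order.olc contract file: "in -> out | class label"
contract_lines = [
    "# process",
    "21 21 -> 6 -6 | 1 1",
    "2 -2 -> 6 -6 | 1 2",
]

proc_def_to_label = {}
for line in contract_lines:
    proc_def = proc_def_re.match(line)
    if proc_def is None:
        continue
    in_pdgs = tuple(int(p) for p in proc_def.group('in_pdgs').split())
    out_pdgs = tuple(int(p) for p in proc_def.group('out_pdgs').split())
    proc_def_to_label[(in_pdgs, out_pdgs)] = int(proc_def.group('proc_label'))

print(proc_def_to_label)
# {((21, 21), (6, -6)): 1, ((2, -2), (6, -6)): 2}   (key order may vary)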
1473 1474
1475 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1476 """writes the V**** directory inside the P**** directories specified in 1477 dir_name""" 1478 1479 cwd = os.getcwd() 1480 1481 matrix_element = loop_matrix_element 1482 1483 # Create the MadLoop5_resources directory if not already existing 1484 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 1485 try: 1486 os.mkdir(dirpath) 1487 except os.error as error: 1488 logger.warning(error.strerror + " " + dirpath) 1489 1490 # Create the directory PN_xx_xxxxx in the specified path 1491 name = "V%s" % matrix_element.get('processes')[0].shell_string() 1492 dirpath = os.path.join(dir_name, name) 1493 1494 try: 1495 os.mkdir(dirpath) 1496 except os.error as error: 1497 logger.warning(error.strerror + " " + dirpath) 1498 1499 try: 1500 os.chdir(dirpath) 1501 except os.error: 1502 logger.error('Could not cd to directory %s' % dirpath) 1503 return 0 1504 1505 logger.info('Creating files in directory %s' % name) 1506 1507 # Extract number of external particles 1508 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1509 1510 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 1511 # The born matrix element, if needed 1512 filename = 'born_matrix.f' 1513 calls = self.write_bornmatrix( 1514 writers.FortranWriter(filename), 1515 matrix_element, 1516 fortran_model) 1517 1518 filename = 'nexternal.inc' 1519 self.write_nexternal_file(writers.FortranWriter(filename), 1520 nexternal, ninitial) 1521 1522 filename = 'pmass.inc' 1523 self.write_pmass_file(writers.FortranWriter(filename), 1524 matrix_element) 1525 1526 filename = 'ngraphs.inc' 1527 self.write_ngraphs_file(writers.FortranWriter(filename), 1528 len(matrix_element.get_all_amplitudes())) 1529 1530 filename = "loop_matrix.ps" 1531 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 1532 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 1533 filename, 1534 model=matrix_element.get('processes')[0].get('model'), 1535 amplitude='') 1536 logger.info("Drawing loop Feynman diagrams for " + \ 1537 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1538 plot.draw() 1539 1540 filename = "born_matrix.ps" 1541 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1542 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\ 1543 get('model'),amplitude='') 1544 logger.info("Generating born Feynman diagrams for " + \ 1545 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1546 plot.draw() 1547 1548 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 1549 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 1550 'MadLoopCommons.f','MadLoopParams.inc'] 1551 1552 # We should move to MadLoop5_resources directory from the SubProcesses 1553 1554 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 1555 pjoin('..','MadLoop5_resources')) 1556 1557 for file in linkfiles: 1558 ln('../../%s' % file) 1559 1560 os.system("ln -s ../../makefile_loop makefile") 1561 1562 linkfiles = ['mpmodule.mod'] 1563 1564 for file in linkfiles: 1565 ln('../../../lib/%s' % file) 1566 1567 # Return to original PWD 1568 os.chdir(cwd) 1569 1570 if not calls: 1571 calls = 0 1572 return calls
1573
1574 - def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
1575 """computes the QED/QCD orders from the knowledge of the n of ext particles 1576 and of the weighted orders""" 1577 # n vertices = nexternal - 2 =QED + QCD 1578 # weighted = 2*QED + QCD 1579 QED = weighted - nexternal + 2 1580 QCD = weighted - 2 * QED 1581 return QED, QCD
1582 1583 1584 1585 #=============================================================================== 1586 # write_lh_order 1587 #=============================================================================== 1588 #test written
1589 - def write_lh_order(self, filename, matrix_elements, OLP='MadLoop'):
1590 """Creates the OLE_order.lh file. This function should be edited according 1591 to the OLP which is used. For now it is generic.""" 1592 1593 if isinstance(matrix_elements,fks_helas_objects.FKSHelasProcess): 1594 fksborns=fks_helas_objects.FKSHelasProcessList([matrix_elements]) 1595 elif isinstance(matrix_elements,fks_helas_objects.FKSHelasProcessList): 1596 fksborns= matrix_elements 1597 else: 1598 raise fks_common.FKSProcessError('Wrong type of argument for '+\ 1599 'matrix_elements in function write_lh_order.') 1600 1601 if len(fksborns)==0: 1602 raise fks_common.FKSProcessError('No matrix elements provided to '+\ 1603 'the function write_lh_order.') 1604 return 1605 1606 # We assume the orders to be common to all Subprocesses 1607 1608 orders = fksborns[0].orders 1609 if 'QED' in orders.keys() and 'QCD' in orders.keys(): 1610 QED=orders['QED'] 1611 QCD=orders['QCD'] 1612 elif 'QED' in orders.keys(): 1613 QED=orders['QED'] 1614 QCD=0 1615 elif 'QCD' in orders.keys(): 1616 QED=0 1617 QCD=orders['QCD'] 1618 else: 1619 QED, QCD = self.get_qed_qcd_orders_from_weighted(\ 1620 fksborns[0].get_nexternal_ninitial()[0]-1, # -1 is because the function returns nexternal of the real emission 1621 orders['WEIGHTED']) 1622 1623 replace_dict = {} 1624 replace_dict['mesq'] = 'CHaveraged' 1625 replace_dict['corr'] = ' '.join(matrix_elements[0].get('processes')[0].\ 1626 get('perturbation_couplings')) 1627 replace_dict['irreg'] = 'CDR' 1628 replace_dict['aspow'] = QCD 1629 replace_dict['aepow'] = QED 1630 replace_dict['modelfile'] = './param_card.dat' 1631 replace_dict['params'] = 'alpha_s' 1632 proc_lines=[] 1633 for fksborn in fksborns: 1634 proc_lines.append(fksborn.get_lh_pdg_string()) 1635 replace_dict['pdgs'] = '\n'.join(proc_lines) 1636 replace_dict['symfin'] = 'Yes' 1637 content = \ 1638 "#OLE_order written by MadGraph5_aMC@NLO\n\ 1639 \n\ 1640 MatrixElementSquareType %(mesq)s\n\ 1641 CorrectionType %(corr)s\n\ 1642 IRregularisation %(irreg)s\n\ 1643 AlphasPower %(aspow)d\n\ 1644 AlphaPower %(aepow)d\n\ 1645 NJetSymmetrizeFinal %(symfin)s\n\ 1646 ModelFile %(modelfile)s\n\ 1647 Parameters %(params)s\n\ 1648 \n\ 1649 # process\n\ 1650 %(pdgs)s\n\ 1651 " % replace_dict 1652 1653 file = open(filename, 'w') 1654 file.write(content) 1655 file.close 1656 return
1657 1658 1659 #=============================================================================== 1660 # write_born_fks 1661 #=============================================================================== 1662 # test written
1663 - def write_born_fks(self, writer, fksborn, fortran_model):
1664 """Export a matrix element to a born.f file in MadFKS format""" 1665 1666 matrix_element = fksborn.born_matrix_element 1667 1668 if not matrix_element.get('processes') or \ 1669 not matrix_element.get('diagrams'): 1670 return 0 1671 1672 if not isinstance(writer, writers.FortranWriter): 1673 raise writers.FortranWriter.FortranWriterError(\ 1674 "writer not FortranWriter") 1675 # Set lowercase/uppercase Fortran code 1676 writers.FortranWriter.downcase = False 1677 1678 replace_dict = {} 1679 1680 # Extract version number and date from VERSION file 1681 info_lines = self.get_mg5_info_lines() 1682 replace_dict['info_lines'] = info_lines 1683 1684 # Extract process info lines 1685 process_lines = self.get_process_info_lines(matrix_element) 1686 replace_dict['process_lines'] = process_lines 1687 1688 1689 # Extract ncomb 1690 ncomb = matrix_element.get_helicity_combinations() 1691 replace_dict['ncomb'] = ncomb 1692 1693 # Extract helicity lines 1694 helicity_lines = self.get_helicity_lines(matrix_element) 1695 replace_dict['helicity_lines'] = helicity_lines 1696 1697 # Extract IC line 1698 ic_line = self.get_ic_line(matrix_element) 1699 replace_dict['ic_line'] = ic_line 1700 1701 # Extract overall denominator 1702 # Averaging initial state color, spin, and identical FS particles 1703 #den_factor_line = get_den_factor_line(matrix_element) 1704 1705 # Extract ngraphs 1706 ngraphs = matrix_element.get_number_of_amplitudes() 1707 replace_dict['ngraphs'] = ngraphs 1708 1709 # Extract nwavefuncs 1710 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1711 replace_dict['nwavefuncs'] = nwavefuncs 1712 1713 # Extract ncolor 1714 ncolor = max(1, len(matrix_element.get('color_basis'))) 1715 replace_dict['ncolor'] = ncolor 1716 1717 # Extract color data lines 1718 color_data_lines = self.get_color_data_lines(matrix_element) 1719 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1720 1721 # Extract helas calls 1722 helas_calls = fortran_model.get_matrix_element_calls(\ 1723 matrix_element) 1724 replace_dict['helas_calls'] = "\n".join(helas_calls) 1725 1726 # Extract amp2 lines 1727 amp2_lines = self.get_amp2_lines(matrix_element) 1728 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1729 1730 # Extract JAMP lines 1731 jamp_lines = self.get_JAMP_lines(matrix_element) 1732 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1733 1734 # Set the size of Wavefunction 1735 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 1736 replace_dict['wavefunctionsize'] = 20 1737 else: 1738 replace_dict['wavefunctionsize'] = 8 1739 1740 # Extract glu_ij_lines 1741 ij_lines = self.get_ij_lines(fksborn) 1742 replace_dict['ij_lines'] = '\n'.join(ij_lines) 1743 1744 # Extract den_factor_lines 1745 den_factor_lines = self.get_den_factor_lines(fksborn) 1746 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1747 1748 # Extract the number of FKS process 1749 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1) 1750 1751 file = open(os.path.join(_file_path, \ 1752 'iolibs/template_files/born_fks.inc')).read() 1753 file = file % replace_dict 1754 1755 # Write the file 1756 writer.writelines(file) 1757 1758 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
1759 1760
1761 - def write_born_hel(self, writer, fksborn, fortran_model):
1762 """Export a matrix element to a born_hel.f file in MadFKS format""" 1763 1764 matrix_element = fksborn.born_matrix_element 1765 1766 if not matrix_element.get('processes') or \ 1767 not matrix_element.get('diagrams'): 1768 return 0 1769 1770 if not isinstance(writer, writers.FortranWriter): 1771 raise writers.FortranWriter.FortranWriterError(\ 1772 "writer not FortranWriter") 1773 # Set lowercase/uppercase Fortran code 1774 writers.FortranWriter.downcase = False 1775 1776 replace_dict = {} 1777 1778 # Extract version number and date from VERSION file 1779 info_lines = self.get_mg5_info_lines() 1780 replace_dict['info_lines'] = info_lines 1781 1782 # Extract process info lines 1783 process_lines = self.get_process_info_lines(matrix_element) 1784 replace_dict['process_lines'] = process_lines 1785 1786 1787 # Extract ncomb 1788 ncomb = matrix_element.get_helicity_combinations() 1789 replace_dict['ncomb'] = ncomb 1790 1791 # Extract helicity lines 1792 helicity_lines = self.get_helicity_lines(matrix_element) 1793 replace_dict['helicity_lines'] = helicity_lines 1794 1795 # Extract IC line 1796 ic_line = self.get_ic_line(matrix_element) 1797 replace_dict['ic_line'] = ic_line 1798 1799 # Extract overall denominator 1800 # Averaging initial state color, spin, and identical FS particles 1801 #den_factor_line = get_den_factor_line(matrix_element) 1802 1803 # Extract ngraphs 1804 ngraphs = matrix_element.get_number_of_amplitudes() 1805 replace_dict['ngraphs'] = ngraphs 1806 1807 # Extract nwavefuncs 1808 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1809 replace_dict['nwavefuncs'] = nwavefuncs 1810 1811 # Extract ncolor 1812 ncolor = max(1, len(matrix_element.get('color_basis'))) 1813 replace_dict['ncolor'] = ncolor 1814 1815 # Extract color data lines 1816 color_data_lines = self.get_color_data_lines(matrix_element) 1817 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1818 1819 # Extract amp2 lines 1820 amp2_lines = self.get_amp2_lines(matrix_element) 1821 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1822 1823 # Extract JAMP lines 1824 jamp_lines = self.get_JAMP_lines(matrix_element) 1825 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1826 1827 # Extract den_factor_lines 1828 den_factor_lines = self.get_den_factor_lines(fksborn) 1829 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1830 1831 # Extract the number of FKS process 1832 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1833 1834 file = open(os.path.join(_file_path, \ 1835 'iolibs/template_files/born_fks_hel.inc')).read() 1836 file = file % replace_dict 1837 1838 # Write the file 1839 writer.writelines(file) 1840 1841 return
1842 1843 1844 #=============================================================================== 1845 # write_born_sf_fks 1846 #=============================================================================== 1847 #test written
1848 - def write_sborn_sf(self, writer, color_links, fortran_model):
1849 """Creates the sborn_sf.f file, containing the calls to the different 1850 color linked borns""" 1851 1852 replace_dict = {} 1853 nborns = len(color_links) 1854 ifkss = [] 1855 iborns = [] 1856 mms = [] 1857 nns = [] 1858 iflines = "\n" 1859 1860 #header for the sborn_sf.f file 1861 file = """subroutine sborn_sf(p_born,m,n,wgt) 1862 implicit none 1863 include "nexternal.inc" 1864 double precision p_born(0:3,nexternal-1),wgt 1865 double complex wgt1(2) 1866 integer m,n \n""" 1867 1868 if nborns > 0: 1869 1870 for i, c_link in enumerate(color_links): 1871 iborn = i+1 1872 1873 iff = {True : 'if', False : 'elseif'}[i==0] 1874 1875 m, n = c_link['link'] 1876 1877 if m != n: 1878 iflines += \ 1879 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1880 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\ 1881 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1882 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1883 else: 1884 iflines += \ 1885 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1886 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\ 1887 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1888 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1889 1890 1891 file += iflines + \ 1892 """else 1893 wgt = 0d0 1894 endif 1895 1896 return 1897 end""" 1898 elif nborns == 0: 1899 #write a dummy file 1900 file+=""" 1901 c This is a dummy function because 1902 c this subdir has no soft singularities 1903 wgt = 0d0 1904 1905 return 1906 end""" 1907 # Write the end of the file 1908 1909 writer.writelines(file)
1910 1911 1912 #=============================================================================== 1913 # write_b_sf_fks 1914 #=============================================================================== 1915 #test written
1916 - def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
1917 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format""" 1918 1919 matrix_element = copy.copy(fksborn.born_matrix_element) 1920 1921 if not matrix_element.get('processes') or \ 1922 not matrix_element.get('diagrams'): 1923 return 0 1924 1925 if not isinstance(writer, writers.FortranWriter): 1926 raise writers.FortranWriter.FortranWriterError(\ 1927 "writer not FortranWriter") 1928 # Set lowercase/uppercase Fortran code 1929 writers.FortranWriter.downcase = False 1930 1931 iborn = i + 1 1932 link = fksborn.color_links[i] 1933 1934 replace_dict = {} 1935 1936 replace_dict['iborn'] = iborn 1937 1938 # Extract version number and date from VERSION file 1939 info_lines = self.get_mg5_info_lines() 1940 replace_dict['info_lines'] = info_lines 1941 1942 # Extract process info lines 1943 process_lines = self.get_process_info_lines(matrix_element) 1944 replace_dict['process_lines'] = process_lines + \ 1945 "\nc spectators: %d %d \n" % tuple(link['link']) 1946 1947 # Extract ncomb 1948 ncomb = matrix_element.get_helicity_combinations() 1949 replace_dict['ncomb'] = ncomb 1950 1951 # Extract helicity lines 1952 helicity_lines = self.get_helicity_lines(matrix_element) 1953 replace_dict['helicity_lines'] = helicity_lines 1954 1955 # Extract IC line 1956 ic_line = self.get_ic_line(matrix_element) 1957 replace_dict['ic_line'] = ic_line 1958 1959 # Extract den_factor_lines 1960 den_factor_lines = self.get_den_factor_lines(fksborn) 1961 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1962 1963 # Extract ngraphs 1964 ngraphs = matrix_element.get_number_of_amplitudes() 1965 replace_dict['ngraphs'] = ngraphs 1966 1967 # Extract nwavefuncs 1968 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1969 replace_dict['nwavefuncs'] = nwavefuncs 1970 1971 # Extract ncolor 1972 ncolor1 = max(1, len(link['orig_basis'])) 1973 replace_dict['ncolor1'] = ncolor1 1974 ncolor2 = max(1, len(link['link_basis'])) 1975 replace_dict['ncolor2'] = ncolor2 1976 1977 # Extract color data lines 1978 color_data_lines = self.get_color_data_lines_from_color_matrix(\ 1979 link['link_matrix']) 1980 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1981 1982 # Extract amp2 lines 1983 amp2_lines = self.get_amp2_lines(matrix_element) 1984 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1985 1986 # Extract JAMP lines 1987 jamp_lines = self.get_JAMP_lines(matrix_element) 1988 new_jamp_lines = [] 1989 for line in jamp_lines: 1990 line = string.replace(line, 'JAMP', 'JAMP1') 1991 new_jamp_lines.append(line) 1992 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines) 1993 1994 matrix_element.set('color_basis', link['link_basis'] ) 1995 jamp_lines = self.get_JAMP_lines(matrix_element) 1996 new_jamp_lines = [] 1997 for line in jamp_lines: 1998 line = string.replace(line, 'JAMP', 'JAMP2') 1999 new_jamp_lines.append(line) 2000 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines) 2001 2002 2003 # Extract the number of FKS process 2004 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2005 2006 file = open(os.path.join(_file_path, \ 2007 'iolibs/template_files/b_sf_xxx_fks.inc')).read() 2008 file = file % replace_dict 2009 2010 # Write the file 2011 writer.writelines(file) 2012 2013 return 0 , ncolor1
2014 2015 2016 #=============================================================================== 2017 # write_born_nhel_file 2018 #=============================================================================== 2019 #test written
2020 - def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
2021 """Write the born_nhel.inc file for MG4.""" 2022 2023 ncomb = matrix_element.get_helicity_combinations() 2024 file = " integer max_bhel, max_bcol \n" 2025 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \ 2026 (ncomb, nflows) 2027 2028 # Write the file 2029 writer.writelines(file) 2030 2031 return True
2032 2033 #=============================================================================== 2034 # write_nfksconfigs_file 2035 #===============================================================================
2036 - def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
2037 """Writes the content of nFKSconfigs.inc, which just gives the 2038 total FKS dirs as a parameter. 2039 nFKSconfigs is always >=1 (use a fake configuration for LOonly)""" 2040 replace_dict = {} 2041 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1) 2042 content = \ 2043 """ INTEGER FKS_CONFIGS 2044 PARAMETER (FKS_CONFIGS=%(nconfs)d) 2045 2046 """ % replace_dict 2047 2048 writer.writelines(content)
2049 2050 2051 #=============================================================================== 2052 # write_fks_info_file 2053 #===============================================================================
2054 - def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
2055 """Writes the content of fks_info.inc, which lists the informations on the 2056 possible splittings of the born ME. 2057 nconfs is always >=1 (use a fake configuration for LOonly). 2058 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and 2059 the last colored particle as j_fks.""" 2060 2061 replace_dict = {} 2062 fks_info_list = fksborn.get_fks_info_list() 2063 replace_dict['nconfs'] = max(len(fks_info_list), 1) 2064 2065 # this is for processes with 'real' or 'all' as NLO mode 2066 if len(fks_info_list) > 0: 2067 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \ 2068 for info in fks_info_list]) 2069 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \ 2070 for info in fks_info_list]) 2071 2072 col_lines = [] 2073 pdg_lines = [] 2074 charge_lines = [] 2075 fks_j_from_i_lines = [] 2076 for i, info in enumerate(fks_info_list): 2077 col_lines.append( \ 2078 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2079 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) )) 2080 pdg_lines.append( \ 2081 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2082 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs']))) 2083 charge_lines.append(\ 2084 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\ 2085 % (i + 1, ', '.join('%19.15fd0' % charg\ 2086 for charg in fksborn.real_processes[info['n_me']-1].charges) )) 2087 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\ 2088 i + 1)) 2089 else: 2090 # this is for 'LOonly', generate a fake FKS configuration with 2091 # - i_fks = nexternal, pdg type = -21 and color =8 2092 # - j_fks = the last colored particle 2093 bornproc = fksborn.born_matrix_element.get('processes')[0] 2094 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21] 2095 colors = [l.get('color') for l in bornproc.get('legs')] + [8] 2096 charges = [0.] 
* len(colors) 2097 2098 fks_i = len(colors) 2099 for cpos, col in enumerate(colors[:-1]): 2100 if col != 1: 2101 fks_j = cpos+1 2102 2103 fks_i_values = str(fks_i) 2104 fks_j_values = str(fks_j) 2105 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2106 % ', '.join([str(col) for col in colors])] 2107 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2108 % ', '.join([str(pdg) for pdg in pdgs])] 2109 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2110 % ', '.join('%19.15fd0' % charg for charg in charges)] 2111 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \ 2112 % (fks_i, fks_j)] 2113 2114 2115 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values 2116 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values 2117 replace_dict['col_lines'] = '\n'.join(col_lines) 2118 replace_dict['pdg_lines'] = '\n'.join(pdg_lines) 2119 replace_dict['charge_lines'] = '\n'.join(charge_lines) 2120 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines) 2121 2122 content = \ 2123 """ INTEGER IPOS, JPOS 2124 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d) 2125 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL) 2126 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL) 2127 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL) 2128 2129 %(fks_i_line)s 2130 %(fks_j_line)s 2131 2132 %(fks_j_from_i_lines)s 2133 2134 C 2135 C Particle type: 2136 C octet = 8, triplet = 3, singlet = 1 2137 %(col_lines)s 2138 2139 C 2140 C Particle type according to PDG: 2141 C 2142 %(pdg_lines)s 2143 2144 C 2145 C Particle charge: 2146 C charge is set 0. with QCD corrections, which is irrelevant 2147 %(charge_lines)s 2148 """ % replace_dict 2149 if not isinstance(writer, writers.FortranWriter): 2150 raise writers.FortranWriter.FortranWriterError(\ 2151 "writer not FortranWriter") 2152 # Set lowercase/uppercase Fortran code 2153 writers.FortranWriter.downcase = False 2154 2155 writer.writelines(content) 2156 2157 return True
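#       Sketch of the 'LOonly' fake configuration chosen above (standalone,
#       with hypothetical PDG/color assignments for a u u~ > z Born): a fake
#       antigluon (PDG -21, color octet) is appended as i_fks and the last
#       colored Born leg becomes j_fks.
#
#           pdgs   = [2, -2, 23] + [-21]     # Born legs plus fake antigluon
#           colors = [3, -3, 1] + [8]
#           fks_i = len(colors)              # 4, the appended antigluon
#           for cpos, col in enumerate(colors[:-1]):
#               if col != 1:
#                   fks_j = cpos + 1         # last colored leg wins
#           # fks_i = 4, fks_j = 2 (the u~) in this example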
2158 2159 2160 #=============================================================================== 2161 # write_matrix_element_fks 2162 #=============================================================================== 2163 #test written
2164 - def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
2165 """Export a matrix element to a matrix.f file in MG4 madevent format""" 2166 2167 if not matrix_element.get('processes') or \ 2168 not matrix_element.get('diagrams'): 2169 return 0,0 2170 2171 if not isinstance(writer, writers.FortranWriter): 2172 raise writers.FortranWriter.FortranWriterError(\ 2173 "writer not FortranWriter") 2174 # Set lowercase/uppercase Fortran code 2175 writers.FortranWriter.downcase = False 2176 2177 replace_dict = {} 2178 replace_dict['N_me'] = n 2179 2180 # Extract version number and date from VERSION file 2181 info_lines = self.get_mg5_info_lines() 2182 replace_dict['info_lines'] = info_lines 2183 2184 # Extract process info lines 2185 process_lines = self.get_process_info_lines(matrix_element) 2186 replace_dict['process_lines'] = process_lines 2187 2188 # Extract ncomb 2189 ncomb = matrix_element.get_helicity_combinations() 2190 replace_dict['ncomb'] = ncomb 2191 2192 # Extract helicity lines 2193 helicity_lines = self.get_helicity_lines(matrix_element) 2194 replace_dict['helicity_lines'] = helicity_lines 2195 2196 # Extract IC line 2197 ic_line = self.get_ic_line(matrix_element) 2198 replace_dict['ic_line'] = ic_line 2199 2200 # Extract overall denominator 2201 # Averaging initial state color, spin, and identical FS particles 2202 den_factor_line = self.get_den_factor_line(matrix_element) 2203 replace_dict['den_factor_line'] = den_factor_line 2204 2205 # Extract ngraphs 2206 ngraphs = matrix_element.get_number_of_amplitudes() 2207 replace_dict['ngraphs'] = ngraphs 2208 2209 # Extract ncolor 2210 ncolor = max(1, len(matrix_element.get('color_basis'))) 2211 replace_dict['ncolor'] = ncolor 2212 2213 # Extract color data lines 2214 color_data_lines = self.get_color_data_lines(matrix_element) 2215 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2216 2217 # Extract helas calls 2218 helas_calls = fortran_model.get_matrix_element_calls(\ 2219 matrix_element) 2220 replace_dict['helas_calls'] = "\n".join(helas_calls) 2221 2222 # Extract nwavefuncs (important to place after get_matrix_element_calls 2223 # so that 'me_id' is set) 2224 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2225 replace_dict['nwavefuncs'] = nwavefuncs 2226 2227 # Extract amp2 lines 2228 amp2_lines = self.get_amp2_lines(matrix_element) 2229 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2230 2231 # Set the size of Wavefunction 2232 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2233 replace_dict['wavefunctionsize'] = 20 2234 else: 2235 replace_dict['wavefunctionsize'] = 8 2236 2237 # Extract JAMP lines 2238 jamp_lines = self.get_JAMP_lines(matrix_element) 2239 2240 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2241 2242 realfile = open(os.path.join(_file_path, \ 2243 'iolibs/template_files/realmatrix_fks.inc')).read() 2244 2245 realfile = realfile % replace_dict 2246 2247 # Write the file 2248 writer.writelines(realfile) 2249 2250 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2251 2252 2253 #=============================================================================== 2254 # write_pdf_file 2255 #===============================================================================
2256 - def write_pdf_file(self, writer, matrix_element, n, fortran_model):
2257 #test written 2258 """Write the auto_dsig.f file for MadFKS, which contains 2259 pdf call information""" 2260 2261 if not matrix_element.get('processes') or \ 2262 not matrix_element.get('diagrams'): 2263 return 0 2264 2265 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2266 2267 if ninitial < 1 or ninitial > 2: 2268 raise writers.FortranWriter.FortranWriterError, \ 2269 """Need ninitial = 1 or 2 to write auto_dsig file""" 2270 2271 replace_dict = {} 2272 2273 replace_dict['N_me'] = n 2274 2275 # Extract version number and date from VERSION file 2276 info_lines = self.get_mg5_info_lines() 2277 replace_dict['info_lines'] = info_lines 2278 2279 # Extract process info lines 2280 process_lines = self.get_process_info_lines(matrix_element) 2281 replace_dict['process_lines'] = process_lines 2282 2283 pdf_vars, pdf_data, pdf_lines = \ 2284 self.get_pdf_lines_mir(matrix_element, ninitial, False, False) 2285 replace_dict['pdf_vars'] = pdf_vars 2286 replace_dict['pdf_data'] = pdf_data 2287 replace_dict['pdf_lines'] = pdf_lines 2288 2289 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \ 2290 self.get_pdf_lines_mir(matrix_element, ninitial, False, True) 2291 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr 2292 2293 file = open(os.path.join(_file_path, \ 2294 'iolibs/template_files/parton_lum_n_fks.inc')).read() 2295 file = file % replace_dict 2296 2297 # Write the file 2298 writer.writelines(file)
2299 2300 2301 2302 #=============================================================================== 2303 # write_coloramps_file 2304 #=============================================================================== 2305 #test written
2306 - def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
2307 """Write the coloramps.inc file for MadEvent""" 2308 2309 lines = [] 2310 lines.append( "logical icolamp(%d,%d,1)" % \ 2311 (max(len(matrix_element.get('color_basis').keys()), 1), 2312 len(mapconfigs))) 2313 2314 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1) 2315 2316 # Write the file 2317 writer.writelines(lines) 2318 2319 return True
2320 2321 2322 #=============================================================================== 2323 # write_leshouche_file 2324 #=============================================================================== 2325 #test written
2326 - def write_leshouche_file(self, writer, matrix_element, fortran_model):
2327 """Write the leshouche.inc file for MG4""" 2328 2329 # Extract number of external particles 2330 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2331 2332 lines = [] 2333 for iproc, proc in enumerate(matrix_element.get('processes')): 2334 legs = proc.get_legs_with_decays() 2335 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \ 2336 (iproc + 1, nexternal, 2337 ",".join([str(l.get('id')) for l in legs]))) 2338 for i in [1, 2]: 2339 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2340 (i, iproc + 1, nexternal, 2341 ",".join([ "%3r" % 0 ] * ninitial + \ 2342 [ "%3r" % i ] * (nexternal - ninitial)))) 2343 2344 # Here goes the color connections corresponding to the JAMPs 2345 # Only one output, for the first subproc! 2346 if iproc == 0: 2347 # If no color basis, just output trivial color flow 2348 if not matrix_element.get('color_basis'): 2349 for i in [1, 2]: 2350 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \ 2351 (i, nexternal, 2352 ",".join([ "%3r" % 0 ] * nexternal))) 2353 color_flow_list = [] 2354 2355 else: 2356 # First build a color representation dictionnary 2357 repr_dict = {} 2358 for l in legs: 2359 repr_dict[l.get('number')] = \ 2360 proc.get('model').get_particle(l.get('id')).get_color()\ 2361 * (-1)**(1+l.get('state')) 2362 # Get the list of color flows 2363 color_flow_list = \ 2364 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2365 ninitial) 2366 # And output them properly 2367 for cf_i, color_flow_dict in enumerate(color_flow_list): 2368 for i in [0, 1]: 2369 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2370 (i + 1, cf_i + 1, nexternal, 2371 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2372 for l in legs]))) 2373 2374 # Write the file 2375 writer.writelines(lines) 2376 2377 return len(color_flow_list)
2378 2379 2380 #=============================================================================== 2381 # write_configs_file 2382 #=============================================================================== 2383 #test_written
2384 - def write_configs_file(self, writer, matrix_element, fortran_model):
2385 """Write the configs.inc file for MadEvent""" 2386 2387 # Extract number of external particles 2388 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2389 lines = [] 2390 2391 iconfig = 0 2392 2393 s_and_t_channels = [] 2394 mapconfigs = [] 2395 2396 model = matrix_element.get('processes')[0].get('model') 2397 # new_pdg = model.get_first_non_pdg() 2398 2399 base_diagrams = matrix_element.get('base_amplitude').get('diagrams') 2400 model = matrix_element.get('base_amplitude').get('process').get('model') 2401 minvert = min([max([len(vert.get('legs')) for vert in \ 2402 diag.get('vertices')]) for diag in base_diagrams]) 2403 2404 for idiag, diag in enumerate(base_diagrams): 2405 if any([len(vert.get('legs')) > minvert for vert in 2406 diag.get('vertices')]): 2407 # Only 3-vertices allowed in configs.inc 2408 continue 2409 iconfig = iconfig + 1 2410 helas_diag = matrix_element.get('diagrams')[idiag] 2411 mapconfigs.append(helas_diag.get('number')) 2412 lines.append("# Diagram %d, Amplitude %d" % \ 2413 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number'])) 2414 # Correspondance between the config and the amplitudes 2415 lines.append("data mapconfig(%4d)/%4d/" % (iconfig, 2416 helas_diag.get('amplitudes')[0]['number'])) 2417 2418 # Need to reorganize the topology so that we start with all 2419 # final state external particles and work our way inwards 2420 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 2421 get_s_and_t_channels(ninitial, model, 990) 2422 2423 s_and_t_channels.append([schannels, tchannels]) 2424 2425 # Write out propagators for s-channel and t-channel vertices 2426 allchannels = schannels 2427 if len(tchannels) > 1: 2428 # Write out tchannels only if there are any non-trivial ones 2429 allchannels = schannels + tchannels 2430 2431 for vert in allchannels: 2432 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2433 last_leg = vert.get('legs')[-1] 2434 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \ 2435 (last_leg.get('number'), iconfig, len(daughters), 2436 ",".join(["%3d" % d for d in daughters]))) 2437 if vert in schannels: 2438 lines.append("data sprop(%4d,%4d)/%8d/" % \ 2439 (last_leg.get('number'), iconfig, 2440 last_leg.get('id'))) 2441 elif vert in tchannels[:-1]: 2442 lines.append("data tprid(%4d,%4d)/%8d/" % \ 2443 (last_leg.get('number'), iconfig, 2444 abs(last_leg.get('id')))) 2445 2446 # Write out number of configs 2447 lines.append("# Number of configs") 2448 lines.append("data mapconfig(0)/%4d/" % iconfig) 2449 2450 # Write the file 2451 writer.writelines(lines) 2452 2453 return iconfig, mapconfigs, s_and_t_channels
2454 2455 2456 #=============================================================================== 2457 # write_decayBW_file 2458 #=============================================================================== 2459 #test written
2460 - def write_decayBW_file(self, writer, s_and_t_channels):
2461 """Write the decayBW.inc file for MadEvent""" 2462 2463 lines = [] 2464 2465 booldict = {False: ".false.", True: ".false."} 2466 ####Changed by MZ 2011-11-23!!!! 2467 2468 for iconf, config in enumerate(s_and_t_channels): 2469 schannels = config[0] 2470 for vertex in schannels: 2471 # For the resulting leg, pick out whether it comes from 2472 # decay or not, as given by the from_group flag 2473 leg = vertex.get('legs')[-1] 2474 lines.append("data gForceBW(%d,%d)/%s/" % \ 2475 (leg.get('number'), iconf + 1, 2476 booldict[leg.get('from_group')])) 2477 2478 # Write the file 2479 writer.writelines(lines) 2480 2481 return True
2482 2483 2484 #=============================================================================== 2485 # write_dname_file 2486 #===============================================================================
2487 - def write_dname_file(self, writer, matrix_element, fortran_model):
2488 """Write the dname.mg file for MG4""" 2489 2490 line = "DIRNAME=P%s" % \ 2491 matrix_element.get('processes')[0].shell_string() 2492 2493 # Write the file 2494 writer.write(line + "\n") 2495 2496 return True
2497 2498 2499 #=============================================================================== 2500 # write_iproc_file 2501 #===============================================================================
2502 - def write_iproc_file(self, writer, me_number):
2503 """Write the iproc.dat file for MG4""" 2504 2505 line = "%d" % (me_number + 1) 2506 2507 # Write the file 2508 for line_to_write in writer.write_line(line): 2509 writer.write(line_to_write) 2510 return True
2511 2512 2513 #=============================================================================== 2514 # Helper functions 2515 #=============================================================================== 2516 2517 2518 #=============================================================================== 2519 # get_fks_j_from_i_lines 2520 #=============================================================================== 2521
2522 - def get_fks_j_from_i_lines(self, me, i = 0): #test written
2523 """generate the lines for fks.inc describing initializating the 2524 fks_j_from_i array""" 2525 lines = [] 2526 if not me.isfinite: 2527 for ii, js in me.fks_j_from_i.items(): 2528 if js: 2529 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2530 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js]))) 2531 else: 2532 lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2533 % (2, 1, 1, '1')) 2534 lines.append('') 2535 2536 return lines 2537 2538 2539 #=============================================================================== 2540 # get_leshouche_lines 2541 #===============================================================================
2542 - def get_leshouche_lines(self, matrix_element, ime):
2543 #test written 2544 """Write the leshouche.inc file for MG4""" 2545 2546 # Extract number of external particles 2547 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2548 2549 lines = [] 2550 for iproc, proc in enumerate(matrix_element.get('processes')): 2551 legs = proc.get_legs_with_decays() 2552 lines.append("I %4d %4d %s" % \ 2553 (ime, iproc + 1, 2554 " ".join([str(l.get('id')) for l in legs]))) 2555 for i in [1, 2]: 2556 lines.append("M %4d %4d %4d %s" % \ 2557 (ime, i, iproc + 1, 2558 " ".join([ "%3d" % 0 ] * ninitial + \ 2559 [ "%3d" % i ] * (nexternal - ninitial)))) 2560 2561 # Here goes the color connections corresponding to the JAMPs 2562 # Only one output, for the first subproc! 2563 if iproc == 0: 2564 # If no color basis, just output trivial color flow 2565 if not matrix_element.get('color_basis'): 2566 for i in [1, 2]: 2567 lines.append("C %4d %4d 1 %s" % \ 2568 (ime, i, 2569 " ".join([ "%3d" % 0 ] * nexternal))) 2570 color_flow_list = [] 2571 nflow = 1 2572 2573 else: 2574 # First build a color representation dictionnary 2575 repr_dict = {} 2576 for l in legs: 2577 repr_dict[l.get('number')] = \ 2578 proc.get('model').get_particle(l.get('id')).get_color()\ 2579 * (-1)**(1+l.get('state')) 2580 # Get the list of color flows 2581 color_flow_list = \ 2582 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2583 ninitial) 2584 # And output them properly 2585 for cf_i, color_flow_dict in enumerate(color_flow_list): 2586 for i in [0, 1]: 2587 lines.append("C %4d %4d %4d %s" % \ 2588 (ime, i + 1, cf_i + 1, 2589 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2590 for l in legs]))) 2591 2592 nflow = len(color_flow_list) 2593 2594 nproc = len(matrix_element.get('processes')) 2595 2596 return lines, nproc, nflow
2597 2598 2599 #=============================================================================== 2600 # get_den_factor_lines 2601 #===============================================================================
2602 - def get_den_factor_lines(self, fks_born):
2603 """returns the lines with the information on the denominator keeping care 2604 of the identical particle factors in the various real emissions""" 2605 2606 lines = [] 2607 info_list = fks_born.get_fks_info_list() 2608 if info_list: 2609 # if the reals have been generated, fill with the corresponding average factor 2610 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 2611 lines.append('DATA IDEN_VALUES /' + \ 2612 ', '.join(['%d' % ( 2613 fks_born.born_matrix_element.get_denominator_factor() / \ 2614 fks_born.born_matrix_element['identical_particle_factor'] * \ 2615 fks_born.real_processes[info['n_me'] - 1].matrix_element['identical_particle_factor'] ) \ 2616 for info in info_list]) + '/') 2617 else: 2618 # otherwise use the born 2619 lines.append('INTEGER IDEN_VALUES(1)') 2620 lines.append('DATA IDEN_VALUES / %d /' \ 2621 % fks_born.born_matrix_element.get_denominator_factor()) 2622 2623 return lines
2624 2625 2626 #=============================================================================== 2627 # get_ij_lines 2628 #===============================================================================
2629 - def get_ij_lines(self, fks_born):
2630 """returns the lines with the information on the particle number of the born 2631 that splits""" 2632 info_list = fks_born.get_fks_info_list() 2633 lines = [] 2634 if info_list: 2635 # if the reals have been generated, fill with the corresponding value of ij 2636 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 2637 lines.append('DATA IJ_VALUES /' + \ 2638 ', '.join(['%d' % info['fks_info']['ij'] for info in info_list]) + '/') 2639 else: 2640 #otherwise just put the first leg 2641 lines.append('INTEGER IJ_VALUES(1)') 2642 lines.append('DATA IJ_VALUES / 1 /') 2643 2644 return lines
2645 2646
2647 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\ 2648 mirror = False): #test written
2649 """Generate the PDF lines for the auto_dsig.f file""" 2650 2651 processes = matrix_element.get('processes') 2652 model = processes[0].get('model') 2653 2654 pdf_definition_lines = "" 2655 pdf_data_lines = "" 2656 pdf_lines = "" 2657 2658 if ninitial == 1: 2659 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 2660 for i, proc in enumerate(processes): 2661 process_line = proc.base_string() 2662 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2663 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n" 2664 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 2665 else: 2666 # Pick out all initial state particles for the two beams 2667 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 2668 p in processes]))), 2669 sorted(list(set([p.get_initial_pdg(2) for \ 2670 p in processes])))] 2671 2672 # Prepare all variable names 2673 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 2674 sum(initial_states,[])]) 2675 for key,val in pdf_codes.items(): 2676 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 2677 2678 # Set conversion from PDG code to number used in PDF calls 2679 pdgtopdf = {21: 0, 22: 7} 2680 # Fill in missing entries of pdgtopdf 2681 for pdg in sum(initial_states,[]): 2682 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 2683 pdgtopdf[pdg] = pdg 2684 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 2685 # If any particle has pdg code 7, we need to use something else 2686 pdgtopdf[pdg] = 6000000 + pdg 2687 2688 # Get PDF variable declarations for all initial states 2689 for i in [0,1]: 2690 pdf_definition_lines += "DOUBLE PRECISION " + \ 2691 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2692 for pdg in \ 2693 initial_states[i]]) + \ 2694 "\n" 2695 2696 # Get PDF data lines for all initial states 2697 for i in [0,1]: 2698 pdf_data_lines += "DATA " + \ 2699 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2700 for pdg in initial_states[i]]) + \ 2701 "/%d*1D0/" % len(initial_states[i]) + \ 2702 "\n" 2703 2704 # Get PDF values for the different initial states 2705 for i, init_states in enumerate(initial_states): 2706 if not mirror: 2707 ibeam = i + 1 2708 else: 2709 ibeam = 2 - i 2710 if subproc_group: 2711 pdf_lines = pdf_lines + \ 2712 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 2713 % (ibeam, ibeam) 2714 else: 2715 pdf_lines = pdf_lines + \ 2716 "IF (ABS(LPP(%d)) .GE. 
1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 2717 % (ibeam, ibeam) 2718 2719 for initial_state in init_states: 2720 if initial_state in pdf_codes.keys(): 2721 if subproc_group: 2722 if abs(pdgtopdf[initial_state]) <= 7: 2723 pdf_lines = pdf_lines + \ 2724 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 2725 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 2726 (pdf_codes[initial_state], 2727 i + 1, ibeam, pdgtopdf[initial_state], 2728 ibeam, ibeam) 2729 else: 2730 # setting other partons flavours outside quark, gluon, photon to be 0d0 2731 pdf_lines = pdf_lines + \ 2732 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2733 "%s%d=0d0\n") % \ 2734 (pdf_codes[initial_state],i + 1) 2735 else: 2736 if abs(pdgtopdf[initial_state]) <= 7: 2737 pdf_lines = pdf_lines + \ 2738 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 2739 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 2740 (pdf_codes[initial_state], 2741 i + 1, ibeam, pdgtopdf[initial_state], 2742 ibeam, ibeam) 2743 else: 2744 # setting other partons flavours outside quark, gluon, photon to be 0d0 2745 pdf_lines = pdf_lines + \ 2746 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 2747 "%s%d=0d0\n") % \ 2748 (pdf_codes[initial_state],i + 1) 2749 2750 pdf_lines = pdf_lines + "ENDIF\n" 2751 2752 # Add up PDFs for the different initial state particles 2753 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 2754 for proc in processes: 2755 process_line = proc.base_string() 2756 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2757 pdf_lines = pdf_lines + "\nPD(IPROC) = " 2758 for ibeam in [1, 2]: 2759 initial_state = proc.get_initial_pdg(ibeam) 2760 if initial_state in pdf_codes.keys(): 2761 pdf_lines = pdf_lines + "%s%d*" % \ 2762 (pdf_codes[initial_state], ibeam) 2763 else: 2764 pdf_lines = pdf_lines + "1d0*" 2765 # Remove last "*" from pdf_lines 2766 pdf_lines = pdf_lines[:-1] + "\n" 2767 2768 # Remove last line break from pdf_lines 2769 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1] 2770 2771 2772 #test written
2773 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
2774 """Return the color matrix definition lines for the given color_matrix. Split 2775 rows in chunks of size n.""" 2776 2777 if not color_matrix: 2778 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 2779 else: 2780 ret_list = [] 2781 my_cs = color.ColorString() 2782 for index, denominator in \ 2783 enumerate(color_matrix.get_line_denominators()): 2784 # First write the common denominator for this color matrix line 2785 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 2786 # Then write the numerators for the matrix elements 2787 num_list = color_matrix.get_line_numerators(index, denominator) 2788 for k in xrange(0, len(num_list), n): 2789 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 2790 (index + 1, k + 1, min(k + n, len(num_list)), 2791 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 2792 2793 return ret_list
2794 2795 #=========================================================================== 2796 # write_maxamps_file 2797 #===========================================================================
2798 - def write_maxamps_file(self, writer, maxamps, maxflows, 2799 maxproc,maxsproc):
2800 """Write the maxamps.inc file for MG4.""" 2801 2802 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 2803 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 2804 (maxamps, maxflows) 2805 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 2806 (maxproc, maxsproc) 2807 2808 # Write the file 2809 writer.writelines(file) 2810 2811 return True
2812 2813 #=============================================================================== 2814 # write_ncombs_file 2815 #===============================================================================
2816 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
2817 # #test written 2818 """Write the ncombs.inc file for MadEvent.""" 2819 2820 # Extract number of external particles 2821 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2822 2823 # n_max_cl (used for clustering) is 2^(nexternal+1) 2824 file = " integer n_max_cl\n" 2825 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1)) 2826 2827 # Write the file 2828 writer.writelines(file) 2829 2830 return True
2831 2832 #=========================================================================== 2833 # write_config_subproc_map_file 2834 #===========================================================================
2835 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
2836 """Write a dummy config_subproc.inc file for MadEvent""" 2837 2838 lines = [] 2839 2840 for iconfig in range(len(s_and_t_channels)): 2841 lines.append("DATA CONFSUB(1,%d)/1/" % \ 2842 (iconfig + 1)) 2843 2844 # Write the file 2845 writer.writelines(lines) 2846 2847 return True
2848 2849 #=========================================================================== 2850 # write_colors_file 2851 #===========================================================================
2852 - def write_colors_file(self, writer, matrix_element):
2853 """Write the get_color.f file for MadEvent, which returns color 2854 for all particles used in the matrix element.""" 2855 2856 try: 2857 matrix_elements=matrix_element.real_processes[0].matrix_element 2858 except IndexError: 2859 matrix_elements=[matrix_element.born_matrix_element] 2860 2861 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 2862 matrix_elements = [matrix_elements] 2863 2864 model = matrix_elements[0].get('processes')[0].get('model') 2865 2866 # We need the both particle and antiparticle wf_ids, since the identity 2867 # depends on the direction of the wf. 2868 # loop on the real emissions 2869 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2870 for wf in d.get('wavefunctions')],[]) \ 2871 for d in me.get('diagrams')],[]) \ 2872 for me in [real_proc.matrix_element]],[])\ 2873 for real_proc in matrix_element.real_processes],[])) 2874 # and also on the born 2875 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2876 for wf in d.get('wavefunctions')],[]) \ 2877 for d in matrix_element.born_matrix_element.get('diagrams')],[]))) 2878 2879 # loop on the real emissions 2880 leg_ids = set(sum([sum([sum([[l.get('id') for l in \ 2881 p.get_legs_with_decays()] for p in \ 2882 me.get('processes')], []) for me in \ 2883 [real_proc.matrix_element]], []) for real_proc in \ 2884 matrix_element.real_processes],[])) 2885 # and also on the born 2886 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \ 2887 p.get_legs_with_decays()] for p in \ 2888 matrix_element.born_matrix_element.get('processes')], []))) 2889 particle_ids = sorted(list(wf_ids.union(leg_ids))) 2890 2891 lines = """function get_color(ipdg) 2892 implicit none 2893 integer get_color, ipdg 2894 2895 if(ipdg.eq.%d)then 2896 get_color=%d 2897 return 2898 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 2899 2900 for part_id in particle_ids[1:]: 2901 lines += """else if(ipdg.eq.%d)then 2902 get_color=%d 2903 return 2904 """ % (part_id, model.get_particle(part_id).get_color()) 2905 # Dummy particle for multiparticle vertices with pdg given by 2906 # first code not in the model 2907 lines += """else if(ipdg.eq.%d)then 2908 c This is dummy particle used in multiparticle vertices 2909 get_color=2 2910 return 2911 """ % model.get_first_non_pdg() 2912 lines += """else 2913 write(*,*)'Error: No color given for pdg ',ipdg 2914 get_color=0 2915 return 2916 endif 2917 end 2918 """ 2919 2920 # Write the file 2921 writer.writelines(lines) 2922 2923 return True
2924 2925 #=============================================================================== 2926 # write_props_file 2927 #=============================================================================== 2928 #test_written
2929 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
2930 """Write the props.inc file for MadEvent. Needs input from 2931 write_configs_file. With respect to the parent routine, it has some 2932 more specific formats that allow the props.inc file to be read by the 2933 link program""" 2934 2935 lines = [] 2936 2937 particle_dict = matrix_element.get('processes')[0].get('model').\ 2938 get('particle_dict') 2939 2940 for iconf, configs in enumerate(s_and_t_channels): 2941 for vertex in configs[0] + configs[1][:-1]: 2942 leg = vertex.get('legs')[-1] 2943 if leg.get('id') not in particle_dict: 2944 # Fake propagator used in multiparticle vertices 2945 mass = 'zero' 2946 width = 'zero' 2947 pow_part = 0 2948 else: 2949 particle = particle_dict[leg.get('id')] 2950 # Get mass 2951 if particle.get('mass').lower() == 'zero': 2952 mass = particle.get('mass') 2953 else: 2954 mass = "abs(%s)" % particle.get('mass') 2955 # Get width 2956 if particle.get('width').lower() == 'zero': 2957 width = particle.get('width') 2958 else: 2959 width = "abs(%s)" % particle.get('width') 2960 2961 pow_part = 1 + int(particle.is_boson()) 2962 2963 lines.append("pmass(%3d,%4d) = %s" % \ 2964 (leg.get('number'), iconf + 1, mass)) 2965 lines.append("pwidth(%3d,%4d) = %s" % \ 2966 (leg.get('number'), iconf + 1, width)) 2967 lines.append("pow(%3d,%4d) = %d" % \ 2968 (leg.get('number'), iconf + 1, pow_part)) 2969 2970 # Write the file 2971 writer.writelines(lines) 2972 2973 return True
2974 2975 2976 #=========================================================================== 2977 # write_subproc 2978 #===========================================================================
2979 - def write_subproc(self, writer, subprocdir):
2980 """Append this subprocess to the subproc.mg file for MG4""" 2981 2982 # Write line to file 2983 writer.write(subprocdir + "\n") 2984 2985 return True
2986 2987 2988 2989 2990 2991 #================================================================================= 2992 # Class for using the optimized Loop process 2993 #=================================================================================
2994 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 2995 ProcessExporterFortranFKS):
2996 """Class to take care of exporting a set of matrix elements to 2997 Fortran (v4) format.""" 2998 2999 #=============================================================================== 3000 # copy the Template in a new directory. 3001 #===============================================================================
3002 - def copy_fkstemplate(self):
3003 """create the directory run_name as a copy of the MadEvent 3004 Template, and clean the directory 3005 For now it is just the same as copy_v4template, but it will be modified 3006 """ 3007 mgme_dir = self.mgme_dir 3008 dir_path = self.dir_path 3009 clean =self.opt['clean'] 3010 3011 #First copy the full template tree if dir_path doesn't exit 3012 if not os.path.isdir(dir_path): 3013 if not mgme_dir: 3014 raise MadGraph5Error, \ 3015 "No valid MG_ME path given for MG4 run directory creation." 3016 logger.info('initialize a new directory: %s' % \ 3017 os.path.basename(dir_path)) 3018 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 3019 # distutils.dir_util.copy_tree since dir_path already exists 3020 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 3021 dir_path) 3022 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 3023 if not mgme_dir: 3024 raise MadGraph5Error, \ 3025 "No valid MG_ME path given for MG4 run directory creation." 3026 try: 3027 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 3028 except IOError: 3029 MG5_version = misc.get_pkg_info() 3030 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 3031 "5." + MG5_version['version']) 3032 3033 #Ensure that the Template is clean 3034 if clean: 3035 logger.info('remove old information in %s' % os.path.basename(dir_path)) 3036 if os.environ.has_key('MADGRAPH_BASE'): 3037 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 3038 '--web'], cwd=dir_path) 3039 else: 3040 try: 3041 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 3042 cwd=dir_path) 3043 except Exception, why: 3044 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 3045 % (os.path.basename(dir_path),why)) 3046 #Write version info 3047 MG_version = misc.get_pkg_info() 3048 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 3049 MG_version['version']) 3050 3051 # We must link the CutTools to the Library folder of the active Template 3052 self.link_CutTools(dir_path) 3053 # We must link the TIR to the Library folder of the active Template 3054 link_tir_libs=[] 3055 tir_libs=[] 3056 tir_include=[] 3057 # special for PJFry++/Golem95 3058 link_pjfry_lib="" 3059 pjfry_lib="" 3060 for tir in self.all_tir: 3061 tir_dir="%s_dir"%tir 3062 libpath=getattr(self,tir_dir) 3063 libname="lib%s.a"%tir 3064 tir_name=tir 3065 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'), 3066 libpath,libname,tir_name=tir_name) 3067 setattr(self,tir_dir,libpath) 3068 if libpath != "": 3069 if tir in ['pjfry','golem']: 3070 # Apparently it is necessary to link against the original 3071 # location of the pjfry/golem library, so it needs a special treatment. 
3072 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir)) 3073 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir)) 3074 if tir=='golem': 3075 trg_path = pjoin(os.path.dirname(libpath),'include') 3076 golem_include = misc.find_includes_path(trg_path,'.mod') 3077 if golem_include is None: 3078 logger.error( 3079 'Could not find the include directory for golem, looking in %s.\n' % str(trg_path)+ 3080 'Generation carries on but you will need to edit the include path by hand in the makefiles.') 3081 golem_include = '<Not_found_define_it_yourself>' 3082 tir_include.append('-I %s'%golem_include) 3083 else: 3084 link_tir_libs.append('-l%s'%tir) 3085 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir) 3086 3087 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 3088 cwd = os.getcwd() 3089 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3090 try: 3091 os.chdir(dirpath) 3092 except os.error: 3093 logger.error('Could not cd to directory %s' % dirpath) 3094 return 0 3095 filename = 'makefile_loop' 3096 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 3097 link_tir_libs,tir_libs,tir_include=tir_include) 3098 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 3099 dirpath = os.path.join(self.dir_path, 'Source') 3100 try: 3101 os.chdir(dirpath) 3102 except os.error: 3103 logger.error('Could not cd to directory %s' % dirpath) 3104 return 0 3105 filename = 'make_opts' 3106 calls = self.write_make_opts(writers.MakefileWriter(filename), 3107 link_tir_libs,tir_libs) 3108 # Return to original PWD 3109 os.chdir(cwd) 3110 3111 cwd = os.getcwd() 3112 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3113 try: 3114 os.chdir(dirpath) 3115 except os.error: 3116 logger.error('Could not cd to directory %s' % dirpath) 3117 return 0 3118 3119 # We add here the user-friendly MadLoop option setter. 
3120 cpfiles= ["SubProcesses/MadLoopParamReader.f", 3121 "Cards/MadLoopParams.dat", 3122 "SubProcesses/MadLoopParams.inc"] 3123 3124 for file in cpfiles: 3125 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 3126 os.path.join(self.dir_path, file)) 3127 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 3128 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 3129 'Cards', 'MadLoopParams.dat')) 3130 # write the output file 3131 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 3132 "MadLoopParams.dat")) 3133 3134 # We need minimal editing of MadLoopCommons.f 3135 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 3136 "SubProcesses","MadLoopCommons.inc")).read() 3137 writer = writers.FortranWriter(os.path.join(self.dir_path, 3138 "SubProcesses","MadLoopCommons.f")) 3139 writer.writelines(MadLoopCommon%{ 3140 'print_banner_commands':self.MadLoop_banner}) 3141 writer.close() 3142 3143 # link the files from the MODEL 3144 model_path = self.dir_path + '/Source/MODEL/' 3145 # Note that for the [real=] mode, these files are not present 3146 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')): 3147 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses') 3148 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')): 3149 ln(model_path + '/mp_coupl_same_name.inc', \ 3150 self.dir_path + '/SubProcesses') 3151 3152 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 3153 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 3154 writers.FortranWriter('cts_mpc.h'),) 3155 3156 self.copy_python_files() 3157 3158 3159 # We need to create the correct open_data for the pdf 3160 self.write_pdf_opendata() 3161 3162 3163 # Return to original PWD 3164 os.chdir(cwd)
3165
3166      def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
3167          """writes the V**** directory inside the P**** directory specified in
3168          dir_name"""
3169
3170          cwd = os.getcwd()
3171
3172          matrix_element = loop_matrix_element
3173
3174          # Create the MadLoop5_resources directory if not already existing
3175          dirpath = os.path.join(dir_name, 'MadLoop5_resources')
3176          try:
3177              os.mkdir(dirpath)
3178          except os.error as error:
3179              logger.warning(error.strerror + " " + dirpath)
3180
3181          # Create the directory V**** in the specified path
3182          name = "V%s" % matrix_element.get('processes')[0].shell_string()
3183          dirpath = os.path.join(dir_name, name)
3184
3185          try:
3186              os.mkdir(dirpath)
3187          except os.error as error:
3188              logger.warning(error.strerror + " " + dirpath)
3189
3190          try:
3191              os.chdir(dirpath)
3192          except os.error:
3193              logger.error('Could not cd to directory %s' % dirpath)
3194              return 0
3195
3196          logger.info('Creating files in directory %s' % name)
3197
3198          # Extract number of external particles
3199          (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3200
3201          calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
3202
3203          # The born matrix element, if needed
3204          filename = 'born_matrix.f'
3205          calls = self.write_bornmatrix(
3206              writers.FortranWriter(filename),
3207              matrix_element,
3208              fortran_model)
3209
3210          filename = 'nexternal.inc'
3211          self.write_nexternal_file(writers.FortranWriter(filename),
3212                                    nexternal, ninitial)
3213
3214          filename = 'pmass.inc'
3215          self.write_pmass_file(writers.FortranWriter(filename),
3216                                matrix_element)
3217
3218          filename = 'ngraphs.inc'
3219          self.write_ngraphs_file(writers.FortranWriter(filename),
3220                                  len(matrix_element.get_all_amplitudes()))
3221
3222          filename = "loop_matrix.ps"
3223          writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""")
3224          plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
3225                matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
3226                filename,
3227                model=matrix_element.get('processes')[0].get('model'),
3228                amplitude='')
3229          logger.info("Drawing loop Feynman diagrams for " + \
3230                matrix_element.get('processes')[0].nice_string(\
3231                                                  print_weighted=False))
3232          plot.draw()
3233
3234          filename = "born_matrix.ps"
3235          plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
3236                                               get('born_diagrams'),
3237                                            filename,
3238                                            model=matrix_element.get('processes')[0].\
3239                                               get('model'),
3240                                            amplitude='')
3241          logger.info("Generating born Feynman diagrams for " + \
3242                matrix_element.get('processes')[0].nice_string(\
3243                                                  print_weighted=False))
3244          plot.draw()
3245
3246          linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
3247                       'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
3248                       'MadLoopParams.inc','MadLoopCommons.f']
3249
3250          for file in linkfiles:
3251              ln('../../%s' % file)
3252
3253
3254          os.system("ln -s ../../makefile_loop makefile")
3255
3256          # Link MadLoopParams.dat from the SubProcesses directory into MadLoop5_resources
3257          ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
3258             pjoin('..','MadLoop5_resources'))
3259
3260          linkfiles = ['mpmodule.mod']
3261
3262          for file in linkfiles:
3263              ln('../../../lib/%s' % file)
3264
3265          # Return to original PWD
3266          os.chdir(cwd)
3267
3268          if not calls:
3269              calls = 0
3270          return calls
3271
3272
3273      #===============================================================================
3274      # write_coef_specs
3275      #===============================================================================
3276      def write_coef_specs_file(self, virt_me_list):
3277          """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the
3278          non-optimized mode"""
3279          filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc')
3280
3281          general_replace_dict = {}
3282          general_replace_dict['max_lwf_size'] = 4
3283
3284          max_loop_vertex_ranks = [me.get_max_loop_vertex_rank() for me in virt_me_list]
3285          general_replace_dict['vertex_max_coefs'] = max(\
3286              [q_polynomial.get_number_of_coefs_for_rank(n)
3287              for n in max_loop_vertex_ranks])
3288
3289          IncWriter=writers.FortranWriter(filename,'w')
3290          IncWriter.writelines("""INTEGER MAXLWFSIZE
3291                             PARAMETER (MAXLWFSIZE=%(max_lwf_size)d)
3292                             INTEGER VERTEXMAXCOEFS
3293                             PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\
3294                             % general_replace_dict)
3295          IncWriter.close()
3296
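    # Illustrative sketch of the Source/DHELAS/coef_specs.inc written above,
    # with N standing for whatever q_polynomial.get_number_of_coefs_for_rank()
    # returns for the largest loop-vertex rank found among the virtual matrix
    # elements (exact spacing and casing are left to FortranWriter):
    #     INTEGER MAXLWFSIZE
    #     PARAMETER (MAXLWFSIZE=4)
    #     INTEGER VERTEXMAXCOEFS
    #     PARAMETER (VERTEXMAXCOEFS=N)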