
Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from __future__ import absolute_import 
  18  from __future__ import print_function 
  19  import glob 
  20  import logging 
  21  import os 
  22  import re 
  23  import shutil 
  24  import subprocess 
  25  import string 
  26  import copy 
  27  import platform 
  28   
  29  import madgraph.core.color_algebra as color 
  30  import madgraph.core.helas_objects as helas_objects 
  31  import madgraph.core.base_objects as base_objects 
  32  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  33  import madgraph.fks.fks_base as fks 
  34  import madgraph.fks.fks_common as fks_common 
  35  import madgraph.iolibs.drawing_eps as draw 
  36  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  37  import madgraph.iolibs.files as files 
  38  import madgraph.various.misc as misc 
  39  import madgraph.iolibs.file_writers as writers 
  40  import madgraph.iolibs.template_files as template_files 
  41  import madgraph.iolibs.ufo_expression_parsers as parsers 
  42  import madgraph.iolibs.export_v4 as export_v4 
  43  import madgraph.loop.loop_exporters as loop_exporters 
  44  import madgraph.various.q_polynomial as q_polynomial 
  45  import madgraph.various.banner as banner_mod 
  46   
  47  import aloha.create_aloha as create_aloha 
  48   
  49  import models.write_param_card as write_param_card 
  50  import models.check_param_card as check_param_card 
  51  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  52  from madgraph.iolibs.files import cp, ln, mv 
  53  from six.moves import range 
  54   
  55  pjoin = os.path.join 
  56   
  57  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  58  logger = logging.getLogger('madgraph.export_fks') 
  59   
  60   
  61  def make_jpeg_async(args): 
  62      Pdir = args[0] 
  63      old_pos = args[1] 
  64      dir_path = args[2] 
  65   
  66      devnull = os.open(os.devnull, os.O_RDWR) 
  67   
  68      os.chdir(Pdir) 
  69      subprocess.call([os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 
  70                      stdout = devnull) 
  71      os.chdir(os.path.pardir) 
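
make_jpeg_async receives its three arguments packed in a single list, which is the calling convention of multiprocessing.Pool.map; a minimal, self-contained sketch of how such a worker could be driven in parallel over the P* subprocess directories (the directory names and the helper run_over_pdirs below are invented for illustration, not part of this module):

import multiprocessing
import os

def run_over_pdirs(worker, dir_path, pdirs):
    """Map a worker such as make_jpeg_async over several P* directories."""
    old_pos = os.getcwd()
    args = [[pdir, old_pos, dir_path] for pdir in pdirs]
    with multiprocessing.Pool() as pool:
        pool.map(worker, args)

# Example call (hypothetical process directory and P* names):
# run_over_pdirs(make_jpeg_async, 'PROCNLO_sm_0', ['P0_gg_ttx', 'P0_uux_ttx'])
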
  72   
  73   
  74  #================================================================================= 
  75  # Class used for the (non-optimized) Loop process 
  76  #================================================================================= 
  77  class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA): 
  78      """Class to take care of exporting a set of matrix elements to 
  79      Fortran (v4) format.""" 
  80   
  81      #=============================================================================== 
  82      # copy the Template in a new directory. 
  83      #=============================================================================== 
84 - def copy_fkstemplate(self):
85 """create the directory run_name as a copy of the MadEvent 86 Template, and clean the directory 87 For now it is just the same as copy_v4template, but it will be modified 88 """ 89 90 mgme_dir = self.mgme_dir 91 dir_path = self.dir_path 92 clean =self.opt['clean'] 93 94 #First copy the full template tree if dir_path doesn't exit 95 if not os.path.isdir(dir_path): 96 if not mgme_dir: 97 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.") 98 logger.info('initialize a new directory: %s' % \ 99 os.path.basename(dir_path)) 100 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 101 # misc.copytree since dir_path already exists 102 misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path) 103 # Copy plot_card 104 for card in ['plot_card']: 105 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 106 try: 107 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 108 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 109 except IOError: 110 logger.warning("Failed to move " + card + ".dat to default") 111 112 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 113 if not mgme_dir: 114 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.") 115 try: 116 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 117 except IOError: 118 MG5_version = misc.get_pkg_info() 119 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 120 "5." + MG5_version['version']) 121 122 #Ensure that the Template is clean 123 if clean: 124 logger.info('remove old information in %s' % os.path.basename(dir_path)) 125 if 'MADGRAPH_BASE' in os.environ: 126 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 127 '--web'],cwd=dir_path) 128 else: 129 try: 130 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 131 cwd=dir_path) 132 except Exception as why: 133 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 134 % (os.path.basename(dir_path),why)) 135 #Write version info 136 MG_version = misc.get_pkg_info() 137 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 138 MG_version['version']) 139 140 # We must link the CutTools to the Library folder of the active Template 141 self.link_CutTools(dir_path) 142 143 link_tir_libs=[] 144 tir_libs=[] 145 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 146 dirpath = os.path.join(self.dir_path, 'SubProcesses') 147 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop') 148 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 149 link_tir_libs,tir_libs) 150 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 151 filename = pjoin(self.dir_path, 'Source','make_opts') 152 calls = self.write_make_opts(writers.MakefileWriter(filename), 153 link_tir_libs,tir_libs) 154 155 # Duplicate run_card and FO_analyse_card 156 for card in ['FO_analyse_card', 'shower_card']: 157 try: 158 shutil.copy(pjoin(self.dir_path, 'Cards', 159 card + '.dat'), 160 pjoin(self.dir_path, 'Cards', 161 card + '_default.dat')) 162 except IOError: 163 logger.warning("Failed to copy " + card + ".dat to default") 164 165 cwd = os.getcwd() 166 dirpath = os.path.join(self.dir_path, 'SubProcesses') 167 try: 168 os.chdir(dirpath) 169 except os.error: 170 logger.error('Could not cd to directory %s' % dirpath) 171 return 0 172 173 # We add here the user-friendly MadLoop option setter. 
174 cpfiles= ["SubProcesses/MadLoopParamReader.f", 175 "Cards/MadLoopParams.dat", 176 "SubProcesses/MadLoopParams.inc"] 177 178 for file in cpfiles: 179 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 180 os.path.join(self.dir_path, file)) 181 182 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'), 183 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat')) 184 185 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 186 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 187 'Cards', 'MadLoopParams.dat')) 188 # write the output file 189 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 190 "MadLoopParams.dat")) 191 192 # We need minimal editing of MadLoopCommons.f 193 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 194 "SubProcesses","MadLoopCommons.inc")).read() 195 writer = writers.FortranWriter(os.path.join(self.dir_path, 196 "SubProcesses","MadLoopCommons.f")) 197 writer.writelines(MadLoopCommon%{ 198 'print_banner_commands':self.MadLoop_banner}, 199 context={'collier_available':False}) 200 writer.close() 201 202 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 203 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 204 writers.FortranWriter('cts_mpc.h')) 205 206 207 # Finally make sure to turn off MC over Hel for the default mode. 208 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat') 209 FKS_card_file = open(FKS_card_path,'r') 210 FKS_card = FKS_card_file.read() 211 FKS_card_file.close() 212 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+", 213 "#NHelForMCoverHels\n-1", FKS_card) 214 FKS_card_file = open(FKS_card_path,'w') 215 FKS_card_file.write(FKS_card) 216 FKS_card_file.close() 217 218 # Return to original PWD 219 os.chdir(cwd) 220 # Copy the different python files in the Template 221 self.copy_python_files() 222 223 # We need to create the correct open_data for the pdf 224 self.write_pdf_opendata()
 225   
 226      # This is defined here, not in the optimized exporter, so that the same makefile_loop.inc is used. 
 227      # Also, we overload this function (i.e. it is already defined in 
 228      # LoopProcessExporterFortranSA) because the path of the template makefile 
 229      # is different. 
 230      def write_makefile_TIR(self, writer, link_tir_libs,tir_libs,tir_include=[]): 
 231          """ Create the file makefile_loop which links to the TIR libraries.""" 
 232   
 233          file = open(os.path.join(self.mgme_dir,'Template','NLO', 
 234                                   'SubProcesses','makefile_loop.inc')).read() 
 235          replace_dict={} 
 236          replace_dict['link_tir_libs']=' '.join(link_tir_libs) 
 237          replace_dict['tir_libs']=' '.join(tir_libs) 
 238          replace_dict['dotf']='%.f' 
 239          replace_dict['doto']='%.o' 
 240          replace_dict['tir_include']=' '.join(tir_include) 
 241          file=file%replace_dict 
 242          if writer: 
 243              writer.writelines(file) 
 244          else: 
 245              return file 
 246   
 247      # This is defined here, not in the optimized exporter, so that the same make_opts.inc is used. 
 248      def write_make_opts(self, writer, link_tir_libs,tir_libs): 
 249          """ Create the file make_opts which links to the TIR libraries.""" 
 250          file = open(os.path.join(self.mgme_dir,'Template','NLO', 
 251                                   'Source','make_opts.inc')).read() 
 252          replace_dict={} 
 253          replace_dict['link_tir_libs']=' '.join(link_tir_libs) 
 254          replace_dict['tir_libs']=' '.join(tir_libs) 
 255          replace_dict['dotf']='%.f' 
 256          replace_dict['doto']='%.o' 
 257          file=file%replace_dict 
 258          if writer: 
 259              writer.writelines(file) 
 260          else: 
 261              return file 
 262   
 263      #=========================================================================== 
 264      # copy_python_files 
 265      #=========================================================================== 
 266      def copy_python_files(self): 
 267          """copy python files required for the Template""" 
 268   
 269          files_to_copy = [ \ 
 270            pjoin('interface','amcatnlo_run_interface.py'), 
 271            pjoin('interface','extended_cmd.py'), 
 272            pjoin('interface','common_run_interface.py'), 
 273            pjoin('interface','coloring_logging.py'), 
 274            pjoin('various','misc.py'), 
 275            pjoin('various','shower_card.py'), 
 276            pjoin('various','FO_analyse_card.py'), 
 277            pjoin('various','histograms.py'), 
 278            pjoin('various','banner.py'), 
 279            pjoin('various','cluster.py'), 
 280            pjoin('various','systematics.py'), 
 281            pjoin('various','lhe_parser.py'), 
 282            pjoin('madevent','sum_html.py'), 
 283            pjoin('madevent','gen_crossxhtml.py'), 
 284            pjoin('iolibs','files.py'), 
 285            pjoin('iolibs','save_load_object.py'), 
 286            pjoin('iolibs','file_writers.py'), 
 287            pjoin('..','models','check_param_card.py'), 
 288            pjoin('__init__.py') 
 289          ] 
 290          cp(_file_path+'/interface/.mg5_logging.conf', 
 291             self.dir_path+'/bin/internal/me5_logging.conf') 
 292   
 293          for cp_file in files_to_copy: 
 294              cp(pjoin(_file_path,cp_file), 
 295                 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file))) 
296
 297      def convert_model(self, model, wanted_lorentz = [], 
 298                        wanted_couplings = []): 
 299   
 300          super(ProcessExporterFortranFKS,self).convert_model(model, 
 301                                                 wanted_lorentz, wanted_couplings) 
 302   
 303          IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 
 304          try: 
 305              shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 
 306          except OSError as error: 
 307              pass 
 308          model_path = model.get('modelpath') 
 309          shutil.copytree(model_path, 
 310                          pjoin(self.dir_path,'bin','internal','ufomodel'), 
 311                          ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 
 312          if hasattr(model, 'restrict_card'): 
 313              out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 
 314                               'restrict_default.dat') 
 315              if isinstance(model.restrict_card, check_param_card.ParamCard): 
 316                  model.restrict_card.write(out_path) 
 317              else: 
 318                  files.cp(model.restrict_card, out_path) 
 319   
 320   
 321   
 322      #=========================================================================== 
 323      # write_maxparticles_file 
 324      #=========================================================================== 
 325      def write_maxparticles_file(self, writer, maxparticles): 
 326          """Write the maxparticles.inc file for MadEvent""" 
 327   
 328          lines = "integer max_particles, max_branch\n" 
 329          lines += "parameter (max_particles=%d) \n" % maxparticles 
 330          lines += "parameter (max_branch=max_particles-1)" 
 331   
 332          # Write the file 
 333          writer.writelines(lines) 
 334   
 335          return True 
 336   
 337   
 338      #=========================================================================== 
 339      # write_maxconfigs_file 
 340      #=========================================================================== 
 341      def write_maxconfigs_file(self, writer, maxconfigs): 
 342          """Write the maxconfigs.inc file for MadEvent""" 
 343   
 344          lines = "integer lmaxconfigs\n" 
 345          lines += "parameter (lmaxconfigs=%d)" % maxconfigs 
 346   
 347          # Write the file 
 348          writer.writelines(lines) 
 349   
 350          return True 
 351   
 352   
 353      #=============================================================================== 
 354      # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 
 355      #=============================================================================== 
 356      def write_procdef_mg5(self, file_pos, modelname, process_str): 
 357          """Write an equivalent of the MG4 proc_card so that the MadEvent4 
 358          Perl scripts keep working properly for a pure MG5 run.""" 
 359   
 360          proc_card_template = template_files.mg4_proc_card.mg4_template 
 361          process_template = template_files.mg4_proc_card.process_template 
 362          process_text = '' 
 363          coupling = '' 
 364          new_process_content = [] 
 365   
 366          # First find the couplings and remove them from process_str. 
 367          # Start by ensuring that the couplings are defined without spaces: 
 368          process_str = process_str.replace(' =', '=') 
 369          process_str = process_str.replace('= ', '=') 
 370          process_str = process_str.replace(',',' , ') 
 371          # now loop over the elements and treat all the couplings 
 372          for info in process_str.split(): 
 373              if '=' in info: 
 374                  coupling += info + '\n' 
 375              else: 
 376                  new_process_content.append(info) 
 377          # Recombine process_str (i.e. the input process_str without the 
 378          # coupling info) 
 379          process_str = ' '.join(new_process_content) 
 380   
 381          # format the SubProcess 
 382          process_text += process_template.substitute({'process': process_str, \ 
 383                                                       'coupling': coupling}) 
 384   
 385          text = proc_card_template.substitute({'process': process_text, 
 386                                                'model': modelname, 
 387                                                'multiparticle':''}) 
 388          ff = open(file_pos, 'w') 
 389          ff.write(text) 
 390          ff.close() 
 391   
 392   
 393      #=============================================================================== 
 394      # write an initial-states map, useful for the fast PDF NLO interface 
 395      #=============================================================================== 
 396      def write_init_map(self, file_pos, initial_states): 
 397          """ Write an initial-state process map. Each possible PDF 
 398          combination gets a unique identifier.""" 
 399   
 400          text='' 
 401          for i,e in enumerate(initial_states): 
 402              text=text+str(i+1)+' '+str(len(e)) 
 403              for t in e: 
 404                  if len(t) ==1: 
 405                      t.append(0) 
 406                  text=text+' ' 
 407                  try: 
 408                      for p in t: 
 409                          if p == None : p = 0 
 410                          text=text+' '+str(p) 
 411                  except TypeError: 
 412                      text=text+' '+str(t) 
 413              text=text+'\n' 
 414   
 415          ff = open(file_pos, 'w') 
 416          ff.write(text) 
 417          ff.close() 
418
 419      def get_ME_identifier(self, matrix_element, *args, **opts): 
 420          """ A function returning a string uniquely identifying the matrix 
 421          element given in argument, so that it can be used as a prefix to all 
 422          MadLoop5 subroutines and common blocks related to it. This allows 
 423          several processes to be compiled into one library, as requested by the 
 424          BLHA (Binoth Les Houches Accord) guidelines. The MadFKS design 
 425          requires that there be no process prefix.""" 
 426   
 427          return '' 
 428   
 429      #=============================================================================== 
 430      # write_coef_specs 
 431      #=============================================================================== 
 432      def write_coef_specs_file(self, virt_me_list): 
 433          """writes the coef_specs.inc in the DHELAS folder. Should not be called in the 
 434          non-optimized mode""" 
 435          raise fks_common.FKSProcessError("write_coef_specs should be called only in the loop-optimized mode") 
 436   
 437   
 438      #=============================================================================== 
 439      # generate_directories_fks 
 440      #=============================================================================== 
441 - def generate_directories_fks(self, matrix_element, fortran_model, me_number, 442 me_ntot, path=os.getcwd(),OLP='MadLoop'):
443 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 444 including the necessary matrix.f and various helper files""" 445 proc = matrix_element.born_matrix_element['processes'][0] 446 447 if not self.model: 448 self.model = matrix_element.get('processes')[0].get('model') 449 450 cwd = os.getcwd() 451 try: 452 os.chdir(path) 453 except OSError as error: 454 error_msg = "The directory %s should exist in order to be able " % path + \ 455 "to \"export\" in it. If you see this error message by " + \ 456 "typing the command \"export\" please consider to use " + \ 457 "instead the command \"output\". " 458 raise MadGraph5Error(error_msg) 459 460 calls = 0 461 462 self.fksdirs = [] 463 #first make and cd the direcrory corresponding to the born process: 464 borndir = "P%s" % \ 465 (matrix_element.get('processes')[0].shell_string()) 466 os.mkdir(borndir) 467 os.chdir(borndir) 468 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 469 470 ## write the files corresponding to the born process in the P* directory 471 self.generate_born_fks_files(matrix_element, 472 fortran_model, me_number, path) 473 474 # With NJET you want to generate the order file per subprocess and most 475 # likely also generate it for each subproc. 476 if OLP=='NJET': 477 filename = 'OLE_order.lh' 478 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP) 479 480 if matrix_element.virt_matrix_element: 481 calls += self.generate_virt_directory( \ 482 matrix_element.virt_matrix_element, \ 483 fortran_model, \ 484 os.path.join(path, borndir)) 485 486 #write the infortions for the different real emission processes 487 488 self.write_real_matrix_elements(matrix_element, fortran_model) 489 490 self.write_pdf_calls(matrix_element, fortran_model) 491 492 filename = 'nFKSconfigs.inc' 493 self.write_nfksconfigs_file(writers.FortranWriter(filename), 494 matrix_element, 495 fortran_model) 496 497 filename = 'iproc.dat' 498 self.write_iproc_file(writers.FortranWriter(filename), 499 me_number) 500 501 filename = 'fks_info.inc' 502 self.write_fks_info_file(writers.FortranWriter(filename), 503 matrix_element, 504 fortran_model) 505 506 filename = 'leshouche_info.dat' 507 nfksconfs,maxproc,maxflow,nexternal=\ 508 self.write_leshouche_info_file(filename,matrix_element) 509 510 # if no corrections are generated ([LOonly] mode), get 511 # these variables from the born 512 if nfksconfs == maxproc == maxflow == 0: 513 nfksconfs = 1 514 (dummylines, maxproc, maxflow) = self.get_leshouche_lines( 515 matrix_element.born_matrix_element, 1) 516 517 filename = 'leshouche_decl.inc' 518 self.write_leshouche_info_declarations( 519 writers.FortranWriter(filename), 520 nfksconfs,maxproc,maxflow,nexternal, 521 fortran_model) 522 filename = 'genps.inc' 523 ngraphs = matrix_element.born_matrix_element.get_number_of_amplitudes() 524 ncolor = max(1,len(matrix_element.born_matrix_element.get('color_basis'))) 525 self.write_genps(writers.FortranWriter(filename),maxproc,ngraphs,\ 526 ncolor,maxflow,fortran_model) 527 528 filename = 'configs_and_props_info.dat' 529 nconfigs,max_leg_number=self.write_configs_and_props_info_file( 530 filename, 531 matrix_element) 532 533 filename = 'configs_and_props_decl.inc' 534 self.write_configs_and_props_info_declarations( 535 writers.FortranWriter(filename), 536 nconfigs,max_leg_number,nfksconfs, 537 fortran_model) 538 539 filename = 'real_from_born_configs.inc' 540 self.write_real_from_born_configs( 541 writers.FortranWriter(filename), 542 
matrix_element, 543 fortran_model) 544 545 filename = 'ngraphs.inc' 546 self.write_ngraphs_file(writers.FortranWriter(filename), 547 nconfigs) 548 549 #write the wrappers 550 filename = 'real_me_chooser.f' 551 self.write_real_me_wrapper(writers.FortranWriter(filename), 552 matrix_element, 553 fortran_model) 554 555 filename = 'parton_lum_chooser.f' 556 self.write_pdf_wrapper(writers.FortranWriter(filename), 557 matrix_element, 558 fortran_model) 559 560 filename = 'get_color.f' 561 self.write_colors_file(writers.FortranWriter(filename), 562 matrix_element) 563 564 filename = 'nexternal.inc' 565 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 566 self.write_nexternal_file(writers.FortranWriter(filename), 567 nexternal, ninitial) 568 self.proc_characteristic['ninitial'] = ninitial 569 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 570 571 filename = 'pmass.inc' 572 try: 573 self.write_pmass_file(writers.FortranWriter(filename), 574 matrix_element.real_processes[0].matrix_element) 575 except IndexError: 576 self.write_pmass_file(writers.FortranWriter(filename), 577 matrix_element.born_matrix_element) 578 579 #draw the diagrams 580 self.draw_feynman_diagrams(matrix_element) 581 582 linkfiles = ['BinothLHADummy.f', 583 'check_poles.f', 584 'MCmasses_HERWIG6.inc', 585 'MCmasses_HERWIGPP.inc', 586 'MCmasses_PYTHIA6Q.inc', 587 'MCmasses_PYTHIA6PT.inc', 588 'MCmasses_PYTHIA8.inc', 589 'add_write_info.f', 590 'coupl.inc', 591 'cuts.f', 592 'FKS_params.dat', 593 'initial_states_map.dat', 594 'OLE_order.olc', 595 'FKSParams.inc', 596 'FKSParamReader.f', 597 'cuts.inc', 598 'unlops.inc', 599 'pythia_unlops.f', 600 'driver_mintMC.f', 601 'driver_mintFO.f', 602 'appl_interface.cc', 603 'appl_interface_dummy.f', 604 'appl_common.inc', 605 'reweight_appl.inc', 606 'fastjetfortran_madfks_core.cc', 607 'fastjetfortran_madfks_full.cc', 608 'fjcore.cc', 609 'fastjet_wrapper.f', 610 'fjcore.hh', 611 'fks_Sij.f', 612 'fks_powers.inc', 613 'fks_singular.f', 614 'veto_xsec.f', 615 'veto_xsec.inc', 616 'weight_lines.f', 617 'fks_inc_chooser.f', 618 'leshouche_inc_chooser.f', 619 'configs_and_props_inc_chooser.f', 620 'genps_fks.f', 621 'boostwdir2.f', 622 'madfks_mcatnlo.inc', 623 'open_output_files.f', 624 'open_output_files_dummy.f', 625 'HwU_dummy.f', 626 'madfks_plot.f', 627 'analysis_dummy.f', 628 'analysis_lhe.f', 629 'mint-integrator2.f', 630 'MC_integer.f', 631 'mint.inc', 632 'montecarlocounter.f', 633 'q_es.inc', 634 'recluster.cc', 635 'Boosts.h', 636 'reweight_xsec.f', 637 'reweight_xsec_events.f', 638 'reweight_xsec_events_pdf_dummy.f', 639 'iproc_map.f', 640 'run.inc', 641 'run_card.inc', 642 'setcuts.f', 643 'setscales.f', 644 'test_soft_col_limits.f', 645 'symmetry_fks_v3.f', 646 'vegas2.for', 647 'write_ajob.f', 648 'handling_lhe_events.f', 649 'write_event.f', 650 'fill_MC_mshell.f', 651 'maxparticles.inc', 652 'message.inc', 653 'initcluster.f', 654 'cluster.inc', 655 'cluster.f', 656 'reweight.f', 657 'randinit', 658 'sudakov.inc', 659 'maxconfigs.inc', 660 'timing_variables.inc'] 661 662 for file in linkfiles: 663 ln('../' + file , '.') 664 os.system("ln -s ../../Cards/param_card.dat .") 665 666 #copy the makefile 667 os.system("ln -s ../makefile_fks_dir ./makefile") 668 if matrix_element.virt_matrix_element: 669 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 670 elif OLP!='MadLoop': 671 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 672 else: 673 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 674 675 # Return to 
SubProcesses dir 676 os.chdir(os.path.pardir) 677 # Add subprocess to subproc.mg 678 filename = 'subproc.mg' 679 files.append_to_file(filename, 680 self.write_subproc, 681 borndir) 682 683 os.chdir(cwd) 684 # Generate info page 685 gen_infohtml.make_info_html_nlo(self.dir_path) 686 687 688 return calls
 689   
 690      #=========================================================================== 
 691      # create the run_card 
 692      #=========================================================================== 
 693      def create_run_card(self, processes, history): 
 694          """Create the NLO run_card and its default copy in the Cards directory.""" 
 695   
 696          run_card = banner_mod.RunCardNLO() 
 697   
 698          run_card.create_default_for_process(self.proc_characteristic, 
 699                                              history, 
 700                                              processes) 
 701   
 702          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 
 703          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat')) 
704 705
 706      def pass_information_from_cmd(self, cmd): 
 707          """pass information from the command interface to the exporter. 
 708          Please do not modify any object of the interface from the exporter. 
 709          """ 
 710          self.proc_defs = cmd._curr_proc_defs 
 711          if hasattr(cmd,'born_processes'): 
 712              self.born_processes = cmd.born_processes 
 713          else: 
 714              self.born_processes = [] 
 715          return 
716
717 - def finalize(self, matrix_elements, history, mg5options, flaglist):
718 """Finalize FKS directory by creating jpeg diagrams, html 719 pages,proc_card_mg5.dat and madevent.tar.gz and create the MA5 card if 720 necessary.""" 721 722 devnull = os.open(os.devnull, os.O_RDWR) 723 try: 724 res = misc.call([mg5options['lhapdf'], '--version'], \ 725 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 726 except Exception: 727 res = 1 728 if res != 0: 729 logger.info('The value for lhapdf in the current configuration does not ' + \ 730 'correspond to a valid executable.\nPlease set it correctly either in ' + \ 731 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \ 732 'and regenrate the process. \nTo avoid regeneration, edit the ' + \ 733 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \ 734 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n') 735 736 compiler_dict = {'fortran': mg5options['fortran_compiler'], 737 'cpp': mg5options['cpp_compiler'], 738 'f2py': mg5options['f2py_compiler']} 739 740 if 'nojpeg' in flaglist: 741 makejpg = False 742 else: 743 makejpg = True 744 output_dependencies = mg5options['output_dependencies'] 745 746 747 self.proc_characteristic['grouped_matrix'] = False 748 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 749 750 self.create_proc_charac() 751 752 self.create_run_card(matrix_elements.get_processes(), history) 753 # modelname = self.model.get('name') 754 # if modelname == 'mssm' or modelname.startswith('mssm-'): 755 # param_card = os.path.join(self.dir_path, 'Cards','param_card.dat') 756 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 757 # check_param_card.convert_to_mg5card(param_card, mg5_param) 758 # check_param_card.check_valid_param_card(mg5_param) 759 760 # # write the model functions get_mass/width_from_id 761 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f') 762 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc') 763 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model) 764 765 # # Write maxconfigs.inc based on max of ME's/subprocess groups 766 767 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 768 self.write_maxconfigs_file(writers.FortranWriter(filename), 769 matrix_elements.get_max_configs()) 770 771 # # Write maxparticles.inc based on max of ME's/subprocess groups 772 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 773 self.write_maxparticles_file(writers.FortranWriter(filename), 774 matrix_elements.get_max_particles()) 775 776 # Touch "done" file 777 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 778 779 # Check for compiler 780 fcompiler_chosen = self.set_fortran_compiler(compiler_dict) 781 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 782 783 old_pos = os.getcwd() 784 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 785 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 786 proc[0] == 'P'] 787 788 devnull = os.open(os.devnull, os.O_RDWR) 789 # Convert the poscript in jpg files (if authorize) 790 if makejpg: 791 logger.info("Generate jpeg diagrams") 792 for Pdir in P_dir_list: 793 os.chdir(Pdir) 794 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 795 stdout = devnull) 796 os.chdir(os.path.pardir) 797 # 798 logger.info("Generate web pages") 799 # Create the WebPage using perl script 800 801 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 
'gen_cardhtml-pl')], \ 802 stdout = devnull) 803 804 os.chdir(os.path.pardir) 805 # 806 # obj = gen_infohtml.make_info_html(self.dir_path) 807 # [mv(name, './HTML/') for name in os.listdir('.') if \ 808 # (name.endswith('.html') or name.endswith('.jpg')) and \ 809 # name != 'index.html'] 810 # if online: 811 # nb_channel = obj.rep_rule['nb_gen_diag'] 812 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 813 814 # Write command history as proc_card_mg5 815 if os.path.isdir('Cards'): 816 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 817 history.write(output_file) 818 819 # Duplicate run_card and FO_analyse_card 820 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 821 try: 822 shutil.copy(pjoin(self.dir_path, 'Cards', 823 card + '.dat'), 824 pjoin(self.dir_path, 'Cards', 825 card + '_default.dat')) 826 except IOError: 827 logger.warning("Failed to copy " + card + ".dat to default") 828 829 830 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 831 stdout = devnull) 832 833 # Run "make" to generate madevent.tar.gz file 834 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 835 if os.path.exists('amcatnlo.tar.gz'): 836 os.remove('amcatnlo.tar.gz') 837 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 838 stdout = devnull) 839 # 840 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 841 stdout = devnull) 842 843 #return to the initial dir 844 os.chdir(old_pos) 845 846 # Setup stdHep 847 # Find the correct fortran compiler 848 base_compiler= ['FC=g77','FC=gfortran'] 849 850 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 851 if output_dependencies == 'external': 852 # check if stdhep has to be compiled (only the first time) 853 if (not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 854 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a'))) and \ 855 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP','fail')): 856 if 'FC' not in os.environ or not os.environ['FC']: 857 path = os.path.join(StdHep_path, 'src', 'make_opts') 858 text = open(path).read() 859 for base in base_compiler: 860 text = text.replace(base,'FC=%s' % fcompiler_chosen) 861 open(path, 'w').writelines(text) 862 logger.info('Compiling StdHEP. This has to be done only once.') 863 try: 864 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 865 except Exception as error: 866 logger.debug(str(error)) 867 logger.warning("StdHep failed to compiled. 
This forbids to run NLO+PS with PY6 and Herwig6") 868 logger.info("details on the compilation error are available on %s", pjoin(MG5DIR, 'vendor', 'StdHEP','fail')) 869 logger.info("if you want to retry the compilation automatically, you have to remove that file first") 870 with open(pjoin(MG5DIR, 'vendor', 'StdHEP','fail'),'w') as fsock: 871 fsock.write(str(error)) 872 else: 873 logger.info('Done.') 874 if os.path.exists(pjoin(StdHep_path, 'lib', 'libstdhep.a')): 875 #then link the libraries in the exported dir 876 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 877 pjoin(self.dir_path, 'MCatNLO', 'lib')) 878 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 879 pjoin(self.dir_path, 'MCatNLO', 'lib')) 880 881 elif output_dependencies == 'internal': 882 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 883 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 884 # Create the links to the lib folder 885 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 886 for file in linkfiles: 887 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 888 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 889 if 'FC' not in os.environ or not os.environ['FC']: 890 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 891 text = open(path).read() 892 for base in base_compiler: 893 text = text.replace(base,'FC=%s' % fcompiler_chosen) 894 open(path, 'w').writelines(text) 895 # To avoid compiler version conflicts, we force a clean here 896 misc.compile(['clean'],cwd = StdHEP_internal_path) 897 898 elif output_dependencies == 'environment_paths': 899 # Here the user chose to define the dependencies path in one of 900 # his environmental paths 901 libStdHep = misc.which_lib('libstdhep.a') 902 libFmcfio = misc.which_lib('libFmcfio.a') 903 if not libStdHep is None and not libFmcfio is None: 904 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 905 os.path.dirname(libStdHep)) 906 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 907 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 908 else: 909 raise InvalidCmd("Could not find the location of the files"+\ 910 " libstdhep.a and libFmcfio.a in you environment paths.") 911 912 else: 913 raise MadGraph5Error('output_dependencies option %s not recognized'\ 914 %output_dependencies) 915 916 # Create the default MadAnalysis5 cards 917 if 'madanalysis5_path' in self.opt and not \ 918 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 919 # When using 920 processes = sum([me.get('processes') if not isinstance(me, str) else [] \ 921 for me in matrix_elements.get('matrix_elements')],[]) 922 923 # Try getting the processes from the generation info directly if no ME are 924 # available (as it is the case for parallel generation 925 if len(processes)==0: 926 processes = self.born_processes 927 if len(processes)==0: 928 logger.warning( 929 """MG5aMC could not provide to Madanalysis5 the list of processes generated. 930 As a result, the default card will not be tailored to the process generated. 931 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""") 932 # For now, simply assign all processes to each proc_defs. 933 # That shouldn't really affect the default analysis card created by MA5 934 self.create_default_madanalysis5_cards( 935 history, self.proc_defs, [processes,]*len(self.proc_defs), 936 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 937 levels =['hadron'])
938
 939      def write_real_from_born_configs(self, writer, matrix_element, fortran_model): 
 940          """Writes the real_from_born_configs.inc file that contains 
 941          the mapping to go from a given born configuration (as used 
 942          e.g. in the multi-channel phase-space integration) to the 
 943          corresponding real-emission diagram, i.e. the real-emission 
 944          diagram in which the combined particle ij is split into i_fks 
 945          and j_fks.""" 
 946          lines=[] 
 947          lines2=[] 
 948          max_links=0 
 949          born_me=matrix_element.born_matrix_element 
 950          for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 
 951              iFKS=iFKS+1 
 952              links=conf['fks_info']['rb_links'] 
 953              max_links=max(max_links,len(links)) 
 954              for i,diags in enumerate(links): 
 955                  if not i == diags['born_conf']: 
 956                      print(links) 
 957                      raise MadGraph5Error("born_conf should be canonically ordered") 
 958              real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links]) 
 959              lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \ 
 960                           % (iFKS,len(links),real_configs)) 
 961   
 962          # this is for 'LOonly' processes; in this case, a fake configuration 
 963          # with all the born diagrams is written 
 964          if not matrix_element.get_fks_info_list(): 
 965              # compute (again) the number of configurations at the born 
 966              base_diagrams = born_me.get('base_amplitude').get('diagrams') 
 967              minvert = min([max([len(vert.get('legs')) for vert in \ 
 968                                  diag.get('vertices')]) for diag in base_diagrams]) 
 969   
 970              for idiag, diag in enumerate(base_diagrams): 
 971                  if any([len(vert.get('legs')) > minvert for vert in 
 972                          diag.get('vertices')]): 
 973                      # Only 3-vertices allowed in configs.inc 
 974                      continue 
 975                  max_links = max_links + 1 
 976   
 977              real_configs=', '.join(['%d' % i for i in range(1, max_links+1)]) 
 978              lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \ 
 979                           % (1,max_links,real_configs)) 
 980   
 981          lines2.append("integer irfbc") 
 982          lines2.append("integer real_from_born_conf(%d,%d)" \ 
 983                        % (max_links, max(len(matrix_element.get_fks_info_list()),1))) 
 984          # Write the file 
 985          writer.writelines(lines2+lines) 
 986   
 987   
 988      #=============================================================================== 
 989      # write_get_mass_width_file 
 990      #=============================================================================== 
 991      # test written 
 992      def write_get_mass_width_file(self, writer, makeinc, model): 
 993          """Write the get_mass_width_file.f file for MG4. 
 994          Also update the makeinc.inc file 
 995          """ 
 996          mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero'] 
 997          width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero'] 
 998   
 999          iflines_mass = '' 
1000          iflines_width = '' 
1001   
1002          for i, part in enumerate(mass_particles): 
1003              if i == 0: 
1004                  ifstring = 'if' 
1005              else: 
1006                  ifstring = 'else if' 
1007              if part['self_antipart']: 
1008                  iflines_mass += '%s (id.eq.%d) then\n' % \ 
1009                          (ifstring, part.get_pdg_code()) 
1010              else: 
1011                  iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 
1012                          (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 
1013              iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass') 
1014   
1015          for i, part in enumerate(width_particles): 
1016              if i == 0: 
1017                  ifstring = 'if' 
1018              else: 
1019                  ifstring = 'else if' 
1020              if part['self_antipart']: 
1021                  iflines_width += '%s (id.eq.%d) then\n' % \ 
1022                          (ifstring, part.get_pdg_code()) 
1023              else: 
1024                  iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 
1025                          (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 
1026              iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width') 
1027   
1028          # Make sure it compiles with an if-statement if the above lists are empty 
1029          if len(mass_particles)==0: 
1030              iflines_mass = 'if (.True.) then\n' 
1031   
1032          if len(width_particles)==0: 
1033              iflines_width = 'if (.True.) then\n' 
1034   
1035          replace_dict = {'iflines_mass' : iflines_mass, 
1036                          'iflines_width' : iflines_width} 
1037   
1038          file = open(os.path.join(_file_path, \ 
1039                          'iolibs/template_files/get_mass_width_fcts.inc')).read() 
1040          file = file % replace_dict 
1041   
1042          # Write the file 
1043          writer.writelines(file) 
1044   
1045          # update the makeinc 
1046          makeinc_content = open(makeinc).read() 
1047          makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ') 
1048          open(makeinc, 'w').write(makeinc_content) 
1049   
1050          return 
1051 1052
1053      def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model): 
1054          """writes the declarations for the variables relevant for configs_and_props 
1055          """ 
1056          lines = [] 
1057          lines.append("integer ifr,lmaxconfigs_used,max_branch_used") 
1058          lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig) 
1059          lines.append("parameter (max_branch_used =%4d)" % -max_leg_number) 
1060          lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs) 
1061          lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 
1062          lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 
1063          lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 
1064          lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 
1065          lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 
1066          lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 
1067   
1068          writer.writelines(lines) 
1069 1070
1071 - def write_configs_and_props_info_file(self, filename, matrix_element):
1072 """writes the configs_and_props_info.inc file that cointains 1073 all the (real-emission) configurations (IFOREST) as well as 1074 the masses and widths of intermediate particles""" 1075 lines = [] 1076 lines.append("# C -> MAPCONFIG_D") 1077 lines.append("# F/D -> IFOREST_D") 1078 lines.append("# S -> SPROP_D") 1079 lines.append("# T -> TPRID_D") 1080 lines.append("# M -> PMASS_D/PWIDTH_D") 1081 lines.append("# P -> POW_D") 1082 lines2 = [] 1083 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1084 1085 max_iconfig=0 1086 max_leg_number=0 1087 1088 ######################################################## 1089 # this is for standard processes with [(real=)XXX] 1090 ######################################################## 1091 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 1092 iFKS=iFKS+1 1093 iconfig = 0 1094 s_and_t_channels = [] 1095 mapconfigs = [] 1096 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element 1097 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams') 1098 model = fks_matrix_element.get('base_amplitude').get('process').get('model') 1099 minvert = min([max([len(vert.get('legs')) for vert in \ 1100 diag.get('vertices')]) for diag in base_diagrams]) 1101 1102 lines.append("# ") 1103 lines.append("# nFKSprocess %d" % iFKS) 1104 for idiag, diag in enumerate(base_diagrams): 1105 if any([len(vert.get('legs')) > minvert for vert in 1106 diag.get('vertices')]): 1107 # Only 3-vertices allowed in configs.inc 1108 continue 1109 iconfig = iconfig + 1 1110 helas_diag = fks_matrix_element.get('diagrams')[idiag] 1111 mapconfigs.append(helas_diag.get('number')) 1112 lines.append("# Diagram %d for nFKSprocess %d" % \ 1113 (helas_diag.get('number'),iFKS)) 1114 # Correspondance between the config and the amplitudes 1115 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1116 helas_diag.get('number'))) 1117 1118 # Need to reorganize the topology so that we start with all 1119 # final state external particles and work our way inwards 1120 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1121 get_s_and_t_channels(ninitial, model, 990) 1122 1123 s_and_t_channels.append([schannels, tchannels]) 1124 1125 # Write out propagators for s-channel and t-channel vertices 1126 allchannels = schannels 1127 if len(tchannels) > 1: 1128 # Write out tchannels only if there are any non-trivial ones 1129 allchannels = schannels + tchannels 1130 1131 for vert in allchannels: 1132 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1133 last_leg = vert.get('legs')[-1] 1134 lines.append("F %4d %4d %4d %4d" % \ 1135 (iFKS,last_leg.get('number'), iconfig, len(daughters))) 1136 for d in daughters: 1137 lines.append("D %4d" % d) 1138 if vert in schannels: 1139 lines.append("S %4d %4d %4d %10d" % \ 1140 (iFKS,last_leg.get('number'), iconfig, 1141 last_leg.get('id'))) 1142 elif vert in tchannels[:-1]: 1143 lines.append("T %4d %4d %4d %10d" % \ 1144 (iFKS,last_leg.get('number'), iconfig, 1145 abs(last_leg.get('id')))) 1146 1147 # update what the array sizes (mapconfig,iforest,etc) will be 1148 max_leg_number = min(max_leg_number,last_leg.get('number')) 1149 max_iconfig = max(max_iconfig,iconfig) 1150 1151 # Write out number of configs 1152 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1153 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1154 1155 # write the props.inc information 1156 lines2.append("# ") 1157 particle_dict = fks_matrix_element.get('processes')[0].get('model').\ 1158 get('particle_dict') 
1159 1160 for iconf, configs in enumerate(s_and_t_channels): 1161 for vertex in configs[0] + configs[1][:-1]: 1162 leg = vertex.get('legs')[-1] 1163 if leg.get('id') not in particle_dict: 1164 # Fake propagator used in multiparticle vertices 1165 pow_part = 0 1166 else: 1167 particle = particle_dict[leg.get('id')] 1168 1169 pow_part = 1 + int(particle.is_boson()) 1170 1171 lines2.append("M %4d %4d %4d %10d " % \ 1172 (iFKS,leg.get('number'), iconf + 1, leg.get('id'))) 1173 lines2.append("P %4d %4d %4d %4d " % \ 1174 (iFKS,leg.get('number'), iconf + 1, pow_part)) 1175 1176 ######################################################## 1177 # this is for [LOonly=XXX] 1178 ######################################################## 1179 if not matrix_element.get_fks_info_list(): 1180 born_me = matrix_element.born_matrix_element 1181 # as usual, in this case we assume just one FKS configuration 1182 # exists with diagrams corresponding to born ones X the ij -> i,j 1183 # splitting. Here j is chosen to be the last colored particle in 1184 # the particle list 1185 bornproc = born_me.get('processes')[0] 1186 colors = [l.get('color') for l in bornproc.get('legs')] 1187 1188 fks_i = len(colors) 1189 # use the last colored particle if it exists, or 1190 # just the last 1191 fks_j=1 1192 for cpos, col in enumerate(colors): 1193 if col != 1: 1194 fks_j = cpos+1 1195 fks_j_id = [l.get('id') for l in bornproc.get('legs')][cpos] 1196 1197 # for the moment, if j is initial-state, we do nothing 1198 if fks_j > ninitial: 1199 iFKS=1 1200 iconfig = 0 1201 s_and_t_channels = [] 1202 mapconfigs = [] 1203 base_diagrams = born_me.get('base_amplitude').get('diagrams') 1204 model = born_me.get('base_amplitude').get('process').get('model') 1205 minvert = min([max([len(vert.get('legs')) for vert in \ 1206 diag.get('vertices')]) for diag in base_diagrams]) 1207 1208 lines.append("# ") 1209 lines.append("# nFKSprocess %d" % iFKS) 1210 for idiag, diag in enumerate(base_diagrams): 1211 if any([len(vert.get('legs')) > minvert for vert in 1212 diag.get('vertices')]): 1213 # Only 3-vertices allowed in configs.inc 1214 continue 1215 iconfig = iconfig + 1 1216 helas_diag = born_me.get('diagrams')[idiag] 1217 mapconfigs.append(helas_diag.get('number')) 1218 lines.append("# Diagram %d for nFKSprocess %d" % \ 1219 (helas_diag.get('number'),iFKS)) 1220 # Correspondance between the config and the amplitudes 1221 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1222 helas_diag.get('number'))) 1223 1224 # Need to reorganize the topology so that we start with all 1225 # final state external particles and work our way inwards 1226 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1227 get_s_and_t_channels(ninitial, model, 990) 1228 1229 s_and_t_channels.append([schannels, tchannels]) 1230 1231 #the first thing to write is the splitting ij -> i,j 1232 lines.append("F %4d %4d %4d %4d" % \ 1233 (iFKS,-1,iconfig,2)) 1234 #(iFKS,last_leg.get('number'), iconfig, len(daughters))) 1235 lines.append("D %4d" % nexternal) 1236 lines.append("D %4d" % fks_j) 1237 lines.append("S %4d %4d %4d %10d" % \ 1238 (iFKS,-1, iconfig,fks_j_id)) 1239 # now we continue with all the other vertices of the diagrams; 1240 # we need to shift the 'last_leg' by 1 and replace leg fks_j with -1 1241 1242 # Write out propagators for s-channel and t-channel vertices 1243 allchannels = schannels 1244 if len(tchannels) > 1: 1245 # Write out tchannels only if there are any non-trivial ones 1246 allchannels = schannels + tchannels 1247 1248 for vert in allchannels: 1249 
daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1250 last_leg = vert.get('legs')[-1] 1251 lines.append("F %4d %4d %4d %4d" % \ 1252 (iFKS,last_leg.get('number')-1, iconfig, len(daughters))) 1253 1254 # legs with negative number in daughters have to be shifted by -1 1255 for i_dau in range(len(daughters)): 1256 if daughters[i_dau] < 0: 1257 daughters[i_dau] += -1 1258 # finally relable fks with -1 if it appears in daughters 1259 if fks_j in daughters: 1260 daughters[daughters.index(fks_j)] = -1 1261 for d in daughters: 1262 lines.append("D %4d" % d) 1263 if vert in schannels: 1264 lines.append("S %4d %4d %4d %10d" % \ 1265 (iFKS,last_leg.get('number')-1, iconfig, 1266 last_leg.get('id'))) 1267 elif vert in tchannels[:-1]: 1268 lines.append("T %4d %4d %4d %10d" % \ 1269 (iFKS,last_leg.get('number')-1, iconfig, 1270 abs(last_leg.get('id')))) 1271 1272 # update what the array sizes (mapconfig,iforest,etc) will be 1273 max_leg_number = min(max_leg_number,last_leg.get('number')-1) 1274 max_iconfig = max(max_iconfig,iconfig) 1275 1276 # Write out number of configs 1277 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1278 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1279 1280 # write the props.inc information 1281 lines2.append("# ") 1282 particle_dict = born_me.get('processes')[0].get('model').\ 1283 get('particle_dict') 1284 1285 for iconf, configs in enumerate(s_and_t_channels): 1286 lines2.append("M %4d %4d %4d %10d " % \ 1287 (iFKS,-1, iconf + 1, fks_j_id)) 1288 pow_part = 1 + int(particle_dict[fks_j_id].is_boson()) 1289 lines2.append("P %4d %4d %4d %4d " % \ 1290 (iFKS,-1, iconf + 1, pow_part)) 1291 for vertex in configs[0] + configs[1][:-1]: 1292 leg = vertex.get('legs')[-1] 1293 if leg.get('id') not in particle_dict: 1294 # Fake propagator used in multiparticle vertices 1295 pow_part = 0 1296 else: 1297 particle = particle_dict[leg.get('id')] 1298 1299 pow_part = 1 + int(particle.is_boson()) 1300 1301 lines2.append("M %4d %4d %4d %10d " % \ 1302 (iFKS,leg.get('number')-1, iconf + 1, leg.get('id'))) 1303 lines2.append("P %4d %4d %4d %4d " % \ 1304 (iFKS,leg.get('number')-1, iconf + 1, pow_part)) 1305 1306 # Write the file 1307 open(filename,'w').write('\n'.join(lines+lines2)) 1308 1309 return max_iconfig, max_leg_number
1310 1311
1312      def write_leshouche_info_declarations(self, writer, nfksconfs, 
1313                              maxproc, maxflow, nexternal, fortran_model): 
1314          """writes the declarations for the variables relevant for leshouche_info 
1315          """ 
1316          lines = [] 
1317          lines.append('integer maxproc_used, maxflow_used') 
1318          lines.append('parameter (maxproc_used = %d)' % maxproc) 
1319          lines.append('parameter (maxflow_used = %d)' % maxflow) 
1320          lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal)) 
1321          lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal)) 
1322          lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal)) 
1323          lines.append('integer niprocs_d(%d)' % (nfksconfs)) 
1324   
1325          writer.writelines(lines) 
1326 1327
1328      def write_genps(self, writer, maxproc,ngraphs,ncolor,maxflow, fortran_model): 
1329          """writes the genps.inc file 
1330          """ 
1331          lines = [] 
1332          lines.append("include 'maxparticles.inc'") 
1333          lines.append("include 'maxconfigs.inc'") 
1334          lines.append("integer maxproc,ngraphs,ncolor,maxflow") 
1335          lines.append("parameter (maxproc=%d,ngraphs=%d,ncolor=%d,maxflow=%d)" % \ 
1336                       (maxproc,ngraphs,ncolor,maxflow)) 
1337          writer.writelines(lines) 
1338 1339
1340      def write_leshouche_info_file(self, filename, matrix_element): 
1341          """writes the leshouche_info.inc file which contains 
1342          the LHA information for all the real emission processes 
1343          """ 
1344          lines = [] 
1345          lines.append("# I -> IDUP_D") 
1346          lines.append("# M -> MOTHUP_D") 
1347          lines.append("# C -> ICOLUP_D") 
1348          nfksconfs = len(matrix_element.get_fks_info_list()) 
1349          (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 
1350   
1351          maxproc = 0 
1352          maxflow = 0 
1353          for i, conf in enumerate(matrix_element.get_fks_info_list()): 
1354              # for i, real in enumerate(matrix_element.real_processes): 
1355              (newlines, nprocs, nflows) = self.get_leshouche_lines( 
1356                      matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1) 
1357              lines.extend(newlines) 
1358              maxproc = max(maxproc, nprocs) 
1359              maxflow = max(maxflow, nflows) 
1360   
1361          # this is for LOonly 
1362          if not matrix_element.get_fks_info_list(): 
1363              (newlines, nprocs, nflows) = self.get_leshouche_lines_dummy(matrix_element.born_matrix_element, 1) 
1364              lines.extend(newlines) 
1365   
1366          # Write the file 
1367          open(filename,'w').write('\n'.join(lines)) 
1368   
1369          return nfksconfs, maxproc, maxflow, nexternal 
1370 1371
1372      def write_pdf_wrapper(self, writer, matrix_element, fortran_model): 
1373          """writes the wrapper which allows choosing among the different parton luminosities""" 
1374   
1375          file = \ 
1376  """double precision function dlum() 
1377  implicit none 
1378  integer nfksprocess 
1379  common/c_nfksprocess/nfksprocess 
1380  """ 
1381          if matrix_element.real_processes: 
1382              for n, info in enumerate(matrix_element.get_fks_info_list()): 
1383                  file += \ 
1384  """if (nfksprocess.eq.%(n)d) then 
1385  call dlum_%(n_me)d(dlum) 
1386  else""" % {'n': n + 1, 'n_me' : info['n_me']} 
1387              file += \ 
1388  """ 
1389  write(*,*) 'ERROR: invalid n in dlum :', nfksprocess 
1390  stop 
1391  endif 
1392  return 
1393  end 
1394  """ 
1395          else: 
1396              file+= \ 
1397  """call dlum_0(dlum) 
1398  return 
1399  end 
1400  """ 
1401   
1402          # Write the file 
1403          writer.writelines(file) 
1404          return 0 
1405 1406
1407      def write_real_me_wrapper(self, writer, matrix_element, fortran_model): 
1408          """writes the wrapper which allows choosing among the different real matrix elements""" 
1409   
1410          file = \ 
1411  """subroutine smatrix_real(p, wgt) 
1412  implicit none 
1413  include 'nexternal.inc' 
1414  double precision p(0:3, nexternal) 
1415  double precision wgt 
1416  integer nfksprocess 
1417  common/c_nfksprocess/nfksprocess 
1418  """ 
1419          for n, info in enumerate(matrix_element.get_fks_info_list()): 
1420              file += \ 
1421  """if (nfksprocess.eq.%(n)d) then 
1422  call smatrix_%(n_me)d(p, wgt) 
1423  else""" % {'n': n + 1, 'n_me' : info['n_me']} 
1424   
1425          if matrix_element.real_processes: 
1426              file += \ 
1427  """ 
1428  write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess 
1429  stop 
1430  endif 
1431  return 
1432  end 
1433  """ 
1434          else: 
1435              file += \ 
1436  """ 
1437  wgt=0d0 
1438  return 
1439  end 
1440  """ 
1441          # Write the file 
1442          writer.writelines(file) 
1443          return 0 
1444 1445
1446      def draw_feynman_diagrams(self, matrix_element): 
1447          """Create the ps files containing the Feynman diagrams for the born process, 
1448          as well as for all the real emission processes""" 
1449   
1450          filename = 'born.ps' 
1451          plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\ 
1452                                            get('base_amplitude').get('diagrams'), 
1453                                            filename, 
1454                                            model=matrix_element.born_matrix_element.\ 
1455                                            get('processes')[0].get('model'), 
1456                                            amplitude=True, diagram_type='born') 
1457          plot.draw() 
1458   
1459          for n, fksreal in enumerate(matrix_element.real_processes): 
1460              filename = 'matrix_%d.ps' % (n + 1) 
1461              plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\ 
1462                                            get('base_amplitude').get('diagrams'), 
1463                                            filename, 
1464                                            model=fksreal.matrix_element.\ 
1465                                            get('processes')[0].get('model'), 
1466                                            amplitude=True, diagram_type='real') 
1467              plot.draw() 
1468 1469
1470      def write_real_matrix_elements(self, matrix_element, fortran_model): 
1471          """writes the matrix_i.f files which contain the real matrix elements""" 
1472   
1473   
1474   
1475          for n, fksreal in enumerate(matrix_element.real_processes): 
1476              filename = 'matrix_%d.f' % (n + 1) 
1477              self.write_matrix_element_fks(writers.FortranWriter(filename), 
1478                                            fksreal.matrix_element, n + 1, 
1479                                            fortran_model) 
1480
1481      def write_pdf_calls(self, matrix_element, fortran_model): 
1482          """writes the parton_lum_i.f files which contain the parton luminosities. 
1483          If no real emission exists, write the one for the born""" 
1484   
1485          if matrix_element.real_processes: 
1486              for n, fksreal in enumerate(matrix_element.real_processes): 
1487                  filename = 'parton_lum_%d.f' % (n + 1) 
1488                  self.write_pdf_file(writers.FortranWriter(filename), 
1489                                      fksreal.matrix_element, n + 1, 
1490                                      fortran_model) 
1491          else: 
1492              filename = 'parton_lum_0.f' 
1493              self.write_pdf_file(writers.FortranWriter(filename), 
1494                                  matrix_element.born_matrix_element, 0, 
1495                                  fortran_model) 
1496 1497
1498      def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path): 
1499          """generates the files needed for the born amplitude in the P* directory; 
1500          these will be used by the other routines in that directory""" 
1501          pathdir = os.getcwd() 
1502   
1503          filename = 'born.f' 
1504          calls_born, ncolor_born = \ 
1505              self.write_born_fks(writers.FortranWriter(filename),\ 
1506                                  matrix_element, 
1507                                  fortran_model) 
1508   
1509          filename = 'born_hel.f' 
1510          self.write_born_hel(writers.FortranWriter(filename),\ 
1511                              matrix_element, 
1512                              fortran_model) 
1513   
1514   
1515          filename = 'born_conf.inc' 
1516          nconfigs, mapconfigs, s_and_t_channels = \ 
1517              self.write_configs_file( 
1518                  writers.FortranWriter(filename), 
1519                  matrix_element.born_matrix_element, 
1520                  fortran_model) 
1521   
1522          filename = 'born_props.inc' 
1523          self.write_props_file(writers.FortranWriter(filename), 
1524                                matrix_element.born_matrix_element, 
1525                                fortran_model, 
1526                                s_and_t_channels) 
1527   
1528          filename = 'born_decayBW.inc' 
1529          self.write_decayBW_file(writers.FortranWriter(filename), 
1530                                  s_and_t_channels) 
1531   
1532          filename = 'born_leshouche.inc' 
1533          nflows = self.write_leshouche_file(writers.FortranWriter(filename), 
1534                                             matrix_element.born_matrix_element, 
1535                                             fortran_model) 
1536   
1537          filename = 'born_nhel.inc' 
1538          self.write_born_nhel_file(writers.FortranWriter(filename), 
1539                                    matrix_element.born_matrix_element, nflows, 
1540                                    fortran_model, 
1541                                    ncolor_born) 
1542   
1543          filename = 'born_ngraphs.inc' 
1544          self.write_ngraphs_file(writers.FortranWriter(filename), 
1545                                  matrix_element.born_matrix_element.get_number_of_amplitudes()) 
1546   
1547          filename = 'ncombs.inc' 
1548          self.write_ncombs_file(writers.FortranWriter(filename), 
1549                                 matrix_element.born_matrix_element, 
1550                                 fortran_model) 
1551   
1552          filename = 'born_maxamps.inc' 
1553          maxamps = len(matrix_element.get('diagrams')) 
1554          maxflows = ncolor_born 
1555          self.write_maxamps_file(writers.FortranWriter(filename), 
1556                                  maxamps, 
1557                                  maxflows, 
1558                                  max([len(matrix_element.get('processes')) for me in \ 
1559                                       matrix_element.born_matrix_element]),1) 
1560   
1561          filename = 'config_subproc_map.inc' 
1562          self.write_config_subproc_map_file(writers.FortranWriter(filename), 
1563                                             s_and_t_channels) 
1564   
1565          filename = 'coloramps.inc' 
1566          self.write_coloramps_file(writers.FortranWriter(filename), 
1567                                    mapconfigs, 
1568                                    matrix_element.born_matrix_element, 
1569                                    fortran_model) 
1570   
1571          # write the sborn_sf.f and the b_sf_ files 
1572          filename = ['sborn_sf.f', 'sborn_sf_dum.f'] 
1573          for i, links in enumerate([matrix_element.color_links, []]): 
1574              self.write_sborn_sf(writers.FortranWriter(filename[i]), 
1575                                  links, 
1576                                  fortran_model) 
1577          self.color_link_files = [] 
1578          for i in range(len(matrix_element.color_links)): 
1579              filename = 'b_sf_%3.3d.f' % (i + 1) 
1580              self.color_link_files.append(filename) 
1581              self.write_b_sf_fks(writers.FortranWriter(filename), 
1582                                  matrix_element, i, 
1583                                  fortran_model) 
1584 1585
1586 - def generate_virtuals_from_OLP(self,process_list,export_path, OLP):
1587 """Generates the library for computing the loop matrix elements 1588 necessary for this process using the OLP specified.""" 1589 1590 # Start by writing the BLHA order file 1591 virtual_path = pjoin(export_path,'OLP_virtuals') 1592 if not os.path.exists(virtual_path): 1593 os.makedirs(virtual_path) 1594 filename = os.path.join(virtual_path,'OLE_order.lh') 1595 self.write_lh_order(filename, process_list, OLP) 1596 1597 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\ 1598 'Please check the virt_generation.log file in %s.'\ 1599 %str(pjoin(virtual_path,'virt_generation.log')) 1600 1601 # Perform some tasks specific to certain OLP's 1602 if OLP=='GoSam': 1603 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1604 'GoSam','makevirt'),pjoin(virtual_path,'makevirt')) 1605 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1606 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc')) 1607 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path) 1608 # Now generate the process 1609 logger.info('Generating the loop matrix elements with %s...'%OLP) 1610 virt_generation_log = \ 1611 open(pjoin(virtual_path,'virt_generation.log'), 'w') 1612 retcode = subprocess.call(['./makevirt'],cwd=virtual_path, 1613 stdout=virt_generation_log, stderr=virt_generation_log) 1614 virt_generation_log.close() 1615 # Check what extension is used for the share libraries on this system 1616 possible_other_extensions = ['so','dylib'] 1617 shared_lib_ext='so' 1618 for ext in possible_other_extensions: 1619 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib', 1620 'libgolem_olp.'+ext)): 1621 shared_lib_ext = ext 1622 1623 # Now check that everything got correctly generated 1624 files_to_check = ['olp_module.mod',str(pjoin('lib', 1625 'libgolem_olp.'+shared_lib_ext))] 1626 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path, 1627 'Virtuals',f)) for f in files_to_check]): 1628 raise fks_common.FKSProcessError(fail_msg) 1629 # link the library to the lib folder 1630 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext), 1631 pjoin(export_path,'lib')) 1632 1633 # Specify in make_opts the right library necessitated by the OLP 1634 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read() 1635 make_opts=open(pjoin(export_path,'Source','make_opts'),'w') 1636 if OLP=='GoSam': 1637 if platform.system().lower()=='darwin': 1638 # On mac the -rpath is not supported and the path of the dynamic 1639 # library is automatically wired in the executable 1640 make_opts_content=make_opts_content.replace('libOLP=', 1641 'libOLP=-Wl,-lgolem_olp') 1642 else: 1643 # On other platforms the option , -rpath= path to libgolem.so is necessary 1644 # Using a relative path is not ideal because the file libgolem.so is not 1645 # copied on the worker nodes. 1646 # make_opts_content=make_opts_content.replace('libOLP=', 1647 # 'libOLP=-Wl,-rpath=../$(LIBDIR) -lgolem_olp') 1648 # Using the absolute path is working in the case where the disk of the 1649 # front end machine is mounted on all worker nodes as well. 
1650 make_opts_content=make_opts_content.replace('libOLP=', 1651 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp') 1652 1653 1654 make_opts.write(make_opts_content) 1655 make_opts.close() 1656 1657 # A priori this is generic to all OLP's 1658 1659 # Parse the contract file returned and propagate the process label to 1660 # the include of the BinothLHA.f file 1661 proc_to_label = self.parse_contract_file( 1662 pjoin(virtual_path,'OLE_order.olc')) 1663 1664 self.write_BinothLHA_inc(process_list,proc_to_label,\ 1665 pjoin(export_path,'SubProcesses')) 1666 1667 # Link the contract file to within the SubProcess directory 1668 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1669
1670 - def write_BinothLHA_inc(self, processes, proc_to_label, SubProcPath):
1671 """ Write the file Binoth_proc.inc in each SubProcess directory so as 1672 to provide the right process_label to use in the OLP call to get the 1673 loop matrix element evaluation. The proc_to_label is the dictionary of 1674 the format of the one returned by the function parse_contract_file.""" 1675 1676 for proc in processes: 1677 name = "P%s"%proc.shell_string() 1678 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \ 1679 not leg.get('state')]), 1680 tuple([leg.get('id') for leg in proc.get('legs') if \ 1681 leg.get('state')])) 1682 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w') 1683 try: 1684 incFile.write( 1685 """ INTEGER PROC_LABEL 1686 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs])) 1687 except KeyError: 1688 raise fks_common.FKSProcessError('Could not found the target'+\ 1689 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\ 1690 ' the proc_to_label argument in write_BinothLHA_inc.') 1691 incFile.close()
1692
1693 - def parse_contract_file(self, contract_file_path):
1694 """ Parses the BLHA contract file, make sure all parameters could be 1695 understood by the OLP and return a mapping of the processes (characterized 1696 by the pdg's of the initial and final state particles) to their process 1697 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 1698 """ 1699 1700 proc_def_to_label = {} 1701 1702 if not os.path.exists(contract_file_path): 1703 raise fks_common.FKSProcessError('Could not find the contract file'+\ 1704 ' OLE_order.olc in %s.'%str(contract_file_path)) 1705 1706 comment_re=re.compile(r"^\s*#") 1707 proc_def_re=re.compile( 1708 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+ 1709 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 1710 line_OK_re=re.compile(r"^.*\|\s*OK") 1711 for line in open(contract_file_path): 1712 # Ignore comments 1713 if not comment_re.match(line) is None: 1714 continue 1715 # Check if it is a proc definition line 1716 proc_def = proc_def_re.match(line) 1717 if not proc_def is None: 1718 if int(proc_def.group('proc_class'))!=1: 1719 raise fks_common.FKSProcessError( 1720 'aMCatNLO can only handle loop processes generated by the OLP which have only '+\ 1721 ' process class attribute. Found %s instead in: \n%s'\ 1722 %(proc_def.group('proc_class'),line)) 1723 in_pdgs=tuple([int(in_pdg) for in_pdg in \ 1724 proc_def.group('in_pdgs').split()]) 1725 out_pdgs=tuple([int(out_pdg) for out_pdg in \ 1726 proc_def.group('out_pdgs').split()]) 1727 proc_def_to_label[(in_pdgs,out_pdgs)]=\ 1728 int(proc_def.group('proc_label')) 1729 continue 1730 # For the other types of line, just make sure they end with | OK 1731 if line_OK_re.match(line) is None: 1732 raise fks_common.FKSProcessError( 1733 'The OLP could not process the following line: \n%s'%line) 1734 1735 return proc_def_to_label
1736 1737
1738 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1739 """writes the V**** directory inside the P**** directories specified in 1740 dir_name""" 1741 1742 cwd = os.getcwd() 1743 1744 matrix_element = loop_matrix_element 1745 1746 # Create the MadLoop5_resources directory if not already existing 1747 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 1748 try: 1749 os.mkdir(dirpath) 1750 except os.error as error: 1751 logger.warning(error.strerror + " " + dirpath) 1752 1753 # Create the directory PN_xx_xxxxx in the specified path 1754 name = "V%s" % matrix_element.get('processes')[0].shell_string() 1755 dirpath = os.path.join(dir_name, name) 1756 1757 try: 1758 os.mkdir(dirpath) 1759 except os.error as error: 1760 logger.warning(error.strerror + " " + dirpath) 1761 1762 try: 1763 os.chdir(dirpath) 1764 except os.error: 1765 logger.error('Could not cd to directory %s' % dirpath) 1766 return 0 1767 1768 logger.info('Creating files in directory %s' % name) 1769 1770 # Extract number of external particles 1771 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1772 1773 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 1774 # The born matrix element, if needed 1775 filename = 'born_matrix.f' 1776 calls = self.write_bornmatrix( 1777 writers.FortranWriter(filename), 1778 matrix_element, 1779 fortran_model) 1780 1781 filename = 'nexternal.inc' 1782 self.write_nexternal_file(writers.FortranWriter(filename), 1783 nexternal, ninitial) 1784 1785 filename = 'pmass.inc' 1786 self.write_pmass_file(writers.FortranWriter(filename), 1787 matrix_element) 1788 1789 filename = 'ngraphs.inc' 1790 self.write_ngraphs_file(writers.FortranWriter(filename), 1791 len(matrix_element.get_all_amplitudes())) 1792 1793 filename = "loop_matrix.ps" 1794 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 1795 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 1796 filename, 1797 model=matrix_element.get('processes')[0].get('model'), 1798 amplitude='') 1799 logger.info("Drawing loop Feynman diagrams for " + \ 1800 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1801 plot.draw() 1802 1803 filename = "born_matrix.ps" 1804 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1805 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\ 1806 get('model'),amplitude='') 1807 logger.info("Generating born Feynman diagrams for " + \ 1808 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1809 plot.draw() 1810 1811 # We also need to write the overall maximum quantities for this group 1812 # of processes in 'global_specs.inc'. 
In aMCatNLO, there is always 1813 # only one process, so this is trivial 1814 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc')) 1815 open('unique_id.inc','w').write( 1816 """ integer UNIQUE_ID 1817 parameter(UNIQUE_ID=1)""") 1818 1819 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 1820 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 1821 'MadLoopCommons.f','MadLoopParams.inc'] 1822 1823 # We should move to MadLoop5_resources directory from the SubProcesses 1824 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 1825 pjoin('..','MadLoop5_resources')) 1826 1827 for file in linkfiles: 1828 ln('../../%s' % file) 1829 1830 os.system("ln -s ../../makefile_loop makefile") 1831 1832 linkfiles = ['mpmodule.mod'] 1833 1834 for file in linkfiles: 1835 ln('../../../lib/%s' % file) 1836 1837 linkfiles = ['coef_specs.inc'] 1838 1839 for file in linkfiles: 1840 ln('../../../Source/DHELAS/%s' % file) 1841 1842 # Return to original PWD 1843 os.chdir(cwd) 1844 1845 if not calls: 1846 calls = 0 1847 return calls
1848
1849 - def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
1850 """computes the QED/QCD orders from the knowledge of the n of ext particles 1851 and of the weighted orders""" 1852 # n vertices = nexternal - 2 =QED + QCD 1853 # weighted = 2*QED + QCD 1854 QED = weighted - nexternal + 2 1855 QCD = weighted - 2 * QED 1856 return QED, QCD
1857 1858 1859 1860 #=============================================================================== 1861 # write_lh_order 1862 #=============================================================================== 1863 #test written
1864 - def write_lh_order(self, filename, process_list, OLP='MadLoop'):
1865 """Creates the OLE_order.lh file. This function should be edited according 1866 to the OLP which is used. For now it is generic.""" 1867 1868 1869 if len(process_list)==0: 1870 raise fks_common.FKSProcessError('No matrix elements provided to '+\ 1871 'the function write_lh_order.') 1872 return 1873 1874 # We assume the orders to be common to all Subprocesses 1875 1876 orders = process_list[0].get('orders') 1877 if 'QED' in list(orders.keys()) and 'QCD' in list(orders.keys()): 1878 QED=orders['QED'] 1879 QCD=orders['QCD'] 1880 elif 'QED' in list(orders.keys()): 1881 QED=orders['QED'] 1882 QCD=0 1883 elif 'QCD' in list(orders.keys()): 1884 QED=0 1885 QCD=orders['QCD'] 1886 else: 1887 QED, QCD = self.get_qed_qcd_orders_from_weighted(\ 1888 len(process_list[0].get('legs')), 1889 orders['WEIGHTED']) 1890 1891 replace_dict = {} 1892 replace_dict['mesq'] = 'CHaveraged' 1893 replace_dict['corr'] = ' '.join(process_list[0].\ 1894 get('perturbation_couplings')) 1895 replace_dict['irreg'] = 'CDR' 1896 replace_dict['aspow'] = QCD 1897 replace_dict['aepow'] = QED 1898 replace_dict['modelfile'] = './param_card.dat' 1899 replace_dict['params'] = 'alpha_s' 1900 proc_lines=[] 1901 for proc in process_list: 1902 proc_lines.append('%s -> %s' % \ 1903 (' '.join(str(l['id']) for l in proc['legs'] if not l['state']), 1904 ' '.join(str(l['id']) for l in proc['legs'] if l['state']))) 1905 replace_dict['pdgs'] = '\n'.join(proc_lines) 1906 replace_dict['symfin'] = 'Yes' 1907 content = \ 1908 "#OLE_order written by MadGraph5_aMC@NLO\n\ 1909 \n\ 1910 MatrixElementSquareType %(mesq)s\n\ 1911 CorrectionType %(corr)s\n\ 1912 IRregularisation %(irreg)s\n\ 1913 AlphasPower %(aspow)d\n\ 1914 AlphaPower %(aepow)d\n\ 1915 NJetSymmetrizeFinal %(symfin)s\n\ 1916 ModelFile %(modelfile)s\n\ 1917 Parameters %(params)s\n\ 1918 \n\ 1919 # process\n\ 1920 %(pdgs)s\n\ 1921 " % replace_dict 1922 1923 file = open(filename, 'w') 1924 file.write(content) 1925 file.close 1926 return
1927 1928 1929 #=============================================================================== 1930 # write_born_fks 1931 #=============================================================================== 1932 # test written
1933 - def write_born_fks(self, writer, fksborn, fortran_model):
1934 """Export a matrix element to a born.f file in MadFKS format""" 1935 1936 matrix_element = fksborn.born_matrix_element 1937 1938 if not matrix_element.get('processes') or \ 1939 not matrix_element.get('diagrams'): 1940 return 0 1941 1942 if not isinstance(writer, writers.FortranWriter): 1943 raise writers.FortranWriter.FortranWriterError(\ 1944 "writer not FortranWriter") 1945 1946 # Add information relevant for FxFx matching: 1947 # Maximum QCD power in all the contributions 1948 max_qcd_order = 0 1949 for diag in matrix_element.get('diagrams'): 1950 orders = diag.calculate_orders() 1951 if 'QCD' in orders: 1952 max_qcd_order = max(max_qcd_order,orders['QCD']) 1953 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 1954 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 1955 proc.get('model').get_particle(id).get('color')>1]) 1956 for proc in matrix_element.get('processes')) 1957 # Maximum number of final state light jets to be matched 1958 self.proc_characteristic['max_n_matched_jets'] = max( 1959 self.proc_characteristic['max_n_matched_jets'], 1960 min(max_qcd_order,max_n_light_final_partons)) 1961 1962 1963 # Set lowercase/uppercase Fortran code 1964 writers.FortranWriter.downcase = False 1965 1966 replace_dict = {} 1967 1968 # Extract version number and date from VERSION file 1969 info_lines = self.get_mg5_info_lines() 1970 replace_dict['info_lines'] = info_lines 1971 1972 # Extract process info lines 1973 process_lines = self.get_process_info_lines(matrix_element) 1974 replace_dict['process_lines'] = process_lines 1975 1976 1977 # Extract ncomb 1978 ncomb = matrix_element.get_helicity_combinations() 1979 replace_dict['ncomb'] = ncomb 1980 1981 # Extract helicity lines 1982 helicity_lines = self.get_helicity_lines(matrix_element) 1983 replace_dict['helicity_lines'] = helicity_lines 1984 1985 # Extract IC line 1986 ic_line = self.get_ic_line(matrix_element) 1987 replace_dict['ic_line'] = ic_line 1988 1989 # Extract overall denominator 1990 # Averaging initial state color, spin, and identical FS particles 1991 #den_factor_line = get_den_factor_line(matrix_element) 1992 1993 # Extract ngraphs 1994 ngraphs = matrix_element.get_number_of_amplitudes() 1995 replace_dict['ngraphs'] = ngraphs 1996 1997 # Extract nwavefuncs 1998 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1999 replace_dict['nwavefuncs'] = nwavefuncs 2000 2001 # Extract ncolor 2002 ncolor = max(1, len(matrix_element.get('color_basis'))) 2003 replace_dict['ncolor'] = ncolor 2004 2005 # Extract color data lines 2006 color_data_lines = self.get_color_data_lines(matrix_element) 2007 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2008 2009 # Extract helas calls 2010 helas_calls = fortran_model.get_matrix_element_calls(\ 2011 matrix_element) 2012 replace_dict['helas_calls'] = "\n".join(helas_calls) 2013 2014 # Extract amp2 lines 2015 amp2_lines = self.get_amp2_lines(matrix_element) 2016 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2017 2018 # Extract JAMP lines 2019 jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element) 2020 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2021 replace_dict['nb_temp_jamp'] = nb_tmp_jamp 2022 2023 2024 # Set the size of Wavefunction 2025 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2026 replace_dict['wavefunctionsize'] = 20 2027 else: 2028 replace_dict['wavefunctionsize'] = 8 2029 2030 # Extract glu_ij_lines 2031 ij_lines = self.get_ij_lines(fksborn) 2032 
replace_dict['ij_lines'] = '\n'.join(ij_lines) 2033 2034 # Extract den_factor_lines 2035 den_factor_lines = self.get_den_factor_lines(fksborn) 2036 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 2037 2038 # Extract the number of FKS process 2039 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1) 2040 2041 file = open(os.path.join(_file_path, \ 2042 'iolibs/template_files/born_fks.inc')).read() 2043 file = file % replace_dict 2044 2045 # Write the file 2046 writer.writelines(file) 2047 2048 return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2049 2050
2051 - def write_born_hel(self, writer, fksborn, fortran_model):
2052 """Export a matrix element to a born_hel.f file in MadFKS format""" 2053 2054 matrix_element = fksborn.born_matrix_element 2055 2056 if not matrix_element.get('processes') or \ 2057 not matrix_element.get('diagrams'): 2058 return 0 2059 2060 if not isinstance(writer, writers.FortranWriter): 2061 raise writers.FortranWriter.FortranWriterError(\ 2062 "writer not FortranWriter") 2063 # Set lowercase/uppercase Fortran code 2064 writers.FortranWriter.downcase = False 2065 2066 replace_dict = {} 2067 2068 # Extract version number and date from VERSION file 2069 info_lines = self.get_mg5_info_lines() 2070 replace_dict['info_lines'] = info_lines 2071 2072 # Extract process info lines 2073 process_lines = self.get_process_info_lines(matrix_element) 2074 replace_dict['process_lines'] = process_lines 2075 2076 2077 # Extract ncomb 2078 ncomb = matrix_element.get_helicity_combinations() 2079 replace_dict['ncomb'] = ncomb 2080 2081 # Extract helicity lines 2082 helicity_lines = self.get_helicity_lines(matrix_element) 2083 replace_dict['helicity_lines'] = helicity_lines 2084 2085 # Extract IC line 2086 ic_line = self.get_ic_line(matrix_element) 2087 replace_dict['ic_line'] = ic_line 2088 2089 # Extract overall denominator 2090 # Averaging initial state color, spin, and identical FS particles 2091 #den_factor_line = get_den_factor_line(matrix_element) 2092 2093 # Extract ngraphs 2094 ngraphs = matrix_element.get_number_of_amplitudes() 2095 replace_dict['ngraphs'] = ngraphs 2096 2097 # Extract nwavefuncs 2098 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2099 replace_dict['nwavefuncs'] = nwavefuncs 2100 2101 # Extract ncolor 2102 ncolor = max(1, len(matrix_element.get('color_basis'))) 2103 replace_dict['ncolor'] = ncolor 2104 2105 # Extract color data lines 2106 color_data_lines = self.get_color_data_lines(matrix_element) 2107 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2108 2109 # Extract amp2 lines 2110 amp2_lines = self.get_amp2_lines(matrix_element) 2111 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2112 2113 # Extract JAMP lines 2114 jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element) 2115 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2116 replace_dict['nb_temp_jamp'] = nb_tmp_jamp 2117 2118 # Extract den_factor_lines 2119 den_factor_lines = self.get_den_factor_lines(fksborn) 2120 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 2121 2122 # Extract the number of FKS process 2123 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2124 2125 file = open(os.path.join(_file_path, \ 2126 'iolibs/template_files/born_fks_hel.inc')).read() 2127 file = file % replace_dict 2128 2129 # Write the file 2130 writer.writelines(file) 2131 2132 return
2133 2134 2135 #=============================================================================== 2136 # write_born_sf_fks 2137 #=============================================================================== 2138 #test written
2139 - def write_sborn_sf(self, writer, color_links, fortran_model):
2140 """Creates the sborn_sf.f file, containing the calls to the different 2141 color linked borns""" 2142 2143 replace_dict = {} 2144 nborns = len(color_links) 2145 ifkss = [] 2146 iborns = [] 2147 mms = [] 2148 nns = [] 2149 iflines = "\n" 2150 2151 #header for the sborn_sf.f file 2152 file = """subroutine sborn_sf(p_born,m,n,wgt) 2153 implicit none 2154 include "nexternal.inc" 2155 double precision p_born(0:3,nexternal-1),wgt 2156 double complex wgt1(2) 2157 integer m,n \n""" 2158 2159 if nborns > 0: 2160 2161 for i, c_link in enumerate(color_links): 2162 iborn = i+1 2163 2164 iff = {True : 'if', False : 'elseif'}[i==0] 2165 2166 m, n = c_link['link'] 2167 2168 if m != n: 2169 iflines += \ 2170 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 2171 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\ 2172 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 2173 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 2174 else: 2175 iflines += \ 2176 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 2177 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\ 2178 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 2179 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 2180 2181 2182 file += iflines + \ 2183 """else 2184 wgt = 0d0 2185 endif 2186 2187 return 2188 end""" 2189 elif nborns == 0: 2190 #write a dummy file 2191 file+=""" 2192 c This is a dummy function because 2193 c this subdir has no soft singularities 2194 wgt = 0d0 2195 2196 return 2197 end""" 2198 # Write the end of the file 2199 2200 writer.writelines(file)
2201 2202 2203 #=============================================================================== 2204 # write_b_sf_fks 2205 #=============================================================================== 2206 #test written
2207 - def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
2208 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format""" 2209 2210 matrix_element = copy.copy(fksborn.born_matrix_element) 2211 2212 if not matrix_element.get('processes') or \ 2213 not matrix_element.get('diagrams'): 2214 return 0 2215 2216 if not isinstance(writer, writers.FortranWriter): 2217 raise writers.FortranWriter.FortranWriterError(\ 2218 "writer not FortranWriter") 2219 # Set lowercase/uppercase Fortran code 2220 writers.FortranWriter.downcase = False 2221 2222 iborn = i + 1 2223 link = fksborn.color_links[i] 2224 2225 replace_dict = {} 2226 2227 replace_dict['iborn'] = iborn 2228 2229 # Extract version number and date from VERSION file 2230 info_lines = self.get_mg5_info_lines() 2231 replace_dict['info_lines'] = info_lines 2232 2233 # Extract process info lines 2234 process_lines = self.get_process_info_lines(matrix_element) 2235 replace_dict['process_lines'] = process_lines + \ 2236 "\nc spectators: %d %d \n" % tuple(link['link']) 2237 2238 # Extract ncomb 2239 ncomb = matrix_element.get_helicity_combinations() 2240 replace_dict['ncomb'] = ncomb 2241 2242 # Extract helicity lines 2243 helicity_lines = self.get_helicity_lines(matrix_element) 2244 replace_dict['helicity_lines'] = helicity_lines 2245 2246 # Extract IC line 2247 ic_line = self.get_ic_line(matrix_element) 2248 replace_dict['ic_line'] = ic_line 2249 2250 # Extract den_factor_lines 2251 den_factor_lines = self.get_den_factor_lines(fksborn) 2252 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 2253 2254 # Extract ngraphs 2255 ngraphs = matrix_element.get_number_of_amplitudes() 2256 replace_dict['ngraphs'] = ngraphs 2257 2258 # Extract nwavefuncs 2259 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2260 replace_dict['nwavefuncs'] = nwavefuncs 2261 2262 # Extract ncolor 2263 ncolor1 = max(1, len(link['orig_basis'])) 2264 replace_dict['ncolor1'] = ncolor1 2265 ncolor2 = max(1, len(link['link_basis'])) 2266 replace_dict['ncolor2'] = ncolor2 2267 2268 # Extract color data lines 2269 color_data_lines = self.get_color_data_lines_from_color_matrix(\ 2270 link['link_matrix']) 2271 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2272 2273 # Extract amp2 lines 2274 amp2_lines = self.get_amp2_lines(matrix_element) 2275 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2276 2277 # Extract JAMP lines 2278 jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element, JAMP_format="JAMP1(%s)") 2279 replace_dict['jamp1_lines'] = '\n'.join(jamp_lines) 2280 replace_dict['nb_temp_jamp'] = nb_tmp_jamp 2281 2282 2283 matrix_element.set('color_basis', link['link_basis'] ) 2284 jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element, JAMP_format="JAMP2(%s)") 2285 replace_dict['jamp2_lines'] = '\n'.join(jamp_lines) 2286 replace_dict['nb_temp_jamp'] = max(nb_tmp_jamp, replace_dict['nb_temp_jamp']) 2287 2288 2289 # Extract the number of FKS process 2290 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2291 2292 file = open(os.path.join(_file_path, \ 2293 'iolibs/template_files/b_sf_xxx_fks.inc')).read() 2294 file = file % replace_dict 2295 2296 # Write the file 2297 writer.writelines(file) 2298 2299 return 0 , ncolor1
2300 2301 2302 #=============================================================================== 2303 # write_born_nhel_file 2304 #=============================================================================== 2305 #test written
2306 - def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
2307 """Write the born_nhel.inc file for MG4.""" 2308 2309 ncomb = matrix_element.get_helicity_combinations() 2310 file = " integer max_bhel, max_bcol \n" 2311 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \ 2312 (ncomb, nflows) 2313 2314 # Write the file 2315 writer.writelines(file) 2316 2317 return True
2318 2319 #=============================================================================== 2320 # write_fks_info_file 2321 #===============================================================================
2322 - def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
2323 """Writes the content of nFKSconfigs.inc, which just gives the 2324 total FKS dirs as a parameter. 2325 nFKSconfigs is always >=1 (use a fake configuration for LOonly)""" 2326 replace_dict = {} 2327 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1) 2328 content = \ 2329 """ INTEGER FKS_CONFIGS 2330 PARAMETER (FKS_CONFIGS=%(nconfs)d) 2331 2332 """ % replace_dict 2333 2334 writer.writelines(content)
2335 2336 2337 #=============================================================================== 2338 # write_fks_info_file 2339 #===============================================================================
2340 - def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
2341 """Writes the content of fks_info.inc, which lists the informations on the 2342 possible splittings of the born ME. 2343 nconfs is always >=1 (use a fake configuration for LOonly). 2344 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and 2345 the last colored particle as j_fks.""" 2346 2347 replace_dict = {} 2348 fks_info_list = fksborn.get_fks_info_list() 2349 replace_dict['nconfs'] = max(len(fks_info_list), 1) 2350 2351 # this is for processes with 'real' or 'all' as NLO mode 2352 if len(fks_info_list) > 0: 2353 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \ 2354 for info in fks_info_list]) 2355 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \ 2356 for info in fks_info_list]) 2357 2358 col_lines = [] 2359 pdg_lines = [] 2360 charge_lines = [] 2361 fks_j_from_i_lines = [] 2362 for i, info in enumerate(fks_info_list): 2363 col_lines.append( \ 2364 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2365 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) )) 2366 pdg_lines.append( \ 2367 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2368 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs']))) 2369 charge_lines.append(\ 2370 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\ 2371 % (i + 1, ', '.join('%19.15fd0' % charg\ 2372 for charg in fksborn.real_processes[info['n_me']-1].charges) )) 2373 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\ 2374 i + 1)) 2375 else: 2376 # this is for 'LOonly', generate a fake FKS configuration with 2377 # - i_fks = nexternal, pdg type = -21 and color =8 2378 # - j_fks = the last colored particle 2379 bornproc = fksborn.born_matrix_element.get('processes')[0] 2380 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21] 2381 colors = [l.get('color') for l in bornproc.get('legs')] + [8] 2382 charges = [0.] 
* len(colors) 2383 2384 fks_i = len(colors) 2385 # use the last colored particle if it exists, or 2386 # just the last 2387 fks_j=1 2388 for cpos, col in enumerate(colors[:-1]): 2389 if col != 1: 2390 fks_j = cpos+1 2391 2392 fks_i_values = str(fks_i) 2393 fks_j_values = str(fks_j) 2394 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2395 % ', '.join([str(col) for col in colors])] 2396 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2397 % ', '.join([str(pdg) for pdg in pdgs])] 2398 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2399 % ', '.join('%19.15fd0' % charg for charg in charges)] 2400 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \ 2401 % (fks_i, fks_j)] 2402 2403 2404 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values 2405 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values 2406 replace_dict['col_lines'] = '\n'.join(col_lines) 2407 replace_dict['pdg_lines'] = '\n'.join(pdg_lines) 2408 replace_dict['charge_lines'] = '\n'.join(charge_lines) 2409 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines) 2410 2411 content = \ 2412 """ INTEGER IPOS, JPOS 2413 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d) 2414 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL) 2415 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL) 2416 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL) 2417 2418 %(fks_i_line)s 2419 %(fks_j_line)s 2420 2421 %(fks_j_from_i_lines)s 2422 2423 C 2424 C Particle type: 2425 C octet = 8, triplet = 3, singlet = 1 2426 %(col_lines)s 2427 2428 C 2429 C Particle type according to PDG: 2430 C 2431 %(pdg_lines)s 2432 2433 C 2434 C Particle charge: 2435 C charge is set 0. with QCD corrections, which is irrelevant 2436 %(charge_lines)s 2437 """ % replace_dict 2438 if not isinstance(writer, writers.FortranWriter): 2439 raise writers.FortranWriter.FortranWriterError(\ 2440 "writer not FortranWriter") 2441 # Set lowercase/uppercase Fortran code 2442 writers.FortranWriter.downcase = False 2443 2444 writer.writelines(content) 2445 2446 return True
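A minimal sketch of the LOonly branch above, for a hypothetical born whose legs carry colors [3, -3, 1] (a quark pair plus a color singlet): the appended antigluon becomes i_fks and the last colored born leg becomes j_fks.

    colors = [3, -3, 1] + [8]          # born leg colors plus the fake antigluon (octet)
    fks_i = len(colors)                # i_fks is the appended leg          -> 4
    fks_j = 1
    for cpos, col in enumerate(colors[:-1]):
        if col != 1:
            fks_j = cpos + 1           # keep the last colored born leg     -> 2
    print(fks_i, fks_j)                # 4 2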
2447 2448 2449 #=============================================================================== 2450 # write_matrix_element_fks 2451 #=============================================================================== 2452 #test written
2453 - def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
2454 """Export a matrix element to a matrix.f file in MG4 madevent format""" 2455 2456 if not matrix_element.get('processes') or \ 2457 not matrix_element.get('diagrams'): 2458 return 0,0 2459 2460 if not isinstance(writer, writers.FortranWriter): 2461 raise writers.FortranWriter.FortranWriterError(\ 2462 "writer not FortranWriter") 2463 # Set lowercase/uppercase Fortran code 2464 writers.FortranWriter.downcase = False 2465 2466 replace_dict = {} 2467 replace_dict['N_me'] = n 2468 2469 # Extract version number and date from VERSION file 2470 info_lines = self.get_mg5_info_lines() 2471 replace_dict['info_lines'] = info_lines 2472 2473 # Extract process info lines 2474 process_lines = self.get_process_info_lines(matrix_element) 2475 replace_dict['process_lines'] = process_lines 2476 2477 # Extract ncomb 2478 ncomb = matrix_element.get_helicity_combinations() 2479 replace_dict['ncomb'] = ncomb 2480 2481 # Extract helicity lines 2482 helicity_lines = self.get_helicity_lines(matrix_element) 2483 replace_dict['helicity_lines'] = helicity_lines 2484 2485 # Extract IC line 2486 ic_line = self.get_ic_line(matrix_element) 2487 replace_dict['ic_line'] = ic_line 2488 2489 # Extract overall denominator 2490 # Averaging initial state color, spin, and identical FS particles 2491 den_factor_line = self.get_den_factor_line(matrix_element) 2492 replace_dict['den_factor_line'] = den_factor_line 2493 2494 # Extract ngraphs 2495 ngraphs = matrix_element.get_number_of_amplitudes() 2496 replace_dict['ngraphs'] = ngraphs 2497 2498 # Extract ncolor 2499 ncolor = max(1, len(matrix_element.get('color_basis'))) 2500 replace_dict['ncolor'] = ncolor 2501 2502 # Extract color data lines 2503 color_data_lines = self.get_color_data_lines(matrix_element) 2504 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2505 2506 # Extract helas calls 2507 helas_calls = fortran_model.get_matrix_element_calls(\ 2508 matrix_element) 2509 replace_dict['helas_calls'] = "\n".join(helas_calls) 2510 2511 # Extract nwavefuncs (important to place after get_matrix_element_calls 2512 # so that 'me_id' is set) 2513 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2514 replace_dict['nwavefuncs'] = nwavefuncs 2515 2516 # Extract amp2 lines 2517 amp2_lines = self.get_amp2_lines(matrix_element) 2518 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2519 2520 # Set the size of Wavefunction 2521 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2522 replace_dict['wavefunctionsize'] = 20 2523 else: 2524 replace_dict['wavefunctionsize'] = 8 2525 2526 # Extract JAMP lines 2527 jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element) 2528 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2529 replace_dict['nb_temp_jamp'] = nb_tmp_jamp 2530 2531 realfile = open(os.path.join(_file_path, \ 2532 'iolibs/template_files/realmatrix_fks.inc')).read() 2533 2534 realfile = realfile % replace_dict 2535 2536 # Write the file 2537 writer.writelines(realfile) 2538 2539 return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2540 2541 2542 #=============================================================================== 2543 # write_pdf_file 2544 #===============================================================================
2545 - def write_pdf_file(self, writer, matrix_element, n, fortran_model):
2546 #test written 2547 """Write the auto_dsig.f file for MadFKS, which contains 2548 pdf call information""" 2549 2550 if not matrix_element.get('processes') or \ 2551 not matrix_element.get('diagrams'): 2552 return 0 2553 2554 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2555 2556 if ninitial < 1 or ninitial > 2: 2557 raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""") 2558 2559 replace_dict = {} 2560 2561 replace_dict['N_me'] = n 2562 2563 # Extract version number and date from VERSION file 2564 info_lines = self.get_mg5_info_lines() 2565 replace_dict['info_lines'] = info_lines 2566 2567 # Extract process info lines 2568 process_lines = self.get_process_info_lines(matrix_element) 2569 replace_dict['process_lines'] = process_lines 2570 2571 pdf_vars, pdf_data, pdf_lines = \ 2572 self.get_pdf_lines_mir(matrix_element, ninitial, False, False) 2573 replace_dict['pdf_vars'] = pdf_vars 2574 replace_dict['pdf_data'] = pdf_data 2575 replace_dict['pdf_lines'] = pdf_lines 2576 2577 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \ 2578 self.get_pdf_lines_mir(matrix_element, ninitial, False, True) 2579 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr 2580 2581 file = open(os.path.join(_file_path, \ 2582 'iolibs/template_files/parton_lum_n_fks.inc')).read() 2583 file = file % replace_dict 2584 2585 # Write the file 2586 writer.writelines(file)
2587 2588 2589 2590 #=============================================================================== 2591 # write_coloramps_file 2592 #=============================================================================== 2593 #test written
2594 - def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
2595 """Write the coloramps.inc file for MadEvent""" 2596 2597 lines = [] 2598 lines.append( "logical icolamp(%d,%d,1)" % \ 2599 (max(len(list(matrix_element.get('color_basis').keys())), 1), 2600 len(mapconfigs))) 2601 2602 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1) 2603 2604 # Write the file 2605 writer.writelines(lines) 2606 2607 return True
2608 2609 2610 #=============================================================================== 2611 # write_leshouche_file 2612 #=============================================================================== 2613 #test written
2614 - def write_leshouche_file(self, writer, matrix_element, fortran_model):
2615 """Write the leshouche.inc file for MG4""" 2616 2617 # Extract number of external particles 2618 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2619 2620 lines = [] 2621 for iproc, proc in enumerate(matrix_element.get('processes')): 2622 legs = proc.get_legs_with_decays() 2623 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \ 2624 (iproc + 1, nexternal, 2625 ",".join([str(l.get('id')) for l in legs]))) 2626 for i in [1, 2]: 2627 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2628 (i, iproc + 1, nexternal, 2629 ",".join([ "%3r" % 0 ] * ninitial + \ 2630 [ "%3r" % i ] * (nexternal - ninitial)))) 2631 2632 # Here goes the color connections corresponding to the JAMPs 2633 # Only one output, for the first subproc! 2634 if iproc == 0: 2635 # If no color basis, just output trivial color flow 2636 if not matrix_element.get('color_basis'): 2637 for i in [1, 2]: 2638 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \ 2639 (i, nexternal, 2640 ",".join([ "%3r" % 0 ] * nexternal))) 2641 color_flow_list = [] 2642 2643 else: 2644 # First build a color representation dictionnary 2645 repr_dict = {} 2646 for l in legs: 2647 repr_dict[l.get('number')] = \ 2648 proc.get('model').get_particle(l.get('id')).get_color()\ 2649 * (-1)**(1+l.get('state')) 2650 # Get the list of color flows 2651 color_flow_list = \ 2652 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2653 ninitial) 2654 # And output them properly 2655 for cf_i, color_flow_dict in enumerate(color_flow_list): 2656 for i in [0, 1]: 2657 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2658 (i + 1, cf_i + 1, nexternal, 2659 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2660 for l in legs]))) 2661 2662 # Write the file 2663 writer.writelines(lines) 2664 2665 return len(color_flow_list)
2666 2667 2668 #=============================================================================== 2669 # write_configs_file 2670 #=============================================================================== 2671 #test_written
2672 - def write_configs_file(self, writer, matrix_element, fortran_model):
2673 """Write the configs.inc file for MadEvent""" 2674 2675 # Extract number of external particles 2676 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2677 lines = [] 2678 2679 iconfig = 0 2680 2681 s_and_t_channels = [] 2682 mapconfigs = [] 2683 2684 model = matrix_element.get('processes')[0].get('model') 2685 # new_pdg = model.get_first_non_pdg() 2686 2687 base_diagrams = matrix_element.get('base_amplitude').get('diagrams') 2688 model = matrix_element.get('base_amplitude').get('process').get('model') 2689 minvert = min([max([len(vert.get('legs')) for vert in \ 2690 diag.get('vertices')]) for diag in base_diagrams]) 2691 2692 for idiag, diag in enumerate(base_diagrams): 2693 if any([len(vert.get('legs')) > minvert for vert in 2694 diag.get('vertices')]): 2695 # Only 3-vertices allowed in configs.inc 2696 continue 2697 iconfig = iconfig + 1 2698 helas_diag = matrix_element.get('diagrams')[idiag] 2699 mapconfigs.append(helas_diag.get('number')) 2700 lines.append("# Diagram %d, Amplitude %d" % \ 2701 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number'])) 2702 # Correspondance between the config and the amplitudes 2703 lines.append("data mapconfig(%4d)/%4d/" % (iconfig, 2704 helas_diag.get('amplitudes')[0]['number'])) 2705 2706 # Need to reorganize the topology so that we start with all 2707 # final state external particles and work our way inwards 2708 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 2709 get_s_and_t_channels(ninitial, model, 990) 2710 2711 s_and_t_channels.append([schannels, tchannels]) 2712 2713 # Write out propagators for s-channel and t-channel vertices 2714 allchannels = schannels 2715 if len(tchannels) > 1: 2716 # Write out tchannels only if there are any non-trivial ones 2717 allchannels = schannels + tchannels 2718 2719 for vert in allchannels: 2720 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2721 last_leg = vert.get('legs')[-1] 2722 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \ 2723 (last_leg.get('number'), iconfig, len(daughters), 2724 ",".join(["%3d" % d for d in daughters]))) 2725 if vert in schannels: 2726 lines.append("data sprop(%4d,%4d)/%8d/" % \ 2727 (last_leg.get('number'), iconfig, 2728 last_leg.get('id'))) 2729 elif vert in tchannels[:-1]: 2730 lines.append("data tprid(%4d,%4d)/%8d/" % \ 2731 (last_leg.get('number'), iconfig, 2732 abs(last_leg.get('id')))) 2733 2734 # Write out number of configs 2735 lines.append("# Number of configs") 2736 lines.append("data mapconfig(0)/%4d/" % iconfig) 2737 2738 # Write the file 2739 writer.writelines(lines) 2740 2741 return iconfig, mapconfigs, s_and_t_channels
2742 2743 2744 #=============================================================================== 2745 # write_decayBW_file 2746 #=============================================================================== 2747 #test written
2748 - def write_decayBW_file(self, writer, s_and_t_channels):
2749 """Write the decayBW.inc file for MadEvent""" 2750 2751 lines = [] 2752 2753 booldict = {False: ".false.", True: ".false."} 2754 ####Changed by MZ 2011-11-23!!!! 2755 2756 for iconf, config in enumerate(s_and_t_channels): 2757 schannels = config[0] 2758 for vertex in schannels: 2759 # For the resulting leg, pick out whether it comes from 2760 # decay or not, as given by the from_group flag 2761 leg = vertex.get('legs')[-1] 2762 lines.append("data gForceBW(%d,%d)/%s/" % \ 2763 (leg.get('number'), iconf + 1, 2764 booldict[leg.get('from_group')])) 2765 2766 # Write the file 2767 writer.writelines(lines) 2768 2769 return True
2770 2771 2772 #=============================================================================== 2773 # write_dname_file 2774 #===============================================================================
2775 - def write_dname_file(self, writer, matrix_element, fortran_model):
2776 """Write the dname.mg file for MG4""" 2777 2778 line = "DIRNAME=P%s" % \ 2779 matrix_element.get('processes')[0].shell_string() 2780 2781 # Write the file 2782 writer.write(line + "\n") 2783 2784 return True
2785 2786 2787 #=============================================================================== 2788 # write_iproc_file 2789 #===============================================================================
2790 - def write_iproc_file(self, writer, me_number):
2791 """Write the iproc.dat file for MG4""" 2792 2793 line = "%d" % (me_number + 1) 2794 2795 # Write the file 2796 for line_to_write in writer.write_line(line): 2797 writer.write(line_to_write) 2798 return True
2799 2800 2801 #=============================================================================== 2802 # Helper functions 2803 #=============================================================================== 2804 2805 2806 #=============================================================================== 2807 # get_fks_j_from_i_lines 2808 #=============================================================================== 2809
2810 - def get_fks_j_from_i_lines(self, me, i = 0): #test written
2811 """generate the lines for fks.inc describing initializating the 2812 fks_j_from_i array""" 2813 lines = [] 2814 if not me.isfinite: 2815 for ii, js in me.fks_j_from_i.items(): 2816 if js: 2817 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2818 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js]))) 2819 else: 2820 lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2821 % (2, 1, 1, '1')) 2822 lines.append('') 2823 2824 return lines 2825 2826 2827 #=============================================================================== 2828 # get_leshouche_lines 2829 #===============================================================================
2830 - def get_leshouche_lines(self, matrix_element, ime):
2831 #test written 2832 """Write the leshouche.inc file for MG4""" 2833 2834 # Extract number of external particles 2835 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2836 2837 lines = [] 2838 for iproc, proc in enumerate(matrix_element.get('processes')): 2839 legs = proc.get_legs_with_decays() 2840 lines.append("I %4d %4d %s" % \ 2841 (ime, iproc + 1, 2842 " ".join([str(l.get('id')) for l in legs]))) 2843 for i in [1, 2]: 2844 lines.append("M %4d %4d %4d %s" % \ 2845 (ime, i, iproc + 1, 2846 " ".join([ "%3d" % 0 ] * ninitial + \ 2847 [ "%3d" % i ] * (nexternal - ninitial)))) 2848 2849 # Here goes the color connections corresponding to the JAMPs 2850 # Only one output, for the first subproc! 2851 if iproc == 0: 2852 # If no color basis, just output trivial color flow 2853 if not matrix_element.get('color_basis'): 2854 for i in [1, 2]: 2855 lines.append("C %4d %4d 1 %s" % \ 2856 (ime, i, 2857 " ".join([ "%3d" % 0 ] * nexternal))) 2858 color_flow_list = [] 2859 nflow = 1 2860 2861 else: 2862 # First build a color representation dictionnary 2863 repr_dict = {} 2864 for l in legs: 2865 repr_dict[l.get('number')] = \ 2866 proc.get('model').get_particle(l.get('id')).get_color()\ 2867 * (-1)**(1+l.get('state')) 2868 # Get the list of color flows 2869 color_flow_list = \ 2870 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2871 ninitial) 2872 # And output them properly 2873 for cf_i, color_flow_dict in enumerate(color_flow_list): 2874 for i in [0, 1]: 2875 lines.append("C %4d %4d %4d %s" % \ 2876 (ime, i + 1, cf_i + 1, 2877 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2878 for l in legs]))) 2879 2880 nflow = len(color_flow_list) 2881 2882 nproc = len(matrix_element.get('processes')) 2883 2884 return lines, nproc, nflow
2885 2886
2887 - def get_leshouche_lines_dummy(self, matrix_element, ime):
2888 #test written 2889 """As get_leshouche_lines, but for 'fake' real emission processes (LOonly 2890 In this case, write born color structure times ij -> i,j splitting) 2891 """ 2892 2893 bornproc = matrix_element.get('processes')[0] 2894 colors = [l.get('color') for l in bornproc.get('legs')] 2895 2896 fks_i = len(colors) 2897 # use the last colored particle if it exists, or 2898 # just the last 2899 fks_j=1 2900 for cpos, col in enumerate(colors): 2901 if col != 1: 2902 fks_j = cpos+1 2903 2904 # Extract number of external particles 2905 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2906 nexternal+=1 # remember, in this case matrix_element is born 2907 2908 lines = [] 2909 for iproc, proc in enumerate(matrix_element.get('processes')): 2910 # add the fake extra leg 2911 legs = proc.get_legs_with_decays() + \ 2912 [fks_common.FKSLeg({'id': -21, 2913 'number': nexternal, 2914 'state': True, 2915 'fks': 'i', 2916 'color': 8, 2917 'charge': 0., 2918 'massless': True, 2919 'spin': 3, 2920 'is_part': True, 2921 'self_antipart': True})] 2922 2923 lines.append("I %4d %4d %s" % \ 2924 (ime, iproc + 1, 2925 " ".join([str(l.get('id')) for l in legs]))) 2926 for i in [1, 2]: 2927 lines.append("M %4d %4d %4d %s" % \ 2928 (ime, i, iproc + 1, 2929 " ".join([ "%3d" % 0 ] * ninitial + \ 2930 [ "%3d" % i ] * (nexternal - ninitial)))) 2931 2932 # Here goes the color connections corresponding to the JAMPs 2933 # Only one output, for the first subproc! 2934 if iproc == 0: 2935 # If no color basis, just output trivial color flow 2936 if not matrix_element.get('color_basis'): 2937 for i in [1, 2]: 2938 lines.append("C %4d %4d 1 %s" % \ 2939 (ime, i, 2940 " ".join([ "%3d" % 0 ] * nexternal))) 2941 color_flow_list = [] 2942 nflow = 1 2943 2944 else: 2945 # in this case the last particle (-21) has two color indices 2946 # and it has to be emitted by j_fks 2947 # First build a color representation dictionnary 2948 repr_dict = {} 2949 for l in legs[:-1]: 2950 repr_dict[l.get('number')] = \ 2951 proc.get('model').get_particle(l.get('id')).get_color()\ 2952 * (-1)**(1+l.get('state')) 2953 # Get the list of color flows 2954 color_flow_list = \ 2955 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2956 ninitial) 2957 # And output them properly 2958 for cf_i, color_flow_dict in enumerate(color_flow_list): 2959 # we have to add the extra leg (-21), linked to the j_fks leg 2960 # first, find the maximum color label 2961 maxicol = max(sum(list(color_flow_dict.values()), [])) 2962 #then, replace the color labels 2963 if color_flow_dict[fks_j][0] == 0: 2964 anti = True 2965 icol_j = color_flow_dict[fks_j][1] 2966 else: 2967 anti = False 2968 icol_j = color_flow_dict[fks_j][0] 2969 2970 if anti: 2971 color_flow_dict[nexternal] = (maxicol + 1, color_flow_dict[fks_j][1]) 2972 color_flow_dict[fks_j][1] = maxicol + 1 2973 else: 2974 color_flow_dict[nexternal] = (color_flow_dict[fks_j][0], maxicol + 1) 2975 color_flow_dict[fks_j][0] = maxicol + 1 2976 2977 for i in [0, 1]: 2978 lines.append("C %4d %4d %4d %s" % \ 2979 (ime, i + 1, cf_i + 1, 2980 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2981 for l in legs]))) 2982 2983 nflow = len(color_flow_list) 2984 2985 nproc = len(matrix_element.get('processes')) 2986 2987 return lines, nproc, nflow
2988 2989 2990 #=============================================================================== 2991 # get_den_factor_lines 2992 #===============================================================================
2993 - def get_den_factor_lines(self, fks_born):
2994 """returns the lines with the information on the denominator keeping care 2995 of the identical particle factors in the various real emissions""" 2996 2997 lines = [] 2998 info_list = fks_born.get_fks_info_list() 2999 if info_list: 3000 # if the reals have been generated, fill with the corresponding average factor 3001 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 3002 lines.append('DATA IDEN_VALUES /' + \ 3003 ', '.join(['%d' % ( 3004 fks_born.born_matrix_element.get_denominator_factor() ) \ 3005 for info in info_list]) + '/') 3006 else: 3007 # otherwise use the born 3008 lines.append('INTEGER IDEN_VALUES(1)') 3009 lines.append('DATA IDEN_VALUES / %d /' \ 3010 % fks_born.born_matrix_element.get_denominator_factor()) 3011 3012 return lines
3013 3014 3015 #=============================================================================== 3016 # get_ij_lines 3017 #===============================================================================
3018 - def get_ij_lines(self, fks_born):
3019 """returns the lines with the information on the particle number of the born 3020 that splits""" 3021 info_list = fks_born.get_fks_info_list() 3022 lines = [] 3023 if info_list: 3024 # if the reals have been generated, fill with the corresponding value of ij if 3025 # ij is massless, or with 0 if ij is massive (no collinear singularity) 3026 ij_list = [info['fks_info']['ij']if \ 3027 fks_born.born_matrix_element['processes'][0]['legs'][info['fks_info']['ij']-1]['massless'] \ 3028 else 0 for info in info_list] 3029 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 3030 lines.append('DATA IJ_VALUES /' + ', '.join(['%d' % ij for ij in ij_list]) + '/') 3031 else: 3032 #otherwise just put the first leg 3033 lines.append('INTEGER IJ_VALUES(1)') 3034 lines.append('DATA IJ_VALUES / 1 /') 3035 3036 return lines
3037 3038
3039 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\ 3040 mirror = False): #test written
3041 """Generate the PDF lines for the auto_dsig.f file""" 3042 3043 processes = matrix_element.get('processes') 3044 model = processes[0].get('model') 3045 3046 pdf_definition_lines = "" 3047 pdf_data_lines = "" 3048 pdf_lines = "" 3049 3050 if ninitial == 1: 3051 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 3052 for i, proc in enumerate(processes): 3053 process_line = proc.base_string() 3054 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 3055 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n" 3056 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 3057 else: 3058 # Pick out all initial state particles for the two beams 3059 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 3060 p in processes]))), 3061 sorted(list(set([p.get_initial_pdg(2) for \ 3062 p in processes])))] 3063 3064 # Prepare all variable names 3065 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 3066 sum(initial_states,[])]) 3067 for key,val in pdf_codes.items(): 3068 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 3069 3070 # Set conversion from PDG code to number used in PDF calls 3071 pdgtopdf = {21: 0, 22: 7} 3072 # Fill in missing entries of pdgtopdf 3073 for pdg in sum(initial_states,[]): 3074 if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()): 3075 pdgtopdf[pdg] = pdg 3076 elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()): 3077 # If any particle has pdg code 7, we need to use something else 3078 pdgtopdf[pdg] = 6000000 + pdg 3079 3080 # Get PDF variable declarations for all initial states 3081 for i in [0,1]: 3082 pdf_definition_lines += "DOUBLE PRECISION " + \ 3083 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 3084 for pdg in \ 3085 initial_states[i]]) + \ 3086 "\n" 3087 3088 # Get PDF data lines for all initial states 3089 for i in [0,1]: 3090 pdf_data_lines += "DATA " + \ 3091 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 3092 for pdg in initial_states[i]]) + \ 3093 "/%d*1D0/" % len(initial_states[i]) + \ 3094 "\n" 3095 3096 # Get PDF values for the different initial states 3097 for i, init_states in enumerate(initial_states): 3098 if not mirror: 3099 ibeam = i + 1 3100 else: 3101 ibeam = 2 - i 3102 if subproc_group: 3103 pdf_lines = pdf_lines + \ 3104 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 3105 % (ibeam, ibeam) 3106 else: 3107 pdf_lines = pdf_lines + \ 3108 "IF (ABS(LPP(%d)) .GE. 
1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 3109 % (ibeam, ibeam) 3110 3111 for initial_state in init_states: 3112 if initial_state in list(pdf_codes.keys()): 3113 if subproc_group: 3114 if abs(pdgtopdf[initial_state]) <= 7: 3115 pdf_lines = pdf_lines + \ 3116 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 3117 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 3118 (pdf_codes[initial_state], 3119 i + 1, ibeam, pdgtopdf[initial_state], 3120 ibeam, ibeam) 3121 else: 3122 # setting other partons flavours outside quark, gluon, photon to be 0d0 3123 pdf_lines = pdf_lines + \ 3124 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 3125 "%s%d=0d0\n") % \ 3126 (pdf_codes[initial_state],i + 1) 3127 else: 3128 if abs(pdgtopdf[initial_state]) <= 7: 3129 pdf_lines = pdf_lines + \ 3130 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 3131 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 3132 (pdf_codes[initial_state], 3133 i + 1, ibeam, pdgtopdf[initial_state], 3134 ibeam, ibeam) 3135 else: 3136 # setting other partons flavours outside quark, gluon, photon to be 0d0 3137 pdf_lines = pdf_lines + \ 3138 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 3139 "%s%d=0d0\n") % \ 3140 (pdf_codes[initial_state],i + 1) 3141 3142 pdf_lines = pdf_lines + "ENDIF\n" 3143 3144 # Add up PDFs for the different initial state particles 3145 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 3146 for proc in processes: 3147 process_line = proc.base_string() 3148 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 3149 pdf_lines = pdf_lines + "\nPD(IPROC) = " 3150 for ibeam in [1, 2]: 3151 initial_state = proc.get_initial_pdg(ibeam) 3152 if initial_state in list(pdf_codes.keys()): 3153 pdf_lines = pdf_lines + "%s%d*" % \ 3154 (pdf_codes[initial_state], ibeam) 3155 else: 3156 pdf_lines = pdf_lines + "1d0*" 3157 # Remove last "*" from pdf_lines 3158 pdf_lines = pdf_lines[:-1] + "\n" 3159 3160 # Remove last line break from pdf_lines 3161 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1] 3162 3163 3164 #test written
3165 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
3166 """Return the color matrix definition lines for the given color_matrix. Split 3167 rows in chunks of size n.""" 3168 3169 if not color_matrix: 3170 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 3171 else: 3172 ret_list = [] 3173 my_cs = color.ColorString() 3174 for index, denominator in \ 3175 enumerate(color_matrix.get_line_denominators()): 3176 # First write the common denominator for this color matrix line 3177 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 3178 # Then write the numerators for the matrix elements 3179 num_list = color_matrix.get_line_numerators(index, denominator) 3180 for k in range(0, len(num_list), n): 3181 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 3182 (index + 1, k + 1, min(k + n, len(num_list)), 3183 ','.join(["%5r" % int(i) for i in num_list[k:k + n]]))) 3184 return ret_list
3185 3186 #=========================================================================== 3187 # write_maxamps_file 3188 #===========================================================================
3189 - def write_maxamps_file(self, writer, maxamps, maxflows, 3190 maxproc,maxsproc):
3191 """Write the maxamps.inc file for MG4.""" 3192 3193 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 3194 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 3195 (maxamps, maxflows) 3196 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 3197 (maxproc, maxsproc) 3198 3199 # Write the file 3200 writer.writelines(file) 3201 3202 return True
3203 3204 #=============================================================================== 3205 # write_ncombs_file 3206 #===============================================================================
3207 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
3208 #test written 3209 """Write the ncombs.inc file for MadEvent.""" 3210 3211 # Extract number of external particles 3212 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3213 3214 # ncomb (used for clustering) is 2^(nexternal+1) 3215 file = " integer n_max_cl\n" 3216 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1)) 3217 3218 # Write the file 3219 writer.writelines(file) 3220 3221 return True
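A small worked example of the sizing rule used above (the process is hypothetical; reading the +1 as room for the extra real-emission parton is an assumption, not stated in the code):

nexternal = 5                     # hypothetical 2 -> 3 born process
n_max_cl = 2 ** (nexternal + 1)   # 64; the +1 presumably covers the extra real-emission parton
# -> "parameter (n_max_cl=64)"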
3222 3223 #=========================================================================== 3224 # write_config_subproc_map_file 3225 #===========================================================================
3226 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3227 """Write a dummy config_subproc.inc file for MadEvent""" 3228 3229 lines = [] 3230 3231 for iconfig in range(len(s_and_t_channels)): 3232 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3233 (iconfig + 1)) 3234 3235 # Write the file 3236 writer.writelines(lines) 3237 3238 return True
3239 3240 #=========================================================================== 3241 # write_colors_file 3242 #===========================================================================
3243 - def write_colors_file(self, writer, matrix_element):
3244 """Write the get_color.f file for MadEvent, which returns color 3245 for all particles used in the matrix element.""" 3246 3247 try: 3248 matrix_elements=matrix_element.real_processes[0].matrix_element 3249 except IndexError: 3250 matrix_elements=[matrix_element.born_matrix_element] 3251 3252 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 3253 matrix_elements = [matrix_elements] 3254 3255 model = matrix_elements[0].get('processes')[0].get('model') 3256 3257 # We need the both particle and antiparticle wf_ids, since the identity 3258 # depends on the direction of the wf. 3259 # loop on the real emissions 3260 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3261 for wf in d.get('wavefunctions')],[]) \ 3262 for d in me.get('diagrams')],[]) \ 3263 for me in [real_proc.matrix_element]],[])\ 3264 for real_proc in matrix_element.real_processes],[])) 3265 # and also on the born 3266 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3267 for wf in d.get('wavefunctions')],[]) \ 3268 for d in matrix_element.born_matrix_element.get('diagrams')],[]))) 3269 3270 # loop on the real emissions 3271 leg_ids = set(sum([sum([sum([[l.get('id') for l in \ 3272 p.get_legs_with_decays()] for p in \ 3273 me.get('processes')], []) for me in \ 3274 [real_proc.matrix_element]], []) for real_proc in \ 3275 matrix_element.real_processes],[])) 3276 # and also on the born 3277 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \ 3278 p.get_legs_with_decays()] for p in \ 3279 matrix_element.born_matrix_element.get('processes')], []))) 3280 particle_ids = sorted(list(wf_ids.union(leg_ids))) 3281 3282 lines = """function get_color(ipdg) 3283 implicit none 3284 integer get_color, ipdg 3285 3286 if(ipdg.eq.%d)then 3287 get_color=%d 3288 return 3289 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3290 3291 for part_id in particle_ids[1:]: 3292 lines += """else if(ipdg.eq.%d)then 3293 get_color=%d 3294 return 3295 """ % (part_id, model.get_particle(part_id).get_color()) 3296 # Dummy particle for multiparticle vertices with pdg given by 3297 # first code not in the model 3298 lines += """else if(ipdg.eq.%d)then 3299 c This is dummy particle used in multiparticle vertices 3300 get_color=2 3301 return 3302 """ % model.get_first_non_pdg() 3303 lines += """else 3304 write(*,*)'Error: No color given for pdg ',ipdg 3305 get_color=0 3306 return 3307 endif 3308 end 3309 """ 3310 3311 # Write the file 3312 writer.writelines(lines) 3313 3314 return True
3315 3316 #=============================================================================== 3317 # write_props_file 3318 #=============================================================================== 3319 #test_written
3320 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3321 """Write the props.inc file for MadEvent. Needs input from 3322 write_configs_file. With respect to the parent routine, it has some 3323 more specific formats that allow the props.inc file to be read by the 3324 link program""" 3325 3326 lines = [] 3327 3328 particle_dict = matrix_element.get('processes')[0].get('model').\ 3329 get('particle_dict') 3330 3331 for iconf, configs in enumerate(s_and_t_channels): 3332 for vertex in configs[0] + configs[1][:-1]: 3333 leg = vertex.get('legs')[-1] 3334 if leg.get('id') not in particle_dict: 3335 # Fake propagator used in multiparticle vertices 3336 mass = 'zero' 3337 width = 'zero' 3338 pow_part = 0 3339 else: 3340 particle = particle_dict[leg.get('id')] 3341 # Get mass 3342 if particle.get('mass').lower() == 'zero': 3343 mass = particle.get('mass') 3344 else: 3345 mass = "abs(%s)" % particle.get('mass') 3346 # Get width 3347 if particle.get('width').lower() == 'zero': 3348 width = particle.get('width') 3349 else: 3350 width = "abs(%s)" % particle.get('width') 3351 3352 pow_part = 1 + int(particle.is_boson()) 3353 3354 lines.append("pmass(%3d,%4d) = %s" % \ 3355 (leg.get('number'), iconf + 1, mass)) 3356 lines.append("pwidth(%3d,%4d) = %s" % \ 3357 (leg.get('number'), iconf + 1, width)) 3358 lines.append("pow(%3d,%4d) = %d" % \ 3359 (leg.get('number'), iconf + 1, pow_part)) 3360 3361 # Write the file 3362 writer.writelines(lines) 3363 3364 return True
3365 3366 3367 #=========================================================================== 3368 # write_subproc 3369 #===========================================================================
3370 - def write_subproc(self, writer, subprocdir):
3371 """Append this subprocess to the subproc.mg file for MG4""" 3372 3373 # Write line to file 3374 writer.write(subprocdir + "\n") 3375 3376 return True
3377 3378 3379 3380 3381 3382 #================================================================================= 3383 # Class for using the optimized Loop process 3384 #=================================================================================
3385 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 3386 ProcessExporterFortranFKS):
3387 """Class to take care of exporting a set of matrix elements to 3388 Fortran (v4) format.""" 3389 3390 jamp_optim = True 3391
3392 - def finalize(self, *args, **opts):
3393 pass 3394 #export_v4.ProcessExporterFortranSA.finalize(self, *args, **opts) 3395 3396 #=============================================================================== 3397 # copy the Template in a new directory. 3398 #===============================================================================
3399 - def copy_fkstemplate(self):
3400 """create the directory run_name as a copy of the MadEvent 3401 Template, and clean the directory 3402 For now it is just the same as copy_v4template, but it will be modified 3403 """ 3404 mgme_dir = self.mgme_dir 3405 dir_path = self.dir_path 3406 clean =self.opt['clean'] 3407 3408 #First copy the full template tree if dir_path doesn't exit 3409 if not os.path.isdir(dir_path): 3410 if not mgme_dir: 3411 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.") 3412 logger.info('initialize a new directory: %s' % \ 3413 os.path.basename(dir_path)) 3414 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 3415 # misc.copytree since dir_path already exists 3416 misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'), 3417 dir_path) 3418 # Copy plot_card 3419 for card in ['plot_card']: 3420 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 3421 try: 3422 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 3423 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 3424 except IOError: 3425 logger.warning("Failed to copy " + card + ".dat to default") 3426 3427 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 3428 if not mgme_dir: 3429 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.") 3430 try: 3431 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 3432 except IOError: 3433 MG5_version = misc.get_pkg_info() 3434 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 3435 "5." + MG5_version['version']) 3436 3437 #Ensure that the Template is clean 3438 if clean: 3439 logger.info('remove old information in %s' % os.path.basename(dir_path)) 3440 if 'MADGRAPH_BASE' in os.environ: 3441 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 3442 '--web'], cwd=dir_path) 3443 else: 3444 try: 3445 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 3446 cwd=dir_path) 3447 except Exception as why: 3448 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 3449 % (os.path.basename(dir_path),why)) 3450 #Write version info 3451 MG_version = misc.get_pkg_info() 3452 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 3453 MG_version['version']) 3454 3455 # We must link the CutTools to the Library folder of the active Template 3456 self.link_CutTools(dir_path) 3457 # We must link the TIR to the Library folder of the active Template 3458 link_tir_libs=[] 3459 tir_libs=[] 3460 tir_include=[] 3461 for tir in self.all_tir: 3462 tir_dir="%s_dir"%tir 3463 libpath=getattr(self,tir_dir) 3464 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'), 3465 libpath,"lib%s.a"%tir,tir_name=tir) 3466 setattr(self,tir_dir,libpath) 3467 if libpath != "": 3468 if tir in ['pjfry','ninja','golem', 'samurai','collier']: 3469 # We should link dynamically when possible, so we use the original 3470 # location of these libraries. 3471 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir)) 3472 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir)) 3473 # For Ninja, we must also link against OneLoop. 3474 if tir in ['ninja']: 3475 if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext)) 3476 for ext in ['a','dylib','so']): 3477 raise MadGraph5Error( 3478 "The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. 
Please place a symlink to it there."%libpath) 3479 link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo')) 3480 tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo')) 3481 # We must add the corresponding includes for these TIR 3482 if tir in ['golem','samurai','ninja','collier']: 3483 trg_path = pjoin(os.path.dirname(libpath),'include') 3484 if os.path.isdir(trg_path): 3485 to_include = misc.find_includes_path(trg_path, 3486 self.include_names[tir]) 3487 else: 3488 to_include = None 3489 # Special possible location for collier 3490 if to_include is None and tir=='collier': 3491 to_include = misc.find_includes_path( 3492 pjoin(libpath,'modules'),self.include_names[tir]) 3493 if to_include is None: 3494 logger.error( 3495 'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+ 3496 'Generation carries on but you will need to edit the include path by hand in the makefiles.') 3497 to_include = '<Not_found_define_it_yourself>' 3498 tir_include.append('-I %s'%to_include) 3499 else: 3500 link_tir_libs.append('-l%s'%tir) 3501 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir) 3502 3503 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 3504 cwd = os.getcwd() 3505 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3506 try: 3507 os.chdir(dirpath) 3508 except os.error: 3509 logger.error('Could not cd to directory %s' % dirpath) 3510 return 0 3511 filename = 'makefile_loop' 3512 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 3513 link_tir_libs,tir_libs,tir_include=tir_include) 3514 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 3515 dirpath = os.path.join(self.dir_path, 'Source') 3516 try: 3517 os.chdir(dirpath) 3518 except os.error: 3519 logger.error('Could not cd to directory %s' % dirpath) 3520 return 0 3521 filename = 'make_opts' 3522 calls = self.write_make_opts(writers.MakefileWriter(filename), 3523 link_tir_libs,tir_libs) 3524 # Return to original PWD 3525 os.chdir(cwd) 3526 3527 cwd = os.getcwd() 3528 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3529 try: 3530 os.chdir(dirpath) 3531 except os.error: 3532 logger.error('Could not cd to directory %s' % dirpath) 3533 return 0 3534 3535 # We add here the user-friendly MadLoop option setter. 
3536 cpfiles= ["SubProcesses/MadLoopParamReader.f", 3537 "Cards/MadLoopParams.dat", 3538 "SubProcesses/MadLoopParams.inc"] 3539 3540 for file in cpfiles: 3541 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 3542 os.path.join(self.dir_path, file)) 3543 3544 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'), 3545 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat')) 3546 3547 3548 3549 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 3550 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 3551 'Cards', 'MadLoopParams.dat')) 3552 # write the output file 3553 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 3554 "MadLoopParams.dat")) 3555 3556 # We need minimal editing of MadLoopCommons.f 3557 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 3558 "SubProcesses","MadLoopCommons.inc")).read() 3559 writer = writers.FortranWriter(os.path.join(self.dir_path, 3560 "SubProcesses","MadLoopCommons.f")) 3561 writer.writelines(MadLoopCommon%{ 3562 'print_banner_commands':self.MadLoop_banner}, 3563 context={'collier_available':self.tir_available_dict['collier']}) 3564 writer.close() 3565 3566 # link the files from the MODEL 3567 model_path = self.dir_path + '/Source/MODEL/' 3568 # Note that for the [real=] mode, these files are not present 3569 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')): 3570 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses') 3571 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')): 3572 ln(model_path + '/mp_coupl_same_name.inc', \ 3573 self.dir_path + '/SubProcesses') 3574 3575 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 3576 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 3577 writers.FortranWriter('cts_mpc.h'),) 3578 3579 self.copy_python_files() 3580 3581 3582 # We need to create the correct open_data for the pdf 3583 self.write_pdf_opendata() 3584 3585 3586 # Return to original PWD 3587 os.chdir(cwd)
3588
3589 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
3590 """writes the V**** directory inside the P**** directories specified in 3591 dir_name""" 3592 3593 cwd = os.getcwd() 3594 3595 matrix_element = loop_matrix_element 3596 3597 # Create the MadLoop5_resources directory if not already existing 3598 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 3599 try: 3600 os.mkdir(dirpath) 3601 except os.error as error: 3602 logger.warning(error.strerror + " " + dirpath) 3603 3604 # Create the directory PN_xx_xxxxx in the specified path 3605 name = "V%s" % matrix_element.get('processes')[0].shell_string() 3606 dirpath = os.path.join(dir_name, name) 3607 3608 try: 3609 os.mkdir(dirpath) 3610 except os.error as error: 3611 logger.warning(error.strerror + " " + dirpath) 3612 3613 try: 3614 os.chdir(dirpath) 3615 except os.error: 3616 logger.error('Could not cd to directory %s' % dirpath) 3617 return 0 3618 3619 logger.info('Creating files in directory %s' % name) 3620 3621 # Extract number of external particles 3622 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3623 3624 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 3625 3626 # We need a link to coefs.inc from DHELAS 3627 ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'), 3628 abspath=False, cwd=None) 3629 3630 # The born matrix element, if needed 3631 filename = 'born_matrix.f' 3632 calls = self.write_bornmatrix( 3633 writers.FortranWriter(filename), 3634 matrix_element, 3635 fortran_model) 3636 3637 filename = 'nexternal.inc' 3638 self.write_nexternal_file(writers.FortranWriter(filename), 3639 nexternal, ninitial) 3640 3641 filename = 'pmass.inc' 3642 self.write_pmass_file(writers.FortranWriter(filename), 3643 matrix_element) 3644 3645 filename = 'ngraphs.inc' 3646 self.write_ngraphs_file(writers.FortranWriter(filename), 3647 len(matrix_element.get_all_amplitudes())) 3648 3649 filename = "loop_matrix.ps" 3650 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""") 3651 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 3652 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 3653 filename, 3654 model=matrix_element.get('processes')[0].get('model'), 3655 amplitude='') 3656 logger.info("Drawing loop Feynman diagrams for " + \ 3657 matrix_element.get('processes')[0].nice_string(\ 3658 print_weighted=False)) 3659 plot.draw() 3660 3661 filename = "born_matrix.ps" 3662 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3663 get('born_diagrams'), 3664 filename, 3665 model=matrix_element.get('processes')[0].\ 3666 get('model'), 3667 amplitude='') 3668 logger.info("Generating born Feynman diagrams for " + \ 3669 matrix_element.get('processes')[0].nice_string(\ 3670 print_weighted=False)) 3671 plot.draw() 3672 3673 # We also need to write the overall maximum quantities for this group 3674 # of processes in 'global_specs.inc'. 
In aMCatNLO, there is always 3675 # only one process, so this is trivial 3676 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc')) 3677 3678 open('unique_id.inc','w').write( 3679 """ integer UNIQUE_ID 3680 parameter(UNIQUE_ID=1)""") 3681 3682 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 3683 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 3684 'MadLoopParams.inc','MadLoopCommons.f'] 3685 3686 for file in linkfiles: 3687 ln('../../%s' % file) 3688 3689 os.system("ln -s ../../makefile_loop makefile") 3690 3691 # We should move to MadLoop5_resources directory from the SubProcesses 3692 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 3693 pjoin('..','MadLoop5_resources')) 3694 3695 linkfiles = ['mpmodule.mod'] 3696 3697 for file in linkfiles: 3698 ln('../../../lib/%s' % file) 3699 3700 linkfiles = ['coef_specs.inc'] 3701 3702 for file in linkfiles: 3703 ln('../../../Source/DHELAS/%s' % file) 3704 3705 # Return to original PWD 3706 os.chdir(cwd) 3707 3708 if not calls: 3709 calls = 0 3710 return calls
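Schematically, for a hypothetical process whose shell_string() is '0_epem_ttx', the directory created by this method and the files it is populated with (restricted to what is written or linked above) look like:

name = "V%s" % "0_epem_ttx"   # hypothetical shell_string()
# -> P*/V0_epem_ttx/ containing born_matrix.f, nexternal.inc, pmass.inc,
#    ngraphs.inc, loop_matrix.ps, born_matrix.ps, global_specs.inc, unique_id.inc,
#    the coupl/MadLoop include links, "makefile -> ../../makefile_loop", and the
#    mpmodule.mod / coef_specs.inc links, with MadLoop5_resources/ created
#    alongside it in the parent P* directory.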
3711 3712 3713 #=============================================================================== 3714 # write_coef_specs 3715 #===============================================================================
3716 - def write_coef_specs_file(self, max_loop_vertex_ranks):
3717 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 3718 non-optimized mode""" 3719 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 3720 3721 replace_dict = {} 3722 replace_dict['max_lwf_size'] = 4 3723 replace_dict['vertex_max_coefs'] = max(\ 3724 [q_polynomial.get_number_of_coefs_for_rank(n) 3725 for n in max_loop_vertex_ranks]) 3726 IncWriter=writers.FortranWriter(filename,'w') 3727 IncWriter.writelines("""INTEGER MAXLWFSIZE 3728 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 3729 INTEGER VERTEXMAXCOEFS 3730 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 3731 % replace_dict) 3732 IncWriter.close()
3733