Package madgraph :: Package iolibs :: Module export_fks
[hide private]
[frames] | no frames]

Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from __future__ import absolute_import 
  18  from __future__ import print_function 
  19  import glob 
  20  import logging 
  21  import os 
  22  import re 
  23  import shutil 
  24  import subprocess 
  25  import string 
  26  import copy 
  27  import platform 
  28   
  29  import madgraph.core.color_algebra as color 
  30  import madgraph.core.helas_objects as helas_objects 
  31  import madgraph.core.base_objects as base_objects 
  32  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  33  import madgraph.fks.fks_base as fks 
  34  import madgraph.fks.fks_common as fks_common 
  35  import madgraph.iolibs.drawing_eps as draw 
  36  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  37  import madgraph.iolibs.files as files 
  38  import madgraph.various.misc as misc 
  39  import madgraph.iolibs.file_writers as writers 
  40  import madgraph.iolibs.template_files as template_files 
  41  import madgraph.iolibs.ufo_expression_parsers as parsers 
  42  import madgraph.iolibs.export_v4 as export_v4 
  43  import madgraph.loop.loop_exporters as loop_exporters 
  44  import madgraph.various.q_polynomial as q_polynomial 
  45  import madgraph.various.banner as banner_mod 
  46   
  47  import aloha.create_aloha as create_aloha 
  48   
  49  import models.write_param_card as write_param_card 
  50  import models.check_param_card as check_param_card 
  51  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  52  from madgraph.iolibs.files import cp, ln, mv 
  53  from six.moves import range 
  54   
  55  pjoin = os.path.join 
  56   
  57  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  58  logger = logging.getLogger('madgraph.export_fks') 
  59   
  60   
61 -def make_jpeg_async(args):
62 Pdir = args[0] 63 old_pos = args[1] 64 dir_path = args[2] 65 66 devnull = os.open(os.devnull, os.O_RDWR) 67 68 os.chdir(Pdir) 69 subprocess.call([os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 70 stdout = devnull) 71 os.chdir(os.path.pardir)
72 73 74 #================================================================================= 75 # Class for used of the (non-optimized) Loop process 76 #=================================================================================
77 -class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA):
78 """Class to take care of exporting a set of matrix elements to 79 Fortran (v4) format.""" 80 81 #=============================================================================== 82 # copy the Template in a new directory. 83 #===============================================================================
84 - def copy_fkstemplate(self):
85 """create the directory run_name as a copy of the MadEvent 86 Template, and clean the directory 87 For now it is just the same as copy_v4template, but it will be modified 88 """ 89 90 mgme_dir = self.mgme_dir 91 dir_path = self.dir_path 92 clean =self.opt['clean'] 93 94 #First copy the full template tree if dir_path doesn't exit 95 if not os.path.isdir(dir_path): 96 if not mgme_dir: 97 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.") 98 logger.info('initialize a new directory: %s' % \ 99 os.path.basename(dir_path)) 100 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 101 # misc.copytree since dir_path already exists 102 misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path) 103 # Copy plot_card 104 for card in ['plot_card']: 105 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 106 try: 107 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 108 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 109 except IOError: 110 logger.warning("Failed to move " + card + ".dat to default") 111 112 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 113 if not mgme_dir: 114 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.") 115 try: 116 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 117 except IOError: 118 MG5_version = misc.get_pkg_info() 119 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 120 "5." + MG5_version['version']) 121 122 #Ensure that the Template is clean 123 if clean: 124 logger.info('remove old information in %s' % os.path.basename(dir_path)) 125 if 'MADGRAPH_BASE' in os.environ: 126 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 127 '--web'],cwd=dir_path) 128 else: 129 try: 130 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 131 cwd=dir_path) 132 except Exception as why: 133 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 134 % (os.path.basename(dir_path),why)) 135 #Write version info 136 MG_version = misc.get_pkg_info() 137 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 138 MG_version['version']) 139 140 # We must link the CutTools to the Library folder of the active Template 141 self.link_CutTools(dir_path) 142 143 link_tir_libs=[] 144 tir_libs=[] 145 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 146 dirpath = os.path.join(self.dir_path, 'SubProcesses') 147 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop') 148 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 149 link_tir_libs,tir_libs) 150 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 151 filename = pjoin(self.dir_path, 'Source','make_opts') 152 calls = self.write_make_opts(writers.MakefileWriter(filename), 153 link_tir_libs,tir_libs) 154 155 # Duplicate run_card and FO_analyse_card 156 for card in ['FO_analyse_card', 'shower_card']: 157 try: 158 shutil.copy(pjoin(self.dir_path, 'Cards', 159 card + '.dat'), 160 pjoin(self.dir_path, 'Cards', 161 card + '_default.dat')) 162 except IOError: 163 logger.warning("Failed to copy " + card + ".dat to default") 164 165 cwd = os.getcwd() 166 dirpath = os.path.join(self.dir_path, 'SubProcesses') 167 try: 168 os.chdir(dirpath) 169 except os.error: 170 logger.error('Could not cd to directory %s' % dirpath) 171 return 0 172 173 # We add here the user-friendly MadLoop option setter. 
174 cpfiles= ["SubProcesses/MadLoopParamReader.f", 175 "Cards/MadLoopParams.dat", 176 "SubProcesses/MadLoopParams.inc"] 177 178 for file in cpfiles: 179 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 180 os.path.join(self.dir_path, file)) 181 182 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'), 183 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat')) 184 185 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 186 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 187 'Cards', 'MadLoopParams.dat')) 188 # write the output file 189 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 190 "MadLoopParams.dat")) 191 192 # We need minimal editing of MadLoopCommons.f 193 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 194 "SubProcesses","MadLoopCommons.inc")).read() 195 writer = writers.FortranWriter(os.path.join(self.dir_path, 196 "SubProcesses","MadLoopCommons.f")) 197 writer.writelines(MadLoopCommon%{ 198 'print_banner_commands':self.MadLoop_banner}, 199 context={'collier_available':False}) 200 writer.close() 201 202 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 203 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 204 writers.FortranWriter('cts_mpc.h')) 205 206 207 # Finally make sure to turn off MC over Hel for the default mode. 208 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat') 209 FKS_card_file = open(FKS_card_path,'r') 210 FKS_card = FKS_card_file.read() 211 FKS_card_file.close() 212 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+", 213 "#NHelForMCoverHels\n-1", FKS_card) 214 FKS_card_file = open(FKS_card_path,'w') 215 FKS_card_file.write(FKS_card) 216 FKS_card_file.close() 217 218 # Return to original PWD 219 os.chdir(cwd) 220 # Copy the different python files in the Template 221 self.copy_python_files() 222 223 # We need to create the correct open_data for the pdf 224 self.write_pdf_opendata()
225 226 # I put it here not in optimized one, because I want to use the same makefile_loop.inc 227 # Also, we overload this function (i.e. it is already defined in 228 # LoopProcessExporterFortranSA) because the path of the template makefile 229 # is different.
230 - def write_makefile_TIR(self, writer, link_tir_libs,tir_libs,tir_include=[]):
231 """ Create the file makefile_loop which links to the TIR libraries.""" 232 233 file = open(os.path.join(self.mgme_dir,'Template','NLO', 234 'SubProcesses','makefile_loop.inc')).read() 235 replace_dict={} 236 replace_dict['link_tir_libs']=' '.join(link_tir_libs) 237 replace_dict['tir_libs']=' '.join(tir_libs) 238 replace_dict['dotf']='%.f' 239 replace_dict['doto']='%.o' 240 replace_dict['tir_include']=' '.join(tir_include) 241 file=file%replace_dict 242 if writer: 243 writer.writelines(file) 244 else: 245 return file
246 247 # I put it here not in optimized one, because I want to use the same make_opts.inc
248 - def write_make_opts(self, writer, link_tir_libs,tir_libs):
249 """ Create the file make_opts which links to the TIR libraries.""" 250 file = open(os.path.join(self.mgme_dir,'Template','NLO', 251 'Source','make_opts.inc')).read() 252 replace_dict={} 253 replace_dict['link_tir_libs']=' '.join(link_tir_libs) 254 replace_dict['tir_libs']=' '.join(tir_libs) 255 replace_dict['dotf']='%.f' 256 replace_dict['doto']='%.o' 257 file=file%replace_dict 258 if writer: 259 writer.writelines(file) 260 else: 261 return file
262 263 #=========================================================================== 264 # copy_python_files 265 #===========================================================================
266 - def copy_python_files(self):
267 """copy python files required for the Template""" 268 269 files_to_copy = [ \ 270 pjoin('interface','amcatnlo_run_interface.py'), 271 pjoin('interface','extended_cmd.py'), 272 pjoin('interface','common_run_interface.py'), 273 pjoin('interface','coloring_logging.py'), 274 pjoin('various','misc.py'), 275 pjoin('various','shower_card.py'), 276 pjoin('various','FO_analyse_card.py'), 277 pjoin('various','histograms.py'), 278 pjoin('various','banner.py'), 279 pjoin('various','cluster.py'), 280 pjoin('various','systematics.py'), 281 pjoin('various','lhe_parser.py'), 282 pjoin('madevent','sum_html.py'), 283 pjoin('madevent','gen_crossxhtml.py'), 284 pjoin('iolibs','files.py'), 285 pjoin('iolibs','save_load_object.py'), 286 pjoin('iolibs','file_writers.py'), 287 pjoin('..','models','check_param_card.py'), 288 pjoin('__init__.py') 289 ] 290 cp(_file_path+'/interface/.mg5_logging.conf', 291 self.dir_path+'/bin/internal/me5_logging.conf') 292 293 for cp_file in files_to_copy: 294 cp(pjoin(_file_path,cp_file), 295 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file)))
296
297 - def convert_model(self, model, wanted_lorentz = [], 298 wanted_couplings = []):
299 300 super(ProcessExporterFortranFKS,self).convert_model(model, 301 wanted_lorentz, wanted_couplings) 302 303 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 304 try: 305 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 306 except OSError as error: 307 pass 308 model_path = model.get('modelpath') 309 shutil.copytree(model_path, 310 pjoin(self.dir_path,'bin','internal','ufomodel'), 311 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 312 if hasattr(model, 'restrict_card'): 313 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 314 'restrict_default.dat') 315 if isinstance(model.restrict_card, check_param_card.ParamCard): 316 model.restrict_card.write(out_path) 317 else: 318 files.cp(model.restrict_card, out_path)
319 320 321 322 #=========================================================================== 323 # write_maxparticles_file 324 #===========================================================================
325 - def write_maxparticles_file(self, writer, maxparticles):
326 """Write the maxparticles.inc file for MadEvent""" 327 328 lines = "integer max_particles, max_branch\n" 329 lines += "parameter (max_particles=%d) \n" % maxparticles 330 lines += "parameter (max_branch=max_particles-1)" 331 332 # Write the file 333 writer.writelines(lines) 334 335 return True
336 337 338 #=========================================================================== 339 # write_maxconfigs_file 340 #===========================================================================
341 - def write_maxconfigs_file(self, writer, maxconfigs):
342 """Write the maxconfigs.inc file for MadEvent""" 343 344 lines = "integer lmaxconfigs\n" 345 lines += "parameter (lmaxconfigs=%d)" % maxconfigs 346 347 # Write the file 348 writer.writelines(lines) 349 350 return True
351 352 353 #=============================================================================== 354 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 355 #===============================================================================
356 - def write_procdef_mg5(self, file_pos, modelname, process_str):
357 """ write an equivalent of the MG4 proc_card in order that all the Madevent 358 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 359 360 proc_card_template = template_files.mg4_proc_card.mg4_template 361 process_template = template_files.mg4_proc_card.process_template 362 process_text = '' 363 coupling = '' 364 new_process_content = [] 365 366 # First find the coupling and suppress the coupling from process_str 367 #But first ensure that coupling are define whithout spaces: 368 process_str = process_str.replace(' =', '=') 369 process_str = process_str.replace('= ', '=') 370 process_str = process_str.replace(',',' , ') 371 #now loop on the element and treat all the coupling 372 for info in process_str.split(): 373 if '=' in info: 374 coupling += info + '\n' 375 else: 376 new_process_content.append(info) 377 # Recombine the process_str (which is the input process_str without coupling 378 #info) 379 process_str = ' '.join(new_process_content) 380 381 #format the SubProcess 382 process_text += process_template.substitute({'process': process_str, \ 383 'coupling': coupling}) 384 385 text = proc_card_template.substitute({'process': process_text, 386 'model': modelname, 387 'multiparticle':''}) 388 ff = open(file_pos, 'w') 389 ff.write(text) 390 ff.close()
391 392 393 #=============================================================================== 394 # write a initial states map, useful for the fast PDF NLO interface 395 #===============================================================================
396 - def write_init_map(self, file_pos, initial_states):
397 """ Write an initial state process map. Each possible PDF 398 combination gets an unique identifier.""" 399 400 text='' 401 for i,e in enumerate(initial_states): 402 text=text+str(i+1)+' '+str(len(e)) 403 for t in e: 404 if len(t) ==1: 405 t.append(0) 406 text=text+' ' 407 try: 408 for p in t: 409 if p == None : p = 0 410 text=text+' '+str(p) 411 except TypeError: 412 text=text+' '+str(t) 413 text=text+'\n' 414 415 ff = open(file_pos, 'w') 416 ff.write(text) 417 ff.close()
418
419 - def get_ME_identifier(self, matrix_element, *args, **opts):
420 """ A function returning a string uniquely identifying the matrix 421 element given in argument so that it can be used as a prefix to all 422 MadLoop5 subroutines and common blocks related to it. This allows 423 to compile several processes into one library as requested by the 424 BLHA (Binoth LesHouches Accord) guidelines. The MadFKS design 425 necessitates that there is no process prefix.""" 426 427 return ''
428 429 #=============================================================================== 430 # write_coef_specs 431 #===============================================================================
432 - def write_coef_specs_file(self, virt_me_list):
433 """writes the coef_specs.inc in the DHELAS folder. Should not be called in the 434 non-optimized mode""" 435 raise fks_common.FKSProcessError()("write_coef_specs should be called only in the loop-optimized mode")
436 437 438 #=============================================================================== 439 # generate_directories_fks 440 #===============================================================================
441 - def generate_directories_fks(self, matrix_element, fortran_model, me_number, 442 me_ntot, path=os.getcwd(),OLP='MadLoop'):
443 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 444 including the necessary matrix.f and various helper files""" 445 proc = matrix_element.born_matrix_element['processes'][0] 446 447 if not self.model: 448 self.model = matrix_element.get('processes')[0].get('model') 449 450 cwd = os.getcwd() 451 try: 452 os.chdir(path) 453 except OSError as error: 454 error_msg = "The directory %s should exist in order to be able " % path + \ 455 "to \"export\" in it. If you see this error message by " + \ 456 "typing the command \"export\" please consider to use " + \ 457 "instead the command \"output\". " 458 raise MadGraph5Error(error_msg) 459 460 calls = 0 461 462 self.fksdirs = [] 463 #first make and cd the direcrory corresponding to the born process: 464 borndir = "P%s" % \ 465 (matrix_element.get('processes')[0].shell_string()) 466 os.mkdir(borndir) 467 os.chdir(borndir) 468 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 469 470 ## write the files corresponding to the born process in the P* directory 471 self.generate_born_fks_files(matrix_element, 472 fortran_model, me_number, path) 473 474 # With NJET you want to generate the order file per subprocess and most 475 # likely also generate it for each subproc. 476 if OLP=='NJET': 477 filename = 'OLE_order.lh' 478 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP) 479 480 if matrix_element.virt_matrix_element: 481 calls += self.generate_virt_directory( \ 482 matrix_element.virt_matrix_element, \ 483 fortran_model, \ 484 os.path.join(path, borndir)) 485 486 #write the infortions for the different real emission processes 487 488 self.write_real_matrix_elements(matrix_element, fortran_model) 489 490 self.write_pdf_calls(matrix_element, fortran_model) 491 492 filename = 'nFKSconfigs.inc' 493 self.write_nfksconfigs_file(writers.FortranWriter(filename), 494 matrix_element, 495 fortran_model) 496 497 filename = 'iproc.dat' 498 self.write_iproc_file(writers.FortranWriter(filename), 499 me_number) 500 501 filename = 'fks_info.inc' 502 self.write_fks_info_file(writers.FortranWriter(filename), 503 matrix_element, 504 fortran_model) 505 506 filename = 'leshouche_info.dat' 507 nfksconfs,maxproc,maxflow,nexternal=\ 508 self.write_leshouche_info_file(filename,matrix_element) 509 510 # if no corrections are generated ([LOonly] mode), get 511 # these variables from the born 512 if nfksconfs == maxproc == maxflow == 0: 513 nfksconfs = 1 514 (dummylines, maxproc, maxflow) = self.get_leshouche_lines( 515 matrix_element.born_matrix_element, 1) 516 517 filename = 'leshouche_decl.inc' 518 self.write_leshouche_info_declarations( 519 writers.FortranWriter(filename), 520 nfksconfs,maxproc,maxflow,nexternal, 521 fortran_model) 522 filename = 'genps.inc' 523 ngraphs = matrix_element.born_matrix_element.get_number_of_amplitudes() 524 ncolor = max(1,len(matrix_element.born_matrix_element.get('color_basis'))) 525 self.write_genps(writers.FortranWriter(filename),maxproc,ngraphs,\ 526 ncolor,maxflow,fortran_model) 527 528 filename = 'configs_and_props_info.dat' 529 nconfigs,max_leg_number=self.write_configs_and_props_info_file( 530 filename, 531 matrix_element) 532 533 filename = 'configs_and_props_decl.inc' 534 self.write_configs_and_props_info_declarations( 535 writers.FortranWriter(filename), 536 nconfigs,max_leg_number,nfksconfs, 537 fortran_model) 538 539 filename = 'real_from_born_configs.inc' 540 self.write_real_from_born_configs( 541 writers.FortranWriter(filename), 542 
matrix_element, 543 fortran_model) 544 545 filename = 'ngraphs.inc' 546 self.write_ngraphs_file(writers.FortranWriter(filename), 547 nconfigs) 548 549 #write the wrappers 550 filename = 'real_me_chooser.f' 551 self.write_real_me_wrapper(writers.FortranWriter(filename), 552 matrix_element, 553 fortran_model) 554 555 filename = 'parton_lum_chooser.f' 556 self.write_pdf_wrapper(writers.FortranWriter(filename), 557 matrix_element, 558 fortran_model) 559 560 filename = 'get_color.f' 561 self.write_colors_file(writers.FortranWriter(filename), 562 matrix_element) 563 564 filename = 'nexternal.inc' 565 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 566 self.write_nexternal_file(writers.FortranWriter(filename), 567 nexternal, ninitial) 568 self.proc_characteristic['ninitial'] = ninitial 569 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 570 571 filename = 'pmass.inc' 572 try: 573 self.write_pmass_file(writers.FortranWriter(filename), 574 matrix_element.real_processes[0].matrix_element) 575 except IndexError: 576 self.write_pmass_file(writers.FortranWriter(filename), 577 matrix_element.born_matrix_element) 578 579 #draw the diagrams 580 self.draw_feynman_diagrams(matrix_element) 581 582 linkfiles = ['BinothLHADummy.f', 583 'check_poles.f', 584 'MCmasses_HERWIG6.inc', 585 'MCmasses_HERWIGPP.inc', 586 'MCmasses_PYTHIA6Q.inc', 587 'MCmasses_PYTHIA6PT.inc', 588 'MCmasses_PYTHIA8.inc', 589 'add_write_info.f', 590 'coupl.inc', 591 'cuts.f', 592 'FKS_params.dat', 593 'initial_states_map.dat', 594 'OLE_order.olc', 595 'FKSParams.inc', 596 'FKSParamReader.f', 597 'cuts.inc', 598 'unlops.inc', 599 'pythia_unlops.f', 600 'driver_mintMC.f', 601 'driver_mintFO.f', 602 'appl_interface.cc', 603 'appl_interface_dummy.f', 604 'appl_common.inc', 605 'reweight_appl.inc', 606 'fastjetfortran_madfks_core.cc', 607 'fastjetfortran_madfks_full.cc', 608 'fjcore.cc', 609 'fastjet_wrapper.f', 610 'fjcore.hh', 611 'fks_Sij.f', 612 'fks_powers.inc', 613 'fks_singular.f', 614 'veto_xsec.f', 615 'veto_xsec.inc', 616 'weight_lines.f', 617 'fks_inc_chooser.f', 618 'leshouche_inc_chooser.f', 619 'configs_and_props_inc_chooser.f', 620 'genps_fks.f', 621 'boostwdir2.f', 622 'madfks_mcatnlo.inc', 623 'open_output_files.f', 624 'open_output_files_dummy.f', 625 'HwU_dummy.f', 626 'madfks_plot.f', 627 'analysis_dummy.f', 628 'analysis_lhe.f', 629 'mint-integrator2.f', 630 'MC_integer.f', 631 'mint.inc', 632 'montecarlocounter.f', 633 'q_es.inc', 634 'recluster.cc', 635 'Boosts.h', 636 'reweight_xsec.f', 637 'reweight_xsec_events.f', 638 'reweight_xsec_events_pdf_dummy.f', 639 'iproc_map.f', 640 'run.inc', 641 'run_card.inc', 642 'setcuts.f', 643 'setscales.f', 644 'test_soft_col_limits.f', 645 'symmetry_fks_v3.f', 646 'vegas2.for', 647 'write_ajob.f', 648 'handling_lhe_events.f', 649 'write_event.f', 650 'fill_MC_mshell.f', 651 'maxparticles.inc', 652 'message.inc', 653 'initcluster.f', 654 'cluster.inc', 655 'cluster.f', 656 'reweight.f', 657 'randinit', 658 'sudakov.inc', 659 'maxconfigs.inc', 660 'timing_variables.inc'] 661 662 for file in linkfiles: 663 ln('../' + file , '.') 664 os.system("ln -s ../../Cards/param_card.dat .") 665 666 #copy the makefile 667 os.system("ln -s ../makefile_fks_dir ./makefile") 668 if matrix_element.virt_matrix_element: 669 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 670 elif OLP!='MadLoop': 671 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 672 else: 673 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 674 675 # Return to 
SubProcesses dir 676 os.chdir(os.path.pardir) 677 # Add subprocess to subproc.mg 678 filename = 'subproc.mg' 679 files.append_to_file(filename, 680 self.write_subproc, 681 borndir) 682 683 os.chdir(cwd) 684 # Generate info page 685 gen_infohtml.make_info_html_nlo(self.dir_path) 686 687 688 return calls
689 690 #=========================================================================== 691 # create the run_card 692 #===========================================================================
693 - def create_run_card(self, processes, history):
694 """ """ 695 696 run_card = banner_mod.RunCardNLO() 697 698 run_card.create_default_for_process(self.proc_characteristic, 699 history, 700 processes) 701 702 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 703 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
704 705
706 - def pass_information_from_cmd(self, cmd):
707 """pass information from the command interface to the exporter. 708 Please do not modify any object of the interface from the exporter. 709 """ 710 self.proc_defs = cmd._curr_proc_defs 711 if hasattr(cmd,'born_processes'): 712 self.born_processes = cmd.born_processes 713 else: 714 self.born_processes = [] 715 return
716
717 - def finalize(self, matrix_elements, history, mg5options, flaglist):
718 """Finalize FKS directory by creating jpeg diagrams, html 719 pages,proc_card_mg5.dat and madevent.tar.gz and create the MA5 card if 720 necessary.""" 721 722 devnull = os.open(os.devnull, os.O_RDWR) 723 try: 724 res = misc.call([mg5options['lhapdf'], '--version'], \ 725 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 726 except Exception: 727 res = 1 728 if res != 0: 729 logger.info('The value for lhapdf in the current configuration does not ' + \ 730 'correspond to a valid executable.\nPlease set it correctly either in ' + \ 731 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \ 732 'and regenrate the process. \nTo avoid regeneration, edit the ' + \ 733 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \ 734 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n') 735 736 compiler_dict = {'fortran': mg5options['fortran_compiler'], 737 'cpp': mg5options['cpp_compiler'], 738 'f2py': mg5options['f2py_compiler']} 739 740 if 'nojpeg' in flaglist: 741 makejpg = False 742 else: 743 makejpg = True 744 output_dependencies = mg5options['output_dependencies'] 745 746 747 self.proc_characteristic['grouped_matrix'] = False 748 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 749 750 self.create_proc_charac() 751 752 self.create_run_card(matrix_elements.get_processes(), history) 753 # modelname = self.model.get('name') 754 # if modelname == 'mssm' or modelname.startswith('mssm-'): 755 # param_card = os.path.join(self.dir_path, 'Cards','param_card.dat') 756 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 757 # check_param_card.convert_to_mg5card(param_card, mg5_param) 758 # check_param_card.check_valid_param_card(mg5_param) 759 760 # # write the model functions get_mass/width_from_id 761 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f') 762 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc') 763 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model) 764 765 # # Write maxconfigs.inc based on max of ME's/subprocess groups 766 767 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 768 self.write_maxconfigs_file(writers.FortranWriter(filename), 769 matrix_elements.get_max_configs()) 770 771 # # Write maxparticles.inc based on max of ME's/subprocess groups 772 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 773 self.write_maxparticles_file(writers.FortranWriter(filename), 774 matrix_elements.get_max_particles()) 775 776 # Touch "done" file 777 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 778 779 # Check for compiler 780 fcompiler_chosen = self.set_fortran_compiler(compiler_dict) 781 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 782 783 old_pos = os.getcwd() 784 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 785 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 786 proc[0] == 'P'] 787 788 devnull = os.open(os.devnull, os.O_RDWR) 789 # Convert the poscript in jpg files (if authorize) 790 if makejpg: 791 logger.info("Generate jpeg diagrams") 792 for Pdir in P_dir_list: 793 os.chdir(Pdir) 794 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 795 stdout = devnull) 796 os.chdir(os.path.pardir) 797 # 798 logger.info("Generate web pages") 799 # Create the WebPage using perl script 800 801 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 
'gen_cardhtml-pl')], \ 802 stdout = devnull) 803 804 os.chdir(os.path.pardir) 805 # 806 # obj = gen_infohtml.make_info_html(self.dir_path) 807 # [mv(name, './HTML/') for name in os.listdir('.') if \ 808 # (name.endswith('.html') or name.endswith('.jpg')) and \ 809 # name != 'index.html'] 810 # if online: 811 # nb_channel = obj.rep_rule['nb_gen_diag'] 812 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 813 814 # Write command history as proc_card_mg5 815 if os.path.isdir('Cards'): 816 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 817 history.write(output_file) 818 819 # Duplicate run_card and FO_analyse_card 820 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 821 try: 822 shutil.copy(pjoin(self.dir_path, 'Cards', 823 card + '.dat'), 824 pjoin(self.dir_path, 'Cards', 825 card + '_default.dat')) 826 except IOError: 827 logger.warning("Failed to copy " + card + ".dat to default") 828 829 830 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 831 stdout = devnull) 832 833 # Run "make" to generate madevent.tar.gz file 834 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 835 if os.path.exists('amcatnlo.tar.gz'): 836 os.remove('amcatnlo.tar.gz') 837 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 838 stdout = devnull) 839 # 840 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 841 stdout = devnull) 842 843 #return to the initial dir 844 os.chdir(old_pos) 845 846 # Setup stdHep 847 # Find the correct fortran compiler 848 base_compiler= ['FC=g77','FC=gfortran'] 849 850 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 851 if output_dependencies == 'external': 852 # check if stdhep has to be compiled (only the first time) 853 if (not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 854 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a'))) and \ 855 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP','fail')): 856 if 'FC' not in os.environ or not os.environ['FC']: 857 path = os.path.join(StdHep_path, 'src', 'make_opts') 858 text = open(path).read() 859 for base in base_compiler: 860 text = text.replace(base,'FC=%s' % fcompiler_chosen) 861 open(path, 'w').writelines(text) 862 logger.info('Compiling StdHEP. This has to be done only once.') 863 try: 864 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 865 except Exception as error: 866 logger.debug(str(error)) 867 logger.warning("StdHep failed to compiled. 
This forbids to run NLO+PS with PY6 and Herwig6") 868 logger.info("details on the compilation error are available on %s", pjoin(MG5DIR, 'vendor', 'StdHEP','fail')) 869 logger.info("if you want to retry the compilation automatically, you have to remove that file first") 870 with open(pjoin(MG5DIR, 'vendor', 'StdHEP','fail'),'w') as fsock: 871 fsock.write(str(error)) 872 else: 873 logger.info('Done.') 874 if os.path.exists(pjoin(StdHep_path, 'lib', 'libstdhep.a')): 875 #then link the libraries in the exported dir 876 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 877 pjoin(self.dir_path, 'MCatNLO', 'lib')) 878 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 879 pjoin(self.dir_path, 'MCatNLO', 'lib')) 880 881 elif output_dependencies == 'internal': 882 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 883 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 884 # Create the links to the lib folder 885 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 886 for file in linkfiles: 887 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 888 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 889 if 'FC' not in os.environ or not os.environ['FC']: 890 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 891 text = open(path).read() 892 for base in base_compiler: 893 text = text.replace(base,'FC=%s' % fcompiler_chosen) 894 open(path, 'w').writelines(text) 895 # To avoid compiler version conflicts, we force a clean here 896 misc.compile(['clean'],cwd = StdHEP_internal_path) 897 898 elif output_dependencies == 'environment_paths': 899 # Here the user chose to define the dependencies path in one of 900 # his environmental paths 901 libStdHep = misc.which_lib('libstdhep.a') 902 libFmcfio = misc.which_lib('libFmcfio.a') 903 if not libStdHep is None and not libFmcfio is None: 904 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 905 os.path.dirname(libStdHep)) 906 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 907 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 908 else: 909 raise InvalidCmd("Could not find the location of the files"+\ 910 " libstdhep.a and libFmcfio.a in you environment paths.") 911 912 else: 913 raise MadGraph5Error('output_dependencies option %s not recognized'\ 914 %output_dependencies) 915 916 # Create the default MadAnalysis5 cards 917 if 'madanalysis5_path' in self.opt and not \ 918 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 919 # When using 920 processes = sum([me.get('processes') if not isinstance(me, str) else [] \ 921 for me in matrix_elements.get('matrix_elements')],[]) 922 923 # Try getting the processes from the generation info directly if no ME are 924 # available (as it is the case for parallel generation 925 if len(processes)==0: 926 processes = self.born_processes 927 if len(processes)==0: 928 logger.warning( 929 """MG5aMC could not provide to Madanalysis5 the list of processes generated. 930 As a result, the default card will not be tailored to the process generated. 931 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""") 932 # For now, simply assign all processes to each proc_defs. 933 # That shouldn't really affect the default analysis card created by MA5 934 self.create_default_madanalysis5_cards( 935 history, self.proc_defs, [processes,]*len(self.proc_defs), 936 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 937 levels =['hadron'])
938
939 - def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
940 """Writes the real_from_born_configs.inc file that contains 941 the mapping to go for a given born configuration (that is used 942 e.g. in the multi-channel phase-space integration to the 943 corresponding real-emission diagram, i.e. the real emission 944 diagram in which the combined ij is split in i_fks and 945 j_fks.""" 946 lines=[] 947 lines2=[] 948 max_links=0 949 born_me=matrix_element.born_matrix_element 950 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 951 iFKS=iFKS+1 952 links=conf['fks_info']['rb_links'] 953 max_links=max(max_links,len(links)) 954 for i,diags in enumerate(links): 955 if not i == diags['born_conf']: 956 print(links) 957 raise MadGraph5Error("born_conf should be canonically ordered") 958 real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links]) 959 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \ 960 % (iFKS,len(links),real_configs)) 961 962 # this is for 'LOonly' processes; in this case, a fake configuration 963 # with all the born diagrams is written 964 if not matrix_element.get_fks_info_list(): 965 # compute (again) the number of configurations at the born 966 base_diagrams = born_me.get('base_amplitude').get('diagrams') 967 minvert = min([max([len(vert.get('legs')) for vert in \ 968 diag.get('vertices')]) for diag in base_diagrams]) 969 970 for idiag, diag in enumerate(base_diagrams): 971 if any([len(vert.get('legs')) > minvert for vert in 972 diag.get('vertices')]): 973 # Only 3-vertices allowed in configs.inc 974 continue 975 max_links = max_links + 1 976 977 real_configs=', '.join(['%d' % i for i in range(1, max_links+1)]) 978 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \ 979 % (1,max_links,real_configs)) 980 981 lines2.append("integer irfbc") 982 lines2.append("integer real_from_born_conf(%d,%d)" \ 983 % (max_links, max(len(matrix_element.get_fks_info_list()),1))) 984 # Write the file 985 writer.writelines(lines2+lines)
986 987 988 #=============================================================================== 989 # write_get_mass_width_file 990 #=============================================================================== 991 #test written
992 - def write_get_mass_width_file(self, writer, makeinc, model):
993 """Write the get_mass_width_file.f file for MG4. 994 Also update the makeinc.inc file 995 """ 996 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero'] 997 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero'] 998 999 iflines_mass = '' 1000 iflines_width = '' 1001 1002 for i, part in enumerate(mass_particles): 1003 if i == 0: 1004 ifstring = 'if' 1005 else: 1006 ifstring = 'else if' 1007 if part['self_antipart']: 1008 iflines_mass += '%s (id.eq.%d) then\n' % \ 1009 (ifstring, part.get_pdg_code()) 1010 else: 1011 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 1012 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 1013 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass') 1014 1015 for i, part in enumerate(width_particles): 1016 if i == 0: 1017 ifstring = 'if' 1018 else: 1019 ifstring = 'else if' 1020 if part['self_antipart']: 1021 iflines_width += '%s (id.eq.%d) then\n' % \ 1022 (ifstring, part.get_pdg_code()) 1023 else: 1024 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 1025 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 1026 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width') 1027 1028 # Make sure it compiles with an if-statement if the above lists are empty 1029 if len(mass_particles)==0: 1030 iflines_mass = 'if (.True.) then\n' 1031 1032 if len(width_particles)==0: 1033 iflines_width = 'if (.True.) then\n' 1034 1035 replace_dict = {'iflines_mass' : iflines_mass, 1036 'iflines_width' : iflines_width} 1037 1038 file = open(os.path.join(_file_path, \ 1039 'iolibs/template_files/get_mass_width_fcts.inc')).read() 1040 file = file % replace_dict 1041 1042 # Write the file 1043 writer.writelines(file) 1044 1045 # update the makeinc 1046 makeinc_content = open(makeinc).read() 1047 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ') 1048 open(makeinc, 'w').write(makeinc_content) 1049 1050 return
1051 1052
1053 - def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model):
1054 """writes the declarations for the variables relevant for configs_and_props 1055 """ 1056 lines = [] 1057 lines.append("integer ifr,lmaxconfigs_used,max_branch_used") 1058 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig) 1059 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number) 1060 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs) 1061 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1062 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1063 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1064 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1065 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1066 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1067 1068 writer.writelines(lines)
1069 1070
1071 - def write_configs_and_props_info_file(self, filename, matrix_element):
1072 """writes the configs_and_props_info.inc file that cointains 1073 all the (real-emission) configurations (IFOREST) as well as 1074 the masses and widths of intermediate particles""" 1075 lines = [] 1076 lines.append("# C -> MAPCONFIG_D") 1077 lines.append("# F/D -> IFOREST_D") 1078 lines.append("# S -> SPROP_D") 1079 lines.append("# T -> TPRID_D") 1080 lines.append("# M -> PMASS_D/PWIDTH_D") 1081 lines.append("# P -> POW_D") 1082 lines2 = [] 1083 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1084 1085 max_iconfig=0 1086 max_leg_number=0 1087 1088 ######################################################## 1089 # this is for standard processes with [(real=)XXX] 1090 ######################################################## 1091 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 1092 iFKS=iFKS+1 1093 iconfig = 0 1094 s_and_t_channels = [] 1095 mapconfigs = [] 1096 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element 1097 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams') 1098 model = fks_matrix_element.get('base_amplitude').get('process').get('model') 1099 minvert = min([max([len(vert.get('legs')) for vert in \ 1100 diag.get('vertices')]) for diag in base_diagrams]) 1101 1102 lines.append("# ") 1103 lines.append("# nFKSprocess %d" % iFKS) 1104 for idiag, diag in enumerate(base_diagrams): 1105 if any([len(vert.get('legs')) > minvert for vert in 1106 diag.get('vertices')]): 1107 # Only 3-vertices allowed in configs.inc 1108 continue 1109 iconfig = iconfig + 1 1110 helas_diag = fks_matrix_element.get('diagrams')[idiag] 1111 mapconfigs.append(helas_diag.get('number')) 1112 lines.append("# Diagram %d for nFKSprocess %d" % \ 1113 (helas_diag.get('number'),iFKS)) 1114 # Correspondance between the config and the amplitudes 1115 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1116 helas_diag.get('number'))) 1117 1118 # Need to reorganize the topology so that we start with all 1119 # final state external particles and work our way inwards 1120 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1121 get_s_and_t_channels(ninitial, model, 990) 1122 1123 s_and_t_channels.append([schannels, tchannels]) 1124 1125 # Write out propagators for s-channel and t-channel vertices 1126 allchannels = schannels 1127 if len(tchannels) > 1: 1128 # Write out tchannels only if there are any non-trivial ones 1129 allchannels = schannels + tchannels 1130 1131 for vert in allchannels: 1132 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1133 last_leg = vert.get('legs')[-1] 1134 lines.append("F %4d %4d %4d %4d" % \ 1135 (iFKS,last_leg.get('number'), iconfig, len(daughters))) 1136 for d in daughters: 1137 lines.append("D %4d" % d) 1138 if vert in schannels: 1139 lines.append("S %4d %4d %4d %10d" % \ 1140 (iFKS,last_leg.get('number'), iconfig, 1141 last_leg.get('id'))) 1142 elif vert in tchannels[:-1]: 1143 lines.append("T %4d %4d %4d %10d" % \ 1144 (iFKS,last_leg.get('number'), iconfig, 1145 abs(last_leg.get('id')))) 1146 1147 # update what the array sizes (mapconfig,iforest,etc) will be 1148 max_leg_number = min(max_leg_number,last_leg.get('number')) 1149 max_iconfig = max(max_iconfig,iconfig) 1150 1151 # Write out number of configs 1152 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1153 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1154 1155 # write the props.inc information 1156 lines2.append("# ") 1157 particle_dict = fks_matrix_element.get('processes')[0].get('model').\ 1158 get('particle_dict') 
1159 1160 for iconf, configs in enumerate(s_and_t_channels): 1161 for vertex in configs[0] + configs[1][:-1]: 1162 leg = vertex.get('legs')[-1] 1163 if leg.get('id') not in particle_dict: 1164 # Fake propagator used in multiparticle vertices 1165 pow_part = 0 1166 else: 1167 particle = particle_dict[leg.get('id')] 1168 1169 pow_part = 1 + int(particle.is_boson()) 1170 1171 lines2.append("M %4d %4d %4d %10d " % \ 1172 (iFKS,leg.get('number'), iconf + 1, leg.get('id'))) 1173 lines2.append("P %4d %4d %4d %4d " % \ 1174 (iFKS,leg.get('number'), iconf + 1, pow_part)) 1175 1176 ######################################################## 1177 # this is for [LOonly=XXX] 1178 ######################################################## 1179 if not matrix_element.get_fks_info_list(): 1180 born_me = matrix_element.born_matrix_element 1181 # as usual, in this case we assume just one FKS configuration 1182 # exists with diagrams corresponding to born ones X the ij -> i,j 1183 # splitting. Here j is chosen to be the last colored particle in 1184 # the particle list 1185 bornproc = born_me.get('processes')[0] 1186 colors = [l.get('color') for l in bornproc.get('legs')] 1187 1188 fks_i = len(colors) 1189 # use the last colored particle if it exists, or 1190 # just the last 1191 fks_j=1 1192 for cpos, col in enumerate(colors): 1193 if col != 1: 1194 fks_j = cpos+1 1195 fks_j_id = [l.get('id') for l in bornproc.get('legs')][cpos] 1196 1197 # for the moment, if j is initial-state, we do nothing 1198 if fks_j > ninitial: 1199 iFKS=1 1200 iconfig = 0 1201 s_and_t_channels = [] 1202 mapconfigs = [] 1203 base_diagrams = born_me.get('base_amplitude').get('diagrams') 1204 model = born_me.get('base_amplitude').get('process').get('model') 1205 minvert = min([max([len(vert.get('legs')) for vert in \ 1206 diag.get('vertices')]) for diag in base_diagrams]) 1207 1208 lines.append("# ") 1209 lines.append("# nFKSprocess %d" % iFKS) 1210 for idiag, diag in enumerate(base_diagrams): 1211 if any([len(vert.get('legs')) > minvert for vert in 1212 diag.get('vertices')]): 1213 # Only 3-vertices allowed in configs.inc 1214 continue 1215 iconfig = iconfig + 1 1216 helas_diag = born_me.get('diagrams')[idiag] 1217 mapconfigs.append(helas_diag.get('number')) 1218 lines.append("# Diagram %d for nFKSprocess %d" % \ 1219 (helas_diag.get('number'),iFKS)) 1220 # Correspondance between the config and the amplitudes 1221 lines.append("C %4d %4d %4d " % (iFKS,iconfig, 1222 helas_diag.get('number'))) 1223 1224 # Need to reorganize the topology so that we start with all 1225 # final state external particles and work our way inwards 1226 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 1227 get_s_and_t_channels(ninitial, model, 990) 1228 1229 s_and_t_channels.append([schannels, tchannels]) 1230 1231 #the first thing to write is the splitting ij -> i,j 1232 lines.append("F %4d %4d %4d %4d" % \ 1233 (iFKS,-1,iconfig,2)) 1234 #(iFKS,last_leg.get('number'), iconfig, len(daughters))) 1235 lines.append("D %4d" % nexternal) 1236 lines.append("D %4d" % fks_j) 1237 lines.append("S %4d %4d %4d %10d" % \ 1238 (iFKS,-1, iconfig,fks_j_id)) 1239 # now we continue with all the other vertices of the diagrams; 1240 # we need to shift the 'last_leg' by 1 and replace leg fks_j with -1 1241 1242 # Write out propagators for s-channel and t-channel vertices 1243 allchannels = schannels 1244 if len(tchannels) > 1: 1245 # Write out tchannels only if there are any non-trivial ones 1246 allchannels = schannels + tchannels 1247 1248 for vert in allchannels: 1249 
daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1250 last_leg = vert.get('legs')[-1] 1251 lines.append("F %4d %4d %4d %4d" % \ 1252 (iFKS,last_leg.get('number')-1, iconfig, len(daughters))) 1253 1254 # legs with negative number in daughters have to be shifted by -1 1255 for i_dau in range(len(daughters)): 1256 if daughters[i_dau] < 0: 1257 daughters[i_dau] += -1 1258 # finally relable fks with -1 if it appears in daughters 1259 if fks_j in daughters: 1260 daughters[daughters.index(fks_j)] = -1 1261 for d in daughters: 1262 lines.append("D %4d" % d) 1263 if vert in schannels: 1264 lines.append("S %4d %4d %4d %10d" % \ 1265 (iFKS,last_leg.get('number')-1, iconfig, 1266 last_leg.get('id'))) 1267 elif vert in tchannels[:-1]: 1268 lines.append("T %4d %4d %4d %10d" % \ 1269 (iFKS,last_leg.get('number')-1, iconfig, 1270 abs(last_leg.get('id')))) 1271 1272 # update what the array sizes (mapconfig,iforest,etc) will be 1273 max_leg_number = min(max_leg_number,last_leg.get('number')-1) 1274 max_iconfig = max(max_iconfig,iconfig) 1275 1276 # Write out number of configs 1277 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 1278 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig)) 1279 1280 # write the props.inc information 1281 lines2.append("# ") 1282 particle_dict = born_me.get('processes')[0].get('model').\ 1283 get('particle_dict') 1284 1285 for iconf, configs in enumerate(s_and_t_channels): 1286 lines2.append("M %4d %4d %4d %10d " % \ 1287 (iFKS,-1, iconf + 1, fks_j_id)) 1288 pow_part = 1 + int(particle_dict[fks_j_id].is_boson()) 1289 lines2.append("P %4d %4d %4d %4d " % \ 1290 (iFKS,-1, iconf + 1, pow_part)) 1291 for vertex in configs[0] + configs[1][:-1]: 1292 leg = vertex.get('legs')[-1] 1293 if leg.get('id') not in particle_dict: 1294 # Fake propagator used in multiparticle vertices 1295 pow_part = 0 1296 else: 1297 particle = particle_dict[leg.get('id')] 1298 1299 pow_part = 1 + int(particle.is_boson()) 1300 1301 lines2.append("M %4d %4d %4d %10d " % \ 1302 (iFKS,leg.get('number')-1, iconf + 1, leg.get('id'))) 1303 lines2.append("P %4d %4d %4d %4d " % \ 1304 (iFKS,leg.get('number')-1, iconf + 1, pow_part)) 1305 1306 # Write the file 1307 open(filename,'w').write('\n'.join(lines+lines2)) 1308 1309 return max_iconfig, max_leg_number
1310 1311
1312 - def write_leshouche_info_declarations(self, writer, nfksconfs, 1313 maxproc, maxflow, nexternal, fortran_model):
1314 """writes the declarations for the variables relevant for leshouche_info 1315 """ 1316 lines = [] 1317 lines.append('integer maxproc_used, maxflow_used') 1318 lines.append('parameter (maxproc_used = %d)' % maxproc) 1319 lines.append('parameter (maxflow_used = %d)' % maxflow) 1320 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal)) 1321 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal)) 1322 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal)) 1323 lines.append('integer niprocs_d(%d)' % (nfksconfs)) 1324 1325 writer.writelines(lines)
1326 1327
1328 - def write_genps(self, writer, maxproc,ngraphs,ncolor,maxflow, fortran_model):
1329 """writes the genps.inc file 1330 """ 1331 lines = [] 1332 lines.append("include 'maxparticles.inc'") 1333 lines.append("include 'maxconfigs.inc'") 1334 lines.append("integer maxproc,ngraphs,ncolor,maxflow") 1335 lines.append("parameter (maxproc=%d,ngraphs=%d,ncolor=%d,maxflow=%d)" % \ 1336 (maxproc,ngraphs,ncolor,maxflow)) 1337 writer.writelines(lines)
1338 1339
1340 - def write_leshouche_info_file(self, filename, matrix_element):
1341 """writes the leshouche_info.inc file which contains 1342 the LHA informations for all the real emission processes 1343 """ 1344 lines = [] 1345 lines.append("# I -> IDUP_D") 1346 lines.append("# M -> MOTHUP_D") 1347 lines.append("# C -> ICOLUP_D") 1348 nfksconfs = len(matrix_element.get_fks_info_list()) 1349 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1350 1351 maxproc = 0 1352 maxflow = 0 1353 for i, conf in enumerate(matrix_element.get_fks_info_list()): 1354 # for i, real in enumerate(matrix_element.real_processes): 1355 (newlines, nprocs, nflows) = self.get_leshouche_lines( 1356 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1) 1357 lines.extend(newlines) 1358 maxproc = max(maxproc, nprocs) 1359 maxflow = max(maxflow, nflows) 1360 1361 # this is for LOonly 1362 if not matrix_element.get_fks_info_list(): 1363 (newlines, nprocs, nflows) = self.get_leshouche_lines_dummy(matrix_element.born_matrix_element, 1) 1364 lines.extend(newlines) 1365 1366 # Write the file 1367 open(filename,'w').write('\n'.join(lines)) 1368 1369 return nfksconfs, maxproc, maxflow, nexternal
1370 1371
1372 - def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
1373 """writes the wrapper which allows to chose among the different real matrix elements""" 1374 1375 file = \ 1376 """double precision function dlum() 1377 implicit none 1378 integer nfksprocess 1379 common/c_nfksprocess/nfksprocess 1380 """ 1381 if matrix_element.real_processes: 1382 for n, info in enumerate(matrix_element.get_fks_info_list()): 1383 file += \ 1384 """if (nfksprocess.eq.%(n)d) then 1385 call dlum_%(n_me)d(dlum) 1386 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1387 file += \ 1388 """ 1389 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess 1390 stop 1391 endif 1392 return 1393 end 1394 """ 1395 else: 1396 file+= \ 1397 """call dlum_0(dlum) 1398 return 1399 end 1400 """ 1401 1402 # Write the file 1403 writer.writelines(file) 1404 return 0
1405 1406
1407 - def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
1408 """writes the wrapper which allows to chose among the different real matrix elements""" 1409 1410 file = \ 1411 """subroutine smatrix_real(p, wgt) 1412 implicit none 1413 include 'nexternal.inc' 1414 double precision p(0:3, nexternal) 1415 double precision wgt 1416 integer nfksprocess 1417 common/c_nfksprocess/nfksprocess 1418 """ 1419 for n, info in enumerate(matrix_element.get_fks_info_list()): 1420 file += \ 1421 """if (nfksprocess.eq.%(n)d) then 1422 call smatrix_%(n_me)d(p, wgt) 1423 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1424 1425 if matrix_element.real_processes: 1426 file += \ 1427 """ 1428 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess 1429 stop 1430 endif 1431 return 1432 end 1433 """ 1434 else: 1435 file += \ 1436 """ 1437 wgt=0d0 1438 return 1439 end 1440 """ 1441 # Write the file 1442 writer.writelines(file) 1443 return 0
1444 1445
1446 - def draw_feynman_diagrams(self, matrix_element):
1447 """Create the ps files containing the feynman diagrams for the born process, 1448 as well as for all the real emission processes""" 1449 1450 filename = 'born.ps' 1451 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\ 1452 get('base_amplitude').get('diagrams'), 1453 filename, 1454 model=matrix_element.born_matrix_element.\ 1455 get('processes')[0].get('model'), 1456 amplitude=True, diagram_type='born') 1457 plot.draw() 1458 1459 for n, fksreal in enumerate(matrix_element.real_processes): 1460 filename = 'matrix_%d.ps' % (n + 1) 1461 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\ 1462 get('base_amplitude').get('diagrams'), 1463 filename, 1464 model=fksreal.matrix_element.\ 1465 get('processes')[0].get('model'), 1466 amplitude=True, diagram_type='real') 1467 plot.draw()
1468 1469
1470 - def write_real_matrix_elements(self, matrix_element, fortran_model):
1471 """writes the matrix_i.f files which contain the real matrix elements""" 1472 1473 1474 1475 for n, fksreal in enumerate(matrix_element.real_processes): 1476 filename = 'matrix_%d.f' % (n + 1) 1477 self.write_matrix_element_fks(writers.FortranWriter(filename), 1478 fksreal.matrix_element, n + 1, 1479 fortran_model)
1480
1481 - def write_pdf_calls(self, matrix_element, fortran_model):
1482 """writes the parton_lum_i.f files which contain the real matrix elements. 1483 If no real emission existst, write the one for the born""" 1484 1485 if matrix_element.real_processes: 1486 for n, fksreal in enumerate(matrix_element.real_processes): 1487 filename = 'parton_lum_%d.f' % (n + 1) 1488 self.write_pdf_file(writers.FortranWriter(filename), 1489 fksreal.matrix_element, n + 1, 1490 fortran_model) 1491 else: 1492 filename = 'parton_lum_0.f' 1493 self.write_pdf_file(writers.FortranWriter(filename), 1494 matrix_element.born_matrix_element, 0, 1495 fortran_model)
1496 1497
1498 - def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
1499 """generates the files needed for the born amplitude in the P* directory, which will 1500 be needed by the P* directories""" 1501 pathdir = os.getcwd() 1502 1503 filename = 'born.f' 1504 calls_born, ncolor_born = \ 1505 self.write_born_fks(writers.FortranWriter(filename),\ 1506 matrix_element, 1507 fortran_model) 1508 1509 filename = 'born_hel.f' 1510 self.write_born_hel(writers.FortranWriter(filename),\ 1511 matrix_element, 1512 fortran_model) 1513 1514 1515 filename = 'born_conf.inc' 1516 nconfigs, mapconfigs, s_and_t_channels = \ 1517 self.write_configs_file( 1518 writers.FortranWriter(filename), 1519 matrix_element.born_matrix_element, 1520 fortran_model) 1521 1522 filename = 'born_props.inc' 1523 self.write_props_file(writers.FortranWriter(filename), 1524 matrix_element.born_matrix_element, 1525 fortran_model, 1526 s_and_t_channels) 1527 1528 filename = 'born_decayBW.inc' 1529 self.write_decayBW_file(writers.FortranWriter(filename), 1530 s_and_t_channels) 1531 1532 filename = 'born_leshouche.inc' 1533 nflows = self.write_leshouche_file(writers.FortranWriter(filename), 1534 matrix_element.born_matrix_element, 1535 fortran_model) 1536 1537 filename = 'born_nhel.inc' 1538 self.write_born_nhel_file(writers.FortranWriter(filename), 1539 matrix_element.born_matrix_element, nflows, 1540 fortran_model, 1541 ncolor_born) 1542 1543 filename = 'born_ngraphs.inc' 1544 self.write_ngraphs_file(writers.FortranWriter(filename), 1545 matrix_element.born_matrix_element.get_number_of_amplitudes()) 1546 1547 filename = 'ncombs.inc' 1548 self.write_ncombs_file(writers.FortranWriter(filename), 1549 matrix_element.born_matrix_element, 1550 fortran_model) 1551 1552 filename = 'born_maxamps.inc' 1553 maxamps = len(matrix_element.get('diagrams')) 1554 maxflows = ncolor_born 1555 self.write_maxamps_file(writers.FortranWriter(filename), 1556 maxamps, 1557 maxflows, 1558 max([len(matrix_element.get('processes')) for me in \ 1559 matrix_element.born_matrix_element]),1) 1560 1561 filename = 'config_subproc_map.inc' 1562 self.write_config_subproc_map_file(writers.FortranWriter(filename), 1563 s_and_t_channels) 1564 1565 filename = 'coloramps.inc' 1566 self.write_coloramps_file(writers.FortranWriter(filename), 1567 mapconfigs, 1568 matrix_element.born_matrix_element, 1569 fortran_model) 1570 1571 #write the sborn_sf.f and the b_sf_files 1572 filename = ['sborn_sf.f', 'sborn_sf_dum.f'] 1573 for i, links in enumerate([matrix_element.color_links, []]): 1574 self.write_sborn_sf(writers.FortranWriter(filename[i]), 1575 links, 1576 fortran_model) 1577 self.color_link_files = [] 1578 for i in range(len(matrix_element.color_links)): 1579 filename = 'b_sf_%3.3d.f' % (i + 1) 1580 self.color_link_files.append(filename) 1581 self.write_b_sf_fks(writers.FortranWriter(filename), 1582 matrix_element, i, 1583 fortran_model)
1584 1585
1586 - def generate_virtuals_from_OLP(self,process_list,export_path, OLP):
1587 """Generates the library for computing the loop matrix elements 1588 necessary for this process using the OLP specified.""" 1589 1590 # Start by writing the BLHA order file 1591 virtual_path = pjoin(export_path,'OLP_virtuals') 1592 if not os.path.exists(virtual_path): 1593 os.makedirs(virtual_path) 1594 filename = os.path.join(virtual_path,'OLE_order.lh') 1595 self.write_lh_order(filename, process_list, OLP) 1596 1597 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\ 1598 'Please check the virt_generation.log file in %s.'\ 1599 %str(pjoin(virtual_path,'virt_generation.log')) 1600 1601 # Perform some tasks specific to certain OLP's 1602 if OLP=='GoSam': 1603 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1604 'GoSam','makevirt'),pjoin(virtual_path,'makevirt')) 1605 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1606 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc')) 1607 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path) 1608 # Now generate the process 1609 logger.info('Generating the loop matrix elements with %s...'%OLP) 1610 virt_generation_log = \ 1611 open(pjoin(virtual_path,'virt_generation.log'), 'w') 1612 retcode = subprocess.call(['./makevirt'],cwd=virtual_path, 1613 stdout=virt_generation_log, stderr=virt_generation_log) 1614 virt_generation_log.close() 1615 # Check what extension is used for the share libraries on this system 1616 possible_other_extensions = ['so','dylib'] 1617 shared_lib_ext='so' 1618 for ext in possible_other_extensions: 1619 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib', 1620 'libgolem_olp.'+ext)): 1621 shared_lib_ext = ext 1622 1623 # Now check that everything got correctly generated 1624 files_to_check = ['olp_module.mod',str(pjoin('lib', 1625 'libgolem_olp.'+shared_lib_ext))] 1626 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path, 1627 'Virtuals',f)) for f in files_to_check]): 1628 raise fks_common.FKSProcessError(fail_msg) 1629 # link the library to the lib folder 1630 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext), 1631 pjoin(export_path,'lib')) 1632 1633 # Specify in make_opts the right library necessitated by the OLP 1634 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read() 1635 make_opts=open(pjoin(export_path,'Source','make_opts'),'w') 1636 if OLP=='GoSam': 1637 if platform.system().lower()=='darwin': 1638 # On mac the -rpath is not supported and the path of the dynamic 1639 # library is automatically wired in the executable 1640 make_opts_content=make_opts_content.replace('libOLP=', 1641 'libOLP=-Wl,-lgolem_olp') 1642 else: 1643 # On other platforms the option , -rpath= path to libgolem.so is necessary 1644 # Using a relative path is not ideal because the file libgolem.so is not 1645 # copied on the worker nodes. 1646 # make_opts_content=make_opts_content.replace('libOLP=', 1647 # 'libOLP=-Wl,-rpath=../$(LIBDIR) -lgolem_olp') 1648 # Using the absolute path is working in the case where the disk of the 1649 # front end machine is mounted on all worker nodes as well. 
1650 make_opts_content=make_opts_content.replace('libOLP=', 1651 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp') 1652 1653 1654 make_opts.write(make_opts_content) 1655 make_opts.close() 1656 1657 # A priori this is generic to all OLP's 1658 1659 # Parse the contract file returned and propagate the process label to 1660 # the include of the BinothLHA.f file 1661 proc_to_label = self.parse_contract_file( 1662 pjoin(virtual_path,'OLE_order.olc')) 1663 1664 self.write_BinothLHA_inc(process_list,proc_to_label,\ 1665 pjoin(export_path,'SubProcesses')) 1666 1667 # Link the contract file to within the SubProcess directory 1668 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
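The make_opts substitution above is the only platform-dependent step of the OLP linking. The standalone sketch below (not part of export_fks.py; 'export_path' and the make_opts content are hypothetical stand-ins) reproduces the same choice of libOLP link flags.

import os
import platform

pjoin = os.path.join

export_path = '/tmp/PROC_example'        # hypothetical process directory
make_opts_content = 'libOLP=\n'          # stand-in for the Source/make_opts content

if platform.system().lower() == 'darwin':
    # on macOS no -rpath is used; only the library itself is linked
    make_opts_content = make_opts_content.replace('libOLP=',
                                                  'libOLP=-Wl,-lgolem_olp')
else:
    # elsewhere, hard-wire the absolute lib/ path so that worker nodes can find libgolem_olp.so
    make_opts_content = make_opts_content.replace('libOLP=',
        'libOLP=-Wl,-rpath=' + pjoin(export_path, 'lib') + ' -lgolem_olp')

print(make_opts_content)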
1669
1670 - def write_BinothLHA_inc(self, processes, proc_to_label, SubProcPath):
1671 """ Write the file Binoth_proc.inc in each SubProcess directory so as 1672 to provide the right process_label to use in the OLP call to get the 1673 loop matrix element evaluation. The proc_to_label is the dictionary of 1674 the format of the one returned by the function parse_contract_file.""" 1675 1676 for proc in processes: 1677 name = "P%s"%proc.shell_string() 1678 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \ 1679 not leg.get('state')]), 1680 tuple([leg.get('id') for leg in proc.get('legs') if \ 1681 leg.get('state')])) 1682 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w') 1683 try: 1684 incFile.write( 1685 """ INTEGER PROC_LABEL 1686 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs])) 1687 except KeyError: 1688 raise fks_common.FKSProcessError('Could not found the target'+\ 1689 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\ 1690 ' the proc_to_label argument in write_BinothLHA_inc.') 1691 incFile.close()
1692
1693 - def parse_contract_file(self, contract_file_path):
1694 """ Parses the BLHA contract file, make sure all parameters could be 1695 understood by the OLP and return a mapping of the processes (characterized 1696 by the pdg's of the initial and final state particles) to their process 1697 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 1698 """ 1699 1700 proc_def_to_label = {} 1701 1702 if not os.path.exists(contract_file_path): 1703 raise fks_common.FKSProcessError('Could not find the contract file'+\ 1704 ' OLE_order.olc in %s.'%str(contract_file_path)) 1705 1706 comment_re=re.compile(r"^\s*#") 1707 proc_def_re=re.compile( 1708 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+ 1709 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 1710 line_OK_re=re.compile(r"^.*\|\s*OK") 1711 for line in open(contract_file_path): 1712 # Ignore comments 1713 if not comment_re.match(line) is None: 1714 continue 1715 # Check if it is a proc definition line 1716 proc_def = proc_def_re.match(line) 1717 if not proc_def is None: 1718 if int(proc_def.group('proc_class'))!=1: 1719 raise fks_common.FKSProcessError( 1720 'aMCatNLO can only handle loop processes generated by the OLP which have only '+\ 1721 ' process class attribute. Found %s instead in: \n%s'\ 1722 %(proc_def.group('proc_class'),line)) 1723 in_pdgs=tuple([int(in_pdg) for in_pdg in \ 1724 proc_def.group('in_pdgs').split()]) 1725 out_pdgs=tuple([int(out_pdg) for out_pdg in \ 1726 proc_def.group('out_pdgs').split()]) 1727 proc_def_to_label[(in_pdgs,out_pdgs)]=\ 1728 int(proc_def.group('proc_label')) 1729 continue 1730 # For the other types of line, just make sure they end with | OK 1731 if line_OK_re.match(line) is None: 1732 raise fks_common.FKSProcessError( 1733 'The OLP could not process the following line: \n%s'%line) 1734 1735 return proc_def_to_label
1736 1737
1738 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1739 """writes the V**** directory inside the P**** directories specified in 1740 dir_name""" 1741 1742 cwd = os.getcwd() 1743 1744 matrix_element = loop_matrix_element 1745 1746 # Create the MadLoop5_resources directory if not already existing 1747 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 1748 try: 1749 os.mkdir(dirpath) 1750 except os.error as error: 1751 logger.warning(error.strerror + " " + dirpath) 1752 1753 # Create the directory PN_xx_xxxxx in the specified path 1754 name = "V%s" % matrix_element.get('processes')[0].shell_string() 1755 dirpath = os.path.join(dir_name, name) 1756 1757 try: 1758 os.mkdir(dirpath) 1759 except os.error as error: 1760 logger.warning(error.strerror + " " + dirpath) 1761 1762 try: 1763 os.chdir(dirpath) 1764 except os.error: 1765 logger.error('Could not cd to directory %s' % dirpath) 1766 return 0 1767 1768 logger.info('Creating files in directory %s' % name) 1769 1770 # Extract number of external particles 1771 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1772 1773 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 1774 # The born matrix element, if needed 1775 filename = 'born_matrix.f' 1776 calls = self.write_bornmatrix( 1777 writers.FortranWriter(filename), 1778 matrix_element, 1779 fortran_model) 1780 1781 filename = 'nexternal.inc' 1782 self.write_nexternal_file(writers.FortranWriter(filename), 1783 nexternal, ninitial) 1784 1785 filename = 'pmass.inc' 1786 self.write_pmass_file(writers.FortranWriter(filename), 1787 matrix_element) 1788 1789 filename = 'ngraphs.inc' 1790 self.write_ngraphs_file(writers.FortranWriter(filename), 1791 len(matrix_element.get_all_amplitudes())) 1792 1793 filename = "loop_matrix.ps" 1794 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 1795 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 1796 filename, 1797 model=matrix_element.get('processes')[0].get('model'), 1798 amplitude='') 1799 logger.info("Drawing loop Feynman diagrams for " + \ 1800 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1801 plot.draw() 1802 1803 filename = "born_matrix.ps" 1804 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1805 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\ 1806 get('model'),amplitude='') 1807 logger.info("Generating born Feynman diagrams for " + \ 1808 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1809 plot.draw() 1810 1811 # We also need to write the overall maximum quantities for this group 1812 # of processes in 'global_specs.inc'. 
In aMCatNLO, there is always 1813 # only one process, so this is trivial 1814 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc')) 1815 open('unique_id.inc','w').write( 1816 """ integer UNIQUE_ID 1817 parameter(UNIQUE_ID=1)""") 1818 1819 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 1820 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 1821 'MadLoopCommons.f','MadLoopParams.inc'] 1822 1823 # We should move to MadLoop5_resources directory from the SubProcesses 1824 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 1825 pjoin('..','MadLoop5_resources')) 1826 1827 for file in linkfiles: 1828 ln('../../%s' % file) 1829 1830 os.system("ln -s ../../makefile_loop makefile") 1831 1832 linkfiles = ['mpmodule.mod'] 1833 1834 for file in linkfiles: 1835 ln('../../../lib/%s' % file) 1836 1837 linkfiles = ['coef_specs.inc'] 1838 1839 for file in linkfiles: 1840 ln('../../../Source/DHELAS/%s' % file) 1841 1842 # Return to original PWD 1843 os.chdir(cwd) 1844 1845 if not calls: 1846 calls = 0 1847 return calls
1848
1849 - def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
1850 """computes the QED/QCD orders from the knowledge of the n of ext particles 1851 and of the weighted orders""" 1852 # n vertices = nexternal - 2 =QED + QCD 1853 # weighted = 2*QED + QCD 1854 QED = weighted - nexternal + 2 1855 QCD = weighted - 2 * QED 1856 return QED, QCD
1857 1858 1859 1860 #=============================================================================== 1861 # write_lh_order 1862 #=============================================================================== 1863 #test written
1864 - def write_lh_order(self, filename, process_list, OLP='MadLoop'):
1865 """Creates the OLE_order.lh file. This function should be edited according 1866 to the OLP which is used. For now it is generic.""" 1867 1868 1869 if len(process_list)==0: 1870 raise fks_common.FKSProcessError('No matrix elements provided to '+\ 1871 'the function write_lh_order.') 1872 return 1873 1874 # We assume the orders to be common to all Subprocesses 1875 1876 orders = process_list[0].get('orders') 1877 if 'QED' in list(orders.keys()) and 'QCD' in list(orders.keys()): 1878 QED=orders['QED'] 1879 QCD=orders['QCD'] 1880 elif 'QED' in list(orders.keys()): 1881 QED=orders['QED'] 1882 QCD=0 1883 elif 'QCD' in list(orders.keys()): 1884 QED=0 1885 QCD=orders['QCD'] 1886 else: 1887 QED, QCD = self.get_qed_qcd_orders_from_weighted(\ 1888 len(process_list[0].get('legs')), 1889 orders['WEIGHTED']) 1890 1891 replace_dict = {} 1892 replace_dict['mesq'] = 'CHaveraged' 1893 replace_dict['corr'] = ' '.join(process_list[0].\ 1894 get('perturbation_couplings')) 1895 replace_dict['irreg'] = 'CDR' 1896 replace_dict['aspow'] = QCD 1897 replace_dict['aepow'] = QED 1898 replace_dict['modelfile'] = './param_card.dat' 1899 replace_dict['params'] = 'alpha_s' 1900 proc_lines=[] 1901 for proc in process_list: 1902 proc_lines.append('%s -> %s' % \ 1903 (' '.join(str(l['id']) for l in proc['legs'] if not l['state']), 1904 ' '.join(str(l['id']) for l in proc['legs'] if l['state']))) 1905 replace_dict['pdgs'] = '\n'.join(proc_lines) 1906 replace_dict['symfin'] = 'Yes' 1907 content = \ 1908 "#OLE_order written by MadGraph5_aMC@NLO\n\ 1909 \n\ 1910 MatrixElementSquareType %(mesq)s\n\ 1911 CorrectionType %(corr)s\n\ 1912 IRregularisation %(irreg)s\n\ 1913 AlphasPower %(aspow)d\n\ 1914 AlphaPower %(aepow)d\n\ 1915 NJetSymmetrizeFinal %(symfin)s\n\ 1916 ModelFile %(modelfile)s\n\ 1917 Parameters %(params)s\n\ 1918 \n\ 1919 # process\n\ 1920 %(pdgs)s\n\ 1921 " % replace_dict 1922 1923 file = open(filename, 'w') 1924 file.write(content) 1925 file.close 1926 return
1927 1928 1929 #=============================================================================== 1930 # write_born_fks 1931 #=============================================================================== 1932 # test written
1933 - def write_born_fks(self, writer, fksborn, fortran_model):
1934 """Export a matrix element to a born.f file in MadFKS format""" 1935 1936 matrix_element = fksborn.born_matrix_element 1937 1938 if not matrix_element.get('processes') or \ 1939 not matrix_element.get('diagrams'): 1940 return 0 1941 1942 if not isinstance(writer, writers.FortranWriter): 1943 raise writers.FortranWriter.FortranWriterError(\ 1944 "writer not FortranWriter") 1945 1946 # Add information relevant for FxFx matching: 1947 # Maximum QCD power in all the contributions 1948 max_qcd_order = 0 1949 for diag in matrix_element.get('diagrams'): 1950 orders = diag.calculate_orders() 1951 if 'QCD' in orders: 1952 max_qcd_order = max(max_qcd_order,orders['QCD']) 1953 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 1954 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 1955 proc.get('model').get_particle(id).get('color')>1]) 1956 for proc in matrix_element.get('processes')) 1957 # Maximum number of final state light jets to be matched 1958 self.proc_characteristic['max_n_matched_jets'] = max( 1959 self.proc_characteristic['max_n_matched_jets'], 1960 min(max_qcd_order,max_n_light_final_partons)) 1961 1962 1963 # Set lowercase/uppercase Fortran code 1964 writers.FortranWriter.downcase = False 1965 1966 replace_dict = {} 1967 1968 # Extract version number and date from VERSION file 1969 info_lines = self.get_mg5_info_lines() 1970 replace_dict['info_lines'] = info_lines 1971 1972 # Extract process info lines 1973 process_lines = self.get_process_info_lines(matrix_element) 1974 replace_dict['process_lines'] = process_lines 1975 1976 1977 # Extract ncomb 1978 ncomb = matrix_element.get_helicity_combinations() 1979 replace_dict['ncomb'] = ncomb 1980 1981 # Extract helicity lines 1982 helicity_lines = self.get_helicity_lines(matrix_element) 1983 replace_dict['helicity_lines'] = helicity_lines 1984 1985 # Extract IC line 1986 ic_line = self.get_ic_line(matrix_element) 1987 replace_dict['ic_line'] = ic_line 1988 1989 # Extract overall denominator 1990 # Averaging initial state color, spin, and identical FS particles 1991 #den_factor_line = get_den_factor_line(matrix_element) 1992 1993 # Extract ngraphs 1994 ngraphs = matrix_element.get_number_of_amplitudes() 1995 replace_dict['ngraphs'] = ngraphs 1996 1997 # Extract nwavefuncs 1998 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1999 replace_dict['nwavefuncs'] = nwavefuncs 2000 2001 # Extract ncolor 2002 ncolor = max(1, len(matrix_element.get('color_basis'))) 2003 replace_dict['ncolor'] = ncolor 2004 2005 # Extract color data lines 2006 color_data_lines = self.get_color_data_lines(matrix_element) 2007 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2008 2009 # Extract helas calls 2010 helas_calls = fortran_model.get_matrix_element_calls(\ 2011 matrix_element) 2012 replace_dict['helas_calls'] = "\n".join(helas_calls) 2013 2014 # Extract amp2 lines 2015 amp2_lines = self.get_amp2_lines(matrix_element) 2016 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2017 2018 # Extract JAMP lines 2019 jamp_lines = self.get_JAMP_lines(matrix_element) 2020 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2021 2022 # Set the size of Wavefunction 2023 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2024 replace_dict['wavefunctionsize'] = 20 2025 else: 2026 replace_dict['wavefunctionsize'] = 8 2027 2028 # Extract glu_ij_lines 2029 ij_lines = self.get_ij_lines(fksborn) 2030 replace_dict['ij_lines'] = '\n'.join(ij_lines) 2031 2032 # Extract den_factor_lines 
2033 den_factor_lines = self.get_den_factor_lines(fksborn) 2034 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 2035 2036 # Extract the number of FKS process 2037 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1) 2038 2039 file = open(os.path.join(_file_path, \ 2040 'iolibs/template_files/born_fks.inc')).read() 2041 file = file % replace_dict 2042 2043 # Write the file 2044 writer.writelines(file) 2045 2046 return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2047 2048
2049 - def write_born_hel(self, writer, fksborn, fortran_model):
2050 """Export a matrix element to a born_hel.f file in MadFKS format""" 2051 2052 matrix_element = fksborn.born_matrix_element 2053 2054 if not matrix_element.get('processes') or \ 2055 not matrix_element.get('diagrams'): 2056 return 0 2057 2058 if not isinstance(writer, writers.FortranWriter): 2059 raise writers.FortranWriter.FortranWriterError(\ 2060 "writer not FortranWriter") 2061 # Set lowercase/uppercase Fortran code 2062 writers.FortranWriter.downcase = False 2063 2064 replace_dict = {} 2065 2066 # Extract version number and date from VERSION file 2067 info_lines = self.get_mg5_info_lines() 2068 replace_dict['info_lines'] = info_lines 2069 2070 # Extract process info lines 2071 process_lines = self.get_process_info_lines(matrix_element) 2072 replace_dict['process_lines'] = process_lines 2073 2074 2075 # Extract ncomb 2076 ncomb = matrix_element.get_helicity_combinations() 2077 replace_dict['ncomb'] = ncomb 2078 2079 # Extract helicity lines 2080 helicity_lines = self.get_helicity_lines(matrix_element) 2081 replace_dict['helicity_lines'] = helicity_lines 2082 2083 # Extract IC line 2084 ic_line = self.get_ic_line(matrix_element) 2085 replace_dict['ic_line'] = ic_line 2086 2087 # Extract overall denominator 2088 # Averaging initial state color, spin, and identical FS particles 2089 #den_factor_line = get_den_factor_line(matrix_element) 2090 2091 # Extract ngraphs 2092 ngraphs = matrix_element.get_number_of_amplitudes() 2093 replace_dict['ngraphs'] = ngraphs 2094 2095 # Extract nwavefuncs 2096 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2097 replace_dict['nwavefuncs'] = nwavefuncs 2098 2099 # Extract ncolor 2100 ncolor = max(1, len(matrix_element.get('color_basis'))) 2101 replace_dict['ncolor'] = ncolor 2102 2103 # Extract color data lines 2104 color_data_lines = self.get_color_data_lines(matrix_element) 2105 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2106 2107 # Extract amp2 lines 2108 amp2_lines = self.get_amp2_lines(matrix_element) 2109 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2110 2111 # Extract JAMP lines 2112 jamp_lines = self.get_JAMP_lines(matrix_element) 2113 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2114 2115 # Extract den_factor_lines 2116 den_factor_lines = self.get_den_factor_lines(fksborn) 2117 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 2118 2119 # Extract the number of FKS process 2120 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2121 2122 file = open(os.path.join(_file_path, \ 2123 'iolibs/template_files/born_fks_hel.inc')).read() 2124 file = file % replace_dict 2125 2126 # Write the file 2127 writer.writelines(file) 2128 2129 return
2130 2131 2132 #=============================================================================== 2133 # write_born_sf_fks 2134 #=============================================================================== 2135 #test written
2136 - def write_sborn_sf(self, writer, color_links, fortran_model):
2137 """Creates the sborn_sf.f file, containing the calls to the different 2138 color linked borns""" 2139 2140 replace_dict = {} 2141 nborns = len(color_links) 2142 ifkss = [] 2143 iborns = [] 2144 mms = [] 2145 nns = [] 2146 iflines = "\n" 2147 2148 #header for the sborn_sf.f file 2149 file = """subroutine sborn_sf(p_born,m,n,wgt) 2150 implicit none 2151 include "nexternal.inc" 2152 double precision p_born(0:3,nexternal-1),wgt 2153 double complex wgt1(2) 2154 integer m,n \n""" 2155 2156 if nborns > 0: 2157 2158 for i, c_link in enumerate(color_links): 2159 iborn = i+1 2160 2161 iff = {True : 'if', False : 'elseif'}[i==0] 2162 2163 m, n = c_link['link'] 2164 2165 if m != n: 2166 iflines += \ 2167 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 2168 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\ 2169 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 2170 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 2171 else: 2172 iflines += \ 2173 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 2174 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\ 2175 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 2176 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 2177 2178 2179 file += iflines + \ 2180 """else 2181 wgt = 0d0 2182 endif 2183 2184 return 2185 end""" 2186 elif nborns == 0: 2187 #write a dummy file 2188 file+=""" 2189 c This is a dummy function because 2190 c this subdir has no soft singularities 2191 wgt = 0d0 2192 2193 return 2194 end""" 2195 # Write the end of the file 2196 2197 writer.writelines(file)
2198 2199 2200 #=============================================================================== 2201 # write_b_sf_fks 2202 #=============================================================================== 2203 #test written
2204 - def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
2205 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format""" 2206 2207 matrix_element = copy.copy(fksborn.born_matrix_element) 2208 2209 if not matrix_element.get('processes') or \ 2210 not matrix_element.get('diagrams'): 2211 return 0 2212 2213 if not isinstance(writer, writers.FortranWriter): 2214 raise writers.FortranWriter.FortranWriterError(\ 2215 "writer not FortranWriter") 2216 # Set lowercase/uppercase Fortran code 2217 writers.FortranWriter.downcase = False 2218 2219 iborn = i + 1 2220 link = fksborn.color_links[i] 2221 2222 replace_dict = {} 2223 2224 replace_dict['iborn'] = iborn 2225 2226 # Extract version number and date from VERSION file 2227 info_lines = self.get_mg5_info_lines() 2228 replace_dict['info_lines'] = info_lines 2229 2230 # Extract process info lines 2231 process_lines = self.get_process_info_lines(matrix_element) 2232 replace_dict['process_lines'] = process_lines + \ 2233 "\nc spectators: %d %d \n" % tuple(link['link']) 2234 2235 # Extract ncomb 2236 ncomb = matrix_element.get_helicity_combinations() 2237 replace_dict['ncomb'] = ncomb 2238 2239 # Extract helicity lines 2240 helicity_lines = self.get_helicity_lines(matrix_element) 2241 replace_dict['helicity_lines'] = helicity_lines 2242 2243 # Extract IC line 2244 ic_line = self.get_ic_line(matrix_element) 2245 replace_dict['ic_line'] = ic_line 2246 2247 # Extract den_factor_lines 2248 den_factor_lines = self.get_den_factor_lines(fksborn) 2249 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 2250 2251 # Extract ngraphs 2252 ngraphs = matrix_element.get_number_of_amplitudes() 2253 replace_dict['ngraphs'] = ngraphs 2254 2255 # Extract nwavefuncs 2256 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2257 replace_dict['nwavefuncs'] = nwavefuncs 2258 2259 # Extract ncolor 2260 ncolor1 = max(1, len(link['orig_basis'])) 2261 replace_dict['ncolor1'] = ncolor1 2262 ncolor2 = max(1, len(link['link_basis'])) 2263 replace_dict['ncolor2'] = ncolor2 2264 2265 # Extract color data lines 2266 color_data_lines = self.get_color_data_lines_from_color_matrix(\ 2267 link['link_matrix']) 2268 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2269 2270 # Extract amp2 lines 2271 amp2_lines = self.get_amp2_lines(matrix_element) 2272 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2273 2274 # Extract JAMP lines 2275 jamp_lines = self.get_JAMP_lines(matrix_element) 2276 new_jamp_lines = [] 2277 for line in jamp_lines: 2278 line = line.replace('JAMP', 'JAMP1') 2279 new_jamp_lines.append(line) 2280 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines) 2281 2282 matrix_element.set('color_basis', link['link_basis'] ) 2283 jamp_lines = self.get_JAMP_lines(matrix_element) 2284 new_jamp_lines = [] 2285 for line in jamp_lines: 2286 line = line.replace( 'JAMP', 'JAMP2') 2287 new_jamp_lines.append(line) 2288 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines) 2289 2290 2291 # Extract the number of FKS process 2292 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 2293 2294 file = open(os.path.join(_file_path, \ 2295 'iolibs/template_files/b_sf_xxx_fks.inc')).read() 2296 file = file % replace_dict 2297 2298 # Write the file 2299 writer.writelines(file) 2300 2301 return 0 , ncolor1
2302 2303 2304 #=============================================================================== 2305 # write_born_nhel_file 2306 #=============================================================================== 2307 #test written
2308 - def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
2309 """Write the born_nhel.inc file for MG4.""" 2310 2311 ncomb = matrix_element.get_helicity_combinations() 2312 file = " integer max_bhel, max_bcol \n" 2313 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \ 2314 (ncomb, nflows) 2315 2316 # Write the file 2317 writer.writelines(file) 2318 2319 return True
2320 2321 #=============================================================================== 2322 # write_fks_info_file 2323 #===============================================================================
2324 - def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
2325 """Writes the content of nFKSconfigs.inc, which just gives the 2326 total FKS dirs as a parameter. 2327 nFKSconfigs is always >=1 (use a fake configuration for LOonly)""" 2328 replace_dict = {} 2329 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1) 2330 content = \ 2331 """ INTEGER FKS_CONFIGS 2332 PARAMETER (FKS_CONFIGS=%(nconfs)d) 2333 2334 """ % replace_dict 2335 2336 writer.writelines(content)
2337 2338 2339 #=============================================================================== 2340 # write_fks_info_file 2341 #===============================================================================
2342 - def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
2343 """Writes the content of fks_info.inc, which lists the informations on the 2344 possible splittings of the born ME. 2345 nconfs is always >=1 (use a fake configuration for LOonly). 2346 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and 2347 the last colored particle as j_fks.""" 2348 2349 replace_dict = {} 2350 fks_info_list = fksborn.get_fks_info_list() 2351 replace_dict['nconfs'] = max(len(fks_info_list), 1) 2352 2353 # this is for processes with 'real' or 'all' as NLO mode 2354 if len(fks_info_list) > 0: 2355 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \ 2356 for info in fks_info_list]) 2357 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \ 2358 for info in fks_info_list]) 2359 2360 col_lines = [] 2361 pdg_lines = [] 2362 charge_lines = [] 2363 fks_j_from_i_lines = [] 2364 for i, info in enumerate(fks_info_list): 2365 col_lines.append( \ 2366 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2367 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) )) 2368 pdg_lines.append( \ 2369 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2370 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs']))) 2371 charge_lines.append(\ 2372 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\ 2373 % (i + 1, ', '.join('%19.15fd0' % charg\ 2374 for charg in fksborn.real_processes[info['n_me']-1].charges) )) 2375 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\ 2376 i + 1)) 2377 else: 2378 # this is for 'LOonly', generate a fake FKS configuration with 2379 # - i_fks = nexternal, pdg type = -21 and color =8 2380 # - j_fks = the last colored particle 2381 bornproc = fksborn.born_matrix_element.get('processes')[0] 2382 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21] 2383 colors = [l.get('color') for l in bornproc.get('legs')] + [8] 2384 charges = [0.] 
* len(colors) 2385 2386 fks_i = len(colors) 2387 # use the last colored particle if it exists, or 2388 # just the last 2389 fks_j=1 2390 for cpos, col in enumerate(colors[:-1]): 2391 if col != 1: 2392 fks_j = cpos+1 2393 2394 fks_i_values = str(fks_i) 2395 fks_j_values = str(fks_j) 2396 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2397 % ', '.join([str(col) for col in colors])] 2398 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2399 % ', '.join([str(pdg) for pdg in pdgs])] 2400 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 2401 % ', '.join('%19.15fd0' % charg for charg in charges)] 2402 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \ 2403 % (fks_i, fks_j)] 2404 2405 2406 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values 2407 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values 2408 replace_dict['col_lines'] = '\n'.join(col_lines) 2409 replace_dict['pdg_lines'] = '\n'.join(pdg_lines) 2410 replace_dict['charge_lines'] = '\n'.join(charge_lines) 2411 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines) 2412 2413 content = \ 2414 """ INTEGER IPOS, JPOS 2415 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d) 2416 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL) 2417 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL) 2418 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL) 2419 2420 %(fks_i_line)s 2421 %(fks_j_line)s 2422 2423 %(fks_j_from_i_lines)s 2424 2425 C 2426 C Particle type: 2427 C octet = 8, triplet = 3, singlet = 1 2428 %(col_lines)s 2429 2430 C 2431 C Particle type according to PDG: 2432 C 2433 %(pdg_lines)s 2434 2435 C 2436 C Particle charge: 2437 C charge is set 0. with QCD corrections, which is irrelevant 2438 %(charge_lines)s 2439 """ % replace_dict 2440 if not isinstance(writer, writers.FortranWriter): 2441 raise writers.FortranWriter.FortranWriterError(\ 2442 "writer not FortranWriter") 2443 # Set lowercase/uppercase Fortran code 2444 writers.FortranWriter.downcase = False 2445 2446 writer.writelines(content) 2447 2448 return True
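The LOonly branch above only needs to pick the fake i_fks/j_fks pair; the toy sketch below shows that choice for a hypothetical born process u u~ > e+ e- (colour representations 3, 3, 1, 1) with the fake antigluon appended.

colors = [3, 3, 1, 1] + [8]   # born colours plus the fake antigluon (colour octet)

fks_i = len(colors)           # the appended parton is always i_fks
fks_j = 1
for cpos, col in enumerate(colors[:-1]):
    if col != 1:
        fks_j = cpos + 1      # ends up on the last coloured born particle

print(fks_i, fks_j)           # 5 2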
2449 2450 2451 #=============================================================================== 2452 # write_matrix_element_fks 2453 #=============================================================================== 2454 #test written
2455 - def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
2456 """Export a matrix element to a matrix.f file in MG4 madevent format""" 2457 2458 if not matrix_element.get('processes') or \ 2459 not matrix_element.get('diagrams'): 2460 return 0,0 2461 2462 if not isinstance(writer, writers.FortranWriter): 2463 raise writers.FortranWriter.FortranWriterError(\ 2464 "writer not FortranWriter") 2465 # Set lowercase/uppercase Fortran code 2466 writers.FortranWriter.downcase = False 2467 2468 replace_dict = {} 2469 replace_dict['N_me'] = n 2470 2471 # Extract version number and date from VERSION file 2472 info_lines = self.get_mg5_info_lines() 2473 replace_dict['info_lines'] = info_lines 2474 2475 # Extract process info lines 2476 process_lines = self.get_process_info_lines(matrix_element) 2477 replace_dict['process_lines'] = process_lines 2478 2479 # Extract ncomb 2480 ncomb = matrix_element.get_helicity_combinations() 2481 replace_dict['ncomb'] = ncomb 2482 2483 # Extract helicity lines 2484 helicity_lines = self.get_helicity_lines(matrix_element) 2485 replace_dict['helicity_lines'] = helicity_lines 2486 2487 # Extract IC line 2488 ic_line = self.get_ic_line(matrix_element) 2489 replace_dict['ic_line'] = ic_line 2490 2491 # Extract overall denominator 2492 # Averaging initial state color, spin, and identical FS particles 2493 den_factor_line = self.get_den_factor_line(matrix_element) 2494 replace_dict['den_factor_line'] = den_factor_line 2495 2496 # Extract ngraphs 2497 ngraphs = matrix_element.get_number_of_amplitudes() 2498 replace_dict['ngraphs'] = ngraphs 2499 2500 # Extract ncolor 2501 ncolor = max(1, len(matrix_element.get('color_basis'))) 2502 replace_dict['ncolor'] = ncolor 2503 2504 # Extract color data lines 2505 color_data_lines = self.get_color_data_lines(matrix_element) 2506 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2507 2508 # Extract helas calls 2509 helas_calls = fortran_model.get_matrix_element_calls(\ 2510 matrix_element) 2511 replace_dict['helas_calls'] = "\n".join(helas_calls) 2512 2513 # Extract nwavefuncs (important to place after get_matrix_element_calls 2514 # so that 'me_id' is set) 2515 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2516 replace_dict['nwavefuncs'] = nwavefuncs 2517 2518 # Extract amp2 lines 2519 amp2_lines = self.get_amp2_lines(matrix_element) 2520 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2521 2522 # Set the size of Wavefunction 2523 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2524 replace_dict['wavefunctionsize'] = 20 2525 else: 2526 replace_dict['wavefunctionsize'] = 8 2527 2528 # Extract JAMP lines 2529 jamp_lines = self.get_JAMP_lines(matrix_element) 2530 2531 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2532 2533 realfile = open(os.path.join(_file_path, \ 2534 'iolibs/template_files/realmatrix_fks.inc')).read() 2535 2536 realfile = realfile % replace_dict 2537 2538 # Write the file 2539 writer.writelines(realfile) 2540 2541 return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2542 2543 2544 #=============================================================================== 2545 # write_pdf_file 2546 #===============================================================================
2547 - def write_pdf_file(self, writer, matrix_element, n, fortran_model):
2548 #test written 2549 """Write the auto_dsig.f file for MadFKS, which contains 2550 pdf call information""" 2551 2552 if not matrix_element.get('processes') or \ 2553 not matrix_element.get('diagrams'): 2554 return 0 2555 2556 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2557 2558 if ninitial < 1 or ninitial > 2: 2559 raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""") 2560 2561 replace_dict = {} 2562 2563 replace_dict['N_me'] = n 2564 2565 # Extract version number and date from VERSION file 2566 info_lines = self.get_mg5_info_lines() 2567 replace_dict['info_lines'] = info_lines 2568 2569 # Extract process info lines 2570 process_lines = self.get_process_info_lines(matrix_element) 2571 replace_dict['process_lines'] = process_lines 2572 2573 pdf_vars, pdf_data, pdf_lines = \ 2574 self.get_pdf_lines_mir(matrix_element, ninitial, False, False) 2575 replace_dict['pdf_vars'] = pdf_vars 2576 replace_dict['pdf_data'] = pdf_data 2577 replace_dict['pdf_lines'] = pdf_lines 2578 2579 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \ 2580 self.get_pdf_lines_mir(matrix_element, ninitial, False, True) 2581 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr 2582 2583 file = open(os.path.join(_file_path, \ 2584 'iolibs/template_files/parton_lum_n_fks.inc')).read() 2585 file = file % replace_dict 2586 2587 # Write the file 2588 writer.writelines(file)
2589 2590 2591 2592 #=============================================================================== 2593 # write_coloramps_file 2594 #=============================================================================== 2595 #test written
2596 - def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
2597 """Write the coloramps.inc file for MadEvent""" 2598 2599 lines = [] 2600 lines.append( "logical icolamp(%d,%d,1)" % \ 2601 (max(len(list(matrix_element.get('color_basis').keys())), 1), 2602 len(mapconfigs))) 2603 2604 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1) 2605 2606 # Write the file 2607 writer.writelines(lines) 2608 2609 return True
2610 2611 2612 #=============================================================================== 2613 # write_leshouche_file 2614 #=============================================================================== 2615 #test written
2616 - def write_leshouche_file(self, writer, matrix_element, fortran_model):
2617 """Write the leshouche.inc file for MG4""" 2618 2619 # Extract number of external particles 2620 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2621 2622 lines = [] 2623 for iproc, proc in enumerate(matrix_element.get('processes')): 2624 legs = proc.get_legs_with_decays() 2625 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \ 2626 (iproc + 1, nexternal, 2627 ",".join([str(l.get('id')) for l in legs]))) 2628 for i in [1, 2]: 2629 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2630 (i, iproc + 1, nexternal, 2631 ",".join([ "%3r" % 0 ] * ninitial + \ 2632 [ "%3r" % i ] * (nexternal - ninitial)))) 2633 2634 # Here goes the color connections corresponding to the JAMPs 2635 # Only one output, for the first subproc! 2636 if iproc == 0: 2637 # If no color basis, just output trivial color flow 2638 if not matrix_element.get('color_basis'): 2639 for i in [1, 2]: 2640 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \ 2641 (i, nexternal, 2642 ",".join([ "%3r" % 0 ] * nexternal))) 2643 color_flow_list = [] 2644 2645 else: 2646 # First build a color representation dictionnary 2647 repr_dict = {} 2648 for l in legs: 2649 repr_dict[l.get('number')] = \ 2650 proc.get('model').get_particle(l.get('id')).get_color()\ 2651 * (-1)**(1+l.get('state')) 2652 # Get the list of color flows 2653 color_flow_list = \ 2654 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2655 ninitial) 2656 # And output them properly 2657 for cf_i, color_flow_dict in enumerate(color_flow_list): 2658 for i in [0, 1]: 2659 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2660 (i + 1, cf_i + 1, nexternal, 2661 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2662 for l in legs]))) 2663 2664 # Write the file 2665 writer.writelines(lines) 2666 2667 return len(color_flow_list)
2668 2669 2670 #=============================================================================== 2671 # write_configs_file 2672 #=============================================================================== 2673 #test_written
2674 - def write_configs_file(self, writer, matrix_element, fortran_model):
2675 """Write the configs.inc file for MadEvent""" 2676 2677 # Extract number of external particles 2678 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2679 lines = [] 2680 2681 iconfig = 0 2682 2683 s_and_t_channels = [] 2684 mapconfigs = [] 2685 2686 model = matrix_element.get('processes')[0].get('model') 2687 # new_pdg = model.get_first_non_pdg() 2688 2689 base_diagrams = matrix_element.get('base_amplitude').get('diagrams') 2690 model = matrix_element.get('base_amplitude').get('process').get('model') 2691 minvert = min([max([len(vert.get('legs')) for vert in \ 2692 diag.get('vertices')]) for diag in base_diagrams]) 2693 2694 for idiag, diag in enumerate(base_diagrams): 2695 if any([len(vert.get('legs')) > minvert for vert in 2696 diag.get('vertices')]): 2697 # Only 3-vertices allowed in configs.inc 2698 continue 2699 iconfig = iconfig + 1 2700 helas_diag = matrix_element.get('diagrams')[idiag] 2701 mapconfigs.append(helas_diag.get('number')) 2702 lines.append("# Diagram %d, Amplitude %d" % \ 2703 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number'])) 2704 # Correspondance between the config and the amplitudes 2705 lines.append("data mapconfig(%4d)/%4d/" % (iconfig, 2706 helas_diag.get('amplitudes')[0]['number'])) 2707 2708 # Need to reorganize the topology so that we start with all 2709 # final state external particles and work our way inwards 2710 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 2711 get_s_and_t_channels(ninitial, model, 990) 2712 2713 s_and_t_channels.append([schannels, tchannels]) 2714 2715 # Write out propagators for s-channel and t-channel vertices 2716 allchannels = schannels 2717 if len(tchannels) > 1: 2718 # Write out tchannels only if there are any non-trivial ones 2719 allchannels = schannels + tchannels 2720 2721 for vert in allchannels: 2722 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2723 last_leg = vert.get('legs')[-1] 2724 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \ 2725 (last_leg.get('number'), iconfig, len(daughters), 2726 ",".join(["%3d" % d for d in daughters]))) 2727 if vert in schannels: 2728 lines.append("data sprop(%4d,%4d)/%8d/" % \ 2729 (last_leg.get('number'), iconfig, 2730 last_leg.get('id'))) 2731 elif vert in tchannels[:-1]: 2732 lines.append("data tprid(%4d,%4d)/%8d/" % \ 2733 (last_leg.get('number'), iconfig, 2734 abs(last_leg.get('id')))) 2735 2736 # Write out number of configs 2737 lines.append("# Number of configs") 2738 lines.append("data mapconfig(0)/%4d/" % iconfig) 2739 2740 # Write the file 2741 writer.writelines(lines) 2742 2743 return iconfig, mapconfigs, s_and_t_channels
2744 2745 2746 #=============================================================================== 2747 # write_decayBW_file 2748 #=============================================================================== 2749 #test written
2750 - def write_decayBW_file(self, writer, s_and_t_channels):
2751 """Write the decayBW.inc file for MadEvent""" 2752 2753 lines = [] 2754 2755 booldict = {False: ".false.", True: ".false."} 2756 ####Changed by MZ 2011-11-23!!!! 2757 2758 for iconf, config in enumerate(s_and_t_channels): 2759 schannels = config[0] 2760 for vertex in schannels: 2761 # For the resulting leg, pick out whether it comes from 2762 # decay or not, as given by the from_group flag 2763 leg = vertex.get('legs')[-1] 2764 lines.append("data gForceBW(%d,%d)/%s/" % \ 2765 (leg.get('number'), iconf + 1, 2766 booldict[leg.get('from_group')])) 2767 2768 # Write the file 2769 writer.writelines(lines) 2770 2771 return True
2772 2773 2774 #=============================================================================== 2775 # write_dname_file 2776 #===============================================================================
2777 - def write_dname_file(self, writer, matrix_element, fortran_model):
2778 """Write the dname.mg file for MG4""" 2779 2780 line = "DIRNAME=P%s" % \ 2781 matrix_element.get('processes')[0].shell_string() 2782 2783 # Write the file 2784 writer.write(line + "\n") 2785 2786 return True
2787 2788 2789 #=============================================================================== 2790 # write_iproc_file 2791 #===============================================================================
2792 - def write_iproc_file(self, writer, me_number):
2793 """Write the iproc.dat file for MG4""" 2794 2795 line = "%d" % (me_number + 1) 2796 2797 # Write the file 2798 for line_to_write in writer.write_line(line): 2799 writer.write(line_to_write) 2800 return True
2801 2802 2803 #=============================================================================== 2804 # Helper functions 2805 #=============================================================================== 2806 2807 2808 #=============================================================================== 2809 # get_fks_j_from_i_lines 2810 #=============================================================================== 2811
2812 - def get_fks_j_from_i_lines(self, me, i = 0): #test written
2813 """generate the lines for fks.inc describing initializating the 2814 fks_j_from_i array""" 2815 lines = [] 2816 if not me.isfinite: 2817 for ii, js in me.fks_j_from_i.items(): 2818 if js: 2819 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2820 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js]))) 2821 else: 2822 lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2823 % (2, 1, 1, '1')) 2824 lines.append('') 2825 2826 return lines 2827 2828 2829 #=============================================================================== 2830 # get_leshouche_lines 2831 #===============================================================================
2832 - def get_leshouche_lines(self, matrix_element, ime):
2833 #test written 2834 """Write the leshouche.inc file for MG4""" 2835 2836 # Extract number of external particles 2837 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2838 2839 lines = [] 2840 for iproc, proc in enumerate(matrix_element.get('processes')): 2841 legs = proc.get_legs_with_decays() 2842 lines.append("I %4d %4d %s" % \ 2843 (ime, iproc + 1, 2844 " ".join([str(l.get('id')) for l in legs]))) 2845 for i in [1, 2]: 2846 lines.append("M %4d %4d %4d %s" % \ 2847 (ime, i, iproc + 1, 2848 " ".join([ "%3d" % 0 ] * ninitial + \ 2849 [ "%3d" % i ] * (nexternal - ninitial)))) 2850 2851 # Here goes the color connections corresponding to the JAMPs 2852 # Only one output, for the first subproc! 2853 if iproc == 0: 2854 # If no color basis, just output trivial color flow 2855 if not matrix_element.get('color_basis'): 2856 for i in [1, 2]: 2857 lines.append("C %4d %4d 1 %s" % \ 2858 (ime, i, 2859 " ".join([ "%3d" % 0 ] * nexternal))) 2860 color_flow_list = [] 2861 nflow = 1 2862 2863 else: 2864 # First build a color representation dictionnary 2865 repr_dict = {} 2866 for l in legs: 2867 repr_dict[l.get('number')] = \ 2868 proc.get('model').get_particle(l.get('id')).get_color()\ 2869 * (-1)**(1+l.get('state')) 2870 # Get the list of color flows 2871 color_flow_list = \ 2872 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2873 ninitial) 2874 # And output them properly 2875 for cf_i, color_flow_dict in enumerate(color_flow_list): 2876 for i in [0, 1]: 2877 lines.append("C %4d %4d %4d %s" % \ 2878 (ime, i + 1, cf_i + 1, 2879 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2880 for l in legs]))) 2881 2882 nflow = len(color_flow_list) 2883 2884 nproc = len(matrix_element.get('processes')) 2885 2886 return lines, nproc, nflow
2887 2888
2889 - def get_leshouche_lines_dummy(self, matrix_element, ime):
2890 #test written 2891 """As get_leshouche_lines, but for 'fake' real emission processes (LOonly 2892 In this case, write born color structure times ij -> i,j splitting) 2893 """ 2894 2895 bornproc = matrix_element.get('processes')[0] 2896 colors = [l.get('color') for l in bornproc.get('legs')] 2897 2898 fks_i = len(colors) 2899 # use the last colored particle if it exists, or 2900 # just the last 2901 fks_j=1 2902 for cpos, col in enumerate(colors): 2903 if col != 1: 2904 fks_j = cpos+1 2905 2906 # Extract number of external particles 2907 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2908 nexternal+=1 # remember, in this case matrix_element is born 2909 2910 lines = [] 2911 for iproc, proc in enumerate(matrix_element.get('processes')): 2912 # add the fake extra leg 2913 legs = proc.get_legs_with_decays() + \ 2914 [fks_common.FKSLeg({'id': -21, 2915 'number': nexternal, 2916 'state': True, 2917 'fks': 'i', 2918 'color': 8, 2919 'charge': 0., 2920 'massless': True, 2921 'spin': 3, 2922 'is_part': True, 2923 'self_antipart': True})] 2924 2925 lines.append("I %4d %4d %s" % \ 2926 (ime, iproc + 1, 2927 " ".join([str(l.get('id')) for l in legs]))) 2928 for i in [1, 2]: 2929 lines.append("M %4d %4d %4d %s" % \ 2930 (ime, i, iproc + 1, 2931 " ".join([ "%3d" % 0 ] * ninitial + \ 2932 [ "%3d" % i ] * (nexternal - ninitial)))) 2933 2934 # Here goes the color connections corresponding to the JAMPs 2935 # Only one output, for the first subproc! 2936 if iproc == 0: 2937 # If no color basis, just output trivial color flow 2938 if not matrix_element.get('color_basis'): 2939 for i in [1, 2]: 2940 lines.append("C %4d %4d 1 %s" % \ 2941 (ime, i, 2942 " ".join([ "%3d" % 0 ] * nexternal))) 2943 color_flow_list = [] 2944 nflow = 1 2945 2946 else: 2947 # in this case the last particle (-21) has two color indices 2948 # and it has to be emitted by j_fks 2949 # First build a color representation dictionnary 2950 repr_dict = {} 2951 for l in legs[:-1]: 2952 repr_dict[l.get('number')] = \ 2953 proc.get('model').get_particle(l.get('id')).get_color()\ 2954 * (-1)**(1+l.get('state')) 2955 # Get the list of color flows 2956 color_flow_list = \ 2957 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2958 ninitial) 2959 # And output them properly 2960 for cf_i, color_flow_dict in enumerate(color_flow_list): 2961 # we have to add the extra leg (-21), linked to the j_fks leg 2962 # first, find the maximum color label 2963 maxicol = max(sum(list(color_flow_dict.values()), [])) 2964 #then, replace the color labels 2965 if color_flow_dict[fks_j][0] == 0: 2966 anti = True 2967 icol_j = color_flow_dict[fks_j][1] 2968 else: 2969 anti = False 2970 icol_j = color_flow_dict[fks_j][0] 2971 2972 if anti: 2973 color_flow_dict[nexternal] = (maxicol + 1, color_flow_dict[fks_j][1]) 2974 color_flow_dict[fks_j][1] = maxicol + 1 2975 else: 2976 color_flow_dict[nexternal] = (color_flow_dict[fks_j][0], maxicol + 1) 2977 color_flow_dict[fks_j][0] = maxicol + 1 2978 2979 for i in [0, 1]: 2980 lines.append("C %4d %4d %4d %s" % \ 2981 (ime, i + 1, cf_i + 1, 2982 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2983 for l in legs]))) 2984 2985 nflow = len(color_flow_list) 2986 2987 nproc = len(matrix_element.get('processes')) 2988 2989 return lines, nproc, nflow
2990 2991 2992 #=============================================================================== 2993 # get_den_factor_lines 2994 #===============================================================================
2995 - def get_den_factor_lines(self, fks_born):
2996 """returns the lines with the information on the denominator keeping care 2997 of the identical particle factors in the various real emissions""" 2998 2999 lines = [] 3000 info_list = fks_born.get_fks_info_list() 3001 if info_list: 3002 # if the reals have been generated, fill with the corresponding average factor 3003 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 3004 lines.append('DATA IDEN_VALUES /' + \ 3005 ', '.join(['%d' % ( 3006 fks_born.born_matrix_element.get_denominator_factor() ) \ 3007 for info in info_list]) + '/') 3008 else: 3009 # otherwise use the born 3010 lines.append('INTEGER IDEN_VALUES(1)') 3011 lines.append('DATA IDEN_VALUES / %d /' \ 3012 % fks_born.born_matrix_element.get_denominator_factor()) 3013 3014 return lines
3015 3016 3017 #=============================================================================== 3018 # get_ij_lines 3019 #===============================================================================
3020 - def get_ij_lines(self, fks_born):
3021 """returns the lines with the information on the particle number of the born 3022 that splits""" 3023 info_list = fks_born.get_fks_info_list() 3024 lines = [] 3025 if info_list: 3026 # if the reals have been generated, fill with the corresponding value of ij if 3027 # ij is massless, or with 0 if ij is massive (no collinear singularity) 3028 ij_list = [info['fks_info']['ij']if \ 3029 fks_born.born_matrix_element['processes'][0]['legs'][info['fks_info']['ij']-1]['massless'] \ 3030 else 0 for info in info_list] 3031 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 3032 lines.append('DATA IJ_VALUES /' + ', '.join(['%d' % ij for ij in ij_list]) + '/') 3033 else: 3034 #otherwise just put the first leg 3035 lines.append('INTEGER IJ_VALUES(1)') 3036 lines.append('DATA IJ_VALUES / 1 /') 3037 3038 return lines
3039 3040
3041 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\ 3042 mirror = False): #test written
3043 """Generate the PDF lines for the auto_dsig.f file""" 3044 3045 processes = matrix_element.get('processes') 3046 model = processes[0].get('model') 3047 3048 pdf_definition_lines = "" 3049 pdf_data_lines = "" 3050 pdf_lines = "" 3051 3052 if ninitial == 1: 3053 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 3054 for i, proc in enumerate(processes): 3055 process_line = proc.base_string() 3056 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 3057 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n" 3058 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 3059 else: 3060 # Pick out all initial state particles for the two beams 3061 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 3062 p in processes]))), 3063 sorted(list(set([p.get_initial_pdg(2) for \ 3064 p in processes])))] 3065 3066 # Prepare all variable names 3067 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 3068 sum(initial_states,[])]) 3069 for key,val in pdf_codes.items(): 3070 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 3071 3072 # Set conversion from PDG code to number used in PDF calls 3073 pdgtopdf = {21: 0, 22: 7} 3074 # Fill in missing entries of pdgtopdf 3075 for pdg in sum(initial_states,[]): 3076 if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()): 3077 pdgtopdf[pdg] = pdg 3078 elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()): 3079 # If any particle has pdg code 7, we need to use something else 3080 pdgtopdf[pdg] = 6000000 + pdg 3081 3082 # Get PDF variable declarations for all initial states 3083 for i in [0,1]: 3084 pdf_definition_lines += "DOUBLE PRECISION " + \ 3085 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 3086 for pdg in \ 3087 initial_states[i]]) + \ 3088 "\n" 3089 3090 # Get PDF data lines for all initial states 3091 for i in [0,1]: 3092 pdf_data_lines += "DATA " + \ 3093 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 3094 for pdg in initial_states[i]]) + \ 3095 "/%d*1D0/" % len(initial_states[i]) + \ 3096 "\n" 3097 3098 # Get PDF values for the different initial states 3099 for i, init_states in enumerate(initial_states): 3100 if not mirror: 3101 ibeam = i + 1 3102 else: 3103 ibeam = 2 - i 3104 if subproc_group: 3105 pdf_lines = pdf_lines + \ 3106 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 3107 % (ibeam, ibeam) 3108 else: 3109 pdf_lines = pdf_lines + \ 3110 "IF (ABS(LPP(%d)) .GE. 
1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 3111 % (ibeam, ibeam) 3112 3113 for initial_state in init_states: 3114 if initial_state in list(pdf_codes.keys()): 3115 if subproc_group: 3116 if abs(pdgtopdf[initial_state]) <= 7: 3117 pdf_lines = pdf_lines + \ 3118 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 3119 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 3120 (pdf_codes[initial_state], 3121 i + 1, ibeam, pdgtopdf[initial_state], 3122 ibeam, ibeam) 3123 else: 3124 # setting other partons flavours outside quark, gluon, photon to be 0d0 3125 pdf_lines = pdf_lines + \ 3126 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 3127 "%s%d=0d0\n") % \ 3128 (pdf_codes[initial_state],i + 1) 3129 else: 3130 if abs(pdgtopdf[initial_state]) <= 7: 3131 pdf_lines = pdf_lines + \ 3132 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 3133 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 3134 (pdf_codes[initial_state], 3135 i + 1, ibeam, pdgtopdf[initial_state], 3136 ibeam, ibeam) 3137 else: 3138 # setting other partons flavours outside quark, gluon, photon to be 0d0 3139 pdf_lines = pdf_lines + \ 3140 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \ 3141 "%s%d=0d0\n") % \ 3142 (pdf_codes[initial_state],i + 1) 3143 3144 pdf_lines = pdf_lines + "ENDIF\n" 3145 3146 # Add up PDFs for the different initial state particles 3147 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 3148 for proc in processes: 3149 process_line = proc.base_string() 3150 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 3151 pdf_lines = pdf_lines + "\nPD(IPROC) = " 3152 for ibeam in [1, 2]: 3153 initial_state = proc.get_initial_pdg(ibeam) 3154 if initial_state in list(pdf_codes.keys()): 3155 pdf_lines = pdf_lines + "%s%d*" % \ 3156 (pdf_codes[initial_state], ibeam) 3157 else: 3158 pdf_lines = pdf_lines + "1d0*" 3159 # Remove last "*" from pdf_lines 3160 pdf_lines = pdf_lines[:-1] + "\n" 3161 3162 # Remove last line break from pdf_lines 3163 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1] 3164 3165 3166 #test written
3167 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
3168 """Return the color matrix definition lines for the given color_matrix. Split 3169 rows in chunks of size n.""" 3170 3171 if not color_matrix: 3172 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 3173 else: 3174 ret_list = [] 3175 my_cs = color.ColorString() 3176 for index, denominator in \ 3177 enumerate(color_matrix.get_line_denominators()): 3178 # First write the common denominator for this color matrix line 3179 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 3180 # Then write the numerators for the matrix elements 3181 num_list = color_matrix.get_line_numerators(index, denominator) 3182 for k in range(0, len(num_list), n): 3183 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 3184 (index + 1, k + 1, min(k + n, len(num_list)), 3185 ','.join(["%5r" % int(i) for i in num_list[k:k + n]]))) 3186 return ret_list
3187 3188 #=========================================================================== 3189 # write_maxamps_file 3190 #===========================================================================
3191 - def write_maxamps_file(self, writer, maxamps, maxflows, 3192 maxproc,maxsproc):
3193 """Write the maxamps.inc file for MG4.""" 3194 3195 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 3196 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 3197 (maxamps, maxflows) 3198 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 3199 (maxproc, maxsproc) 3200 3201 # Write the file 3202 writer.writelines(file) 3203 3204 return True
3205 3206 #=============================================================================== 3207 # write_ncombs_file 3208 #===============================================================================
3209 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
3210          # #test written
3211          """Write the ncombs.inc file for MadEvent."""
3212 
3213          # Extract number of external particles
3214          (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3215 
3216          # n_max_cl (used for clustering) is 2^(nexternal+1)
3217          file = " integer n_max_cl\n"
3218          file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1))
3219 
3220          # Write the file
3221          writer.writelines(file)
3222 
3223          return True
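# --- Illustrative sketch (editor's note, not part of export_fks.py) ---
# For a hypothetical 2->3 process (nexternal = 5) the parameter written to
# ncombs.inc above is n_max_cl = 2**(nexternal+1) = 64.
nexternal = 5
print(" integer n_max_cl\nparameter (n_max_cl=%d)" % (2 ** (nexternal + 1)))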
3224 3225 #=========================================================================== 3226 # write_config_subproc_map_file 3227 #===========================================================================
3228 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3229 """Write a dummy config_subproc.inc file for MadEvent""" 3230 3231 lines = [] 3232 3233 for iconfig in range(len(s_and_t_channels)): 3234 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3235 (iconfig + 1)) 3236 3237 # Write the file 3238 writer.writelines(lines) 3239 3240 return True
3241 3242 #=========================================================================== 3243 # write_colors_file 3244 #===========================================================================
3245 - def write_colors_file(self, writer, matrix_element):
3246 """Write the get_color.f file for MadEvent, which returns color 3247 for all particles used in the matrix element.""" 3248 3249 try: 3250 matrix_elements=matrix_element.real_processes[0].matrix_element 3251 except IndexError: 3252 matrix_elements=[matrix_element.born_matrix_element] 3253 3254 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 3255 matrix_elements = [matrix_elements] 3256 3257 model = matrix_elements[0].get('processes')[0].get('model') 3258 3259 # We need the both particle and antiparticle wf_ids, since the identity 3260 # depends on the direction of the wf. 3261 # loop on the real emissions 3262 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3263 for wf in d.get('wavefunctions')],[]) \ 3264 for d in me.get('diagrams')],[]) \ 3265 for me in [real_proc.matrix_element]],[])\ 3266 for real_proc in matrix_element.real_processes],[])) 3267 # and also on the born 3268 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3269 for wf in d.get('wavefunctions')],[]) \ 3270 for d in matrix_element.born_matrix_element.get('diagrams')],[]))) 3271 3272 # loop on the real emissions 3273 leg_ids = set(sum([sum([sum([[l.get('id') for l in \ 3274 p.get_legs_with_decays()] for p in \ 3275 me.get('processes')], []) for me in \ 3276 [real_proc.matrix_element]], []) for real_proc in \ 3277 matrix_element.real_processes],[])) 3278 # and also on the born 3279 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \ 3280 p.get_legs_with_decays()] for p in \ 3281 matrix_element.born_matrix_element.get('processes')], []))) 3282 particle_ids = sorted(list(wf_ids.union(leg_ids))) 3283 3284 lines = """function get_color(ipdg) 3285 implicit none 3286 integer get_color, ipdg 3287 3288 if(ipdg.eq.%d)then 3289 get_color=%d 3290 return 3291 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3292 3293 for part_id in particle_ids[1:]: 3294 lines += """else if(ipdg.eq.%d)then 3295 get_color=%d 3296 return 3297 """ % (part_id, model.get_particle(part_id).get_color()) 3298 # Dummy particle for multiparticle vertices with pdg given by 3299 # first code not in the model 3300 lines += """else if(ipdg.eq.%d)then 3301 c This is dummy particle used in multiparticle vertices 3302 get_color=2 3303 return 3304 """ % model.get_first_non_pdg() 3305 lines += """else 3306 write(*,*)'Error: No color given for pdg ',ipdg 3307 get_color=0 3308 return 3309 endif 3310 end 3311 """ 3312 3313 # Write the file 3314 writer.writelines(lines) 3315 3316 return True
3317 3318 #=============================================================================== 3319 # write_props_file 3320 #=============================================================================== 3321 #test_written
3322 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3323 """Write the props.inc file for MadEvent. Needs input from 3324 write_configs_file. With respect to the parent routine, it has some 3325 more specific formats that allow the props.inc file to be read by the 3326 link program""" 3327 3328 lines = [] 3329 3330 particle_dict = matrix_element.get('processes')[0].get('model').\ 3331 get('particle_dict') 3332 3333 for iconf, configs in enumerate(s_and_t_channels): 3334 for vertex in configs[0] + configs[1][:-1]: 3335 leg = vertex.get('legs')[-1] 3336 if leg.get('id') not in particle_dict: 3337 # Fake propagator used in multiparticle vertices 3338 mass = 'zero' 3339 width = 'zero' 3340 pow_part = 0 3341 else: 3342 particle = particle_dict[leg.get('id')] 3343 # Get mass 3344 if particle.get('mass').lower() == 'zero': 3345 mass = particle.get('mass') 3346 else: 3347 mass = "abs(%s)" % particle.get('mass') 3348 # Get width 3349 if particle.get('width').lower() == 'zero': 3350 width = particle.get('width') 3351 else: 3352 width = "abs(%s)" % particle.get('width') 3353 3354 pow_part = 1 + int(particle.is_boson()) 3355 3356 lines.append("pmass(%3d,%4d) = %s" % \ 3357 (leg.get('number'), iconf + 1, mass)) 3358 lines.append("pwidth(%3d,%4d) = %s" % \ 3359 (leg.get('number'), iconf + 1, width)) 3360 lines.append("pow(%3d,%4d) = %d" % \ 3361 (leg.get('number'), iconf + 1, pow_part)) 3362 3363 # Write the file 3364 writer.writelines(lines) 3365 3366 return True
3367 3368 3369 #=========================================================================== 3370 # write_subproc 3371 #===========================================================================
3372 - def write_subproc(self, writer, subprocdir):
3373 """Append this subprocess to the subproc.mg file for MG4""" 3374 3375 # Write line to file 3376 writer.write(subprocdir + "\n") 3377 3378 return True
3379 3380 3381 3382 3383 3384 #================================================================================= 3385 # Class for using the optimized Loop process 3386 #=================================================================================
3387 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 3388 ProcessExporterFortranFKS):
3389 """Class to take care of exporting a set of matrix elements to 3390 Fortran (v4) format.""" 3391 3392
3393 - def finalize(self, *args, **opts):
3395 #export_v4.ProcessExporterFortranSA.finalize(self, *args, **opts) 3396 3397 #=============================================================================== 3398 # copy the Template in a new directory. 3399 #===============================================================================
3400 - def copy_fkstemplate(self):
3401 """create the directory run_name as a copy of the MadEvent 3402 Template, and clean the directory 3403 For now it is just the same as copy_v4template, but it will be modified 3404 """ 3405 mgme_dir = self.mgme_dir 3406 dir_path = self.dir_path 3407 clean =self.opt['clean'] 3408 3409 #First copy the full template tree if dir_path doesn't exit 3410 if not os.path.isdir(dir_path): 3411 if not mgme_dir: 3412 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.") 3413 logger.info('initialize a new directory: %s' % \ 3414 os.path.basename(dir_path)) 3415 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 3416 # misc.copytree since dir_path already exists 3417 misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'), 3418 dir_path) 3419 # Copy plot_card 3420 for card in ['plot_card']: 3421 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 3422 try: 3423 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 3424 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 3425 except IOError: 3426 logger.warning("Failed to copy " + card + ".dat to default") 3427 3428 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 3429 if not mgme_dir: 3430 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.") 3431 try: 3432 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 3433 except IOError: 3434 MG5_version = misc.get_pkg_info() 3435 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 3436 "5." + MG5_version['version']) 3437 3438 #Ensure that the Template is clean 3439 if clean: 3440 logger.info('remove old information in %s' % os.path.basename(dir_path)) 3441 if 'MADGRAPH_BASE' in os.environ: 3442 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 3443 '--web'], cwd=dir_path) 3444 else: 3445 try: 3446 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 3447 cwd=dir_path) 3448 except Exception as why: 3449 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 3450 % (os.path.basename(dir_path),why)) 3451 #Write version info 3452 MG_version = misc.get_pkg_info() 3453 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 3454 MG_version['version']) 3455 3456 # We must link the CutTools to the Library folder of the active Template 3457 self.link_CutTools(dir_path) 3458 # We must link the TIR to the Library folder of the active Template 3459 link_tir_libs=[] 3460 tir_libs=[] 3461 tir_include=[] 3462 for tir in self.all_tir: 3463 tir_dir="%s_dir"%tir 3464 libpath=getattr(self,tir_dir) 3465 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'), 3466 libpath,"lib%s.a"%tir,tir_name=tir) 3467 setattr(self,tir_dir,libpath) 3468 if libpath != "": 3469 if tir in ['pjfry','ninja','golem', 'samurai','collier']: 3470 # We should link dynamically when possible, so we use the original 3471 # location of these libraries. 3472 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir)) 3473 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir)) 3474 # For Ninja, we must also link against OneLoop. 3475 if tir in ['ninja']: 3476 if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext)) 3477 for ext in ['a','dylib','so']): 3478 raise MadGraph5Error( 3479 "The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. 
Please place a symlink to it there."%libpath) 3480 link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo')) 3481 tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo')) 3482 # We must add the corresponding includes for these TIR 3483 if tir in ['golem','samurai','ninja','collier']: 3484 trg_path = pjoin(os.path.dirname(libpath),'include') 3485 if os.path.isdir(trg_path): 3486 to_include = misc.find_includes_path(trg_path, 3487 self.include_names[tir]) 3488 else: 3489 to_include = None 3490 # Special possible location for collier 3491 if to_include is None and tir=='collier': 3492 to_include = misc.find_includes_path( 3493 pjoin(libpath,'modules'),self.include_names[tir]) 3494 if to_include is None: 3495 logger.error( 3496 'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+ 3497 'Generation carries on but you will need to edit the include path by hand in the makefiles.') 3498 to_include = '<Not_found_define_it_yourself>' 3499 tir_include.append('-I %s'%to_include) 3500 else: 3501 link_tir_libs.append('-l%s'%tir) 3502 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir) 3503 3504 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 3505 cwd = os.getcwd() 3506 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3507 try: 3508 os.chdir(dirpath) 3509 except os.error: 3510 logger.error('Could not cd to directory %s' % dirpath) 3511 return 0 3512 filename = 'makefile_loop' 3513 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 3514 link_tir_libs,tir_libs,tir_include=tir_include) 3515 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 3516 dirpath = os.path.join(self.dir_path, 'Source') 3517 try: 3518 os.chdir(dirpath) 3519 except os.error: 3520 logger.error('Could not cd to directory %s' % dirpath) 3521 return 0 3522 filename = 'make_opts' 3523 calls = self.write_make_opts(writers.MakefileWriter(filename), 3524 link_tir_libs,tir_libs) 3525 # Return to original PWD 3526 os.chdir(cwd) 3527 3528 cwd = os.getcwd() 3529 dirpath = os.path.join(self.dir_path, 'SubProcesses') 3530 try: 3531 os.chdir(dirpath) 3532 except os.error: 3533 logger.error('Could not cd to directory %s' % dirpath) 3534 return 0 3535 3536 # We add here the user-friendly MadLoop option setter. 
3537 cpfiles= ["SubProcesses/MadLoopParamReader.f", 3538 "Cards/MadLoopParams.dat", 3539 "SubProcesses/MadLoopParams.inc"] 3540 3541 for file in cpfiles: 3542 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 3543 os.path.join(self.dir_path, file)) 3544 3545 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'), 3546 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat')) 3547 3548 3549 3550 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')): 3551 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path, 3552 'Cards', 'MadLoopParams.dat')) 3553 # write the output file 3554 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses", 3555 "MadLoopParams.dat")) 3556 3557 # We need minimal editing of MadLoopCommons.f 3558 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 3559 "SubProcesses","MadLoopCommons.inc")).read() 3560 writer = writers.FortranWriter(os.path.join(self.dir_path, 3561 "SubProcesses","MadLoopCommons.f")) 3562 writer.writelines(MadLoopCommon%{ 3563 'print_banner_commands':self.MadLoop_banner}, 3564 context={'collier_available':self.tir_available_dict['collier']}) 3565 writer.close() 3566 3567 # link the files from the MODEL 3568 model_path = self.dir_path + '/Source/MODEL/' 3569 # Note that for the [real=] mode, these files are not present 3570 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')): 3571 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses') 3572 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')): 3573 ln(model_path + '/mp_coupl_same_name.inc', \ 3574 self.dir_path + '/SubProcesses') 3575 3576 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 3577 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 3578 writers.FortranWriter('cts_mpc.h'),) 3579 3580 self.copy_python_files() 3581 3582 3583 # We need to create the correct open_data for the pdf 3584 self.write_pdf_opendata() 3585 3586 3587 # Return to original PWD 3588 os.chdir(cwd)
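# --- Illustrative sketch (editor's note, not part of export_fks.py) ---
# The TIR link-flag bookkeeping in copy_fkstemplate above, for a hypothetical
# ninja installation found under /opt/ninja/lib (path and availability are
# assumptions; the real code also checks that libavh_olo is present first).
link_tir_libs, tir_libs = [], []
tir, libpath = 'ninja', '/opt/ninja/lib'
link_tir_libs.append('-L%s/ -l%s' % (libpath, tir))
tir_libs.append('%s/lib%s.$(libext)' % (libpath, tir))
# ninja is additionally linked against OneLOop in the code above:
link_tir_libs.append('-L%s/ -l%s' % (libpath, 'avh_olo'))
tir_libs.append('%s/lib%s.$(libext)' % (libpath, 'avh_olo'))
print(link_tir_libs)
print(tir_libs)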
3589
3590 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
3591 """writes the V**** directory inside the P**** directories specified in 3592 dir_name""" 3593 3594 cwd = os.getcwd() 3595 3596 matrix_element = loop_matrix_element 3597 3598 # Create the MadLoop5_resources directory if not already existing 3599 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 3600 try: 3601 os.mkdir(dirpath) 3602 except os.error as error: 3603 logger.warning(error.strerror + " " + dirpath) 3604 3605 # Create the directory PN_xx_xxxxx in the specified path 3606 name = "V%s" % matrix_element.get('processes')[0].shell_string() 3607 dirpath = os.path.join(dir_name, name) 3608 3609 try: 3610 os.mkdir(dirpath) 3611 except os.error as error: 3612 logger.warning(error.strerror + " " + dirpath) 3613 3614 try: 3615 os.chdir(dirpath) 3616 except os.error: 3617 logger.error('Could not cd to directory %s' % dirpath) 3618 return 0 3619 3620 logger.info('Creating files in directory %s' % name) 3621 3622 # Extract number of external particles 3623 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3624 3625 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model) 3626 3627 # We need a link to coefs.inc from DHELAS 3628 ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'), 3629 abspath=False, cwd=None) 3630 3631 # The born matrix element, if needed 3632 filename = 'born_matrix.f' 3633 calls = self.write_bornmatrix( 3634 writers.FortranWriter(filename), 3635 matrix_element, 3636 fortran_model) 3637 3638 filename = 'nexternal.inc' 3639 self.write_nexternal_file(writers.FortranWriter(filename), 3640 nexternal, ninitial) 3641 3642 filename = 'pmass.inc' 3643 self.write_pmass_file(writers.FortranWriter(filename), 3644 matrix_element) 3645 3646 filename = 'ngraphs.inc' 3647 self.write_ngraphs_file(writers.FortranWriter(filename), 3648 len(matrix_element.get_all_amplitudes())) 3649 3650 filename = "loop_matrix.ps" 3651 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""") 3652 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 3653 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 3654 filename, 3655 model=matrix_element.get('processes')[0].get('model'), 3656 amplitude='') 3657 logger.info("Drawing loop Feynman diagrams for " + \ 3658 matrix_element.get('processes')[0].nice_string(\ 3659 print_weighted=False)) 3660 plot.draw() 3661 3662 filename = "born_matrix.ps" 3663 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3664 get('born_diagrams'), 3665 filename, 3666 model=matrix_element.get('processes')[0].\ 3667 get('model'), 3668 amplitude='') 3669 logger.info("Generating born Feynman diagrams for " + \ 3670 matrix_element.get('processes')[0].nice_string(\ 3671 print_weighted=False)) 3672 plot.draw() 3673 3674 # We also need to write the overall maximum quantities for this group 3675 # of processes in 'global_specs.inc'. 
In aMCatNLO, there is always 3676 # only one process, so this is trivial 3677 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc')) 3678 3679 open('unique_id.inc','w').write( 3680 """ integer UNIQUE_ID 3681 parameter(UNIQUE_ID=1)""") 3682 3683 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 3684 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 3685 'MadLoopParams.inc','MadLoopCommons.f'] 3686 3687 for file in linkfiles: 3688 ln('../../%s' % file) 3689 3690 os.system("ln -s ../../makefile_loop makefile") 3691 3692 # We should move to MadLoop5_resources directory from the SubProcesses 3693 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'), 3694 pjoin('..','MadLoop5_resources')) 3695 3696 linkfiles = ['mpmodule.mod'] 3697 3698 for file in linkfiles: 3699 ln('../../../lib/%s' % file) 3700 3701 linkfiles = ['coef_specs.inc'] 3702 3703 for file in linkfiles: 3704 ln('../../../Source/DHELAS/%s' % file) 3705 3706 # Return to original PWD 3707 os.chdir(cwd) 3708 3709 if not calls: 3710 calls = 0 3711 return calls
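# --- Illustrative sketch (editor's note, not part of export_fks.py) ---
# The V-directory naming convention used in generate_virt_directory above;
# '0_epem_epem' is an assumed shell string, not a real generated process.
shell_string = '0_epem_epem'
print("V%s" % shell_string)        # -> V0_epem_epem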
3712 3713 3714 #=============================================================================== 3715 # write_coef_specs 3716 #===============================================================================
3717 - def write_coef_specs_file(self, max_loop_vertex_ranks):
3718 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 3719 non-optimized mode""" 3720 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 3721 3722 replace_dict = {} 3723 replace_dict['max_lwf_size'] = 4 3724 replace_dict['vertex_max_coefs'] = max(\ 3725 [q_polynomial.get_number_of_coefs_for_rank(n) 3726 for n in max_loop_vertex_ranks]) 3727 IncWriter=writers.FortranWriter(filename,'w') 3728 IncWriter.writelines("""INTEGER MAXLWFSIZE 3729 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 3730 INTEGER VERTEXMAXCOEFS 3731 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 3732 % replace_dict) 3733 IncWriter.close()
3734