Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import time 
  31  import traceback 
  32   
  33  import aloha 
  34   
  35  import madgraph.core.base_objects as base_objects 
  36  import madgraph.core.color_algebra as color 
  37  import madgraph.core.helas_objects as helas_objects 
  38  import madgraph.iolibs.drawing_eps as draw 
  39  import madgraph.iolibs.files as files 
  40  import madgraph.iolibs.group_subprocs as group_subprocs 
  41  import madgraph.iolibs.file_writers as writers 
  42  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  43  import madgraph.iolibs.template_files as template_files 
  44  import madgraph.iolibs.ufo_expression_parsers as parsers 
  45  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  46  import madgraph.interface.common_run_interface as common_run_interface 
  47  import madgraph.various.diagram_symmetry as diagram_symmetry 
  48  import madgraph.various.misc as misc 
  49  import madgraph.various.banner as banner_mod 
  50  import madgraph.various.process_checks as process_checks 
  51  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  52  import aloha.create_aloha as create_aloha 
  53  import models.import_ufo as import_ufo 
  54  import models.write_param_card as param_writer 
  55  import models.check_param_card as check_param_card 
  56   
  57   
  58  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  59  from madgraph.iolibs.files import cp, ln, mv 
  60   
  61  from madgraph import InvalidCmd 
  62   
  63  pjoin = os.path.join 
  64   
  65  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  66  logger = logging.getLogger('madgraph.export_v4') 
  67   
  68  default_compiler= {'fortran': 'gfortran', 
  69                         'f2py': 'f2py', 
  70                         'cpp':'g++'} 
class VirtualExporter(object):
    """Abstract interface that every exporter must implement.

    The class attributes below are read by madgraph to decide how it
    interacts with a concrete exporter; daughter classes override the
    methods to produce actual output.
    """

    # Exporter variables which modify the way madgraph interacts with this class
    grouped_mode = 'madevent'
    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    # False to avoid grouping (only identical matrix element are merged)
    # 'madevent' group the massless quark and massless lepton
    # 'madweight' group the gluon with the massless quark
    sa_symmetry = False
    # If no grouped_mode=False, uu~ and u~u will be called independently.
    # Putting sa_symmetry generates only one of the two matrix-element.
    check = True
    # Ask madgraph to check if the directory already exists and propose to the
    # user to remove it first if this is the case
    output = 'Template'
    # [Template, None, dir]
    # - Template, madgraph will call copy_template
    # - dir, madgraph will just create an empty directory for initialisation
    # - None, madgraph do nothing for initialisation
    exporter = 'v4'
    # language of the output 'v4' for Fortran output
    # 'cpp' for C++ output

    def __init__(self, dir_path="", opt=None):
        # opt is a dictionary with all the optional arguments passed at output time
        return

    def copy_template(self, model):
        """Initialise the output directory from a template (no-op here)."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # generate_subprocess_directory(self, matrix_element, helicity_model,
        #                               me_number)  [for ungrouped]
        # Return an integer stating the number of calls to helicity routines.
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Write the model files needed by the output (no-op here)."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Finalize the output directory (no-op here)."""
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Return (changed, matrix_element); no regrouping by default."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # py3-compatible raise syntax (was: raise Exception, "...")
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
133
134 #=============================================================================== 135 # ProcessExporterFortran 136 #=============================================================================== 137 -class ProcessExporterFortran(VirtualExporter):
138 """Class to take care of exporting a set of matrix elements to 139 Fortran (v4) format.""" 140 141 default_opt = {'clean': False, 'complex_mass':False, 142 'export_format':'madevent', 'mp': False, 143 'v5_model': True, 144 'output_options':{} 145 } 146 grouped_mode = False 147
148 - def __init__(self, dir_path = "", opt=None):
149 """Initiate the ProcessExporterFortran with directory information""" 150 self.mgme_dir = MG5DIR 151 self.dir_path = dir_path 152 self.model = None 153 154 self.opt = dict(self.default_opt) 155 if opt: 156 self.opt.update(opt) 157 158 self.cmd_options = self.opt['output_options'] 159 160 #place holder to pass information to the run_interface 161 self.proc_characteristic = banner_mod.ProcCharacteristic()
162 163 164 #=========================================================================== 165 # process exporter fortran switch between group and not grouped 166 #===========================================================================
167 - def export_processes(self, matrix_elements, fortran_model):
168 """Make the switch between grouped and not grouped output""" 169 170 calls = 0 171 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 172 for (group_number, me_group) in enumerate(matrix_elements): 173 calls = calls + self.generate_subprocess_directory(\ 174 me_group, fortran_model, group_number) 175 else: 176 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 177 calls = calls + self.generate_subprocess_directory(\ 178 me, fortran_model, me_number) 179 180 return calls
181 182 183 #=========================================================================== 184 # create the run_card 185 #===========================================================================
186 - def create_run_card(self, matrix_elements, history):
187 """ """ 188 189 190 # bypass this for the loop-check 191 import madgraph.loop.loop_helas_objects as loop_helas_objects 192 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 193 matrix_elements = None 194 195 run_card = banner_mod.RunCard() 196 197 198 default=True 199 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 200 processes = [me.get('processes') for megroup in matrix_elements 201 for me in megroup['matrix_elements']] 202 elif matrix_elements: 203 processes = [me.get('processes') 204 for me in matrix_elements['matrix_elements']] 205 else: 206 default =False 207 208 if default: 209 run_card.create_default_for_process(self.proc_characteristic, 210 history, 211 processes) 212 213 214 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 215 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
216 217 218 #=========================================================================== 219 # copy the Template in a new directory. 220 #===========================================================================
221 - def copy_template(self, model):
222 """create the directory run_name as a copy of the MadEvent 223 Template, and clean the directory 224 """ 225 226 #First copy the full template tree if dir_path doesn't exit 227 if not os.path.isdir(self.dir_path): 228 assert self.mgme_dir, \ 229 "No valid MG_ME path given for MG4 run directory creation." 230 logger.info('initialize a new directory: %s' % \ 231 os.path.basename(self.dir_path)) 232 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 233 self.dir_path, True) 234 # distutils.dir_util.copy_tree since dir_path already exists 235 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 236 self.dir_path) 237 # copy plot_card 238 for card in ['plot_card']: 239 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 240 try: 241 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 242 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 243 except IOError: 244 logger.warning("Failed to copy " + card + ".dat to default") 245 elif os.getcwd() == os.path.realpath(self.dir_path): 246 logger.info('working in local directory: %s' % \ 247 os.path.realpath(self.dir_path)) 248 # distutils.dir_util.copy_tree since dir_path already exists 249 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 250 self.dir_path) 251 # for name in misc.glob('Template/LO/*', self.mgme_dir): 252 # name = os.path.basename(name) 253 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 254 # if os.path.isfile(filename): 255 # files.cp(filename, pjoin(self.dir_path,name)) 256 # elif os.path.isdir(filename): 257 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 258 # distutils.dir_util.copy_tree since dir_path already exists 259 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 260 self.dir_path) 261 # Copy plot_card 262 for card in ['plot_card']: 263 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 264 try: 265 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 266 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 267 
except IOError: 268 logger.warning("Failed to copy " + card + ".dat to default") 269 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 270 assert self.mgme_dir, \ 271 "No valid MG_ME path given for MG4 run directory creation." 272 try: 273 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 274 except IOError: 275 MG5_version = misc.get_pkg_info() 276 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 277 278 #Ensure that the Template is clean 279 if self.opt['clean']: 280 logger.info('remove old information in %s' % \ 281 os.path.basename(self.dir_path)) 282 if os.environ.has_key('MADGRAPH_BASE'): 283 misc.call([pjoin('bin', 'internal', 'clean_template'), 284 '--web'], cwd=self.dir_path) 285 else: 286 try: 287 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 288 cwd=self.dir_path) 289 except Exception, why: 290 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 291 % (os.path.basename(self.dir_path),why)) 292 293 #Write version info 294 MG_version = misc.get_pkg_info() 295 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 296 MG_version['version']) 297 298 # add the makefile in Source directory 299 filename = pjoin(self.dir_path,'Source','makefile') 300 self.write_source_makefile(writers.FileWriter(filename)) 301 302 # add the DiscreteSampler information 303 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 304 pjoin(self.dir_path, 'Source')) 305 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 306 pjoin(self.dir_path, 'Source')) 307 308 # We need to create the correct open_data for the pdf 309 self.write_pdf_opendata()
310 311 312 #=========================================================================== 313 # Call MadAnalysis5 to generate the default cards for this process 314 #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels = ['parton','hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process.

        Any failure (either creating the MA5 interpreter or generating a
        card) is deliberately non-fatal: the corresponding default analysis
        is simply left empty and a warning is logged."""

        if len(levels)==0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        try:
            # loglevel=100 silences the MA5 interpreter's own logging
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                              get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100)
        except (Exception, SystemExit) as e:
            # best-effort: an unusable MA5 install must not abort the export
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton','hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only)
                    logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl)
                    # full traceback only at debug level so normal runs stay quiet
                    error=StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-'*60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-'*60)
                else:
                    open(card_to_generate,'w').write(text)
        stop = time.time()
        # only report timing when card generation was noticeably slow
        if stop-start >1:
            logger.info('Cards created in %.2fs' % (stop-start))
355 356 #=========================================================================== 357 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 358 #===========================================================================
359 - def write_procdef_mg5(self, file_pos, modelname, process_str):
360 """ write an equivalent of the MG4 proc_card in order that all the Madevent 361 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 362 363 proc_card_template = template_files.mg4_proc_card.mg4_template 364 process_template = template_files.mg4_proc_card.process_template 365 process_text = '' 366 coupling = '' 367 new_process_content = [] 368 369 370 # First find the coupling and suppress the coupling from process_str 371 #But first ensure that coupling are define whithout spaces: 372 process_str = process_str.replace(' =', '=') 373 process_str = process_str.replace('= ', '=') 374 process_str = process_str.replace(',',' , ') 375 #now loop on the element and treat all the coupling 376 for info in process_str.split(): 377 if '=' in info: 378 coupling += info + '\n' 379 else: 380 new_process_content.append(info) 381 # Recombine the process_str (which is the input process_str without coupling 382 #info) 383 process_str = ' '.join(new_process_content) 384 385 #format the SubProcess 386 replace_dict = {'process': process_str, 387 'coupling': coupling} 388 process_text += process_template.substitute(replace_dict) 389 390 replace_dict = {'process': process_text, 391 'model': modelname, 392 'multiparticle':''} 393 text = proc_card_template.substitute(replace_dict) 394 395 if file_pos: 396 ff = open(file_pos, 'w') 397 ff.write(text) 398 ff.close() 399 else: 400 return replace_dict
401 402
403 - def pass_information_from_cmd(self, cmd):
404 """Pass information for MA5""" 405 406 self.proc_defs = cmd._curr_proc_defs
407 408 #=========================================================================== 409 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 410 #===========================================================================
411 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
412 """Function to finalize v4 directory, for inheritance.""" 413 414 self.create_run_card(matrix_elements, history) 415 self.create_MA5_cards(matrix_elements, history)
416
417 - def create_MA5_cards(self,matrix_elements,history):
418 """ A wrapper around the creation of the MA5 cards so that it can be 419 bypassed by daughter classes (i.e. in standalone).""" 420 if 'madanalysis5_path' in self.opt and not \ 421 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 422 processes = None 423 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 424 processes = [me.get('processes') for megroup in matrix_elements 425 for me in megroup['matrix_elements']] 426 elif matrix_elements: 427 processes = [me.get('processes') 428 for me in matrix_elements['matrix_elements']] 429 430 self.create_default_madanalysis5_cards( 431 history, self.proc_defs, processes, 432 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 433 levels = ['hadron','parton']) 434 435 for level in ['hadron','parton']: 436 # Copying these cards turn on the use of MadAnalysis5 by default. 437 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 438 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 439 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
440 441 #=========================================================================== 442 # Create the proc_characteristic file passing information to the run_interface 443 #===========================================================================
444 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
445 446 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
447 448 #=========================================================================== 449 # write_matrix_element_v4 450 #===========================================================================
451 - def write_matrix_element_v4(self):
452 """Function to write a matrix.f file, for inheritance. 453 """ 454 pass
455 456 #=========================================================================== 457 # write_pdf_opendata 458 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure

        Two Fortran sources are regenerated from their templates:
        Source/PDF/opendata.f and Source/PDF/pdfwrap_lhapdf.f. When the
        'cluster_local_path' option is set, extra lookup locations on the
        node-local disk are tried before the system-wide tables."""

        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            # Fortran snippet: try a cascade of node-local table locations,
            # each ERR= label falling through to the next candidate path.
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
              """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            # Same cascade for the LHAPDF set directory.
            to_add="""
              LHAPath='%(path)s/PDFsets'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='./PDFsets'
              """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)


        return
516 517 518 519 #=========================================================================== 520 # write_maxparticles_file 521 #===========================================================================
522 - def write_maxparticles_file(self, writer, matrix_elements):
523 """Write the maxparticles.inc file for MadEvent""" 524 525 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 526 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 527 matrix_elements.get('matrix_elements')]) 528 else: 529 maxparticles = max([me.get_nexternal_ninitial()[0] \ 530 for me in matrix_elements]) 531 532 lines = "integer max_particles\n" 533 lines += "parameter(max_particles=%d)" % maxparticles 534 535 # Write the file 536 writer.writelines(lines) 537 538 return True
539 540 541 #=========================================================================== 542 # export the model 543 #===========================================================================
    def export_model_files(self, model_path):
        """Configure the files/link of the process according to the model"""

        # Import the model: copy every regular file found at the top level of
        # model_path into Source/MODEL (subdirectories are skipped).
        # NOTE(review): the loop variable shadows the builtin 'file' (py2
        # legacy); harmless here but worth renaming eventually.
        for file in os.listdir(model_path):
            if os.path.isfile(pjoin(model_path, file)):
                shutil.copy2(pjoin(model_path, file), \
                             pjoin(self.dir_path, 'Source', 'MODEL'))
552 553 567 575 576 577 #=========================================================================== 578 # export the helas routine 579 #===========================================================================
580 - def export_helas(self, helas_path):
581 """Configure the files/link of the process according to the model""" 582 583 # Import helas routine 584 for filename in os.listdir(helas_path): 585 filepos = pjoin(helas_path, filename) 586 if os.path.isfile(filepos): 587 if filepos.endswith('Makefile.template'): 588 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 589 elif filepos.endswith('Makefile'): 590 pass 591 else: 592 cp(filepos, self.dir_path + '/Source/DHELAS')
593 # following lines do the same but whithout symbolic link 594 # 595 #def export_helas(mgme_dir, dir_path): 596 # 597 # # Copy the HELAS directory 598 # helas_dir = pjoin(mgme_dir, 'HELAS') 599 # for filename in os.listdir(helas_dir): 600 # if os.path.isfile(pjoin(helas_dir, filename)): 601 # shutil.copy2(pjoin(helas_dir, filename), 602 # pjoin(dir_path, 'Source', 'DHELAS')) 603 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 604 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 605 # 606 607 #=========================================================================== 608 # generate_subprocess_directory 609 #===========================================================================
610 - def generate_subprocess_directory(self, matrix_element, 611 fortran_model, 612 me_number):
613 """Routine to generate a subprocess directory (for inheritance)""" 614 615 pass
616 617 #=========================================================================== 618 # get_source_libraries_list 619 #===========================================================================
620 - def get_source_libraries_list(self):
621 """ Returns the list of libraries to be compiling when compiling the 622 SOURCE directory. It is different for loop_induced processes and 623 also depends on the value of the 'output_dependencies' option""" 624 625 return ['$(LIBDIR)libdhelas.$(libext)', 626 '$(LIBDIR)libpdf.$(libext)', 627 '$(LIBDIR)libmodel.$(libext)', 628 '$(LIBDIR)libcernlib.$(libext)', 629 '$(LIBDIR)libbias.$(libext)']
630 631 #=========================================================================== 632 # write_source_makefile 633 #===========================================================================
634 - def write_source_makefile(self, writer):
635 """Write the nexternal.inc file for MG4""" 636 637 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 638 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 639 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 640 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 641 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 642 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 643 else: 644 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 645 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 646 647 replace_dict= {'libraries': set_of_lib, 648 'model':model_line, 649 'additional_dsample': '', 650 'additional_dependencies':''} 651 652 if writer: 653 text = open(path).read() % replace_dict 654 writer.write(text) 655 656 return replace_dict
657 658 #=========================================================================== 659 # write_nexternal_madspin 660 #===========================================================================
661 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
662 """Write the nexternal_prod.inc file for madspin""" 663 664 replace_dict = {} 665 666 replace_dict['nexternal'] = nexternal 667 replace_dict['ninitial'] = ninitial 668 669 file = """ \ 670 integer nexternal_prod 671 parameter (nexternal_prod=%(nexternal)d) 672 integer nincoming_prod 673 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 674 675 # Write the file 676 if writer: 677 writer.writelines(file) 678 return True 679 else: 680 return replace_dict
681 682 #=========================================================================== 683 # write_helamp_madspin 684 #===========================================================================
685 - def write_helamp_madspin(self, writer, ncomb):
686 """Write the helamp.inc file for madspin""" 687 688 replace_dict = {} 689 690 replace_dict['ncomb'] = ncomb 691 692 file = """ \ 693 integer ncomb1 694 parameter (ncomb1=%(ncomb)d) 695 double precision helamp(ncomb1) 696 common /to_helamp/helamp """ % replace_dict 697 698 # Write the file 699 if writer: 700 writer.writelines(file) 701 return True 702 else: 703 return replace_dict
704 705 706 707 #=========================================================================== 708 # write_nexternal_file 709 #===========================================================================
710 - def write_nexternal_file(self, writer, nexternal, ninitial):
711 """Write the nexternal.inc file for MG4""" 712 713 replace_dict = {} 714 715 replace_dict['nexternal'] = nexternal 716 replace_dict['ninitial'] = ninitial 717 718 file = """ \ 719 integer nexternal 720 parameter (nexternal=%(nexternal)d) 721 integer nincoming 722 parameter (nincoming=%(ninitial)d)""" % replace_dict 723 724 # Write the file 725 if writer: 726 writer.writelines(file) 727 return True 728 else: 729 return replace_dict
730 #=========================================================================== 731 # write_pmass_file 732 #===========================================================================
733 - def write_pmass_file(self, writer, matrix_element):
734 """Write the pmass.inc file for MG4""" 735 736 model = matrix_element.get('processes')[0].get('model') 737 738 lines = [] 739 for wf in matrix_element.get_external_wavefunctions(): 740 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 741 if mass.lower() != "zero": 742 mass = "abs(%s)" % mass 743 744 lines.append("pmass(%d)=%s" % \ 745 (wf.get('number_external'), mass)) 746 747 # Write the file 748 writer.writelines(lines) 749 750 return True
751 752 #=========================================================================== 753 # write_ngraphs_file 754 #===========================================================================
755 - def write_ngraphs_file(self, writer, nconfigs):
756 """Write the ngraphs.inc file for MG4. Needs input from 757 write_configs_file.""" 758 759 file = " integer n_max_cg\n" 760 file = file + "parameter (n_max_cg=%d)" % nconfigs 761 762 # Write the file 763 writer.writelines(file) 764 765 return True
766 767 #=========================================================================== 768 # write_leshouche_file 769 #===========================================================================
770 - def write_leshouche_file(self, writer, matrix_element):
771 """Write the leshouche.inc file for MG4""" 772 773 # Write the file 774 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 775 776 return True
777 778 #=========================================================================== 779 # get_leshouche_lines 780 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Write the leshouche.inc file for MG4

        Builds the Fortran DATA statements for the Les Houches event-record
        arrays: IDUP (PDG ids, per process), MOTHUP (mother pointers, first
        process only) and ICOLUP (colour-flow lines, first process only).
        The "%2r"/"%3r" conversions format integers via repr() at fixed
        width, matching the historical MG4 layout."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # mothers: 0 for incoming legs, the beam index for outgoing
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                             (i, nexternal,
                              ",".join([ "%3r" % 0 ] * ninitial + \
                                       [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                 (i, numproc+1,nexternal,
                                  ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    # (sign flips the representation for initial-state legs)
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                     (i + 1, cf_i + 1, numproc+1, nexternal,
                                      ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                for l in legs])))

        return lines
830 831 832 833 834 #=========================================================================== 835 # write_maxamps_file 836 #===========================================================================
837 - def write_maxamps_file(self, writer, maxamps, maxflows, 838 maxproc,maxsproc):
839 """Write the maxamps.inc file for MG4.""" 840 841 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 842 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 843 (maxamps, maxflows) 844 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 845 (maxproc, maxsproc) 846 847 # Write the file 848 writer.writelines(file) 849 850 return True
851 852 853 #=========================================================================== 854 # Routines to output UFO models in MG4 format 855 #=========================================================================== 856
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO)

        Builds Source/MODEL from the UFO model, generates the needed ALOHA
        helicity-amplitude routines into Source/DHELAS, copies the matching
        makefile/aloha_functions template and finally creates the symbolic
        links used by the process directories.
        """

        # Make sure aloha is in quadruple precision if needed
        # (global aloha state; restored at the end of this method)
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']

        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        # NOTE(review): 'self.opt + self.proc_characteristic' relies on the
        # option containers supporting '+' (plain dicts do not) -- confirm
        # the actual types involved here.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            # Restrict generation to the Lorentz structures actually used.
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # An 'L' tag in a wanted_lorentz entry signals a loop Lorentz
        # structure, which requires the extended aloha_functions file.
        if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
914 915 916 #=========================================================================== 917 # Helper functions 918 #===========================================================================
919 - def modify_grouping(self, matrix_element):
920 """allow to modify the grouping (if grouping is in place) 921 return two value: 922 - True/False if the matrix_element was modified 923 - the new(or old) matrix element""" 924 925 return False, matrix_element
926 927 #=========================================================================== 928 # Helper functions 929 #===========================================================================
930 - def get_mg5_info_lines(self):
931 """Return info lines for MG5, suitable to place at beginning of 932 Fortran files""" 933 934 info = misc.get_pkg_info() 935 info_lines = "" 936 if info and info.has_key('version') and info.has_key('date'): 937 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 938 (info['version'], info['date']) 939 info_lines = info_lines + \ 940 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 941 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 942 else: 943 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 944 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 945 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 946 947 return info_lines
948
949 - def get_process_info_lines(self, matrix_element):
950 """Return info lines describing the processes for this matrix element""" 951 952 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 953 for process in matrix_element.get('processes')])
954 955
956 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
957 """Return the Helicity matrix definition lines for this matrix element""" 958 959 helicity_line_list = [] 960 i = 0 961 for helicities in matrix_element.get_helicity_matrix(): 962 i = i + 1 963 int_list = [i, len(helicities)] 964 int_list.extend(helicities) 965 helicity_line_list.append(\ 966 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 967 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 968 969 return "\n".join(helicity_line_list)
970
971 - def get_ic_line(self, matrix_element):
972 """Return the IC definition line coming after helicities, required by 973 switchmom in madevent""" 974 975 nexternal = matrix_element.get_nexternal_ninitial()[0] 976 int_list = range(1, nexternal + 1) 977 978 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 979 ",".join([str(i) for \ 980 i in int_list]))
981
982 - def set_chosen_SO_index(self, process, squared_orders):
983 """ From the squared order constraints set by the user, this function 984 finds what indices of the squared_orders list the user intends to pick. 985 It returns this as a string of comma-separated successive '.true.' or 986 '.false.' for each index.""" 987 988 user_squared_orders = process.get('squared_orders') 989 split_orders = process.get('split_orders') 990 991 if len(user_squared_orders)==0: 992 return ','.join(['.true.']*len(squared_orders)) 993 994 res = [] 995 for sqsos in squared_orders: 996 is_a_match = True 997 for user_sqso, value in user_squared_orders.items(): 998 if (process.get_squared_order_type(user_sqso) =='==' and \ 999 value!=sqsos[split_orders.index(user_sqso)]) or \ 1000 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1001 value<sqsos[split_orders.index(user_sqso)]) or \ 1002 (process.get_squared_order_type(user_sqso) == '>' and \ 1003 value>=sqsos[split_orders.index(user_sqso)]): 1004 is_a_match = False 1005 break 1006 res.append('.true.' if is_a_match else '.false.') 1007 1008 return ','.join(res)
1009
1010 - def get_split_orders_lines(self, orders, array_name, n=5):
1011 """ Return the split orders definition as defined in the list orders and 1012 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1013 1014 ret_list = [] 1015 for index, order in enumerate(orders): 1016 for k in xrange(0, len(order), n): 1017 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1018 (array_name,index + 1, k + 1, min(k + n, len(order)), 1019 ','.join(["%5r" % i for i in order[k:k + n]]))) 1020 return ret_list
1021
1022 - def format_integer_list(self, list, name, n=5):
1023 """ Return an initialization of the python list in argument following 1024 the fortran syntax using the data keyword assignment, filling an array 1025 of name 'name'. It splits rows in chunks of size n.""" 1026 1027 ret_list = [] 1028 for k in xrange(0, len(list), n): 1029 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1030 (name, k + 1, min(k + n, len(list)), 1031 ','.join(["%5r" % i for i in list[k:k + n]]))) 1032 return ret_list
1033
1034 - def get_color_data_lines(self, matrix_element, n=6):
1035 """Return the color matrix definition lines for this matrix element. Split 1036 rows in chunks of size n.""" 1037 1038 if not matrix_element.get('color_matrix'): 1039 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1040 else: 1041 ret_list = [] 1042 my_cs = color.ColorString() 1043 for index, denominator in \ 1044 enumerate(matrix_element.get('color_matrix').\ 1045 get_line_denominators()): 1046 # First write the common denominator for this color matrix line 1047 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1048 # Then write the numerators for the matrix elements 1049 num_list = matrix_element.get('color_matrix').\ 1050 get_line_numerators(index, denominator) 1051 1052 for k in xrange(0, len(num_list), n): 1053 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1054 (index + 1, k + 1, min(k + n, len(num_list)), 1055 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 1056 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1057 ret_list.append("C %s" % repr(my_cs)) 1058 return ret_list
1059 1060
1061 - def get_den_factor_line(self, matrix_element):
1062 """Return the denominator factor line for this matrix element""" 1063 1064 return "DATA IDEN/%2r/" % \ 1065 matrix_element.get_denominator_factor()
1066
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        mapconfigs: diagram number for each config (0 entries are skipped);
        num_matrix_element: index of this matrix element in the group.
        Returns the list of Fortran DATA lines.
        """

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                                                  color_basis.values()],[]))

        # Crate dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    # diag_tuple[0] is the 0-based diagram index.
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # NOTE(review): relies on 'ijamp' surviving the loop above, i.e. on a
        # non-empty color basis (guaranteed by the early return otherwise).
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            # NOTE(review): diag_jamp[num_diag] raises KeyError if no leading
            # color flow touches this diagram -- confirm this cannot happen
            # for the diagrams listed in mapconfigs.
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                                (iconfig+1, num_matrix_element, colamps,
                                 ','.join(["%s" % booldict[b] for b in \
                                           bool_list])))

        return ret_list
1125
1126 - def get_amp2_lines(self, matrix_element, config_map = []):
1127 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1128 1129 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1130 # Get minimum legs in a vertex 1131 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1132 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1133 minvert = min(vert_list) if vert_list!=[] else 0 1134 1135 ret_lines = [] 1136 if config_map: 1137 # In this case, we need to sum up all amplitudes that have 1138 # identical topologies, as given by the config_map (which 1139 # gives the topology/config for each of the diagrams 1140 diagrams = matrix_element.get('diagrams') 1141 # Combine the diagrams with identical topologies 1142 config_to_diag_dict = {} 1143 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1144 if config_map[idiag] == 0: 1145 continue 1146 try: 1147 config_to_diag_dict[config_map[idiag]].append(idiag) 1148 except KeyError: 1149 config_to_diag_dict[config_map[idiag]] = [idiag] 1150 # Write out the AMP2s summing squares of amplitudes belonging 1151 # to eiher the same diagram or different diagrams with 1152 # identical propagator properties. Note that we need to use 1153 # AMP2 number corresponding to the first diagram number used 1154 # for that AMP2. 1155 for config in sorted(config_to_diag_dict.keys()): 1156 1157 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1158 {"num": (config_to_diag_dict[config][0] + 1)} 1159 1160 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1161 sum([diagrams[idiag].get('amplitudes') for \ 1162 idiag in config_to_diag_dict[config]], [])]) 1163 1164 # Not using \sum |M|^2 anymore since this creates troubles 1165 # when ckm is not diagonal due to the JIM mechanism. 
1166 if '+' in amp: 1167 line += "(%s)*dconjg(%s)" % (amp, amp) 1168 else: 1169 line += "%s*dconjg(%s)" % (amp, amp) 1170 ret_lines.append(line) 1171 else: 1172 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1173 # Ignore any diagrams with 4-particle vertices. 1174 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1175 continue 1176 # Now write out the expression for AMP2, meaning the sum of 1177 # squared amplitudes belonging to the same diagram 1178 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1179 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1180 {"num": a.get('number')} for a in \ 1181 diag.get('amplitudes')]) 1182 ret_lines.append(line) 1183 1184 return ret_lines
1185 1186 #=========================================================================== 1187 # Returns the data statements initializing the coeffictients for the JAMP 1188 # decomposition. It is used when the JAMP initialization is decided to be 1189 # done through big arrays containing the projection coefficients. 1190 #===========================================================================
    def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                       n=50, Nc_value=3):
        """This functions return the lines defining the DATA statement setting
        the coefficients building the JAMPS out of the AMPS. Split rows in
        bunches of size n.
        One can specify the color_basis from which the color amplitudes originates
        so that there are commentaries telling what color structure each JAMP
        corresponds to."""

        # Sanity check: color_amplitudes must be a non-empty list of lists.
        if(not isinstance(color_amplitudes,list) or
           not (color_amplitudes and isinstance(color_amplitudes[0],list))):
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

        res_list = []
        my_cs = color.ColorString()
        for index, coeff_list in enumerate(color_amplitudes):
            # Create the list of the complete numerical coefficient.
            # Each entry of coeff_list is (coefficient, amp_number) where the
            # coefficient components match self.coeff's arguments:
            # (ff_number, frac, is_imaginary, Nc_power).
            coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                        (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                        coefficient in coeff_list]
            # Create the list of the numbers of the contributing amplitudes.
            # Multiply by -1 for those which have an imaginary coefficient.
            ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                              for coefficient in coeff_list]
            # Find the common denominator so that all numerators below become
            # integers.
            commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
            num_list=[(coefficient*commondenom).numerator \
                      for coefficient in coefs_list]
            # Number of amplitudes contributing to this JAMP and the common
            # denominator of their coefficients.
            res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                                                     index+1,len(num_list)))
            res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                                                     index+1,commondenom))
            if color_basis:
                # Echo the color structure of this JAMP as a comment.
                my_cs.from_immutable(sorted(color_basis.keys())[index])
                res_list.append("C %s" % repr(my_cs))
            # Numerators and amplitude numbers, n entries per DATA statement.
            for k in xrange(0, len(num_list), n):
                res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                                 ','.join(["%6r" % i for i in num_list[k:k + n]])))
                res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                                 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
                pass
        return res_list
1235 1236
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
          split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code."""

        # Let the user call get_JAMP_lines_split_order directly from a
        # HelasMatrixElement or from the color amplitudes lists.
        # NOTE(review): the first '%s' (argument name) of error_msg is filled
        # in only at raise time; if repr(split_order_amps) itself contained a
        # '%', that later formatting would misbehave.
        error_msg="Malformed '%s' argument passed to the "+\
                 "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, error_msg%'col_amps'
        else:
            raise MadGraph5Error, error_msg%'col_amps'

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error, error_msg%'split_order_amps'
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error, error_msg%'split_order_amps'
        else:
            raise MadGraph5Error, error_msg%'split_order_amps'

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error, error_msg%'split_order_names'
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error, error_msg%'split_order_names'
            else:
                raise MadGraph5Error, error_msg%'split_order_names'

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        for i, amp_order in enumerate(split_order_amps):
            col_amps_order = []
            for jamp in color_amplitudes:
                # Keep only the contributions whose amplitude number belongs
                # to this order group (amp_order[1]).
                col_amps_order.append(filter(lambda col_amp:
                                               col_amp[1] in amp_order[1],jamp))
            if split_order_names:
                # Human-readable comment stating the orders of this group.
                res_list.append('C JAMPs contributing to orders '+' '.join(
                              ['%s=%i'%order for order in zip(split_order_names,
                                                              amp_order[0])]))
            if self.opt['export_format'] in ['madloop_matchbox']:
                # MatchBox also needs the leading-color JAMP variant.
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                   JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                   JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
            else:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                   JAMP_format="JAMP(%s,{0})".format(str(i+1))))

        return res_list
1311 1312
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps,
        given either as a HelasMatrixElement or directly as a list of color
        amplitudes (one list of (coefficient, amp_number) pairs per JAMP).
        The split argument defines into how many contributions each JAMP
        definition should be chunked so that the generated Fortran lines do
        not become too long (split<=0 disables the splitting)."""

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation chunks accumulate onto the value built so far:
                # JAMP(i)=JAMP(i)+...
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP: append one signed coefficient*AMP term per
                # contribution.
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    if common_factor:
                        # Factored form: only the sign of the fraction remains
                        # inside the parenthesis.
                        res = (res + "%s" + AMP_format) % \
                                                   (self.coeff(coefficient[0],
                                                   coefficient[1] / abs(coefficient[1]),
                                                   coefficient[2],
                                                   coefficient[3]),
                                                   str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                   coefficient[1],
                                                   coefficient[2],
                                                   coefficient[3]),
                                                   str(amp_number))

                if common_factor:
                    res = res + ')'

                res_list.append(res)

        return res_list
1389
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns the triple (pdf_definition_lines, pdf_data_lines, pdf_lines):
        the declarations of the per-beam PDF variables, their DATA
        initialisation, and the code computing PD(IPROC) for each process --
        each string without its trailing line break.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDF convolution, every process gets weight 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            for key,val in pdf_codes.items():
                # Sanitize particle names into valid Fortran identifiers.
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in \
                                                 initial_states[i]]) + \
                                                 "\n"

            # Get PDF data lines for all initial states (default value 1D0)
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in initial_states[i]]) + \
                                                 "/%d*1D0/" % len(initial_states[i]) + \
                                                 "\n"

            # Get PDF lines for all different initial states; the PDF call is
            # guarded so it only runs for hadronic beams (|LPP|>=1).
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                 % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                 % (i + 1, i + 1)

                for initial_state in init_states:
                    if initial_state in pdf_codes.keys():
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, i + 1, pdgtopdf[initial_state],
                                          i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, i + 1, pdgtopdf[initial_state],
                                          i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in pdf_codes.keys():
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        # Non-partonic beam: unit weight.
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1495 1496 #=========================================================================== 1497 # write_props_file 1498 #===========================================================================
1499 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1500 """Write the props.inc file for MadEvent. Needs input from 1501 write_configs_file.""" 1502 1503 lines = [] 1504 1505 particle_dict = matrix_element.get('processes')[0].get('model').\ 1506 get('particle_dict') 1507 1508 for iconf, configs in enumerate(s_and_t_channels): 1509 for vertex in configs[0] + configs[1][:-1]: 1510 leg = vertex.get('legs')[-1] 1511 if leg.get('id') not in particle_dict: 1512 # Fake propagator used in multiparticle vertices 1513 mass = 'zero' 1514 width = 'zero' 1515 pow_part = 0 1516 else: 1517 particle = particle_dict[leg.get('id')] 1518 # Get mass 1519 if particle.get('mass').lower() == 'zero': 1520 mass = particle.get('mass') 1521 else: 1522 mass = "abs(%s)" % particle.get('mass') 1523 # Get width 1524 if particle.get('width').lower() == 'zero': 1525 width = particle.get('width') 1526 else: 1527 width = "abs(%s)" % particle.get('width') 1528 1529 pow_part = 1 + int(particle.is_boson()) 1530 1531 lines.append("prmass(%d,%d) = %s" % \ 1532 (leg.get('number'), iconf + 1, mass)) 1533 lines.append("prwidth(%d,%d) = %s" % \ 1534 (leg.get('number'), iconf + 1, width)) 1535 lines.append("pow(%d,%d) = %d" % \ 1536 (leg.get('number'), iconf + 1, pow_part)) 1537 1538 # Write the file 1539 writer.writelines(lines) 1540 1541 return True
1542 1543 #=========================================================================== 1544 # write_configs_file 1545 #===========================================================================
1546 - def write_configs_file(self, writer, matrix_element):
1547 """Write the configs.inc file for MadEvent""" 1548 1549 # Extract number of external particles 1550 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1551 1552 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1553 mapconfigs = [c[0] for c in configs] 1554 model = matrix_element.get('processes')[0].get('model') 1555 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1556 [[c[1]] for c in configs], 1557 mapconfigs, 1558 nexternal, ninitial, 1559 model)
1560 1561 #=========================================================================== 1562 # write_configs_file_from_diagrams 1563 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channels, t-channels) per written config."""

        lines = []

        s_and_t_channels = []

        # Smallest vertex size appearing in any config; configs containing
        # larger vertices are skipped below (except shrunk-loop vertices).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                      for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used to label multiparticle-vertex propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            # (empty_verts is shared by reference and resized below so that
            # the non-contributing entries line up with the real ones).
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are per-subprocess tuples; t-channel
                # entries are single vertices.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess (0 when the
                    # subprocess does not contribute).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1682 1683 #=========================================================================== 1684 # Global helper methods 1685 #=========================================================================== 1686
1687 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1688 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1689 1690 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1691 1692 if total_coeff == 1: 1693 if is_imaginary: 1694 return '+imag1*' 1695 else: 1696 return '+' 1697 elif total_coeff == -1: 1698 if is_imaginary: 1699 return '-imag1*' 1700 else: 1701 return '-' 1702 1703 res_str = '%+iD0' % total_coeff.numerator 1704 1705 if total_coeff.denominator != 1: 1706 # Check if total_coeff is an integer 1707 res_str = res_str + '/%iD0' % total_coeff.denominator 1708 1709 if is_imaginary: 1710 res_str = res_str + '*imag1' 1711 1712 return res_str + '*'
1713 1714
1715 - def set_fortran_compiler(self, default_compiler, force=False):
1716 """Set compiler based on what's available on the system""" 1717 1718 # Check for compiler 1719 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1720 f77_compiler = default_compiler['fortran'] 1721 elif misc.which('gfortran'): 1722 f77_compiler = 'gfortran' 1723 elif misc.which('g77'): 1724 f77_compiler = 'g77' 1725 elif misc.which('f77'): 1726 f77_compiler = 'f77' 1727 elif default_compiler['fortran']: 1728 logger.warning('No Fortran Compiler detected! Please install one') 1729 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1730 else: 1731 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1732 logger.info('Use Fortran compiler ' + f77_compiler) 1733 1734 1735 # Check for compiler. 1. set default. 1736 if default_compiler['f2py']: 1737 f2py_compiler = default_compiler['f2py'] 1738 else: 1739 f2py_compiler = '' 1740 # Try to find the correct one. 1741 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1742 f2py_compiler = default_compiler 1743 elif misc.which('f2py'): 1744 f2py_compiler = 'f2py' 1745 elif sys.version_info[1] == 6: 1746 if misc.which('f2py-2.6'): 1747 f2py_compiler = 'f2py-2.6' 1748 elif misc.which('f2py2.6'): 1749 f2py_compiler = 'f2py2.6' 1750 elif sys.version_info[1] == 7: 1751 if misc.which('f2py-2.7'): 1752 f2py_compiler = 'f2py-2.7' 1753 elif misc.which('f2py2.7'): 1754 f2py_compiler = 'f2py2.7' 1755 1756 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1757 1758 1759 self.replace_make_opt_f_compiler(to_replace) 1760 # Replace also for Template but not for cluster 1761 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1762 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1763 1764 return f77_compiler
1765 1766 # an alias for backward compatibility 1767 set_compiler = set_fortran_compiler 1768 1769
1770 - def set_cpp_compiler(self, default_compiler, force=False):
1771 """Set compiler based on what's available on the system""" 1772 1773 # Check for compiler 1774 if default_compiler and misc.which(default_compiler): 1775 compiler = default_compiler 1776 elif misc.which('g++'): 1777 #check if clang version 1778 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1779 stderr=subprocess.PIPE) 1780 out, _ = p.communicate() 1781 if 'clang' in out and misc.which('clang'): 1782 compiler = 'clang' 1783 else: 1784 compiler = 'g++' 1785 elif misc.which('c++'): 1786 compiler = 'c++' 1787 elif misc.which('clang'): 1788 compiler = 'clang' 1789 elif default_compiler: 1790 logger.warning('No c++ Compiler detected! Please install one') 1791 compiler = default_compiler # maybe misc fail so try with it 1792 else: 1793 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1794 logger.info('Use c++ compiler ' + compiler) 1795 self.replace_make_opt_c_compiler(compiler) 1796 # Replace also for Template but not for cluster 1797 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1798 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1799 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1800 1801 return compiler
1802 1803
1804 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1805 """Set FC=compiler in Source/make_opts""" 1806 1807 assert isinstance(compilers, dict) 1808 1809 mod = False #avoid to rewrite the file if not needed 1810 if not root_dir: 1811 root_dir = self.dir_path 1812 1813 compiler= compilers['fortran'] 1814 f2py_compiler = compilers['f2py'] 1815 if not f2py_compiler: 1816 f2py_compiler = 'f2py' 1817 for_update= {'DEFAULT_F_COMPILER':compiler, 1818 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1819 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1820 1821 try: 1822 common_run_interface.CommonRunCmd.update_make_opts_full( 1823 make_opts, for_update) 1824 except IOError: 1825 if root_dir == self.dir_path: 1826 logger.info('Fail to set compiler. Trying to continue anyway.')
1827
1828 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1829 """Set CXX=compiler in Source/make_opts. 1830 The version is also checked, in order to set some extra flags 1831 if the compiler is clang (on MACOS)""" 1832 1833 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1834 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1835 1836 # list of the variable to set in the make_opts file 1837 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1838 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1839 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1840 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1841 } 1842 1843 if not root_dir: 1844 root_dir = self.dir_path 1845 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1846 1847 try: 1848 common_run_interface.CommonRunCmd.update_make_opts_full( 1849 make_opts, for_update) 1850 except IOError: 1851 if root_dir == self.dir_path: 1852 logger.info('Fail to set compiler. Trying to continue anyway.') 1853 1854 return
1855
1856 #=============================================================================== 1857 # ProcessExporterFortranSA 1858 #=============================================================================== 1859 -class ProcessExporterFortranSA(ProcessExporterFortran):
1860 """Class to take care of exporting a set of matrix elements to 1861 MadGraph v4 StandAlone format.""" 1862 1863 matrix_template = "matrix_standalone_v4.inc" 1864
1865 - def __init__(self, *args,**opts):
1866 """add the format information compare to standard init""" 1867 1868 if 'format' in opts: 1869 self.format = opts['format'] 1870 del opts['format'] 1871 else: 1872 self.format = 'standalone' 1873 1874 self.prefix_info = {} 1875 ProcessExporterFortran.__init__(self, *args, **opts)
1876
    def copy_template(self, model):
        """Set up the standalone output directory tree and seed it with the
        version files, the subprocess makefile, check_sa.f (for the plain
        'standalone' format) and the Source makefile/make_opts.

        No-op when self.dir_path already exists.
        """

        # First copy the full template tree if dir_path doesn't exist
        if os.path.isdir(self.dir_path):
            return

        logger.info('initialize a new standalone directory: %s' % \
                        os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')

        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))

        # Information at top-level
        # Write version info; fall back on the MG5 package info when the
        # top-level MGMEVersion.txt is absent.
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])

        # Add file in SubProcesses: per-process makefile template
        # (linked later as 'makefile' in each P* directory).
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefileP'))

        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                        pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FileWriter(filename))
1925 1926 #=========================================================================== 1927 # export model files 1928 #===========================================================================
    def export_model_files(self, model_path):
        """Export the model-dependent files for a V4 model, then append the
        update_as_param() hook to couplings.f and adapt check_sa.f so its
        setpara call matches the UFO-generated two-argument signature."""

        super(ProcessExporterFortranSA,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Rewrite the driver in place: the UFO setpara takes a second
        # (read-card) argument that the template call does not pass.
        text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
1952 1953 #=========================================================================== 1954 # Make the Helas and Model directories for Standalone directory 1955 #===========================================================================
1956 - def make(self):
1957 """Run make in the DHELAS and MODEL directories, to set up 1958 everything for running standalone 1959 """ 1960 1961 source_dir = pjoin(self.dir_path, "Source") 1962 logger.info("Running make for Helas") 1963 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1964 logger.info("Running make for Model") 1965 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1966 1967 #=========================================================================== 1968 # Create proc_card_mg5.dat for Standalone directory 1969 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize the Standalone MG4 directory: select compilers, build
        the libraries, save the command history as proc_card_mg5.dat, and
        write either the f2py splitter files (prefixed output) or a global
        SubProcesses makefile."""

        # Compiler selection is delegated so daughter classes can override.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
        # Make the output directory importable from python.
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        if 'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # Prefixed processes: emit the dispatcher and the f2py makefile.
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                                     pjoin(self.dir_path,'SubProcesses','check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text+='all: %s\n\techo \'done\'' % deppython

            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
            ff.write(text)
            ff.close()
2019
2020 - def write_f2py_splitter(self):
2021 """write a function to call the correct matrix element""" 2022 2023 template = """ 2024 %(python_information)s 2025 subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS) 2026 IMPLICIT NONE 2027 2028 CF2PY double precision, intent(in), dimension(0:3,npdg) :: p 2029 CF2PY integer, intent(in), dimension(npdg) :: pdgs 2030 CF2PY integer, intent(in) :: npdg 2031 CF2PY double precision, intent(out) :: ANS 2032 CF2PY double precision, intent(in) :: ALPHAS 2033 CF2PY double precision, intent(in) :: SCALE2 2034 integer pdgs(*) 2035 integer npdg, nhel 2036 double precision p(*) 2037 double precision ANS, ALPHAS, PI,SCALE2 2038 include 'coupl.inc' 2039 2040 PI = 3.141592653589793D0 2041 G = 2* DSQRT(ALPHAS*PI) 2042 CALL UPDATE_AS_PARAM() 2043 if (scale2.ne.0d0) stop 1 2044 2045 %(smatrixhel)s 2046 2047 return 2048 end 2049 2050 SUBROUTINE INITIALISE(PATH) 2051 C ROUTINE FOR F2PY to read the benchmark point. 2052 IMPLICIT NONE 2053 CHARACTER*512 PATH 2054 CF2PY INTENT(IN) :: PATH 2055 CALL SETPARA(PATH) !first call to setup the paramaters 2056 RETURN 2057 END 2058 2059 subroutine get_pdg_order(PDG) 2060 IMPLICIT NONE 2061 CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i) 2062 INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i) 2063 DATA PDGS/ %(pdgs)s / 2064 PDG = PDGS 2065 RETURN 2066 END 2067 2068 subroutine get_prefix(PREFIX) 2069 IMPLICIT NONE 2070 CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i) 2071 character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i) 2072 DATA PREF / '%(prefix)s'/ 2073 PREFIX = PREF 2074 RETURN 2075 END 2076 2077 2078 """ 2079 2080 allids = self.prefix_info.keys() 2081 allprefix = [self.prefix_info[key][0] for key in allids] 2082 min_nexternal = min([len(ids) for ids in allids]) 2083 max_nexternal = max([len(ids) for ids in allids]) 2084 2085 info = [] 2086 for key, (prefix, tag) in self.prefix_info.items(): 2087 info.append('#PY %s : %s # %s' % (tag, key, prefix)) 2088 2089 2090 text = [] 2091 for n_ext in 
range(min_nexternal, max_nexternal+1): 2092 current = [ids for ids in allids if len(ids)==n_ext] 2093 if not current: 2094 continue 2095 if min_nexternal != max_nexternal: 2096 if n_ext == min_nexternal: 2097 text.append(' if (npdg.eq.%i)then' % n_ext) 2098 else: 2099 text.append(' else if (npdg.eq.%i)then' % n_ext) 2100 for ii,pdgs in enumerate(current): 2101 condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)]) 2102 if ii==0: 2103 text.append( ' if(%s) then ! %i' % (condition, i)) 2104 else: 2105 text.append( ' else if(%s) then ! %i' % (condition,i)) 2106 text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0]) 2107 text.append(' endif') 2108 #close the function 2109 if min_nexternal != max_nexternal: 2110 text.append('endif') 2111 2112 formatting = {'python_information':'\n'.join(info), 2113 'smatrixhel': '\n'.join(text), 2114 'maxpart': max_nexternal, 2115 'nb_me': len(allids), 2116 'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0' 2117 for i in range(max_nexternal) for pdg in allids), 2118 'prefix':'\',\''.join(allprefix) 2119 } 2120 formatting['lenprefix'] = len(formatting['prefix']) 2121 text = template % formatting 2122 fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w') 2123 fsock.writelines(text) 2124 fsock.close()
2125
2126 - def write_f2py_check_sa(self, matrix_element, writer):
2127 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2128 # To be implemented. It is just an example file, i.e. not crucial. 2129 return
2130
2131 - def write_f2py_makefile(self):
2132 """ """ 2133 # Add file in SubProcesses 2134 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2135 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2136
2137 - def create_MA5_cards(self,*args,**opts):
2138 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2139 pass
2140
    def compiler_choice(self, compiler):
        """Select the compilers for this output format.

        compiler: dict with 'fortran', 'cpp' and 'f2py' entries.
        Daughter classes overload this hook when they need a different
        selection strategy; the default just runs the standard setter.
        """

        self.set_compiler(compiler)
2146 2147 #=========================================================================== 2148 # generate_subprocess_directory 2149 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.

        Returns the number of helas calls written (0 when the directory is
        skipped because a mirror-symmetric one already exists)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if any permutation of the final-state
            # legs (with initial legs possibly swapped) already has a
            # directory, skip this one.  Legs are mutated in place to probe
            # each shell_string, then restored before returning/continuing.
            for i,proc in enumerate(matrix_element.get('processes')):

                tag = proc.get_tag()
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                flegs = proc.get('legs')[2:]
                for perm in itertools.permutations(flegs):
                    for i,p in enumerate(perm):
                        proc.get('legs')[i+2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                               "P%s" % proc.shell_string())
                    #restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional routine prefix (--prefix=int|proc) used for f2py output;
        # prefix_info feeds the all_matrix.f dispatcher written at finalize.
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1]
            else:
                raise Exception, '--prefix options supports only \'int\' and \'proc\''
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        # Extra include files required by the MadSpin production variants.
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                             matrix_element,
                             s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                             nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb=matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                             ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(matrix_element.get_all_amplitudes()))

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # With a prefix, a private check_sa.f with renamed smatrix entry
        # points replaces the shared symlinked one.
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            new_text, n = re.subn('smatrix', '%ssmatrix' % proc_prefix, text, flags=re.I)
            with open(pjoin(dirpath, 'check_sa.f'),'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2287 2288 2289 #=========================================================================== 2290 # write_source_makefile 2291 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source directory makefile (library targets plus the
        MODEL build rule) and return the replacement dictionary so daughter
        classes can extend it.  (Docstring corrected: this method writes the
        Source makefile, not nexternal.inc.)"""

        # Template with %(...)s slots filled from replace_dict below.
        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)'
        model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n'''

        replace_dict= {'libraries': set_of_lib,
                       'model':model_line,
                       'additional_dsample': '',
                       'additional_dependencies':''}

        text = open(path).read() % replace_dict

        # writer may be None when a subclass only wants the dictionary.
        if writer:
            writer.write(text)

        return replace_dict
2310 2311 #=========================================================================== 2312 # write_matrix_element_v4 2313 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format.

        writer: FortranWriter for the output (may be None with write=False).
        write: when False, only build and return the replacement dictionary
            (used by subclasses that post-process it).
        proc_prefix: optional prefix applied to the Fortran routine names.
        Returns the number of non-comment helas calls, or the replace_dict
        when write is False.
        """

        # Nothing to do for empty processes (e.g. fully vetoed ones).
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                    "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False

        # Older call paths may not have set this option.
        if not self.opt.has_key('sa_symmetry'):
            self.opt['sa_symmetry']=False

        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 so Fortran arrays stay valid)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
        else:
            # Genuine split orders: build the order-resolved JAMPs and the
            # bookkeeping arrays used by the splitOrders template.
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)

            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
              nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Choose the matrix.f template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                   'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
                writer.writelines(content)
            return len(filter(lambda call: call.find('#') != 0, helas_calls))
        else:
            replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls))
            return replace_dict # for subclass update
2484
2485 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2486 nincoming, proc_prefix, writer):
2487 """ Write out a more advanced version of the check_sa drivers that 2488 individually returns the matrix element for each contributing squared 2489 order.""" 2490 2491 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2492 'template_files', 'check_sa_splitOrders.f')).read() 2493 printout_sq_orders=[] 2494 for i, squared_order in enumerate(squared_orders): 2495 sq_orders=[] 2496 for j, sqo in enumerate(squared_order): 2497 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2498 printout_sq_orders.append(\ 2499 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2500 %(i+1,' '.join(sq_orders),i+1)) 2501 printout_sq_orders='\n'.join(printout_sq_orders) 2502 replace_dict = {'printout_sqorders':printout_sq_orders, 2503 'nSplitOrders':len(squared_orders), 2504 'nexternal':nexternal, 2505 'nincoming':nincoming, 2506 'proc_prefix':proc_prefix} 2507 2508 if writer: 2509 writer.writelines(check_sa_content % replace_dict) 2510 else: 2511 return replace_dict
2512
2513 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2514 """class to take care of exporting a set of matrix element for the Matchbox 2515 code in the case of Born only routine""" 2516 2517 default_opt = {'clean': False, 'complex_mass':False, 2518 'export_format':'matchbox', 'mp': False, 2519 'sa_symmetry': True} 2520 2521 #specific template of the born 2522 2523 2524 matrix_template = "matrix_standalone_matchbox.inc" 2525 2526 @staticmethod
    def get_color_string_lines(matrix_element):
        """Return Fortran if/elseif lines encoding the color structure of
        this matrix element for Matchbox: for each color flow (in1) the
        generator indices of its T/Tr chains are listed position by position
        (in2), each chain terminated by a 0 sentinel; unmatched queries give
        out = -1.  Only T and Tr (and trivially ColorOne) structures are
        supported."""

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                             get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    # Trivial color factor: contributes no indices.
                    continue
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s'  % ctype
                # 0 acts as an end-of-chain marker between structures.
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # Each (flow, position) pair becomes one branch of the chain.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
                    if (in1.eq.%s.and.in2.eq.%s)then
                    out = %s
                    """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
                    elseif (in1.eq.%s.and.in2.eq.%s)then
                    out = %s
                    """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)
2578
2579 - def make(self,*args,**opts):
2580 pass
2581
2582 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2583 JAMP_formatLC=None):
2584 2585 """Adding leading color part of the colorflow""" 2586 2587 if not JAMP_formatLC: 2588 JAMP_formatLC= "LN%s" % JAMP_format 2589 2590 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2591 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2592 col_amps=col_amps.get_color_amplitudes() 2593 elif(isinstance(col_amps,list)): 2594 if(col_amps and isinstance(col_amps[0],list)): 2595 col_amps=col_amps 2596 else: 2597 raise MadGraph5Error, error_msg % 'col_amps' 2598 else: 2599 raise MadGraph5Error, error_msg % 'col_amps' 2600 2601 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2602 JAMP_format=JAMP_format, 2603 AMP_format=AMP_format, 2604 split=-1) 2605 2606 2607 # Filter the col_ampls to generate only those without any 1/NC terms 2608 2609 LC_col_amps = [] 2610 for coeff_list in col_amps: 2611 to_add = [] 2612 for (coefficient, amp_number) in coeff_list: 2613 if coefficient[3]==0: 2614 to_add.append( (coefficient, amp_number) ) 2615 LC_col_amps.append(to_add) 2616 2617 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2618 JAMP_format=JAMP_formatLC, 2619 AMP_format=AMP_format, 2620 split=-1) 2621 2622 return text
2623
2624 2625 2626 2627 #=============================================================================== 2628 # ProcessExporterFortranMW 2629 #=============================================================================== 2630 -class ProcessExporterFortranMW(ProcessExporterFortran):
2631 """Class to take care of exporting a set of matrix elements to 2632 MadGraph v4 - MadWeight format.""" 2633 2634 matrix_file="matrix_standalone_v4.inc" 2635
    def copy_template(self, model):
        """Additional actions needed for setup of Template.

        On top of the generic Fortran template copy, installs the
        MadWeight-specific sources and scripts, moves setrun.f/run.inc
        into Source, generates run_config.inc, the python helpers, a
        MadWeight-compatible cuts.f and the Source makefile.
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        # (third positional argument True = preserve symlinks)
        shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'),
                               pjoin(self.dir_path, 'Source','MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'),
                               pjoin(self.dir_path, 'bin','internal','madweight'), True)
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'),
                 pjoin(self.dir_path, 'Source','setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'),
                 pjoin(self.dir_path, 'Source','run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        # Run the MadWeight installation hook, discarding its output.
        # NOTE(review): the os.open(devnull) descriptors are never closed;
        # harmless for a one-shot call but technically leaked fds.
        try:
            subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')],
                            stdout = os.open(os.devnull, os.O_RDWR),
                            stderr = os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2672 2673 2674 2675 2676 #=========================================================================== 2677 # convert_model 2678 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                                                         wanted_couplings = []):
        """Convert the UFO model for this export and install a clean copy
        of it under bin/internal/ufomodel (plus the restriction card, if
        any), so the output directory is self-contained.

        NOTE(review): the mutable list defaults are shared across calls;
        they are only passed through here, so this is benign as written.
        """

        super(ProcessExporterFortranMW,self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        # Refresh the embedded copy of the UFO model, skipping caches and
        # generated data files.
        IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
        try:
            shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
        except OSError as error:
            # Directory absent on first export -- nothing to remove.
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                               pjoin(self.dir_path,'bin','internal','ufomodel'),
                               ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        # Ship the restriction card (either a ParamCard object or a path).
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
                                                         'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
2702 2703 #=========================================================================== 2704 # generate_subprocess_directory 2705 #===========================================================================
2706 - def copy_python_file(self):
2707 """copy the python file require for the Template""" 2708 2709 # madevent interface 2710 cp(_file_path+'/interface/madweight_interface.py', 2711 self.dir_path+'/bin/internal/madweight_interface.py') 2712 cp(_file_path+'/interface/extended_cmd.py', 2713 self.dir_path+'/bin/internal/extended_cmd.py') 2714 cp(_file_path+'/interface/common_run_interface.py', 2715 self.dir_path+'/bin/internal/common_run_interface.py') 2716 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2717 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2718 cp(_file_path+'/iolibs/save_load_object.py', 2719 self.dir_path+'/bin/internal/save_load_object.py') 2720 cp(_file_path+'/madevent/gen_crossxhtml.py', 2721 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2722 cp(_file_path+'/various/FO_analyse_card.py', 2723 self.dir_path+'/bin/internal/FO_analyse_card.py') 2724 cp(_file_path+'/iolibs/file_writers.py', 2725 self.dir_path+'/bin/internal/file_writers.py') 2726 #model file 2727 cp(_file_path+'../models/check_param_card.py', 2728 self.dir_path+'/bin/internal/check_param_card.py') 2729 2730 #madevent file 2731 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2732 cp(_file_path+'/various/lhe_parser.py', 2733 self.dir_path+'/bin/internal/lhe_parser.py') 2734 2735 cp(_file_path+'/various/banner.py', 2736 self.dir_path+'/bin/internal/banner.py') 2737 cp(_file_path+'/various/shower_card.py', 2738 self.dir_path+'/bin/internal/shower_card.py') 2739 cp(_file_path+'/various/cluster.py', 2740 self.dir_path+'/bin/internal/cluster.py') 2741 2742 # logging configuration 2743 cp(_file_path+'/interface/.mg5_logging.conf', 2744 self.dir_path+'/bin/internal/me5_logging.conf') 2745 cp(_file_path+'/interface/coloring_logging.py', 2746 self.dir_path+'/bin/internal/coloring_logging.py')
2747 2748 2749 #=========================================================================== 2750 # Change the version of cuts.f to the one compatible with MW 2751 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None (default location SubProcesses/cuts.f), a path
        string, or an already-open file-like object.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # nb_if tracks the nesting depth of the 'if(xqcut.gt.0d0...' block;
        # lines are skipped until its matching endif brings the depth to 0.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
        cut_bw = .true.
      else
        stop 1
      endif
      return
      end
""")

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            # Assume a writable file-like object was handed in.
            fsock = outpath
        fsock.write(template)
2801 2802 2803 2804 #=========================================================================== 2805 # Make the Helas and Model directories for Standalone directory 2806 #===========================================================================
2807 - def make(self):
2808 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2809 everything for running madweight 2810 """ 2811 2812 source_dir = os.path.join(self.dir_path, "Source") 2813 logger.info("Running make for Helas") 2814 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2815 logger.info("Running make for Model") 2816 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2817 logger.info("Running make for PDF") 2818 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2819 logger.info("Running make for CERNLIB") 2820 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2821 logger.info("Running make for GENERIC") 2822 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2823 logger.info("Running make for blocks") 2824 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2825 logger.info("Running make for tools") 2826 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2827 2828 #=========================================================================== 2829 # Create proc_card_mg5.dat for MadWeight directory 2830 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat.

        Also writes maxparticles.inc (linked into the MadWeight blocks/tools
        directories), sets the compilers, builds the libraries and delegates
        the generic finalization to ProcessExporterFortran.
        """

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # The MadWeight helper libraries include the same file via symlink.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                             history, mg5options, flaglist)
2862 2863 2864 2865 #=========================================================================== 2866 # create the run_card for MW 2867 #===========================================================================
2868 - def create_run_card(self, matrix_elements, history):
2869 """ """ 2870 2871 run_card = banner_mod.RunCard() 2872 2873 # pass to default for MW 2874 run_card["run_tag"] = "\'not_use\'" 2875 run_card["fixed_ren_scale"] = "T" 2876 run_card["fixed_fac_scale"] = "T" 2877 run_card.remove_all_cut() 2878 2879 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2880 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2881 python_template=True) 2882 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2883 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2884 python_template=True)
2885 2886 #=========================================================================== 2887 # export model files 2888 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model.

        Extends the generic export with three MadWeight-specific patches:
        append an update_as_param hook to couplings.f, and switch
        setrun.f / initialization.f to the two-argument setpara call.
        """

        super(ProcessExporterFortranMW,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Modify setrun.f
        text = open(os.path.join(self.dir_path,'Source','setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        # Modify initialization.f
        text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read()
        text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)')
        fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w')
        fsock.write(text)
        fsock.close()


        self.make_model_symbolic_link()
2921 2922 #=========================================================================== 2923 # generate_subprocess_directory 2924 #===========================================================================
2925 - def generate_subprocess_directory(self, matrix_element, 2926 fortran_model,number):
2927 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2928 including the necessary matrix.f and nexternal.inc files""" 2929 2930 cwd = os.getcwd() 2931 misc.sprint(type(matrix_element)) 2932 # Create the directory PN_xx_xxxxx in the specified path 2933 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2934 "P%s" % matrix_element.get('processes')[0].shell_string()) 2935 2936 try: 2937 os.mkdir(dirpath) 2938 except os.error as error: 2939 logger.warning(error.strerror + " " + dirpath) 2940 2941 #try: 2942 # os.chdir(dirpath) 2943 #except os.error: 2944 # logger.error('Could not cd to directory %s' % dirpath) 2945 # return 0 2946 2947 logger.info('Creating files in directory %s' % dirpath) 2948 2949 # Extract number of external particles 2950 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2951 2952 # Create the matrix.f file and the nexternal.inc file 2953 filename = pjoin(dirpath,'matrix.f') 2954 calls,ncolor = self.write_matrix_element_v4( 2955 writers.FortranWriter(filename), 2956 matrix_element, 2957 fortran_model) 2958 2959 filename = pjoin(dirpath, 'auto_dsig.f') 2960 self.write_auto_dsig_file(writers.FortranWriter(filename), 2961 matrix_element) 2962 2963 filename = pjoin(dirpath, 'configs.inc') 2964 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2965 writers.FortranWriter(filename), 2966 matrix_element) 2967 2968 filename = pjoin(dirpath, 'nexternal.inc') 2969 self.write_nexternal_file(writers.FortranWriter(filename), 2970 nexternal, ninitial) 2971 2972 filename = pjoin(dirpath, 'leshouche.inc') 2973 self.write_leshouche_file(writers.FortranWriter(filename), 2974 matrix_element) 2975 2976 filename = pjoin(dirpath, 'props.inc') 2977 self.write_props_file(writers.FortranWriter(filename), 2978 matrix_element, 2979 s_and_t_channels) 2980 2981 filename = pjoin(dirpath, 'pmass.inc') 2982 self.write_pmass_file(writers.FortranWriter(filename), 2983 matrix_element) 2984 2985 filename = pjoin(dirpath, 
'ngraphs.inc') 2986 self.write_ngraphs_file(writers.FortranWriter(filename), 2987 len(matrix_element.get_all_amplitudes())) 2988 2989 filename = pjoin(dirpath, 'maxamps.inc') 2990 self.write_maxamps_file(writers.FortranWriter(filename), 2991 len(matrix_element.get('diagrams')), 2992 ncolor, 2993 len(matrix_element.get('processes')), 2994 1) 2995 2996 filename = pjoin(dirpath, 'phasespace.inc') 2997 self.write_phasespace_file(writers.FortranWriter(filename), 2998 len(matrix_element.get('diagrams')), 2999 ) 3000 3001 # Generate diagrams 3002 filename = pjoin(dirpath, "matrix.ps") 3003 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3004 get('diagrams'), 3005 filename, 3006 model=matrix_element.get('processes')[0].\ 3007 get('model'), 3008 amplitude='') 3009 logger.info("Generating Feynman diagrams for " + \ 3010 matrix_element.get('processes')[0].nice_string()) 3011 plot.draw() 3012 3013 #import genps.inc and maxconfigs.inc into Subprocesses 3014 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 3015 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 3016 3017 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 3018 3019 for file in linkfiles: 3020 ln('../%s' % file, starting_dir=cwd) 3021 3022 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 3023 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 3024 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 3025 ln('phasespace.inc', '../', log=True, cwd=dirpath) 3026 # Return to original PWD 3027 #os.chdir(cwd) 3028 3029 if not calls: 3030 calls = 0 3031 return calls
3032 3033 #=========================================================================== 3034 # write_matrix_element_v4 3035 #===========================================================================
3036 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3037 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3038 3039 if not matrix_element.get('processes') or \ 3040 not matrix_element.get('diagrams'): 3041 return 0 3042 3043 if writer: 3044 if not isinstance(writer, writers.FortranWriter): 3045 raise writers.FortranWriter.FortranWriterError(\ 3046 "writer not FortranWriter") 3047 3048 # Set lowercase/uppercase Fortran code 3049 writers.FortranWriter.downcase = False 3050 3051 replace_dict = {} 3052 3053 # Extract version number and date from VERSION file 3054 info_lines = self.get_mg5_info_lines() 3055 replace_dict['info_lines'] = info_lines 3056 3057 # Extract process info lines 3058 process_lines = self.get_process_info_lines(matrix_element) 3059 replace_dict['process_lines'] = process_lines 3060 3061 # Set proc_id 3062 replace_dict['proc_id'] = proc_id 3063 3064 # Extract number of external particles 3065 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3066 replace_dict['nexternal'] = nexternal 3067 3068 # Extract ncomb 3069 ncomb = matrix_element.get_helicity_combinations() 3070 replace_dict['ncomb'] = ncomb 3071 3072 # Extract helicity lines 3073 helicity_lines = self.get_helicity_lines(matrix_element) 3074 replace_dict['helicity_lines'] = helicity_lines 3075 3076 # Extract overall denominator 3077 # Averaging initial state color, spin, and identical FS particles 3078 den_factor_line = self.get_den_factor_line(matrix_element) 3079 replace_dict['den_factor_line'] = den_factor_line 3080 3081 # Extract ngraphs 3082 ngraphs = matrix_element.get_number_of_amplitudes() 3083 replace_dict['ngraphs'] = ngraphs 3084 3085 # Extract nwavefuncs 3086 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3087 replace_dict['nwavefuncs'] = nwavefuncs 3088 3089 # Extract ncolor 3090 ncolor = max(1, len(matrix_element.get('color_basis'))) 3091 replace_dict['ncolor'] = ncolor 3092 3093 # Extract color data lines 3094 color_data_lines = self.get_color_data_lines(matrix_element) 3095 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3096 3097 # Extract helas calls 3098 helas_calls = fortran_model.get_matrix_element_calls(\ 3099 matrix_element) 3100 3101 replace_dict['helas_calls'] = "\n".join(helas_calls) 3102 3103 # Extract JAMP lines 3104 jamp_lines = self.get_JAMP_lines(matrix_element) 3105 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3106 3107 replace_dict['template_file'] = os.path.join(_file_path, \ 3108 'iolibs/template_files/%s' % self.matrix_file) 3109 replace_dict['template_file2'] = '' 3110 3111 if writer: 3112 file = open(replace_dict['template_file']).read() 3113 file = file % replace_dict 3114 # Write the file 3115 writer.writelines(file) 3116 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3117 else: 3118 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3119 3120 #=========================================================================== 3121 # write_source_makefile 3122 #===========================================================================
3123 - def write_source_makefile(self, writer):
3124 """Write the nexternal.inc file for madweight""" 3125 3126 3127 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3128 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3129 text = open(path).read() % {'libraries': set_of_lib} 3130 writer.write(text) 3131 3132 return True
3133
    def write_phasespace_file(self, writer, nb_diag):
        """Write phasespace.inc, defining max_branches/max_configs and the
        common block holding the current channel position.

        nb_diag: number of diagrams, used as the max_configs parameter.
        """

        template = """ include 'maxparticles.inc'
      integer max_branches
      parameter (max_branches=max_particles-1)
      integer max_configs
      parameter (max_configs=%(nb_diag)s)

c     channel position
      integer config_pos,perm_pos
      common /to_config/config_pos,perm_pos

        """

        writer.write(template % {'nb_diag': nb_diag})
3150 3151 3152 #=========================================================================== 3153 # write_auto_dsig_file 3154 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format).

        With a writer the file is written (no return value); without one
        the substitution dictionary is returned.  Returns 0 for an empty
        matrix element.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        # (a non-empty proc_id means we are inside a subprocess group).
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            file = open(os.path.join(_file_path, \
                          'iolibs/template_files/auto_dsig_mw.inc')).read()

            file = file % replace_dict

            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3224 #=========================================================================== 3225 # write_configs_file 3226 #===========================================================================
3227 - def write_configs_file(self, writer, matrix_element):
3228 """Write the configs.inc file for MadEvent""" 3229 3230 # Extract number of external particles 3231 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3232 3233 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3234 mapconfigs = [c[0] for c in configs] 3235 model = matrix_element.get('processes')[0].get('model') 3236 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3237 [[c[1]] for c in configs], 3238 mapconfigs, 3239 nexternal, ninitial,matrix_element, model)
3240 3241 #=========================================================================== 3242 # write_run_configs_file 3243 #===========================================================================
3244 - def write_run_config_file(self, writer):
3245 """Write the run_configs.inc file for MadWeight""" 3246 3247 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3248 text = open(path).read() % {'chanperjob':'5'} 3249 writer.write(text) 3250 return True
3251 3252 #=========================================================================== 3253 # write_configs_file_from_diagrams 3254 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channels, t-channels) per written config.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Smallest maximal vertex size over all configs; configs containing
        # any larger (multi-particle) vertex are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Placeholder PDG code for fake propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial,model,new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # Output format: propagator-number daughter1 daughter2
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                #       lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #                    (last_leg.get('number'), nconfigs, len(daughters),
                #                     ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    #            lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #                         (last_leg.get('number'), nconfigs, nsubprocs,
                    #                          ",".join([str(d) for d in pdgs])))
                    #            lines.append("data tprid(%d,%d)/0/" % \
                    #                         (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    #            lines.append("data tprid(%d,%d)/%d/" % \
                    #                         (last_leg.get('number'), nconfigs,
                    #                          abs(last_leg.get('id'))))
                    #            lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #                         (last_leg.get('number'), nconfigs, nsubprocs,
                    #                          ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        #    lines.append("# Number of configs")
        #    lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3397
3398 3399 #=============================================================================== 3400 # ProcessExporterFortranME 3401 #=============================================================================== 3402 -class ProcessExporterFortranME(ProcessExporterFortran):
3403 """Class to take care of exporting a set of matrix elements to 3404 MadEvent format.""" 3405 3406 matrix_file = "matrix_madevent_v4.inc" 3407
    def copy_template(self, model):
        """Additional actions needed for setup of Template.

        On top of the generic template installed by the mother class, this
        writes the MadEvent-specific sources (run_config.inc, symmetry.f,
        addmothers.f) and copies the python files needed by the
        bin/internal scripts of the output directory.
        """

        super(ProcessExporterFortranME, self).copy_template(model)

        # File created from Template (Different in some child class)
        filename = pjoin(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        # The next file are model dependant (due to SLAH convention)
        # Remember the model name: later steps need it for model-specific fixes.
        self.model_name = model.get('name')
        # Add the symmetry.f
        filename = pjoin(self.dir_path,'SubProcesses','symmetry.f')
        self.write_symmetry(writers.FortranWriter(filename))
        # addmothers.f reconstructs the mother-particle information of events
        filename = pjoin(self.dir_path,'SubProcesses','addmothers.f')
        self.write_addmothers(writers.FortranWriter(filename))
        # Copy the different python file in the Template
        self.copy_python_file()
    #===========================================================================
    # copy_python_file
    #===========================================================================
3436 - def copy_python_file(self):
3437 """copy the python file require for the Template""" 3438 3439 # madevent interface 3440 cp(_file_path+'/interface/madevent_interface.py', 3441 self.dir_path+'/bin/internal/madevent_interface.py') 3442 cp(_file_path+'/interface/extended_cmd.py', 3443 self.dir_path+'/bin/internal/extended_cmd.py') 3444 cp(_file_path+'/interface/common_run_interface.py', 3445 self.dir_path+'/bin/internal/common_run_interface.py') 3446 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3447 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3448 cp(_file_path+'/iolibs/save_load_object.py', 3449 self.dir_path+'/bin/internal/save_load_object.py') 3450 cp(_file_path+'/iolibs/file_writers.py', 3451 self.dir_path+'/bin/internal/file_writers.py') 3452 #model file 3453 cp(_file_path+'../models/check_param_card.py', 3454 self.dir_path+'/bin/internal/check_param_card.py') 3455 3456 #copy all the file present in madevent directory 3457 for name in os.listdir(pjoin(_file_path, 'madevent')): 3458 if name not in ['__init__.py'] and name.endswith('.py'): 3459 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3460 3461 #madevent file 3462 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3463 cp(_file_path+'/various/lhe_parser.py', 3464 self.dir_path+'/bin/internal/lhe_parser.py') 3465 cp(_file_path+'/various/banner.py', 3466 self.dir_path+'/bin/internal/banner.py') 3467 cp(_file_path+'/various/histograms.py', 3468 self.dir_path+'/bin/internal/histograms.py') 3469 cp(_file_path+'/various/plot_djrs.py', 3470 self.dir_path+'/bin/internal/plot_djrs.py') 3471 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3472 3473 cp(_file_path+'/various/cluster.py', 3474 self.dir_path+'/bin/internal/cluster.py') 3475 cp(_file_path+'/madevent/combine_runs.py', 3476 self.dir_path+'/bin/internal/combine_runs.py') 3477 # logging configuration 3478 cp(_file_path+'/interface/.mg5_logging.conf', 3479 
self.dir_path+'/bin/internal/me5_logging.conf') 3480 cp(_file_path+'/interface/coloring_logging.py', 3481 self.dir_path+'/bin/internal/coloring_logging.py') 3482 # shower card and FO_analyse_card. 3483 # Although not needed, it is imported by banner.py 3484 cp(_file_path+'/various/shower_card.py', 3485 self.dir_path+'/bin/internal/shower_card.py') 3486 cp(_file_path+'/various/FO_analyse_card.py', 3487 self.dir_path+'/bin/internal/FO_analyse_card.py')
3488 3489
3490 - def convert_model(self, model, wanted_lorentz = [], 3491 wanted_couplings = []):
3492 3493 super(ProcessExporterFortranME,self).convert_model(model, 3494 wanted_lorentz, wanted_couplings) 3495 3496 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3497 try: 3498 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3499 except OSError as error: 3500 pass 3501 model_path = model.get('modelpath') 3502 # This is not safe if there is a '##' or '-' in the path. 3503 shutil.copytree(model_path, 3504 pjoin(self.dir_path,'bin','internal','ufomodel'), 3505 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3506 if hasattr(model, 'restrict_card'): 3507 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3508 'restrict_default.dat') 3509 if isinstance(model.restrict_card, check_param_card.ParamCard): 3510 model.restrict_card.write(out_path) 3511 else: 3512 files.cp(model.restrict_card, out_path)
3513 3514 #=========================================================================== 3515 # export model files 3516 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files"""

        super(ProcessExporterFortranME,self).export_model_files(model_path)

        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        # NOTE(review): the Fortran snippet below is appended verbatim to
        # couplings.f; its layout is assumed to match the original template.
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Add the symmetry.f (v5=False selects the v4-model variant)
        filename = pjoin(self.dir_path,'SubProcesses','symmetry.f')
        self.write_symmetry(writers.FortranWriter(filename), v5=False)

        # Modify setrun.f so setpara is called with the explicit .true. flag
        text = open(pjoin(self.dir_path,'Source','setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
3546 3547 #=========================================================================== 3548 # generate_subprocess_directory 3549 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        Returns the number of non-comment helas calls written in matrix.f
        (0 when nothing was written)."""

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        # Cache the model from the first process if not already set.
        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # Directory may already exist: warn and keep writing into it.
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                            matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t-channel decomposition and the
        # number of QCD couplings per config, reused by the files below.
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
            diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams (postscript drawing of all Feynman diagrams)
        filename = pjoin(Ppath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)

        # Normalise the return value: a falsy calls count becomes 0.
        if not calls:
            calls = 0
        return calls
3720 3721 link_Sub_files = ['addmothers.f', 3722 'cluster.f', 3723 'cluster.inc', 3724 'coupl.inc', 3725 'cuts.f', 3726 'cuts.inc', 3727 'genps.f', 3728 'genps.inc', 3729 'idenparts.f', 3730 'initcluster.f', 3731 'makefile', 3732 'message.inc', 3733 'myamp.f', 3734 'reweight.f', 3735 'run.inc', 3736 'maxconfigs.inc', 3737 'maxparticles.inc', 3738 'run_config.inc', 3739 'lhe_event_infos.inc', 3740 'setcuts.f', 3741 'setscales.f', 3742 'sudakov.inc', 3743 'symmetry.f', 3744 'unwgt.f', 3745 'dummy_fct.f' 3746 ] 3747 3761 3762
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize ME v4 directory by creating jpeg diagrams, html
        pages,proc_card_mg5.dat and madevent.tar.gz.

        flaglist may contain 'nojpeg' (skip diagram jpegs) and 'online'
        (record the number of generated channels for the web interface).
        """

        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        if 'online' in flaglist:
            online = True
        else:
            online = False

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        # indicate that the output type is not grouped
        if not isinstance(self, ProcessExporterFortranMEGroup):
            self.proc_characteristic['grouped_matrix'] = False
        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        # MSSM-family models need their param_card translated to MG5 format.
        modelname = self.opt['model']
        if modelname == 'mssm' or modelname.startswith('mssm-'):
            param_card = pjoin(self.dir_path, 'Cards','param_card.dat')
            mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
            check_param_card.convert_to_mg5card(param_card, mg5_param)
            check_param_card.check_valid_param_card(mg5_param)

        # Add the combine_events.f modify param_card path/number of @X
        filename = pjoin(self.dir_path,'Source','combine_events.f')
        # matrix_elements may be a list of groups or a single multiprocess:
        # the AttributeError fallback handles the second layout.
        try:
            nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
        except AttributeError:
            nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
        nb_proc = len(set(nb_proc))
        self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements)

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)

        # Touch "done" file
        os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses'))

        # Check for compiler
        self.set_compiler(compiler)
        self.set_cpp_compiler(compiler['cpp'])

        old_pos = os.getcwd()
        subpath = pjoin(self.dir_path, 'SubProcesses')

        P_dir_list = [proc for proc in os.listdir(subpath)
                      if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P']

        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the poscript in jpg files (if authorize)
        if makejpg:
            # Best-effort removal of a stale card.jpg; ignore any failure.
            try:
                os.remove(pjoin(self.dir_path,'HTML','card.jpg'))
            except Exception, error:
                pass

            # Only attempt the conversion when ghostscript is available.
            if misc.which('gs'):
                logger.info("Generate jpeg diagrams")
                for Pdir in P_dir_list:
                    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                              stdout = devnull, cwd=pjoin(subpath, Pdir))

        logger.info("Generate web pages")
        # Create the WebPage using perl script

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                  stdout = devnull,cwd=pjoin(self.dir_path))

        #os.chdir(os.path.pardir)

        obj = gen_infohtml.make_info_html(self.dir_path)

        if online:
            nb_channel = obj.rep_rule['nb_gen_diag']
            open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel))
        #add the information to proc_charac
        self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

        # Write command history as proc_card_mg5
        if os.path.isdir(pjoin(self.dir_path,'Cards')):
            output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull)

        #crate the proc_characteristic file
        self.create_proc_charac(matrix_elements, history)

        # create the run_card
        ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')):
            if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')):
                os.remove(pjoin(self.dir_path,'madevent.tar.gz'))
            misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                      stdout = devnull, cwd=self.dir_path)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull, cwd=self.dir_path)

        #return to the initial dir
        #os.chdir(old_pos)
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                proc_id = "", config_map = [], subproc_number = ""):
        """Export a matrix element to a matrix.f file in MG4 madevent format.

        When writer is given, the filled template is written out and the
        tuple (number of helas calls, ncolor) is returned; without a writer
        the substitution dictionary itself is returned (used by grouped
        output). proc_id is non-empty for subprocess-group running.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                    "writer not FortranWriter")
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False

        # The proc prefix is not used for MadEvent output so it can safely be set
        # to an empty string.
        replace_dict = {'proc_prefix':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract IC line
        ic_line = self.get_ic_line(matrix_element)
        replace_dict['ic_line'] = ic_line

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        den_factor_line = self.get_den_factor_line(matrix_element)
        replace_dict['den_factor_line'] = den_factor_line

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract ndiags
        ndiags = len(matrix_element.get('diagrams'))
        replace_dict['ndiags'] = ndiags

        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] = \
             """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
             COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

        if proc_id:
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_iconfigs_lines'] += \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                            proc_id
        else:
            # Standard running
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        # Set the size of Wavefunction: spin 3/2 or 2 particles (spin codes
        # 4/5) need the larger 18-component wavefunctions.
        if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
            replace_dict['wavefunctionsize'] = 18
        else:
            replace_dict['wavefunctionsize'] = 6

        # Extract amp2 lines
        amp2_lines = self.get_amp2_lines(matrix_element, config_map)
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

        # The JAMP definition depends on the splitting order
        split_orders=matrix_element.get('processes')[0].get('split_orders')
        if len(split_orders)>0:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)
        else:
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'

        replace_dict['nAmpSplitOrders']=len(amp_orders)
        replace_dict['nSqAmpSplitOrders']=len(squared_orders)
        replace_dict['split_order_str_list']=str(split_orders)
        replace_dict['nSplitOrders']=max(len(split_orders),1)
        amp_so = self.get_split_orders_lines(
            [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
        sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
        replace_dict['ampsplitorders']='\n'.join(amp_so)
        replace_dict['sqsplitorders']='\n'.join(sqamp_so)

        # Extract JAMP lines
        # If no split_orders then artificiall add one entry called 'ALL_ORDERS'
        jamp_lines = self.get_JAMP_lines_split_order(\
            matrix_element,amp_orders,split_order_names=
            split_orders if len(split_orders)>0 else ['ALL_ORDERS'])
        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        replace_dict['template_file'] = pjoin(_file_path, \
                             'iolibs/template_files/%s' % self.matrix_file)
        replace_dict['template_file2'] = pjoin(_file_path, \
                             'iolibs/template_files/split_orders_helping_functions.inc')
        if writer:
            file = open(replace_dict['template_file']).read()
            file = file % replace_dict
            # Add the split orders helper functions.
            file = file + '\n' + open(replace_dict['template_file2'])\
                                     .read()%replace_dict
            # Write the file
            writer.writelines(file)
            # Count only real helas calls, not '#' comment lines.
            return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
        else:
            replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor)
            return replace_dict
4041 4042 #=========================================================================== 4043 # write_auto_dsig_file 4044 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information.

        Also records in self.proc_characteristic the data needed for MLM
        matching (max matched jets) and CKKWl merging (colored pdgs).
        Returns (replace_dict, context) when no writer is given.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        self.proc_characteristic['ninitial'] = ninitial
        self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

        # Add information relevant for MLM matching:
        # Maximum QCD power in all the contributions
        max_qcd_order = 0
        for diag in matrix_element.get('diagrams'):
            orders = diag.calculate_orders()
            if 'QCD' in orders:
                max_qcd_order = max(max_qcd_order,orders['QCD'])
        # Count massless colored final-state particles (the light partons).
        max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
                                        if proc.get('model').get_particle(id).get('mass')=='ZERO' and
                                        proc.get('model').get_particle(id).get('color')>1])
                                        for proc in matrix_element.get('processes'))
        # Maximum number of final state light jets to be matched
        self.proc_characteristic['max_n_matched_jets'] = max(
            self.proc_characteristic['max_n_matched_jets'],
            min(max_qcd_order,max_n_light_final_partons))

        # List of default pdgs to be considered for the CKKWl merging cut
        self.proc_characteristic['colored_pdgs'] = \
            sorted(list(set([abs(p.get('pdg_code')) for p in
                             matrix_element.get('processes')[0].get('model').get('particles') if
                             p.get('color')>1])))

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            replace_dict['cutsdone'] = ""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""
            replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

        # Grouped output writes its own good_hel I/O at the group level.
        if not isinstance(self, ProcessExporterFortranMEGroup):
            ncomb=matrix_element.get_helicity_combinations()
            replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
        else:
            replace_dict['read_write_good_hel'] = ""

        context = {'read_write_good_hel':True}

        if writer:
            file = open(pjoin(_file_path, \
                          'iolibs/template_files/auto_dsig_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file, context=context)
        else:
            return replace_dict, context
4148 #=========================================================================== 4149 # write_coloramps_file 4150 #===========================================================================
4151 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4152 """Write the coloramps.inc file for MadEvent""" 4153 4154 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4155 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4156 (max(len(matrix_element.get('color_basis').keys()), 1), 4157 len(mapconfigs))) 4158 4159 4160 # Write the file 4161 writer.writelines(lines) 4162 4163 return True
4164 4165 #=========================================================================== 4166 # write_colors_file 4167 #===========================================================================
    def write_colors_file(self, writer, matrix_elements):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        Emits a Fortran function mapping each pdg code appearing in the
        matrix elements (wavefunctions and external legs, particle and
        antiparticle) to its color representation.
        """

        # Accept a single matrix element as well as a list of them.
        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                    for wf in d.get('wavefunctions')],[]) \
                               for d in me.get('diagrams')], []) \
                          for me in matrix_elements], []))

        leg_ids = set(sum([sum([sum([[l.get('id'),
                                      model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                     for l in p.get_legs_with_decays()], []) \
                                for p in me.get('processes')], []) \
                           for me in matrix_elements], []))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # First branch of the if/else-if chain, one entry per pdg code.
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
            get_color=%d
            return
            """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c           This is dummy particle used in multiparticle vertices
            get_color=2
            return
            """ % model.get_first_non_pdg()
        # Fallback branch: unknown pdg codes report an error at run time.
        lines += """else
            write(*,*)'Error: No color given for pdg ',ipdg
            get_color=0
            return
            endif
            end
            """

        # Write the file
        writer.writelines(lines)

        return True
4224 4225 #=========================================================================== 4226 # write_config_nqcd_file 4227 #===========================================================================
4228 - def write_config_nqcd_file(self, writer, nqcd_list):
4229 """Write the config_nqcd.inc with the number of QCD couplings 4230 for each config""" 4231 4232 lines = [] 4233 for iconf, n in enumerate(nqcd_list): 4234 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4235 4236 # Write the file 4237 writer.writelines(lines) 4238 4239 return True
4240 4241 #=========================================================================== 4242 # write_maxconfigs_file 4243 #===========================================================================
4244 - def write_maxconfigs_file(self, writer, matrix_elements):
4245 """Write the maxconfigs.inc file for MadEvent""" 4246 4247 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4248 maxconfigs = max([me.get_num_configs() for me in \ 4249 matrix_elements.get('matrix_elements')]) 4250 else: 4251 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4252 4253 lines = "integer lmaxconfigs\n" 4254 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4255 4256 # Write the file 4257 writer.writelines(lines) 4258 4259 return True
4260 4261 #=========================================================================== 4262 # read_write_good_hel 4263 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """return the code to read/write the good_hel common_block

        Returns Fortran source (as a string) defining write_good_hel,
        read_good_hel, init_good_hel and get_maxsproc for a process with
        ncomb helicity combinations; the caller splices it into the
        generated auto_dsig.f.
        """

        convert = {'ncomb' : ncomb}
        # NOTE(review): only %(ncomb)d is substituted; the rest of the
        # template is emitted verbatim.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I) = .false.
        enddo
        NTRY = 0
        end

        integer function get_maxsproc()
        implicit none
        get_maxsproc = 1
        return
        end

        """ % convert

        return output
4319 4320 #=========================================================================== 4321 # write_config_subproc_map_file 4322 #===========================================================================
4323 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4324 """Write a dummy config_subproc.inc file for MadEvent""" 4325 4326 lines = [] 4327 4328 for iconfig in range(len(s_and_t_channels)): 4329 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4330 (iconfig + 1)) 4331 4332 # Write the file 4333 writer.writelines(lines) 4334 4335 return True
4336 4337 #=========================================================================== 4338 # write_configs_file 4339 #===========================================================================
4340 - def write_configs_file(self, writer, matrix_element):
4341 """Write the configs.inc file for MadEvent""" 4342 4343 # Extract number of external particles 4344 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4345 4346 model = matrix_element.get('processes')[0].get('model') 4347 configs = [(i+1, d) for (i, d) in \ 4348 enumerate(matrix_element.get('diagrams'))] 4349 mapconfigs = [c[0] for c in configs] 4350 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4351 [[c[1]] for c in configs], 4352 mapconfigs, 4353 nexternal, ninitial, 4354 model)
4355 4356 #=========================================================================== 4357 # write_run_configs_file 4358 #===========================================================================
4359 - def write_run_config_file(self, writer):
4360 """Write the run_configs.inc file for MadEvent""" 4361 4362 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4363 4364 if self.proc_characteristic['loop_induced']: 4365 job_per_chan = 1 4366 else: 4367 job_per_chan = 5 4368 4369 if writer: 4370 text = open(path).read() % {'chanperjob': job_per_chan} 4371 writer.write(text) 4372 return True 4373 else: 4374 return {'chanperjob': job_per_chan}
4375 4376 #=========================================================================== 4377 # write_configs_file_from_diagrams 4378 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list): per written config, the
        [s-channel vertices, t-channel vertices] of the first
        contributing subprocess, and the number of QCD couplings.
        """

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # Largest vertex size per config (from the first contributing
        # diagram); configs whose diagram contains a vertex bigger than
        # the overall minimum are dropped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used to label multi-particle t-channel
        # propagators consistently across subprocesses.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    # Placeholder; empty_verts is resized in place below
                    # so all these entries share the same padded list.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                # NOTE(review): the `verts in schannels` membership tests
                # below rely on zip() returning a list (Python 2); under
                # Python 3 this would be a one-shot iterator.
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            # Number of QCD couplings in this diagram, taken from the
            # first contributing subprocess diagram (0 if no QCD order).
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                # s-channel entries are per-subprocess tuples of
                # vertices; t-channel entries are single vertices.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess (0 when the
                    # subprocess does not contribute).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list
4512 4513 #=========================================================================== 4514 # write_decayBW_file 4515 #===========================================================================
4516 - def write_decayBW_file(self, writer, s_and_t_channels):
4517 """Write the decayBW.inc file for MadEvent""" 4518 4519 lines = [] 4520 4521 booldict = {None: "0", True: "1", False: "2"} 4522 4523 for iconf, config in enumerate(s_and_t_channels): 4524 schannels = config[0] 4525 for vertex in schannels: 4526 # For the resulting leg, pick out whether it comes from 4527 # decay or not, as given by the onshell flag 4528 leg = vertex.get('legs')[-1] 4529 lines.append("data gForceBW(%d,%d)/%s/" % \ 4530 (leg.get('number'), iconf + 1, 4531 booldict[leg.get('onshell')])) 4532 4533 # Write the file 4534 writer.writelines(lines) 4535 4536 return True
4537 4538 #=========================================================================== 4539 # write_dname_file 4540 #===========================================================================
4541 - def write_dname_file(self, writer, dir_name):
4542 """Write the dname.mg file for MG4""" 4543 4544 line = "DIRNAME=%s" % dir_name 4545 4546 # Write the file 4547 writer.write(line + "\n") 4548 4549 return True
4550 4551 #=========================================================================== 4552 # write_driver 4553 #===========================================================================
4554 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4555 """Write the SubProcess/driver.f file for MG4""" 4556 4557 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4558 4559 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4560 card = 'Source/MODEL/MG5_param.dat' 4561 else: 4562 card = 'param_card.dat' 4563 # Requiring each helicity configuration to be probed by 10 points for 4564 # matrix element before using the resulting grid for MC over helicity 4565 # sampling. 4566 # We multiply this by 2 because each grouped subprocess is called at most 4567 # twice for each IMIRROR. 4568 replace_dict = {'param_card_name':card, 4569 'ncomb':ncomb, 4570 'hel_init_points':n_grouped_proc*10*2} 4571 if not v5: 4572 replace_dict['secondparam']=',.true.' 4573 else: 4574 replace_dict['secondparam']='' 4575 4576 if writer: 4577 text = open(path).read() % replace_dict 4578 writer.write(text) 4579 return True 4580 else: 4581 return replace_dict
4582 4583 #=========================================================================== 4584 # write_addmothers 4585 #===========================================================================
4586 - def write_addmothers(self, writer):
4587 """Write the SubProcess/addmothers.f""" 4588 4589 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4590 4591 text = open(path).read() % {'iconfig': 'diag_number'} 4592 writer.write(text) 4593 4594 return True
4595 4596 4597 #=========================================================================== 4598 # write_combine_events 4599 #===========================================================================
4600 - def write_combine_events(self, writer, nb_proc=100):
4601 """Write the SubProcess/driver.f file for MG4""" 4602 4603 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4604 4605 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4606 card = 'Source/MODEL/MG5_param.dat' 4607 else: 4608 card = 'param_card.dat' 4609 4610 #set maxpup (number of @X in the process card) 4611 4612 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4613 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4614 writer.write(text) 4615 4616 return True
4617 4618 4619 #=========================================================================== 4620 # write_symmetry 4621 #===========================================================================
4622 - def write_symmetry(self, writer, v5=True):
4623 """Write the SubProcess/driver.f file for ME""" 4624 4625 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4626 4627 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4628 card = 'Source/MODEL/MG5_param.dat' 4629 else: 4630 card = 'param_card.dat' 4631 4632 if v5: 4633 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4634 else: 4635 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4636 4637 if writer: 4638 text = open(path).read() 4639 text = text % replace_dict 4640 writer.write(text) 4641 return True 4642 else: 4643 return replace_dict
4644 4645 4646 4647 #=========================================================================== 4648 # write_iproc_file 4649 #===========================================================================
4650 - def write_iproc_file(self, writer, me_number):
4651 """Write the iproc.dat file for MG4""" 4652 line = "%d" % (me_number + 1) 4653 4654 # Write the file 4655 for line_to_write in writer.write_line(line): 4656 writer.write(line_to_write) 4657 return True
4658 4659 #=========================================================================== 4660 # write_mg_sym_file 4661 #===========================================================================
4662 - def write_mg_sym_file(self, writer, matrix_element):
4663 """Write the mg.sym file for MadEvent.""" 4664 4665 lines = [] 4666 4667 # Extract process with all decays included 4668 final_legs = filter(lambda leg: leg.get('state') == True, 4669 matrix_element.get('processes')[0].get_legs_with_decays()) 4670 4671 ninitial = len(filter(lambda leg: leg.get('state') == False, 4672 matrix_element.get('processes')[0].get('legs'))) 4673 4674 identical_indices = {} 4675 4676 # Extract identical particle info 4677 for i, leg in enumerate(final_legs): 4678 if leg.get('id') in identical_indices: 4679 identical_indices[leg.get('id')].append(\ 4680 i + ninitial + 1) 4681 else: 4682 identical_indices[leg.get('id')] = [i + ninitial + 1] 4683 4684 # Remove keys which have only one particle 4685 for key in identical_indices.keys(): 4686 if len(identical_indices[key]) < 2: 4687 del identical_indices[key] 4688 4689 # Write mg.sym file 4690 lines.append(str(len(identical_indices.keys()))) 4691 for key in identical_indices.keys(): 4692 lines.append(str(len(identical_indices[key]))) 4693 for number in identical_indices[key]: 4694 lines.append(str(number)) 4695 4696 # Write the file 4697 writer.writelines(lines) 4698 4699 return True
4700 4701 #=========================================================================== 4702 # write_mg_sym_file 4703 #===========================================================================
4704 - def write_default_mg_sym_file(self, writer):
4705 """Write the mg.sym file for MadEvent.""" 4706 4707 lines = "0" 4708 4709 # Write the file 4710 writer.writelines(lines) 4711 4712 return True
4713 4714 #=========================================================================== 4715 # write_ncombs_file 4716 #===========================================================================
4717 - def write_ncombs_file(self, writer, nexternal):
4718 """Write the ncombs.inc file for MadEvent.""" 4719 4720 # ncomb (used for clustering) is 2^nexternal 4721 file = " integer n_max_cl\n" 4722 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4723 4724 # Write the file 4725 writer.writelines(file) 4726 4727 return True
4728 4729 #=========================================================================== 4730 # write_processes_file 4731 #===========================================================================
4732 - def write_processes_file(self, writer, subproc_group):
4733 """Write the processes.dat file with info about the subprocesses 4734 in this group.""" 4735 4736 lines = [] 4737 4738 for ime, me in \ 4739 enumerate(subproc_group.get('matrix_elements')): 4740 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4741 ",".join(p.base_string() for p in \ 4742 me.get('processes')))) 4743 if me.get('has_mirror_process'): 4744 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4745 for proc in mirror_procs: 4746 legs = copy.copy(proc.get('legs_with_decays')) 4747 legs.insert(0, legs.pop(1)) 4748 proc.set("legs_with_decays", legs) 4749 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4750 mirror_procs)) 4751 else: 4752 lines.append("mirror none") 4753 4754 # Write the file 4755 writer.write("\n".join(lines)) 4756 4757 return True
4758 4759 #=========================================================================== 4760 # write_symswap_file 4761 #===========================================================================
4762 - def write_symswap_file(self, writer, ident_perms):
4763 """Write the file symswap.inc for MG4 by comparing diagrams using 4764 the internal matrix element value functionality.""" 4765 4766 lines = [] 4767 4768 # Write out lines for symswap.inc file (used to permute the 4769 # external leg momenta 4770 for iperm, perm in enumerate(ident_perms): 4771 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4772 (iperm+1, ",".join([str(i+1) for i in perm]))) 4773 lines.append("data nsym/%d/" % len(ident_perms)) 4774 4775 # Write the file 4776 writer.writelines(lines) 4777 4778 return True
4779 4780 #=========================================================================== 4781 # write_symfact_file 4782 #===========================================================================
4783 - def write_symfact_file(self, writer, symmetry):
4784 """Write the files symfact.dat for MG4 by comparing diagrams using 4785 the internal matrix element value functionality.""" 4786 4787 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4788 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4789 # Write out lines for symswap.inc file (used to permute the 4790 # external leg momenta 4791 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4792 # Write the file 4793 writer.write('\n'.join(lines)) 4794 writer.write('\n') 4795 4796 return True
4797 4798 #=========================================================================== 4799 # write_symperms_file 4800 #===========================================================================
4801 - def write_symperms_file(self, writer, perms):
4802 """Write the symperms.inc file for subprocess group, used for 4803 symmetric configurations""" 4804 4805 lines = [] 4806 for iperm, perm in enumerate(perms): 4807 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4808 (iperm+1, ",".join([str(i+1) for i in perm]))) 4809 4810 # Write the file 4811 writer.writelines(lines) 4812 4813 return True
4814 4815 #=========================================================================== 4816 # write_subproc 4817 #===========================================================================
4818 - def write_subproc(self, writer, subprocdir):
4819 """Append this subprocess to the subproc.mg file for MG4""" 4820 4821 # Write line to file 4822 writer.write(subprocdir + "\n") 4823 4824 return True
4825
4826 #=============================================================================== 4827 # ProcessExporterFortranMEGroup 4828 #=============================================================================== 4829 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4830 """Class to take care of exporting a set of matrix elements to 4831 MadEvent subprocess group format.""" 4832 4833 matrix_file = "matrix_madevent_group_v4.inc" 4834 grouped_mode = 'madevent' 4835 #=========================================================================== 4836 # generate_subprocess_directory 4837 #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls over the group's matrix
        elements, or 0 if the directory could not be entered.

        Side effects: chdir's into SubProcesses/P<n>_<name> while
        working, links common include files back into Source, and
        appends the directory to subproc.mg before restoring the
        original working directory.
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        # Remember where we started so we can chdir back at the end.
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        # NOTE(review): Python-2-only raise syntax below; kept as-is.
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb!=me.get_helicity_combinations():
                raise MadGraph5Error, "All grouped processes must share the "+\
                                      "same number of helicity configurations."

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename),ncomb,
                          n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

        # One matrix<i>.f / auto_dsig<i>.f / matrix<i>.ps per matrix
        # element in the group.
        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime+1)
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             proc_id=str(ime+1),
                                             config_map=subproc_group.get('diagram_maps')[ime],
                                             subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = "matrix%d.ps" % (ime+1)
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                              matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE(review): matrix_element here is the last element of the
        # loop above; presumably all group members share nexternal —
        # confirm against SubProcessGroup invariants.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir)
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5063 5064 #=========================================================================== 5065 # write_super_auto_dsig_file 5066 #===========================================================================
    def write_super_auto_dsig_file(self, writer, subproc_group):
        """Write the auto_dsig.f file selecting between the subprocesses
        in subprocess group mode.

        If *writer* is None, the template substitution dictionary is
        returned instead of writing the file.
        """

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        matrix_elements = subproc_group.get('matrix_elements')

        # Extract process info lines
        process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                                   matrix_elements])
        replace_dict['process_lines'] = process_lines

        # All group members share nexternal; use the first.
        nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal

        # Factor 2: presumably one slot per IMIRROR setting per matrix
        # element — confirm against the template.
        replace_dict['nsprocs'] = 2*len(matrix_elements)

        # Generate dsig definition line
        dsig_def_line = "DOUBLE PRECISION " + \
                        ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                                  range(len(matrix_elements))])
        replace_dict["dsig_def_line"] = dsig_def_line

        # Generate dsig process lines: dispatch on IPROC to the
        # per-subprocess DSIG<i> function.
        call_dsig_proc_lines = []
        for iproc in range(len(matrix_elements)):
            call_dsig_proc_lines.append(\
                "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
                {"num": iproc + 1,
                 "proc": matrix_elements[iproc].get('processes')[0].base_string()})
        replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

        # Helicity bookkeeping helpers shared by all group members.
        ncomb=matrix_elements[0].get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

        if writer:
            file = open(pjoin(_file_path, \
                              'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
5116 5117 #=========================================================================== 5118 # write_mirrorprocs 5119 #===========================================================================
5120 - def write_mirrorprocs(self, writer, subproc_group):
5121 """Write the mirrorprocs.inc file determining which processes have 5122 IS mirror process in subprocess group mode.""" 5123 5124 lines = [] 5125 bool_dict = {True: '.true.', False: '.false.'} 5126 matrix_elements = subproc_group.get('matrix_elements') 5127 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5128 (len(matrix_elements), 5129 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5130 me in matrix_elements]))) 5131 # Write the file 5132 writer.writelines(lines)
5133 5134 #=========================================================================== 5135 # write_addmothers 5136 #===========================================================================
5137 - def write_addmothers(self, writer):
5138 """Write the SubProcess/addmothers.f""" 5139 5140 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5141 5142 text = open(path).read() % {'iconfig': 'lconfig'} 5143 writer.write(text) 5144 5145 return True
5146 5147 5148 #=========================================================================== 5149 # write_coloramps_file 5150 #===========================================================================
5151 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5152 matrix_elements):
5153 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5154 5155 # Create a map from subprocess (matrix element) to a list of 5156 # the diagrams corresponding to each config 5157 5158 lines = [] 5159 5160 subproc_to_confdiag = {} 5161 for config in diagrams_for_config: 5162 for subproc, diag in enumerate(config): 5163 try: 5164 subproc_to_confdiag[subproc].append(diag) 5165 except KeyError: 5166 subproc_to_confdiag[subproc] = [diag] 5167 5168 for subproc in sorted(subproc_to_confdiag.keys()): 5169 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5170 matrix_elements[subproc], 5171 subproc + 1)) 5172 5173 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5174 (maxflows, 5175 len(diagrams_for_config), 5176 len(matrix_elements))) 5177 5178 # Write the file 5179 writer.writelines(lines) 5180 5181 return True
5182 5183 #=========================================================================== 5184 # write_config_subproc_map_file 5185 #===========================================================================
5186 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5187 """Write the config_subproc_map.inc file for subprocess groups""" 5188 5189 lines = [] 5190 # Output only configs that have some corresponding diagrams 5191 iconfig = 0 5192 for config in config_subproc_map: 5193 if set(config) == set([0]): 5194 continue 5195 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5196 (iconfig + 1, len(config), 5197 ",".join([str(i) for i in config]))) 5198 iconfig += 1 5199 # Write the file 5200 writer.writelines(lines) 5201 5202 return True
5203 5204 #=========================================================================== 5205 # read_write_good_hel 5206 #===========================================================================
5207 - def read_write_good_hel(self, ncomb):
5208 """return the code to read/write the good_hel common_block""" 5209 5210 convert = {'ncomb' : ncomb} 5211 5212 output = """ 5213 subroutine write_good_hel(stream_id) 5214 implicit none 5215 integer stream_id 5216 INTEGER NCOMB 5217 PARAMETER ( NCOMB=%(ncomb)d) 5218 LOGICAL GOODHEL(NCOMB, 2) 5219 INTEGER NTRY(2) 5220 common/BLOCK_GOODHEL/NTRY,GOODHEL 5221 write(stream_id,*) GOODHEL 5222 return 5223 end 5224 5225 5226 subroutine read_good_hel(stream_id) 5227 implicit none 5228 include 'genps.inc' 5229 integer stream_id 5230 INTEGER NCOMB 5231 PARAMETER ( NCOMB=%(ncomb)d) 5232 LOGICAL GOODHEL(NCOMB, 2) 5233 INTEGER NTRY(2) 5234 common/BLOCK_GOODHEL/NTRY,GOODHEL 5235 read(stream_id,*) GOODHEL 5236 NTRY(1) = MAXTRIES + 1 5237 NTRY(2) = MAXTRIES + 1 5238 return 5239 end 5240 5241 subroutine init_good_hel() 5242 implicit none 5243 INTEGER NCOMB 5244 PARAMETER ( NCOMB=%(ncomb)d) 5245 LOGICAL GOODHEL(NCOMB, 2) 5246 INTEGER NTRY(2) 5247 INTEGER I 5248 5249 do i=1,NCOMB 5250 GOODHEL(I,1) = .false. 5251 GOODHEL(I,2) = .false. 5252 enddo 5253 NTRY(1) = 0 5254 NTRY(2) = 0 5255 end 5256 5257 integer function get_maxsproc() 5258 implicit none 5259 include 'maxamps.inc' 5260 5261 get_maxsproc = maxsproc 5262 return 5263 end 5264 5265 """ % convert 5266 5267 return output
5268 5269 5270 5271 #=========================================================================== 5272 # write_configs_file 5273 #===========================================================================
5274 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5275 """Write the configs.inc file with topology information for a 5276 subprocess group. Use the first subprocess with a diagram for each 5277 configuration.""" 5278 5279 matrix_elements = subproc_group.get('matrix_elements') 5280 model = matrix_elements[0].get('processes')[0].get('model') 5281 5282 diagrams = [] 5283 config_numbers = [] 5284 for iconfig, config in enumerate(diagrams_for_config): 5285 # Check if any diagrams correspond to this config 5286 if set(config) == set([0]): 5287 continue 5288 subproc_diags = [] 5289 for s,d in enumerate(config): 5290 if d: 5291 subproc_diags.append(matrix_elements[s].\ 5292 get('diagrams')[d-1]) 5293 else: 5294 subproc_diags.append(None) 5295 diagrams.append(subproc_diags) 5296 config_numbers.append(iconfig + 1) 5297 5298 # Extract number of external particles 5299 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5300 5301 return len(diagrams), \ 5302 self.write_configs_file_from_diagrams(writer, diagrams, 5303 config_numbers, 5304 nexternal, ninitial, 5305 model)
5306 5307 #=========================================================================== 5308 # write_run_configs_file 5309 #===========================================================================
5310 - def write_run_config_file(self, writer):
5311 """Write the run_configs.inc file for MadEvent""" 5312 5313 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5314 if self.proc_characteristic['loop_induced']: 5315 job_per_chan = 1 5316 else: 5317 job_per_chan = 2 5318 text = open(path).read() % {'chanperjob':job_per_chan} 5319 writer.write(text) 5320 return True
5321 5322 5323 #=========================================================================== 5324 # write_leshouche_file 5325 #===========================================================================
5326 - def write_leshouche_file(self, writer, subproc_group):
5327 """Write the leshouche.inc file for MG4""" 5328 5329 all_lines = [] 5330 5331 for iproc, matrix_element in \ 5332 enumerate(subproc_group.get('matrix_elements')): 5333 all_lines.extend(self.get_leshouche_lines(matrix_element, 5334 iproc)) 5335 # Write the file 5336 writer.writelines(all_lines) 5337 return True
5338 5339
    def finalize(self,*args, **opts):
        """Run the generic fortran finalization, then record that this
        output was produced with grouped matrix elements."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        #ensure that the grouping information is on the correct value
        self.proc_characteristic['grouped_matrix'] = True
5345 5346 5347 #=============================================================================== 5348 # UFO_model_to_mg4 5349 #=============================================================================== 5350 5351 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type names used when writing the multiple-precision
    # ("quadruple") versions of the couplings/parameters.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
5368 - def __init__(self, model, output_path, opt=None):
5369 """ initialization of the objects """ 5370 5371 self.model = model 5372 self.model_name = model['name'] 5373 self.dir_path = output_path 5374 5375 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5376 'loop_induced': False} 5377 if opt: 5378 self.opt.update(opt) 5379 5380 self.coups_dep = [] # (name, expression, type) 5381 self.coups_indep = [] # (name, expression, type) 5382 self.params_dep = [] # (name, expression, type) 5383 self.params_indep = [] # (name, expression, type) 5384 self.params_ext = [] # external parameter 5385 self.p_to_f = parsers.UFOExpressionParserFortran() 5386 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5387
5389 """modify the parameter if some of them are identical up to the case""" 5390 5391 lower_dict={} 5392 duplicate = set() 5393 keys = self.model['parameters'].keys() 5394 for key in keys: 5395 for param in self.model['parameters'][key]: 5396 lower_name = param.name.lower() 5397 if not lower_name: 5398 continue 5399 try: 5400 lower_dict[lower_name].append(param) 5401 except KeyError,error: 5402 lower_dict[lower_name] = [param] 5403 else: 5404 duplicate.add(lower_name) 5405 logger.debug('%s is define both as lower case and upper case.' 5406 % lower_name) 5407 if not duplicate: 5408 return 5409 5410 re_expr = r'''\b(%s)\b''' 5411 to_change = [] 5412 change={} 5413 for value in duplicate: 5414 for i, var in enumerate(lower_dict[value]): 5415 to_change.append(var.name) 5416 new_name = '%s%s' % (var.name.lower(), 5417 ('__%d'%(i+1) if i>0 else '')) 5418 change[var.name] = new_name 5419 var.name = new_name 5420 5421 # Apply the modification to the map_CTcoup_CTparam of the model 5422 # if it has one (giving for each coupling the CT parameters whcih 5423 # are necessary and which should be exported to the model. 
5424 if hasattr(self.model,'map_CTcoup_CTparam'): 5425 for coup, ctparams in self.model.map_CTcoup_CTparam: 5426 for i, ctparam in enumerate(ctparams): 5427 try: 5428 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5429 except KeyError: 5430 pass 5431 5432 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5433 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5434 5435 # change parameters 5436 for key in keys: 5437 if key == ('external',): 5438 continue 5439 for param in self.model['parameters'][key]: 5440 param.expr = rep_pattern.sub(replace, param.expr) 5441 5442 # change couplings 5443 for key in self.model['couplings'].keys(): 5444 for coup in self.model['couplings'][key]: 5445 coup.expr = rep_pattern.sub(replace, coup.expr) 5446 5447 # change mass/width 5448 for part in self.model['particles']: 5449 if str(part.get('mass')) in to_change: 5450 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5451 if str(part.get('width')) in to_change: 5452 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5453
5454 - def refactorize(self, wanted_couplings = []):
5455 """modify the couplings to fit with MG4 convention """ 5456 5457 # Keep only separation in alphaS 5458 keys = self.model['parameters'].keys() 5459 keys.sort(key=len) 5460 for key in keys: 5461 to_add = [o for o in self.model['parameters'][key] if o.name] 5462 5463 if key == ('external',): 5464 self.params_ext += to_add 5465 elif any([(k in key) for k in self.PS_dependent_key]): 5466 self.params_dep += to_add 5467 else: 5468 self.params_indep += to_add 5469 # same for couplings 5470 keys = self.model['couplings'].keys() 5471 keys.sort(key=len) 5472 for key, coup_list in self.model['couplings'].items(): 5473 if any([(k in key) for k in self.PS_dependent_key]): 5474 self.coups_dep += [c for c in coup_list if 5475 (not wanted_couplings or c.name in \ 5476 wanted_couplings)] 5477 else: 5478 self.coups_indep += [c for c in coup_list if 5479 (not wanted_couplings or c.name in \ 5480 wanted_couplings)] 5481 5482 # MG4 use G and not aS as it basic object for alphas related computation 5483 #Pass G in the independant list 5484 if 'G' in self.params_dep: 5485 index = self.params_dep.index('G') 5486 G = self.params_dep.pop(index) 5487 # G.expr = '2*cmath.sqrt(as*pi)' 5488 # self.params_indep.insert(0, self.params_dep.pop(index)) 5489 # No need to add it if not defined 5490 5491 if 'aS' not in self.params_ext: 5492 logger.critical('aS not define as external parameter adding it!') 5493 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5494 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5495 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5496 - def build(self, wanted_couplings = [], full=True):
5497 """modify the couplings to fit with MG4 convention and creates all the 5498 different files""" 5499 5500 self.pass_parameter_to_case_insensitive() 5501 self.refactorize(wanted_couplings) 5502 5503 # write the files 5504 if full: 5505 if wanted_couplings: 5506 # extract the wanted ct parameters 5507 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5508 self.write_all()
5509 5510
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        format='fortran' returns a FortranWriter; anything else a plain
        file handle.  A three-line comment banner is written first.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            fsock = open(file_path, 'w')

        # NOTE(review): ``file.writelines`` is called as an unbound method so
        # the banner bypasses any line reformatting done by
        # FortranWriter.writelines -- this presumably requires FortranWriter
        # to subclass the python2 builtin ``file``; confirm before porting.
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) * ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5527 5528
    def write_all(self):
        """ write all the files

        Drives the full model export: external-parameter handling, internal
        parameter definitions, couplings, makefile, model functions,
        param_card and the static template files.
        """
        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()


        # All the standard files
        self.copy_standard_file()
5561 5562 ############################################################################ 5563 ## ROUTINE CREATING THE FILES ############################################ 5564 ############################################################################ 5565
5566 - def copy_standard_file(self):
5567 """Copy the standard files for the fortran model.""" 5568 5569 #copy the library files 5570 file_to_link = ['formats.inc','printout.f', \ 5571 'rw_para.f', 'testprog.f'] 5572 5573 for filename in file_to_link: 5574 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5575 self.dir_path) 5576 5577 file = open(os.path.join(MG5DIR,\ 5578 'models/template_files/fortran/rw_para.f')).read() 5579 5580 includes=["include \'coupl.inc\'","include \'input.inc\'", 5581 "include \'model_functions.inc\'"] 5582 if self.opt['mp']: 5583 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5584 # In standalone and madloop we do no use the compiled param card but 5585 # still parse the .dat one so we must load it. 5586 if self.opt['loop_induced']: 5587 #loop induced follow MadEvent way to handle the card. 5588 load_card = '' 5589 lha_read_filename='lha_read.f' 5590 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5591 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5592 lha_read_filename='lha_read_mp.f' 5593 elif self.opt['export_format'].startswith('standalone') \ 5594 or self.opt['export_format'] in ['madweight', 'plugin']\ 5595 or self.opt['export_format'].startswith('matchbox'): 5596 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5597 lha_read_filename='lha_read.f' 5598 else: 5599 load_card = '' 5600 lha_read_filename='lha_read.f' 5601 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5602 os.path.join(self.dir_path,'lha_read.f')) 5603 5604 file=file%{'includes':'\n '.join(includes), 5605 'load_card':load_card} 5606 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5607 writer.writelines(file) 5608 writer.close() 5609 5610 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5611 or self.opt['loop_induced']: 5612 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5613 self.dir_path + '/makefile') 5614 if 
self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5615 path = pjoin(self.dir_path, 'makefile') 5616 text = open(path).read() 5617 text = text.replace('madevent','aMCatNLO') 5618 open(path, 'w').writelines(text) 5619 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5620 'madloop','madloop_optimized', 'standalone_rw', 5621 'madweight','matchbox','madloop_matchbox', 'plugin']: 5622 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5623 self.dir_path + '/makefile') 5624 #elif self.opt['export_format'] in []: 5625 #pass 5626 else: 5627 raise MadGraph5Error('Unknown format')
5628
    def create_coupl_inc(self):
        """ write coupling.inc

        Declares (in double precision, and optionally in multiple precision
        with and without the mp_ name prefix) the strong/weak couplings,
        renormalization scale, masses, widths, couplings and -- in the
        complex mass scheme -- the complex masses.
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            # Two mp variants: prefixed names, and the same names as the
            # double-precision file (used by routines included in both modes).
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # Complex masses are only introduced for unstable massive
                # particles (nonzero width and nonzero mass).
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                          ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                          ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                      ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                      ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5748
5749 - def create_write_couplings(self):
5750 """ write the file coupl_write.inc """ 5751 5752 fsock = self.open('coupl_write.inc', format='fortran') 5753 5754 fsock.writelines("""write(*,*) ' Couplings of %s' 5755 write(*,*) ' ---------------------------------' 5756 write(*,*) ' '""" % self.model_name) 5757 def format(coupl): 5758 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5759 5760 # Write the Couplings 5761 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5762 fsock.writelines('\n'.join(lines)) 5763 5764
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares the real and complex internal/external parameters (and
        their mp_ counterparts when multiple precision is enabled), skipping
        names already declared as masses/widths in coupl.inc.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # G/MU_R/ZERO are declared separately in coupl.inc; reject them and
        # anything already declared as a mass or width above.
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real'and
                               is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                                        if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                              self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                              self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                                        if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                               self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                               self.mp_prefix+p for p in complex_parameters])+'\n\n')
5821
5822 - def check_needed_param(self, param):
5823 """ Returns whether the parameter in argument is needed for this 5824 specific computation or not.""" 5825 5826 # If this is a leading order model or if there was no CT parameter 5827 # employed in this NLO model, one can directly return that the 5828 # parameter is needed since only CTParameters are filtered. 5829 if not hasattr(self, 'allCTparameters') or \ 5830 self.allCTparameters is None or self.usedCTparameters is None or \ 5831 len(self.allCTparameters)==0: 5832 return True 5833 5834 # We must allow the conjugate shorthand for the complex parameter as 5835 # well so we check wether either the parameter name or its name with 5836 # 'conjg__' substituted with '' is present in the list. 5837 # This is acceptable even if some parameter had an original name 5838 # including 'conjg__' in it, because at worst we export a parameter 5839 # was not needed. 5840 param = param.lower() 5841 cjg_param = param.replace('conjg__','',1) 5842 5843 # First make sure it is a CTparameter 5844 if param not in self.allCTparameters and \ 5845 cjg_param not in self.allCTparameters: 5846 return True 5847 5848 # Now check if it is in the list of CTparameters actually used 5849 return (param in self.usedCTparameters or \ 5850 cjg_param in self.usedCTparameters)
5851
5852 - def extract_needed_CTparam(self,wanted_couplings=[]):
5853 """ Extract what are the needed CT parameters given the wanted_couplings""" 5854 5855 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5856 # Setting these lists to none wil disable the filtering in 5857 # check_needed_param 5858 self.allCTparameters = None 5859 self.usedCTparameters = None 5860 return 5861 5862 # All CTparameters appearin in all CT couplings 5863 allCTparameters=self.model.map_CTcoup_CTparam.values() 5864 # Define in this class the list of all CT parameters 5865 self.allCTparameters=list(\ 5866 set(itertools.chain.from_iterable(allCTparameters))) 5867 5868 # All used CT couplings 5869 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5870 allUsedCTCouplings = [coupl for coupl in 5871 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5872 5873 # Now define the list of all CT parameters that are actually used 5874 self.usedCTparameters=list(\ 5875 set(itertools.chain.from_iterable([ 5876 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5877 ]))) 5878 5879 # Now at last, make these list case insensitive 5880 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5881 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5882
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                             ('mp_' if mp and not dp else ''), format='fortran')

        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            # NOTE(review): this loop uses ``elif mp`` while the independent
            # parameter loop above uses two separate ``if`` tests, so mp
            # lines would be skipped here if dp and mp were both True.  The
            # callers in write_all() always pass exactly one flag -- confirm
            # before enabling both at once.
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
 %(mp_prefix)sgal(2) = 1d0
""" %{'mp_prefix':self.mp_prefix})
                pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
 %(mp_prefix)sgal(2) = 1d0
""" %{'mp_prefix':self.mp_prefix})
                pass
        else:
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
 %(mp_prefix)sgal(2) = 1e0_16
"""%{'mp_prefix':self.mp_prefix})
5966 5967
5968 - def create_couplings(self):
5969 """ create couplings.f and all couplingsX.f """ 5970 5971 nb_def_by_file = 25 5972 5973 self.create_couplings_main(nb_def_by_file) 5974 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5975 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5976 5977 for i in range(nb_coup_indep): 5978 # For the independent couplings, we compute the double and multiple 5979 # precision ones together 5980 data = self.coups_indep[nb_def_by_file * i: 5981 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5982 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5983 5984 for i in range(nb_coup_dep): 5985 # For the dependent couplings, we compute the double and multiple 5986 # precision ones in separate subroutines. 5987 data = self.coups_dep[nb_def_by_file * i: 5988 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5989 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5990 dp=True,mp=False) 5991 if self.opt['mp']: 5992 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5993 dp=False,mp=True)
5994 5995
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Writes the driver subroutines: coup() (full initialisation when the
        param_card is read), update_as_param() (recompute only aS-dependent
        couplings), update_as_param2(mu_r2,as2) (externally driven update)
        and, when mp is enabled, mp_update_as_param().
        """

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                    parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                    parameter (MP__ZERO=0e0_16)
                    include \'mp_input.inc\'
                    include \'mp_coupl.inc\'
        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .true.
                            include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param(): only the PS-point dependent couplings are redone.
        fsock.writelines("""subroutine update_as_param()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .false.""")
        fsock.writelines("""
                            include \'intparam_definition.inc\'\n
                         """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2(): entry point taking the new scale/aS as input.
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

          implicit none
          double precision PI
          parameter  (PI=3.141592653589793d0)
          double precision mu_r2, as2
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'""")
        fsock.writelines("""
                            if (mu_r2.gt.0d0) MU_R = mu_r2
                            G = SQRT(4.0d0*PI*AS2)
                            AS = as2

                            CALL UPDATE_AS_PARAM()
                         """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            # Quadruple-precision variant of update_as_param().
            fsock.writelines("""subroutine mp_update_as_param()

              implicit none
              logical READLHA
              include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                    parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                    parameter (MP__ZERO=0e0_16)
                    include \'mp_input.inc\'
                    include \'mp_coupl.inc\'
            """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
                              include \'coupl.inc\'
                              include \'actualize_mp_ext_params.inc\'
                              READLHA = .false.
                              include \'mp_intparam_definition.inc\'\n
            """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6111
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                                 nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

          implicit none
          include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
          double precision PI, ZERO
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include 'input.inc'
          include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                      parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                      parameter (MP__ZERO=0e0_16)
                      include \'mp_input.inc\'
                      include \'mp_coupl.inc\'
                    """%self.mp_real_format)

        # One assignment per coupling, translated by the matching parser.
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                          self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                          self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
6149
6150 - def create_model_functions_inc(self):
6151 """ Create model_functions.inc which contains the various declarations 6152 of auxiliary functions which might be used in the couplings expressions 6153 """ 6154 6155 additional_fct = [] 6156 # check for functions define in the UFO model 6157 ufo_fct = self.model.get('functions') 6158 if ufo_fct: 6159 for fct in ufo_fct: 6160 # already handle by default 6161 if fct.name not in ["complexconjugate", "re", "im", "sec", 6162 "csc", "asec", "acsc", "theta_function", "cond", 6163 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]: 6164 additional_fct.append(fct.name) 6165 6166 6167 fsock = self.open('model_functions.inc', format='fortran') 6168 fsock.writelines("""double complex cond 6169 double complex condif 6170 double complex reglog 6171 double complex reglogp 6172 double complex reglogm 6173 double complex recms 6174 double complex arg 6175 %s 6176 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6177 6178 6179 if self.opt['mp']: 6180 fsock.writelines("""%(complex_mp_format)s mp_cond 6181 %(complex_mp_format)s mp_condif 6182 %(complex_mp_format)s mp_reglog 6183 %(complex_mp_format)s mp_reglogp 6184 %(complex_mp_format)s mp_reglogm 6185 %(complex_mp_format)s mp_recms 6186 %(complex_mp_format)s mp_arg 6187 %(additional)s 6188 """ %\ 6189 {"additional": "\n".join([" %s %s" % (self.mp_complex_format, i) for i in additional_fct]), 6190 'complex_mp_format':self.mp_complex_format 6191 })
6192
6193 - def create_model_functions_def(self):
6194 """ Create model_functions.f which contains the various definitions 6195 of auxiliary functions which might be used in the couplings expressions 6196 Add the functions.f functions for formfactors support 6197 """ 6198 6199 fsock = self.open('model_functions.f', format='fortran') 6200 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6201 implicit none 6202 double complex condition,truecase,falsecase 6203 if(condition.eq.(0.0d0,0.0d0)) then 6204 cond=truecase 6205 else 6206 cond=falsecase 6207 endif 6208 end 6209 6210 double complex function condif(condition,truecase,falsecase) 6211 implicit none 6212 logical condition 6213 double complex truecase,falsecase 6214 if(condition) then 6215 condif=truecase 6216 else 6217 condif=falsecase 6218 endif 6219 end 6220 6221 double complex function recms(condition,expr) 6222 implicit none 6223 logical condition 6224 double complex expr 6225 if(condition)then 6226 recms=expr 6227 else 6228 recms=dcmplx(dble(expr)) 6229 endif 6230 end 6231 6232 double complex function reglog(arg) 6233 implicit none 6234 double complex TWOPII 6235 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6236 double complex arg 6237 if(arg.eq.(0.0d0,0.0d0)) then 6238 reglog=(0.0d0,0.0d0) 6239 else 6240 reglog=log(arg) 6241 endif 6242 end 6243 6244 double complex function reglogp(arg) 6245 implicit none 6246 double complex TWOPII 6247 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6248 double complex arg 6249 if(arg.eq.(0.0d0,0.0d0))then 6250 reglogp=(0.0d0,0.0d0) 6251 else 6252 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6253 reglogp=log(arg) + TWOPII 6254 else 6255 reglogp=log(arg) 6256 endif 6257 endif 6258 end 6259 6260 double complex function reglogm(arg) 6261 implicit none 6262 double complex TWOPII 6263 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6264 double complex arg 6265 if(arg.eq.(0.0d0,0.0d0))then 6266 reglogm=(0.0d0,0.0d0) 6267 else 6268 
if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6269 reglogm=log(arg) - TWOPII 6270 else 6271 reglogm=log(arg) 6272 endif 6273 endif 6274 end 6275 6276 double complex function arg(comnum) 6277 implicit none 6278 double complex comnum 6279 double complex iim 6280 iim = (0.0d0,1.0d0) 6281 if(comnum.eq.(0.0d0,0.0d0)) then 6282 arg=(0.0d0,0.0d0) 6283 else 6284 arg=log(comnum/abs(comnum))/iim 6285 endif 6286 end""") 6287 if self.opt['mp']: 6288 fsock.writelines(""" 6289 6290 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6291 implicit none 6292 %(complex_mp_format)s condition,truecase,falsecase 6293 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6294 mp_cond=truecase 6295 else 6296 mp_cond=falsecase 6297 endif 6298 end 6299 6300 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6301 implicit none 6302 logical condition 6303 %(complex_mp_format)s truecase,falsecase 6304 if(condition) then 6305 mp_condif=truecase 6306 else 6307 mp_condif=falsecase 6308 endif 6309 end 6310 6311 %(complex_mp_format)s function mp_recms(condition,expr) 6312 implicit none 6313 logical condition 6314 %(complex_mp_format)s expr 6315 if(condition)then 6316 mp_recms=expr 6317 else 6318 mp_recms=cmplx(real(expr),kind=16) 6319 endif 6320 end 6321 6322 %(complex_mp_format)s function mp_reglog(arg) 6323 implicit none 6324 %(complex_mp_format)s TWOPII 6325 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6326 %(complex_mp_format)s arg 6327 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6328 mp_reglog=(0.0e0_16,0.0e0_16) 6329 else 6330 mp_reglog=log(arg) 6331 endif 6332 end 6333 6334 %(complex_mp_format)s function mp_reglogp(arg) 6335 implicit none 6336 %(complex_mp_format)s TWOPII 6337 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6338 %(complex_mp_format)s arg 6339 if(arg.eq.(0.0e0_16,0.0e0_16))then 6340 mp_reglogp=(0.0e0_16,0.0e0_16) 6341 else 6342 
if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6343 mp_reglogp=log(arg) + TWOPII 6344 else 6345 mp_reglogp=log(arg) 6346 endif 6347 endif 6348 end 6349 6350 %(complex_mp_format)s function mp_reglogm(arg) 6351 implicit none 6352 %(complex_mp_format)s TWOPII 6353 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6354 %(complex_mp_format)s arg 6355 if(arg.eq.(0.0e0_16,0.0e0_16))then 6356 mp_reglogm=(0.0e0_16,0.0e0_16) 6357 else 6358 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6359 mp_reglogm=log(arg) - TWOPII 6360 else 6361 mp_reglogm=log(arg) 6362 endif 6363 endif 6364 end 6365 6366 %(complex_mp_format)s function mp_arg(comnum) 6367 implicit none 6368 %(complex_mp_format)s comnum 6369 %(complex_mp_format)s imm 6370 imm = (0.0e0_16,1.0e0_16) 6371 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6372 mp_arg=(0.0e0_16,0.0e0_16) 6373 else 6374 mp_arg=log(comnum/abs(comnum))/imm 6375 endif 6376 end"""%{'complex_mp_format':self.mp_complex_format}) 6377 6378 6379 #check for the file functions.f 6380 model_path = self.model.get('modelpath') 6381 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6382 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6383 input = pjoin(model_path,'Fortran','functions.f') 6384 file.writelines(fsock, open(input).read()) 6385 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6386 6387 # check for functions define in the UFO model 6388 ufo_fct = self.model.get('functions') 6389 if ufo_fct: 6390 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6391 for fct in ufo_fct: 6392 # already handle by default 6393 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6394 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg"]: 6395 ufo_fct_template = """ 6396 double complex function %(name)s(%(args)s) 6397 implicit none 6398 double complex %(args)s 6399 %(definitions)s 6400 %(name)s = %(fct)s 6401 
6402 return 6403 end 6404 """ 6405 str_fct = self.p_to_f.parse(fct.expr) 6406 if not self.p_to_f.to_define: 6407 definitions = [] 6408 else: 6409 definitions=[] 6410 for d in self.p_to_f.to_define: 6411 if d == 'pi': 6412 definitions.append(' double precision pi') 6413 definitions.append(' data pi /3.1415926535897932d0/') 6414 else: 6415 definitions.append(' double complex %s' % d) 6416 6417 text = ufo_fct_template % { 6418 'name': fct.name, 6419 'args': ", ".join(fct.arguments), 6420 'fct': str_fct, 6421 'definitions': '\n'.join(definitions) 6422 } 6423 6424 fsock.writelines(text) 6425 if self.opt['mp']: 6426 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6427 for fct in ufo_fct: 6428 # already handle by default 6429 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6430 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg"]: 6431 ufo_fct_template = """ 6432 %(complex_mp_format)s function mp__%(name)s(mp__%(args)s) 6433 implicit none 6434 %(complex_mp_format)s mp__%(args)s 6435 %(definitions)s 6436 mp__%(name)s = %(fct)s 6437 6438 return 6439 end 6440 """ 6441 6442 str_fct = self.mp_p_to_f.parse(fct.expr) 6443 if not self.p_to_f.to_define: 6444 definitions = [] 6445 else: 6446 definitions=[] 6447 for d in self.p_to_f.to_define: 6448 if d == 'mp_pi': 6449 definitions.append(' %s mp_pi' % self.mp_real_format) 6450 definitions.append(' data mp_pi /3.141592653589793238462643383279502884197e+00_16/') 6451 else: 6452 definitions.append(' %s %s' % (self.mp_complex_format,d)) 6453 text = ufo_fct_template % { 6454 'name': fct.name, 6455 'args': ", mp__".join(fct.arguments), 6456 'fct': str_fct, 6457 'definitions': '\n'.join(definitions), 6458 'complex_mp_format': self.mp_complex_format 6459 } 6460 fsock.writelines(text) 6461 6462 6463 6464 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6465 6466 6467
6468 - def create_makeinc(self):
6469 """create makeinc.inc containing the file to compile """ 6470 6471 fsock = self.open('makeinc.inc', comment='#') 6472 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6473 text += ' model_functions.o ' 6474 6475 nb_coup_indep = 1 + len(self.coups_dep) // 25 6476 nb_coup_dep = 1 + len(self.coups_indep) // 25 6477 couplings_files=['couplings%s.o' % (i+1) \ 6478 for i in range(nb_coup_dep + nb_coup_indep) ] 6479 if self.opt['mp']: 6480 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6481 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6482 text += ' '.join(couplings_files) 6483 fsock.writelines(text)
6484
6485 - def create_param_write(self):
6486 """ create param_write """ 6487 6488 fsock = self.open('param_write.inc', format='fortran') 6489 6490 fsock.writelines("""write(*,*) ' External Params' 6491 write(*,*) ' ---------------------------------' 6492 write(*,*) ' '""") 6493 def format(name): 6494 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6495 6496 # Write the external parameter 6497 lines = [format(param.name) for param in self.params_ext] 6498 fsock.writelines('\n'.join(lines)) 6499 6500 fsock.writelines("""write(*,*) ' Internal Params' 6501 write(*,*) ' ---------------------------------' 6502 write(*,*) ' '""") 6503 lines = [format(data.name) for data in self.params_indep 6504 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6505 fsock.writelines('\n'.join(lines)) 6506 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6507 write(*,*) ' ----------------------------------------' 6508 write(*,*) ' '""") 6509 lines = [format(data.name) for data in self.params_dep \ 6510 if self.check_needed_param(data.name)] 6511 6512 fsock.writelines('\n'.join(lines)) 6513 6514 6515
6516 - def create_ident_card(self):
6517 """ create the ident_card.dat """ 6518 6519 def format(parameter): 6520 """return the line for the ident_card corresponding to this parameter""" 6521 colum = [parameter.lhablock.lower()] + \ 6522 [str(value) for value in parameter.lhacode] + \ 6523 [parameter.name] 6524 if not parameter.name: 6525 return '' 6526 return ' '.join(colum)+'\n'
6527 6528 fsock = self.open('ident_card.dat') 6529 6530 external_param = [format(param) for param in self.params_ext] 6531 fsock.writelines('\n'.join(external_param)) 6532
6533 - def create_actualize_mp_ext_param_inc(self):
6534 """ create the actualize_mp_ext_params.inc code """ 6535 6536 # In principle one should actualize all external, but for now, it is 6537 # hardcoded that only AS and MU_R can by dynamically changed by the user 6538 # so that we only update those ones. 6539 # Of course, to be on the safe side, one could decide to update all 6540 # external parameters. 6541 update_params_list=[p for p in self.params_ext if p.name in 6542 self.PS_dependent_key] 6543 6544 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6545 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6546 for param in update_params_list] 6547 # When read_lha is false, it is G which is taken in input and not AS, so 6548 # this is what should be reset here too. 6549 if 'aS' in [param.name for param in update_params_list]: 6550 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6551 6552 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6553 fsock.writelines('\n'.join(res_strings))
6554
    def create_param_read(self):
        """create param_read"""

        # MadEvent-style outputs (and loop-induced ones) read the parameters
        # through the generated ../param_card.inc instead of parsing the card.
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
           or self.opt['loop_induced']:
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """return the line for the ident_card corresponding to this
            parameter"""
            # One LHA_get_real call per parameter, with its default value as
            # fallback; the mp twin is emitted right after when enabled.
            template = \
                """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
                % {'name': parameter.name,
                   'value': self.p_to_f.parse(str(parameter.value.real))}
            if self.opt['mp']:
                template = template+ \
                 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+
                  "%(mp_prefix)s%(name)s,%(value)s)") \
                 % {'name': parameter.name,'mp_prefix': self.mp_prefix,
                    'value': self.mp_p_to_f.parse(str(parameter.value.real))}
            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                       for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
                   particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                 {'width': particle.get('width'), 'mass': particle.get('mass')})
                if self.opt['mp']:
                    res_strings.append(\
                      ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\
                       '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\
                       'mass': particle.get('mass'),'mp_pref':self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))


    @staticmethod
6601 - def create_param_card_static(model, output_path, rule_card_path=False, 6602 mssm_convert=True):
6603 """ create the param_card.dat for a givent model --static method-- """ 6604 #1. Check if a default param_card is present: 6605 done = False 6606 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6607 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6608 model_path = model.get('modelpath') 6609 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6610 done = True 6611 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6612 output_path) 6613 if not done: 6614 param_writer.ParamCardWriter(model, output_path) 6615 6616 if rule_card_path: 6617 if hasattr(model, 'rule_card'): 6618 model.rule_card.write_file(rule_card_path) 6619 6620 if mssm_convert: 6621 model_name = model.get('name') 6622 # IF MSSM convert the card to SLAH1 6623 if model_name == 'mssm' or model_name.startswith('mssm-'): 6624 import models.check_param_card as translator 6625 # Check the format of the param_card for Pythia and make it correct 6626 if rule_card_path: 6627 translator.make_valid_param_card(output_path, rule_card_path) 6628 translator.convert_to_slha1(output_path)
6629
6630 - def create_param_card(self):
6631 """ create the param_card.dat """ 6632 6633 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6634 if not hasattr(self.model, 'rule_card'): 6635 rule_card=False 6636 self.create_param_card_static(self.model, 6637 output_path=pjoin(self.dir_path, 'param_card.dat'), 6638 rule_card_path=rule_card, 6639 mssm_convert=True)
6640
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options=None):
    """ Determine which Export_v4 class is required. cmd is the command
        interface containing all potential usefull information.
        The output_type argument specifies from which context the output
        is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
        and 'default' for tree-level outputs."""

    # Avoid the shared-mutable-default pitfall: build a fresh dict per call.
    if cmd_options is None:
        cmd_options = {}

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    # a) Loop optimized output is selected
    # b) the process gathered from the amplitude generated use loops

    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
                 len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
            (not curr_proc is None) and \
            (curr_proc.get('perturbation_couplings') != [] and \
             not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
        'complex_mass': cmd.options['complex_mass_scheme'],
        'export_format': 'madloop',
        'mp': True,
        'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
        'cuttools_dir': cmd._cuttools_dir,
        'iregi_dir': cmd._iregi_dir,
        'pjfry_dir': cmd.options['pjfry'],
        'golem_dir': cmd.options['golem'],
        'samurai_dir': cmd.options['samurai'],
        'ninja_dir': cmd.options['ninja'],
        'collier_dir': cmd.options['collier'],
        'fortran_compiler': cmd.options['fortran_compiler'],
        'f2py_compiler': cmd.options['f2py_compiler'],
        'output_dependencies': cmd.options['output_dependencies'],
        'SubProc_prefix': 'P',
        'compute_color_flows': cmd.options['loop_color_flows'],
        'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
        'cluster_local_path': cmd.options['cluster_local_path'],
        'output_options': cmd_options
        }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    # BUGFIX: use the call form of raise (valid on py2 and py3)
                    raise Exception("output_type not recognized %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # mp is only needed when virtual amplitudes are present
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format  # shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                                           cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                                           cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception('Wrong export_v4 format')
    else:
        # BUGFIX: the '%s' placeholder was never interpolated (and the
        # message misspelt 'recognized'); format it with output_type.
        raise MadGraph5Error('Output type %s not recognized in ExportV4Factory.'
                             % output_type)
6812
6813 6814 6815 6816 #=============================================================================== 6817 # ProcessExporterFortranMWGroup 6818 #=============================================================================== 6819 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
6820 """Class to take care of exporting a set of matrix elements to 6821 MadEvent subprocess group format.""" 6822 6823 matrix_file = "matrix_madweight_group_v4.inc" 6824 grouped_mode = 'madweight' 6825 #=========================================================================== 6826 # generate_subprocess_directory 6827 #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files."""
        # NOTE(review): group_number is part of the exporter interface but is
        # not used in this MadWeight variant.

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "subproc_group object not SubProcessGroup"

        # Lazily pick up the model from the first process of the group.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory may already exist (e.g. noclean); warn and reuse it.
            logger.warning(error.strerror + " " + subprocdir)

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                   self.write_matrix_element_v4(writers.FortranWriter(filename),
                                                matrix_element,
                                                fortran_model,
                                                str(ime+1),
                                                subproc_group.get('diagram_maps')[\
                                                                            ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                              matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE(review): uses the loop variable after the loop, i.e. the last
        # matrix element of the group (all share the same multiplicity).
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath,'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared per-process sources/includes from the parent dir.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
6975 6976 6977 #=========================================================================== 6978 # Helper functions 6979 #===========================================================================
6980 - def modify_grouping(self, matrix_element):
6981 """allow to modify the grouping (if grouping is in place) 6982 return two value: 6983 - True/False if the matrix_element was modified 6984 - the new(or old) matrix element""" 6985 6986 return True, matrix_element.split_lepton_grouping()
6987 6988 #=========================================================================== 6989 # write_super_auto_dsig_file 6990 #===========================================================================
6991 - def write_super_auto_dsig_file(self, writer, subproc_group):
6992 """Write the auto_dsig.f file selecting between the subprocesses 6993 in subprocess group mode""" 6994 6995 replace_dict = {} 6996 6997 # Extract version number and date from VERSION file 6998 info_lines = self.get_mg5_info_lines() 6999 replace_dict['info_lines'] = info_lines 7000 7001 matrix_elements = subproc_group.get('matrix_elements') 7002 7003 # Extract process info lines 7004 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7005 matrix_elements]) 7006 replace_dict['process_lines'] = process_lines 7007 7008 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7009 replace_dict['nexternal'] = nexternal 7010 7011 replace_dict['nsprocs'] = 2*len(matrix_elements) 7012 7013 # Generate dsig definition line 7014 dsig_def_line = "DOUBLE PRECISION " + \ 7015 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7016 range(len(matrix_elements))]) 7017 replace_dict["dsig_def_line"] = dsig_def_line 7018 7019 # Generate dsig process lines 7020 call_dsig_proc_lines = [] 7021 for iproc in range(len(matrix_elements)): 7022 call_dsig_proc_lines.append(\ 7023 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7024 {"num": iproc + 1, 7025 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7026 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7027 7028 if writer: 7029 file = open(os.path.join(_file_path, \ 7030 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7031 file = file % replace_dict 7032 # Write the file 7033 writer.writelines(file) 7034 else: 7035 return replace_dict
7036 7037 #=========================================================================== 7038 # write_mirrorprocs 7039 #===========================================================================
7040 - def write_mirrorprocs(self, writer, subproc_group):
7041 """Write the mirrorprocs.inc file determining which processes have 7042 IS mirror process in subprocess group mode.""" 7043 7044 lines = [] 7045 bool_dict = {True: '.true.', False: '.false.'} 7046 matrix_elements = subproc_group.get('matrix_elements') 7047 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7048 (len(matrix_elements), 7049 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7050 me in matrix_elements]))) 7051 # Write the file 7052 writer.writelines(lines)
7053 7054 #=========================================================================== 7055 # write_configs_file 7056 #===========================================================================
7057 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7058 """Write the configs.inc file with topology information for a 7059 subprocess group. Use the first subprocess with a diagram for each 7060 configuration.""" 7061 7062 matrix_elements = subproc_group.get('matrix_elements') 7063 model = matrix_elements[0].get('processes')[0].get('model') 7064 7065 diagrams = [] 7066 config_numbers = [] 7067 for iconfig, config in enumerate(diagrams_for_config): 7068 # Check if any diagrams correspond to this config 7069 if set(config) == set([0]): 7070 continue 7071 subproc_diags = [] 7072 for s,d in enumerate(config): 7073 if d: 7074 subproc_diags.append(matrix_elements[s].\ 7075 get('diagrams')[d-1]) 7076 else: 7077 subproc_diags.append(None) 7078 diagrams.append(subproc_diags) 7079 config_numbers.append(iconfig + 1) 7080 7081 # Extract number of external particles 7082 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7083 7084 return len(diagrams), \ 7085 self.write_configs_file_from_diagrams(writer, diagrams, 7086 config_numbers, 7087 nexternal, ninitial, 7088 matrix_elements[0],model)
 7089 
 7090  #=========================================================================== 
 7091  # write_run_config_file 
 7092  #===========================================================================
7093 - def write_run_config_file(self, writer):
7094 """Write the run_configs.inc file for MadEvent""" 7095 7096 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7097 text = open(path).read() % {'chanperjob':'2'} 7098 writer.write(text) 7099 return True
7100 7101 7102 #=========================================================================== 7103 # write_leshouche_file 7104 #===========================================================================
7105 - def write_leshouche_file(self, writer, subproc_group):
7106 """Write the leshouche.inc file for MG4""" 7107 7108 all_lines = [] 7109 7110 for iproc, matrix_element in \ 7111 enumerate(subproc_group.get('matrix_elements')): 7112 all_lines.extend(self.get_leshouche_lines(matrix_element, 7113 iproc)) 7114 7115 # Write the file 7116 writer.writelines(all_lines) 7117 7118 return True
7119