Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import time 
  31  import traceback 
  32   
  33  import aloha 
  34   
  35  import madgraph.core.base_objects as base_objects 
  36  import madgraph.core.color_algebra as color 
  37  import madgraph.core.helas_objects as helas_objects 
  38  import madgraph.iolibs.drawing_eps as draw 
  39  import madgraph.iolibs.files as files 
  40  import madgraph.iolibs.group_subprocs as group_subprocs 
  41  import madgraph.iolibs.file_writers as writers 
  42  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  43  import madgraph.iolibs.template_files as template_files 
  44  import madgraph.iolibs.ufo_expression_parsers as parsers 
  45  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  46  import madgraph.interface.common_run_interface as common_run_interface 
  47  import madgraph.various.diagram_symmetry as diagram_symmetry 
  48  import madgraph.various.misc as misc 
  49  import madgraph.various.banner as banner_mod 
  50  import madgraph.various.process_checks as process_checks 
  51  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  52  import aloha.create_aloha as create_aloha 
  53  import models.import_ufo as import_ufo 
  54  import models.write_param_card as param_writer 
  55  import models.check_param_card as check_param_card 
  56   
  57   
  58  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  59  from madgraph.iolibs.files import cp, ln, mv 
  60   
  61  from madgraph import InvalidCmd 
  62   
  63  pjoin = os.path.join 
  64   
  65  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  66  logger = logging.getLogger('madgraph.export_v4') 
  67   
  68  default_compiler= {'fortran': 'gfortran', 
  69                         'f2py': 'f2py', 
  70                         'cpp':'g++'} 
class VirtualExporter(object):
    """Base class defining the interface madgraph expects from an exporter.

    The class attributes below modify the way madgraph interacts with a
    concrete exporter; daughter classes override them and the methods as
    needed.
    """

    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    #   False      : avoid grouping (only identical matrix elements are merged)
    #   'madevent' : group the massless quarks and massless leptons
    #   'madweight': group the gluon with the massless quarks
    grouped_mode = 'madevent'

    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Setting sa_symmetry generates only one of the two matrix elements.
    sa_symmetry = False

    # Ask madgraph to check if the directory already exists and propose to the
    # user to remove it first if this is the case.
    check = True

    # [Template, None, dir]
    # - Template: madgraph will call copy_template
    # - dir     : madgraph will just create an empty directory for initialisation
    # - None    : madgraph does nothing for initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        """opt is a dictionary with all the optional arguments passed at
        output time (cmd_options); nothing to store at this level."""
        return

    def copy_template(self, model):
        """Initialise dir_path as a copy of the output template (no-op here)."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # For ungrouped output the signature is effectively
        # generate_subprocess_directory(self, matrix_element, helicity_model, me_number).
        # Return an integer stating the number of calls to helicity routines.
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Write the model files needed by the output (no-op here)."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Finalize the output directory (no-op here)."""
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Return (modified_flag, matrix_element); no regrouping by default."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # Fixed: was Python-2-only `raise Exception, "..."` syntax (with an
        # unreachable `return` after it); the call form works in Py2 and Py3.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        # Fixed: same Python-2-only raise syntax as export_model_files.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
134 #=============================================================================== 135 # ProcessExporterFortran 136 #=============================================================================== 137 -class ProcessExporterFortran(VirtualExporter):
138 """Class to take care of exporting a set of matrix elements to 139 Fortran (v4) format.""" 140 141 default_opt = {'clean': False, 'complex_mass':False, 142 'export_format':'madevent', 'mp': False, 143 'v5_model': True, 144 'output_options':{} 145 } 146 grouped_mode = False 147
148 - def __init__(self, dir_path = "", opt=None):
149 """Initiate the ProcessExporterFortran with directory information""" 150 self.mgme_dir = MG5DIR 151 self.dir_path = dir_path 152 self.model = None 153 154 self.opt = dict(self.default_opt) 155 if opt: 156 self.opt.update(opt) 157 158 self.cmd_options = self.opt['output_options'] 159 160 #place holder to pass information to the run_interface 161 self.proc_characteristic = banner_mod.ProcCharacteristic()
162 163 164 #=========================================================================== 165 # process exporter fortran switch between group and not grouped 166 #===========================================================================
167 - def export_processes(self, matrix_elements, fortran_model):
168 """Make the switch between grouped and not grouped output""" 169 170 calls = 0 171 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 172 for (group_number, me_group) in enumerate(matrix_elements): 173 calls = calls + self.generate_subprocess_directory(\ 174 me_group, fortran_model, group_number) 175 else: 176 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 177 calls = calls + self.generate_subprocess_directory(\ 178 me, fortran_model, me_number) 179 180 return calls
181 182 183 #=========================================================================== 184 # create the run_card 185 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write Cards/run_card.dat and Cards/run_card_default.dat.

        When matrix elements are available the default card content is
        tailored to the process(es) via self.proc_characteristic; otherwise
        a generic default card is written.
        """


        # bypass this for the loop-check
        import madgraph.loop.loop_helas_objects as loop_helas_objects
        if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
            matrix_elements = None

        run_card = banner_mod.RunCard()


        default=True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            # grouped output: flatten the processes of every group
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            # no matrix element available (e.g. loop-check): keep the
            # generic run_card defaults
            default =False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)


        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
216 217 218 #=========================================================================== 219 # copy the Template in a new directory. 220 #===========================================================================
    def copy_template(self, model):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory
        """

        #First copy the full template tree if dir_path doesn't exit
        if not os.path.isdir(self.dir_path):
            assert self.mgme_dir, \
                   "No valid MG_ME path given for MG4 run directory creation."
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(self.dir_path))
            shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                            self.dir_path, True)
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'),
                               self.dir_path)
            # copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif os.getcwd() == os.path.realpath(self.dir_path):
            # Output requested into the current working directory: refresh
            # the template content in place.
            logger.info('working in local directory: %s' % \
                        os.path.realpath(self.dir_path))
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'),
                               self.dir_path)
            # for name in misc.glob('Template/LO/*', self.mgme_dir):
            #     name = os.path.basename(name)
            #     filname = pjoin(self.mgme_dir, 'Template','LO',name)
            #     if os.path.isfile(filename):
            #         files.cp(filename, pjoin(self.dir_path,name))
            #     elif os.path.isdir(filename):
            #         shutil.copytree(filename, pjoin(self.dir_path,name), True)
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'),
                               self.dir_path)
            # Copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
            # dir_path exists but does not look like a MadEvent template:
            # only record which MGME version it corresponds to.
            assert self.mgme_dir, \
                   "No valid MG_ME path given for MG4 run directory creation."
            try:
                shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
            except IOError:
                MG5_version = misc.get_pkg_info()
                open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version'])

        #Ensure that the Template is clean
        if self.opt['clean']:
            logger.info('remove old information in %s' % \
                        os.path.basename(self.dir_path))
            # NOTE(review): has_key() is Python-2-only; presumably this code
            # base targets Python 2.
            if os.environ.has_key('MADGRAPH_BASE'):
                misc.call([pjoin('bin', 'internal', 'clean_template'),
                           '--web'], cwd=self.dir_path)
            else:
                try:
                    misc.call([pjoin('bin', 'internal', 'clean_template')], \
                              cwd=self.dir_path)
                except Exception, why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(self.dir_path),why))

        #Write version info
        MG_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
            MG_version['version'])

        # add the makefile in Source directory
        filename = pjoin(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FileWriter(filename))

        # add the DiscreteSampler information
        files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
                 pjoin(self.dir_path, 'Source'))
        files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'),
                 pjoin(self.dir_path, 'Source'))

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
310 311 312 #=========================================================================== 313 # Call MadAnalysis5 to generate the default cards for this process 314 #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels = ['parton','hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process."""
        # NOTE(review): `levels` uses a mutable default list; it is only read
        # here, never mutated, so this is harmless in practice.

        if len(levels)==0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        try:
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                              get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100)
        except (Exception, SystemExit) as e:
            # Best effort: MA5 failures must never abort the output creation.
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton','hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only)
                    logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl)
                    # Full traceback only at debug verbosity.
                    error=StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-'*60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-'*60)
                else:
                    open(card_to_generate,'w').write(text)
        stop = time.time()
        if stop-start >1:
            logger.info('Cards created in %.2fs' % (stop-start))
355 356 #=========================================================================== 357 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 358 #===========================================================================
359 - def write_procdef_mg5(self, file_pos, modelname, process_str):
360 """ write an equivalent of the MG4 proc_card in order that all the Madevent 361 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 362 363 proc_card_template = template_files.mg4_proc_card.mg4_template 364 process_template = template_files.mg4_proc_card.process_template 365 process_text = '' 366 coupling = '' 367 new_process_content = [] 368 369 370 # First find the coupling and suppress the coupling from process_str 371 #But first ensure that coupling are define whithout spaces: 372 process_str = process_str.replace(' =', '=') 373 process_str = process_str.replace('= ', '=') 374 process_str = process_str.replace(',',' , ') 375 #now loop on the element and treat all the coupling 376 for info in process_str.split(): 377 if '=' in info: 378 coupling += info + '\n' 379 else: 380 new_process_content.append(info) 381 # Recombine the process_str (which is the input process_str without coupling 382 #info) 383 process_str = ' '.join(new_process_content) 384 385 #format the SubProcess 386 replace_dict = {'process': process_str, 387 'coupling': coupling} 388 process_text += process_template.substitute(replace_dict) 389 390 replace_dict = {'process': process_text, 391 'model': modelname, 392 'multiparticle':''} 393 text = proc_card_template.substitute(replace_dict) 394 395 if file_pos: 396 ff = open(file_pos, 'w') 397 ff.write(text) 398 ff.close() 399 else: 400 return replace_dict
401 402
403 - def pass_information_from_cmd(self, cmd):
404 """Pass information for MA5""" 405 406 self.proc_defs = cmd._curr_proc_defs
407 408 #=========================================================================== 409 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 410 #===========================================================================
411 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
412 """Function to finalize v4 directory, for inheritance.""" 413 414 self.create_run_card(matrix_elements, history) 415 self.create_MA5_cards(matrix_elements, history)
416
    def create_MA5_cards(self,matrix_elements,history):
        """ A wrapper around the creation of the MA5 cards so that it can be
        bypassed by daughter classes (i.e. in standalone)."""
        # Only act when a MadAnalysis5 path is configured and process
        # definitions were recorded via pass_information_from_cmd.
        if 'madanalysis5_path' in self.opt and not \
                self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
            processes = None
            if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
                # grouped output: flatten the processes of every group
                processes = [me.get('processes') for megroup in matrix_elements
                             for me in megroup['matrix_elements']]
            elif matrix_elements:
                processes = [me.get('processes')
                             for me in matrix_elements['matrix_elements']]

            self.create_default_madanalysis5_cards(
                history, self.proc_defs, processes,
                self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
                levels = ['hadron','parton'])

            for level in ['hadron','parton']:
                # Copying these cards turn on the use of MadAnalysis5 by default.
                if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)):
                    shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level),
                                pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
440 441 #=========================================================================== 442 # Create the proc_characteristic file passing information to the run_interface 443 #===========================================================================
444 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
445 446 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
447 448 #=========================================================================== 449 # write_matrix_element_v4 450 #===========================================================================
451 - def write_matrix_element_v4(self):
452 """Function to write a matrix.f file, for inheritance. 453 """ 454 pass
455 456 #=========================================================================== 457 # write_pdf_opendata 458 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure"""

        # When no cluster-local path is configured the template placeholder
        # is simply blanked; otherwise a cascade of open() attempts over the
        # possible local locations is injected into the Fortran source.
        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
              """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}


        # FortranWriter reformats the injected snippet to valid fixed form.
        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            to_add="""
              LHAPath='%(path)s/PDFsets'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='./PDFsets'
              """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)


        return
516 517 518 519 #=========================================================================== 520 # write_maxparticles_file 521 #===========================================================================
522 - def write_maxparticles_file(self, writer, matrix_elements):
523 """Write the maxparticles.inc file for MadEvent""" 524 525 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 526 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 527 matrix_elements.get('matrix_elements')]) 528 else: 529 maxparticles = max([me.get_nexternal_ninitial()[0] \ 530 for me in matrix_elements]) 531 532 lines = "integer max_particles\n" 533 lines += "parameter(max_particles=%d)" % maxparticles 534 535 # Write the file 536 writer.writelines(lines) 537 538 return True
539 540 541 #=========================================================================== 542 # export the model 543 #===========================================================================
544 - def export_model_files(self, model_path):
545 """Configure the files/link of the process according to the model""" 546 547 # Import the model 548 for file in os.listdir(model_path): 549 if os.path.isfile(pjoin(model_path, file)): 550 shutil.copy2(pjoin(model_path, file), \ 551 pjoin(self.dir_path, 'Source', 'MODEL'))
552 553 567 575 576 577 #=========================================================================== 578 # export the helas routine 579 #===========================================================================
580 - def export_helas(self, helas_path):
581 """Configure the files/link of the process according to the model""" 582 583 # Import helas routine 584 for filename in os.listdir(helas_path): 585 filepos = pjoin(helas_path, filename) 586 if os.path.isfile(filepos): 587 if filepos.endswith('Makefile.template'): 588 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 589 elif filepos.endswith('Makefile'): 590 pass 591 else: 592 cp(filepos, self.dir_path + '/Source/DHELAS')
593 # following lines do the same but whithout symbolic link 594 # 595 #def export_helas(mgme_dir, dir_path): 596 # 597 # # Copy the HELAS directory 598 # helas_dir = pjoin(mgme_dir, 'HELAS') 599 # for filename in os.listdir(helas_dir): 600 # if os.path.isfile(pjoin(helas_dir, filename)): 601 # shutil.copy2(pjoin(helas_dir, filename), 602 # pjoin(dir_path, 'Source', 'DHELAS')) 603 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 604 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 605 # 606 607 #=========================================================================== 608 # generate_subprocess_directory 609 #===========================================================================
610 - def generate_subprocess_directory(self, matrix_element, 611 fortran_model, 612 me_number):
613 """Routine to generate a subprocess directory (for inheritance)""" 614 615 pass
616 617 #=========================================================================== 618 # get_source_libraries_list 619 #===========================================================================
620 - def get_source_libraries_list(self):
621 """ Returns the list of libraries to be compiling when compiling the 622 SOURCE directory. It is different for loop_induced processes and 623 also depends on the value of the 'output_dependencies' option""" 624 625 return ['$(LIBDIR)libdhelas.$(libext)', 626 '$(LIBDIR)libpdf.$(libext)', 627 '$(LIBDIR)libmodel.$(libext)', 628 '$(LIBDIR)libcernlib.$(libext)', 629 '$(LIBDIR)libbias.$(libext)']
630 631 #=========================================================================== 632 # write_source_makefile 633 #===========================================================================
634 - def write_source_makefile(self, writer):
635 """Write the nexternal.inc file for MG4""" 636 637 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 638 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 639 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 640 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 641 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 642 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 643 else: 644 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 645 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 646 647 replace_dict= {'libraries': set_of_lib, 648 'model':model_line, 649 'additional_dsample': '', 650 'additional_dependencies':''} 651 652 if writer: 653 text = open(path).read() % replace_dict 654 writer.write(text) 655 656 return replace_dict
657 658 #=========================================================================== 659 # write_nexternal_madspin 660 #===========================================================================
661 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
662 """Write the nexternal_prod.inc file for madspin""" 663 664 replace_dict = {} 665 666 replace_dict['nexternal'] = nexternal 667 replace_dict['ninitial'] = ninitial 668 669 file = """ \ 670 integer nexternal_prod 671 parameter (nexternal_prod=%(nexternal)d) 672 integer nincoming_prod 673 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 674 675 # Write the file 676 if writer: 677 writer.writelines(file) 678 return True 679 else: 680 return replace_dict
681 682 #=========================================================================== 683 # write_helamp_madspin 684 #===========================================================================
685 - def write_helamp_madspin(self, writer, ncomb):
686 """Write the helamp.inc file for madspin""" 687 688 replace_dict = {} 689 690 replace_dict['ncomb'] = ncomb 691 692 file = """ \ 693 integer ncomb1 694 parameter (ncomb1=%(ncomb)d) 695 double precision helamp(ncomb1) 696 common /to_helamp/helamp """ % replace_dict 697 698 # Write the file 699 if writer: 700 writer.writelines(file) 701 return True 702 else: 703 return replace_dict
704 705 706 707 #=========================================================================== 708 # write_nexternal_file 709 #===========================================================================
710 - def write_nexternal_file(self, writer, nexternal, ninitial):
711 """Write the nexternal.inc file for MG4""" 712 713 replace_dict = {} 714 715 replace_dict['nexternal'] = nexternal 716 replace_dict['ninitial'] = ninitial 717 718 file = """ \ 719 integer nexternal 720 parameter (nexternal=%(nexternal)d) 721 integer nincoming 722 parameter (nincoming=%(ninitial)d)""" % replace_dict 723 724 # Write the file 725 if writer: 726 writer.writelines(file) 727 return True 728 else: 729 return replace_dict
730 #=========================================================================== 731 # write_pmass_file 732 #===========================================================================
733 - def write_pmass_file(self, writer, matrix_element):
734 """Write the pmass.inc file for MG4""" 735 736 model = matrix_element.get('processes')[0].get('model') 737 738 lines = [] 739 for wf in matrix_element.get_external_wavefunctions(): 740 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 741 if mass.lower() != "zero": 742 mass = "abs(%s)" % mass 743 744 lines.append("pmass(%d)=%s" % \ 745 (wf.get('number_external'), mass)) 746 747 # Write the file 748 writer.writelines(lines) 749 750 return True
751 752 #=========================================================================== 753 # write_ngraphs_file 754 #===========================================================================
755 - def write_ngraphs_file(self, writer, nconfigs):
756 """Write the ngraphs.inc file for MG4. Needs input from 757 write_configs_file.""" 758 759 file = " integer n_max_cg\n" 760 file = file + "parameter (n_max_cg=%d)" % nconfigs 761 762 # Write the file 763 writer.writelines(file) 764 765 return True
766 767 #=========================================================================== 768 # write_leshouche_file 769 #===========================================================================
770 - def write_leshouche_file(self, writer, matrix_element):
771 """Write the leshouche.inc file for MG4""" 772 773 # Write the file 774 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 775 776 return True
777 778 #=========================================================================== 779 # get_leshouche_lines 780 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Build the Fortran DATA statements (IDUP/MOTHUP/ICOLUP) for the
        leshouche.inc file for MG4.

        Returns a list of lines; numproc is the 0-based index of the
        subprocess within its group.
        """
        # NOTE: the "%2r"/"%3r" conversions rely on repr(int) == str(int);
        # they simply right-align the numbers in a 2/3-character field.

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # IDUP: the PDG ids of all external legs for this process.
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # MOTHUP: zeros for the initial-state legs, then the value i
                # for each final-state leg (written once, for the first
                # process of the first subprocess only).
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1,nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    # (sign-flipped for initial-state legs via (-1)**(1+state))
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
830 831 832 833 834 #=========================================================================== 835 # write_maxamps_file 836 #===========================================================================
837 - def write_maxamps_file(self, writer, maxamps, maxflows, 838 maxproc,maxsproc):
839 """Write the maxamps.inc file for MG4.""" 840 841 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 842 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 843 (maxamps, maxflows) 844 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 845 (maxproc, maxsproc) 846 847 # Write the file 848 writer.writelines(file) 849 850 return True
851 852 853 #=========================================================================== 854 # Routines to output UFO models in MG4 format 855 #=========================================================================== 856
def convert_model(self, model, wanted_lorentz = [],
                  wanted_couplings = []):
    """ Create a full valid MG4 model from a MG5 model (coming from UFO)

    Writes the MODEL and DHELAS directories under self.dir_path/Source:
    builds the parameter/coupling Fortran code via UFO_model_to_mg4, then
    computes and writes the needed ALOHA helicity routines.

    NOTE(review): wanted_lorentz/wanted_couplings use mutable default
    arguments; they are only read here, never mutated, so this is benign.
    """

    # Make sure aloha is in quadruple precision if needed
    old_aloha_mp=aloha.mp_precision
    aloha.mp_precision=self.opt['mp']

    # create the MODEL
    write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
    # NOTE(review): self.opt and self.proc_characteristic are combined with
    # '+' — presumably dict-like project objects supporting merge; confirm.
    model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
    model_builder.build(wanted_couplings)

    # Backup the loop mode, because it can be changed in what follows.
    old_loop_mode = aloha.loop_mode

    # Create the aloha model or use the existing one (for loop exporters
    # this is useful as the aloha model will be used again in the
    # LoopHelasMatrixElements generated). We do not save the model generated
    # here if it didn't exist already because it would be a waste of
    # memory for tree level applications since aloha is only needed at the
    # time of creating the aloha fortran subroutines.
    if hasattr(self, 'aloha_model'):
        aloha_model = self.aloha_model
    else:
        aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
    aloha_model.add_Lorentz_object(model.get('lorentz'))

    # Compute the subroutines
    if wanted_lorentz:
        aloha_model.compute_subset(wanted_lorentz)
    else:
        # No explicit request: generate every routine (not cached to disk).
        aloha_model.compute_all(save=False)

    # Write them out
    write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
    aloha_model.write(write_dir, 'Fortran')

    # Revert the original aloha loop mode
    aloha.loop_mode = old_loop_mode

    #copy Helas Template
    cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
    # Any 'L' tag in a wanted Lorentz structure signals loop routines are
    # needed, so ship the loop-capable aloha_functions variant instead.
    if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]):
        cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
           write_dir+'/aloha_functions.f')
        aloha_model.loop_mode = False
    else:
        cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
           write_dir+'/aloha_functions.f')
    create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

    # Make final link in the Process
    self.make_model_symbolic_link()

    # Re-establish original aloha mode
    aloha.mp_precision=old_aloha_mp
914 915 916 #=========================================================================== 917 # Helper functions 918 #===========================================================================
def modify_grouping(self, matrix_element):
    """Hook allowing subclasses to alter the grouping (if grouping is in
    place).

    Returns a pair:
      - True/False, whether the matrix_element was modified
      - the new (or unchanged) matrix element

    The base implementation never modifies anything.
    """
    modified = False
    return modified, matrix_element
926 927 #=========================================================================== 928 # Helper functions 929 #===========================================================================
def get_mg5_info_lines(self):
    """Return info lines for MG5, suitable to place at beginning of
    Fortran files.

    When the package info (version/date) is available the banner carries
    it; otherwise a generic banner is produced.
    """
    pkg_info = misc.get_pkg_info()

    # The credit lines are common to both banners.
    credits = ("# By the MadGraph5_aMC@NLO Development Team\n"
               "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch")

    # 'key in dict' is equivalent to the Py2 has_key() used elsewhere.
    if pkg_info and 'version' in pkg_info and 'date' in pkg_info:
        header = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \
                 (pkg_info['version'], pkg_info['date'])
    else:
        header = "# Generated by MadGraph5_aMC@NLO\n"

    return header + credits
948
def get_process_info_lines(self, matrix_element):
    """Return Fortran comment lines describing the processes contained in
    this matrix element (one "C " line per process, with embedded newlines
    turned into continuation comments)."""

    comments = []
    for process in matrix_element.get('processes'):
        comments.append("C " + process.nice_string().replace('\n', '\nC * '))
    return "\n".join(comments)
954 955
def get_helicity_lines(self, matrix_element, array_name='NHEL'):
    """Return the Helicity matrix definition lines for this matrix element.

    One Fortran DATA line per helicity combination, row index 1-based,
    values formatted with %2r (right-aligned repr of each helicity).
    """
    lines = []
    for row, helicities in enumerate(matrix_element.get_helicity_matrix(), 1):
        values = [row, len(helicities)] + list(helicities)
        template = ("DATA (" + array_name + "(I,%4r),I=1,%d) /"
                    + ",".join(["%2r"] * len(helicities)) + "/")
        lines.append(template % tuple(values))
    return "\n".join(lines)
970
def get_ic_line(self, matrix_element):
    """Return the IC definition line coming after helicities, required by
    switchmom in madevent (identity permutation 1..nexternal)."""

    nexternal = matrix_element.get_nexternal_ninitial()[0]
    indices = ",".join(str(k) for k in range(1, nexternal + 1))
    return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, indices)
981
def set_chosen_SO_index(self, process, squared_orders):
    """ From the squared order constraints set by the user, this function
    finds what indices of the squared_orders list the user intends to pick.
    It returns this as a string of comma-separated successive '.true.' or
    '.false.' for each index."""

    user_squared_orders = process.get('squared_orders')
    split_orders = process.get('split_orders')

    # No user constraint: every squared-order combination is selected.
    if len(user_squared_orders) == 0:
        return ','.join(['.true.'] * len(squared_orders))

    flags = []
    for sqsos in squared_orders:
        matches = True
        for user_sqso, value in user_squared_orders.items():
            order_value = sqsos[split_orders.index(user_sqso)]
            op = process.get_squared_order_type(user_sqso)
            # A combination is rejected as soon as one user constraint
            # fails; '=' behaves like '<=' here.
            if op == '==':
                failed = (value != order_value)
            elif op in ['<=', '=']:
                failed = (value < order_value)
            elif op == '>':
                failed = (value >= order_value)
            else:
                failed = False
            if failed:
                matches = False
                break
        flags.append('.true.' if matches else '.false.')

    return ','.join(flags)
1009
def get_split_orders_lines(self, orders, array_name, n=5):
    """ Return the split orders definition as defined in the list orders and
    for the name of the array 'array_name'. Split rows in chunks of size n."""

    ret_list = []
    for index, order in enumerate(orders):
        # Emit the row in fixed-size chunks so the Fortran lines stay short.
        for start in range(0, len(order), n):
            stop = min(start + n, len(order))
            chunk = ','.join("%5r" % val for val in order[start:stop])
            ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" %
                            (array_name, index + 1, start + 1, stop, chunk))
    return ret_list
1021
def format_integer_list(self, list, name, n=5):
    """ Return an initialization of the python list in argument following
    the fortran syntax using the data keyword assignment, filling an array
    of name 'name'. It splits rows in chunks of size n.

    Note: the parameter is named 'list' for backward compatibility with
    existing callers; it shadows the builtin inside this method only.
    """

    lines = []
    total = len(list)
    for start in range(0, total, n):
        stop = min(start + n, total)
        values = ','.join("%5r" % entry for entry in list[start:stop])
        lines.append("DATA (%s(i),i=%3r,%3r) /%s/" % (name, start + 1, stop, values))
    return lines
1033
def get_color_data_lines(self, matrix_element, n=6):
    """Return the color matrix definition lines for this matrix element.
    Split rows in chunks of size n.

    If the matrix element carries no color matrix, a trivial 1x1 matrix
    is written out instead.
    """

    color_matrix = matrix_element.get('color_matrix')
    if not color_matrix:
        # Trivial color structure: single flow with unit factor.
        return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]

    ret_list = []
    my_cs = color.ColorString()
    for index, denominator in enumerate(color_matrix.get_line_denominators()):
        # Common denominator for this color-matrix row.
        ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
        # Numerators for the row, chunked to keep Fortran lines short.
        num_list = color_matrix.get_line_numerators(index, denominator)
        for k in range(0, len(num_list), n):
            ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" %
                            (index + 1, k + 1, min(k + n, len(num_list)),
                             ','.join("%5r" % num for num in num_list[k:k + n])))
        # Document which color-basis element this row corresponds to.
        my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
        ret_list.append("C %s" % repr(my_cs))
    return ret_list
1059 1060
def get_den_factor_line(self, matrix_element):
    """Return the denominator factor line (IDEN) for this matrix element."""

    iden = matrix_element.get_denominator_factor()
    return "DATA IDEN/%2r/" % iden
1066
def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
    """Return the ICOLAMP matrix, showing which JAMPs contribute to
    which configs (diagrams).

    Only leading-color flows (Nc power == max_Nc, loop trace excluded)
    are marked .true. for a given config.
    """

    ret_list = []

    booldict = {False: ".false.", True: ".true."}

    if not matrix_element.get('color_basis'):
        # No color, so only one color factor. Simply write a ".true."
        # for each config (i.e., each diagram with only 3 particle
        # vertices
        configs = len(mapconfigs)
        ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                        (num_matrix_element, configs,
                         ','.join([".true." for i in range(configs)])))
        return ret_list

    # There is a color basis - create a list showing which JAMPs have
    # contributions to which configs

    # Only want to include leading color flows, so find max_Nc
    color_basis = matrix_element.get('color_basis')

    # We don't want to include the power of Nc's which come from the potential
    # loop color trace (i.e. in the case of a closed fermion loop for example)
    # so we subtract it here when computing max_Nc
    max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                      color_basis.values()],[]))

    # Crate dictionary between diagram number and JAMP number
    diag_jamp = {}
    for ijamp, col_basis_elem in \
            enumerate(sorted(matrix_element.get('color_basis').keys())):
        for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
            # Only use color flows with Nc == max_Nc. However, notice that
            # we don't want to include the Nc power coming from the loop
            # in this counting.
            if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                diag_num = diag_tuple[0] + 1
                # Add this JAMP number to this diag_num
                diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                      [ijamp+1]

    # NOTE(review): 'ijamp' deliberately leaks out of the loop above; the
    # color basis is non-empty here (guarded at the top), so ijamp is set.
    colamps = ijamp + 1
    for iconfig, num_diag in enumerate(mapconfigs):
        if num_diag == 0:
            continue

        # List of True or False
        # NOTE(review): diag_jamp[num_diag] raises KeyError if a mapped
        # diagram has no leading-color flow — presumably cannot happen for
        # the configs kept in mapconfigs; confirm against callers.
        bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
        # Add line
        ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                        (iconfig+1, num_matrix_element, colamps,
                         ','.join(["%s" % booldict[b] for b in \
                                   bool_list])))

    return ret_list
1125
def get_amp2_lines(self, matrix_element, config_map = []):
    """Return the amp2(i) = sum(amp for diag(i))^2 lines.

    If config_map is given, amplitudes of diagrams sharing the same
    config/topology are summed coherently before squaring; otherwise one
    AMP2 line is written per diagram (skipping multi-particle-vertex
    diagrams).  config_map=[] is a read-only default, never mutated.
    """

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    # Get minimum legs in a vertex
    # NOTE(review): nexternal/ninitial are not used further down in this
    # method as written.
    vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    ret_lines = []
    if config_map:
        # In this case, we need to sum up all amplitudes that have
        # identical topologies, as given by the config_map (which
        # gives the topology/config for each of the diagrams
        diagrams = matrix_element.get('diagrams')
        # Combine the diagrams with identical topologies
        config_to_diag_dict = {}
        for idiag, diag in enumerate(matrix_element.get('diagrams')):
            if config_map[idiag] == 0:
                continue
            try:
                config_to_diag_dict[config_map[idiag]].append(idiag)
            except KeyError:
                config_to_diag_dict[config_map[idiag]] = [idiag]
        # Write out the AMP2s summing squares of amplitudes belonging
        # to eiher the same diagram or different diagrams with
        # identical propagator properties.  Note that we need to use
        # AMP2 number corresponding to the first diagram number used
        # for that AMP2.
        for config in sorted(config_to_diag_dict.keys()):

            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                   {"num": (config_to_diag_dict[config][0] + 1)}

            amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                            sum([diagrams[idiag].get('amplitudes') for \
                                 idiag in config_to_diag_dict[config]], [])])

            # Not using \sum |M|^2 anymore since this creates troubles
            # when ckm is not diagonal due to the JIM mechanism.
            if '+' in amp:
                # Several amplitudes: parenthesize the coherent sum.
                line += "(%s)*dconjg(%s)" % (amp, amp)
            else:
                line += "%s*dconjg(%s)" % (amp, amp)
            ret_lines.append(line)
    else:
        for idiag, diag in enumerate(matrix_element.get('diagrams')):
            # Ignore any diagrams with 4-particle vertices.
            if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                continue
            # Now write out the expression for AMP2, meaning the sum of
            # squared amplitudes belonging to the same diagram
            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
            line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                              {"num": a.get('number')} for a in \
                              diag.get('amplitudes')])
            ret_lines.append(line)

    return ret_lines
1185 1186 #=========================================================================== 1187 # Returns the data statements initializing the coeffictients for the JAMP 1188 # decomposition. It is used when the JAMP initialization is decided to be 1189 # done through big arrays containing the projection coefficients. 1190 #===========================================================================
def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                   n=50, Nc_value=3):
    """This functions return the lines defining the DATA statement setting
    the coefficients building the JAMPS out of the AMPS. Split rows in
    bunches of size n.
    One can specify the color_basis from which the color amplitudes originates
    so that there are commentaries telling what color structure each JAMP
    corresponds to."""

    # color_amplitudes must be a non-empty list of lists of
    # (coefficient, amp_number) pairs; anything else is a caller error.
    if(not isinstance(color_amplitudes,list) or
       not (color_amplitudes and isinstance(color_amplitudes[0],list))):
        raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

    res_list = []
    my_cs = color.ColorString()
    for index, coeff_list in enumerate(color_amplitudes):
        # Create the list of the complete numerical coefficient.
        # coefficient[0] is (fermion_factor, fraction, is_imaginary, Nc_power).
        coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                    (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                    coefficient in coeff_list]
        # Create the list of the numbers of the contributing amplitudes.
        # Mutliply by -1 for those which have an imaginary coefficient.
        ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                         for coefficient in coeff_list]
        # Find the common denominator.
        # NOTE(review): Py2 builtin reduce + fractions.gcd; gcd of
        # Fractions yields a Fraction whose denominator is the lcm needed.
        commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
        # Integer numerators once rescaled by the common denominator.
        num_list=[(coefficient*commondenom).numerator \
                  for coefficient in coefs_list]
        res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                        index+1,len(num_list)))
        res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                        index+1,commondenom))
        if color_basis:
            # Annotate which color structure this JAMP corresponds to.
            my_cs.from_immutable(sorted(color_basis.keys())[index])
            res_list.append("C %s" % repr(my_cs))
        # Chunked DATA statements for coefficients and amplitude numbers.
        for k in xrange(0, len(num_list), n):
            res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                            (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                             ','.join(["%6r" % i for i in num_list[k:k + n]])))
            res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                            (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                             ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
        pass
    return res_list
1235 1236
def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                               split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary.
    The split_order_amps specifies the group of amplitudes sharing the same
    amplitude orders which should be put in together in a given set of JAMPS.
    The split_order_amps is supposed to have the format of the second output
    of the function get_split_orders_mapping function in helas_objects.py.
    The split_order_names is optional (it should correspond to the process
    'split_orders' attribute) and only present to provide comments in the
    JAMP definitions in the code."""

    # Let the user call get_JAMP_lines_split_order directly from a
    # NOTE(review): due to operator precedence, only the second literal is
    # %-formatted here (with repr of split_order_amps); the '%s' in the
    # first literal is filled in later at each raise site.
    error_msg="Malformed '%s' argument passed to the "+\
              "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error, error_msg%'col_amps'
    else:
        raise MadGraph5Error, error_msg%'col_amps'

    # Verify the sanity of the split_order_amps and split_order_names args
    if isinstance(split_order_amps,list):
        for elem in split_order_amps:
            if len(elem)!=2:
                raise MadGraph5Error, error_msg%'split_order_amps'
            # Check the first element of the two lists to make sure they are
            # integers, although in principle they should all be integers.
            if not isinstance(elem[0],tuple) or \
               not isinstance(elem[1],tuple) or \
               not isinstance(elem[0][0],int) or \
               not isinstance(elem[1][0],int):
                raise MadGraph5Error, error_msg%'split_order_amps'
    else:
        raise MadGraph5Error, error_msg%'split_order_amps'

    if not split_order_names is None:
        if isinstance(split_order_names,list):
            # Should specify the same number of names as there are elements
            # in the key of the split_order_amps.
            if len(split_order_names)!=len(split_order_amps[0][0]):
                raise MadGraph5Error, error_msg%'split_order_names'
            # Check the first element of the list to be a string
            if not isinstance(split_order_names[0],str):
                raise MadGraph5Error, error_msg%'split_order_names'
        else:
            raise MadGraph5Error, error_msg%'split_order_names'

    # Now scan all contributing orders to be individually computed and
    # construct the list of color_amplitudes for JAMP to be constructed
    # accordingly.
    res_list=[]
    for i, amp_order in enumerate(split_order_amps):
        # Keep only contributions whose amplitude number belongs to this
        # order group (Py2 filter returns a list).
        col_amps_order = []
        for jamp in color_amplitudes:
            col_amps_order.append(filter(lambda col_amp:
                                         col_amp[1] in amp_order[1],jamp))
        if split_order_names:
            res_list.append('C JAMPs contributing to orders '+' '.join(
                ['%s=%i'%order for order in zip(split_order_names,
                                                amp_order[0])]))
        # NOTE(review): the JAMP_formatLC keyword is only understood by the
        # matchbox exporter's override of get_JAMP_lines; confirm subclass.
        if self.opt['export_format'] in ['madloop_matchbox']:
            res_list.extend(self.get_JAMP_lines(col_amps_order,
                            JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                            JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
        else:
            res_list.extend(self.get_JAMP_lines(col_amps_order,
                            JAMP_format="JAMP(%s,{0})".format(str(i+1))))

    return res_list
1311 1312
def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                   split=-1):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary,
    Jamp_formatLC should be define to allow to add LeadingColor computation
    (usefull for MatchBox)
    The split argument defines how the JAMP lines should be split in order
    not to be too long."""

    # Let the user call get_JAMP_lines directly from a MatrixElement or from
    # the color amplitudes lists.
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
    else:
        raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


    res_list = []
    for i, coeff_list in enumerate(color_amplitudes):
        # It might happen that coeff_list is empty if this function was
        # called from get_JAMP_lines_split_order (i.e. if some color flow
        # does not contribute at all for a given order).
        # In this case we simply set it to 0.
        if coeff_list==[]:
            res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
            continue
        # Break the JAMP definition into 'n=split' pieces to avoid having
        # arbitrarly long lines.
        first=True
        # split<=0 means "never split": one chunk covering everything.
        n = (len(coeff_list)+1 if split<=0 else split)
        while coeff_list!=[]:
            coefs=coeff_list[:n]
            coeff_list=coeff_list[n:]
            # Continuation chunks start from "JAMP(i)=JAMP(i)..." so each
            # emitted line is a valid self-accumulating assignment.
            res = ((JAMP_format+"=") % str(i + 1)) + \
                  ((JAMP_format % str(i + 1)) if not first and split>0 else '')

            first=False
            # Optimization: if all contributions to that color basis element have
            # the same coefficient (up to a sign), put it in front
            list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
            common_factor = False
            diff_fracs = list(set(list_fracs))
            if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                common_factor = True
                global_factor = diff_fracs[0]
                res = res + '%s(' % self.coeff(1, global_factor, False, 0)

            # loop for JAMP
            for (coefficient, amp_number) in coefs:
                if not coefficient:
                    continue
                if common_factor:
                    # Factor pulled out: only the sign of the fraction stays.
                    res = (res + "%s" + AMP_format) % \
                          (self.coeff(coefficient[0],
                                      coefficient[1] / abs(coefficient[1]),
                                      coefficient[2],
                                      coefficient[3]),
                           str(amp_number))
                else:
                    res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                       coefficient[1],
                                                       coefficient[2],
                                                       coefficient[3]),
                                                       str(amp_number))

            if common_factor:
                res = res + ')'

            res_list.append(res)

    return res_list
1389
def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
    """Generate the PDF lines for the auto_dsig.f file.

    Returns a triple of Fortran snippets (each without trailing newline):
    PDF variable declarations, PDF DATA initializations, and the PDF
    evaluation/summation code.  For decays (ninitial == 1) the PDFs are
    trivially 1.
    """

    processes = matrix_element.get('processes')
    model = processes[0].get('model')

    pdf_definition_lines = ""
    pdf_data_lines = ""
    pdf_lines = ""

    if ninitial == 1:
        # Decay process: no PDF convolution, weight each subprocess by 1.
        pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
        for i, proc in enumerate(processes):
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
            pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
    else:
        # Pick out all initial state particles for the two beams
        initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                           p in processes]))),
                          sorted(list(set([p.get_initial_pdg(2) for \
                                           p in processes])))]

        # Prepare all variable names
        # Sanitize particle names into valid Fortran identifiers.
        pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                          sum(initial_states,[])])
        for key,val in pdf_codes.items():
            pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

        # Set conversion from PDG code to number used in PDF calls
        pdgtopdf = {21: 0, 22: 7}

        # Fill in missing entries of pdgtopdf
        for pdg in sum(initial_states,[]):
            if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                pdgtopdf[pdg] = pdg
            elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                # If any particle has pdg code 7, we need to use something else
                pdgtopdf[pdg] = 6000000 + pdg

        # Get PDF variable declarations for all initial states
        for i in [0,1]:
            pdf_definition_lines += "DOUBLE PRECISION " + \
                                    ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                              for pdg in \
                                              initial_states[i]]) + \
                                    "\n"

        # Get PDF data lines for all initial states
        for i in [0,1]:
            pdf_data_lines += "DATA " + \
                              ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                        for pdg in initial_states[i]]) + \
                              "/%d*1D0/" % len(initial_states[i]) + \
                              "\n"

        # Get PDF lines for all different initial states
        for i, init_states in enumerate(initial_states):
            # subproc_group selects the IB(...) beam-index indirection used
            # by grouped subprocesses.
            if subproc_group:
                pdf_lines = pdf_lines + \
                            "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                            % (i + 1, i + 1)
            else:
                pdf_lines = pdf_lines + \
                            "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                            % (i + 1, i + 1)

            for initial_state in init_states:
                if initial_state in pdf_codes.keys():
                    if subproc_group:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
                                     "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                    (pdf_codes[initial_state],
                                     i + 1, i + 1, pdgtopdf[initial_state],
                                     i + 1, i + 1)
                    else:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
                                     "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                    (pdf_codes[initial_state],
                                     i + 1, i + 1, pdgtopdf[initial_state],
                                     i + 1, i + 1)
            pdf_lines = pdf_lines + "ENDIF\n"

        # Add up PDFs for the different initial state particles
        pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
        for proc in processes:
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)="
            for ibeam in [1, 2]:
                initial_state = proc.get_initial_pdg(ibeam)
                if initial_state in pdf_codes.keys():
                    pdf_lines = pdf_lines + "%s%d*" % \
                                (pdf_codes[initial_state], ibeam)
                else:
                    pdf_lines = pdf_lines + "1d0*"
            # Remove last "*" from pdf_lines
            pdf_lines = pdf_lines[:-1] + "\n"
            pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

    # Remove last line break from the return variables
    return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1495 1496 #=========================================================================== 1497 # write_props_file 1498 #===========================================================================
def write_props_file(self, writer, matrix_element, s_and_t_channels):
    """Write the props.inc file for MadEvent. Needs input from
    write_configs_file.

    Emits prmass/prwidth/pow lines for every internal propagator of each
    config; unknown particle ids (multiparticle fake propagators) get
    zero mass/width.  Always returns True.
    """

    prop_lines = []

    particle_dict = matrix_element.get('processes')[0].get('model').\
                    get('particle_dict')

    for iconf, configs in enumerate(s_and_t_channels):
        # All s-channel vertices plus t-channel ones except the last.
        for vertex in configs[0] + configs[1][:-1]:
            leg = vertex.get('legs')[-1]
            pdg = leg.get('id')
            if pdg not in particle_dict:
                # Fake propagator used in multiparticle vertices
                mass = 'zero'
                width = 'zero'
                pow_part = 0
            else:
                particle = particle_dict[pdg]
                # Mass: keep 'zero' literally, otherwise wrap in abs().
                if particle.get('mass').lower() == 'zero':
                    mass = particle.get('mass')
                else:
                    mass = "abs(%s)" % particle.get('mass')
                # Width: same convention as the mass.
                if particle.get('width').lower() == 'zero':
                    width = particle.get('width')
                else:
                    width = "abs(%s)" % particle.get('width')
                # Propagator power: 2 for bosons, 1 for fermions.
                pow_part = 1 + int(particle.is_boson())

            number = leg.get('number')
            prop_lines.append("prmass(%d,%d) = %s" % (number, iconf + 1, mass))
            prop_lines.append("prwidth(%d,%d) = %s" % (number, iconf + 1, width))
            prop_lines.append("pow(%d,%d) = %d" % (number, iconf + 1, pow_part))

    # Write the file
    writer.writelines(prop_lines)

    return True
1542 1543 #=========================================================================== 1544 # write_configs_file 1545 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Builds a one-config-per-diagram mapping and delegates the actual file
    writing to write_configs_file_from_diagrams.  Returns (mapconfigs,
    s_and_t_channels) where the second item is whatever the delegate
    returns.
    """

    # Extract number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    # Number the diagrams from 1; each diagram is its own config here.
    numbered = list(enumerate(matrix_element.get('diagrams'), 1))
    mapconfigs = [num for num, diag in numbered]

    model = matrix_element.get('processes')[0].get('model')

    return mapconfigs, self.write_configs_file_from_diagrams(
        writer,
        [[diag] for num, diag in numbered],
        mapconfigs,
        nexternal, ninitial,
        model)
1560 1561 #=========================================================================== 1562 # write_configs_file_from_diagrams 1563 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses."""

    lines = []

    s_and_t_channels = []

    # Smallest maximal vertex size over all configs with at least one
    # real diagram; used to reject multi-particle-vertex configs below.
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # Fictitious PDG code used for multiparticle/shrunk-loop propagators.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any(vert > minvert for vert in [d for d in helas_diags if d]\
               [0].get_vertex_leg_numbers()) :
            # Only 3-vertices allowed in configs.inc except for vertices
            # which originate from a shrunk loop.
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        # NOTE(review): empty_verts is shared by reference with the
        # stchannels placeholders and resized in place further down.
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model, new_pdg))
            else:
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            # (Py2 zip returns a list, so '+' concatenation below works.)
            schannels = zip(*[s for s,t in stchannels])
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))

        for verts in allchannels:
            if verts in schannels:
                # s-channel entry: tuple over subprocesses; pick the first
                # non-None diagram vertex as representative.
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 if absent).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels
1682 1683 #=========================================================================== 1684 # Global helper methods 1685 #=========================================================================== 1686
def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
    """Return a nicely formatted Fortran factor string for a JAMP coefficient.

    The full coefficient is ff_number * frac * Nc_value**Nc_power; the
    result always ends with '*' (or is a bare sign for +/-1) so it can be
    prepended directly to an amplitude name.
    """

    value = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power

    # Coefficients of exactly +/-1 collapse to a bare sign,
    # times imag1 when the contribution is imaginary.
    if value == 1 or value == -1:
        sign = '+' if value == 1 else '-'
        return (sign + 'imag1*') if is_imaginary else sign

    # General case: signed numerator, optional denominator, both in
    # Fortran double-precision literal form (e.g. '+3D0/2D0').
    out = '%+iD0' % value.numerator
    if value.denominator != 1:
        out += '/%iD0' % value.denominator
    if is_imaginary:
        out += '*imag1'
    return out + '*'
def set_fortran_compiler(self, default_compiler, force=False):
    """Set compiler based on what's available on the system.

    default_compiler -- dict with 'fortran' and 'f2py' entries giving the
                        user-preferred compiler names (entries may be empty).
    force            -- unused here; kept for the interface of subclasses.
    Returns the fortran compiler name finally retained.
    """

    # Check for compiler: user preference first (if actually on PATH),
    # then common fallbacks in decreasing order of preference.
    if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
        f77_compiler = default_compiler['fortran']
    elif misc.which('gfortran'):
        f77_compiler = 'gfortran'
    elif misc.which('g77'):
        f77_compiler = 'g77'
    elif misc.which('f77'):
        f77_compiler = 'f77'
    elif default_compiler['fortran']:
        # The preferred compiler is not found by which(); still try it,
        # since which() itself may have failed.
        logger.warning('No Fortran Compiler detected! Please install one')
        f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it
    else:
        raise MadGraph5Error, 'No Fortran Compiler detected! Please install one'
    logger.info('Use Fortran compiler ' + f77_compiler)


    # Check for f2py compiler. 1. set default.
    if default_compiler['f2py']:
        f2py_compiler = default_compiler['f2py']
    else:
        f2py_compiler = ''
    # Try to find the correct one.
    # NOTE(review): sys.version_info[1] compares only the *minor* version
    # number — this assumes a python2 interpreter; confirm before porting.
    if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
        f2py_compiler = default_compiler['f2py']
    elif misc.which('f2py'):
        f2py_compiler = 'f2py'
    elif sys.version_info[1] == 6:
        if misc.which('f2py-2.6'):
            f2py_compiler = 'f2py-2.6'
        elif misc.which('f2py2.6'):
            f2py_compiler = 'f2py2.6'
    elif sys.version_info[1] == 7:
        if misc.which('f2py-2.7'):
            f2py_compiler = 'f2py-2.7'
        elif misc.which('f2py2.7'):
            f2py_compiler = 'f2py2.7'

    to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}

    # Record the choice in this output's Source/make_opts.
    self.replace_make_opt_f_compiler(to_replace)
    # Replace also for Template but not for cluster
    if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite:
        self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

    return f77_compiler

# an alias for backward compatibility
set_compiler = set_fortran_compiler
def set_cpp_compiler(self, default_compiler, force=False):
    """Set compiler based on what's available on the system.

    default_compiler -- preferred c++ compiler name (string, may be empty).
    force            -- unused here; kept for the interface of subclasses.
    Returns the c++ compiler name finally retained.
    """

    # Check for compiler: user preference first, then standard fallbacks.
    if default_compiler and misc.which(default_compiler):
        compiler = default_compiler
    elif misc.which('g++'):
        # check if 'g++' is really a clang front-end (common on macOS);
        # if so, and a real clang exists, use clang directly.
        p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
        out, _ = p.communicate()
        if 'clang' in out and misc.which('clang'):
            compiler = 'clang'
        else:
            compiler = 'g++'
    elif misc.which('c++'):
        compiler = 'c++'
    elif misc.which('clang'):
        compiler = 'clang'
    elif default_compiler:
        # Preferred compiler not found by which(); still try it since
        # which() itself may have failed.
        logger.warning('No c++ Compiler detected! Please install one')
        compiler = default_compiler # maybe misc fail so try with it
    else:
        raise MadGraph5Error, 'No c++ Compiler detected! Please install one'
    logger.info('Use c++ compiler ' + compiler)
    self.replace_make_opt_c_compiler(compiler)
    # Replace also for Template but not for cluster (MADGRAPH_DATA set),
    # nor in debug mode or a development checkout (create_release.py).
    if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \
       not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')):
        self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO'))

    return compiler
1802 1803
def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
    """Record the fortran/f2py compiler choice (FC=...) in Source/make_opts.

    compilers -- dict with 'fortran' and 'f2py' entries.
    root_dir  -- output directory to patch; defaults to self.dir_path.
    """

    assert isinstance(compilers, dict)

    mod = False  # avoid rewriting the file if nothing needs to change
    if not root_dir:
        root_dir = self.dir_path

    fortran_name = compilers['fortran']
    f2py_name = compilers['f2py'] or 'f2py'
    for_update = {'DEFAULT_F_COMPILER': fortran_name,
                  'DEFAULT_F2PY_COMPILER': f2py_name}
    make_opts = pjoin(root_dir, 'Source', 'make_opts')

    try:
        common_run_interface.CommonRunCmd.update_make_opts_full(
            make_opts, for_update)
    except IOError:
        # A missing make_opts only matters for the process dir itself.
        if root_dir == self.dir_path:
            logger.info('Fail to set compiler. Trying to continue anyway.')
1827
def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
    """Record the c++ compiler choice (CXX=...) in Source/make_opts.

    The compiler is probed so that clang/libc++ setups (macOS) get the
    extra MACFLAG/STDLIB/STDLIB_FLAG settings they require.
    """

    is_clang = misc.detect_if_cpp_compiler_is_clang(compiler)
    is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++'

    # Variables to (re)set in the make_opts file.
    for_update = {
        'DEFAULT_CPP_COMPILER': compiler,
        'MACFLAG': '-mmacosx-version-min=10.7' if is_clang and is_lc else '',
        'STDLIB': '-lc++' if is_lc else '-lstdc++',
        'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '',
    }

    if not root_dir:
        root_dir = self.dir_path
    make_opts = pjoin(root_dir, 'Source', 'make_opts')

    try:
        common_run_interface.CommonRunCmd.update_make_opts_full(
            make_opts, for_update)
    except IOError:
        # A missing make_opts only matters for the process dir itself.
        if root_dir == self.dir_path:
            logger.info('Fail to set compiler. Trying to continue anyway.')

    return
1855
1856 #=============================================================================== 1857 # ProcessExporterFortranSA 1858 #=============================================================================== 1859 -class ProcessExporterFortranSA(ProcessExporterFortran):
1860 """Class to take care of exporting a set of matrix elements to 1861 MadGraph v4 StandAlone format.""" 1862 1863 matrix_template = "matrix_standalone_v4.inc" 1864
def __init__(self, *args, **opts):
    """Record the requested output format on top of the standard init.

    The extra 'format' keyword (default 'standalone') is consumed here
    before forwarding the remaining options to the parent class.
    """

    self.format = opts.pop('format', 'standalone')

    # Mapping pdg-tuple -> [fortran prefix, process tag], filled while
    # generating the subprocess directories.
    self.prefix_info = {}
    ProcessExporterFortran.__init__(self, *args, **opts)
1876
def copy_template(self, model):
    """Create and populate a fresh standalone output directory.

    Does nothing when self.dir_path already exists.
    """

    if os.path.isdir(self.dir_path):
        return

    logger.info('initialize a new standalone directory: %s' % \
                    os.path.basename(self.dir_path))
    temp_dir = pjoin(self.mgme_dir, 'Template/LO')

    # Build the directory skeleton (parents listed before children).
    for parts in [(), ('Source',), ('Source', 'MODEL'),
                  ('Source', 'DHELAS'), ('SubProcesses',), ('bin',),
                  ('bin', 'internal'), ('lib',), ('Cards',)]:
        os.mkdir(pjoin(self.dir_path, *parts))

    # Version stamps at top level.
    shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
    try:
        shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
    except IOError:
        # No MGME version file: derive the stamp from the MG5 package info.
        MG5_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
            "5." + MG5_version['version'])

    # SubProcesses-level makefile, plus the check_sa driver for the
    # plain standalone format.
    shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                pjoin(self.dir_path, 'SubProcesses', 'makefileP'))
    if self.format == 'standalone':
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                    pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

    # Source-level build files.
    shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                pjoin(self.dir_path, 'Source'))
    makefile_path = pjoin(self.dir_path, 'Source', 'makefile')
    self.write_source_makefile(writers.FileWriter(makefile_path))
1925 1926 #=========================================================================== 1927 # export model files 1928 #===========================================================================
def export_model_files(self, model_path):
    """export the model dependent files for V4 model

    On top of the generic export, append the update_as_param subroutine
    to Source/MODEL/couplings.f and patch SubProcesses/check_sa.f so that
    setpara is called with the explicit read-card flag.
    """

    super(ProcessExporterFortranSA,self).export_model_files(model_path)

    # Add the routine update_as_param in v4 model
    # This is a function created in the UFO
    text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
          end
"""
    # BUGFIX: use context managers so the appended text is flushed and the
    # handles are closed deterministically (the originals leaked open files).
    with open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a') as ff:
        ff.write(text)

    # check_sa.f must request an explicit (re)read of the param_card.
    check_sa_path = pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')
    with open(check_sa_path) as fsock:
        text = fsock.read()
    text = text.replace('call setpara(\'param_card.dat\')',
                        'call setpara(\'param_card.dat\', .true.)')
    with open(check_sa_path, 'w') as fsock:
        fsock.write(text)

    self.make_model_symbolic_link()
1952 1953 #=========================================================================== 1954 # Make the Helas and Model directories for Standalone directory 1955 #===========================================================================
def make(self):
    """Compile the DHELAS and MODEL libraries needed to run standalone."""

    source_dir = pjoin(self.dir_path, "Source")
    # Build each static library through the Source-level makefile.
    for label, target in (("Helas", '../lib/libdhelas.a'),
                          ("Model", '../lib/libmodel.a')):
        logger.info("Running make for %s" % label)
        misc.compile(arg=[target], cwd=source_dir, mode='fortran')
1966 1967 #=========================================================================== 1968 # Create proc_card_mg5.dat for Standalone directory 1969 #===========================================================================
def finalize(self, matrix_elements, history, mg5options, flaglist):
    """Finalize Standalone MG4 directory by
       generation proc_card_mg5.dat
       generate a global makefile
    """

    # Compilers requested on the mg5 command line.
    compiler =  {'fortran': mg5options['fortran_compiler'],
                 'cpp': mg5options['cpp_compiler'],
                 'f2py': mg5options['f2py_compiler']}

    self.compiler_choice(compiler)
    self.make()

    # Write command history as proc_card_mg5
    if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
        output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
        history.write(output_file)

    ProcessExporterFortran.finalize(self, matrix_elements,
                                    history, mg5options, flaglist)
    # Make the output importable as a python package (empty __init__.py).
    open(pjoin(self.dir_path,'__init__.py'),'w')
    open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

    if 'mode' in self.opt and self.opt['mode'] == "reweight":
        #add the module to hande the NLO weight
        files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                       pjoin(self.dir_path, 'Source'))
        files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                       pjoin(self.dir_path, 'Source', 'PDF'))
        self.write_pdf_opendata()

    if self.prefix_info:
        # f2py output: dispatcher, makefile and python driver.
        self.write_f2py_splitter()
        self.write_f2py_makefile()
        self.write_f2py_check_sa(matrix_elements,
                                 pjoin(self.dir_path,'SubProcesses','check_sa.py'))
    else:
        # create a single makefile to compile all the subprocesses
        text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n  MENUM=2\nendif\n'''
        deppython = ''
        for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
            if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
        text+='all: %s\n\techo \'done\'' % deppython

        # Append the python-linking rules to the SubProcesses makefile.
        ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
        ff.write(text)
        ff.close()
2019
def write_f2py_splitter(self):
    """write a function to call the correct matrix element

    Produces SubProcesses/all_matrix.f containing:
      - smatrixhel: dispatch on (npdg, pdgs) to the per-process
        <prefix>smatrixhel routine,
      - initialise: read the param_card,
      - get_pdg_order / get_prefix: expose the pdg content and the
        fortran prefix of every exported process to python.
    """

    template = """
      %(python_information)s
      subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
      if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END

      subroutine get_pdg_order(PDG)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      DATA PDGS/ %(pdgs)s /
      PDG = PDGS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END

    """

    # All exported processes, keyed by the tuple of external pdg codes.
    allids = list(self.prefix_info.keys())
    allprefix = [self.prefix_info[key][0] for key in allids]
    min_nexternal = min([len(ids) for ids in allids])
    max_nexternal = max([len(ids) for ids in allids])

    # Human-readable summary placed at the top of the generated file.
    info = []
    for key, (prefix, tag) in self.prefix_info.items():
        info.append('#PY %s : %s # %s' % (tag, key, prefix))

    # Build the nested if/else dispatch over multiplicity and pdg codes.
    text = []
    for n_ext in range(min_nexternal, max_nexternal+1):
        current = [ids for ids in allids if len(ids)==n_ext]
        if not current:
            continue
        if min_nexternal != max_nexternal:
            if n_ext == min_nexternal:
                text.append('       if (npdg.eq.%i)then' % n_ext)
            else:
                text.append('       else if (npdg.eq.%i)then' % n_ext)
        for ii, pdgs in enumerate(current):
            condition = '.and.'.join(['%i.eq.pdgs(%i)' % (pdg, i+1)
                                      for i, pdg in enumerate(pdgs)])
            # BUGFIX: the trailing fortran comment used to interpolate
            # 'i', the index leaked from the comprehension above (always
            # len(pdgs)-1 under python2, a NameError under python3);
            # 'ii' is the intended process counter.
            if ii==0:
                text.append( '       if(%s) then ! %i' % (condition, ii))
            else:
                text.append( '       else if(%s) then ! %i' % (condition, ii))
            text.append('           call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0])
        text.append('       endif')
    #close the function
    if min_nexternal != max_nexternal:
        text.append('endif')

    formatting = {'python_information':'\n'.join(info),
                  'smatrixhel': '\n'.join(text),
                  'maxpart': max_nexternal,
                  'nb_me': len(allids),
                  # pdg table flattened column-wise, zero-padded up to maxpart
                  'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0'
                                   for i in range(max_nexternal) for pdg in allids),
                  'prefix':'\',\''.join(allprefix)
                  }
    formatting['lenprefix'] = len(formatting['prefix'])
    text = template % formatting
    fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w')
    fsock.writelines(text)
    fsock.close()
2125
def write_f2py_check_sa(self, matrix_element, writer):
    """Write the general check_sa.py in SubProcesses calling every process.

    Placeholder implementation: the file is only an example driver and is
    not crucial, so nothing is produced yet.
    """
    return
2130
def write_f2py_makefile(self):
    """Install the f2py makefile in the SubProcesses directory."""
    # Copy the template makefile as SubProcesses/makefile.
    src = pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files',
                'makefile_sa_f2py')
    shutil.copy(src, pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2136
def create_MA5_cards(self, *args, **opts):
    """Bypass MadAnalysis5 card creation: not relevant for StandAlone."""
    return None
2140
def compiler_choice(self, compiler):
    """Hook selecting the compiler set for this output format.

    Daughter classes may override this method when they need a different
    choice; the standalone default forwards to set_compiler (an alias of
    set_fortran_compiler).
    """
    self.set_compiler(compiler)
2146 2147 #=========================================================================== 2148 # generate_subprocess_directory 2149 #===========================================================================
def generate_subprocess_directory(self, matrix_element,
                                  fortran_model, number):
    """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
    including the necessary matrix.f and nexternal.inc files.

    Returns the number of helas calls written (0 when the directory is
    skipped because a symmetric equivalent already exists).
    """

    cwd = os.getcwd()
    # Create the directory PN_xx_xxxxx in the specified path
    dirpath = pjoin(self.dir_path, 'SubProcesses', \
               "P%s" % matrix_element.get('processes')[0].shell_string())

    if self.opt['sa_symmetry']:
        # avoid symmetric output: if any permutation of the final-state
        # legs (with initial legs possibly swapped) maps onto an already
        # generated directory, skip this one.  The leg list is mutated in
        # place to probe shell_string() and restored afterwards — the
        # ordering of the restore statements is essential.
        for i,proc in enumerate(matrix_element.get('processes')):

            tag = proc.get_tag()
            legs = proc.get('legs')[:]  # saved copy for restoration
            leg0 = proc.get('legs')[0]
            leg1 = proc.get('legs')[1]
            if not leg1.get('state'):
                # put the incoming leg first
                proc.get('legs')[0] = leg1
                proc.get('legs')[1] = leg0
            flegs = proc.get('legs')[2:]
            for perm in itertools.permutations(flegs):
                for i,p in enumerate(perm):
                    proc.get('legs')[i+2] = p
                dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                           "P%s" % proc.shell_string())
                #restore original order
                proc.get('legs')[2:] = legs[2:]
                if os.path.exists(dirpath2):
                    proc.get('legs')[:] = legs
                    return 0
            proc.get('legs')[:] = legs

    try:
        os.mkdir(dirpath)
    except os.error as error:
        logger.warning(error.strerror + " " + dirpath)

    #try:
    #    os.chdir(dirpath)
    #except os.error:
    #    logger.error('Could not cd to directory %s' % dirpath)
    #    return 0

    logger.info('Creating files in directory %s' % dirpath)

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Create the matrix.f file and the nexternal.inc file
    if self.opt['export_format']=='standalone_msP':
        filename = pjoin(dirpath, 'matrix_prod.f')
    else:
        filename = pjoin(dirpath, 'matrix.f')

    # Optional routine prefix requested via --prefix=int|proc; also
    # records the pdg content for the f2py dispatcher (prefix_info).
    proc_prefix = ''
    if 'prefix' in self.cmd_options:
        if self.cmd_options['prefix'] == 'int':
            proc_prefix = 'M%s_' % number
        elif self.cmd_options['prefix'] == 'proc':
            proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1]
        else:
            raise Exception, '--prefix options supports only \'int\' and \'proc\''
        for proc in matrix_element.get('processes'):
            ids = [l.get('id') for l in proc.get('legs_with_decays')]
            self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()]

    calls = self.write_matrix_element_v4(
        writers.FortranWriter(filename),
        matrix_element,
        fortran_model,
        proc_prefix=proc_prefix)

    if self.opt['export_format'] == 'standalone_msP':
        # Extra files needed by MadSpin production output.
        filename = pjoin(dirpath,'configs_production.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath,'props_production.inc')
        self.write_props_file(writers.FortranWriter(filename),
                         matrix_element,
                         s_and_t_channels)

        filename = pjoin(dirpath,'nexternal_prod.inc')
        self.write_nexternal_madspin(writers.FortranWriter(filename),
                         nexternal, ninitial)

    if self.opt['export_format']=='standalone_msF':
        filename = pjoin(dirpath, 'helamp.inc')
        ncomb=matrix_element.get_helicity_combinations()
        self.write_helamp_madspin(writers.FortranWriter(filename),
                         ncomb)

    filename = pjoin(dirpath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                         nexternal, ninitial)

    filename = pjoin(dirpath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                     matrix_element)

    filename = pjoin(dirpath, 'ngraphs.inc')
    self.write_ngraphs_file(writers.FortranWriter(filename),
                        len(matrix_element.get_all_amplitudes()))

    # Generate diagrams
    filename = pjoin(dirpath, "matrix.ps")
    plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                         get('diagrams'),
                                      filename,
                                      model=matrix_element.get('processes')[0].\
                                         get('model'),
                                      amplitude=True)
    logger.info("Generating Feynman diagrams for " + \
                 matrix_element.get('processes')[0].nice_string())
    plot.draw()

    linkfiles = ['check_sa.f', 'coupl.inc']

    # With a prefix the driver must call the prefixed smatrix routines:
    # copy check_sa.f with 'smatrix' substituted instead of symlinking it.
    if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
        text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
        pat = re.compile('smatrix', re.I)
        new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
        with open(pjoin(dirpath, 'check_sa.f'),'w') as f:
            f.write(new_text)
        linkfiles.pop(0)

    for file in linkfiles:
        ln('../%s' % file, cwd=dirpath)
    ln('../makefileP', name='makefile', cwd=dirpath)
    # Return to original PWD
    #os.chdir(cwd)

    if not calls:
        calls = 0
    return calls
2288 2289 2290 #=========================================================================== 2291 # write_source_makefile 2292 #===========================================================================
def write_source_makefile(self, writer):
    """Write the Source-level makefile for a standalone output.

    Fills the madevent_makefile_source template with the libraries and
    rules needed here (DHELAS and MODEL only) and returns the dict of
    substitutions so subclasses can extend it.
    """

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_makefile_source')
    replace_dict = {
        'libraries': '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)',
        'model': '''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''',
        'additional_dsample': '',
        'additional_dependencies': '',
    }

    text = open(template_path).read() % replace_dict

    if writer:
        writer.write(text)

    return replace_dict
2311 2312 #=========================================================================== 2313 # write_matrix_element_v4 2314 #===========================================================================
def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                            write=True, proc_prefix=''):
    """Export a matrix element to a matrix.f file in MG4 standalone format
    if write is on False, just return the replace_dict and not write anything.

    Returns the number of (non-comment) helas calls when writing, or the
    replace_dict for subclass post-processing when write is False.
    """

    # Nothing to export for an empty matrix element.
    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    if writer:
        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

    # Default: no special handling of symmetric processes.
    if not self.opt.has_key('sa_symmetry'):
        self.opt['sa_symmetry']=False

    # The proc_id is for MadEvent grouping which is never used in SA.
    replace_dict = {'global_variable':'', 'amp2_lines':'',
                    'proc_prefix':proc_prefix, 'proc_id':''}

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(\
                matrix_element)

    replace_dict['helas_calls'] = "\n".join(helas_calls)

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal
    replace_dict['nincoming'] = ninitial

    # Extract ncomb (number of helicity combinations)
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor (at least 1 even for a trivial color basis)
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
    replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                  matrix_element.get_beams_hel_avg_factor()

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    if self.opt['export_format']=='standalone_msP':
        # For MadSpin need to return the AMP2
        amp2_lines = self.get_amp2_lines(matrix_element, [] )
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
        replace_dict['global_variable'] = \
             "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

    # JAMP definition, depends on the number of independent split orders
    split_orders=matrix_element.get('processes')[0].get('split_orders')

    if len(split_orders)==0:
        replace_dict['nSplitOrders']=''
        # Extract JAMP lines
        jamp_lines = self.get_JAMP_lines(matrix_element)
        # Consider the output of a dummy order 'ALL_ORDERS' for which we
        # set all amplitude order to weight 1 and only one squared order
        # contribution which is of course ALL_ORDERS=2.
        squared_orders = [(2,),]
        amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
        replace_dict['chosen_so_configs'] = '.TRUE.'
        replace_dict['nSqAmpSplitOrders']=1
        replace_dict['split_order_str_list']=''
    else:
        squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
        replace_dict['nAmpSplitOrders']=len(amp_orders)
        replace_dict['nSqAmpSplitOrders']=len(squared_orders)
        replace_dict['nSplitOrders']=len(split_orders)
        replace_dict['split_order_str_list']=str(split_orders)
        amp_so = self.get_split_orders_lines(
                [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
        sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
        replace_dict['ampsplitorders']='\n'.join(amp_so)
        replace_dict['sqsplitorders']='\n'.join(sqamp_so)
        jamp_lines = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)

        # Now setup the array specifying what squared split order is chosen
        replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                          matrix_element.get('processes')[0],squared_orders)

        # For convenience we also write the driver check_sa_splitOrders.f
        # that explicitely writes out the contribution from each squared order.
        # The original driver still works and is compiled with 'make' while
        # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
        # NOTE(review): this writer targets the *current working directory*,
        # not dirpath — presumably relied upon by the caller; confirm.
        check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
        self.write_check_sa_splitOrders(squared_orders,split_orders,
          nexternal,ninitial,proc_prefix,check_sa_writer)

    if write:
        writers.FortranWriter('nsqso_born.inc').writelines(
            """INTEGER NSQSO_BORN
               PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

    # Select the matrix.f template according to the export format.
    matrix_template = self.matrix_template
    if self.opt['export_format']=='standalone_msP' :
        matrix_template = 'matrix_standalone_msP_v4.inc'
    elif self.opt['export_format']=='standalone_msF':
        matrix_template = 'matrix_standalone_msF_v4.inc'
    elif self.opt['export_format']=='matchbox':
        replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
        replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

    if len(split_orders)>0:
        if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
            logger.debug("Warning: The export format %s is not "+\
              " available for individual ME evaluation of given coupl. orders."+\
              " Only the total ME will be computed.", self.opt['export_format'])
        elif self.opt['export_format'] in ['madloop_matchbox']:
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
            matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
        else:
            matrix_template = "matrix_standalone_splitOrders_v4.inc"

    replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
    replace_dict['template_file2'] = pjoin(_file_path, \
                     'iolibs/template_files/split_orders_helping_functions.inc')
    if write and writer:
        path = replace_dict['template_file']
        content = open(path).read()
        content = content % replace_dict
        # Write the file
        writer.writelines(content)
        # Add the helper functions.
        if len(split_orders)>0:
            content = '\n' + open(replace_dict['template_file2'])\
                               .read()%replace_dict
            writer.writelines(content)
        return len(filter(lambda call: call.find('#') != 0, helas_calls))
    else:
        replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls))
        return replace_dict # for subclass update
2485
def write_check_sa_splitOrders(self, squared_orders, split_orders, nexternal,
                               nincoming, proc_prefix, writer):
    """Write the advanced check_sa driver that individually reports the
    matrix element of every contributing squared coupling order.

    With writer=None the substitution dict is returned instead of
    written out.
    """

    template = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \
                      'template_files', 'check_sa_splitOrders.f')).read()

    # One fortran print statement per squared order combination.
    printout_sq_orders = []
    for i, squared_order in enumerate(squared_orders):
        terms = ['%s=%d' % (split_orders[j], sqo)
                 for j, sqo in enumerate(squared_order)]
        printout_sq_orders.append(
            "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"
            % (i+1, ' '.join(terms), i+1))
    printout_sq_orders = '\n'.join(printout_sq_orders)

    replace_dict = {'printout_sqorders': printout_sq_orders,
                    'nSplitOrders': len(squared_orders),
                    'nexternal': nexternal,
                    'nincoming': nincoming,
                    'proc_prefix': proc_prefix}

    if writer:
        writer.writelines(template % replace_dict)
    else:
        return replace_dict
2513
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # Default options: keep existing directories, Matchbox export format,
    # no complex-mass scheme / multiple precision, symmetric standalone output.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return Fortran lines defining the color-structure lookup for this
        matrix element: an if/elseif chain mapping the pair of indices
        (in1, in2) to the corresponding T/Tr argument of the color basis,
        with 'out = -1' as fallback (and 'out = 1' for a trivial color
        matrix)."""

        # Trivial color structure: a single constant entry.
        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        # One pass per color-basis row (color_denominators has one entry per row).
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            # Parse the repr of the ColorString into (name, comma-separated args).
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    # Trivial factor: contributes no indices.
                    continue
                if ctype not in ['T', 'Tr' ]:
                    # Matchbox only understands fundamental/trace structures.
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype
                # '0' acts as a separator between consecutive T/Tr factors.
                tmparg += ['0']
                arg +=tmparg
            # Flatten into (row, position) -> index-string entries.
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # NOTE(review): iteration order of a plain dict decides the order
            # of the elseif branches; every (in1,in2) pair is distinct so the
            # generated lookup is order-independent in its result.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
           if (in1.eq.%s.and.in2.eq.%s)then
              out = %s
           """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
           elseif (in1.eq.%s.and.in2.eq.%s)then
              out = %s
           """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            # Close the chain with the fallback branch.
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self,*args,**opts):
        # Matchbox output is compiled externally: compiling here is a no-op.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Adding leading color part of the colorflow.

        Emits the standard JAMP definitions followed by a leading-color-only
        copy (coefficients with no 1/NC suppression) using JAMP_formatLC
        (default 'LN' + JAMP_format)."""

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        # Full-color JAMPs first.
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                                     JAMP_format=JAMP_format,
                                                     AMP_format=AMP_format,
                                                     split=-1)

        # Filter the col_ampls to generate only those without any 1/NC terms
        # (coefficient[3] holds the power of 1/NC — keep only power 0).
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                                     JAMP_format=JAMP_formatLC,
                                                     AMP_format=AMP_format,
                                                     split=-1)

        return text
2624
#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # Template used by write_matrix_element_v4 for each subprocess matrix.f
    matrix_file="matrix_standalone_v4.inc"
2637 - def copy_template(self, model):
2638 """Additional actions needed for setup of Template 2639 """ 2640 2641 super(ProcessExporterFortranMW, self).copy_template(model) 2642 2643 # Add the MW specific file 2644 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2645 pjoin(self.dir_path, 'Source','MadWeight'), True) 2646 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2647 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2648 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2649 pjoin(self.dir_path, 'Source','setrun.f')) 2650 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2651 pjoin(self.dir_path, 'Source','run.inc')) 2652 # File created from Template (Different in some child class) 2653 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2654 self.write_run_config_file(writers.FortranWriter(filename)) 2655 2656 try: 2657 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2658 stdout = os.open(os.devnull, os.O_RDWR), 2659 stderr = os.open(os.devnull, os.O_RDWR), 2660 cwd=self.dir_path) 2661 except OSError: 2662 # Probably madweight already called 2663 pass 2664 2665 # Copy the different python file in the Template 2666 self.copy_python_file() 2667 # create the appropriate cuts.f 2668 self.get_mw_cuts_version() 2669 2670 # add the makefile in Source directory 2671 filename = os.path.join(self.dir_path,'Source','makefile') 2672 self.write_source_makefile(writers.FortranWriter(filename))
2673 2674 2675 2676 2677 #=========================================================================== 2678 # convert_model 2679 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert the UFO model and additionally copy the model source into
        bin/internal/ufomodel (plus the restriction card, if any) so the
        output directory is self-contained.

        NOTE(review): the mutable default arguments are only forwarded, never
        mutated, so the shared-default pitfall does not bite here.
        """

        super(ProcessExporterFortranMW,self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        # Drop compiled/generated files when snapshotting the model sources.
        IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
        try:
            # Remove any stale copy before re-copying.
            shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
        except OSError as error:
            # Directory did not exist yet: nothing to clean.
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                        pjoin(self.dir_path,'bin','internal','ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        # Preserve the restriction used to generate the model, either as a
        # ParamCard object or as a plain file on disk.
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
                             'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
2703 2704 #=========================================================================== 2705 # generate_subprocess_directory 2706 #===========================================================================
2707 - def copy_python_file(self):
2708 """copy the python file require for the Template""" 2709 2710 # madevent interface 2711 cp(_file_path+'/interface/madweight_interface.py', 2712 self.dir_path+'/bin/internal/madweight_interface.py') 2713 cp(_file_path+'/interface/extended_cmd.py', 2714 self.dir_path+'/bin/internal/extended_cmd.py') 2715 cp(_file_path+'/interface/common_run_interface.py', 2716 self.dir_path+'/bin/internal/common_run_interface.py') 2717 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2718 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2719 cp(_file_path+'/iolibs/save_load_object.py', 2720 self.dir_path+'/bin/internal/save_load_object.py') 2721 cp(_file_path+'/madevent/gen_crossxhtml.py', 2722 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2723 cp(_file_path+'/various/FO_analyse_card.py', 2724 self.dir_path+'/bin/internal/FO_analyse_card.py') 2725 cp(_file_path+'/iolibs/file_writers.py', 2726 self.dir_path+'/bin/internal/file_writers.py') 2727 #model file 2728 cp(_file_path+'../models/check_param_card.py', 2729 self.dir_path+'/bin/internal/check_param_card.py') 2730 2731 #madevent file 2732 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2733 cp(_file_path+'/various/lhe_parser.py', 2734 self.dir_path+'/bin/internal/lhe_parser.py') 2735 2736 cp(_file_path+'/various/banner.py', 2737 self.dir_path+'/bin/internal/banner.py') 2738 cp(_file_path+'/various/shower_card.py', 2739 self.dir_path+'/bin/internal/shower_card.py') 2740 cp(_file_path+'/various/cluster.py', 2741 self.dir_path+'/bin/internal/cluster.py') 2742 2743 # logging configuration 2744 cp(_file_path+'/interface/.mg5_logging.conf', 2745 self.dir_path+'/bin/internal/me5_logging.conf') 2746 cp(_file_path+'/interface/coloring_logging.py', 2747 self.dir_path+'/bin/internal/coloring_logging.py')
2748 2749 2750 #=========================================================================== 2751 # Change the version of cuts.f to the one compatible with MW 2752 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None -> write to SubProcesses/cuts.f; a string -> open that
        path; anything else -> treated as an already-open file object.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # nb_if tracks the nesting depth of the if-block opened by the xqcut
        # line; lines are only copied while nb_if == 0.
        # NOTE(review): if the xqcut line itself matches the if(...)then regex
        # below it is counted a second time — this relies on the exact layout
        # of Template/LO/SubProcesses/cuts.f; confirm against that template.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
        cut_bw = .true.
      else
        stop 1
      endif
      return
      end
        """)

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        # MadWeight ships maxparticles.inc instead of genps.inc.
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)
2802 2803 2804 2805 #=========================================================================== 2806 # Make the Helas and Model directories for Standalone directory 2807 #===========================================================================
2808 - def make(self):
2809 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2810 everything for running madweight 2811 """ 2812 2813 source_dir = os.path.join(self.dir_path, "Source") 2814 logger.info("Running make for Helas") 2815 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2816 logger.info("Running make for Model") 2817 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2818 logger.info("Running make for PDF") 2819 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2820 logger.info("Running make for CERNLIB") 2821 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2822 logger.info("Running make for GENERIC") 2823 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2824 logger.info("Running make for blocks") 2825 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2826 logger.info("Running make for tools") 2827 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2828 2829 #=========================================================================== 2830 # Create proc_card_mg5.dat for MadWeight directory 2831 #===========================================================================
2832 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2833 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2834 2835 compiler = {'fortran': mg5options['fortran_compiler'], 2836 'cpp': mg5options['cpp_compiler'], 2837 'f2py': mg5options['f2py_compiler']} 2838 2839 2840 2841 #proc_charac 2842 self.create_proc_charac() 2843 2844 # Write maxparticles.inc based on max of ME's/subprocess groups 2845 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2846 self.write_maxparticles_file(writers.FortranWriter(filename), 2847 matrix_elements) 2848 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2849 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2850 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2851 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2852 2853 self.set_compiler(compiler) 2854 self.make() 2855 2856 # Write command history as proc_card_mg5 2857 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2858 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2859 history.write(output_file) 2860 2861 ProcessExporterFortran.finalize(self, matrix_elements, 2862 history, mg5options, flaglist)
2863 2864 2865 2866 #=========================================================================== 2867 # create the run_card for MW 2868 #===========================================================================
2869 - def create_run_card(self, matrix_elements, history):
2870 """ """ 2871 2872 run_card = banner_mod.RunCard() 2873 2874 # pass to default for MW 2875 run_card["run_tag"] = "\'not_use\'" 2876 run_card["fixed_ren_scale"] = "T" 2877 run_card["fixed_fac_scale"] = "T" 2878 run_card.remove_all_cut() 2879 2880 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2881 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2882 python_template=True) 2883 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2884 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2885 python_template=True)
2886 2887 #=========================================================================== 2888 # export model files 2889 #===========================================================================
2890 - def export_model_files(self, model_path):
2891 """export the model dependent files for V4 model""" 2892 2893 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2894 # Add the routine update_as_param in v4 model 2895 # This is a function created in the UFO 2896 text=""" 2897 subroutine update_as_param() 2898 call setpara('param_card.dat',.false.) 2899 return 2900 end 2901 """ 2902 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2903 ff.write(text) 2904 ff.close() 2905 2906 # Modify setrun.f 2907 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2908 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2909 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2910 fsock.write(text) 2911 fsock.close() 2912 2913 # Modify initialization.f 2914 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2915 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2916 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2917 fsock.write(text) 2918 fsock.close() 2919 2920 2921 self.make_model_symbolic_link()
2922 2923 #=========================================================================== 2924 # generate_subprocess_directory 2925 #===========================================================================
2926 - def generate_subprocess_directory(self, matrix_element, 2927 fortran_model,number):
2928 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2929 including the necessary matrix.f and nexternal.inc files""" 2930 2931 cwd = os.getcwd() 2932 misc.sprint(type(matrix_element)) 2933 # Create the directory PN_xx_xxxxx in the specified path 2934 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2935 "P%s" % matrix_element.get('processes')[0].shell_string()) 2936 2937 try: 2938 os.mkdir(dirpath) 2939 except os.error as error: 2940 logger.warning(error.strerror + " " + dirpath) 2941 2942 #try: 2943 # os.chdir(dirpath) 2944 #except os.error: 2945 # logger.error('Could not cd to directory %s' % dirpath) 2946 # return 0 2947 2948 logger.info('Creating files in directory %s' % dirpath) 2949 2950 # Extract number of external particles 2951 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2952 2953 # Create the matrix.f file and the nexternal.inc file 2954 filename = pjoin(dirpath,'matrix.f') 2955 calls,ncolor = self.write_matrix_element_v4( 2956 writers.FortranWriter(filename), 2957 matrix_element, 2958 fortran_model) 2959 2960 filename = pjoin(dirpath, 'auto_dsig.f') 2961 self.write_auto_dsig_file(writers.FortranWriter(filename), 2962 matrix_element) 2963 2964 filename = pjoin(dirpath, 'configs.inc') 2965 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2966 writers.FortranWriter(filename), 2967 matrix_element) 2968 2969 filename = pjoin(dirpath, 'nexternal.inc') 2970 self.write_nexternal_file(writers.FortranWriter(filename), 2971 nexternal, ninitial) 2972 2973 filename = pjoin(dirpath, 'leshouche.inc') 2974 self.write_leshouche_file(writers.FortranWriter(filename), 2975 matrix_element) 2976 2977 filename = pjoin(dirpath, 'props.inc') 2978 self.write_props_file(writers.FortranWriter(filename), 2979 matrix_element, 2980 s_and_t_channels) 2981 2982 filename = pjoin(dirpath, 'pmass.inc') 2983 self.write_pmass_file(writers.FortranWriter(filename), 2984 matrix_element) 2985 2986 filename = pjoin(dirpath, 
'ngraphs.inc') 2987 self.write_ngraphs_file(writers.FortranWriter(filename), 2988 len(matrix_element.get_all_amplitudes())) 2989 2990 filename = pjoin(dirpath, 'maxamps.inc') 2991 self.write_maxamps_file(writers.FortranWriter(filename), 2992 len(matrix_element.get('diagrams')), 2993 ncolor, 2994 len(matrix_element.get('processes')), 2995 1) 2996 2997 filename = pjoin(dirpath, 'phasespace.inc') 2998 self.write_phasespace_file(writers.FortranWriter(filename), 2999 len(matrix_element.get('diagrams')), 3000 ) 3001 3002 # Generate diagrams 3003 filename = pjoin(dirpath, "matrix.ps") 3004 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3005 get('diagrams'), 3006 filename, 3007 model=matrix_element.get('processes')[0].\ 3008 get('model'), 3009 amplitude='') 3010 logger.info("Generating Feynman diagrams for " + \ 3011 matrix_element.get('processes')[0].nice_string()) 3012 plot.draw() 3013 3014 #import genps.inc and maxconfigs.inc into Subprocesses 3015 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 3016 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 3017 3018 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 3019 3020 for file in linkfiles: 3021 ln('../%s' % file, starting_dir=cwd) 3022 3023 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 3024 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 3025 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 3026 ln('phasespace.inc', '../', log=True, cwd=dirpath) 3027 # Return to original PWD 3028 #os.chdir(cwd) 3029 3030 if not calls: 3031 calls = 0 3032 return calls
3033 3034 #=========================================================================== 3035 # write_matrix_element_v4 3036 #===========================================================================
3037 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3038 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3039 3040 if not matrix_element.get('processes') or \ 3041 not matrix_element.get('diagrams'): 3042 return 0 3043 3044 if writer: 3045 if not isinstance(writer, writers.FortranWriter): 3046 raise writers.FortranWriter.FortranWriterError(\ 3047 "writer not FortranWriter") 3048 3049 # Set lowercase/uppercase Fortran code 3050 writers.FortranWriter.downcase = False 3051 3052 replace_dict = {} 3053 3054 # Extract version number and date from VERSION file 3055 info_lines = self.get_mg5_info_lines() 3056 replace_dict['info_lines'] = info_lines 3057 3058 # Extract process info lines 3059 process_lines = self.get_process_info_lines(matrix_element) 3060 replace_dict['process_lines'] = process_lines 3061 3062 # Set proc_id 3063 replace_dict['proc_id'] = proc_id 3064 3065 # Extract number of external particles 3066 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3067 replace_dict['nexternal'] = nexternal 3068 3069 # Extract ncomb 3070 ncomb = matrix_element.get_helicity_combinations() 3071 replace_dict['ncomb'] = ncomb 3072 3073 # Extract helicity lines 3074 helicity_lines = self.get_helicity_lines(matrix_element) 3075 replace_dict['helicity_lines'] = helicity_lines 3076 3077 # Extract overall denominator 3078 # Averaging initial state color, spin, and identical FS particles 3079 den_factor_line = self.get_den_factor_line(matrix_element) 3080 replace_dict['den_factor_line'] = den_factor_line 3081 3082 # Extract ngraphs 3083 ngraphs = matrix_element.get_number_of_amplitudes() 3084 replace_dict['ngraphs'] = ngraphs 3085 3086 # Extract nwavefuncs 3087 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3088 replace_dict['nwavefuncs'] = nwavefuncs 3089 3090 # Extract ncolor 3091 ncolor = max(1, len(matrix_element.get('color_basis'))) 3092 replace_dict['ncolor'] = ncolor 3093 3094 # Extract color data lines 3095 color_data_lines = self.get_color_data_lines(matrix_element) 3096 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3097 3098 # Extract helas calls 3099 helas_calls = fortran_model.get_matrix_element_calls(\ 3100 matrix_element) 3101 3102 replace_dict['helas_calls'] = "\n".join(helas_calls) 3103 3104 # Extract JAMP lines 3105 jamp_lines = self.get_JAMP_lines(matrix_element) 3106 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3107 3108 replace_dict['template_file'] = os.path.join(_file_path, \ 3109 'iolibs/template_files/%s' % self.matrix_file) 3110 replace_dict['template_file2'] = '' 3111 3112 if writer: 3113 file = open(replace_dict['template_file']).read() 3114 file = file % replace_dict 3115 # Write the file 3116 writer.writelines(file) 3117 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3118 else: 3119 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3120 3121 #=========================================================================== 3122 # write_source_makefile 3123 #===========================================================================
3124 - def write_source_makefile(self, writer):
3125 """Write the nexternal.inc file for madweight""" 3126 3127 3128 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3129 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3130 text = open(path).read() % {'libraries': set_of_lib} 3131 writer.write(text) 3132 3133 return True
3134
3135 - def write_phasespace_file(self, writer, nb_diag):
3136 """ """ 3137 3138 template = """ include 'maxparticles.inc' 3139 integer max_branches 3140 parameter (max_branches=max_particles-1) 3141 integer max_configs 3142 parameter (max_configs=%(nb_diag)s) 3143 3144 c channel position 3145 integer config_pos,perm_pos 3146 common /to_config/config_pos,perm_pos 3147 3148 """ 3149 3150 writer.write(template % {'nb_diag': nb_diag})
3151 3152 3153 #=========================================================================== 3154 # write_auto_dsig_file 3155 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format).

        Fills the auto_dsig_mw.inc template.  Returns 0 for an empty matrix
        element, the replace_dict when no writer is given, and None after a
        successful write.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        # Only decays (1) and collisions (2) are supported.
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id (empty string for a stand-alone process, the process
        # number inside a subprocess group otherwise)
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            # Cuts are applied at group level, not per subprocess.
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            file = open(os.path.join(_file_path, \
                         'iolibs/template_files/auto_dsig_mw.inc')).read()
            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3225 #=========================================================================== 3226 # write_configs_file 3227 #===========================================================================
3228 - def write_configs_file(self, writer, matrix_element):
3229 """Write the configs.inc file for MadEvent""" 3230 3231 # Extract number of external particles 3232 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3233 3234 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3235 mapconfigs = [c[0] for c in configs] 3236 model = matrix_element.get('processes')[0].get('model') 3237 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3238 [[c[1]] for c in configs], 3239 mapconfigs, 3240 nexternal, ninitial,matrix_element, model)
3241 3242 #=========================================================================== 3243 # write_run_configs_file 3244 #===========================================================================
3245 - def write_run_config_file(self, writer):
3246 """Write the run_configs.inc file for MadWeight""" 3247 3248 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3249 text = open(path).read() % {'chanperjob':'5'} 3250 writer.write(text) 3251 return True
3252 3253 #=========================================================================== 3254 # write_configs_file_from_diagrams 3255 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of [s_channels, t_channels] per written config.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Largest-vertex order per config (first non-None diagram of each),
        # used below to keep only configs made of 3-point vertices.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fake PDG code used for multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            # NOTE: empty_verts is aliased into stchannels for the missing
            # diagrams and filled in place (empty_verts[:]) further below.
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel entry: tuple of per-subprocess vertices,
                    # pick the first contributing one.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # "<propagator number> <daughter1> <daughter2>"
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    # NOTE(review): pow_part is never used afterwards.
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                elif verts in tchannels[:-1]:
                    # Last t-channel vertex is trivial and not written out.
                    lines.append(line+" T "+str(last_leg.get('id')))

        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3398
3399 3400 #=============================================================================== 3401 # ProcessExporterFortranME 3402 #=============================================================================== 3403 -class ProcessExporterFortranME(ProcessExporterFortran):
3404 """Class to take care of exporting a set of matrix elements to 3405 MadEvent format.""" 3406 3407 matrix_file = "matrix_madevent_v4.inc" 3408
3409 - def copy_template(self, model):
3410 """Additional actions needed for setup of Template 3411 """ 3412 3413 super(ProcessExporterFortranME, self).copy_template(model) 3414 3415 # File created from Template (Different in some child class) 3416 filename = pjoin(self.dir_path,'Source','run_config.inc') 3417 self.write_run_config_file(writers.FortranWriter(filename)) 3418 3419 # The next file are model dependant (due to SLAH convention) 3420 self.model_name = model.get('name') 3421 # Add the symmetry.f 3422 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3423 self.write_symmetry(writers.FortranWriter(filename)) 3424 # 3425 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3426 self.write_addmothers(writers.FortranWriter(filename)) 3427 # Copy the different python file in the Template 3428 self.copy_python_file()
3429 3430 3431 3432 3433 3434 #=========================================================================== 3435 # generate_subprocess_directory 3436 #===========================================================================
3437 - def copy_python_file(self):
3438 """copy the python file require for the Template""" 3439 3440 # madevent interface 3441 cp(_file_path+'/interface/madevent_interface.py', 3442 self.dir_path+'/bin/internal/madevent_interface.py') 3443 cp(_file_path+'/interface/extended_cmd.py', 3444 self.dir_path+'/bin/internal/extended_cmd.py') 3445 cp(_file_path+'/interface/common_run_interface.py', 3446 self.dir_path+'/bin/internal/common_run_interface.py') 3447 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3448 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3449 cp(_file_path+'/iolibs/save_load_object.py', 3450 self.dir_path+'/bin/internal/save_load_object.py') 3451 cp(_file_path+'/iolibs/file_writers.py', 3452 self.dir_path+'/bin/internal/file_writers.py') 3453 #model file 3454 cp(_file_path+'../models/check_param_card.py', 3455 self.dir_path+'/bin/internal/check_param_card.py') 3456 3457 #copy all the file present in madevent directory 3458 for name in os.listdir(pjoin(_file_path, 'madevent')): 3459 if name not in ['__init__.py'] and name.endswith('.py'): 3460 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3461 3462 #madevent file 3463 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3464 cp(_file_path+'/various/lhe_parser.py', 3465 self.dir_path+'/bin/internal/lhe_parser.py') 3466 cp(_file_path+'/various/banner.py', 3467 self.dir_path+'/bin/internal/banner.py') 3468 cp(_file_path+'/various/histograms.py', 3469 self.dir_path+'/bin/internal/histograms.py') 3470 cp(_file_path+'/various/plot_djrs.py', 3471 self.dir_path+'/bin/internal/plot_djrs.py') 3472 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3473 3474 cp(_file_path+'/various/cluster.py', 3475 self.dir_path+'/bin/internal/cluster.py') 3476 cp(_file_path+'/madevent/combine_runs.py', 3477 self.dir_path+'/bin/internal/combine_runs.py') 3478 # logging configuration 3479 cp(_file_path+'/interface/.mg5_logging.conf', 3480 
self.dir_path+'/bin/internal/me5_logging.conf') 3481 cp(_file_path+'/interface/coloring_logging.py', 3482 self.dir_path+'/bin/internal/coloring_logging.py') 3483 # shower card and FO_analyse_card. 3484 # Although not needed, it is imported by banner.py 3485 cp(_file_path+'/various/shower_card.py', 3486 self.dir_path+'/bin/internal/shower_card.py') 3487 cp(_file_path+'/various/FO_analyse_card.py', 3488 self.dir_path+'/bin/internal/FO_analyse_card.py')
3489 3490
3491 - def convert_model(self, model, wanted_lorentz = [], 3492 wanted_couplings = []):
3493 3494 super(ProcessExporterFortranME,self).convert_model(model, 3495 wanted_lorentz, wanted_couplings) 3496 3497 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3498 try: 3499 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3500 except OSError as error: 3501 pass 3502 model_path = model.get('modelpath') 3503 # This is not safe if there is a '##' or '-' in the path. 3504 shutil.copytree(model_path, 3505 pjoin(self.dir_path,'bin','internal','ufomodel'), 3506 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3507 if hasattr(model, 'restrict_card'): 3508 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3509 'restrict_default.dat') 3510 if isinstance(model.restrict_card, check_param_card.ParamCard): 3511 model.restrict_card.write(out_path) 3512 else: 3513 files.cp(model.restrict_card, out_path)
3514 3515 #=========================================================================== 3516 # export model files 3517 #===========================================================================
3518 - def export_model_files(self, model_path):
3519 """export the model dependent files""" 3520 3521 super(ProcessExporterFortranME,self).export_model_files(model_path) 3522 3523 # Add the routine update_as_param in v4 model 3524 # This is a function created in the UFO 3525 text=""" 3526 subroutine update_as_param() 3527 call setpara('param_card.dat',.false.) 3528 return 3529 end 3530 """ 3531 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3532 ff.write(text) 3533 ff.close() 3534 3535 # Add the symmetry.f 3536 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3537 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3538 3539 # Modify setrun.f 3540 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3541 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3542 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3543 fsock.write(text) 3544 fsock.close() 3545 3546 self.make_model_symbolic_link()
3547 3548 #=========================================================================== 3549 # generate_subprocess_directory 3550 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        matrix_element -- the HelasMatrixElement to export
        fortran_model  -- helas call writer used for the matrix.f body
        me_number      -- sequential number of this matrix element, written
                          to iproc.dat and used as subproc_number in matrix.f

        Returns the number of generated helas calls (0 if none).
        """

        # cwd is kept although the chdir logic below is commented out
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')


        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')



        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # directory may already exist; warn and keep writing into it
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                         matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the channel information reused by the
        # config_* and decayBW/props include files below
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))


        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams
        filename = pjoin(Ppath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        # write_matrix_element_v4 returns 0 when there is nothing to write
        if not calls:
            calls = 0
        return calls
    # Files living in SubProcesses/ that are shared by every P* subprocess
    # directory (presumably linked in by link_files_in_SubProcess, whose
    # definition is collapsed in this listing -- TODO confirm).
    link_Sub_files = ['addmothers.f',
                      'cluster.f',
                      'cluster.inc',
                      'coupl.inc',
                      'cuts.f',
                      'cuts.inc',
                      'genps.f',
                      'genps.inc',
                      'idenparts.f',
                      'initcluster.f',
                      'makefile',
                      'message.inc',
                      'myamp.f',
                      'reweight.f',
                      'run.inc',
                      'maxconfigs.inc',
                      'maxparticles.inc',
                      'run_config.inc',
                      'lhe_event_infos.inc',
                      'setcuts.f',
                      'setscales.f',
                      'sudakov.inc',
                      'symmetry.f',
                      'unwgt.f',
                      'dummy_fct.f'
                      ]
3764 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3765 """Finalize ME v4 directory by creating jpeg diagrams, html 3766 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3767 3768 if 'nojpeg' in flaglist: 3769 makejpg = False 3770 else: 3771 makejpg = True 3772 if 'online' in flaglist: 3773 online = True 3774 else: 3775 online = False 3776 3777 compiler = {'fortran': mg5options['fortran_compiler'], 3778 'cpp': mg5options['cpp_compiler'], 3779 'f2py': mg5options['f2py_compiler']} 3780 3781 # indicate that the output type is not grouped 3782 if not isinstance(self, ProcessExporterFortranMEGroup): 3783 self.proc_characteristic['grouped_matrix'] = False 3784 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3785 3786 modelname = self.opt['model'] 3787 if modelname == 'mssm' or modelname.startswith('mssm-'): 3788 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3789 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3790 check_param_card.convert_to_mg5card(param_card, mg5_param) 3791 check_param_card.check_valid_param_card(mg5_param) 3792 3793 # Add the combine_events.f modify param_card path/number of @X 3794 filename = pjoin(self.dir_path,'Source','combine_events.f') 3795 try: 3796 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3797 except AttributeError: 3798 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3799 nb_proc = len(set(nb_proc)) 3800 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3801 # Write maxconfigs.inc based on max of ME's/subprocess groups 3802 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3803 self.write_maxconfigs_file(writers.FortranWriter(filename), 3804 matrix_elements) 3805 3806 # Write maxparticles.inc based on max of ME's/subprocess groups 3807 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3808 
self.write_maxparticles_file(writers.FortranWriter(filename), 3809 matrix_elements) 3810 3811 # Touch "done" file 3812 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3813 3814 # Check for compiler 3815 self.set_compiler(compiler) 3816 self.set_cpp_compiler(compiler['cpp']) 3817 3818 3819 old_pos = os.getcwd() 3820 subpath = pjoin(self.dir_path, 'SubProcesses') 3821 3822 P_dir_list = [proc for proc in os.listdir(subpath) 3823 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3824 3825 devnull = os.open(os.devnull, os.O_RDWR) 3826 # Convert the poscript in jpg files (if authorize) 3827 if makejpg: 3828 try: 3829 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3830 except Exception, error: 3831 pass 3832 3833 if misc.which('gs'): 3834 logger.info("Generate jpeg diagrams") 3835 for Pdir in P_dir_list: 3836 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3837 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3838 3839 logger.info("Generate web pages") 3840 # Create the WebPage using perl script 3841 3842 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3843 stdout = devnull,cwd=pjoin(self.dir_path)) 3844 3845 #os.chdir(os.path.pardir) 3846 3847 obj = gen_infohtml.make_info_html(self.dir_path) 3848 3849 if online: 3850 nb_channel = obj.rep_rule['nb_gen_diag'] 3851 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3852 #add the information to proc_charac 3853 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3854 3855 # Write command history as proc_card_mg5 3856 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3857 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3858 history.write(output_file) 3859 3860 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3861 stdout = devnull) 3862 3863 #crate the proc_characteristic file 3864 self.create_proc_charac(matrix_elements, history) 3865 3866 # create the run_card 3867 
ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 3868 3869 # Run "make" to generate madevent.tar.gz file 3870 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3871 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3872 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3873 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3874 stdout = devnull, cwd=self.dir_path) 3875 3876 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3877 stdout = devnull, cwd=self.dir_path)
3878 3879 3880 3881 3882 3883 3884 #return to the initial dir 3885 #os.chdir(old_pos) 3886 3887 #=========================================================================== 3888 # write_matrix_element_v4 3889 #===========================================================================
3890 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3891 proc_id = "", config_map = [], subproc_number = ""):
3892 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3893 3894 if not matrix_element.get('processes') or \ 3895 not matrix_element.get('diagrams'): 3896 return 0 3897 3898 if writer: 3899 if not isinstance(writer, writers.FortranWriter): 3900 raise writers.FortranWriter.FortranWriterError(\ 3901 "writer not FortranWriter") 3902 # Set lowercase/uppercase Fortran code 3903 writers.FortranWriter.downcase = False 3904 3905 # The proc prefix is not used for MadEvent output so it can safely be set 3906 # to an empty string. 3907 replace_dict = {'proc_prefix':''} 3908 3909 # Extract helas calls 3910 helas_calls = fortran_model.get_matrix_element_calls(\ 3911 matrix_element) 3912 3913 replace_dict['helas_calls'] = "\n".join(helas_calls) 3914 3915 3916 # Extract version number and date from VERSION file 3917 info_lines = self.get_mg5_info_lines() 3918 replace_dict['info_lines'] = info_lines 3919 3920 # Extract process info lines 3921 process_lines = self.get_process_info_lines(matrix_element) 3922 replace_dict['process_lines'] = process_lines 3923 3924 # Set proc_id 3925 replace_dict['proc_id'] = proc_id 3926 3927 # Extract ncomb 3928 ncomb = matrix_element.get_helicity_combinations() 3929 replace_dict['ncomb'] = ncomb 3930 3931 # Extract helicity lines 3932 helicity_lines = self.get_helicity_lines(matrix_element) 3933 replace_dict['helicity_lines'] = helicity_lines 3934 3935 # Extract IC line 3936 ic_line = self.get_ic_line(matrix_element) 3937 replace_dict['ic_line'] = ic_line 3938 3939 # Extract overall denominator 3940 # Averaging initial state color, spin, and identical FS particles 3941 den_factor_line = self.get_den_factor_line(matrix_element) 3942 replace_dict['den_factor_line'] = den_factor_line 3943 3944 # Extract ngraphs 3945 ngraphs = matrix_element.get_number_of_amplitudes() 3946 replace_dict['ngraphs'] = ngraphs 3947 3948 # Extract ndiags 3949 ndiags = len(matrix_element.get('diagrams')) 3950 replace_dict['ndiags'] = ndiags 3951 3952 # 
Set define_iconfigs_lines 3953 replace_dict['define_iconfigs_lines'] = \ 3954 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3955 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3956 3957 if proc_id: 3958 # Set lines for subprocess group version 3959 # Set define_iconfigs_lines 3960 replace_dict['define_iconfigs_lines'] += \ 3961 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3962 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3963 # Set set_amp2_line 3964 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3965 proc_id 3966 else: 3967 # Standard running 3968 # Set set_amp2_line 3969 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3970 3971 # Extract nwavefuncs 3972 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3973 replace_dict['nwavefuncs'] = nwavefuncs 3974 3975 # Extract ncolor 3976 ncolor = max(1, len(matrix_element.get('color_basis'))) 3977 replace_dict['ncolor'] = ncolor 3978 3979 # Extract color data lines 3980 color_data_lines = self.get_color_data_lines(matrix_element) 3981 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3982 3983 3984 # Set the size of Wavefunction 3985 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 3986 replace_dict['wavefunctionsize'] = 18 3987 else: 3988 replace_dict['wavefunctionsize'] = 6 3989 3990 # Extract amp2 lines 3991 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 3992 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 3993 3994 # The JAMP definition depends on the splitting order 3995 split_orders=matrix_element.get('processes')[0].get('split_orders') 3996 if len(split_orders)>0: 3997 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 3998 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 3999 matrix_element.get('processes')[0],squared_orders) 4000 else: 4001 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4002 # set all amplitude order to weight 1 and only one squared order 4003 # 
contribution which is of course ALL_ORDERS=2. 4004 squared_orders = [(2,),] 4005 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4006 replace_dict['chosen_so_configs'] = '.TRUE.' 4007 4008 replace_dict['nAmpSplitOrders']=len(amp_orders) 4009 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4010 replace_dict['split_order_str_list']=str(split_orders) 4011 replace_dict['nSplitOrders']=max(len(split_orders),1) 4012 amp_so = self.get_split_orders_lines( 4013 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4014 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4015 replace_dict['ampsplitorders']='\n'.join(amp_so) 4016 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4017 4018 4019 # Extract JAMP lines 4020 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4021 jamp_lines = self.get_JAMP_lines_split_order(\ 4022 matrix_element,amp_orders,split_order_names= 4023 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4024 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4025 4026 replace_dict['template_file'] = pjoin(_file_path, \ 4027 'iolibs/template_files/%s' % self.matrix_file) 4028 replace_dict['template_file2'] = pjoin(_file_path, \ 4029 'iolibs/template_files/split_orders_helping_functions.inc') 4030 if writer: 4031 file = open(replace_dict['template_file']).read() 4032 file = file % replace_dict 4033 # Add the split orders helper functions. 4034 file = file + '\n' + open(replace_dict['template_file2'])\ 4035 .read()%replace_dict 4036 # Write the file 4037 writer.writelines(file) 4038 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor 4039 else: 4040 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor) 4041 return replace_dict
4042 4043 #=========================================================================== 4044 # write_auto_dsig_file 4045 #===========================================================================
4046 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4047 """Write the auto_dsig.f file for the differential cross section 4048 calculation, includes pdf call information""" 4049 4050 if not matrix_element.get('processes') or \ 4051 not matrix_element.get('diagrams'): 4052 return 0 4053 4054 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4055 self.proc_characteristic['ninitial'] = ninitial 4056 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4057 4058 # Add information relevant for MLM matching: 4059 # Maximum QCD power in all the contributions 4060 max_qcd_order = 0 4061 for diag in matrix_element.get('diagrams'): 4062 orders = diag.calculate_orders() 4063 if 'QCD' in orders: 4064 max_qcd_order = max(max_qcd_order,orders['QCD']) 4065 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4066 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4067 proc.get('model').get_particle(id).get('color')>1]) 4068 for proc in matrix_element.get('processes')) 4069 # Maximum number of final state light jets to be matched 4070 self.proc_characteristic['max_n_matched_jets'] = max( 4071 self.proc_characteristic['max_n_matched_jets'], 4072 min(max_qcd_order,max_n_light_final_partons)) 4073 4074 # List of default pdgs to be considered for the CKKWl merging cut 4075 self.proc_characteristic['colored_pdgs'] = \ 4076 sorted(list(set([abs(p.get('pdg_code')) for p in 4077 matrix_element.get('processes')[0].get('model').get('particles') if 4078 p.get('color')>1]))) 4079 4080 if ninitial < 1 or ninitial > 2: 4081 raise writers.FortranWriter.FortranWriterError, \ 4082 """Need ninitial = 1 or 2 to write auto_dsig file""" 4083 4084 replace_dict = {} 4085 4086 # Extract version number and date from VERSION file 4087 info_lines = self.get_mg5_info_lines() 4088 replace_dict['info_lines'] = info_lines 4089 4090 # Extract process info lines 4091 process_lines = self.get_process_info_lines(matrix_element) 4092 replace_dict['process_lines'] = process_lines 4093 
4094 # Set proc_id 4095 replace_dict['proc_id'] = proc_id 4096 replace_dict['numproc'] = 1 4097 4098 # Set dsig_line 4099 if ninitial == 1: 4100 # No conversion, since result of decay should be given in GeV 4101 dsig_line = "pd(0)*dsiguu" 4102 else: 4103 # Convert result (in GeV) to pb 4104 dsig_line = "pd(0)*conv*dsiguu" 4105 4106 replace_dict['dsig_line'] = dsig_line 4107 4108 # Extract pdf lines 4109 pdf_vars, pdf_data, pdf_lines = \ 4110 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4111 replace_dict['pdf_vars'] = pdf_vars 4112 replace_dict['pdf_data'] = pdf_data 4113 replace_dict['pdf_lines'] = pdf_lines 4114 4115 # Lines that differ between subprocess group and regular 4116 if proc_id: 4117 replace_dict['numproc'] = int(proc_id) 4118 replace_dict['passcuts_begin'] = "" 4119 replace_dict['passcuts_end'] = "" 4120 # Set lines for subprocess group version 4121 # Set define_iconfigs_lines 4122 replace_dict['define_subdiag_lines'] = \ 4123 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4124 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4125 replace_dict['cutsdone'] = "" 4126 else: 4127 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4128 replace_dict['passcuts_end'] = "ENDIF" 4129 replace_dict['define_subdiag_lines'] = "" 4130 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 4131 4132 if not isinstance(self, ProcessExporterFortranMEGroup): 4133 ncomb=matrix_element.get_helicity_combinations() 4134 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4135 else: 4136 replace_dict['read_write_good_hel'] = "" 4137 4138 context = {'read_write_good_hel':True} 4139 4140 if writer: 4141 file = open(pjoin(_file_path, \ 4142 'iolibs/template_files/auto_dsig_v4.inc')).read() 4143 file = file % replace_dict 4144 4145 # Write the file 4146 writer.writelines(file, context=context) 4147 else: 4148 return replace_dict, context
4149 #=========================================================================== 4150 # write_coloramps_file 4151 #===========================================================================
4152 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4153 """Write the coloramps.inc file for MadEvent""" 4154 4155 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4156 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4157 (max(len(matrix_element.get('color_basis').keys()), 1), 4158 len(mapconfigs))) 4159 4160 4161 # Write the file 4162 writer.writelines(lines) 4163 4164 return True
4165 4166 #=========================================================================== 4167 # write_colors_file 4168 #===========================================================================
4169 - def write_colors_file(self, writer, matrix_elements):
4170 """Write the get_color.f file for MadEvent, which returns color 4171 for all particles used in the matrix element.""" 4172 4173 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4174 matrix_elements = [matrix_elements] 4175 4176 model = matrix_elements[0].get('processes')[0].get('model') 4177 4178 # We need the both particle and antiparticle wf_ids, since the identity 4179 # depends on the direction of the wf. 4180 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4181 for wf in d.get('wavefunctions')],[]) \ 4182 for d in me.get('diagrams')], []) \ 4183 for me in matrix_elements], [])) 4184 4185 leg_ids = set(sum([sum([sum([[l.get('id'), 4186 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4187 for l in p.get_legs_with_decays()], []) \ 4188 for p in me.get('processes')], []) \ 4189 for me in matrix_elements], [])) 4190 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4191 4192 lines = """function get_color(ipdg) 4193 implicit none 4194 integer get_color, ipdg 4195 4196 if(ipdg.eq.%d)then 4197 get_color=%d 4198 return 4199 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4200 4201 for part_id in particle_ids[1:]: 4202 lines += """else if(ipdg.eq.%d)then 4203 get_color=%d 4204 return 4205 """ % (part_id, model.get_particle(part_id).get_color()) 4206 # Dummy particle for multiparticle vertices with pdg given by 4207 # first code not in the model 4208 lines += """else if(ipdg.eq.%d)then 4209 c This is dummy particle used in multiparticle vertices 4210 get_color=2 4211 return 4212 """ % model.get_first_non_pdg() 4213 lines += """else 4214 write(*,*)'Error: No color given for pdg ',ipdg 4215 get_color=0 4216 return 4217 endif 4218 end 4219 """ 4220 4221 # Write the file 4222 writer.writelines(lines) 4223 4224 return True
4225 4226 #=========================================================================== 4227 # write_config_nqcd_file 4228 #===========================================================================
4229 - def write_config_nqcd_file(self, writer, nqcd_list):
4230 """Write the config_nqcd.inc with the number of QCD couplings 4231 for each config""" 4232 4233 lines = [] 4234 for iconf, n in enumerate(nqcd_list): 4235 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4236 4237 # Write the file 4238 writer.writelines(lines) 4239 4240 return True
4241 4242 #=========================================================================== 4243 # write_maxconfigs_file 4244 #===========================================================================
4245 - def write_maxconfigs_file(self, writer, matrix_elements):
4246 """Write the maxconfigs.inc file for MadEvent""" 4247 4248 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4249 maxconfigs = max([me.get_num_configs() for me in \ 4250 matrix_elements.get('matrix_elements')]) 4251 else: 4252 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4253 4254 lines = "integer lmaxconfigs\n" 4255 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4256 4257 # Write the file 4258 writer.writelines(lines) 4259 4260 return True
4261 4262 #=========================================================================== 4263 # read_write_good_hel 4264 #===========================================================================
4265 - def read_write_good_hel(self, ncomb):
4266 """return the code to read/write the good_hel common_block""" 4267 4268 convert = {'ncomb' : ncomb} 4269 output = """ 4270 subroutine write_good_hel(stream_id) 4271 implicit none 4272 integer stream_id 4273 INTEGER NCOMB 4274 PARAMETER ( NCOMB=%(ncomb)d) 4275 LOGICAL GOODHEL(NCOMB) 4276 INTEGER NTRY 4277 common/BLOCK_GOODHEL/NTRY,GOODHEL 4278 write(stream_id,*) GOODHEL 4279 return 4280 end 4281 4282 4283 subroutine read_good_hel(stream_id) 4284 implicit none 4285 include 'genps.inc' 4286 integer stream_id 4287 INTEGER NCOMB 4288 PARAMETER ( NCOMB=%(ncomb)d) 4289 LOGICAL GOODHEL(NCOMB) 4290 INTEGER NTRY 4291 common/BLOCK_GOODHEL/NTRY,GOODHEL 4292 read(stream_id,*) GOODHEL 4293 NTRY = MAXTRIES + 1 4294 return 4295 end 4296 4297 subroutine init_good_hel() 4298 implicit none 4299 INTEGER NCOMB 4300 PARAMETER ( NCOMB=%(ncomb)d) 4301 LOGICAL GOODHEL(NCOMB) 4302 INTEGER NTRY 4303 INTEGER I 4304 4305 do i=1,NCOMB 4306 GOODHEL(I) = .false. 4307 enddo 4308 NTRY = 0 4309 end 4310 4311 integer function get_maxsproc() 4312 implicit none 4313 get_maxsproc = 1 4314 return 4315 end 4316 4317 """ % convert 4318 4319 return output
4320 4321 #=========================================================================== 4322 # write_config_subproc_map_file 4323 #===========================================================================
def write_config_subproc_map_file(self, writer, s_and_t_channels):
    """Write a dummy config_subproc.inc file for MadEvent"""

    # One trivial entry per configuration: every config maps to
    # (the single) subprocess diagram 1.
    lines = ["DATA CONFSUB(1,%d)/1/" % (iconfig + 1)
             for iconfig in range(len(s_and_t_channels))]

    # Write the file
    writer.writelines(lines)

    return True
4337 4338 #=========================================================================== 4339 # write_configs_file 4340 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent"""

    # Extract number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    model = matrix_element.get('processes')[0].get('model')
    diagrams = matrix_element.get('diagrams')
    # Configs are numbered 1..n, one per diagram
    mapconfigs = list(range(1, len(diagrams) + 1))
    return mapconfigs, self.write_configs_file_from_diagrams(
        writer,
        [[diag] for diag in diagrams],
        mapconfigs,
        nexternal, ninitial,
        model)
4356 4357 #=========================================================================== 4358 # write_run_configs_file 4359 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent"""

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_run_config.inc')

    # Loop-induced processes are heavier per channel, so run one
    # channel per job instead of five.
    job_per_chan = 1 if self.proc_characteristic['loop_induced'] else 5

    if not writer:
        return {'chanperjob': job_per_chan}
    writer.write(open(path).read() % {'chanperjob': job_per_chan})
    return True
4376 4377 #=========================================================================== 4378 # write_configs_file_from_diagrams 4379 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns (s_and_t_channels, nqcd_list): per written config, the
    [s-channels, t-channels] of the first contributing subprocess, and
    the number of QCD couplings of the first contributing diagram.
    """

    lines = []

    s_and_t_channels = []

    nqcd_list = []

    # Largest vertex size per config (from the first contributing
    # diagram); configs whose diagrams exceed the minimum are skipped
    # below, so only the smallest-multiplicity vertices are written.
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # A PDG code guaranteed not to clash with any real particle, used
    # to label internal dummy propagators.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model,
                                                       new_pdg))
            else:
                # Placeholder shared list; resized in-place below so all
                # entries stay in sync.
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            # In-place resize so every (empty_verts, None) placeholder
            # appended above sees the same padding.
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            # NOTE(review): relies on zip() returning a list (Python 2);
            # under Python 3 the concatenation below would fail.
            schannels = zip(*[s for s,t in stchannels])
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))
        # Number of QCD couplings in this diagram (taken from the first
        # contributing subprocess diagram; 0 if QCD order absent)
        nqcd = 0
        for h in helas_diags:
            if h:
                try:
                    nqcd = h.calculate_orders()['QCD']
                except KeyError:
                    pass
                break
            else:
                continue

        nqcd_list.append(nqcd)

        for verts in allchannels:
            if verts in schannels:
                # s-channel entry: tuple over subprocesses; pick the
                # first real vertex as the representative
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 if absent)
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                # t-channel propagator (last t-channel entry is skipped)
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels, nqcd_list
4513 4514 #=========================================================================== 4515 # write_decayBW_file 4516 #===========================================================================
def write_decayBW_file(self, writer, s_and_t_channels):
    """Write the decayBW.inc file for MadEvent"""

    # gForceBW code per onshell flag (mapping kept verbatim from the
    # original: None -> "0", True -> "1", False -> "2")
    onshell_code = {None: "0", True: "1", False: "2"}

    lines = []
    for iconf, config in enumerate(s_and_t_channels):
        schannels = config[0]
        for vertex in schannels:
            # The resulting (last) leg of the vertex carries the
            # onshell flag telling whether it comes from a decay
            prop = vertex.get('legs')[-1]
            lines.append("data gForceBW(%d,%d)/%s/"
                         % (prop.get('number'), iconf + 1,
                            onshell_code[prop.get('onshell')]))

    # Write the file
    writer.writelines(lines)

    return True
4538 4539 #=========================================================================== 4540 # write_dname_file 4541 #===========================================================================
def write_dname_file(self, writer, dir_name):
    """Write the dname.mg file for MG4"""

    # Single line identifying the subprocess directory
    writer.write("DIRNAME=%s\n" % dir_name)

    return True
4551 4552 #=========================================================================== 4553 # write_driver 4554 #===========================================================================
def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
    """Write the SubProcess/driver.f file for MG4"""

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_driver.f')

    # mssm-type models read their parameters from a converted card
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # Requiring each helicity configuration to be probed by 10 points for
    # matrix element before using the resulting grid for MC over helicity
    # sampling.
    # We multiply this by 2 because each grouped subprocess is called at
    # most twice for each IMIRROR.
    replace_dict = {
        'param_card_name': card,
        'ncomb': ncomb,
        'hel_init_points': n_grouped_proc * 10 * 2,
    }
    # Extra setpara argument only needed for non-v5 models
    replace_dict['secondparam'] = '' if v5 else ',.true.'

    if not writer:
        return replace_dict
    writer.write(open(path).read() % replace_dict)
    return True
4583 4584 #=========================================================================== 4585 # write_addmothers 4586 #===========================================================================
def write_addmothers(self, writer):
    """Write the SubProcess/addmothers.f"""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'addmothers.f')
    # In non-grouped mode the config index variable is diag_number
    writer.write(open(template).read() % {'iconfig': 'diag_number'})
    return True
4596 4597 4598 #=========================================================================== 4599 # write_combine_events 4600 #===========================================================================
def write_combine_events(self, writer, nb_proc=100):
    """Write the SubProcess/combine_events.f file for MG4"""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_combine_events.f')

    # mssm-type models read their parameters from a converted card
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # maxpup = number of @X in the process card, plus one as a safety
    # margin (not strictly needed, kept from the original author)
    writer.write(open(template).read()
                 % {'param_card_name': card, 'maxpup': nb_proc + 1})

    return True
4618 4619 4620 #=========================================================================== 4621 # write_symmetry 4622 #===========================================================================
def write_symmetry(self, writer, v5=True):
    """Write the SubProcess/symmetry.f file for ME"""

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_symmetry.f')

    # mssm-type models read their parameters from a converted card
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # Extra setpara argument only needed for non-v5 models
    replace_dict = {'param_card_name': card,
                    'setparasecondarg': '' if v5 else ',.true.'}

    if not writer:
        return replace_dict
    writer.write(open(path).read() % replace_dict)
    return True
4645 4646 4647 4648 #=========================================================================== 4649 # write_iproc_file 4650 #===========================================================================
def write_iproc_file(self, writer, me_number):
    """Write the iproc.dat file for MG4"""

    # The file holds the 1-based process number
    text = "%d" % (me_number + 1)

    # Let the writer split/format the line, then emit each piece
    for chunk in writer.write_line(text):
        writer.write(chunk)
    return True
4659 4660 #=========================================================================== 4661 # write_mg_sym_file 4662 #===========================================================================
def write_mg_sym_file(self, writer, matrix_element):
    """Write the mg.sym file for MadEvent.

    The file lists, for each final-state particle id appearing more
    than once, the number of identical particles and their (1-based)
    positions in the full process including decays.

    Fix: the original used len(filter(...)) and deleted dict keys while
    iterating dict.keys(), both of which break under Python 3; list
    comprehensions and a rebuilt dict behave identically on Python 2.
    """

    lines = []

    # Extract process with all decays included
    final_legs = [leg for leg in
                  matrix_element.get('processes')[0].get_legs_with_decays()
                  if leg.get('state') == True]

    ninitial = len([leg for leg in
                    matrix_element.get('processes')[0].get('legs')
                    if leg.get('state') == False])

    # Extract identical particle info: particle id -> list of positions
    identical_indices = {}
    for i, leg in enumerate(final_legs):
        identical_indices.setdefault(leg.get('id'), []).append(
            i + ninitial + 1)

    # Keep only ids which appear more than once
    identical_indices = dict((key, positions) for key, positions in
                             identical_indices.items()
                             if len(positions) >= 2)

    # Write mg.sym file: count of groups, then for each group its size
    # followed by the positions
    lines.append(str(len(identical_indices)))
    for key in identical_indices.keys():
        lines.append(str(len(identical_indices[key])))
        for number in identical_indices[key]:
            lines.append(str(number))

    # Write the file
    writer.writelines(lines)

    return True
4701 4702 #=========================================================================== 4703 # write_mg_sym_file 4704 #===========================================================================
def write_default_mg_sym_file(self, writer):
    """Write a trivial mg.sym file for MadEvent (no identical particles)."""

    # "0" means zero groups of identical final-state particles
    writer.writelines("0")

    return True
4714 4715 #=========================================================================== 4716 # write_ncombs_file 4717 #===========================================================================
def write_ncombs_file(self, writer, nexternal):
    """Write the ncombs.inc file for MadEvent.

    Defines n_max_cl = 2^nexternal, used for clustering.

    Fix: the original stored the text in a local named ``file``, which
    shadows the builtin; renamed to ``text`` (output is unchanged).
    """

    # ncomb (used for clustering) is 2^nexternal
    text = " integer n_max_cl\n"
    text += "parameter (n_max_cl=%d)" % (2 ** nexternal)

    # Write the file
    writer.writelines(text)

    return True
4729 4730 #=========================================================================== 4731 # write_processes_file 4732 #===========================================================================
def write_processes_file(self, writer, subproc_group):
    """Write the processes.dat file with info about the subprocesses
    in this group.

    One line per matrix element: a left-padded 1-based index followed
    by the comma-separated process strings; each is followed by a
    "mirror ..." line listing the initial-state-swapped processes, or
    "mirror none".
    """

    lines = []

    for ime, me in \
        enumerate(subproc_group.get('matrix_elements')):
        # Pad the index to a fixed 7-character column
        lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))),
                                ",".join(p.base_string() for p in \
                                         me.get('processes'))))
        if me.get('has_mirror_process'):
            # Shallow-copy the processes and swap the two initial-state
            # legs to obtain the mirror process strings.
            # NOTE(review): copy.copy is shallow — proc.set presumably
            # replaces the legs list without touching the originals;
            # confirm Process.set semantics.
            mirror_procs = [copy.copy(p) for p in me.get('processes')]
            for proc in mirror_procs:
                legs = copy.copy(proc.get('legs_with_decays'))
                legs.insert(0, legs.pop(1))
                proc.set("legs_with_decays", legs)
            lines.append("mirror %s" % ",".join(p.base_string() for p in \
                                                mirror_procs))
        else:
            lines.append("mirror none")

    # Write the file
    writer.write("\n".join(lines))

    return True
4759 4760 #=========================================================================== 4761 # write_symswap_file 4762 #===========================================================================
def write_symswap_file(self, writer, ident_perms):
    """Write the file symswap.inc for MG4 by comparing diagrams using
    the internal matrix element value functionality."""

    # One DATA line per identical-particle permutation (used to permute
    # the external leg momenta); permutations are 0-based on input and
    # written 1-based.
    lines = ["data (isym(i,%d),i=1,nexternal)/%s/"
             % (iperm + 1, ",".join(str(idx + 1) for idx in perm))
             for iperm, perm in enumerate(ident_perms)]
    lines.append("data nsym/%d/" % len(ident_perms))

    # Write the file
    writer.writelines(lines)

    return True
4780 4781 #=========================================================================== 4782 # write_symfact_file 4783 #===========================================================================
def write_symfact_file(self, writer, symmetry):
    """Write the symfact.dat file for MG4 by comparing diagrams using
    the internal matrix element value functionality.

    One line per non-zero symmetry entry: "<config> <factor>", columns
    sized to fit the largest config index (minimum width 2).

    Fix: guard against an empty symmetry list — math.log10(0) raises
    ValueError; an empty list now just writes the trailing newline.
    """

    # Column width for the config index (at least 2 characters)
    if symmetry:
        pos = max(2, int(math.ceil(math.log10(len(symmetry)))))
    else:
        pos = 2
    form = "%" + str(pos) + "r %" + str(pos + 1) + "r"
    # Write out lines for symfact.dat (entries with factor 0 are
    # dropped — they correspond to configs folded into another one)
    lines = [form % (i + 1, s) for i, s in enumerate(symmetry) if s != 0]
    # Write the file
    writer.write('\n'.join(lines))
    writer.write('\n')

    return True
4798 4799 #=========================================================================== 4800 # write_symperms_file 4801 #===========================================================================
def write_symperms_file(self, writer, perms):
    """Write the symperms.inc file for subprocess group, used for
    symmetric configurations"""

    # One DATA line per permutation; 0-based on input, written 1-based
    lines = ["data (perms(i,%d),i=1,nexternal)/%s/"
             % (iperm + 1, ",".join(str(idx + 1) for idx in perm))
             for iperm, perm in enumerate(perms)]

    # Write the file
    writer.writelines(lines)

    return True
4815 4816 #=========================================================================== 4817 # write_subproc 4818 #===========================================================================
def write_subproc(self, writer, subprocdir):
    """Append this subprocess to the subproc.mg file for MG4"""

    # One directory name per line
    writer.write("%s\n" % subprocdir)

    return True
4826
4827 #=============================================================================== 4828 # ProcessExporterFortranMEGroup 4829 #=============================================================================== 4830 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4831 """Class to take care of exporting a set of matrix elements to 4832 MadEvent subprocess group format.""" 4833 4834 matrix_file = "matrix_madevent_group_v4.inc" 4835 grouped_mode = 'madevent' 4836 #=========================================================================== 4837 # generate_subprocess_directory 4838 #===========================================================================
def generate_subprocess_directory(self, subproc_group,
                                  fortran_model,
                                  group_number):
    """Generate the Pn directory for a subprocess group in MadEvent,
    including the necessary matrix_N.f files, configs.inc and various
    other helper files.

    Returns the total number of helicity-amplitude calls for the group
    (0 if the directory could not be entered).

    NOTE(review): this method chdir's into the new directory and back;
    an exception midway leaves the process in a changed cwd — confirm
    callers tolerate that.
    """

    assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
        "subproc_group object not SubProcessGroup"

    if not self.model:
        self.model = subproc_group.get('matrix_elements')[0].\
            get('processes')[0].get('model')

    cwd = os.getcwd()
    path = pjoin(self.dir_path, 'SubProcesses')

    os.chdir(path)
    pathdir = os.getcwd()

    # Create the directory PN in the specified path
    subprocdir = "P%d_%s" % (subproc_group.get('number'),
                             subproc_group.get('name'))
    try:
        os.mkdir(subprocdir)
    except os.error as error:
        # Directory may already exist; warn and reuse it
        logger.warning(error.strerror + " " + subprocdir)

    try:
        os.chdir(subprocdir)
    except os.error:
        logger.error('Could not cd to directory %s' % subprocdir)
        return 0

    logger.info('Creating files in directory %s' % subprocdir)

    # Create the matrix.f files, auto_dsig.f files and all inc files
    # for all subprocesses in the group

    maxamps = 0
    maxflows = 0
    tot_calls = 0

    matrix_elements = subproc_group.get('matrix_elements')

    # Add the driver.f, all grouped ME's must share the same number of
    # helicity configuration
    ncomb = matrix_elements[0].get_helicity_combinations()
    for me in matrix_elements[1:]:
        if ncomb!=me.get_helicity_combinations():
            raise MadGraph5Error, "All grouped processes must share the "+\
                "same number of helicity configurations."

    filename = 'driver.f'
    self.write_driver(writers.FortranWriter(filename),ncomb,
                      n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

    # One matrix<N>.f / auto_dsig<N>.f / diagram drawing per subprocess
    for ime, matrix_element in \
        enumerate(matrix_elements):
        filename = 'matrix%d.f' % (ime+1)
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                                         matrix_element,
                                         fortran_model,
                                         proc_id=str(ime+1),
                                         config_map=subproc_group.get('diagram_maps')[ime],
                                         subproc_number=group_number)

        filename = 'auto_dsig%d.f' % (ime+1)
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  str(ime+1))

        # Keep track of needed quantities
        tot_calls += int(calls)
        maxflows = max(maxflows, ncolor)
        maxamps = max(maxamps, len(matrix_element.get('diagrams')))

        # Draw diagrams
        filename = "matrix%d.ps" % (ime+1)
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('diagrams'),
                                          filename,
                                          model = \
                                          matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

    # Extract number of external particles
    # NOTE(review): matrix_element here is the LAST element of the loop
    # above; presumably all grouped MEs share nexternal — confirm.
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Generate a list of diagrams corresponding to each configuration
    # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
    # If a subprocess has no diagrams for this config, the number is 0
    subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

    filename = 'auto_dsig.f'
    self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                    subproc_group)

    filename = 'coloramps.inc'
    self.write_coloramps_file(writers.FortranWriter(filename),
                              subproc_diagrams_for_config,
                              maxflows,
                              matrix_elements)

    filename = 'get_color.f'
    self.write_colors_file(writers.FortranWriter(filename),
                           matrix_elements)

    filename = 'config_subproc_map.inc'
    self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                       subproc_diagrams_for_config)

    filename = 'configs.inc'
    nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
        writers.FortranWriter(filename),
        subproc_group,
        subproc_diagrams_for_config)

    filename = 'config_nqcd.inc'
    self.write_config_nqcd_file(writers.FortranWriter(filename),
                                nqcd_list)

    filename = 'decayBW.inc'
    self.write_decayBW_file(writers.FortranWriter(filename),
                            s_and_t_channels)

    filename = 'dname.mg'
    self.write_dname_file(writers.FortranWriter(filename),
                          subprocdir)

    filename = 'iproc.dat'
    self.write_iproc_file(writers.FortranWriter(filename),
                          group_number)

    filename = 'leshouche.inc'
    self.write_leshouche_file(writers.FortranWriter(filename),
                              subproc_group)

    filename = 'maxamps.inc'
    self.write_maxamps_file(writers.FortranWriter(filename),
                            maxamps,
                            maxflows,
                            max([len(me.get('processes')) for me in \
                                 matrix_elements]),
                            len(matrix_elements))

    # Note that mg.sym is not relevant for this case
    filename = 'mg.sym'
    self.write_default_mg_sym_file(writers.FortranWriter(filename))

    filename = 'mirrorprocs.inc'
    self.write_mirrorprocs(writers.FortranWriter(filename),
                           subproc_group)

    filename = 'ncombs.inc'
    self.write_ncombs_file(writers.FortranWriter(filename),
                           nexternal)

    filename = 'nexternal.inc'
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = 'ngraphs.inc'
    self.write_ngraphs_file(writers.FortranWriter(filename),
                            nconfigs)

    filename = 'pmass.inc'
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = 'props.inc'
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    filename = 'processes.dat'
    files.write_to_file(filename,
                        self.write_processes_file,
                        subproc_group)

    # Find config symmetries and permutations
    symmetry, perms, ident_perms = \
        diagram_symmetry.find_symmetry(subproc_group)

    filename = 'symswap.inc'
    self.write_symswap_file(writers.FortranWriter(filename),
                            ident_perms)

    filename = 'symfact_orig.dat'
    self.write_symfact_file(open(filename, 'w'), symmetry)

    filename = 'symperms.inc'
    self.write_symperms_file(writers.FortranWriter(filename),
                             perms)

    # Generate jpgs -> pass in make_html
    #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

    self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

    #import nexternal/leshouch in Source
    ln('nexternal.inc', '../../Source', log=False)
    ln('leshouche.inc', '../../Source', log=False)
    ln('maxamps.inc', '../../Source', log=False)

    # Return to SubProcesses dir)
    os.chdir(pathdir)

    # Add subprocess to subproc.mg
    filename = 'subproc.mg'
    files.append_to_file(filename,
                         self.write_subproc,
                         subprocdir)

    # Return to original dir
    os.chdir(cwd)

    if not tot_calls:
        tot_calls = 0
    return tot_calls
5064 5065 #=========================================================================== 5066 # write_super_auto_dsig_file 5067 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write the auto_dsig.f file selecting between the subprocesses
    in subprocess group mode.

    Fills the super_auto_dsig_group_v4.inc template. If writer is
    falsy, returns the replacement dictionary instead of writing.
    """

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    matrix_elements = subproc_group.get('matrix_elements')

    # Extract process info lines
    process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                               matrix_elements])
    replace_dict['process_lines'] = process_lines

    nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal

    # NOTE(review): factor 2 presumably accounts for the IMIRROR
    # doubling of each subprocess — confirm against the template.
    replace_dict['nsprocs'] = 2*len(matrix_elements)

    # Generate dsig definition line
    dsig_def_line = "DOUBLE PRECISION " + \
                    ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                              range(len(matrix_elements))])
    replace_dict["dsig_def_line"] = dsig_def_line

    # Generate dsig process lines: dispatch on IPROC to the right DSIGn
    call_dsig_proc_lines = []
    for iproc in range(len(matrix_elements)):
        call_dsig_proc_lines.append(\
            "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
            {"num": iproc + 1,
             "proc": matrix_elements[iproc].get('processes')[0].base_string()})
    replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

    # Good-helicity bookkeeping routines, shared ncomb across the group
    ncomb=matrix_elements[0].get_helicity_combinations()
    replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

    if writer:
        file = open(pjoin(_file_path, \
                          'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
        file = file % replace_dict

        # Write the file
        writer.writelines(file)
    else:
        return replace_dict
5117 5118 #=========================================================================== 5119 # write_mirrorprocs 5120 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write the mirrorprocs.inc file determining which processes have
    IS mirror process in subprocess group mode."""

    flag = {True: '.true.', False: '.false.'}
    matrix_elements = subproc_group.get('matrix_elements')
    # Single DATA statement: one logical per matrix element
    values = ",".join(flag[me.get('has_mirror_process')]
                      for me in matrix_elements)
    lines = ["DATA (MIRRORPROCS(I),I=1,%d)/%s/"
             % (len(matrix_elements), values)]
    # Write the file
    writer.writelines(lines)
5134 5135 #=========================================================================== 5136 # write_addmothers 5137 #===========================================================================
def write_addmothers(self, writer):
    """Write the SubProcess/addmothers.f"""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'addmothers.f')
    # In grouped mode the config index variable is lconfig
    writer.write(open(template).read() % {'iconfig': 'lconfig'})
    return True
5147 5148 5149 #=========================================================================== 5150 # write_coloramps_file 5151 #===========================================================================
def write_coloramps_file(self, writer, diagrams_for_config, maxflows,
                         matrix_elements):
    """Write the coloramps.inc file for MadEvent in Subprocess group mode"""

    # Build, per subprocess (matrix element), the list of its diagram
    # numbers — one entry per config.
    confdiags_per_subproc = {}
    for config in diagrams_for_config:
        for isub, diag in enumerate(config):
            confdiags_per_subproc.setdefault(isub, []).append(diag)

    lines = []
    for isub in sorted(confdiags_per_subproc):
        lines.extend(self.get_icolamp_lines(confdiags_per_subproc[isub],
                                            matrix_elements[isub],
                                            isub + 1))

    # Array declaration goes first
    lines.insert(0, "logical icolamp(%d,%d,%d)"
                 % (maxflows,
                    len(diagrams_for_config),
                    len(matrix_elements)))

    # Write the file
    writer.writelines(lines)

    return True
5183 5184 #=========================================================================== 5185 # write_config_subproc_map_file 5186 #===========================================================================
def write_config_subproc_map_file(self, writer, config_subproc_map):
    """Write the config_subproc_map.inc file for subprocess groups"""

    lines = []
    # Only configs with at least one contributing diagram are written;
    # they are renumbered consecutively.
    nwritten = 0
    for config in config_subproc_map:
        if set(config) == set([0]):
            continue
        nwritten += 1
        lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/"
                     % (nwritten, len(config),
                        ",".join(str(d) for d in config)))
    # Write the file
    writer.writelines(lines)

    return True
5204 5205 #=========================================================================== 5206 # read_write_good_hel 5207 #===========================================================================
def read_write_good_hel(self, ncomb):
    """return the code to read/write the good_hel common_block

    Grouped-subprocess variant: GOODHEL and NTRY carry a second index
    of size 2 (one slot per IMIRROR), and get_maxsproc reads maxsproc
    from maxamps.inc instead of returning 1.
    """
    # NOTE(review): the leading whitespace of the Fortran lines below was
    # lost in the rendered source this was recovered from; the writer
    # (FortranWriter) presumably re-indents — confirm against the original.
    convert = {'ncomb' : ncomb}

    output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I,1) = .false.
        GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

    return output
5269 5270 5271 5272 #=========================================================================== 5273 # write_configs_file 5274 #===========================================================================
5275 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5276 """Write the configs.inc file with topology information for a 5277 subprocess group. Use the first subprocess with a diagram for each 5278 configuration.""" 5279 5280 matrix_elements = subproc_group.get('matrix_elements') 5281 model = matrix_elements[0].get('processes')[0].get('model') 5282 5283 diagrams = [] 5284 config_numbers = [] 5285 for iconfig, config in enumerate(diagrams_for_config): 5286 # Check if any diagrams correspond to this config 5287 if set(config) == set([0]): 5288 continue 5289 subproc_diags = [] 5290 for s,d in enumerate(config): 5291 if d: 5292 subproc_diags.append(matrix_elements[s].\ 5293 get('diagrams')[d-1]) 5294 else: 5295 subproc_diags.append(None) 5296 diagrams.append(subproc_diags) 5297 config_numbers.append(iconfig + 1) 5298 5299 # Extract number of external particles 5300 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5301 5302 return len(diagrams), \ 5303 self.write_configs_file_from_diagrams(writer, diagrams, 5304 config_numbers, 5305 nexternal, ninitial, 5306 model)
5307 5308 #=========================================================================== 5309 # write_run_configs_file 5310 #===========================================================================
5311 - def write_run_config_file(self, writer):
5312 """Write the run_configs.inc file for MadEvent""" 5313 5314 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5315 if self.proc_characteristic['loop_induced']: 5316 job_per_chan = 1 5317 else: 5318 job_per_chan = 2 5319 text = open(path).read() % {'chanperjob':job_per_chan} 5320 writer.write(text) 5321 return True
5322 5323 5324 #=========================================================================== 5325 # write_leshouche_file 5326 #===========================================================================
5327 - def write_leshouche_file(self, writer, subproc_group):
5328 """Write the leshouche.inc file for MG4""" 5329 5330 all_lines = [] 5331 5332 for iproc, matrix_element in \ 5333 enumerate(subproc_group.get('matrix_elements')): 5334 all_lines.extend(self.get_leshouche_lines(matrix_element, 5335 iproc)) 5336 # Write the file 5337 writer.writelines(all_lines) 5338 return True
5339 5340
    def finalize(self, *args, **opts):
        """Run the generic Fortran finalization of the parent class, then
        flag this output as using grouped subprocesses."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        #ensure that the grouping information is on the correct value
        self.proc_characteristic['grouped_matrix'] = True
#===============================================================================
# UFO_model_to_mg4
#===============================================================================

def python_to_fortran(x):
    """Translate a python/UFO expression string into its Fortran form.

    A plain def (rather than a lambda bound to a name, PEP 8 E731) with the
    same call signature; a fresh parser is instantiated per call, exactly as
    the previous lambda did.
    """
    return parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type names used when writing the multiple-precision output
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Prefix prepended to every name in the multiple-precision files.
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
5369 - def __init__(self, model, output_path, opt=None):
5370 """ initialization of the objects """ 5371 5372 self.model = model 5373 self.model_name = model['name'] 5374 self.dir_path = output_path 5375 5376 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5377 'loop_induced': False} 5378 if opt: 5379 self.opt.update(opt) 5380 5381 self.coups_dep = [] # (name, expression, type) 5382 self.coups_indep = [] # (name, expression, type) 5383 self.params_dep = [] # (name, expression, type) 5384 self.params_indep = [] # (name, expression, type) 5385 self.params_ext = [] # external parameter 5386 self.p_to_f = parsers.UFOExpressionParserFortran() 5387 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5388
5390 """modify the parameter if some of them are identical up to the case""" 5391 5392 lower_dict={} 5393 duplicate = set() 5394 keys = self.model['parameters'].keys() 5395 for key in keys: 5396 for param in self.model['parameters'][key]: 5397 lower_name = param.name.lower() 5398 if not lower_name: 5399 continue 5400 try: 5401 lower_dict[lower_name].append(param) 5402 except KeyError,error: 5403 lower_dict[lower_name] = [param] 5404 else: 5405 duplicate.add(lower_name) 5406 logger.debug('%s is define both as lower case and upper case.' 5407 % lower_name) 5408 if not duplicate: 5409 return 5410 5411 re_expr = r'''\b(%s)\b''' 5412 to_change = [] 5413 change={} 5414 for value in duplicate: 5415 for i, var in enumerate(lower_dict[value]): 5416 to_change.append(var.name) 5417 new_name = '%s%s' % (var.name.lower(), 5418 ('__%d'%(i+1) if i>0 else '')) 5419 change[var.name] = new_name 5420 var.name = new_name 5421 5422 # Apply the modification to the map_CTcoup_CTparam of the model 5423 # if it has one (giving for each coupling the CT parameters whcih 5424 # are necessary and which should be exported to the model. 
5425 if hasattr(self.model,'map_CTcoup_CTparam'): 5426 for coup, ctparams in self.model.map_CTcoup_CTparam: 5427 for i, ctparam in enumerate(ctparams): 5428 try: 5429 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5430 except KeyError: 5431 pass 5432 5433 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5434 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5435 5436 # change parameters 5437 for key in keys: 5438 if key == ('external',): 5439 continue 5440 for param in self.model['parameters'][key]: 5441 param.expr = rep_pattern.sub(replace, param.expr) 5442 5443 # change couplings 5444 for key in self.model['couplings'].keys(): 5445 for coup in self.model['couplings'][key]: 5446 coup.expr = rep_pattern.sub(replace, coup.expr) 5447 5448 # change mass/width 5449 for part in self.model['particles']: 5450 if str(part.get('mass')) in to_change: 5451 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5452 if str(part.get('width')) in to_change: 5453 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5454
5455 - def refactorize(self, wanted_couplings = []):
5456 """modify the couplings to fit with MG4 convention """ 5457 5458 # Keep only separation in alphaS 5459 keys = self.model['parameters'].keys() 5460 keys.sort(key=len) 5461 for key in keys: 5462 to_add = [o for o in self.model['parameters'][key] if o.name] 5463 5464 if key == ('external',): 5465 self.params_ext += to_add 5466 elif any([(k in key) for k in self.PS_dependent_key]): 5467 self.params_dep += to_add 5468 else: 5469 self.params_indep += to_add 5470 # same for couplings 5471 keys = self.model['couplings'].keys() 5472 keys.sort(key=len) 5473 for key, coup_list in self.model['couplings'].items(): 5474 if any([(k in key) for k in self.PS_dependent_key]): 5475 self.coups_dep += [c for c in coup_list if 5476 (not wanted_couplings or c.name in \ 5477 wanted_couplings)] 5478 else: 5479 self.coups_indep += [c for c in coup_list if 5480 (not wanted_couplings or c.name in \ 5481 wanted_couplings)] 5482 5483 # MG4 use G and not aS as it basic object for alphas related computation 5484 #Pass G in the independant list 5485 if 'G' in self.params_dep: 5486 index = self.params_dep.index('G') 5487 G = self.params_dep.pop(index) 5488 # G.expr = '2*cmath.sqrt(as*pi)' 5489 # self.params_indep.insert(0, self.params_dep.pop(index)) 5490 # No need to add it if not defined 5491 5492 if 'aS' not in self.params_ext: 5493 logger.critical('aS not define as external parameter adding it!') 5494 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5495 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5496 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5497 - def build(self, wanted_couplings = [], full=True):
5498 """modify the couplings to fit with MG4 convention and creates all the 5499 different files""" 5500 5501 self.pass_parameter_to_case_insensitive() 5502 self.refactorize(wanted_couplings) 5503 5504 # write the files 5505 if full: 5506 if wanted_couplings: 5507 # extract the wanted ct parameters 5508 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5509 self.write_all()
5510 5511
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        format='fortran' returns a FortranWriter (which reformats every
        line written through it); any other value returns a plain file
        object. The returned stream already contains a comment banner.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            fsock = open(file_path, 'w')

        # NOTE(review): calling the unbound builtin file.writelines writes
        # the banner verbatim, apparently to bypass FortranWriter's own
        # writelines formatting -- confirm; this is a Python2-only construct
        # ('file' is not a builtin in Python 3).
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) *  ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5528 5529
    def write_all(self):
        """ write all the files of the MG4 model output; the creation order
        below is kept as-is since later files include earlier ones. """

        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            # multiple-precision variant goes to the mp_-prefixed file
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()


        # All the standard files
        self.copy_standard_file()
5562 5563 ############################################################################ 5564 ## ROUTINE CREATING THE FILES ############################################ 5565 ############################################################################ 5566
5567 - def copy_standard_file(self):
5568 """Copy the standard files for the fortran model.""" 5569 5570 #copy the library files 5571 file_to_link = ['formats.inc','printout.f', \ 5572 'rw_para.f', 'testprog.f'] 5573 5574 for filename in file_to_link: 5575 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5576 self.dir_path) 5577 5578 file = open(os.path.join(MG5DIR,\ 5579 'models/template_files/fortran/rw_para.f')).read() 5580 5581 includes=["include \'coupl.inc\'","include \'input.inc\'", 5582 "include \'model_functions.inc\'"] 5583 if self.opt['mp']: 5584 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5585 # In standalone and madloop we do no use the compiled param card but 5586 # still parse the .dat one so we must load it. 5587 if self.opt['loop_induced']: 5588 #loop induced follow MadEvent way to handle the card. 5589 load_card = '' 5590 lha_read_filename='lha_read.f' 5591 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5592 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5593 lha_read_filename='lha_read_mp.f' 5594 elif self.opt['export_format'].startswith('standalone') \ 5595 or self.opt['export_format'] in ['madweight', 'plugin']\ 5596 or self.opt['export_format'].startswith('matchbox'): 5597 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5598 lha_read_filename='lha_read.f' 5599 else: 5600 load_card = '' 5601 lha_read_filename='lha_read.f' 5602 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5603 os.path.join(self.dir_path,'lha_read.f')) 5604 5605 file=file%{'includes':'\n '.join(includes), 5606 'load_card':load_card} 5607 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5608 writer.writelines(file) 5609 writer.close() 5610 5611 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5612 or self.opt['loop_induced']: 5613 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5614 self.dir_path + '/makefile') 5615 if 
self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5616 path = pjoin(self.dir_path, 'makefile') 5617 text = open(path).read() 5618 text = text.replace('madevent','aMCatNLO') 5619 open(path, 'w').writelines(text) 5620 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5621 'madloop','madloop_optimized', 'standalone_rw', 5622 'madweight','matchbox','madloop_matchbox', 'plugin']: 5623 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5624 self.dir_path + '/makefile') 5625 #elif self.opt['export_format'] in []: 5626 #pass 5627 else: 5628 raise MadGraph5Error('Unknown format')
5629
    def create_coupl_inc(self):
        """ write coupling.inc: the common blocks holding G, gal, MU_R and
        every mass/width/coupling of the model, plus (when mp is on) the
        multiple-precision twins mp_coupl.inc and mp_coupl_same_name.inc."""

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            # two mp flavours: one with mp_prefix-ed names, one sharing the
            # double-precision names (used when only one precision is linked)
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # complex-mass scheme: a CMASS_ variable per massive,
                # unstable particle
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                                    ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                                    ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                                ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                                    ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5749
5750 - def create_write_couplings(self):
5751 """ write the file coupl_write.inc """ 5752 5753 fsock = self.open('coupl_write.inc', format='fortran') 5754 5755 fsock.writelines("""write(*,*) ' Couplings of %s' 5756 write(*,*) ' ---------------------------------' 5757 write(*,*) ' '""" % self.model_name) 5758 def format(coupl): 5759 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5760 5761 # Write the Couplings 5762 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5763 fsock.writelines('\n'.join(lines)) 5764 5765
5766 - def create_input(self):
5767 """create input.inc containing the definition of the parameters""" 5768 5769 fsock = self.open('input.inc', format='fortran') 5770 if self.opt['mp']: 5771 mp_fsock = self.open('mp_input.inc', format='fortran') 5772 5773 #find mass/ width since they are already define 5774 already_def = set() 5775 for particle in self.model.get('particles'): 5776 already_def.add(particle.get('mass').lower()) 5777 already_def.add(particle.get('width').lower()) 5778 if self.opt['complex_mass']: 5779 already_def.add('cmass_%s' % particle.get('mass').lower()) 5780 5781 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5782 name.lower() not in already_def 5783 5784 real_parameters = [param.name for param in self.params_dep + 5785 self.params_indep if param.type == 'real' 5786 and is_valid(param.name)] 5787 5788 real_parameters += [param.name for param in self.params_ext 5789 if param.type == 'real'and 5790 is_valid(param.name)] 5791 5792 # check the parameter is a CT parameter or not 5793 # if yes, just use the needed ones 5794 real_parameters = [param for param in real_parameters \ 5795 if self.check_needed_param(param)] 5796 5797 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5798 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5799 if self.opt['mp']: 5800 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5801 self.mp_prefix+p for p in real_parameters])+'\n') 5802 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5803 self.mp_prefix+p for p in real_parameters])+'\n\n') 5804 5805 complex_parameters = [param.name for param in self.params_dep + 5806 self.params_indep if param.type == 'complex' and 5807 is_valid(param.name)] 5808 5809 # check the parameter is a CT parameter or not 5810 # if yes, just use the needed ones 5811 complex_parameters = [param for param in complex_parameters \ 5812 if self.check_needed_param(param)] 5813 5814 if complex_parameters: 5815 fsock.writelines('double complex 
'+','.join(complex_parameters)+'\n') 5816 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5817 if self.opt['mp']: 5818 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5819 self.mp_prefix+p for p in complex_parameters])+'\n') 5820 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5821 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5822
5823 - def check_needed_param(self, param):
5824 """ Returns whether the parameter in argument is needed for this 5825 specific computation or not.""" 5826 5827 # If this is a leading order model or if there was no CT parameter 5828 # employed in this NLO model, one can directly return that the 5829 # parameter is needed since only CTParameters are filtered. 5830 if not hasattr(self, 'allCTparameters') or \ 5831 self.allCTparameters is None or self.usedCTparameters is None or \ 5832 len(self.allCTparameters)==0: 5833 return True 5834 5835 # We must allow the conjugate shorthand for the complex parameter as 5836 # well so we check wether either the parameter name or its name with 5837 # 'conjg__' substituted with '' is present in the list. 5838 # This is acceptable even if some parameter had an original name 5839 # including 'conjg__' in it, because at worst we export a parameter 5840 # was not needed. 5841 param = param.lower() 5842 cjg_param = param.replace('conjg__','',1) 5843 5844 # First make sure it is a CTparameter 5845 if param not in self.allCTparameters and \ 5846 cjg_param not in self.allCTparameters: 5847 return True 5848 5849 # Now check if it is in the list of CTparameters actually used 5850 return (param in self.usedCTparameters or \ 5851 cjg_param in self.usedCTparameters)
5852
5853 - def extract_needed_CTparam(self,wanted_couplings=[]):
5854 """ Extract what are the needed CT parameters given the wanted_couplings""" 5855 5856 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5857 # Setting these lists to none wil disable the filtering in 5858 # check_needed_param 5859 self.allCTparameters = None 5860 self.usedCTparameters = None 5861 return 5862 5863 # All CTparameters appearin in all CT couplings 5864 allCTparameters=self.model.map_CTcoup_CTparam.values() 5865 # Define in this class the list of all CT parameters 5866 self.allCTparameters=list(\ 5867 set(itertools.chain.from_iterable(allCTparameters))) 5868 5869 # All used CT couplings 5870 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5871 allUsedCTCouplings = [coupl for coupl in 5872 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5873 5874 # Now define the list of all CT parameters that are actually used 5875 self.usedCTparameters=list(\ 5876 set(itertools.chain.from_iterable([ 5877 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5878 ]))) 5879 5880 # Now at last, make these list case insensitive 5881 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5882 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5883
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                             ('mp_' if mp and not dp else ''), format='fortran')

        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter;
            # if yes, only emit the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter;
            # if yes, only emit the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            elif mp:
                # NOTE(review): 'elif' here (vs two independent 'if's in the
                # params_indep loop above) means the mp line is skipped when
                # dp is also True. Harmless for the current callers, which
                # always pass exactly one of dp/mp as True for this file --
                # confirm before calling with dp=mp=True.
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                  gal(2) = 1d0
              """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                  %(mp_prefix)sgal(2) = 1d0
              """ %{'mp_prefix':self.mp_prefix})
            pass
        # in Gmu scheme, aEWM1 is not external but Gf is an external variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                  gal(2) = 1d0
              """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                  %(mp_prefix)sgal(2) = 1d0
              """ %{'mp_prefix':self.mp_prefix})
            pass
        else:
            # neither scheme available: fall back to unit couplings
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
                  gal(2) = 1d0
              """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                  %(mp_prefix)sgal(2) = 1e0_16
              """%{'mp_prefix':self.mp_prefix})
5967 5968
5969 - def create_couplings(self):
5970 """ create couplings.f and all couplingsX.f """ 5971 5972 nb_def_by_file = 25 5973 5974 self.create_couplings_main(nb_def_by_file) 5975 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5976 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5977 5978 for i in range(nb_coup_indep): 5979 # For the independent couplings, we compute the double and multiple 5980 # precision ones together 5981 data = self.coups_indep[nb_def_by_file * i: 5982 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5983 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5984 5985 for i in range(nb_coup_dep): 5986 # For the dependent couplings, we compute the double and multiple 5987 # precision ones in separate subroutines. 5988 data = self.coups_dep[nb_def_by_file * i: 5989 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5990 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5991 dp=True,mp=False) 5992 if self.opt['mp']: 5993 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5994 dp=False,mp=True)
5995 5996
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f: the coup() entry point plus the
        update_as_param routines dispatching to the couplingsN.f chunks."""

        fsock = self.open('couplings.f', format='fortran')

        # coup(): full (re)initialization, used when reading the param_card
        fsock.writelines("""subroutine coup()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                                parameter (MP__ZERO=0e0_16)
                                include \'mp_input.inc\'
                                include \'mp_coupl.inc\'
                        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .true.
                            include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param(): recompute only alphaS-dependent quantities
        fsock.writelines("""subroutine update_as_param()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .false.""")
        fsock.writelines("""
                            include \'intparam_definition.inc\'\n
                         """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2(mu_r2, as2): external entry point setting the
        # renormalization scale and alphaS before updating the couplings
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

          implicit none
          double precision PI
          parameter  (PI=3.141592653589793d0)
          double precision mu_r2, as2
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'""")
        fsock.writelines("""
          if (mu_r2.gt.0d0) MU_R = mu_r2
          G = SQRT(4.0d0*PI*AS2)
          AS = as2

          CALL UPDATE_AS_PARAM()
          """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            # mp_update_as_param(): multiple-precision analogue
            fsock.writelines("""subroutine mp_update_as_param()

              implicit none
              logical READLHA
              include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                                parameter (MP__ZERO=0e0_16)
                                include \'mp_input.inc\'
                                include \'mp_coupl.inc\'
                        """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
                              include \'coupl.inc\'
                              include \'actualize_mp_ext_params.inc\'
                              READLHA = .false.
                              include \'mp_intparam_definition.inc\'\n
                           """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6112
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                              nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

          implicit none
          include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
              double precision PI, ZERO
              parameter  (PI=3.141592653589793d0)
              parameter  (ZERO=0d0)
              include 'input.inc'
              include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                                parameter (MP__ZERO=0e0_16)
                                include \'mp_input.inc\'
                                include \'mp_coupl.inc\'
                        """%self.mp_real_format)

        # one assignment per coupling, translated with the parser matching
        # the requested precision
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                              self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                                self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
6150
6151 - def create_model_functions_inc(self):
6152 """ Create model_functions.inc which contains the various declarations 6153 of auxiliary functions which might be used in the couplings expressions 6154 """ 6155 6156 additional_fct = [] 6157 # check for functions define in the UFO model 6158 ufo_fct = self.model.get('functions') 6159 if ufo_fct: 6160 for fct in ufo_fct: 6161 # already handle by default 6162 if fct.name not in ["complexconjugate", "re", "im", "sec", 6163 "csc", "asec", "acsc", "theta_function", "cond", 6164 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]: 6165 additional_fct.append(fct.name) 6166 6167 6168 fsock = self.open('model_functions.inc', format='fortran') 6169 fsock.writelines("""double complex cond 6170 double complex condif 6171 double complex reglog 6172 double complex reglogp 6173 double complex reglogm 6174 double complex recms 6175 double complex arg 6176 %s 6177 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6178 6179 6180 if self.opt['mp']: 6181 fsock.writelines("""%(complex_mp_format)s mp_cond 6182 %(complex_mp_format)s mp_condif 6183 %(complex_mp_format)s mp_reglog 6184 %(complex_mp_format)s mp_reglogp 6185 %(complex_mp_format)s mp_reglogm 6186 %(complex_mp_format)s mp_recms 6187 %(complex_mp_format)s mp_arg 6188 %(additional)s 6189 """ %\ 6190 {"additional": "\n".join([" %s %s" % (self.mp_complex_format, i) for i in additional_fct]), 6191 'complex_mp_format':self.mp_complex_format 6192 })
6193
6194 - def create_model_functions_def(self):
6195 """ Create model_functions.f which contains the various definitions 6196 of auxiliary functions which might be used in the couplings expressions 6197 Add the functions.f functions for formfactors support 6198 """ 6199 6200 fsock = self.open('model_functions.f', format='fortran') 6201 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6202 implicit none 6203 double complex condition,truecase,falsecase 6204 if(condition.eq.(0.0d0,0.0d0)) then 6205 cond=truecase 6206 else 6207 cond=falsecase 6208 endif 6209 end 6210 6211 double complex function condif(condition,truecase,falsecase) 6212 implicit none 6213 logical condition 6214 double complex truecase,falsecase 6215 if(condition) then 6216 condif=truecase 6217 else 6218 condif=falsecase 6219 endif 6220 end 6221 6222 double complex function recms(condition,expr) 6223 implicit none 6224 logical condition 6225 double complex expr 6226 if(condition)then 6227 recms=expr 6228 else 6229 recms=dcmplx(dble(expr)) 6230 endif 6231 end 6232 6233 double complex function reglog(arg) 6234 implicit none 6235 double complex TWOPII 6236 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6237 double complex arg 6238 if(arg.eq.(0.0d0,0.0d0)) then 6239 reglog=(0.0d0,0.0d0) 6240 else 6241 reglog=log(arg) 6242 endif 6243 end 6244 6245 double complex function reglogp(arg) 6246 implicit none 6247 double complex TWOPII 6248 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6249 double complex arg 6250 if(arg.eq.(0.0d0,0.0d0))then 6251 reglogp=(0.0d0,0.0d0) 6252 else 6253 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6254 reglogp=log(arg) + TWOPII 6255 else 6256 reglogp=log(arg) 6257 endif 6258 endif 6259 end 6260 6261 double complex function reglogm(arg) 6262 implicit none 6263 double complex TWOPII 6264 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6265 double complex arg 6266 if(arg.eq.(0.0d0,0.0d0))then 6267 reglogm=(0.0d0,0.0d0) 6268 else 6269 
if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6270 reglogm=log(arg) - TWOPII 6271 else 6272 reglogm=log(arg) 6273 endif 6274 endif 6275 end 6276 6277 double complex function arg(comnum) 6278 implicit none 6279 double complex comnum 6280 double complex iim 6281 iim = (0.0d0,1.0d0) 6282 if(comnum.eq.(0.0d0,0.0d0)) then 6283 arg=(0.0d0,0.0d0) 6284 else 6285 arg=log(comnum/abs(comnum))/iim 6286 endif 6287 end""") 6288 if self.opt['mp']: 6289 fsock.writelines(""" 6290 6291 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6292 implicit none 6293 %(complex_mp_format)s condition,truecase,falsecase 6294 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6295 mp_cond=truecase 6296 else 6297 mp_cond=falsecase 6298 endif 6299 end 6300 6301 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6302 implicit none 6303 logical condition 6304 %(complex_mp_format)s truecase,falsecase 6305 if(condition) then 6306 mp_condif=truecase 6307 else 6308 mp_condif=falsecase 6309 endif 6310 end 6311 6312 %(complex_mp_format)s function mp_recms(condition,expr) 6313 implicit none 6314 logical condition 6315 %(complex_mp_format)s expr 6316 if(condition)then 6317 mp_recms=expr 6318 else 6319 mp_recms=cmplx(real(expr),kind=16) 6320 endif 6321 end 6322 6323 %(complex_mp_format)s function mp_reglog(arg) 6324 implicit none 6325 %(complex_mp_format)s TWOPII 6326 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6327 %(complex_mp_format)s arg 6328 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6329 mp_reglog=(0.0e0_16,0.0e0_16) 6330 else 6331 mp_reglog=log(arg) 6332 endif 6333 end 6334 6335 %(complex_mp_format)s function mp_reglogp(arg) 6336 implicit none 6337 %(complex_mp_format)s TWOPII 6338 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6339 %(complex_mp_format)s arg 6340 if(arg.eq.(0.0e0_16,0.0e0_16))then 6341 mp_reglogp=(0.0e0_16,0.0e0_16) 6342 else 6343 
if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6344 mp_reglogp=log(arg) + TWOPII 6345 else 6346 mp_reglogp=log(arg) 6347 endif 6348 endif 6349 end 6350 6351 %(complex_mp_format)s function mp_reglogm(arg) 6352 implicit none 6353 %(complex_mp_format)s TWOPII 6354 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6355 %(complex_mp_format)s arg 6356 if(arg.eq.(0.0e0_16,0.0e0_16))then 6357 mp_reglogm=(0.0e0_16,0.0e0_16) 6358 else 6359 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6360 mp_reglogm=log(arg) - TWOPII 6361 else 6362 mp_reglogm=log(arg) 6363 endif 6364 endif 6365 end 6366 6367 %(complex_mp_format)s function mp_arg(comnum) 6368 implicit none 6369 %(complex_mp_format)s comnum 6370 %(complex_mp_format)s imm 6371 imm = (0.0e0_16,1.0e0_16) 6372 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6373 mp_arg=(0.0e0_16,0.0e0_16) 6374 else 6375 mp_arg=log(comnum/abs(comnum))/imm 6376 endif 6377 end"""%{'complex_mp_format':self.mp_complex_format}) 6378 6379 6380 #check for the file functions.f 6381 model_path = self.model.get('modelpath') 6382 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6383 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6384 input = pjoin(model_path,'Fortran','functions.f') 6385 file.writelines(fsock, open(input).read()) 6386 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6387 6388 # check for functions define in the UFO model 6389 ufo_fct = self.model.get('functions') 6390 if ufo_fct: 6391 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6392 for fct in ufo_fct: 6393 # already handle by default 6394 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6395 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg"]: 6396 ufo_fct_template = """ 6397 double complex function %(name)s(%(args)s) 6398 implicit none 6399 double complex %(args)s 6400 %(definitions)s 6401 %(name)s = %(fct)s 6402 
6403 return 6404 end 6405 """ 6406 str_fct = self.p_to_f.parse(fct.expr) 6407 if not self.p_to_f.to_define: 6408 definitions = [] 6409 else: 6410 definitions=[] 6411 for d in self.p_to_f.to_define: 6412 if d == 'pi': 6413 definitions.append(' double precision pi') 6414 definitions.append(' data pi /3.1415926535897932d0/') 6415 else: 6416 definitions.append(' double complex %s' % d) 6417 6418 text = ufo_fct_template % { 6419 'name': fct.name, 6420 'args': ", ".join(fct.arguments), 6421 'fct': str_fct, 6422 'definitions': '\n'.join(definitions) 6423 } 6424 6425 fsock.writelines(text) 6426 if self.opt['mp']: 6427 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6428 for fct in ufo_fct: 6429 # already handle by default 6430 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6431 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg"]: 6432 ufo_fct_template = """ 6433 %(complex_mp_format)s function mp__%(name)s(mp__%(args)s) 6434 implicit none 6435 %(complex_mp_format)s mp__%(args)s 6436 %(definitions)s 6437 mp__%(name)s = %(fct)s 6438 6439 return 6440 end 6441 """ 6442 6443 str_fct = self.mp_p_to_f.parse(fct.expr) 6444 if not self.p_to_f.to_define: 6445 definitions = [] 6446 else: 6447 definitions=[] 6448 for d in self.p_to_f.to_define: 6449 if d == 'mp_pi': 6450 definitions.append(' %s mp_pi' % self.mp_real_format) 6451 definitions.append(' data mp_pi /3.141592653589793238462643383279502884197e+00_16/') 6452 else: 6453 definitions.append(' %s %s' % (self.mp_complex_format,d)) 6454 text = ufo_fct_template % { 6455 'name': fct.name, 6456 'args': ", mp__".join(fct.arguments), 6457 'fct': str_fct, 6458 'definitions': '\n'.join(definitions), 6459 'complex_mp_format': self.mp_complex_format 6460 } 6461 fsock.writelines(text) 6462 6463 6464 6465 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6466 6467 6468
6469 - def create_makeinc(self):
6470 """create makeinc.inc containing the file to compile """ 6471 6472 fsock = self.open('makeinc.inc', comment='#') 6473 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6474 text += ' model_functions.o ' 6475 6476 nb_coup_indep = 1 + len(self.coups_dep) // 25 6477 nb_coup_dep = 1 + len(self.coups_indep) // 25 6478 couplings_files=['couplings%s.o' % (i+1) \ 6479 for i in range(nb_coup_dep + nb_coup_indep) ] 6480 if self.opt['mp']: 6481 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6482 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6483 text += ' '.join(couplings_files) 6484 fsock.writelines(text)
6485
6486 - def create_param_write(self):
6487 """ create param_write """ 6488 6489 fsock = self.open('param_write.inc', format='fortran') 6490 6491 fsock.writelines("""write(*,*) ' External Params' 6492 write(*,*) ' ---------------------------------' 6493 write(*,*) ' '""") 6494 def format(name): 6495 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6496 6497 # Write the external parameter 6498 lines = [format(param.name) for param in self.params_ext] 6499 fsock.writelines('\n'.join(lines)) 6500 6501 fsock.writelines("""write(*,*) ' Internal Params' 6502 write(*,*) ' ---------------------------------' 6503 write(*,*) ' '""") 6504 lines = [format(data.name) for data in self.params_indep 6505 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6506 fsock.writelines('\n'.join(lines)) 6507 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6508 write(*,*) ' ----------------------------------------' 6509 write(*,*) ' '""") 6510 lines = [format(data.name) for data in self.params_dep \ 6511 if self.check_needed_param(data.name)] 6512 6513 fsock.writelines('\n'.join(lines)) 6514 6515 6516
6517 - def create_ident_card(self):
6518 """ create the ident_card.dat """ 6519 6520 def format(parameter): 6521 """return the line for the ident_card corresponding to this parameter""" 6522 colum = [parameter.lhablock.lower()] + \ 6523 [str(value) for value in parameter.lhacode] + \ 6524 [parameter.name] 6525 if not parameter.name: 6526 return '' 6527 return ' '.join(colum)+'\n'
6528 6529 fsock = self.open('ident_card.dat') 6530 6531 external_param = [format(param) for param in self.params_ext] 6532 fsock.writelines('\n'.join(external_param)) 6533
6534 - def create_actualize_mp_ext_param_inc(self):
6535 """ create the actualize_mp_ext_params.inc code """ 6536 6537 # In principle one should actualize all external, but for now, it is 6538 # hardcoded that only AS and MU_R can by dynamically changed by the user 6539 # so that we only update those ones. 6540 # Of course, to be on the safe side, one could decide to update all 6541 # external parameters. 6542 update_params_list=[p for p in self.params_ext if p.name in 6543 self.PS_dependent_key] 6544 6545 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6546 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6547 for param in update_params_list] 6548 # When read_lha is false, it is G which is taken in input and not AS, so 6549 # this is what should be reset here too. 6550 if 'aS' in [param.name for param in update_params_list]: 6551 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6552 6553 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6554 fsock.writelines('\n'.join(res_strings))
6555
6556 - def create_param_read(self):
6557 """create param_read""" 6558 6559 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6560 or self.opt['loop_induced']: 6561 fsock = self.open('param_read.inc', format='fortran') 6562 fsock.writelines(' include \'../param_card.inc\'') 6563 return 6564 6565 def format_line(parameter): 6566 """return the line for the ident_card corresponding to this 6567 parameter""" 6568 template = \ 6569 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6570 % {'name': parameter.name, 6571 'value': self.p_to_f.parse(str(parameter.value.real))} 6572 if self.opt['mp']: 6573 template = template+ \ 6574 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6575 "%(mp_prefix)s%(name)s,%(value)s)") \ 6576 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6577 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6578 return template 6579 6580 fsock = self.open('param_read.inc', format='fortran') 6581 res_strings = [format_line(param) \ 6582 for param in self.params_ext] 6583 6584 # Correct width sign for Majorana particles (where the width 6585 # and mass need to have the same sign) 6586 for particle in self.model.get('particles'): 6587 if particle.is_fermion() and particle.get('self_antipart') and \ 6588 particle.get('width').lower() != 'zero': 6589 6590 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6591 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6592 if self.opt['mp']: 6593 res_strings.append(\ 6594 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6595 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6596 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6597 6598 fsock.writelines('\n'.join(res_strings)) 6599 6600 6601 @staticmethod
6602 - def create_param_card_static(model, output_path, rule_card_path=False, 6603 mssm_convert=True):
6604 """ create the param_card.dat for a givent model --static method-- """ 6605 #1. Check if a default param_card is present: 6606 done = False 6607 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6608 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6609 model_path = model.get('modelpath') 6610 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6611 done = True 6612 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6613 output_path) 6614 if not done: 6615 param_writer.ParamCardWriter(model, output_path) 6616 6617 if rule_card_path: 6618 if hasattr(model, 'rule_card'): 6619 model.rule_card.write_file(rule_card_path) 6620 6621 if mssm_convert: 6622 model_name = model.get('name') 6623 # IF MSSM convert the card to SLAH1 6624 if model_name == 'mssm' or model_name.startswith('mssm-'): 6625 import models.check_param_card as translator 6626 # Check the format of the param_card for Pythia and make it correct 6627 if rule_card_path: 6628 translator.make_valid_param_card(output_path, rule_card_path) 6629 translator.convert_to_slha1(output_path)
6630
6631 - def create_param_card(self):
6632 """ create the param_card.dat """ 6633 6634 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6635 if not hasattr(self.model, 'rule_card'): 6636 rule_card=False 6637 self.create_param_card_static(self.model, 6638 output_path=pjoin(self.dir_path, 'param_card.dat'), 6639 rule_card_path=rule_card, 6640 mssm_convert=True)
6641
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
    """ Determine which Export_v4 class is required. cmd is the command
    interface containing all potential usefull information.
    The output_type argument specifies from which context the output
    is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
    and 'default' for tree-level outputs.

    Returns an instantiated exporter object; raises on unknown
    output_type/format.

    NOTE(review): cmd_options={} is a mutable default argument; it is only
    read here (stored into the option dictionaries), but callers mutating
    the stored dict would share state -- confirm before changing.
    """

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Pick the representative process: first amplitude if any, otherwise the
    # first FKS process definition, otherwise None.
    if len(cmd._curr_amps)>0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd,'_fks_multi_proc') and \
                         len(cmd._fks_multi_proc.get('process_definitions'))>0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    # A loop-reduction tool is needed only for optimized loop output of a
    # genuinely virtual-NLO process.
    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    # Options shared by every MadLoop-flavoured exporter below.
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass':cmd.options['complex_mass_scheme'],
      'export_format':'madloop',
      'mp':True,
      'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir':cmd._iregi_dir,
      'pjfry_dir':cmd.options['pjfry'],
      'golem_dir':cmd.options['golem'],
      'samurai_dir':cmd.options['samurai'],
      'ninja_dir':cmd.options['ninja'],
      'collier_dir':cmd.options['collier'],
      'fortran_compiler':cmd.options['fortran_compiler'],
      'f2py_compiler':cmd.options['f2py_compiler'],
      'output_dependencies':cmd.options['output_dependencies'],
      'SubProc_prefix':'P',
      'compute_color_flows':cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path'],
      'output_options': cmd_options
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass=None
            if not cmd.options['loop_optimized_output']:
                ExporterClass=loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception, "output_type not recognize %s" % output_type
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s'%str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type=='amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass=None
        # aMC@NLO inherits the MadLoop options; mp only if virtuals exist.
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format']='FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format']='FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type=='default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format':cmd._export_format,
                    'mp': False,
                    'sa_symmetry':False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True })

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        # Option set used when the amplitude turns out to be loop-induced.
        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        # Dispatch on the export format (grouped vs ungrouped, loop-induced
        # vs tree-level, plugin-provided exporters).
        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)
        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir,loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir,opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir,loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir,opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir,opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception, 'Wrong export_v4 format'
    else:
        raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
6813
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Fortran template file for the matrix<N>.f files -- presumably consumed
    # by the inherited write_matrix_element_v4; confirm in the base class.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Marker for the grouped MadWeight output mode -- TODO confirm its use
    # in the base exporter.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls over all matrix elements of
        the group (0 when none).
        """

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "subproc_group object not SubProcessGroup"

        # Lazily pick up the model from the first process of the group.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory may already exist; warn and continue writing into it.
            logger.warning(error.strerror + " " + subprocdir)

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0    # largest number of diagrams over the group
        maxflows = 0   # largest number of color flows over the group
        tot_calls = 0  # accumulated helas call count

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                   self.write_matrix_element_v4(writers.FortranWriter(filename),
                                                matrix_element,
                                                fortran_model,
                                                str(ime+1),
                                                subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # (nexternal/ninitial taken from the last matrix element; all members
        # of a group share the same external legs)
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath,'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Link common files from the parent SubProcesses directory.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
6976 6977 6978 #=========================================================================== 6979 # Helper functions 6980 #===========================================================================
6981 - def modify_grouping(self, matrix_element):
6982 """allow to modify the grouping (if grouping is in place) 6983 return two value: 6984 - True/False if the matrix_element was modified 6985 - the new(or old) matrix element""" 6986 6987 return True, matrix_element.split_lepton_grouping()
6988 6989 #=========================================================================== 6990 # write_super_auto_dsig_file 6991 #===========================================================================
6992 - def write_super_auto_dsig_file(self, writer, subproc_group):
6993 """Write the auto_dsig.f file selecting between the subprocesses 6994 in subprocess group mode""" 6995 6996 replace_dict = {} 6997 6998 # Extract version number and date from VERSION file 6999 info_lines = self.get_mg5_info_lines() 7000 replace_dict['info_lines'] = info_lines 7001 7002 matrix_elements = subproc_group.get('matrix_elements') 7003 7004 # Extract process info lines 7005 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7006 matrix_elements]) 7007 replace_dict['process_lines'] = process_lines 7008 7009 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7010 replace_dict['nexternal'] = nexternal 7011 7012 replace_dict['nsprocs'] = 2*len(matrix_elements) 7013 7014 # Generate dsig definition line 7015 dsig_def_line = "DOUBLE PRECISION " + \ 7016 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7017 range(len(matrix_elements))]) 7018 replace_dict["dsig_def_line"] = dsig_def_line 7019 7020 # Generate dsig process lines 7021 call_dsig_proc_lines = [] 7022 for iproc in range(len(matrix_elements)): 7023 call_dsig_proc_lines.append(\ 7024 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7025 {"num": iproc + 1, 7026 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7027 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7028 7029 if writer: 7030 file = open(os.path.join(_file_path, \ 7031 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7032 file = file % replace_dict 7033 # Write the file 7034 writer.writelines(file) 7035 else: 7036 return replace_dict
7037 7038 #=========================================================================== 7039 # write_mirrorprocs 7040 #===========================================================================
7041 - def write_mirrorprocs(self, writer, subproc_group):
7042 """Write the mirrorprocs.inc file determining which processes have 7043 IS mirror process in subprocess group mode.""" 7044 7045 lines = [] 7046 bool_dict = {True: '.true.', False: '.false.'} 7047 matrix_elements = subproc_group.get('matrix_elements') 7048 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7049 (len(matrix_elements), 7050 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7051 me in matrix_elements]))) 7052 # Write the file 7053 writer.writelines(lines)
7054 7055 #=========================================================================== 7056 # write_configs_file 7057 #===========================================================================
7058 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7059 """Write the configs.inc file with topology information for a 7060 subprocess group. Use the first subprocess with a diagram for each 7061 configuration.""" 7062 7063 matrix_elements = subproc_group.get('matrix_elements') 7064 model = matrix_elements[0].get('processes')[0].get('model') 7065 7066 diagrams = [] 7067 config_numbers = [] 7068 for iconfig, config in enumerate(diagrams_for_config): 7069 # Check if any diagrams correspond to this config 7070 if set(config) == set([0]): 7071 continue 7072 subproc_diags = [] 7073 for s,d in enumerate(config): 7074 if d: 7075 subproc_diags.append(matrix_elements[s].\ 7076 get('diagrams')[d-1]) 7077 else: 7078 subproc_diags.append(None) 7079 diagrams.append(subproc_diags) 7080 config_numbers.append(iconfig + 1) 7081 7082 # Extract number of external particles 7083 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7084 7085 return len(diagrams), \ 7086 self.write_configs_file_from_diagrams(writer, diagrams, 7087 config_numbers, 7088 nexternal, ninitial, 7089 matrix_elements[0],model)
    #===========================================================================
    # write_run_config_file
    #===========================================================================
7094 - def write_run_config_file(self, writer):
7095 """Write the run_configs.inc file for MadEvent""" 7096 7097 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7098 text = open(path).read() % {'chanperjob':'2'} 7099 writer.write(text) 7100 return True
7101 7102 7103 #=========================================================================== 7104 # write_leshouche_file 7105 #===========================================================================
7106 - def write_leshouche_file(self, writer, subproc_group):
7107 """Write the leshouche.inc file for MG4""" 7108 7109 all_lines = [] 7110 7111 for iproc, matrix_element in \ 7112 enumerate(subproc_group.get('matrix_elements')): 7113 all_lines.extend(self.get_leshouche_lines(matrix_element, 7114 iproc)) 7115 7116 # Write the file 7117 writer.writelines(all_lines) 7118 7119 return True
7120