Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import traceback 
  31   
  32  import aloha 
  33   
  34  import madgraph.core.base_objects as base_objects 
  35  import madgraph.core.color_algebra as color 
  36  import madgraph.core.helas_objects as helas_objects 
  37  import madgraph.iolibs.drawing_eps as draw 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.iolibs.group_subprocs as group_subprocs 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  42  import madgraph.iolibs.template_files as template_files 
  43  import madgraph.iolibs.ufo_expression_parsers as parsers 
  44  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  45  import madgraph.interface.common_run_interface as common_run_interface 
  46  import madgraph.various.diagram_symmetry as diagram_symmetry 
  47  import madgraph.various.misc as misc 
  48  import madgraph.various.banner as banner_mod 
  49  import madgraph.various.process_checks as process_checks 
  50  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  51  import aloha.create_aloha as create_aloha 
  52  import models.import_ufo as import_ufo 
  53  import models.write_param_card as param_writer 
  54  import models.check_param_card as check_param_card 
  55   
  56   
  57  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  58  from madgraph.iolibs.files import cp, ln, mv 
  59   
  60  from madgraph import InvalidCmd 
  61   
  62  pjoin = os.path.join 
  63   
  64  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  65  logger = logging.getLogger('madgraph.export_v4') 
  66   
  67  default_compiler= {'fortran': 'gfortran', 
  68                         'f2py': 'f2py', 
  69                         'cpp':'g++'} 
class VirtualExporter(object):
    """Abstract interface that MadGraph uses to drive an exporter.

    Daughter classes override the hooks below; the class attributes tell
    the interface how to interact with the exporter.
    """

    # Changes the type of object passed to 'generate_subprocess_directory':
    #   False       -> no grouping (only identical matrix elements are merged)
    #   'madevent'  -> group the massless quarks and massless leptons
    #   'madweight' -> group the gluon with the massless quarks
    grouped_mode = 'madevent'

    # If grouped_mode is False, uu~ and u~u will be called independently.
    # Setting sa_symmetry generates only one of the two matrix elements.
    sa_symmetry = False

    # Ask madgraph to check if the directory already exists and propose to
    # the user to remove it first if this is the case.
    check = True

    # One of ['Template', None, 'dir']:
    #   'Template' -> madgraph will call copy_template
    #   'dir'      -> madgraph will just create an empty directory
    #   None       -> madgraph does nothing for initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        return

    def copy_template(self, model):
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # For ungrouped output the call is
        # generate_subprocess_directory(matrix_element, helicity_model, me_number).
        # Return an integer stating the number of calls to helicity routines.
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # Call-form raise (the old `raise Exception, "..."` statement syntax
        # is Python-2-only); the unreachable `return` after it was dropped.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Exporter writing a set of matrix elements out in Fortran (v4) format."""

    # Baseline options for the export; instances may override them via `opt`.
    default_opt = {'clean': False,
                   'complex_mass': False,
                   'export_format': 'madevent',
                   'mp': False,
                   'v5_model': True
                   }
    grouped_mode = False

    def __init__(self, dir_path="", opt=None):
        """Set up the exporter with the target directory information."""
        self.mgme_dir = MG5DIR
        self.dir_path = dir_path
        self.model = None

        # Start from the class defaults and layer any user options on top.
        options = dict(self.default_opt)
        if opt:
            options.update(opt)
        self.opt = options

        # Placeholder used to pass information to the run_interface.
        self.proc_characteristic = banner_mod.ProcCharacteristic()
157 158 159 #=========================================================================== 160 # process exporter fortran switch between group and not grouped 161 #===========================================================================
162 - def export_processes(self, matrix_elements, fortran_model):
163 """Make the switch between grouped and not grouped output""" 164 165 calls = 0 166 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 167 for (group_number, me_group) in enumerate(matrix_elements): 168 calls = calls + self.generate_subprocess_directory(\ 169 me_group, fortran_model, group_number) 170 else: 171 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 172 calls = calls + self.generate_subprocess_directory(\ 173 me, fortran_model, me_number) 174 175 return calls
176 177 178 #=========================================================================== 179 # create the run_card 180 #===========================================================================
181 - def create_run_card(self, matrix_elements, history):
182 """ """ 183 184 185 # bypass this for the loop-check 186 import madgraph.loop.loop_helas_objects as loop_helas_objects 187 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 188 matrix_elements = None 189 190 run_card = banner_mod.RunCard() 191 192 193 default=True 194 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 195 processes = [me.get('processes') for megroup in matrix_elements 196 for me in megroup['matrix_elements']] 197 elif matrix_elements: 198 processes = [me.get('processes') 199 for me in matrix_elements['matrix_elements']] 200 else: 201 default =False 202 203 if default: 204 run_card.create_default_for_process(self.proc_characteristic, 205 history, 206 processes) 207 208 209 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 210 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
211 212 213 #=========================================================================== 214 # copy the Template in a new directory. 215 #===========================================================================
216 - def copy_template(self, model):
217 """create the directory run_name as a copy of the MadEvent 218 Template, and clean the directory 219 """ 220 221 #First copy the full template tree if dir_path doesn't exit 222 if not os.path.isdir(self.dir_path): 223 assert self.mgme_dir, \ 224 "No valid MG_ME path given for MG4 run directory creation." 225 logger.info('initialize a new directory: %s' % \ 226 os.path.basename(self.dir_path)) 227 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 228 self.dir_path, True) 229 # distutils.dir_util.copy_tree since dir_path already exists 230 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 231 self.dir_path) 232 # copy plot_card 233 for card in ['plot_card']: 234 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 235 try: 236 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 237 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 238 except IOError: 239 logger.warning("Failed to copy " + card + ".dat to default") 240 elif os.getcwd() == os.path.realpath(self.dir_path): 241 logger.info('working in local directory: %s' % \ 242 os.path.realpath(self.dir_path)) 243 # distutils.dir_util.copy_tree since dir_path already exists 244 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 245 self.dir_path) 246 # for name in misc.glob('Template/LO/*', self.mgme_dir): 247 # name = os.path.basename(name) 248 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 249 # if os.path.isfile(filename): 250 # files.cp(filename, pjoin(self.dir_path,name)) 251 # elif os.path.isdir(filename): 252 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 253 # distutils.dir_util.copy_tree since dir_path already exists 254 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 255 self.dir_path) 256 # Copy plot_card 257 for card in ['plot_card']: 258 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 259 try: 260 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 261 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 262 
except IOError: 263 logger.warning("Failed to copy " + card + ".dat to default") 264 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 265 assert self.mgme_dir, \ 266 "No valid MG_ME path given for MG4 run directory creation." 267 try: 268 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 269 except IOError: 270 MG5_version = misc.get_pkg_info() 271 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 272 273 #Ensure that the Template is clean 274 if self.opt['clean']: 275 logger.info('remove old information in %s' % \ 276 os.path.basename(self.dir_path)) 277 if os.environ.has_key('MADGRAPH_BASE'): 278 misc.call([pjoin('bin', 'internal', 'clean_template'), 279 '--web'], cwd=self.dir_path) 280 else: 281 try: 282 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 283 cwd=self.dir_path) 284 except Exception, why: 285 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 286 % (os.path.basename(self.dir_path),why)) 287 288 #Write version info 289 MG_version = misc.get_pkg_info() 290 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 291 MG_version['version']) 292 293 # add the makefile in Source directory 294 filename = pjoin(self.dir_path,'Source','makefile') 295 self.write_source_makefile(writers.FileWriter(filename)) 296 297 # add the DiscreteSampler information 298 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 299 pjoin(self.dir_path, 'Source')) 300 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 301 pjoin(self.dir_path, 'Source')) 302 303 # We need to create the correct open_data for the pdf 304 self.write_pdf_opendata()
305 306 307 #=========================================================================== 308 # Call MadAnalysis5 to generate the default cards for this process 309 #===========================================================================
310 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 311 ma5_path, output_dir, levels = ['parton','hadron']):
312 """ Call MA5 so that it writes default cards for both parton and 313 post-shower levels, tailored for this particular process.""" 314 315 if len(levels)==0: 316 return 317 318 logger.info('Generating MadAnalysis5 default cards tailored to this process') 319 try: 320 MA5_interpreter = common_run_interface.CommonRunCmd.\ 321 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 322 except (Exception, SystemExit) as e: 323 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.') 324 return 325 if MA5_interpreter is None: 326 return 327 328 MA5_main = MA5_interpreter.main 329 330 for lvl in ['parton','hadron']: 331 if lvl in levels: 332 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 333 try: 334 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 335 except (Exception, SystemExit) as e: 336 # keep the default card (skip only) 337 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 338 ' default analysis card for this process.') 339 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 340 error=StringIO() 341 traceback.print_exc(file=error) 342 logger.debug('MadAnalysis5 error was:') 343 logger.debug('-'*60) 344 logger.debug(error.getvalue()[:-1]) 345 logger.debug('-'*60) 346 else: 347 open(card_to_generate,'w').write(text)
348 349 #=========================================================================== 350 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 351 #===========================================================================
352 - def write_procdef_mg5(self, file_pos, modelname, process_str):
353 """ write an equivalent of the MG4 proc_card in order that all the Madevent 354 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 355 356 proc_card_template = template_files.mg4_proc_card.mg4_template 357 process_template = template_files.mg4_proc_card.process_template 358 process_text = '' 359 coupling = '' 360 new_process_content = [] 361 362 363 # First find the coupling and suppress the coupling from process_str 364 #But first ensure that coupling are define whithout spaces: 365 process_str = process_str.replace(' =', '=') 366 process_str = process_str.replace('= ', '=') 367 process_str = process_str.replace(',',' , ') 368 #now loop on the element and treat all the coupling 369 for info in process_str.split(): 370 if '=' in info: 371 coupling += info + '\n' 372 else: 373 new_process_content.append(info) 374 # Recombine the process_str (which is the input process_str without coupling 375 #info) 376 process_str = ' '.join(new_process_content) 377 378 #format the SubProcess 379 replace_dict = {'process': process_str, 380 'coupling': coupling} 381 process_text += process_template.substitute(replace_dict) 382 383 replace_dict = {'process': process_text, 384 'model': modelname, 385 'multiparticle':''} 386 text = proc_card_template.substitute(replace_dict) 387 388 if file_pos: 389 ff = open(file_pos, 'w') 390 ff.write(text) 391 ff.close() 392 else: 393 return replace_dict
394 395
396 - def pass_information_from_cmd(self, cmd):
397 """Pass information for MA5""" 398 399 self.proc_defs = cmd._curr_proc_defs
400 401 #=========================================================================== 402 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 403 #===========================================================================
404 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
405 """Function to finalize v4 directory, for inheritance.""" 406 407 self.create_run_card(matrix_elements, history) 408 self.create_MA5_cards(matrix_elements, history)
409
410 - def create_MA5_cards(self,matrix_elements,history):
411 """ A wrapper around the creation of the MA5 cards so that it can be 412 bypassed by daughter classes (i.e. in standalone).""" 413 if 'madanalysis5_path' in self.opt and not \ 414 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 415 processes = None 416 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 417 processes = [me.get('processes') for megroup in matrix_elements 418 for me in megroup['matrix_elements']] 419 elif matrix_elements: 420 processes = [me.get('processes') 421 for me in matrix_elements['matrix_elements']] 422 423 self.create_default_madanalysis5_cards( 424 history, self.proc_defs, processes, 425 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 426 levels = ['hadron','parton']) 427 428 for level in ['hadron','parton']: 429 # Copying these cards turn on the use of MadAnalysis5 by default. 430 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 431 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 432 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
433 434 #=========================================================================== 435 # Create the proc_characteristic file passing information to the run_interface 436 #===========================================================================
437 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
438 439 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
440 441 #=========================================================================== 442 # write_matrix_element_v4 443 #===========================================================================
444 - def write_matrix_element_v4(self):
445 """Function to write a matrix.f file, for inheritance. 446 """ 447 pass
448 449 #=========================================================================== 450 # write_pdf_opendata 451 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure

        Two template files are specialised:
          Source/PDF/opendata.f       (internal PDF tables)
          Source/PDF/pdfwrap_lhapdf.f (LHAPDF sets)
        When opt["cluster_local_path"] is set, extra Fortran lookup paths
        pointing at the node-local repository are spliced in; otherwise the
        placeholders are left empty.
        """

        # Fortran snippet trying the cluster-local locations in turn; each
        # failed open falls through (ERR=) to the next candidate path.
        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
              """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        # FortranWriter re-formats the substituted template as valid Fortran.
        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            to_add="""
              LHAPath='%(path)s/PDFsets'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='./PDFsets'
              """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)

        return
509 510 511 512 #=========================================================================== 513 # write_maxparticles_file 514 #===========================================================================
515 - def write_maxparticles_file(self, writer, matrix_elements):
516 """Write the maxparticles.inc file for MadEvent""" 517 518 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 519 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 520 matrix_elements.get('matrix_elements')]) 521 else: 522 maxparticles = max([me.get_nexternal_ninitial()[0] \ 523 for me in matrix_elements]) 524 525 lines = "integer max_particles\n" 526 lines += "parameter(max_particles=%d)" % maxparticles 527 528 # Write the file 529 writer.writelines(lines) 530 531 return True
532 533 534 #=========================================================================== 535 # export the model 536 #===========================================================================
537 - def export_model_files(self, model_path):
538 """Configure the files/link of the process according to the model""" 539 540 # Import the model 541 for file in os.listdir(model_path): 542 if os.path.isfile(pjoin(model_path, file)): 543 shutil.copy2(pjoin(model_path, file), \ 544 pjoin(self.dir_path, 'Source', 'MODEL'))
545 546 560 568 569 570 #=========================================================================== 571 # export the helas routine 572 #===========================================================================
573 - def export_helas(self, helas_path):
574 """Configure the files/link of the process according to the model""" 575 576 # Import helas routine 577 for filename in os.listdir(helas_path): 578 filepos = pjoin(helas_path, filename) 579 if os.path.isfile(filepos): 580 if filepos.endswith('Makefile.template'): 581 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 582 elif filepos.endswith('Makefile'): 583 pass 584 else: 585 cp(filepos, self.dir_path + '/Source/DHELAS')
586 # following lines do the same but whithout symbolic link 587 # 588 #def export_helas(mgme_dir, dir_path): 589 # 590 # # Copy the HELAS directory 591 # helas_dir = pjoin(mgme_dir, 'HELAS') 592 # for filename in os.listdir(helas_dir): 593 # if os.path.isfile(pjoin(helas_dir, filename)): 594 # shutil.copy2(pjoin(helas_dir, filename), 595 # pjoin(dir_path, 'Source', 'DHELAS')) 596 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 597 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 598 # 599 600 #=========================================================================== 601 # generate_subprocess_directory 602 #===========================================================================
603 - def generate_subprocess_directory(self, matrix_element, 604 fortran_model, 605 me_number):
606 """Routine to generate a subprocess directory (for inheritance)""" 607 608 pass
609 610 #=========================================================================== 611 # get_source_libraries_list 612 #===========================================================================
613 - def get_source_libraries_list(self):
614 """ Returns the list of libraries to be compiling when compiling the 615 SOURCE directory. It is different for loop_induced processes and 616 also depends on the value of the 'output_dependencies' option""" 617 618 return ['$(LIBDIR)libdhelas.$(libext)', 619 '$(LIBDIR)libpdf.$(libext)', 620 '$(LIBDIR)libmodel.$(libext)', 621 '$(LIBDIR)libcernlib.$(libext)', 622 '$(LIBDIR)libbias.$(libext)']
623 624 #=========================================================================== 625 # write_source_makefile 626 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source directory makefile for MadEvent.

        (The previous docstring wrongly said "nexternal.inc".)  Fills the
        madevent_makefile_source template with the library list and the
        model-dependent param_card rules; the MSSM needs an extra conversion
        step through MODEL/MG5_param.dat.
        """

        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list())
        # The \n / \t escapes inside these strings become real makefile
        # newlines and the mandatory tab-indented recipe lines.
        if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
        else:
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''
        text = open(path).read() % {'libraries': set_of_lib, 'model':model_line}
        writer.write(text)

        return True
643 644 #=========================================================================== 645 # write_nexternal_madspin 646 #===========================================================================
647 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
648 """Write the nexternal_prod.inc file for madspin""" 649 650 replace_dict = {} 651 652 replace_dict['nexternal'] = nexternal 653 replace_dict['ninitial'] = ninitial 654 655 file = """ \ 656 integer nexternal_prod 657 parameter (nexternal_prod=%(nexternal)d) 658 integer nincoming_prod 659 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 660 661 # Write the file 662 if writer: 663 writer.writelines(file) 664 return True 665 else: 666 return replace_dict
667 668 #=========================================================================== 669 # write_helamp_madspin 670 #===========================================================================
671 - def write_helamp_madspin(self, writer, ncomb):
672 """Write the helamp.inc file for madspin""" 673 674 replace_dict = {} 675 676 replace_dict['ncomb'] = ncomb 677 678 file = """ \ 679 integer ncomb1 680 parameter (ncomb1=%(ncomb)d) 681 double precision helamp(ncomb1) 682 common /to_helamp/helamp """ % replace_dict 683 684 # Write the file 685 if writer: 686 writer.writelines(file) 687 return True 688 else: 689 return replace_dict
690 691 692 693 #=========================================================================== 694 # write_nexternal_file 695 #===========================================================================
696 - def write_nexternal_file(self, writer, nexternal, ninitial):
697 """Write the nexternal.inc file for MG4""" 698 699 replace_dict = {} 700 701 replace_dict['nexternal'] = nexternal 702 replace_dict['ninitial'] = ninitial 703 704 file = """ \ 705 integer nexternal 706 parameter (nexternal=%(nexternal)d) 707 integer nincoming 708 parameter (nincoming=%(ninitial)d)""" % replace_dict 709 710 # Write the file 711 if writer: 712 writer.writelines(file) 713 return True 714 else: 715 return replace_dict
716 #=========================================================================== 717 # write_pmass_file 718 #===========================================================================
719 - def write_pmass_file(self, writer, matrix_element):
720 """Write the pmass.inc file for MG4""" 721 722 model = matrix_element.get('processes')[0].get('model') 723 724 lines = [] 725 for wf in matrix_element.get_external_wavefunctions(): 726 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 727 if mass.lower() != "zero": 728 mass = "abs(%s)" % mass 729 730 lines.append("pmass(%d)=%s" % \ 731 (wf.get('number_external'), mass)) 732 733 # Write the file 734 writer.writelines(lines) 735 736 return True
737 738 #=========================================================================== 739 # write_ngraphs_file 740 #===========================================================================
741 - def write_ngraphs_file(self, writer, nconfigs):
742 """Write the ngraphs.inc file for MG4. Needs input from 743 write_configs_file.""" 744 745 file = " integer n_max_cg\n" 746 file = file + "parameter (n_max_cg=%d)" % nconfigs 747 748 # Write the file 749 writer.writelines(file) 750 751 return True
752 753 #=========================================================================== 754 # write_leshouche_file 755 #===========================================================================
756 - def write_leshouche_file(self, writer, matrix_element):
757 """Write the leshouche.inc file for MG4""" 758 759 # Write the file 760 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 761 762 return True
763 764 #=========================================================================== 765 # get_leshouche_lines 766 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Return the Fortran DATA lines (IDUP/MOTHUP/ICOLUP) making up the
        leshouche.inc file for MG4, for subprocess number `numproc`.

        Note: "%2r"/"%3r" apply repr() right-aligned in a field of 2/3
        characters, which for integers gives right-padded decimal text.
        """

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG ids of all external legs for this (sub)process.
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            # Mother information is written only once (first process of the
            # first numproc): initial legs have no mother (0), final legs
            # point at mothers 1 and 2.
            if iproc == 0 and numproc == 0:
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                             (i, nexternal,
                              ",".join([ "%3r" % 0 ] * ninitial + \
                                       [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                 (i, numproc+1,nexternal,
                                  ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    # mapping leg number -> color rep, with the sign flipped
                    # for initial-state legs ((-1)**(1+state)).
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly: one ICOLUP pair (color,
                    # anticolor) per color flow.
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                     (i + 1, cf_i + 1, numproc+1, nexternal,
                                      ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                for l in legs])))

        return lines
816 817 818 819 820 #=========================================================================== 821 # write_maxamps_file 822 #===========================================================================
823 - def write_maxamps_file(self, writer, maxamps, maxflows, 824 maxproc,maxsproc):
825 """Write the maxamps.inc file for MG4.""" 826 827 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 828 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 829 (maxamps, maxflows) 830 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 831 (maxproc, maxsproc) 832 833 # Write the file 834 writer.writelines(file) 835 836 return True
837 838 839 #=========================================================================== 840 # Routines to output UFO models in MG4 format 841 #=========================================================================== 842
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO)

        Writes Source/MODEL and Source/DHELAS, then links them into the
        process directory.  NOTE(review): the [] defaults are only read,
        never mutated, so the mutable-default pitfall is benign here.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']

        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        # NOTE(review): self.opt is a plain dict; `+` here relies on
        # ProcCharacteristic supporting addition with a dict — confirm.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # Use the loop-capable helas functions when any wanted lorentz
        # structure carries an 'L' (loop) tag.
        if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
900 901 902 #=========================================================================== 903 # Helper functions 904 #===========================================================================
def modify_grouping(self, matrix_element):
    """Hook allowing subclasses to modify the grouping (if grouping is in
    place).

    Returns a two-value tuple:
      - True/False: whether the matrix_element was modified
      - the new (or old) matrix element

    This base implementation never modifies anything; subclasses override
    it to regroup. (A leftover `misc.sprint` debug trace was removed here:
    it printed on every call without contributing to the result.)
    """
    return False, matrix_element
912 913 #=========================================================================== 914 # Helper functions 915 #===========================================================================
def get_mg5_info_lines(self):
    """Return info lines for MG5, suitable to place at beginning of
    Fortran files"""

    pkg_info = misc.get_pkg_info()

    # Common trailer shared by both the versioned and unversioned headers.
    footer = "# By the MadGraph5_aMC@NLO Development Team\n" + \
             "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"

    if pkg_info and 'version' in pkg_info and 'date' in pkg_info:
        header = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \
                 (pkg_info['version'], pkg_info['date'])
    else:
        # Package info unavailable: emit the generic banner.
        header = "# Generated by MadGraph5_aMC@NLO\n"

    return header + footer
934
def get_process_info_lines(self, matrix_element):
    """Return info lines describing the processes for this matrix element"""

    comment_lines = []
    for proc in matrix_element.get('processes'):
        # Each process becomes a Fortran comment; embedded newlines get a
        # continuation-style "C * " prefix.
        comment_lines.append("C " + proc.nice_string().replace('\n', '\nC * '))
    return "\n".join(comment_lines)
940 941
def get_helicity_lines(self, matrix_element, array_name='NHEL'):
    """Return the Helicity matrix definition lines for this matrix element"""

    lines = []
    for row, helicities in enumerate(matrix_element.get_helicity_matrix(), 1):
        # Values are: row index, number of entries, then the helicities.
        values = [row, len(helicities)]
        values.extend(helicities)
        template = ("DATA (" + array_name + "(I,%4r),I=1,%d) /" +
                    ",".join(['%2r'] * len(helicities)) + "/")
        lines.append(template % tuple(values))

    return "\n".join(lines)
956
def get_ic_line(self, matrix_element):
    """Return the IC definition line coming after helicities, required by
    switchmom in madevent"""

    nexternal = matrix_element.get_nexternal_ninitial()[0]
    # IC is simply the identity permutation 1..nexternal.
    indices = ",".join(str(k) for k in range(1, nexternal + 1))
    return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, indices)
967
968 - def set_chosen_SO_index(self, process, squared_orders):
969 """ From the squared order constraints set by the user, this function 970 finds what indices of the squared_orders list the user intends to pick. 971 It returns this as a string of comma-separated successive '.true.' or 972 '.false.' for each index.""" 973 974 user_squared_orders = process.get('squared_orders') 975 split_orders = process.get('split_orders') 976 977 if len(user_squared_orders)==0: 978 return ','.join(['.true.']*len(squared_orders)) 979 980 res = [] 981 for sqsos in squared_orders: 982 is_a_match = True 983 for user_sqso, value in user_squared_orders.items(): 984 if (process.get_squared_order_type(user_sqso) =='==' and \ 985 value!=sqsos[split_orders.index(user_sqso)]) or \ 986 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 987 value<sqsos[split_orders.index(user_sqso)]) or \ 988 (process.get_squared_order_type(user_sqso) == '>' and \ 989 value>=sqsos[split_orders.index(user_sqso)]): 990 is_a_match = False 991 break 992 res.append('.true.' if is_a_match else '.false.') 993 994 return ','.join(res)
995
def get_split_orders_lines(self, orders, array_name, n=5):
    """ Return the split orders definition as defined in the list orders and
    for the name of the array 'array_name'. Split rows in chunks of size n."""

    lines = []
    for row, order in enumerate(orders):
        # Emit one DATA statement per chunk of at most n values.
        for start in xrange(0, len(order), n):
            stop = min(start + n, len(order))
            body = ','.join(["%5r" % v for v in order[start:stop]])
            lines.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" %
                         (array_name, row + 1, start + 1, stop, body))
    return lines
1007
def format_integer_list(self, list, name, n=5):
    """ Return an initialization of the python list in argument following
    the fortran syntax using the data keyword assignment, filling an array
    of name 'name'. It splits rows in chunks of size n.

    (The parameter name 'list' shadows the builtin but is kept for
    backward compatibility with keyword callers.)"""

    lines = []
    total = len(list)
    for start in xrange(0, total, n):
        stop = min(start + n, total)
        body = ','.join(["%5r" % v for v in list[start:stop]])
        lines.append("DATA (%s(i),i=%3r,%3r) /%s/" % (name, start + 1, stop, body))
    return lines
1019
def get_color_data_lines(self, matrix_element, n=6):
    """Return the color matrix definition lines for this matrix element. Split
    rows in chunks of size n.

    Emits, per color-matrix row: one 'DATA Denom(...)' line with the row's
    common denominator, the row numerators in chunks of n as
    'DATA (CF(i,...),...)' lines, and a Fortran comment showing the repr of
    the corresponding color string.
    """

    if not matrix_element.get('color_matrix'):
        # Colorless process: trivial 1x1 color matrix.
        return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
    else:
        ret_list = []
        my_cs = color.ColorString()
        for index, denominator in \
            enumerate(matrix_element.get('color_matrix').\
                                             get_line_denominators()):
            # First write the common denominator for this color matrix line
            ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
            # Then write the numerators for the matrix elements
            num_list = matrix_element.get('color_matrix').\
                                        get_line_numerators(index, denominator)

            for k in xrange(0, len(num_list), n):
                ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                (index + 1, k + 1, min(k + n, len(num_list)),
                                 ','.join(["%5r" % i for i in num_list[k:k + n]])))
            # Comment line documenting which color structure this row is.
            # NOTE(review): assumes sorted(color_basis.keys()) order matches
            # the color-matrix row order -- confirm against ColorMatrix.
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
            ret_list.append("C %s" % repr(my_cs))
        return ret_list
1045 1046
def get_den_factor_line(self, matrix_element):
    """Return the denominator factor line for this matrix element"""

    # IDEN is the overall averaging/symmetry denominator factor.
    iden = matrix_element.get_denominator_factor()
    return "DATA IDEN/%2r/" % iden
1052
def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
    """Return the ICOLAMP matrix, showing which JAMPs contribute to
    which configs (diagrams).

    mapconfigs maps each config slot to a diagram number (0 = unused slot);
    num_matrix_element is the matrix-element index used in the array name.
    """

    ret_list = []

    booldict = {False: ".false.", True: ".true."}

    if not matrix_element.get('color_basis'):
        # No color, so only one color factor. Simply write a ".true."
        # for each config (i.e., each diagram with only 3 particle
        # vertices
        configs = len(mapconfigs)
        ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                        (num_matrix_element, configs,
                         ','.join([".true." for i in range(configs)])))
        return ret_list

    # There is a color basis - create a list showing which JAMPs have
    # contributions to which configs

    # Only want to include leading color flows, so find max_Nc
    color_basis = matrix_element.get('color_basis')

    # We don't want to include the power of Nc's which come from the potential
    # loop color trace (i.e. in the case of a closed fermion loop for example)
    # so we subtract it here when computing max_Nc
    max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                      color_basis.values()],[]))

    # Crate dictionary between diagram number and JAMP number
    diag_jamp = {}
    for ijamp, col_basis_elem in \
            enumerate(sorted(matrix_element.get('color_basis').keys())):
        for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
            # Only use color flows with Nc == max_Nc. However, notice that
            # we don't want to include the Nc power coming from the loop
            # in this counting.
            if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                diag_num = diag_tuple[0] + 1
                # Add this JAMP number to this diag_num
                diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                      [ijamp+1]

    # NOTE(review): 'ijamp' is the loop variable left over from the loop
    # above, so this assumes a non-empty color basis (guaranteed by the
    # early return for the colorless case).
    colamps = ijamp + 1
    for iconfig, num_diag in enumerate(mapconfigs):
        if num_diag == 0:
            continue

        # List of True or False
        # NOTE(review): diag_jamp[num_diag] raises KeyError if the mapped
        # diagram only appears in subleading-color flows -- presumably every
        # config diagram carries a leading-color flow; confirm.
        bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
        # Add line
        ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
            (iconfig+1, num_matrix_element, colamps,
             ','.join(["%s" % booldict[b] for b in \
                       bool_list])))

    return ret_list
1111
def get_amp2_lines(self, matrix_element, config_map = []):
    """Return the amp2(i) = sum(amp for diag(i))^2 lines

    With a non-empty config_map, diagrams sharing a topology (same config
    number) are summed coherently into one AMP2; a config_map entry of 0
    excludes the diagram. Without a config_map, one AMP2 per diagram is
    written, skipping diagrams containing vertices with more legs than the
    minimum found (i.e. multi-particle vertices).
    The default config_map is never mutated (read-only).
    """

    # NOTE(review): nexternal/ninitial are computed but unused below.
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    # Get minimum legs in a vertex
    vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    ret_lines = []
    if config_map:
        # In this case, we need to sum up all amplitudes that have
        # identical topologies, as given by the config_map (which
        # gives the topology/config for each of the diagrams
        diagrams = matrix_element.get('diagrams')
        # Combine the diagrams with identical topologies
        config_to_diag_dict = {}
        for idiag, diag in enumerate(matrix_element.get('diagrams')):
            if config_map[idiag] == 0:
                continue
            try:
                config_to_diag_dict[config_map[idiag]].append(idiag)
            except KeyError:
                config_to_diag_dict[config_map[idiag]] = [idiag]
        # Write out the AMP2s summing squares of amplitudes belonging
        # to eiher the same diagram or different diagrams with
        # identical propagator properties.  Note that we need to use
        # AMP2 number corresponding to the first diagram number used
        # for that AMP2.
        for config in sorted(config_to_diag_dict.keys()):

            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                   {"num": (config_to_diag_dict[config][0] + 1)}

            amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                            sum([diagrams[idiag].get('amplitudes') for \
                                 idiag in config_to_diag_dict[config]], [])])

            # Not using \sum |M|^2 anymore since this creates troubles
            # when ckm is not diagonal due to the JIM mechanism.
            # Parenthesize the coherent sum before conjugation.
            if '+' in amp:
                line += "(%s)*dconjg(%s)" % (amp, amp)
            else:
                line += "%s*dconjg(%s)" % (amp, amp)
            ret_lines.append(line)
    else:
        for idiag, diag in enumerate(matrix_element.get('diagrams')):
            # Ignore any diagrams with 4-particle vertices.
            if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                continue
            # Now write out the expression for AMP2, meaning the sum of
            # squared amplitudes belonging to the same diagram
            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
            line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                              {"num": a.get('number')} for a in \
                              diag.get('amplitudes')])
            ret_lines.append(line)

    return ret_lines
1171 1172 #=========================================================================== 1173 # Returns the data statements initializing the coeffictients for the JAMP 1174 # decomposition. It is used when the JAMP initialization is decided to be 1175 # done through big arrays containing the projection coefficients. 1176 #===========================================================================
def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                   n=50, Nc_value=3):
    """This functions return the lines defining the DATA statement setting
    the coefficients building the JAMPS out of the AMPS. Split rows in
    bunches of size n.
    One can specify the color_basis from which the color amplitudes originates
    so that there are commentaries telling what color structure each JAMP
    corresponds to.

    Each coefficient entry is ((ff_number, frac, is_imaginary, Nc_power),
    amp_number); imaginary contributions are encoded by negating the
    amplitude number in AMPNUMBERS.
    """

    if(not isinstance(color_amplitudes,list) or
       not (color_amplitudes and isinstance(color_amplitudes[0],list))):
        raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

    res_list = []
    my_cs = color.ColorString()
    for index, coeff_list in enumerate(color_amplitudes):
        # Create the list of the complete numerical coefficient.
        coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                    (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                    coefficient in coeff_list]
        # Create the list of the numbers of the contributing amplitudes.
        # Mutliply by -1 for those which have an imaginary coefficient.
        ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                         for coefficient in coeff_list]
        # Find the common denominator.
        # NOTE(review): Python-2 only -- relies on the builtin reduce and on
        # fractions.gcd accepting Fraction arguments (the gcd of Fractions
        # has the lcm of the denominators as its denominator).
        commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
        # Scale every coefficient to an integer numerator over commondenom.
        num_list=[(coefficient*commondenom).numerator \
                  for coefficient in coefs_list]
        res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
            index+1,len(num_list)))
        res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
            index+1,commondenom))
        if color_basis:
            # Comment line documenting the color structure of this JAMP.
            my_cs.from_immutable(sorted(color_basis.keys())[index])
            res_list.append("C %s" % repr(my_cs))
        for k in xrange(0, len(num_list), n):
            res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                 ','.join(["%6r" % i for i in num_list[k:k + n]])))
            res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
        pass
    return res_list
1221 1222
def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                               split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary.
    The split_order_amps specifies the group of amplitudes sharing the same
    amplitude orders which should be put in together in a given set of JAMPS.
    The split_order_amps is supposed to have the format of the second output
    of the function get_split_orders_mapping function in helas_objects.py.
    The split_order_names is optional (it should correspond to the process
    'split_orders' attribute) and only present to provide comments in the
    JAMP definitions in the code."""

    # Let the user call get_JAMP_lines_split_order directly from a
    # NOTE: the '%' below binds only to the second string literal, which
    # interpolates split_order_amps into the message; the leading '%s' is
    # left for the later error_msg % '<argname>' substitutions.
    error_msg="Malformed '%s' argument passed to the "+\
              "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error, error_msg%'col_amps'
    else:
        raise MadGraph5Error, error_msg%'col_amps'

    # Verify the sanity of the split_order_amps and split_order_names args
    if isinstance(split_order_amps,list):
        for elem in split_order_amps:
            if len(elem)!=2:
                raise MadGraph5Error, error_msg%'split_order_amps'
            # Check the first element of the two lists to make sure they are
            # integers, although in principle they should all be integers.
            if not isinstance(elem[0],tuple) or \
               not isinstance(elem[1],tuple) or \
               not isinstance(elem[0][0],int) or \
               not isinstance(elem[1][0],int):
                raise MadGraph5Error, error_msg%'split_order_amps'
    else:
        raise MadGraph5Error, error_msg%'split_order_amps'

    if not split_order_names is None:
        if isinstance(split_order_names,list):
            # Should specify the same number of names as there are elements
            # in the key of the split_order_amps.
            if len(split_order_names)!=len(split_order_amps[0][0]):
                raise MadGraph5Error, error_msg%'split_order_names'
            # Check the first element of the list to be a string
            if not isinstance(split_order_names[0],str):
                raise MadGraph5Error, error_msg%'split_order_names'
        else:
            raise MadGraph5Error, error_msg%'split_order_names'

    # Now scan all contributing orders to be individually computed and
    # construct the list of color_amplitudes for JAMP to be constructed
    # accordingly.
    res_list=[]
    for i, amp_order in enumerate(split_order_amps):
        # Keep, for each JAMP, only the amplitudes listed for this order.
        col_amps_order = []
        for jamp in color_amplitudes:
            col_amps_order.append(filter(lambda col_amp:
                                         col_amp[1] in amp_order[1],jamp))
        if split_order_names:
            res_list.append('C JAMPs contributing to orders '+' '.join(
                ['%s=%i'%order for order in zip(split_order_names,
                                                amp_order[0])]))
        if self.opt['export_format'] in ['madloop_matchbox']:
            # NOTE(review): base get_JAMP_lines has no JAMP_formatLC kwarg;
            # presumably the matchbox exporter overrides it -- confirm.
            res_list.extend(self.get_JAMP_lines(col_amps_order,
                JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
        else:
            res_list.extend(self.get_JAMP_lines(col_amps_order,
                JAMP_format="JAMP(%s,{0})".format(str(i+1))))

    return res_list
1297 1298
def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                   split=-1):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary,
    Jamp_formatLC should be define to allow to add LeadingColor computation
    (usefull for MatchBox)
    The split argument defines how the JAMP lines should be split in order
    not to be too long.

    NOTE(review): the docstring mentions Jamp_formatLC but this signature
    does not accept it -- presumably handled by a subclass override; confirm.
    """

    # Let the user call get_JAMP_lines directly from a MatrixElement or from
    # the color amplitudes lists.
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
    else:
        raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


    res_list = []
    for i, coeff_list in enumerate(color_amplitudes):
        # It might happen that coeff_list is empty if this function was
        # called from get_JAMP_lines_split_order (i.e. if some color flow
        # does not contribute at all for a given order).
        # In this case we simply set it to 0.
        if coeff_list==[]:
            res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
            continue
        # Break the JAMP definition into 'n=split' pieces to avoid having
        # arbitrarly long lines.
        first=True
        # split<=0 means "no splitting": take all coefficients at once.
        n = (len(coeff_list)+1 if split<=0 else split)
        while coeff_list!=[]:
            coefs=coeff_list[:n]
            coeff_list=coeff_list[n:]
            # Continuation chunks accumulate: JAMP(i)=JAMP(i)+<terms>.
            res = ((JAMP_format+"=") % str(i + 1)) + \
                  ((JAMP_format % str(i + 1)) if not first and split>0 else '')

            first=False
            # Optimization: if all contributions to that color basis element have
            # the same coefficient (up to a sign), put it in front
            list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
            common_factor = False
            diff_fracs = list(set(list_fracs))
            if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                common_factor = True
                global_factor = diff_fracs[0]
                res = res + '%s(' % self.coeff(1, global_factor, False, 0)

            # loop for JAMP
            for (coefficient, amp_number) in coefs:
                if not coefficient:
                    continue
                if common_factor:
                    # Factor pulled out: only the sign of the fraction stays.
                    res = (res + "%s" + AMP_format) % \
                          (self.coeff(coefficient[0],
                                      coefficient[1] / abs(coefficient[1]),
                                      coefficient[2],
                                      coefficient[3]),
                           str(amp_number))
                else:
                    res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                  coefficient[1],
                                                                  coefficient[2],
                                                                  coefficient[3]),
                                                       str(amp_number))

            if common_factor:
                res = res + ')'

            res_list.append(res)

    return res_list
1375
def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
    """Generate the PDF lines for the auto_dsig.f file

    Returns a 3-tuple of Fortran code fragments (each without the trailing
    newline): variable declarations, DATA initializations, and the PDF
    evaluation/summation code. For decays (ninitial == 1) the PDFs are
    trivially 1d0.
    """

    processes = matrix_element.get('processes')
    model = processes[0].get('model')

    pdf_definition_lines = ""
    pdf_data_lines = ""
    pdf_lines = ""

    if ninitial == 1:
        # Decay process: no PDF convolution, each subprocess weight is 1.
        pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
        for i, proc in enumerate(processes):
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
            pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
    else:
        # Pick out all initial state particles for the two beams
        initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                           p in processes]))),
                          sorted(list(set([p.get_initial_pdg(2) for \
                                           p in processes])))]

        # Prepare all variable names
        pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                          sum(initial_states,[])])
        for key,val in pdf_codes.items():
            # Sanitize particle names into valid Fortran identifiers.
            pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

        # Set conversion from PDG code to number used in PDF calls
        pdgtopdf = {21: 0, 22: 7}

        # Fill in missing entries of pdgtopdf
        for pdg in sum(initial_states,[]):
            if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                pdgtopdf[pdg] = pdg
            elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                # If any particle has pdg code 7, we need to use something else
                pdgtopdf[pdg] = 6000000 + pdg

        # Get PDF variable declarations for all initial states
        for i in [0,1]:
            pdf_definition_lines += "DOUBLE PRECISION " + \
                                    ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                              for pdg in \
                                              initial_states[i]]) + \
                                              "\n"

        # Get PDF data lines for all initial states
        for i in [0,1]:
            pdf_data_lines += "DATA " + \
                              ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                        for pdg in initial_states[i]]) + \
                              "/%d*1D0/" % len(initial_states[i]) + \
                              "\n"

        # Get PDF lines for all different initial states
        for i, init_states in enumerate(initial_states):
            # subproc_group mode indexes beams through the IB() mapping.
            if subproc_group:
                pdf_lines = pdf_lines + \
                            "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                            % (i + 1, i + 1)
            else:
                pdf_lines = pdf_lines + \
                            "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                            % (i + 1, i + 1)

            for initial_state in init_states:
                if initial_state in pdf_codes.keys():
                    if subproc_group:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
                                     "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                    (pdf_codes[initial_state],
                                     i + 1, i + 1, pdgtopdf[initial_state],
                                     i + 1, i + 1)
                    else:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
                                     "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                    (pdf_codes[initial_state],
                                     i + 1, i + 1, pdgtopdf[initial_state],
                                     i + 1, i + 1)
            pdf_lines = pdf_lines + "ENDIF\n"

        # Add up PDFs for the different initial state particles
        pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
        for proc in processes:
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)="
            for ibeam in [1, 2]:
                initial_state = proc.get_initial_pdg(ibeam)
                if initial_state in pdf_codes.keys():
                    pdf_lines = pdf_lines + "%s%d*" % \
                                (pdf_codes[initial_state], ibeam)
                else:
                    pdf_lines = pdf_lines + "1d0*"
            # Remove last "*" from pdf_lines
            pdf_lines = pdf_lines[:-1] + "\n"
            pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

    # Remove last line break from the return variables
    return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1481 1482 #=========================================================================== 1483 # write_props_file 1484 #===========================================================================
def write_props_file(self, writer, matrix_element, s_and_t_channels):
    """Write the props.inc file for MadEvent. Needs input from
    write_configs_file."""

    lines = []

    particle_dict = matrix_element.get('processes')[0].get('model').\
                    get('particle_dict')

    for iconf, channels in enumerate(s_and_t_channels):
        schannels, tchannels = channels[0], channels[1]
        # The very last t-channel propagator is not written out.
        for vertex in schannels + tchannels[:-1]:
            leg = vertex.get('legs')[-1]
            pid = leg.get('id')
            if pid not in particle_dict:
                # Fake propagator used in multiparticle vertices
                mass, width, pow_part = 'zero', 'zero', 0
            else:
                particle = particle_dict[pid]
                # Mass and width parameters are wrapped in abs() unless
                # they are literally 'zero'.
                mass = particle.get('mass')
                if mass.lower() != 'zero':
                    mass = "abs(%s)" % mass
                width = particle.get('width')
                if width.lower() != 'zero':
                    width = "abs(%s)" % width
                # Propagator power: 2 for bosons, 1 for fermions.
                pow_part = 1 + int(particle.is_boson())

            number = leg.get('number')
            lines.append("prmass(%d,%d) = %s" % (number, iconf + 1, mass))
            lines.append("prwidth(%d,%d) = %s" % (number, iconf + 1, width))
            lines.append("pow(%d,%d) = %d" % (number, iconf + 1, pow_part))

    # Write the file
    writer.writelines(lines)

    return True
1528 1529 #=========================================================================== 1530 # write_configs_file 1531 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent"""

    # Extract number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    diagrams = matrix_element.get('diagrams')
    # One config per diagram; config numbers are simply 1..ndiags.
    mapconfigs = list(range(1, len(diagrams) + 1))
    model = matrix_element.get('processes')[0].get('model')

    # Delegate the actual writing; wrap each diagram in its own
    # single-subprocess config list.
    return mapconfigs, self.write_configs_file_from_diagrams(
        writer,
        [[diag] for diag in diagrams],
        mapconfigs,
        nexternal, ninitial,
        model)
1546 1547 #=========================================================================== 1548 # write_configs_file_from_diagrams 1549 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns the list of [s-channels, t-channels] used for each written
    config (consumed later by write_props_file).
    NOTE(review): Python-2 only -- 'schannels + tchannels' below relies on
    zip() returning a list.
    """

    lines = []

    s_and_t_channels = []

    # Minimum vertex size over all configs; anything larger is treated as a
    # multi-particle vertex and skipped below.
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # Fake PDG code used for multi-particle "shrunk" propagators.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any(vert > minvert for vert in [d for d in helas_diags if d]\
               [0].get_vertex_leg_numbers()) :
            # Only 3-vertices allowed in configs.inc except for vertices
            # which originate from a shrunk loop.
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model, new_pdg))
            else:
                # Placeholder shared list, resized below once the real
                # vertex count is known.
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = zip(*[s for s,t in stchannels])
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))

        for verts in allchannels:
            # s-channel entries are tuples over subprocesses (may contain
            # None); t-channel entries are single vertices.
            if verts in schannels:
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 if absent).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels
1668 1669 #=========================================================================== 1670 # Global helper methods 1671 #=========================================================================== 1672
def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
    """Returns a nicely formatted string for the coefficients in JAMP lines"""

    total = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power

    # +/- 1 collapse to a bare sign (times imag1 when imaginary).
    if total == 1:
        return '+imag1*' if is_imaginary else '+'
    if total == -1:
        return '-imag1*' if is_imaginary else '-'

    # General case: signed numerator, optional denominator, optional imag1,
    # always ending with '*' so an amplitude name can be appended.
    formatted = '%+iD0' % total.numerator
    if total.denominator != 1:
        formatted += '/%iD0' % total.denominator
    if is_imaginary:
        formatted += '*imag1'
    return formatted + '*'
1699 1700
1701 - def set_fortran_compiler(self, default_compiler, force=False):
1702 """Set compiler based on what's available on the system""" 1703 1704 # Check for compiler 1705 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1706 f77_compiler = default_compiler['fortran'] 1707 elif misc.which('gfortran'): 1708 f77_compiler = 'gfortran' 1709 elif misc.which('g77'): 1710 f77_compiler = 'g77' 1711 elif misc.which('f77'): 1712 f77_compiler = 'f77' 1713 elif default_compiler['fortran']: 1714 logger.warning('No Fortran Compiler detected! Please install one') 1715 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1716 else: 1717 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1718 logger.info('Use Fortran compiler ' + f77_compiler) 1719 1720 1721 # Check for compiler. 1. set default. 1722 if default_compiler['f2py']: 1723 f2py_compiler = default_compiler['f2py'] 1724 else: 1725 f2py_compiler = '' 1726 # Try to find the correct one. 1727 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1728 f2py_compiler = default_compiler 1729 elif misc.which('f2py'): 1730 f2py_compiler = 'f2py' 1731 elif sys.version_info[1] == 6: 1732 if misc.which('f2py-2.6'): 1733 f2py_compiler = 'f2py-2.6' 1734 elif misc.which('f2py2.6'): 1735 f2py_compiler = 'f2py2.6' 1736 elif sys.version_info[1] == 7: 1737 if misc.which('f2py-2.7'): 1738 f2py_compiler = 'f2py-2.7' 1739 elif misc.which('f2py2.7'): 1740 f2py_compiler = 'f2py2.7' 1741 1742 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1743 1744 1745 self.replace_make_opt_f_compiler(to_replace) 1746 # Replace also for Template but not for cluster 1747 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1748 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1749 1750 return f77_compiler
    # Backward-compatibility alias: older callers (and compiler_choice below)
    # still use the generic name set_compiler().
    set_compiler = set_fortran_compiler
1756 - def set_cpp_compiler(self, default_compiler, force=False):
1757 """Set compiler based on what's available on the system""" 1758 1759 # Check for compiler 1760 if default_compiler and misc.which(default_compiler): 1761 compiler = default_compiler 1762 elif misc.which('g++'): 1763 #check if clang version 1764 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1765 stderr=subprocess.PIPE) 1766 out, _ = p.communicate() 1767 if 'clang' in out and misc.which('clang'): 1768 compiler = 'clang' 1769 else: 1770 compiler = 'g++' 1771 elif misc.which('c++'): 1772 compiler = 'c++' 1773 elif misc.which('clang'): 1774 compiler = 'clang' 1775 elif default_compiler: 1776 logger.warning('No c++ Compiler detected! Please install one') 1777 compiler = default_compiler # maybe misc fail so try with it 1778 else: 1779 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1780 logger.info('Use c++ compiler ' + compiler) 1781 self.replace_make_opt_c_compiler(compiler) 1782 # Replace also for Template but not for cluster 1783 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1784 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1785 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1786 1787 return compiler
1788 1789
1790 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1791 """Set FC=compiler in Source/make_opts""" 1792 1793 assert isinstance(compilers, dict) 1794 1795 mod = False #avoid to rewrite the file if not needed 1796 if not root_dir: 1797 root_dir = self.dir_path 1798 1799 compiler= compilers['fortran'] 1800 f2py_compiler = compilers['f2py'] 1801 if not f2py_compiler: 1802 f2py_compiler = 'f2py' 1803 for_update= {'DEFAULT_F_COMPILER':compiler, 1804 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1805 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1806 1807 try: 1808 common_run_interface.CommonRunCmd.update_make_opts_full( 1809 make_opts, for_update) 1810 except IOError: 1811 if root_dir == self.dir_path: 1812 logger.info('Fail to set compiler. Trying to continue anyway.')
1813
1814 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1815 """Set CXX=compiler in Source/make_opts. 1816 The version is also checked, in order to set some extra flags 1817 if the compiler is clang (on MACOS)""" 1818 1819 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1820 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1821 1822 # list of the variable to set in the make_opts file 1823 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1824 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1825 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1826 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1827 } 1828 1829 if not root_dir: 1830 root_dir = self.dir_path 1831 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1832 1833 try: 1834 common_run_interface.CommonRunCmd.update_make_opts_full( 1835 make_opts, for_update) 1836 except IOError: 1837 if root_dir == self.dir_path: 1838 logger.info('Fail to set compiler. Trying to continue anyway.') 1839 1840 return
1841
#===============================================================================
# ProcessExporterFortranSA
#===============================================================================
class ProcessExporterFortranSA(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 StandAlone format."""

    # Fortran template used to write each subprocess matrix.f; daughter
    # classes (e.g. the MatchBox exporter below) override this attribute.
    matrix_template = "matrix_standalone_v4.inc"
1851 - def __init__(self, *args, **opts):
1852 """add the format information compare to standard init""" 1853 1854 if 'format' in opts: 1855 self.format = opts['format'] 1856 del opts['format'] 1857 else: 1858 self.format = 'standalone' 1859 ProcessExporterFortran.__init__(self, *args, **opts)
1860
1861 - def copy_template(self, model):
1862 """Additional actions needed for setup of Template 1863 """ 1864 1865 #First copy the full template tree if dir_path doesn't exit 1866 if os.path.isdir(self.dir_path): 1867 return 1868 1869 logger.info('initialize a new standalone directory: %s' % \ 1870 os.path.basename(self.dir_path)) 1871 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1872 1873 # Create the directory structure 1874 os.mkdir(self.dir_path) 1875 os.mkdir(pjoin(self.dir_path, 'Source')) 1876 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1877 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1878 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1879 os.mkdir(pjoin(self.dir_path, 'bin')) 1880 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1881 os.mkdir(pjoin(self.dir_path, 'lib')) 1882 os.mkdir(pjoin(self.dir_path, 'Cards')) 1883 1884 # Information at top-level 1885 #Write version info 1886 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1887 try: 1888 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1889 except IOError: 1890 MG5_version = misc.get_pkg_info() 1891 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1892 "5." + MG5_version['version']) 1893 1894 1895 # Add file in SubProcesses 1896 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1897 pjoin(self.dir_path, 'SubProcesses', 'makefile')) 1898 1899 if self.format == 'standalone': 1900 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1901 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1902 1903 # Add file in Source 1904 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1905 pjoin(self.dir_path, 'Source')) 1906 # add the makefile 1907 filename = pjoin(self.dir_path,'Source','makefile') 1908 self.write_source_makefile(writers.FileWriter(filename))
1909 1910 #=========================================================================== 1911 # export model files 1912 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model

        On top of the mother-class export this appends an update_as_param()
        routine to Source/MODEL/couplings.f and patches check_sa.f so that
        setpara is called with the full re-read flag.
        """

        super(ProcessExporterFortranSA,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
        call setpara('param_card.dat',.false.)
        return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Force the standalone driver to fully (re)read the param_card:
        # replace setpara('param_card.dat') by setpara('param_card.dat', .true.)
        text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
1936 1937 #=========================================================================== 1938 # Make the Helas and Model directories for Standalone directory 1939 #===========================================================================
1940 - def make(self):
1941 """Run make in the DHELAS and MODEL directories, to set up 1942 everything for running standalone 1943 """ 1944 1945 source_dir = pjoin(self.dir_path, "Source") 1946 logger.info("Running make for Helas") 1947 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1948 logger.info("Running make for Model") 1949 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1950 1951 #=========================================================================== 1952 # Create proc_card_mg5.dat for Standalone directory 1953 #===========================================================================
1954 - def finalize(self, matrix_elements, history, mg5options, flaglist):
1955 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 1956 1957 compiler = {'fortran': mg5options['fortran_compiler'], 1958 'cpp': mg5options['cpp_compiler'], 1959 'f2py': mg5options['f2py_compiler']} 1960 1961 self.compiler_choice(compiler) 1962 self.make() 1963 1964 # Write command history as proc_card_mg5 1965 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 1966 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 1967 history.write(output_file) 1968 1969 ProcessExporterFortran.finalize(self, matrix_elements, 1970 history, mg5options, flaglist) 1971 open(pjoin(self.dir_path,'__init__.py'),'w') 1972 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w') 1973 1974 if 'mode' in self.opt and self.opt['mode'] == "reweight": 1975 #add the module to hande the NLO weight 1976 files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'), 1977 pjoin(self.dir_path, 'Source')) 1978 files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'), 1979 pjoin(self.dir_path, 'Source', 'PDF')) 1980 self.write_pdf_opendata()
1981
1982 - def create_MA5_cards(self,*args,**opts):
1983 """ Overload the function of the mother so as to bypass this in StandAlone.""" 1984 pass
1985
1986 - def compiler_choice(self, compiler):
1987 """ Different daughter classes might want different compilers. 1988 So this function is meant to be overloaded if desired.""" 1989 1990 self.set_compiler(compiler)
1991 1992 #=========================================================================== 1993 # generate_subprocess_directory 1994 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of non-comment helas calls written (0 when the
        subprocess is recognised as a permutation of an existing one).
        """

        cwd = os.getcwd()

        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output
            for i,proc in enumerate(matrix_element.get('processes')):

                initial = []    #filled in the next line
                final = [l.get('id') for l in proc.get('legs')\
                      if l.get('state') or initial.append(l.get('id'))]
                decay_finals = proc.get_final_ids_after_decay()
                decay_finals.sort()
                tag = (tuple(initial), tuple(decay_finals))
                # NOTE(review): 'final' and 'tag' appear unused below —
                # confirm before removing.
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                # Put the incoming leg first when the second leg is initial-state.
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                flegs = proc.get('legs')[2:]
                # Try every permutation of the final-state legs: if a
                # directory already exists for one of them, this process is a
                # permutation of an earlier one and nothing is generated.
                for perm in itertools.permutations(flegs):
                    for i,p in enumerate(perm):
                        proc.get('legs')[i+2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                               "P%s" % proc.shell_string())
                    #restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')
        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model)

        # MadSpin production variant needs extra include files.
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                             matrix_element,
                             s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                             nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb=matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                             ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                        len(matrix_element.get_all_amplitudes()))

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        # Symlink the shared files from the SubProcesses level.
        linkfiles = ['check_sa.f', 'coupl.inc', 'makefile']

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)

        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2117 2118 2119 #=========================================================================== 2120 # write_source_makefile 2121 #===========================================================================
2122 - def write_source_makefile(self, writer):
2123 """Write the nexternal.inc file for MG4""" 2124 2125 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2126 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2127 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2128 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 2129 writer.write(text) 2130 2131 return True
2132 2133 #=========================================================================== 2134 # write_matrix_element_v4 2135 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format
        if write is on False, just return the replace_dict and not write anything.

        writer: FortranWriter (or None when only the replace_dict is wanted).
        Returns the number of non-comment helas calls when writing, otherwise
        the replace_dict (with 'return_value' added) for subclass use.
        """


        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        if not self.opt.has_key('sa_symmetry'):
            self.opt['sa_symmetry']=False

        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)

            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            # NOTE(review): this writes into the current working directory and
            # happens even when write=False — confirm that is intended.
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
                              nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Select the Fortran template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                  'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
                writer.writelines(content)
            return len(filter(lambda call: call.find('#') != 0, helas_calls))
        else:
            replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls))
            return replace_dict # for subclass update
2307
2308 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2309 nincoming, proc_prefix, writer):
2310 """ Write out a more advanced version of the check_sa drivers that 2311 individually returns the matrix element for each contributing squared 2312 order.""" 2313 2314 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2315 'template_files', 'check_sa_splitOrders.f')).read() 2316 printout_sq_orders=[] 2317 for i, squared_order in enumerate(squared_orders): 2318 sq_orders=[] 2319 for j, sqo in enumerate(squared_order): 2320 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2321 printout_sq_orders.append(\ 2322 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2323 %(i+1,' '.join(sq_orders),i+1)) 2324 printout_sq_orders='\n'.join(printout_sq_orders) 2325 replace_dict = {'printout_sqorders':printout_sq_orders, 2326 'nSplitOrders':len(squared_orders), 2327 'nexternal':nexternal, 2328 'nincoming':nincoming, 2329 'proc_prefix':proc_prefix} 2330 2331 if writer: 2332 writer.writelines(check_sa_content % replace_dict) 2333 else: 2334 return replace_dict
2335
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # MatchBox-specific defaults: keep symmetric subprocess output and fix
    # the export format to 'matchbox'.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        The output is a chain of Fortran if/elseif blocks mapping the pair
        (in1, in2) to the flattened color-structure indices; -1 signals an
        index outside the table.
        """

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                # ColorOne carries no index information: skip it.
                if ctype in ['ColorOne' ]:
                    continue
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype
                # '0' acts as a separator between consecutive structures.
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # NOTE(review): plain dict iteration here — in Python 2 the key
            # order is arbitrary, so the elseif chain order is not stable
            # across runs (harmless for lookup semantics, confirm).
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)
2401
2402 - def make(self,*args,**opts):
2403 pass
2404
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):

        """Adding leading color part of the colorflow

        On top of the full-color JAMP lines from the mother class, append a
        second set (named via JAMP_formatLC, default 'LN'+JAMP_format) built
        only from the coefficients without any 1/Nc suppression.
        """

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        # NOTE(review): split=-1 is hard-coded in both super() calls, so the
        # 'split' parameter is effectively ignored — confirm intent.
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                      JAMP_format=JAMP_format,
                                      AMP_format=AMP_format,
                                      split=-1)


        # Filter the col_ampls to generate only those without any 1/NC terms

        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                # coefficient[3] is the power of 1/Nc: keep leading color only.
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                      JAMP_format=JAMP_formatLC,
                                      AMP_format=AMP_format,
                                      split=-1)

        return text
2446
#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # Fortran template used for each subprocess matrix element.
    matrix_file="matrix_standalone_v4.inc"
    def copy_template(self, model):
        """Additional actions needed for setup of Template

        On top of the mother-class setup: copy the MadWeight source tree and
        python package into the output, relocate setrun.f/run.inc, write
        run_config.inc, run the pass_to_madweight script, copy the python
        runtime files and produce the MadWeight-compatible cuts.f.
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'),
                               pjoin(self.dir_path, 'Source','MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'),
                               pjoin(self.dir_path, 'bin','internal','madweight'), True)
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'),
                 pjoin(self.dir_path, 'Source','setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'),
                 pjoin(self.dir_path, 'Source','run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        # NOTE(review): the os.open(devnull) descriptors below are never
        # closed (fd leak) — confirm and fix separately.
        try:
            subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')],
                            stdout = os.open(os.devnull, os.O_RDWR),
                            stderr = os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2495 2496 2497 2498 2499 #=========================================================================== 2500 # convert_model 2501 #===========================================================================
2502 - def convert_model(self, model, wanted_lorentz = [], 2503 wanted_couplings = []):
2504 2505 super(ProcessExporterFortranMW,self).convert_model(model, 2506 wanted_lorentz, wanted_couplings) 2507 2508 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2509 try: 2510 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2511 except OSError as error: 2512 pass 2513 model_path = model.get('modelpath') 2514 # This is not safe if there is a '##' or '-' in the path. 2515 shutil.copytree(model_path, 2516 pjoin(self.dir_path,'bin','internal','ufomodel'), 2517 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2518 if hasattr(model, 'restrict_card'): 2519 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2520 'restrict_default.dat') 2521 if isinstance(model.restrict_card, check_param_card.ParamCard): 2522 model.restrict_card.write(out_path) 2523 else: 2524 files.cp(model.restrict_card, out_path)
2525 2526 #=========================================================================== 2527 # generate_subprocess_directory 2528 #===========================================================================
2529 - def copy_python_file(self):
2530 """copy the python file require for the Template""" 2531 2532 # madevent interface 2533 cp(_file_path+'/interface/madweight_interface.py', 2534 self.dir_path+'/bin/internal/madweight_interface.py') 2535 cp(_file_path+'/interface/extended_cmd.py', 2536 self.dir_path+'/bin/internal/extended_cmd.py') 2537 cp(_file_path+'/interface/common_run_interface.py', 2538 self.dir_path+'/bin/internal/common_run_interface.py') 2539 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2540 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2541 #cp(_file_path+'/iolibs/save_load_object.py', 2542 # self.dir_path+'/bin/internal/save_load_object.py') 2543 cp(_file_path+'/iolibs/file_writers.py', 2544 self.dir_path+'/bin/internal/file_writers.py') 2545 #model file 2546 cp(_file_path+'../models/check_param_card.py', 2547 self.dir_path+'/bin/internal/check_param_card.py') 2548 2549 #madevent file 2550 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2551 cp(_file_path+'/various/lhe_parser.py', 2552 self.dir_path+'/bin/internal/lhe_parser.py') 2553 2554 cp(_file_path+'/various/banner.py', 2555 self.dir_path+'/bin/internal/banner.py') 2556 cp(_file_path+'/various/shower_card.py', 2557 self.dir_path+'/bin/internal/shower_card.py') 2558 cp(_file_path+'/various/cluster.py', 2559 self.dir_path+'/bin/internal/cluster.py') 2560 2561 # logging configuration 2562 cp(_file_path+'/interface/.mg5_logging.conf', 2563 self.dir_path+'/bin/internal/me5_logging.conf') 2564 cp(_file_path+'/interface/coloring_logging.py', 2565 self.dir_path+'/bin/internal/coloring_logging.py')
2566 2567 2568 #=========================================================================== 2569 # Change the version of cuts.f to the one compatible with MW 2570 #===========================================================================
def get_mw_cuts_version(self, outpath=None):
    """create the appropriate cuts.f
    This is based on the one associated to ME output but:
    1) No clustering (=> remove initcluster/setclscales)
    2) Adding the definition of cut_bw at the file.

    :param outpath: None (write to SubProcesses/cuts.f), a path string,
                    or an already-open file-like object to write into.
    """

    # Template taken from the standard LO output.
    template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

    text = StringIO()
    #1) remove all dependencies in ickkw >1:
    # nb_if tracks the nesting depth of if/endif blocks once the
    # xqcut-guarded region is entered; while nb_if > 0 lines are
    # dropped.  NOTE(review): the trigger line itself may also match
    # the if(...)then regex below and bump the counter again -- the
    # bookkeeping is order-sensitive, do not reorder these tests.
    nb_if = 0
    for line in template:
        if 'if(xqcut.gt.0d0' in line:
            nb_if = 1
        if nb_if == 0:
            # outside the skipped region: keep the line verbatim
            text.write(line)
            continue
        if re.search(r'if\(.*\)\s*then', line):
            nb_if += 1
        elif 'endif' in line:
            nb_if -= 1

    #2) add fake cut_bw (have to put the true one later)
    text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
        cut_bw = .true.
      else
        stop 1
      endif
      return
      end
""")

    final = text.getvalue()
    #3) remove the call to initcluster:
    # (the name `template` is reused here for the patched text)
    template = final.replace('call initcluster', '! Remove for MW!call initcluster')
    template = template.replace('genps.inc', 'maxparticles.inc')
    #Now we can write it
    if not outpath:
        fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
    elif isinstance(outpath, str):
        fsock = open(outpath, 'w')
    else:
        # caller-supplied stream: caller keeps ownership (not closed here)
        fsock = outpath
    fsock.write(template)
2620 2621 2622 2623 #=========================================================================== 2624 # Make the Helas and Model directories for Standalone directory 2625 #===========================================================================
def make(self):
    """Compile the static libraries (DHELAS, MODEL, PDF, CERNLIB,
    GENERIC, blocks, tools) in the Source directory, so that everything
    is ready for running madweight."""

    source_dir = os.path.join(self.dir_path, "Source")

    # (log label, library archive) in the order they must be built
    targets = [('Helas', 'libdhelas.a'),
               ('Model', 'libmodel.a'),
               ('PDF', 'libpdf.a'),
               ('CERNLIB', 'libcernlib.a'),
               ('GENERIC', 'libgeneric.a'),
               ('blocks', 'libblocks.a'),
               ('tools', 'libtools.a')]

    for label, archive in targets:
        logger.info("Running make for %s" % label)
        misc.compile(arg=['../lib/%s' % archive], cwd=source_dir,
                     mode='fortran')
2646 2647 #=========================================================================== 2648 # Create proc_card_mg5.dat for MadWeight directory 2649 #===========================================================================
def finalize(self, matrix_elements, history, mg5options, flaglist):
    """Finalize the MadWeight MG4 directory: write maxparticles.inc,
    compile the libraries and store the command history as
    proc_card_mg5.dat."""

    compilers = {'fortran': mg5options['fortran_compiler'],
                 'cpp': mg5options['cpp_compiler'],
                 'f2py': mg5options['f2py_compiler']}

    # proc_charac file
    self.create_proc_charac()

    # maxparticles.inc is derived from the max over all ME's/subprocess
    # groups, then linked where the MadWeight fortran code expects it.
    maxpart_path = pjoin(self.dir_path, 'Source', 'maxparticles.inc')
    self.write_maxparticles_file(writers.FortranWriter(maxpart_path),
                                 matrix_elements)
    for subdir in ('blocks', 'tools'):
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source', 'MadWeight', subdir))

    self.set_compiler(compilers)
    self.make()

    # Keep a record of the generating commands as proc_card_mg5
    cards_dir = os.path.join(self.dir_path, 'Cards')
    if os.path.isdir(cards_dir):
        history.write(os.path.join(cards_dir, 'proc_card_mg5.dat'))

    ProcessExporterFortran.finalize(self, matrix_elements,
                                    history, mg5options, flaglist)
2681 2682 2683 2684 #=========================================================================== 2685 # create the run_card for MW 2686 #===========================================================================
def create_run_card(self, matrix_elements, history):
    """Write run_card.dat and run_card_default.dat for MadWeight,
    starting from the MadWeight template with MW-specific defaults."""

    run_card = banner_mod.RunCard()

    # MadWeight defaults: fixed scales and no generation cuts.
    run_card["run_tag"] = "\'not_use\'"
    run_card["fixed_ren_scale"] = "T"
    run_card["fixed_fac_scale"] = "T"
    run_card.remove_all_cut()

    template_path = pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards',
                          'run_card.dat')
    for card_name in ('run_card_default.dat', 'run_card.dat'):
        run_card.write(pjoin(self.dir_path, 'Cards', card_name),
                       template=template_path,
                       python_template=True)
2704 2705 #=========================================================================== 2706 # export model files 2707 #===========================================================================
def export_model_files(self, model_path):
    """Export the model dependent files for a V4 model, then patch the
    generated fortran sources for MadWeight usage.

    :param model_path: path of the v4 model to export
    """

    super(ProcessExporterFortranMW, self).export_model_files(model_path)

    # Add the routine update_as_param in the v4 model.
    # This is a function created in the UFO.
    text = """
      subroutine update_as_param()
      call setpara('param_card.dat',.false.)
      return
      end
"""
    couplings_path = os.path.join(self.dir_path, 'Source', 'MODEL',
                                  'couplings.f')
    # 'with' guarantees the handle is closed (the original leaked it)
    with open(couplings_path, 'a') as ff:
        ff.write(text)

    # setpara takes an explicit "read param card" flag in this output:
    # patch the two generated callers accordingly.
    self._replace_in_file(os.path.join(self.dir_path, 'Source', 'setrun.f'),
                          'call setpara(param_card_name)',
                          'call setpara(param_card_name, .true.)')
    self._replace_in_file(os.path.join(self.dir_path, 'SubProcesses',
                                       'initialization.f'),
                          'call setpara(param_name)',
                          'call setpara(param_name, .true.)')

    self.make_model_symbolic_link()

def _replace_in_file(self, path, old, new):
    """Replace in place every occurrence of *old* by *new* in *path*."""
    with open(path) as fsock:
        text = fsock.read()
    with open(path, 'w') as fsock:
        fsock.write(text.replace(old, new))
2740 2741 #=========================================================================== 2742 # generate_subprocess_directory 2743 #===========================================================================
def generate_subprocess_directory(self, matrix_element,
                                  fortran_model, number):
    """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
    including the necessary matrix.f and nexternal.inc files

    :param matrix_element: the matrix element to export
    :param fortran_model: helas-call writer used to build matrix.f
    :param number: subprocess number (not used in the body below)
    :return: number of helas calls written in matrix.f (0 if none)
    """

    cwd = os.getcwd()
    # NOTE(review): looks like a leftover debug print -- confirm whether
    # it should be removed.
    misc.sprint(type(matrix_element))
    # Create the directory PN_xx_xxxxx in the specified path
    dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                   "P%s" % matrix_element.get('processes')[0].shell_string())

    try:
        os.mkdir(dirpath)
    except os.error as error:
        # directory may already exist: warn and keep going
        logger.warning(error.strerror + " " + dirpath)

    #try:
    #    os.chdir(dirpath)
    #except os.error:
    #    logger.error('Could not cd to directory %s' % dirpath)
    #    return 0

    logger.info('Creating files in directory %s' % dirpath)

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Create the matrix.f file and the nexternal.inc file
    filename = pjoin(dirpath, 'matrix.f')
    calls, ncolor = self.write_matrix_element_v4(
                            writers.FortranWriter(filename),
                            matrix_element,
                            fortran_model)

    # Differential cross-section driver
    filename = pjoin(dirpath, 'auto_dsig.f')
    self.write_auto_dsig_file(writers.FortranWriter(filename),
                              matrix_element)

    # Channel configurations; s_and_t_channels is reused for props.inc
    filename = pjoin(dirpath, 'configs.inc')
    mapconfigs, s_and_t_channels = self.write_configs_file(\
        writers.FortranWriter(filename),
        matrix_element)

    filename = pjoin(dirpath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = pjoin(dirpath, 'leshouche.inc')
    self.write_leshouche_file(writers.FortranWriter(filename),
                              matrix_element)

    filename = pjoin(dirpath, 'props.inc')
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    filename = pjoin(dirpath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = pjoin(dirpath, 'ngraphs.inc')
    self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(matrix_element.get_all_amplitudes()))

    filename = pjoin(dirpath, 'maxamps.inc')
    self.write_maxamps_file(writers.FortranWriter(filename),
                            len(matrix_element.get('diagrams')),
                            ncolor,
                            len(matrix_element.get('processes')),
                            1)

    filename = pjoin(dirpath, 'phasespace.inc')
    self.write_phasespace_file(writers.FortranWriter(filename),
                               len(matrix_element.get('diagrams')),
                               )

    # Generate diagrams
    filename = pjoin(dirpath, "matrix.ps")
    plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                      get('diagrams'),
                                      filename,
                                      model=matrix_element.get('processes')[0].\
                                      get('model'),
                                      amplitude='')
    logger.info("Generating Feynman diagrams for " + \
                matrix_element.get('processes')[0].nice_string())
    plot.draw()

    #import genps.inc and maxconfigs.inc into Subprocesses
    ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
    #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

    linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f',
                 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc',
                 'setscales.f', 'genps.inc']

    # NOTE(review): `file` shadows the builtin; also these links use
    # starting_dir=cwd rather than cwd=dirpath (the chdir above is
    # commented out) -- presumably `ln` resolves this correctly; verify.
    for file in linkfiles:
        ln('../%s' % file, starting_dir=cwd)

    ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
    ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
    ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
    ln('phasespace.inc', '../', log=True, cwd=dirpath)
    # Return to original PWD
    #os.chdir(cwd)

    if not calls:
        calls = 0
    return calls
2851 2852 #=========================================================================== 2853 # write_matrix_element_v4 2854 #===========================================================================
def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                            proc_id="", config_map=[]):
    """Export a matrix element to a matrix.f file in MG4 MadWeight format.

    :param writer: FortranWriter to write matrix.f with; if false-y the
                   replacement dictionary is returned instead
    :param matrix_element: the matrix element to export
    :param fortran_model: helas-call writer
    :param proc_id: optional process id inserted in the template
    :param config_map: unused here (kept for interface compatibility)
    :return: (number of helas calls, ncolor) when writing; the
             replacement dictionary (pair stored under 'return_value')
             when no writer is given; 0 for an empty matrix element.
    """

    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    if writer:
        if not isinstance(writer, writers.FortranWriter):
            # py2/py3-compatible raise (was the py2-only `raise E, msg`
            # style used elsewhere in this module)
            raise writers.FortranWriter.FortranWriterError(
                "writer not FortranWriter")
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

    replace_dict = {}

    # Extract version number and date from VERSION file
    replace_dict['info_lines'] = self.get_mg5_info_lines()

    # Extract process info lines
    replace_dict['process_lines'] = self.get_process_info_lines(matrix_element)

    # Set proc_id
    replace_dict['proc_id'] = proc_id

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal

    # Extract ncomb (number of helicity combinations)
    replace_dict['ncomb'] = matrix_element.get_helicity_combinations()

    # Extract helicity lines
    replace_dict['helicity_lines'] = self.get_helicity_lines(matrix_element)

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

    # Extract ngraphs
    replace_dict['ngraphs'] = matrix_element.get_number_of_amplitudes()

    # Extract nwavefuncs
    replace_dict['nwavefuncs'] = matrix_element.get_number_of_wavefunctions()

    # Extract ncolor (at least 1 even for a trivial color basis)
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(matrix_element)
    replace_dict['helas_calls'] = "\n".join(helas_calls)

    # Extract JAMP lines
    jamp_lines = self.get_JAMP_lines(matrix_element)
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

    replace_dict['template_file'] = os.path.join(_file_path, \
                         'iolibs/template_files/%s' % self.matrix_file)
    replace_dict['template_file2'] = ''

    # Number of real helas calls (lines starting with '#' are comments);
    # list comprehension instead of py2-only len(filter(...)).
    ncalls = len([call for call in helas_calls if not call.startswith('#')])

    if writer:
        # 'with' closes the template handle (the original leaked it)
        with open(replace_dict['template_file']) as tmpl:
            content = tmpl.read()
        content = content % replace_dict
        # Write the file
        writer.writelines(content)
        return ncalls, ncolor

    # Bug fix: the original stored 'return_value' but fell off the end,
    # always returning None; return the dictionary like
    # write_auto_dsig_file does.
    replace_dict['return_value'] = (ncalls, ncolor)
    return replace_dict
2938 2939 #=========================================================================== 2940 # write_source_makefile 2941 #===========================================================================
def write_source_makefile(self, writer):
    """Write the Source directory makefile for madweight.

    (Docstring fix: the original claimed this wrote nexternal.inc.)

    :param writer: writer object used to emit the makefile text
    :return: True on success
    """

    path = os.path.join(_file_path, 'iolibs', 'template_files',
                        'madweight_makefile_source')
    set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)'
    # 'with' guarantees the template handle is closed (original leaked it)
    with open(path) as template:
        text = template.read() % {'libraries': set_of_lib}
    writer.write(text)

    return True
2952
def write_phasespace_file(self, writer, nb_diag):
    """Write phasespace.inc: fixes the maximal number of branches and of
    configuration channels, and declares the common block holding the
    currently selected channel/permutation position."""

    inc_template = """ include 'maxparticles.inc'
      integer max_branches
      parameter (max_branches=max_particles-1)
      integer max_configs
      parameter (max_configs=%(nb_diag)s)

c     channel position
      integer config_pos,perm_pos
      common /to_config/config_pos,perm_pos

"""

    substitutions = {'nb_diag': nb_diag}
    writer.write(inc_template % substitutions)
2969 2970 2971 #=========================================================================== 2972 # write_auto_dsig_file 2973 #===========================================================================
def write_auto_dsig_file(self, writer, matrix_element, proc_id=""):
    """Write the auto_dsig.f file for the differential cross section
    calculation, includes pdf call information (MadWeight format).

    :param writer: FortranWriter for auto_dsig.f; if false-y the
                   replacement dictionary is returned instead
    :param matrix_element: the matrix element to export
    :param proc_id: optional subprocess-group process id
    :return: 0 for an empty matrix element, the replacement dictionary
             when no writer is given, None otherwise
    :raises FortranWriterError: if ninitial is not 1 or 2
    """

    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    if ninitial < 1 or ninitial > 2:
        # py2/py3-compatible raise (was the py2-only `raise E, msg` form)
        raise writers.FortranWriter.FortranWriterError(
            """Need ninitial = 1 or 2 to write auto_dsig file""")

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id
    replace_dict['numproc'] = 1

    # Set dsig_line
    if ninitial == 1:
        # No conversion, since result of decay should be given in GeV
        dsig_line = "pd(0)*dsiguu"
    else:
        # Convert result (in GeV) to pb
        dsig_line = "pd(0)*conv*dsiguu"

    replace_dict['dsig_line'] = dsig_line

    # Extract pdf lines
    pdf_vars, pdf_data, pdf_lines = \
             self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # Lines that differ between subprocess group and regular
    if proc_id:
        replace_dict['numproc'] = int(proc_id)
        replace_dict['passcuts_begin'] = ""
        replace_dict['passcuts_end'] = ""
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_subdiag_lines'] = \
             """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
             COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
    else:
        replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
        replace_dict['passcuts_end'] = "ENDIF"
        replace_dict['define_subdiag_lines'] = ""

    if writer:
        # 'with' closes the template handle (the original leaked it and
        # shadowed the builtin `file`)
        with open(os.path.join(_file_path,
                      'iolibs/template_files/auto_dsig_mw.inc')) as tmpl:
            content = tmpl.read()

        content = content % replace_dict
        # Write the file
        writer.writelines(content)
    else:
        return replace_dict
3043 #=========================================================================== 3044 # write_configs_file 3045 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Returns the config->diagram map and the s/t-channel decomposition
    produced by write_configs_file_from_diagrams."""

    # Extract number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    diagrams = matrix_element.get('diagrams')
    # One config per diagram, numbered from 1
    mapconfigs = list(range(1, len(diagrams) + 1))
    model = matrix_element.get('processes')[0].get('model')

    return mapconfigs, self.write_configs_file_from_diagrams(
        writer,
        [[diag] for diag in diagrams],
        mapconfigs,
        nexternal, ninitial, matrix_element, model)
3059 3060 #=========================================================================== 3061 # write_run_configs_file 3062 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadWeight, filling the
    channels-per-job setting into the template."""

    template_path = os.path.join(_file_path, 'iolibs', 'template_files',
                                 'madweight_run_config.inc')
    with open(template_path) as template:
        content = template.read() % {'chanperjob': '5'}
    writer.write(content)
    return True
3070 3071 #=========================================================================== 3072 # write_configs_file_from_diagrams 3073 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, matrix_element, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    :return: list of [s-channels, t-channels] per written config
    """

    lines = []

    particle_dict = matrix_element.get('processes')[0].get('model').\
                    get('particle_dict')

    s_and_t_channels = []

    # Largest vertex size per config; used to skip configs containing
    # anything beyond the minimal (3-point) vertices below.
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]

    minvert = min(vert_list) if vert_list!=[] else 0
    # Number of subprocesses
    # NOTE(review): only referenced from commented-out code below
    nsubprocs = len(configs[0])

    nconfigs = 0

    # Fresh PDG code used for fake propagators in multiparticle vertices
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial,model,new_pdg))
            else:
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = zip(*[s for s,t in stchannels])
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("* %d %d " % (nconfigs,
                                   mapconfigs[iconfig]))

        for verts in allchannels:
            if verts in schannels:
                # s-channel entry: one vertex per subprocess, pick the
                # first non-None one
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
            # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
            #              (last_leg.get('number'), nconfigs, len(daughters),
            #               ",".join([str(d) for d in daughters])))

            if last_leg.get('id') == 21 and 21 not in particle_dict:
                # Fake propagator used in multiparticle vertices
                mass = 'zero'
                width = 'zero'
                pow_part = 0
            else:
                if (last_leg.get('id')!=7):
                    particle = particle_dict[last_leg.get('id')]
                    # Get mass
                    mass = particle.get('mass')
                    # Get width
                    width = particle.get('width')
                else : # fake propagator used in multiparticle vertices
                    mass= 'zero'
                    width= 'zero'

            line=line+" "+mass+" "+width+" "

            if verts in schannels:
                # NOTE(review): pdgs is built but only used by the
                # commented-out sprop lines below
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append(line+" S "+str(last_leg.get('id')))
                # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                #              (last_leg.get('number'), nconfigs, nsubprocs,
                #               ",".join([str(d) for d in pdgs])))
                # lines.append("data tprid(%d,%d)/0/" % \
                #              (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append(line+" T "+str(last_leg.get('id')))
                # lines.append("data tprid(%d,%d)/%d/" % \
                #              (last_leg.get('number'), nconfigs,
                #               abs(last_leg.get('id'))))
                # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                #              (last_leg.get('number'), nconfigs, nsubprocs,
                #               ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    # lines.append("# Number of configs")
    # lines.append("data mapconfig(0)/%d/" % nconfigs)
    lines.append(" * ") # a line with just a star indicates this is the end of file
    # Write the file
    writer.writelines(lines)

    return s_and_t_channels
3216
3217 3218 #=============================================================================== 3219 # ProcessExporterFortranME 3220 #=============================================================================== 3221 -class ProcessExporterFortranME(ProcessExporterFortran):
3222 """Class to take care of exporting a set of matrix elements to 3223 MadEvent format.""" 3224 3225 matrix_file = "matrix_madevent_v4.inc" 3226
def copy_template(self, model):
    """Additional actions needed for setup of the MadEvent Template."""

    super(ProcessExporterFortranME, self).copy_template(model)

    # run_config.inc is created from Template (differs in some child
    # classes)
    self.write_run_config_file(writers.FortranWriter(
        pjoin(self.dir_path, 'Source', 'run_config.inc')))

    # The next files are model dependent (due to SLAH convention)
    self.model_name = model.get('name')

    # symmetry.f
    self.write_symmetry(writers.FortranWriter(
        pjoin(self.dir_path, 'SubProcesses', 'symmetry.f')))
    # addmothers.f
    self.write_addmothers(writers.FortranWriter(
        pjoin(self.dir_path, 'SubProcesses', 'addmothers.f')))

    # Copy the different python files into the Template
    self.copy_python_file()
3247 3248 3249 3250 3251 3252 #=========================================================================== 3253 # generate_subprocess_directory 3254 #===========================================================================
def copy_python_file(self):
    """Copy the python modules required by the MadEvent Template
    into <output>/bin/internal."""

    # (source relative to the madgraph package, destination inside the
    #  output directory) -- copied in this exact order.
    first_batch = [
        # madevent interface
        ('/interface/madevent_interface.py', '/bin/internal/madevent_interface.py'),
        ('/interface/extended_cmd.py', '/bin/internal/extended_cmd.py'),
        ('/interface/common_run_interface.py', '/bin/internal/common_run_interface.py'),
        ('/various/misc.py', '/bin/internal/misc.py'),
        ('/iolibs/files.py', '/bin/internal/files.py'),
        ('/iolibs/save_load_object.py', '/bin/internal/save_load_object.py'),
        ('/iolibs/file_writers.py', '/bin/internal/file_writers.py'),
        # model file (lives in the sibling models package)
        ('../models/check_param_card.py', '/bin/internal/check_param_card.py'),
        ]
    for src, dest in first_batch:
        cp(_file_path + src, self.dir_path + dest)

    # copy every python module present in the madevent directory
    for name in os.listdir(pjoin(_file_path, 'madevent')):
        if name not in ['__init__.py'] and name.endswith('.py'):
            cp(_file_path + '/madevent/' + name, self.dir_path + '/bin/internal/')

    second_batch = [
        # madevent files
        ('/__init__.py', '/bin/internal/__init__.py'),
        ('/various/lhe_parser.py', '/bin/internal/lhe_parser.py'),
        ('/various/banner.py', '/bin/internal/banner.py'),
        ('/various/histograms.py', '/bin/internal/histograms.py'),
        ('/various/plot_djrs.py', '/bin/internal/plot_djrs.py'),
        ('/various/systematics.py', '/bin/internal/systematics.py'),
        ('/various/cluster.py', '/bin/internal/cluster.py'),
        ('/madevent/combine_runs.py', '/bin/internal/combine_runs.py'),
        # logging configuration (renamed on copy)
        ('/interface/.mg5_logging.conf', '/bin/internal/me5_logging.conf'),
        ('/interface/coloring_logging.py', '/bin/internal/coloring_logging.py'),
        # shower card and FO_analyse_card: not needed here, but imported
        # by banner.py
        ('/various/shower_card.py', '/bin/internal/shower_card.py'),
        ('/various/FO_analyse_card.py', '/bin/internal/FO_analyse_card.py'),
        ]
    for src, dest in second_batch:
        cp(_file_path + src, self.dir_path + dest)
3307 3308
def convert_model(self, model, wanted_lorentz = [],
                  wanted_couplings = []):
    """Convert the model and keep a copy of the UFO model inside
    bin/internal/ufomodel (plus the restriction card, if any)."""

    super(ProcessExporterFortranME,self).convert_model(model,
                                           wanted_lorentz, wanted_couplings)

    target = pjoin(self.dir_path, 'bin', 'internal', 'ufomodel')
    # drop any stale copy before re-copying
    try:
        shutil.rmtree(target)
    except OSError:
        pass

    # This is not safe if there is a '##' or '-' in the path.
    shutil.copytree(model.get('modelpath'), target,
                    ignore=shutil.ignore_patterns('*.pyc', '*.dat', '*.py~'))

    if hasattr(model, 'restrict_card'):
        out_path = pjoin(self.dir_path, 'bin', 'internal', 'ufomodel',
                         'restrict_default.dat')
        if isinstance(model.restrict_card, check_param_card.ParamCard):
            model.restrict_card.write(out_path)
        else:
            files.cp(model.restrict_card, out_path)
3332 3333 #=========================================================================== 3334 # export model files 3335 #===========================================================================
def export_model_files(self, model_path):
    """Export the model dependent files, then patch the generated
    fortran sources for MadEvent usage.

    :param model_path: path of the v4 model to export
    """

    super(ProcessExporterFortranME,self).export_model_files(model_path)

    # Add the routine update_as_param in the v4 model.
    # This is a function created in the UFO.
    text = """
      subroutine update_as_param()
      call setpara('param_card.dat',.false.)
      return
      end
"""
    # 'with' guarantees the handle is closed (the original leaked it)
    with open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a') as ff:
        ff.write(text)

    # Add the symmetry.f (v5=False selects the MG4-compatible version)
    filename = pjoin(self.dir_path, 'SubProcesses', 'symmetry.f')
    self.write_symmetry(writers.FortranWriter(filename), v5=False)

    # Modify setrun.f: setpara takes an explicit "read param card" flag
    setrun_path = pjoin(self.dir_path, 'Source', 'setrun.f')
    with open(setrun_path) as fsock:
        text = fsock.read()
    text = text.replace('call setpara(param_card_name)',
                        'call setpara(param_card_name, .true.)')
    with open(setrun_path, 'w') as fsock:
        fsock.write(text)

    self.make_model_symbolic_link()
3365 3366 #=========================================================================== 3367 # generate_subprocess_directory 3368 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        matrix_element: HelasMatrixElement for this subprocess.
        fortran_model: helas call writer used to emit the amplitude calls.
        me_number: sequential number of this matrix element (used for
            iproc.dat and as the subprocess number in matrix.f).

        Returns the number of helas calls written for matrix.f (0 on
        failure)."""

        # cwd kept from the historical chdir-based flow (see commented-out
        # os.chdir calls below); all paths are now built absolutely instead.
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')


        # Lazily record the model from the first process if not already set
        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')



        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path, subprocdir))
        except os.error as error:
            # Directory may already exist (e.g. regeneration); warn and reuse
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath, 'driver.f')
        self.write_driver(writers.FortranWriter(filename), ncomb,
                          n_grouped_proc=1,
                          v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
             self.write_matrix_element_v4(writers.FortranWriter(filename),
                                          matrix_element, fortran_model,
                                          subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t-channel topologies and per-config
        # QCD-coupling counts reused by several files below
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P" + matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))


        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams
        filename = pjoin(Ppath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath, 'nexternal.inc'), pjoin(self.dir_path, 'Source'), log=False)
        ln(pjoin(Ppath, 'leshouche.inc'), pjoin(self.dir_path, 'Source'), log=False)
        ln(pjoin(Ppath, 'maxamps.inc'), pjoin(self.dir_path, 'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        # write_matrix_element_v4 can return a false-y value on failure;
        # normalize to the integer 0 expected by callers
        if not calls:
            calls = 0
        return calls
3539 3576 3577
3578 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3579 """Finalize ME v4 directory by creating jpeg diagrams, html 3580 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3581 3582 if 'nojpeg' in flaglist: 3583 makejpg = False 3584 else: 3585 makejpg = True 3586 if 'online' in flaglist: 3587 online = True 3588 else: 3589 online = False 3590 3591 compiler = {'fortran': mg5options['fortran_compiler'], 3592 'cpp': mg5options['cpp_compiler'], 3593 'f2py': mg5options['f2py_compiler']} 3594 3595 # indicate that the output type is not grouped 3596 if not isinstance(self, ProcessExporterFortranMEGroup): 3597 self.proc_characteristic['grouped_matrix'] = False 3598 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3599 3600 modelname = self.opt['model'] 3601 if modelname == 'mssm' or modelname.startswith('mssm-'): 3602 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3603 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3604 check_param_card.convert_to_mg5card(param_card, mg5_param) 3605 check_param_card.check_valid_param_card(mg5_param) 3606 3607 # Add the combine_events.f modify param_card path/number of @X 3608 filename = pjoin(self.dir_path,'Source','combine_events.f') 3609 try: 3610 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3611 except AttributeError: 3612 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3613 nb_proc = len(set(nb_proc)) 3614 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3615 # Write maxconfigs.inc based on max of ME's/subprocess groups 3616 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3617 self.write_maxconfigs_file(writers.FortranWriter(filename), 3618 matrix_elements) 3619 3620 # Write maxparticles.inc based on max of ME's/subprocess groups 3621 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3622 
self.write_maxparticles_file(writers.FortranWriter(filename), 3623 matrix_elements) 3624 3625 # Touch "done" file 3626 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3627 3628 # Check for compiler 3629 self.set_compiler(compiler) 3630 self.set_cpp_compiler(compiler['cpp']) 3631 3632 3633 old_pos = os.getcwd() 3634 subpath = pjoin(self.dir_path, 'SubProcesses') 3635 3636 P_dir_list = [proc for proc in os.listdir(subpath) 3637 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3638 3639 devnull = os.open(os.devnull, os.O_RDWR) 3640 # Convert the poscript in jpg files (if authorize) 3641 if makejpg: 3642 try: 3643 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3644 except Exception, error: 3645 pass 3646 logger.info("Generate jpeg diagrams") 3647 for Pdir in P_dir_list: 3648 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3649 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3650 3651 logger.info("Generate web pages") 3652 # Create the WebPage using perl script 3653 3654 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3655 stdout = devnull,cwd=pjoin(self.dir_path)) 3656 3657 #os.chdir(os.path.pardir) 3658 3659 obj = gen_infohtml.make_info_html(self.dir_path) 3660 3661 if online: 3662 nb_channel = obj.rep_rule['nb_gen_diag'] 3663 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3664 #add the information to proc_charac 3665 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3666 3667 # Write command history as proc_card_mg5 3668 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3669 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3670 history.write(output_file) 3671 3672 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3673 stdout = devnull) 3674 3675 #crate the proc_characteristic file 3676 self.create_proc_charac(matrix_elements, history) 3677 3678 # create the run_card 3679 ProcessExporterFortran.finalize(self, matrix_elements, 
history, mg5options, flaglist) 3680 3681 # Run "make" to generate madevent.tar.gz file 3682 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3683 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3684 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3685 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3686 stdout = devnull, cwd=self.dir_path) 3687 3688 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3689 stdout = devnull, cwd=self.dir_path)
3690 3691 3692 3693 3694 3695 3696 #return to the initial dir 3697 #os.chdir(old_pos) 3698 3699 #=========================================================================== 3700 # write_matrix_element_v4 3701 #===========================================================================
3702 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3703 proc_id = "", config_map = [], subproc_number = ""):
3704 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3705 3706 if not matrix_element.get('processes') or \ 3707 not matrix_element.get('diagrams'): 3708 return 0 3709 3710 if writer: 3711 if not isinstance(writer, writers.FortranWriter): 3712 raise writers.FortranWriter.FortranWriterError(\ 3713 "writer not FortranWriter") 3714 # Set lowercase/uppercase Fortran code 3715 writers.FortranWriter.downcase = False 3716 3717 # The proc prefix is not used for MadEvent output so it can safely be set 3718 # to an empty string. 3719 replace_dict = {'proc_prefix':''} 3720 3721 # Extract helas calls 3722 helas_calls = fortran_model.get_matrix_element_calls(\ 3723 matrix_element) 3724 3725 replace_dict['helas_calls'] = "\n".join(helas_calls) 3726 3727 3728 # Extract version number and date from VERSION file 3729 info_lines = self.get_mg5_info_lines() 3730 replace_dict['info_lines'] = info_lines 3731 3732 # Extract process info lines 3733 process_lines = self.get_process_info_lines(matrix_element) 3734 replace_dict['process_lines'] = process_lines 3735 3736 # Set proc_id 3737 replace_dict['proc_id'] = proc_id 3738 3739 # Extract ncomb 3740 ncomb = matrix_element.get_helicity_combinations() 3741 replace_dict['ncomb'] = ncomb 3742 3743 # Extract helicity lines 3744 helicity_lines = self.get_helicity_lines(matrix_element) 3745 replace_dict['helicity_lines'] = helicity_lines 3746 3747 # Extract IC line 3748 ic_line = self.get_ic_line(matrix_element) 3749 replace_dict['ic_line'] = ic_line 3750 3751 # Extract overall denominator 3752 # Averaging initial state color, spin, and identical FS particles 3753 den_factor_line = self.get_den_factor_line(matrix_element) 3754 replace_dict['den_factor_line'] = den_factor_line 3755 3756 # Extract ngraphs 3757 ngraphs = matrix_element.get_number_of_amplitudes() 3758 replace_dict['ngraphs'] = ngraphs 3759 3760 # Extract ndiags 3761 ndiags = len(matrix_element.get('diagrams')) 3762 replace_dict['ndiags'] = ndiags 3763 3764 # 
Set define_iconfigs_lines 3765 replace_dict['define_iconfigs_lines'] = \ 3766 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3767 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3768 3769 if proc_id: 3770 # Set lines for subprocess group version 3771 # Set define_iconfigs_lines 3772 replace_dict['define_iconfigs_lines'] += \ 3773 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3774 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3775 # Set set_amp2_line 3776 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3777 proc_id 3778 else: 3779 # Standard running 3780 # Set set_amp2_line 3781 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3782 3783 # Extract nwavefuncs 3784 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3785 replace_dict['nwavefuncs'] = nwavefuncs 3786 3787 # Extract ncolor 3788 ncolor = max(1, len(matrix_element.get('color_basis'))) 3789 replace_dict['ncolor'] = ncolor 3790 3791 # Extract color data lines 3792 color_data_lines = self.get_color_data_lines(matrix_element) 3793 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3794 3795 3796 # Set the size of Wavefunction 3797 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 3798 replace_dict['wavefunctionsize'] = 18 3799 else: 3800 replace_dict['wavefunctionsize'] = 6 3801 3802 # Extract amp2 lines 3803 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 3804 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 3805 3806 # The JAMP definition depends on the splitting order 3807 split_orders=matrix_element.get('processes')[0].get('split_orders') 3808 if len(split_orders)>0: 3809 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 3810 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 3811 matrix_element.get('processes')[0],squared_orders) 3812 else: 3813 # Consider the output of a dummy order 'ALL_ORDERS' for which we 3814 # set all amplitude order to weight 1 and only one squared order 3815 # 
contribution which is of course ALL_ORDERS=2. 3816 squared_orders = [(2,),] 3817 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 3818 replace_dict['chosen_so_configs'] = '.TRUE.' 3819 3820 replace_dict['nAmpSplitOrders']=len(amp_orders) 3821 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 3822 replace_dict['split_order_str_list']=str(split_orders) 3823 replace_dict['nSplitOrders']=max(len(split_orders),1) 3824 amp_so = self.get_split_orders_lines( 3825 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 3826 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 3827 replace_dict['ampsplitorders']='\n'.join(amp_so) 3828 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 3829 3830 3831 # Extract JAMP lines 3832 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 3833 jamp_lines = self.get_JAMP_lines_split_order(\ 3834 matrix_element,amp_orders,split_order_names= 3835 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 3836 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3837 3838 replace_dict['template_file'] = pjoin(_file_path, \ 3839 'iolibs/template_files/%s' % self.matrix_file) 3840 replace_dict['template_file2'] = pjoin(_file_path, \ 3841 'iolibs/template_files/split_orders_helping_functions.inc') 3842 if writer: 3843 file = open(replace_dict['template_file']).read() 3844 file = file % replace_dict 3845 # Add the split orders helper functions. 3846 file = file + '\n' + open(replace_dict['template_file2'])\ 3847 .read()%replace_dict 3848 # Write the file 3849 writer.writelines(file) 3850 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor 3851 else: 3852 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor) 3853 return replace_dict
3854 3855 #=========================================================================== 3856 # write_auto_dsig_file 3857 #===========================================================================
3858 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3859 """Write the auto_dsig.f file for the differential cross section 3860 calculation, includes pdf call information""" 3861 3862 if not matrix_element.get('processes') or \ 3863 not matrix_element.get('diagrams'): 3864 return 0 3865 3866 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3867 self.proc_characteristic['ninitial'] = ninitial 3868 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 3869 3870 # Add information relevant for MLM matching: 3871 # Maximum QCD power in all the contributions 3872 max_qcd_order = 0 3873 for diag in matrix_element.get('diagrams'): 3874 orders = diag.calculate_orders() 3875 if 'QCD' in orders: 3876 max_qcd_order = max(max_qcd_order,orders['QCD']) 3877 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 3878 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 3879 proc.get('model').get_particle(id).get('color')>1]) 3880 for proc in matrix_element.get('processes')) 3881 # Maximum number of final state light jets to be matched 3882 self.proc_characteristic['max_n_matched_jets'] = max( 3883 self.proc_characteristic['max_n_matched_jets'], 3884 min(max_qcd_order,max_n_light_final_partons)) 3885 3886 # List of default pdgs to be considered for the CKKWl merging cut 3887 self.proc_characteristic['colored_pdgs'] = \ 3888 sorted(list(set([abs(p.get('pdg_code')) for p in 3889 matrix_element.get('processes')[0].get('model').get('particles') if 3890 p.get('color')>1]))) 3891 3892 if ninitial < 1 or ninitial > 2: 3893 raise writers.FortranWriter.FortranWriterError, \ 3894 """Need ninitial = 1 or 2 to write auto_dsig file""" 3895 3896 replace_dict = {} 3897 3898 # Extract version number and date from VERSION file 3899 info_lines = self.get_mg5_info_lines() 3900 replace_dict['info_lines'] = info_lines 3901 3902 # Extract process info lines 3903 process_lines = self.get_process_info_lines(matrix_element) 3904 replace_dict['process_lines'] = process_lines 3905 
3906 # Set proc_id 3907 replace_dict['proc_id'] = proc_id 3908 replace_dict['numproc'] = 1 3909 3910 # Set dsig_line 3911 if ninitial == 1: 3912 # No conversion, since result of decay should be given in GeV 3913 dsig_line = "pd(0)*dsiguu" 3914 else: 3915 # Convert result (in GeV) to pb 3916 dsig_line = "pd(0)*conv*dsiguu" 3917 3918 replace_dict['dsig_line'] = dsig_line 3919 3920 # Extract pdf lines 3921 pdf_vars, pdf_data, pdf_lines = \ 3922 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3923 replace_dict['pdf_vars'] = pdf_vars 3924 replace_dict['pdf_data'] = pdf_data 3925 replace_dict['pdf_lines'] = pdf_lines 3926 3927 # Lines that differ between subprocess group and regular 3928 if proc_id: 3929 replace_dict['numproc'] = int(proc_id) 3930 replace_dict['passcuts_begin'] = "" 3931 replace_dict['passcuts_end'] = "" 3932 # Set lines for subprocess group version 3933 # Set define_iconfigs_lines 3934 replace_dict['define_subdiag_lines'] = \ 3935 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3936 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3937 replace_dict['cutsdone'] = "" 3938 else: 3939 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3940 replace_dict['passcuts_end'] = "ENDIF" 3941 replace_dict['define_subdiag_lines'] = "" 3942 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 3943 3944 if not isinstance(self, ProcessExporterFortranMEGroup): 3945 ncomb=matrix_element.get_helicity_combinations() 3946 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 3947 else: 3948 replace_dict['read_write_good_hel'] = "" 3949 3950 context = {'read_write_good_hel':True} 3951 3952 if writer: 3953 file = open(pjoin(_file_path, \ 3954 'iolibs/template_files/auto_dsig_v4.inc')).read() 3955 file = file % replace_dict 3956 3957 # Write the file 3958 writer.writelines(file, context=context) 3959 else: 3960 return replace_dict, context
3961 #=========================================================================== 3962 # write_coloramps_file 3963 #===========================================================================
3964 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
3965 """Write the coloramps.inc file for MadEvent""" 3966 3967 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 3968 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 3969 (max(len(matrix_element.get('color_basis').keys()), 1), 3970 len(mapconfigs))) 3971 3972 3973 # Write the file 3974 writer.writelines(lines) 3975 3976 return True
3977 3978 #=========================================================================== 3979 # write_colors_file 3980 #===========================================================================
    def write_colors_file(self, writer, matrix_elements):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element."""

        # Accept a single matrix element as well as a list of them
        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        wf_ids = set(sum([sum([sum([[wf.get_pdg_code(), wf.get_anti_pdg_code()] \
                                    for wf in d.get('wavefunctions')], []) \
                               for d in me.get('diagrams')], []) \
                          for me in matrix_elements], []))

        # Same for the external legs (including decay chains)
        leg_ids = set(sum([sum([sum([[l.get('id'),
                                      model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                     for l in p.get_legs_with_decays()], []) \
                                for p in me.get('processes')], []) \
                           for me in matrix_elements], []))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # Build a Fortran if/else-if chain mapping each pdg code to its color
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
            get_color=%d
            return
            """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c           This is dummy particle used in multiparticle vertices
            get_color=2
            return
            """ % model.get_first_non_pdg()
        lines += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

        # Write the file
        writer.writelines(lines)

        return True
4037 4038 #=========================================================================== 4039 # write_config_nqcd_file 4040 #===========================================================================
4041 - def write_config_nqcd_file(self, writer, nqcd_list):
4042 """Write the config_nqcd.inc with the number of QCD couplings 4043 for each config""" 4044 4045 lines = [] 4046 for iconf, n in enumerate(nqcd_list): 4047 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4048 4049 # Write the file 4050 writer.writelines(lines) 4051 4052 return True
4053 4054 #=========================================================================== 4055 # write_maxconfigs_file 4056 #===========================================================================
4057 - def write_maxconfigs_file(self, writer, matrix_elements):
4058 """Write the maxconfigs.inc file for MadEvent""" 4059 4060 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4061 maxconfigs = max([me.get_num_configs() for me in \ 4062 matrix_elements.get('matrix_elements')]) 4063 else: 4064 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4065 4066 lines = "integer lmaxconfigs\n" 4067 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4068 4069 # Write the file 4070 writer.writelines(lines) 4071 4072 return True
4073 4074 #=========================================================================== 4075 # read_write_good_hel 4076 #===========================================================================
4077 - def read_write_good_hel(self, ncomb):
4078 """return the code to read/write the good_hel common_block""" 4079 4080 convert = {'ncomb' : ncomb} 4081 output = """ 4082 subroutine write_good_hel(stream_id) 4083 implicit none 4084 integer stream_id 4085 INTEGER NCOMB 4086 PARAMETER ( NCOMB=%(ncomb)d) 4087 LOGICAL GOODHEL(NCOMB) 4088 INTEGER NTRY 4089 common/BLOCK_GOODHEL/NTRY,GOODHEL 4090 write(stream_id,*) GOODHEL 4091 return 4092 end 4093 4094 4095 subroutine read_good_hel(stream_id) 4096 implicit none 4097 include 'genps.inc' 4098 integer stream_id 4099 INTEGER NCOMB 4100 PARAMETER ( NCOMB=%(ncomb)d) 4101 LOGICAL GOODHEL(NCOMB) 4102 INTEGER NTRY 4103 common/BLOCK_GOODHEL/NTRY,GOODHEL 4104 read(stream_id,*) GOODHEL 4105 NTRY = MAXTRIES + 1 4106 return 4107 end 4108 4109 subroutine init_good_hel() 4110 implicit none 4111 INTEGER NCOMB 4112 PARAMETER ( NCOMB=%(ncomb)d) 4113 LOGICAL GOODHEL(NCOMB) 4114 INTEGER NTRY 4115 INTEGER I 4116 4117 do i=1,NCOMB 4118 GOODHEL(I) = .false. 4119 enddo 4120 NTRY = 0 4121 end 4122 4123 integer function get_maxsproc() 4124 implicit none 4125 get_maxsproc = 1 4126 return 4127 end 4128 4129 """ % convert 4130 4131 return output
4132 4133 #=========================================================================== 4134 # write_config_subproc_map_file 4135 #===========================================================================
4136 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4137 """Write a dummy config_subproc.inc file for MadEvent""" 4138 4139 lines = [] 4140 4141 for iconfig in range(len(s_and_t_channels)): 4142 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4143 (iconfig + 1)) 4144 4145 # Write the file 4146 writer.writelines(lines) 4147 4148 return True
4149 4150 #=========================================================================== 4151 # write_configs_file 4152 #===========================================================================
4153 - def write_configs_file(self, writer, matrix_element):
4154 """Write the configs.inc file for MadEvent""" 4155 4156 # Extract number of external particles 4157 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4158 4159 model = matrix_element.get('processes')[0].get('model') 4160 configs = [(i+1, d) for (i, d) in \ 4161 enumerate(matrix_element.get('diagrams'))] 4162 mapconfigs = [c[0] for c in configs] 4163 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4164 [[c[1]] for c in configs], 4165 mapconfigs, 4166 nexternal, ninitial, 4167 model)
4168 4169 #=========================================================================== 4170 # write_run_configs_file 4171 #===========================================================================
4172 - def write_run_config_file(self, writer):
4173 """Write the run_configs.inc file for MadEvent""" 4174 4175 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4176 4177 if self.proc_characteristic['loop_induced']: 4178 job_per_chan = 1 4179 else: 4180 job_per_chan = 5 4181 4182 if writer: 4183 text = open(path).read() % {'chanperjob': job_per_chan} 4184 writer.write(text) 4185 return True 4186 else: 4187 return {'chanperjob': job_per_chan}
4188 4189 #=========================================================================== 4190 # write_configs_file_from_diagrams 4191 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list): the channel topologies kept
        per config and the number of QCD couplings per config."""

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # Smallest maximal vertex size over all configs; configs containing
        # any larger vertex are skipped below (only 3-vertices allowed)
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers() != []]
        minvert = min(vert_list) if vert_list != [] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Dummy pdg code used for multiparticle-vertex propagators
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            # NOTE: empty_verts is shared by reference with the stchannels
            # entries for missing diagrams and is resized in place below
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s, t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s, t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s, t in stchannels]):
                empty_verts[:] = [None] * max([len(s) for s, t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s, t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            # Number of QCD couplings in this diagram (taken from the first
            # contributing subprocess diagram; 0 if no QCD order present)
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                # s-channel entries are per-subprocess tuples; t-channel
                # entries are single vertices
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator pdg per subprocess (0 when the
                    # subprocess does not contribute to this config)
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list
4325 4326 #=========================================================================== 4327 # write_decayBW_file 4328 #===========================================================================
4329 - def write_decayBW_file(self, writer, s_and_t_channels):
4330 """Write the decayBW.inc file for MadEvent""" 4331 4332 lines = [] 4333 4334 booldict = {None: "0", True: "1", False: "2"} 4335 4336 for iconf, config in enumerate(s_and_t_channels): 4337 schannels = config[0] 4338 for vertex in schannels: 4339 # For the resulting leg, pick out whether it comes from 4340 # decay or not, as given by the onshell flag 4341 leg = vertex.get('legs')[-1] 4342 lines.append("data gForceBW(%d,%d)/%s/" % \ 4343 (leg.get('number'), iconf + 1, 4344 booldict[leg.get('onshell')])) 4345 4346 # Write the file 4347 writer.writelines(lines) 4348 4349 return True
4350 4351 #=========================================================================== 4352 # write_dname_file 4353 #===========================================================================
4354 - def write_dname_file(self, writer, dir_name):
4355 """Write the dname.mg file for MG4""" 4356 4357 line = "DIRNAME=%s" % dir_name 4358 4359 # Write the file 4360 writer.write(line + "\n") 4361 4362 return True
4363 4364 #=========================================================================== 4365 # write_driver 4366 #===========================================================================
4367 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4368 """Write the SubProcess/driver.f file for MG4""" 4369 4370 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4371 4372 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4373 card = 'Source/MODEL/MG5_param.dat' 4374 else: 4375 card = 'param_card.dat' 4376 # Requiring each helicity configuration to be probed by 10 points for 4377 # matrix element before using the resulting grid for MC over helicity 4378 # sampling. 4379 # We multiply this by 2 because each grouped subprocess is called at most 4380 # twice for each IMIRROR. 4381 replace_dict = {'param_card_name':card, 4382 'ncomb':ncomb, 4383 'hel_init_points':n_grouped_proc*10*2} 4384 if not v5: 4385 replace_dict['secondparam']=',.true.' 4386 else: 4387 replace_dict['secondparam']='' 4388 4389 if writer: 4390 text = open(path).read() % replace_dict 4391 writer.write(text) 4392 return True 4393 else: 4394 return replace_dict
4395 4396 #=========================================================================== 4397 # write_addmothers 4398 #===========================================================================
4399 - def write_addmothers(self, writer):
4400 """Write the SubProcess/addmothers.f""" 4401 4402 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4403 4404 text = open(path).read() % {'iconfig': 'diag_number'} 4405 writer.write(text) 4406 4407 return True
4408 4409 4410 #=========================================================================== 4411 # write_combine_events 4412 #===========================================================================
4413 - def write_combine_events(self, writer, nb_proc=100):
4414 """Write the SubProcess/driver.f file for MG4""" 4415 4416 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4417 4418 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4419 card = 'Source/MODEL/MG5_param.dat' 4420 else: 4421 card = 'param_card.dat' 4422 4423 #set maxpup (number of @X in the process card) 4424 4425 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4426 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4427 writer.write(text) 4428 4429 return True
4430 4431 4432 #=========================================================================== 4433 # write_symmetry 4434 #===========================================================================
4435 - def write_symmetry(self, writer, v5=True):
4436 """Write the SubProcess/driver.f file for ME""" 4437 4438 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4439 4440 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4441 card = 'Source/MODEL/MG5_param.dat' 4442 else: 4443 card = 'param_card.dat' 4444 4445 if v5: 4446 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4447 else: 4448 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4449 4450 if writer: 4451 text = open(path).read() 4452 text = text % replace_dict 4453 writer.write(text) 4454 return True 4455 else: 4456 return replace_dict
4457 4458 4459 4460 #=========================================================================== 4461 # write_iproc_file 4462 #===========================================================================
4463 - def write_iproc_file(self, writer, me_number):
4464 """Write the iproc.dat file for MG4""" 4465 line = "%d" % (me_number + 1) 4466 4467 # Write the file 4468 for line_to_write in writer.write_line(line): 4469 writer.write(line_to_write) 4470 return True
4471 4472 #=========================================================================== 4473 # write_mg_sym_file 4474 #===========================================================================
4475 - def write_mg_sym_file(self, writer, matrix_element):
4476 """Write the mg.sym file for MadEvent.""" 4477 4478 lines = [] 4479 4480 # Extract process with all decays included 4481 final_legs = filter(lambda leg: leg.get('state') == True, 4482 matrix_element.get('processes')[0].get_legs_with_decays()) 4483 4484 ninitial = len(filter(lambda leg: leg.get('state') == False, 4485 matrix_element.get('processes')[0].get('legs'))) 4486 4487 identical_indices = {} 4488 4489 # Extract identical particle info 4490 for i, leg in enumerate(final_legs): 4491 if leg.get('id') in identical_indices: 4492 identical_indices[leg.get('id')].append(\ 4493 i + ninitial + 1) 4494 else: 4495 identical_indices[leg.get('id')] = [i + ninitial + 1] 4496 4497 # Remove keys which have only one particle 4498 for key in identical_indices.keys(): 4499 if len(identical_indices[key]) < 2: 4500 del identical_indices[key] 4501 4502 # Write mg.sym file 4503 lines.append(str(len(identical_indices.keys()))) 4504 for key in identical_indices.keys(): 4505 lines.append(str(len(identical_indices[key]))) 4506 for number in identical_indices[key]: 4507 lines.append(str(number)) 4508 4509 # Write the file 4510 writer.writelines(lines) 4511 4512 return True
4513 4514 #=========================================================================== 4515 # write_mg_sym_file 4516 #===========================================================================
4517 - def write_default_mg_sym_file(self, writer):
4518 """Write the mg.sym file for MadEvent.""" 4519 4520 lines = "0" 4521 4522 # Write the file 4523 writer.writelines(lines) 4524 4525 return True
4526 4527 #=========================================================================== 4528 # write_ncombs_file 4529 #===========================================================================
4530 - def write_ncombs_file(self, writer, nexternal):
4531 """Write the ncombs.inc file for MadEvent.""" 4532 4533 # ncomb (used for clustering) is 2^nexternal 4534 file = " integer n_max_cl\n" 4535 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4536 4537 # Write the file 4538 writer.writelines(file) 4539 4540 return True
4541 4542 #=========================================================================== 4543 # write_processes_file 4544 #===========================================================================
4545 - def write_processes_file(self, writer, subproc_group):
4546 """Write the processes.dat file with info about the subprocesses 4547 in this group.""" 4548 4549 lines = [] 4550 4551 for ime, me in \ 4552 enumerate(subproc_group.get('matrix_elements')): 4553 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4554 ",".join(p.base_string() for p in \ 4555 me.get('processes')))) 4556 if me.get('has_mirror_process'): 4557 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4558 for proc in mirror_procs: 4559 legs = copy.copy(proc.get('legs_with_decays')) 4560 legs.insert(0, legs.pop(1)) 4561 proc.set("legs_with_decays", legs) 4562 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4563 mirror_procs)) 4564 else: 4565 lines.append("mirror none") 4566 4567 # Write the file 4568 writer.write("\n".join(lines)) 4569 4570 return True
4571 4572 #=========================================================================== 4573 # write_symswap_file 4574 #===========================================================================
4575 - def write_symswap_file(self, writer, ident_perms):
4576 """Write the file symswap.inc for MG4 by comparing diagrams using 4577 the internal matrix element value functionality.""" 4578 4579 lines = [] 4580 4581 # Write out lines for symswap.inc file (used to permute the 4582 # external leg momenta 4583 for iperm, perm in enumerate(ident_perms): 4584 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4585 (iperm+1, ",".join([str(i+1) for i in perm]))) 4586 lines.append("data nsym/%d/" % len(ident_perms)) 4587 4588 # Write the file 4589 writer.writelines(lines) 4590 4591 return True
4592 4593 #=========================================================================== 4594 # write_symfact_file 4595 #===========================================================================
4596 - def write_symfact_file(self, writer, symmetry):
4597 """Write the files symfact.dat for MG4 by comparing diagrams using 4598 the internal matrix element value functionality.""" 4599 4600 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4601 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4602 # Write out lines for symswap.inc file (used to permute the 4603 # external leg momenta 4604 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4605 # Write the file 4606 writer.write('\n'.join(lines)) 4607 writer.write('\n') 4608 4609 return True
4610 4611 #=========================================================================== 4612 # write_symperms_file 4613 #===========================================================================
4614 - def write_symperms_file(self, writer, perms):
4615 """Write the symperms.inc file for subprocess group, used for 4616 symmetric configurations""" 4617 4618 lines = [] 4619 for iperm, perm in enumerate(perms): 4620 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4621 (iperm+1, ",".join([str(i+1) for i in perm]))) 4622 4623 # Write the file 4624 writer.writelines(lines) 4625 4626 return True
4627 4628 #=========================================================================== 4629 # write_subproc 4630 #===========================================================================
4631 - def write_subproc(self, writer, subprocdir):
4632 """Append this subprocess to the subproc.mg file for MG4""" 4633 4634 # Write line to file 4635 writer.write(subprocdir + "\n") 4636 4637 return True
4638
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used to generate the matrix_N.f files of this exporter.
    matrix_file = "matrix_madevent_group_v4.inc"
    # Label identifying the grouped MadEvent output mode.
    grouped_mode = 'madevent'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls generated for the group
        (0 if the subprocess directory could not be entered).
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        # The rest of this method works with the current working directory
        # set to the new Pn directory; the original cwd is restored at the
        # end.
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory may already exist; warn and reuse it.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb!=me.get_helicity_combinations():
                raise MadGraph5Error, "All grouped processes must share the "+\
                                      "same number of helicity configurations."

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename),ncomb,
                          n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

        # One matrix<i>.f / auto_dsig<i>.f / matrix<i>.ps per matrix element
        # in the group.
        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime+1)
            calls, ncolor = \
               self.write_matrix_element_v4(writers.FortranWriter(filename),
                                            matrix_element,
                                            fortran_model,
                                            proc_id=str(ime+1),
                                            config_map=subproc_group.get('diagram_maps')[ime],
                                            subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = "matrix%d.ps" % (ime+1)
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE(review): matrix_element is the loop variable left over from
        # the loop above -- presumably safe because all grouped MEs share
        # the same external particles; confirm.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir)
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        # Normalise any falsy value to an integer 0 before returning.
        if not tot_calls:
            tot_calls = 0
        return tot_calls
4876 4877 #=========================================================================== 4878 # write_super_auto_dsig_file 4879 #===========================================================================
4880 - def write_super_auto_dsig_file(self, writer, subproc_group):
4881 """Write the auto_dsig.f file selecting between the subprocesses 4882 in subprocess group mode""" 4883 4884 replace_dict = {} 4885 4886 # Extract version number and date from VERSION file 4887 info_lines = self.get_mg5_info_lines() 4888 replace_dict['info_lines'] = info_lines 4889 4890 matrix_elements = subproc_group.get('matrix_elements') 4891 4892 # Extract process info lines 4893 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 4894 matrix_elements]) 4895 replace_dict['process_lines'] = process_lines 4896 4897 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 4898 replace_dict['nexternal'] = nexternal 4899 4900 replace_dict['nsprocs'] = 2*len(matrix_elements) 4901 4902 # Generate dsig definition line 4903 dsig_def_line = "DOUBLE PRECISION " + \ 4904 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 4905 range(len(matrix_elements))]) 4906 replace_dict["dsig_def_line"] = dsig_def_line 4907 4908 # Generate dsig process lines 4909 call_dsig_proc_lines = [] 4910 for iproc in range(len(matrix_elements)): 4911 call_dsig_proc_lines.append(\ 4912 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 4913 {"num": iproc + 1, 4914 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 4915 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 4916 4917 ncomb=matrix_elements[0].get_helicity_combinations() 4918 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4919 4920 if writer: 4921 file = open(pjoin(_file_path, \ 4922 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 4923 file = file % replace_dict 4924 4925 # Write the file 4926 writer.writelines(file) 4927 else: 4928 return replace_dict
4929 4930 #=========================================================================== 4931 # write_mirrorprocs 4932 #===========================================================================
4933 - def write_mirrorprocs(self, writer, subproc_group):
4934 """Write the mirrorprocs.inc file determining which processes have 4935 IS mirror process in subprocess group mode.""" 4936 4937 lines = [] 4938 bool_dict = {True: '.true.', False: '.false.'} 4939 matrix_elements = subproc_group.get('matrix_elements') 4940 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 4941 (len(matrix_elements), 4942 ",".join([bool_dict[me.get('has_mirror_process')] for \ 4943 me in matrix_elements]))) 4944 # Write the file 4945 writer.writelines(lines)
4946 4947 #=========================================================================== 4948 # write_addmothers 4949 #===========================================================================
4950 - def write_addmothers(self, writer):
4951 """Write the SubProcess/addmothers.f""" 4952 4953 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4954 4955 text = open(path).read() % {'iconfig': 'lconfig'} 4956 writer.write(text) 4957 4958 return True
4959 4960 4961 #=========================================================================== 4962 # write_coloramps_file 4963 #===========================================================================
4964 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 4965 matrix_elements):
4966 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 4967 4968 # Create a map from subprocess (matrix element) to a list of 4969 # the diagrams corresponding to each config 4970 4971 lines = [] 4972 4973 subproc_to_confdiag = {} 4974 for config in diagrams_for_config: 4975 for subproc, diag in enumerate(config): 4976 try: 4977 subproc_to_confdiag[subproc].append(diag) 4978 except KeyError: 4979 subproc_to_confdiag[subproc] = [diag] 4980 4981 for subproc in sorted(subproc_to_confdiag.keys()): 4982 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 4983 matrix_elements[subproc], 4984 subproc + 1)) 4985 4986 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 4987 (maxflows, 4988 len(diagrams_for_config), 4989 len(matrix_elements))) 4990 4991 # Write the file 4992 writer.writelines(lines) 4993 4994 return True
4995 4996 #=========================================================================== 4997 # write_config_subproc_map_file 4998 #===========================================================================
4999 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5000 """Write the config_subproc_map.inc file for subprocess groups""" 5001 5002 lines = [] 5003 # Output only configs that have some corresponding diagrams 5004 iconfig = 0 5005 for config in config_subproc_map: 5006 if set(config) == set([0]): 5007 continue 5008 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5009 (iconfig + 1, len(config), 5010 ",".join([str(i) for i in config]))) 5011 iconfig += 1 5012 # Write the file 5013 writer.writelines(lines) 5014 5015 return True
5016 5017 #=========================================================================== 5018 # read_write_good_hel 5019 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """Return (as a string of Fortran source) the subroutines used to
        read, write and initialize the good_hel common block, with NCOMB
        substituted in.

        The text is later passed through the Fortran writer, which handles
        final formatting of the emitted code.
        """

        # Only the number of helicity combinations is substituted.
        convert = {'ncomb' : ncomb}

        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I,1) = .false.
        GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
5081 5082 5083 5084 #=========================================================================== 5085 # write_configs_file 5086 #===========================================================================
5087 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5088 """Write the configs.inc file with topology information for a 5089 subprocess group. Use the first subprocess with a diagram for each 5090 configuration.""" 5091 5092 matrix_elements = subproc_group.get('matrix_elements') 5093 model = matrix_elements[0].get('processes')[0].get('model') 5094 5095 diagrams = [] 5096 config_numbers = [] 5097 for iconfig, config in enumerate(diagrams_for_config): 5098 # Check if any diagrams correspond to this config 5099 if set(config) == set([0]): 5100 continue 5101 subproc_diags = [] 5102 for s,d in enumerate(config): 5103 if d: 5104 subproc_diags.append(matrix_elements[s].\ 5105 get('diagrams')[d-1]) 5106 else: 5107 subproc_diags.append(None) 5108 diagrams.append(subproc_diags) 5109 config_numbers.append(iconfig + 1) 5110 5111 # Extract number of external particles 5112 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5113 5114 return len(diagrams), \ 5115 self.write_configs_file_from_diagrams(writer, diagrams, 5116 config_numbers, 5117 nexternal, ninitial, 5118 model)
5119 5120 #=========================================================================== 5121 # write_run_configs_file 5122 #===========================================================================
5123 - def write_run_config_file(self, writer):
5124 """Write the run_configs.inc file for MadEvent""" 5125 5126 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5127 if self.proc_characteristic['loop_induced']: 5128 job_per_chan = 1 5129 else: 5130 job_per_chan = 2 5131 text = open(path).read() % {'chanperjob':job_per_chan} 5132 writer.write(text) 5133 return True
5134 5135 5136 #=========================================================================== 5137 # write_leshouche_file 5138 #===========================================================================
5139 - def write_leshouche_file(self, writer, subproc_group):
5140 """Write the leshouche.inc file for MG4""" 5141 5142 all_lines = [] 5143 5144 for iproc, matrix_element in \ 5145 enumerate(subproc_group.get('matrix_elements')): 5146 all_lines.extend(self.get_leshouche_lines(matrix_element, 5147 iproc)) 5148 # Write the file 5149 writer.writelines(all_lines) 5150 return True
5151 5152
    def finalize(self,*args, **opts):
        """Finalize the grouped MadEvent output: delegate to the parent
        class, then record that this output groups its matrix elements."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        # Ensure that the grouping information is on the correct value.
        self.proc_characteristic['grouped_matrix'] = True
#===============================================================================
# UFO_model_to_mg4
#===============================================================================

def python_to_fortran(x):
    """Translate a UFO/python expression string into its Fortran form.

    (Converted from a lambda assigned to a name, per PEP 8 E731; the
    callable interface is unchanged.)
    """
    return parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran types used for the multiple-precision output mode.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
5181 - def __init__(self, model, output_path, opt=None):
5182 """ initialization of the objects """ 5183 5184 self.model = model 5185 self.model_name = model['name'] 5186 self.dir_path = output_path 5187 5188 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5189 'loop_induced': False} 5190 if opt: 5191 self.opt.update(opt) 5192 5193 self.coups_dep = [] # (name, expression, type) 5194 self.coups_indep = [] # (name, expression, type) 5195 self.params_dep = [] # (name, expression, type) 5196 self.params_indep = [] # (name, expression, type) 5197 self.params_ext = [] # external parameter 5198 self.p_to_f = parsers.UFOExpressionParserFortran() 5199 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5200
5202 """modify the parameter if some of them are identical up to the case""" 5203 5204 lower_dict={} 5205 duplicate = set() 5206 keys = self.model['parameters'].keys() 5207 for key in keys: 5208 for param in self.model['parameters'][key]: 5209 lower_name = param.name.lower() 5210 if not lower_name: 5211 continue 5212 try: 5213 lower_dict[lower_name].append(param) 5214 except KeyError,error: 5215 lower_dict[lower_name] = [param] 5216 else: 5217 duplicate.add(lower_name) 5218 logger.debug('%s is define both as lower case and upper case.' 5219 % lower_name) 5220 if not duplicate: 5221 return 5222 5223 re_expr = r'''\b(%s)\b''' 5224 to_change = [] 5225 change={} 5226 for value in duplicate: 5227 for i, var in enumerate(lower_dict[value]): 5228 to_change.append(var.name) 5229 new_name = '%s%s' % (var.name.lower(), 5230 ('__%d'%(i+1) if i>0 else '')) 5231 change[var.name] = new_name 5232 var.name = new_name 5233 5234 # Apply the modification to the map_CTcoup_CTparam of the model 5235 # if it has one (giving for each coupling the CT parameters whcih 5236 # are necessary and which should be exported to the model. 
5237 if hasattr(self.model,'map_CTcoup_CTparam'): 5238 for coup, ctparams in self.model.map_CTcoup_CTparam: 5239 for i, ctparam in enumerate(ctparams): 5240 try: 5241 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5242 except KeyError: 5243 pass 5244 5245 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5246 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5247 5248 # change parameters 5249 for key in keys: 5250 if key == ('external',): 5251 continue 5252 for param in self.model['parameters'][key]: 5253 param.expr = rep_pattern.sub(replace, param.expr) 5254 5255 # change couplings 5256 for key in self.model['couplings'].keys(): 5257 for coup in self.model['couplings'][key]: 5258 coup.expr = rep_pattern.sub(replace, coup.expr) 5259 5260 # change mass/width 5261 for part in self.model['particles']: 5262 if str(part.get('mass')) in to_change: 5263 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5264 if str(part.get('width')) in to_change: 5265 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5266
5267 - def refactorize(self, wanted_couplings = []):
5268 """modify the couplings to fit with MG4 convention """ 5269 5270 # Keep only separation in alphaS 5271 keys = self.model['parameters'].keys() 5272 keys.sort(key=len) 5273 for key in keys: 5274 to_add = [o for o in self.model['parameters'][key] if o.name] 5275 5276 if key == ('external',): 5277 self.params_ext += to_add 5278 elif any([(k in key) for k in self.PS_dependent_key]): 5279 self.params_dep += to_add 5280 else: 5281 self.params_indep += to_add 5282 # same for couplings 5283 keys = self.model['couplings'].keys() 5284 keys.sort(key=len) 5285 for key, coup_list in self.model['couplings'].items(): 5286 if any([(k in key) for k in self.PS_dependent_key]): 5287 self.coups_dep += [c for c in coup_list if 5288 (not wanted_couplings or c.name in \ 5289 wanted_couplings)] 5290 else: 5291 self.coups_indep += [c for c in coup_list if 5292 (not wanted_couplings or c.name in \ 5293 wanted_couplings)] 5294 5295 # MG4 use G and not aS as it basic object for alphas related computation 5296 #Pass G in the independant list 5297 if 'G' in self.params_dep: 5298 index = self.params_dep.index('G') 5299 G = self.params_dep.pop(index) 5300 # G.expr = '2*cmath.sqrt(as*pi)' 5301 # self.params_indep.insert(0, self.params_dep.pop(index)) 5302 # No need to add it if not defined 5303 5304 if 'aS' not in self.params_ext: 5305 logger.critical('aS not define as external parameter adding it!') 5306 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5307 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5308 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
    def build(self, wanted_couplings = [], full=True):
        """modify the couplings to fit with MG4 convention and creates all the
        different files

        With full=False only the parameter/coupling bookkeeping is done and
        no files are written.
        """

        # Normalise parameter names differing only by case, then split
        # parameters/couplings into alphaS-dependent and independent sets.
        self.pass_parameter_to_case_insensitive()
        self.refactorize(wanted_couplings)

        # write the files
        if full:
            if wanted_couplings:
                # extract the wanted ct parameters
                self.extract_needed_CTparam(wanted_couplings=wanted_couplings)
            self.write_all()
5322 5323
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        With format='fortran' a FortranWriter is returned; otherwise a
        plain file object.  The header consists of a 77-character comment
        rule, a "written by the UFO converter" line and another rule.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            # Name lookup here finds the *builtin* open, not this method.
            fsock = open(file_path, 'w')

        # NOTE(review): ``file.writelines`` (the Python 2 builtin called as
        # an unbound method) appears deliberate: it bypasses any overridden
        # writelines on FortranWriter so the header is written verbatim --
        # TODO confirm; this idiom is Python 2 only.
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) *  ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5340 5341
def write_all(self):
    """ write all the files """
    # Drives the whole model export.  The order matters: parameter
    # definition files are produced before the coupling files that use them.

    #write the part related to the external parameter
    self.create_ident_card()
    self.create_param_read()

    #write the definition of the parameter
    self.create_input()
    self.create_intparam_def(dp=True,mp=False)
    if self.opt['mp']:
        # quadruple-precision variant of the internal-parameter definitions
        self.create_intparam_def(dp=False,mp=True)

    # definition of the coupling.
    self.create_actualize_mp_ext_param_inc()
    self.create_coupl_inc()
    self.create_write_couplings()
    self.create_couplings()

    # the makefile
    self.create_makeinc()
    self.create_param_write()

    # The model functions
    self.create_model_functions_inc()
    self.create_model_functions_def()

    # The param_card.dat
    self.create_param_card()

    # All the standard files
    self.copy_standard_file()
5374 5375 ############################################################################ 5376 ## ROUTINE CREATING THE FILES ############################################ 5377 ############################################################################ 5378
def copy_standard_file(self):
    """Copy the standard files for the fortran model.

    Links the static library sources, instantiates the rw_para.f template
    (includes + optional param-card loading call) and picks the makefile
    matching self.opt['export_format'].

    Fixes vs the previous version: no longer shadows the builtin ``file``,
    and every file handle is closed via ``with``.
    """

    # copy the library files
    file_to_link = ['formats.inc', 'printout.f',
                    'rw_para.f', 'testprog.f']

    for filename in file_to_link:
        cp(MG5DIR + '/models/template_files/fortran/' + filename,
           self.dir_path)

    # rw_para.f is a template with %(includes)s / %(load_card)s slots
    with open(os.path.join(MG5DIR,
                  'models/template_files/fortran/rw_para.f')) as tmpl:
        template = tmpl.read()

    includes = ["include \'coupl.inc\'", "include \'input.inc\'",
                "include \'model_functions.inc\'"]
    if self.opt['mp']:
        includes.extend(["include \'mp_coupl.inc\'", "include \'mp_input.inc\'"])
    # In standalone and madloop we do no use the compiled param card but
    # still parse the .dat one so we must load it.
    if self.opt['loop_induced']:
        # loop induced follow MadEvent way to handle the card.
        load_card = ''
        lha_read_filename = 'lha_read.f'
    elif self.opt['export_format'] in ['madloop', 'madloop_optimized', 'madloop_matchbox']:
        load_card = 'call LHA_loadcard(param_name,npara,param,value)'
        lha_read_filename = 'lha_read_mp.f'
    elif self.opt['export_format'].startswith('standalone') \
            or self.opt['export_format'] in ['madweight', 'plugin'] \
            or self.opt['export_format'].startswith('matchbox'):
        load_card = 'call LHA_loadcard(param_name,npara,param,value)'
        lha_read_filename = 'lha_read.f'
    else:
        load_card = ''
        lha_read_filename = 'lha_read.f'
    cp(MG5DIR + '/models/template_files/fortran/' + lha_read_filename,
       os.path.join(self.dir_path, 'lha_read.f'))

    template = template % {'includes': '\n '.join(includes),
                           'load_card': load_card}
    with open(os.path.join(self.dir_path, 'rw_para.f'), 'w') as writer:
        writer.writelines(template)

    # select the makefile matching the export format
    if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
        cp(MG5DIR + '/models/template_files/fortran/makefile_madevent',
           self.dir_path + '/makefile')
        if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
            # aMC@NLO reuses the madevent makefile with a renamed target
            path = pjoin(self.dir_path, 'makefile')
            with open(path) as mk:
                text = mk.read()
            text = text.replace('madevent', 'aMCatNLO')
            with open(path, 'w') as mk:
                mk.writelines(text)
    elif self.opt['export_format'] in ['standalone', 'standalone_msP', 'standalone_msF',
                                       'madloop', 'madloop_optimized', 'standalone_rw',
                                       'madweight', 'matchbox', 'madloop_matchbox', 'plugin']:
        cp(MG5DIR + '/models/template_files/fortran/makefile_standalone',
           self.dir_path + '/makefile')
    #elif self.opt['export_format'] in []:
        #pass
    else:
        raise MadGraph5Error('Unknown format')
5441
def create_coupl_inc(self):
    """ write coupling.inc """
    # Declares the Fortran common blocks for G, gal, MU_R, Nf, all non-zero
    # masses/widths, every exported coupling and (optionally) the complex
    # masses.  When self.opt['mp'] is set, quadruple-precision twins go to
    # mp_coupl.inc (with self.mp_prefix-ed names) and to
    # mp_coupl_same_name.inc (same names, mp types).

    fsock = self.open('coupl.inc', format='fortran')
    if self.opt['mp']:
        mp_fsock = self.open('mp_coupl.inc', format='fortran')
        mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                        format='fortran')

    # Write header: strong/weak couplings, renormalisation scale and the
    # number of light flavours taken from the model.
    header = """double precision G
            common/strong/ G

            double complex gal(2)
            common/weak/ gal

            double precision MU_R
            common/rscale/ MU_R

            double precision Nf
            parameter(Nf=%d)
            """ % self.model.get_nflav()

    fsock.writelines(header)

    if self.opt['mp']:
        header = """%(real_mp_format)s %(mp_prefix)sG
            common/MP_strong/ %(mp_prefix)sG

            %(complex_mp_format)s %(mp_prefix)sgal(2)
            common/MP_weak/ %(mp_prefix)sgal

            %(complex_mp_format)s %(mp_prefix)sMU_R
            common/MP_rscale/ %(mp_prefix)sMU_R

            """

        mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                              'complex_mp_format':self.mp_complex_format,
                              'mp_prefix':self.mp_prefix})
        mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                              'complex_mp_format':self.mp_complex_format,
                              'mp_prefix':''})

    # Write the Mass definition/ common block
    masses = set()
    widths = set()
    if self.opt['complex_mass']:
        complex_mass = set()

    for particle in self.model.get('particles'):
        #find masses
        one_mass = particle.get('mass')
        if one_mass.lower() != 'zero':
            masses.add(one_mass)

        # find width
        one_width = particle.get('width')
        if one_width.lower() != 'zero':
            widths.add(one_width)
            # complex masses only exist for massive particles with a width
            if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                complex_mass.add('CMASS_%s' % one_mass)

    if masses:
        fsock.writelines('double precision '+','.join(masses)+'\n')
        fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                          ','.join(masses)+'\n')
            mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                          ','.join(masses)+'\n\n')
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+m for m in masses])+'\n')
            mp_fsock.writelines('common/MP_masses/ '+\
                        ','.join([self.mp_prefix+m for m in masses])+'\n\n')

    if widths:
        fsock.writelines('double precision '+','.join(widths)+'\n')
        fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                          ','.join(widths)+'\n')
            mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                          ','.join(widths)+'\n\n')
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+w for w in widths])+'\n')
            mp_fsock.writelines('common/MP_widths/ '+\
                        ','.join([self.mp_prefix+w for w in widths])+'\n\n')

    # Write the Couplings
    coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
    fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
    fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
    if self.opt['mp']:
        mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                      ','.join(coupling_list)+'\n')
        mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                      ','.join(coupling_list)+'\n\n')
        mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                            self.mp_prefix+c for c in coupling_list])+'\n')
        mp_fsock.writelines('common/MP_couplings/ '+\
                    ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

    # Write complex mass for complex mass scheme (if activated)
    if self.opt['complex_mass'] and complex_mass:
        fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
        fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(complex_mass)+'\n')
            mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                          ','.join(complex_mass)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+cm for cm in complex_mass])+'\n')
            mp_fsock.writelines('common/MP_complex_mass/ '+\
                ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5561
def create_write_couplings(self):
    """Write coupl_write.inc: Fortran 'write' statements printing the value
    of every exported coupling (alphaS-dependent ones first)."""

    fsock = self.open('coupl_write.inc', format='fortran')

    # banner for the printout
    fsock.writelines("""write(*,*)  ' Couplings of %s'
        write(*,*)  ' ---------------------------------'
        write(*,*)  ' '""" % self.model_name)

    # one write statement per coupling
    all_couplings = self.coups_dep + self.coups_indep
    statements = ["write(*,2) '%s = ', %s" % (c.name, c.name)
                  for c in all_couplings]
    fsock.writelines('\n'.join(statements))
def create_input(self):
    """create input.inc containing the definition of the parameters"""
    # Declares the real and complex external/internal parameters in common
    # blocks params_R / params_C, skipping names already declared in
    # coupl.inc (masses, widths, G, MU_R, ZERO) and unused CT parameters.
    # With self.opt['mp'], quadruple-precision twins go to mp_input.inc.

    fsock = self.open('input.inc', format='fortran')
    if self.opt['mp']:
        mp_fsock = self.open('mp_input.inc', format='fortran')

    #find mass/ width since they are already define
    already_def = set()
    for particle in self.model.get('particles'):
        already_def.add(particle.get('mass').lower())
        already_def.add(particle.get('width').lower())
        if self.opt['complex_mass']:
            already_def.add('cmass_%s' % particle.get('mass').lower())

    # valid = not one of the hard-wired names and not already declared
    is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                            name.lower() not in already_def

    real_parameters = [param.name for param in self.params_dep +
                        self.params_indep if param.type == 'real'
                        and is_valid(param.name)]

    real_parameters += [param.name for param in self.params_ext
                        if param.type == 'real'and
                           is_valid(param.name)]

    # check the parameter is a CT parameter or not
    # if yes, just use the needed ones
    real_parameters = [param for param in real_parameters \
                             if self.check_needed_param(param)]

    fsock.writelines('double precision '+','.join(real_parameters)+'\n')
    fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
    if self.opt['mp']:
        mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                          self.mp_prefix+p for p in real_parameters])+'\n')
        mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                          self.mp_prefix+p for p in real_parameters])+'\n\n')

    complex_parameters = [param.name for param in self.params_dep +
                        self.params_indep if param.type == 'complex' and
                        is_valid(param.name)]

    # check the parameter is a CT parameter or not
    # if yes, just use the needed ones
    complex_parameters = [param for param in complex_parameters \
                             if self.check_needed_param(param)]

    # unlike the real block, the complex block is skipped entirely if empty
    if complex_parameters:
        fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
        fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+p for p in complex_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                self.mp_prefix+p for p in complex_parameters])+'\n\n')
5634
def check_needed_param(self, param):
    """Return True when the parameter *param* must be exported for this
    specific computation, False when it is a counterterm (CT) parameter
    that no wanted coupling uses."""

    # LO model, or no CT-parameter filtering set up: everything is needed,
    # since only CT parameters are ever filtered out.
    filtering_active = (hasattr(self, 'allCTparameters')
                        and self.allCTparameters is not None
                        and self.usedCTparameters is not None
                        and len(self.allCTparameters) != 0)
    if not filtering_active:
        return True

    # Accept the 'conjg__' shorthand for complex conjugates: test both the
    # name itself and the name with the first 'conjg__' stripped.  Worst
    # case (a genuine name containing 'conjg__') we export an unneeded
    # parameter, which is harmless.
    lowered = param.lower()
    stripped = lowered.replace('conjg__', '', 1)

    # Anything that is not a CT parameter is always needed.
    if lowered not in self.allCTparameters and \
       stripped not in self.allCTparameters:
        return True

    # CT parameter: needed only when actually used.
    return lowered in self.usedCTparameters or \
           stripped in self.usedCTparameters
5664
def extract_needed_CTparam(self, wanted_couplings=[]):
    """Determine which counterterm (CT) parameters are needed given the
    wanted_couplings, filling self.allCTparameters and
    self.usedCTparameters (both lower-cased)."""

    if not hasattr(self.model, 'map_CTcoup_CTparam') or not wanted_couplings:
        # Setting these to None disables the filtering performed in
        # check_needed_param.
        self.allCTparameters = None
        self.usedCTparameters = None
        return

    coup_map = self.model.map_CTcoup_CTparam

    # Every CT parameter appearing in any CT coupling.
    every_param = set()
    for params in coup_map.values():
        every_param.update(params)

    # CT parameters reachable from the wanted couplings (case-insensitive).
    wanted_lower = set(coupl.lower() for coupl in wanted_couplings)
    used_param = set()
    for coupl, params in coup_map.items():
        if coupl.lower() in wanted_lower:
            used_param.update(params)

    # Store the case-insensitive versions.
    self.allCTparameters = [ct.lower() for ct in every_param]
    self.usedCTparameters = [ct.lower() for ct in used_param]
5695
def create_intparam_def(self, dp=True, mp=False):
    """ create intparam_definition.inc setting the internal parameters.
    Output the double precision and/or the multiple precision parameters
    depending on the parameters dp and mp. If mp only, then the file names
    get the 'mp_' prefix.
    """
    # write_all() calls this twice: (dp=True, mp=False) and, when
    # self.opt['mp'] is set, (dp=False, mp=True) — never both True.

    fsock = self.open('%sintparam_definition.inc'%
                         ('mp_' if mp and not dp else ''), format='fortran')

    # parameters independent of alphaS: only set once, guarded by readlha
    fsock.write_comments(\
            "Parameters that should not be recomputed event by event.\n")
    fsock.writelines("if(readlha) then\n")
    if dp:
        fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
    if mp:
        fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

    for param in self.params_indep:
        if param.name == 'ZERO':
            continue
        # check whether the parameter is a CT parameter
        # if yes,just used the needed ones
        if not self.check_needed_param(param.name):
            continue
        if dp:
            fsock.writelines("%s = %s\n" % (param.name,
                                        self.p_to_f.parse(param.expr)))
        if mp:
            fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                        self.mp_p_to_f.parse(param.expr)))

    fsock.writelines('endif')

    # alphaS-dependent parameters: recomputed for every event
    fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
    if dp:
        fsock.writelines("aS = G**2/4/pi\n")
    if mp:
        fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
    for param in self.params_dep:
        # check whether the parameter is a CT parameter
        # if yes,just used the needed ones
        if not self.check_needed_param(param.name):
            continue
        if dp:
            fsock.writelines("%s = %s\n" % (param.name,
                                        self.p_to_f.parse(param.expr)))
        # NOTE(review): 'elif' here (vs two independent 'if's in the indep
        # loop above) means the mp line is skipped when dp is also True.
        # Harmless with the current call sites, where dp and mp are never
        # both True — confirm before calling with dp=mp=True.
        elif mp:
            fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                        self.mp_p_to_f.parse(param.expr)))

    # gal(1): EW coupling used when writing out aqed in the LHE file;
    # its definition depends on which EW input scheme the model uses.
    fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
    if ('aEWM1',) in self.model['parameters']:
        if dp:
            # 3.5449... = 2*sqrt(pi); gal(1) = e = 2*sqrt(pi*aEW)
            fsock.writelines("""
            gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
            gal(2) = 1d0
            """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
            %(mp_prefix)sgal(2) = 1d0
            """ %{'mp_prefix':self.mp_prefix})
            pass
    # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
    elif ('Gf',) in self.model['parameters']:
        if dp:
            # 2.3784... = 2**(5/4.); e from MW, MZ and Gf (Gmu scheme)
            fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
            gal(2) = 1d0
            """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
            %(mp_prefix)sgal(2) = 1d0
            """ %{'mp_prefix':self.mp_prefix})
            pass
    else:
        # no EW input known: fall back to 1 and warn (dp pass only)
        if dp:
            logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
            fsock.writelines(""" gal(1) = 1d0
            gal(2) = 1d0
            """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
            %(mp_prefix)sgal(2) = 1e0_16
            """%{'mp_prefix':self.mp_prefix})
5779 5780
def create_couplings(self):
    """Write couplings.f plus the numbered couplingsN.f files, batching the
    coupling definitions 25 per file."""

    batch_size = 25

    self.create_couplings_main(batch_size)
    nb_indep = 1 + len(self.coups_indep) // batch_size
    nb_dep = 1 + len(self.coups_dep) // batch_size

    # alphaS-independent couplings: double and multiple precision are
    # computed together in the same file.
    for idx in range(nb_indep):
        batch = self.coups_indep[batch_size * idx: batch_size * (idx + 1)]
        self.create_couplings_part(idx + 1, batch, dp=True, mp=self.opt['mp'])

    # alphaS-dependent couplings: double and multiple precision live in
    # separate subroutines (files numbered after the independent ones).
    for idx in range(nb_dep):
        batch = self.coups_dep[batch_size * idx: batch_size * (idx + 1)]
        file_nb = idx + 1 + nb_indep
        self.create_couplings_part(file_nb, batch, dp=True, mp=False)
        if self.opt['mp']:
            self.create_couplings_part(file_nb, batch, dp=False, mp=True)
5807 5808
def create_couplings_main(self, nb_def_by_file=25):
    """ create couplings.f """
    # Generates four entry points:
    #   coup()                — full initialisation (READLHA = .true.)
    #   update_as_param()     — recompute only alphaS-dependent pieces
    #   update_as_param2()    — same, but from an externally supplied mu_r/as
    #   mp_update_as_param()  — quadruple-precision variant (if opt['mp'])
    # nb_def_by_file must match the batching used in create_couplings so the
    # generated 'call coupN()' statements reference existing subroutines.

    fsock = self.open('couplings.f', format='fortran')

    fsock.writelines("""subroutine coup()

      implicit none
      double precision PI, ZERO
      logical READLHA
      parameter  (PI=3.141592653589793d0)
      parameter  (ZERO=0d0)
      include \'model_functions.inc\'""")
    if self.opt['mp']:
        fsock.writelines("""%s MP__PI, MP__ZERO
      parameter (MP__PI=3.1415926535897932384626433832795e0_16)
      parameter (MP__ZERO=0e0_16)
      include \'mp_input.inc\'
      include \'mp_coupl.inc\'
      """%self.mp_real_format)
    fsock.writelines("""include \'input.inc\'
      include \'coupl.inc\'
      READLHA = .true.
      include \'intparam_definition.inc\'""")
    if self.opt['mp']:
        fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    # independent couplings first: coup1 .. coup<nb_coup_indep>
    fsock.writelines('\n'.join(\
                ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

    fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

    # then the alphaS-dependent ones (numbered after the independent files)
    fsock.writelines('\n'.join(\
                ['call coup%s()' % (nb_coup_indep + i + 1) \
                  for i in range(nb_coup_dep)]))
    if self.opt['mp']:
        fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                  for i in range(nb_coup_dep)]))
    fsock.writelines('''\n return \n end\n''')

    # update_as_param: re-evaluate only the event-by-event quantities
    fsock.writelines("""subroutine update_as_param()

      implicit none
      double precision PI, ZERO
      logical READLHA
      parameter  (PI=3.141592653589793d0)
      parameter  (ZERO=0d0)
      include \'model_functions.inc\'""")
    fsock.writelines("""include \'input.inc\'
      include \'coupl.inc\'
      READLHA = .false.""")
    fsock.writelines("""
      include \'intparam_definition.inc\'\n
      """)

    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

    fsock.writelines('\n'.join(\
                ['call coup%s()' % (nb_coup_indep + i + 1) \
                  for i in range(nb_coup_dep)]))
    fsock.writelines('''\n return \n end\n''')

    # update_as_param2: set MU_R/G/AS from caller-provided values, then
    # delegate to update_as_param
    fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

      implicit none
      double precision PI
      parameter  (PI=3.141592653589793d0)
      double precision mu_r2, as2
      include \'model_functions.inc\'""")
    fsock.writelines("""include \'input.inc\'
      include \'coupl.inc\'""")
    fsock.writelines("""
      if (mu_r2.gt.0d0) MU_R = mu_r2
      G = SQRT(4.0d0*PI*AS2)
      AS = as2

      CALL UPDATE_AS_PARAM()
      """)
    fsock.writelines('''\n return \n end\n''')

    if self.opt['mp']:
        # quadruple-precision version of update_as_param
        fsock.writelines("""subroutine mp_update_as_param()

      implicit none
      logical READLHA
      include \'model_functions.inc\'""")
        fsock.writelines("""%s MP__PI, MP__ZERO
      parameter (MP__PI=3.1415926535897932384626433832795e0_16)
      parameter (MP__ZERO=0e0_16)
      include \'mp_input.inc\'
      include \'mp_coupl.inc\'
      """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
      include \'coupl.inc\'
      include \'actualize_mp_ext_params.inc\'
      READLHA = .false.
      include \'mp_intparam_definition.inc\'\n
      """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                  for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')
5924
def create_couplings_part(self, nb_file, data, dp=True, mp=False):
    """ create couplings[nb_file].f containing information coming from data.
    Outputs the computation of the double precision and/or the multiple
    precision couplings depending on the parameters dp and mp.
    If mp is True and dp is False, then the prefix 'MP_' is appended to the
    filename and subroutine name.
    """
    # data: the batch of coupling objects (with .name and .expr) whose
    # Fortran assignments go into this file.

    fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                           nb_file), format='fortran')
    fsock.writelines("""subroutine %scoup%s()

      implicit none
      include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
    if dp:
        fsock.writelines("""
      double precision PI, ZERO
      parameter  (PI=3.141592653589793d0)
      parameter  (ZERO=0d0)
      include 'input.inc'
      include 'coupl.inc'""")
    if mp:
        fsock.writelines("""%s MP__PI, MP__ZERO
      parameter (MP__PI=3.1415926535897932384626433832795e0_16)
      parameter (MP__ZERO=0e0_16)
      include \'mp_input.inc\'
      include \'mp_coupl.inc\'
      """%self.mp_real_format)

    for coupling in data:
        # p_to_f / mp_p_to_f translate the UFO python expression to Fortran
        if dp:
            fsock.writelines('%s = %s' % (coupling.name,
                                          self.p_to_f.parse(coupling.expr)))
        if mp:
            fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                          self.mp_p_to_f.parse(coupling.expr)))
    fsock.writelines('end')
5962
def create_model_functions_inc(self):
    """ Create model_functions.inc which contains the various declarations
    of auxiliary functions which might be used in the couplings expressions
    """
    # Declares the return type of the built-in helpers (cond, condif,
    # reglog*, recms, arg) plus any extra function defined by the UFO model
    # that is not already provided by default.

    additional_fct = []
    # check for functions define in the UFO model
    ufo_fct = self.model.get('functions')
    if ufo_fct:
        for fct in ufo_fct:
            # already handle by default
            if fct.name not in ["complexconjugate", "re", "im", "sec",
                   "csc", "asec", "acsc", "theta_function", "cond",
                   "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]:
                additional_fct.append(fct.name)

    fsock = self.open('model_functions.inc', format='fortran')
    fsock.writelines("""double complex cond
      double complex condif
      double complex reglog
      double complex reglogp
      double complex reglogm
      double complex recms
      double complex arg
      %s
      """ % "\n".join(["      double complex %s" % i for i in additional_fct]))

    if self.opt['mp']:
        # quadruple-precision twins, declared with self.mp_complex_format
        fsock.writelines("""%(complex_mp_format)s mp_cond
      %(complex_mp_format)s mp_condif
      %(complex_mp_format)s mp_reglog
      %(complex_mp_format)s mp_reglogp
      %(complex_mp_format)s mp_reglogm
      %(complex_mp_format)s mp_recms
      %(complex_mp_format)s mp_arg
      %(additional)s
      """ %\
      {"additional": "\n".join(["      %s %s" % (self.mp_complex_format, i) for i in additional_fct]),
        'complex_mp_format':self.mp_complex_format
      })
6005
def create_model_functions_def(self):
    """ Create model_functions.f which contains the various definitions
    of auxiliary functions which might be used in the couplings expressions
    Add the functions.f functions for formfactors support

    BUGFIX: the quadruple-precision TWOPII constants previously used a
    mistyped value of pi (3.14169258478796... instead of 3.14159265358979...,
    inconsistent with MP__PI used elsewhere in this exporter); they now carry
    the correct 2*pi*i used to shift the branch cut in mp_reglogp/mp_reglogm.
    """

    fsock = self.open('model_functions.f', format='fortran')

    # Double-precision helpers: conditional selectors, branch-cut-aware
    # logarithms and the complex argument function.
    fsock.writelines("""double complex function cond(condition,truecase,falsecase)
      implicit none
      double complex condition,truecase,falsecase
      if(condition.eq.(0.0d0,0.0d0)) then
         cond=truecase
      else
         cond=falsecase
      endif
      end

      double complex function condif(condition,truecase,falsecase)
      implicit none
      logical condition
      double complex truecase,falsecase
      if(condition) then
         condif=truecase
      else
         condif=falsecase
      endif
      end

      double complex function recms(condition,expr)
      implicit none
      logical condition
      double complex expr
      if(condition)then
         recms=expr
      else
         recms=dcmplx(dble(expr))
      endif
      end

      double complex function reglog(arg)
      implicit none
      double complex TWOPII
      parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
      double complex arg
      if(arg.eq.(0.0d0,0.0d0)) then
         reglog=(0.0d0,0.0d0)
      else
         reglog=log(arg)
      endif
      end

      double complex function reglogp(arg)
      implicit none
      double complex TWOPII
      parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
      double complex arg
      if(arg.eq.(0.0d0,0.0d0))then
         reglogp=(0.0d0,0.0d0)
      else
         if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
            reglogp=log(arg) + TWOPII
         else
            reglogp=log(arg)
         endif
      endif
      end

      double complex function reglogm(arg)
      implicit none
      double complex TWOPII
      parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
      double complex arg
      if(arg.eq.(0.0d0,0.0d0))then
         reglogm=(0.0d0,0.0d0)
      else
         if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
            reglogm=log(arg) - TWOPII
         else
            reglogm=log(arg)
         endif
      endif
      end

      double complex function arg(comnum)
      implicit none
      double complex comnum
      double complex iim
      iim = (0.0d0,1.0d0)
      if(comnum.eq.(0.0d0,0.0d0)) then
         arg=(0.0d0,0.0d0)
      else
         arg=log(comnum/abs(comnum))/iim
      endif
      end""")
    if self.opt['mp']:
        # Quadruple-precision twins.  TWOPII below uses the corrected pi
        # (the old value read 3.14169258478796109557151794433593750e0_16).
        fsock.writelines("""

      %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
      implicit none
      %(complex_mp_format)s condition,truecase,falsecase
      if(condition.eq.(0.0e0_16,0.0e0_16)) then
         mp_cond=truecase
      else
         mp_cond=falsecase
      endif
      end

      %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
      implicit none
      logical condition
      %(complex_mp_format)s truecase,falsecase
      if(condition) then
         mp_condif=truecase
      else
         mp_condif=falsecase
      endif
      end

      %(complex_mp_format)s function mp_recms(condition,expr)
      implicit none
      logical condition
      %(complex_mp_format)s expr
      if(condition)then
         mp_recms=expr
      else
         mp_recms=cmplx(real(expr),kind=16)
      endif
      end

      %(complex_mp_format)s function mp_reglog(arg)
      implicit none
      %(complex_mp_format)s TWOPII
      parameter (TWOPII=2.0e0_16*3.14159265358979323846264338327950288e0_16*(0.0e0_16,1.0e0_16))
      %(complex_mp_format)s arg
      if(arg.eq.(0.0e0_16,0.0e0_16)) then
         mp_reglog=(0.0e0_16,0.0e0_16)
      else
         mp_reglog=log(arg)
      endif
      end

      %(complex_mp_format)s function mp_reglogp(arg)
      implicit none
      %(complex_mp_format)s TWOPII
      parameter (TWOPII=2.0e0_16*3.14159265358979323846264338327950288e0_16*(0.0e0_16,1.0e0_16))
      %(complex_mp_format)s arg
      if(arg.eq.(0.0e0_16,0.0e0_16))then
         mp_reglogp=(0.0e0_16,0.0e0_16)
      else
         if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
            mp_reglogp=log(arg) + TWOPII
         else
            mp_reglogp=log(arg)
         endif
      endif
      end

      %(complex_mp_format)s function mp_reglogm(arg)
      implicit none
      %(complex_mp_format)s TWOPII
      parameter (TWOPII=2.0e0_16*3.14159265358979323846264338327950288e0_16*(0.0e0_16,1.0e0_16))
      %(complex_mp_format)s arg
      if(arg.eq.(0.0e0_16,0.0e0_16))then
         mp_reglogm=(0.0e0_16,0.0e0_16)
      else
         if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
            mp_reglogm=log(arg) - TWOPII
         else
            mp_reglogm=log(arg)
         endif
      endif
      end

      %(complex_mp_format)s function mp_arg(comnum)
      implicit none
      %(complex_mp_format)s comnum
      %(complex_mp_format)s imm
      imm = (0.0e0_16,1.0e0_16)
      if(comnum.eq.(0.0e0_16,0.0e0_16)) then
         mp_arg=(0.0e0_16,0.0e0_16)
      else
         mp_arg=log(comnum/abs(comnum))/imm
      endif
      end"""%{'complex_mp_format':self.mp_complex_format})

    #check for the file functions.f
    model_path = self.model.get('modelpath')
    if os.path.exists(pjoin(model_path,'Fortran','functions.f')):
        fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
        # copy the user file verbatim: file.writelines(fsock, ...) bypasses
        # the FortranWriter reformatting on purpose
        fct_path = pjoin(model_path,'Fortran','functions.f')
        file.writelines(fsock, open(fct_path).read())
        fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

    # check for functions define in the UFO model
    ufo_fct = self.model.get('functions')
    if ufo_fct:
        fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
        for fct in ufo_fct:
            # already handle by default
            if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg"]:
                ufo_fct_template = """
      double complex function %(name)s(%(args)s)
      implicit none
      double complex %(args)s
      %(name)s = %(fct)s

      return
      end
      """
                text = ufo_fct_template % {
                        'name': fct.name,
                        'args': ", ".join(fct.arguments),
                        'fct': self.p_to_f.parse(fct.expr)
                        }
                fsock.writelines(text)
        if self.opt['mp']:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
            for fct in ufo_fct:
                # already handle by default
                if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif",
                                    "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg"]:
                    ufo_fct_template = """
      %(complex_mp_format)s function mp__%(name)s(mp__%(args)s)
      implicit none
      %(complex_mp_format)s mp__%(args)s
      mp__%(name)s = %(fct)s

      return
      end
      """
                    text = ufo_fct_template % {
                            'name': fct.name,
                            'args': ", mp__".join(fct.arguments),
                            'fct': self.mp_p_to_f.parse(fct.expr),
                            'complex_mp_format': self.mp_complex_format
                            }
                    fsock.writelines(text)

        fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6249 6250 6251
def create_makeinc(self):
    """Create makeinc.inc, listing the model object files to compile."""

    fsock = self.open('makeinc.inc', comment='#')

    # NOTE(review): the two counts below look name-swapped (the count
    # derived from coups_dep feeds the "indep" slot and vice versa).
    # Kept exactly as in the original since the couplings file numbering
    # produced elsewhere relies on these values — confirm before changing.
    n_from_dep = 1 + len(self.coups_dep) // 25
    n_from_indep = 1 + len(self.coups_indep) // 25

    object_files = ['couplings%s.o' % (i + 1)
                    for i in range(n_from_indep + n_from_dep)]
    if self.opt['mp']:
        # Multiple-precision objects exist only for the trailing slice
        # of the couplings files.
        object_files += ['mp_couplings%s.o' % (i + 1)
                         for i in range(n_from_indep,
                                        n_from_indep + n_from_dep)]

    line = 'MODEL = couplings.o lha_read.o printout.o rw_para.o'
    line += ' model_functions.o '
    line += ' '.join(object_files)
    fsock.writelines(line)
6268
def create_param_write(self):
    """Create param_write.inc, the Fortran snippet that prints every model
    parameter to stdout in three sections: external parameters, internal
    parameters, and internal parameters re-evaluated point by point.

    Reads self.params_ext / self.params_indep / self.params_dep; internal
    parameters are filtered through self.check_needed_param, and the ZERO
    placeholder is skipped.
    """

    fsock = self.open('param_write.inc', format='fortran')

    fsock.writelines("""write(*,*) ' External Params'
    write(*,*) ' ---------------------------------'
    write(*,*) ' '""")

    # Renamed from `format` to avoid shadowing the builtin.
    def _write_line(name):
        """Return the Fortran write statement printing one parameter."""
        return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}

    # External parameters are always printed.
    lines = [_write_line(param.name) for param in self.params_ext]
    fsock.writelines('\n'.join(lines))

    fsock.writelines("""write(*,*) ' Internal Params'
    write(*,*) ' ---------------------------------'
    write(*,*) ' '""")
    # Skip the ZERO placeholder and any parameter the output does not need.
    lines = [_write_line(data.name) for data in self.params_indep
             if data.name != 'ZERO' and self.check_needed_param(data.name)]
    fsock.writelines('\n'.join(lines))

    fsock.writelines("""write(*,*) ' Internal Params evaluated point by point'
    write(*,*) ' ----------------------------------------'
    write(*,*) ' '""")
    lines = [_write_line(data.name) for data in self.params_dep
             if self.check_needed_param(data.name)]
    fsock.writelines('\n'.join(lines))
def create_ident_card(self):
    """Create ident_card.dat mapping each LHA block/code to a parameter.

    Each emitted line is: `<block name> <lha code(s)> <parameter name>`.
    Parameters without a name contribute an empty entry, as before.
    """

    # Renamed from `format` to avoid shadowing the builtin; the unnamed
    # check now runs first so no attribute is touched on nameless entries.
    def _format_entry(parameter):
        """Return the ident_card line for this parameter ('' if unnamed)."""
        if not parameter.name:
            return ''
        colum = [parameter.lhablock.lower()] + \
                [str(value) for value in parameter.lhacode] + \
                [parameter.name]
        return ' '.join(colum) + '\n'

    fsock = self.open('ident_card.dat')

    external_param = [_format_entry(param) for param in self.params_ext]
    fsock.writelines('\n'.join(external_param))
def create_actualize_mp_ext_param_inc(self):
    """Write actualize_mp_ext_params.inc, copying the double-precision
    value of each scale-dependent external parameter into its
    multiple-precision counterpart."""

    # In principle every external could be refreshed, but only the
    # phase-space dependent ones (in practice aS and MU_R) can change
    # dynamically, so only those are updated here.
    to_update = [p for p in self.params_ext
                 if p.name in self.PS_dependent_key]

    lines = ['%(mp_prefix)s%(name)s=%(name)s'
             % {'mp_prefix': self.mp_prefix, 'name': p.name}
             for p in to_update]

    # When read_lha is false the input is G rather than aS, so G must be
    # propagated to its mp image as well.
    if any(p.name == 'aS' for p in to_update):
        lines.append('%(mp_prefix)sG=G' % {'mp_prefix': self.mp_prefix})

    fsock = self.open('actualize_mp_ext_params.inc', format='fortran')
    fsock.writelines('\n'.join(lines))
6338
def create_param_read(self):
    """Write param_read.inc, the Fortran code reading every external
    parameter from the param_card (plus the multiple-precision copies
    when the mp option is enabled)."""

    # MadEvent-style and loop-induced outputs read the card through the
    # generated ../param_card.inc instead of explicit LHA_get_real calls.
    if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
           or self.opt['loop_induced']:
        fsock = self.open('param_read.inc', format='fortran')
        fsock.writelines(' include \'../param_card.inc\'')
        return

    def _read_line(parameter):
        """Return the LHA_get_real call(s) reading one external parameter."""
        out = \
        """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
        % {'name': parameter.name,
           'value': self.p_to_f.parse(str(parameter.value.real))}
        if self.opt['mp']:
            out += ("\n call MP_LHA_get_real(npara,param,value,'%(name)s'," +
                    "%(mp_prefix)s%(name)s,%(value)s)") \
                   % {'name': parameter.name, 'mp_prefix': self.mp_prefix,
                      'value': self.mp_p_to_f.parse(str(parameter.value.real))}
        return out

    fsock = self.open('param_read.inc', format='fortran')
    res_strings = [_read_line(param) for param in self.params_ext]

    # A Majorana particle must carry a width with the same sign as its
    # mass: append the corresponding sign() fix-ups.
    for particle in self.model.get('particles'):
        if particle.is_fermion() and particle.get('self_antipart') and \
               particle.get('width').lower() != 'zero':
            res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' %
                {'width': particle.get('width'),
                 'mass': particle.get('mass')})
            if self.opt['mp']:
                res_strings.append(
                    ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,' +
                     '%(mp_pref)s%(mass)s)') %
                    {'width': particle.get('width'),
                     'mass': particle.get('mass'),
                     'mp_pref': self.mp_prefix})

    fsock.writelines('\n'.join(res_strings))


@staticmethod
def create_param_card_static(model, output_path, rule_card_path=False,
                             mssm_convert=True):
    """Create param_card.dat for a given model -- static helper.

    If the model's restriction ships a pre-built default card it is
    copied verbatim; otherwise the card is generated by ParamCardWriter.
    Optionally also writes the restriction rule card and, for MSSM
    models, converts the card to the SLHA1 convention.
    """
    # 1. Reuse a pre-built default card when the restriction provides one.
    copied = False
    if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str):
        # Strip the 'restrict_' prefix and '.dat' suffix of the card name.
        restrict_name = os.path.basename(model.restrict_card)[9:-4]
        model_path = model.get('modelpath')
        default_card = pjoin(model_path, 'paramcard_%s.dat' % restrict_name)
        if os.path.exists(default_card):
            copied = True
            files.cp(default_card, output_path)
    if not copied:
        param_writer.ParamCardWriter(model, output_path)

    if rule_card_path:
        if hasattr(model, 'rule_card'):
            model.rule_card.write_file(rule_card_path)

    if mssm_convert:
        model_name = model.get('name')
        # If MSSM, convert the card to SLHA1.
        if model_name == 'mssm' or model_name.startswith('mssm-'):
            import models.check_param_card as translator
            # Check the format of the param_card for Pythia and make it
            # correct before the conversion.
            if rule_card_path:
                translator.make_valid_param_card(output_path, rule_card_path)
            translator.convert_to_slha1(output_path)
6413
def create_param_card(self):
    """Write param_card.dat into the output directory, forwarding to the
    static helper; a rule card is produced only when the model defines
    restriction rules."""

    rule_card_path = (pjoin(self.dir_path, 'param_card_rule.dat')
                      if hasattr(self.model, 'rule_card') else False)
    self.create_param_card_static(self.model,
                                  output_path=pjoin(self.dir_path, 'param_card.dat'),
                                  rule_card_path=rule_card_path,
                                  mssm_convert=True)
6424
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True):
    """ Determine which Export_v4 class is required. cmd is the command
        interface containing all potential usefull information.
        The output_type argument specifies from which context the output
        is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
        and 'default' for tree-level outputs.

        Returns an instantiated exporter object; raises on unknown
        output_type or export format."""

    opt = cmd.options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    # a) Loop optimized output is selected
    # b) the process gathered from the amplitude generated use loops

    # Pick the current process either from the generated amplitudes or,
    # for FKS runs, from the multi-process definitions.
    if len(cmd._curr_amps)>0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd,'_fks_multi_proc') and \
                      len(cmd._fks_multi_proc.get('process_definitions'))>0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    # A reduction tool is needed only for loop-optimized output of a
    # process with genuine virtual corrections.
    requires_reduction_tool = opt['loop_optimized_output'] and \
                       (not curr_proc is None) and \
                       (curr_proc.get('perturbation_couplings') != [] and \
                         not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    # Options shared by every MadLoop-based exporter below.
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass':cmd.options['complex_mass_scheme'],
      'export_format':'madloop',
      'mp':True,
      'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir':cmd._iregi_dir,
      'pjfry_dir':cmd.options['pjfry'],
      'golem_dir':cmd.options['golem'],
      'samurai_dir':cmd.options['samurai'],
      'ninja_dir':cmd.options['ninja'],
      'collier_dir':cmd.options['collier'],
      'fortran_compiler':cmd.options['fortran_compiler'],
      'f2py_compiler':cmd.options['f2py_compiler'],
      'output_dependencies':cmd.options['output_dependencies'],
      'SubProc_prefix':'P',
      'compute_color_flows':cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path']
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass=None
            if not cmd.options['loop_optimized_output']:
                ExporterClass=loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception, "output_type not recognize %s" % output_type
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s'%str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type=='amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass=None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # mp is only needed when the process actually has virtual amplitudes.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format']='FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format']='FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type=='default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format':cmd._export_format,
                    'mp': False,
                    'sa_symmetry':False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True })

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        # Separate option set used when the process turns out to be
        # loop-induced (MadLoop-backed MadEvent output).
        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                                               cmd._export_dir,loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir,opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                                               cmd._export_dir,loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir,opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir,opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:

            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception, 'Wrong export_v4 format'
    else:
        # NOTE(review): the '%s' placeholder below is never interpolated,
        # so the message prints literally -- confirm intended.
        raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
6590
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used for the grouped MadWeight matrix-element files.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Marks this exporter as producing MadWeight-style grouped output.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
def generate_subprocess_directory(self, subproc_group,
                                        fortran_model,
                                        group_number):
    """Generate the Pn directory for a subprocess group in MadEvent,
    including the necessary matrix_N.f files, configs.inc and various
    other helper files.

    Parameters:
        subproc_group -- a group_subprocs.SubProcessGroup whose matrix
            elements are all written into one P<number>_<name> directory.
        fortran_model -- writer object forwarded to write_matrix_element_v4
            (presumably a helas call writer -- confirm with callers).
        group_number -- group index; not used in this method body.

    Returns the total number of helas calls written for the group.
    """

    if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
        raise base_objects.PhysicsObject.PhysicsObjectError,\
              "subproc_group object not SubProcessGroup"

    # Lazily pick up the model from the first process if not set yet.
    if not self.model:
        self.model = subproc_group.get('matrix_elements')[0].\
                     get('processes')[0].get('model')

    pathdir = os.path.join(self.dir_path, 'SubProcesses')

    # Create the directory PN in the specified path
    subprocdir = "P%d_%s" % (subproc_group.get('number'),
                             subproc_group.get('name'))
    try:
        os.mkdir(pjoin(pathdir, subprocdir))
    except os.error as error:
        # Directory may already exist; warn and keep going.
        logger.warning(error.strerror + " " + subprocdir)

    logger.info('Creating files in directory %s' % subprocdir)
    Ppath = pjoin(pathdir, subprocdir)

    # Create the matrix.f files, auto_dsig.f files and all inc files
    # for all subprocesses in the group

    maxamps = 0     # largest diagram count over the group
    maxflows = 0    # largest color-flow count over the group
    tot_calls = 0   # total helas calls, returned to the caller

    matrix_elements = subproc_group.get('matrix_elements')

    for ime, matrix_element in \
            enumerate(matrix_elements):
        filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model,
                                str(ime+1),
                                subproc_group.get('diagram_maps')[\
                                                                ime])

        filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  str(ime+1))

        # Keep track of needed quantities
        tot_calls += int(calls)
        maxflows = max(maxflows, ncolor)
        maxamps = max(maxamps, len(matrix_element.get('diagrams')))

        # Draw diagrams
        filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model = \
                                            matrix_element.get('processes')[0].\
                                                                   get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

    # Extract number of external particles
    # (taken from the loop's last matrix element; presumably all members
    # of a group share the same external legs -- confirm)
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Generate a list of diagrams corresponding to each configuration
    # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
    # If a subprocess has no diagrams for this config, the number is 0

    subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

    filename = pjoin(Ppath, 'auto_dsig.f')
    self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                    subproc_group)

    filename = pjoin(Ppath,'configs.inc')
    nconfigs, s_and_t_channels = self.write_configs_file(\
        writers.FortranWriter(filename),
        subproc_group,
        subproc_diagrams_for_config)

    filename = pjoin(Ppath, 'leshouche.inc')
    self.write_leshouche_file(writers.FortranWriter(filename),
                              subproc_group)

    filename = pjoin(Ppath, 'phasespace.inc')
    self.write_phasespace_file(writers.FortranWriter(filename),
                               nconfigs)

    filename = pjoin(Ppath, 'maxamps.inc')
    self.write_maxamps_file(writers.FortranWriter(filename),
                            maxamps,
                            maxflows,
                            max([len(me.get('processes')) for me in \
                                 matrix_elements]),
                            len(matrix_elements))

    filename = pjoin(Ppath, 'mirrorprocs.inc')
    self.write_mirrorprocs(writers.FortranWriter(filename),
                           subproc_group)

    filename = pjoin(Ppath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = pjoin(Ppath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = pjoin(Ppath, 'props.inc')
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    # filename = pjoin(Ppath, 'processes.dat')
    # files.write_to_file(filename,
    #                    self.write_processes_file,
    #                    subproc_group)

    # Generate jpgs -> pass in make_html
    #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

    # Symlink the shared per-process files into the P directory.
    linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

    for file in linkfiles:
        ln('../%s' % file, cwd=Ppath)

    ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
    ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
    ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
    ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
    ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
    ln('phasespace.inc', '../', log=True, cwd=Ppath)
    if not tot_calls:
        tot_calls = 0
    return tot_calls
6753 6754 6755 #=========================================================================== 6756 # Helper functions 6757 #===========================================================================
def modify_grouping(self, matrix_element):
    """Allow to modify the grouping (if grouping is in place).

    Returns a pair: a boolean reporting that the matrix element was
    modified, and the new (or old) matrix element.
    """
    return (True, matrix_element.split_lepton_grouping())
6765 6766 #=========================================================================== 6767 # write_super_auto_dsig_file 6768 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write auto_dsig.f, which selects between the subprocesses of a
    subprocess group.

    When writer is falsy the template substitution dictionary is
    returned instead of being rendered.
    """

    matrix_elements = subproc_group.get('matrix_elements')
    nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
    nprocs = len(matrix_elements)

    replace_dict = {
        # Version/date stamp and the list of processes in the group.
        'info_lines': self.get_mg5_info_lines(),
        'process_lines': '\n'.join(self.get_process_info_lines(me)
                                   for me in matrix_elements),
        'nexternal': nexternal,
        'nsprocs': 2 * nprocs,
        # Declaration of one DSIG<i> function per subprocess.
        'dsig_def_line': 'DOUBLE PRECISION ' + ','.join(
            'DSIG%d' % (i + 1) for i in range(nprocs)),
    }

    # Dispatch: one IF line per subprocess, annotated with the process.
    dispatch = [
        "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" %
        {'num': i + 1,
         'proc': matrix_elements[i].get('processes')[0].base_string()}
        for i in range(nprocs)]
    replace_dict['call_dsig_proc_lines'] = '\n'.join(dispatch)

    if not writer:
        return replace_dict

    template = open(os.path.join(_file_path,
        'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read()
    writer.writelines(template % replace_dict)
6814 6815 #=========================================================================== 6816 # write_mirrorprocs 6817 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write mirrorprocs.inc: a DATA statement flagging, for each
    subprocess of the group, whether it has an initial-state mirror
    process."""

    matrix_elements = subproc_group.get('matrix_elements')
    as_fortran = {True: '.true.', False: '.false.'}
    flags = ','.join(as_fortran[me.get('has_mirror_process')]
                     for me in matrix_elements)
    writer.writelines(
        ["DATA (MIRRORPROCS(I),I=1,%d)/%s/" % (len(matrix_elements), flags)])
6831 6832 #=========================================================================== 6833 # write_configs_file 6834 #===========================================================================
def write_configs_file(self, writer, subproc_group, diagrams_for_config):
    """Write configs.inc for a subprocess group.

    Each configuration's topology comes from the subprocesses that have
    a diagram for it (None marks subprocesses without one).  Returns the
    number of kept configurations together with the result of
    write_configs_file_from_diagrams (the s- and t-channels).
    """

    matrix_elements = subproc_group.get('matrix_elements')
    model = matrix_elements[0].get('processes')[0].get('model')

    diagrams = []
    config_numbers = []
    for iconfig, config in enumerate(diagrams_for_config, start=1):
        # Configurations with no contributing diagram anywhere are dropped.
        if set(config) == set([0]):
            continue
        config_numbers.append(iconfig)
        diagrams.append([
            matrix_elements[iproc].get('diagrams')[idiag - 1] if idiag else None
            for iproc, idiag in enumerate(config)])

    # Extract number of external particles
    nexternal, ninitial = subproc_group.get_nexternal_ninitial()

    s_and_t = self.write_configs_file_from_diagrams(writer, diagrams,
                                                    config_numbers,
                                                    nexternal, ninitial,
                                                    matrix_elements[0], model)
    return len(diagrams), s_and_t
6867 6868 #=========================================================================== 6869 # write_run_configs_file 6870 #===========================================================================
def write_run_config_file(self, writer):
    """Write run_configs.inc for MadEvent from the MadWeight template,
    hard-coding two channels per job.  Always returns True."""

    template_path = os.path.join(_file_path, 'iolibs', 'template_files',
                                 'madweight_run_config.inc')
    writer.write(open(template_path).read() % {'chanperjob': '2'})
    return True
6878 6879 6880 #=========================================================================== 6881 # write_leshouche_file 6882 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Write the leshouche.inc file for MG4, concatenating the leshouche
    lines of every matrix element in the group, indexed by its position.
    Always returns True."""

    lines = [line
             for iproc, me in enumerate(subproc_group.get('matrix_elements'))
             for line in self.get_leshouche_lines(me, iproc)]
    writer.writelines(lines)
    return True
6897