Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import traceback 
  31   
  32  import aloha 
  33   
  34  import madgraph.core.base_objects as base_objects 
  35  import madgraph.core.color_algebra as color 
  36  import madgraph.core.helas_objects as helas_objects 
  37  import madgraph.iolibs.drawing_eps as draw 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.iolibs.group_subprocs as group_subprocs 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  42  import madgraph.iolibs.template_files as template_files 
  43  import madgraph.iolibs.ufo_expression_parsers as parsers 
  44  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  45  import madgraph.interface.common_run_interface as common_run_interface 
  46  import madgraph.various.diagram_symmetry as diagram_symmetry 
  47  import madgraph.various.misc as misc 
  48  import madgraph.various.banner as banner_mod 
  49  import madgraph.various.process_checks as process_checks 
  50  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  51  import aloha.create_aloha as create_aloha 
  52  import models.import_ufo as import_ufo 
  53  import models.write_param_card as param_writer 
  54  import models.check_param_card as check_param_card 
  55   
  56   
  57  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  58  from madgraph.iolibs.files import cp, ln, mv 
  59   
  60  from madgraph import InvalidCmd 
  61   
# Shorthand used throughout this module for building filesystem paths.
pjoin = os.path.join

# Root of the 'madgraph' package (one level above iolibs/), with a trailing '/'.
_file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
logger = logging.getLogger('madgraph.export_v4')

# Compilers used when the user configuration does not specify one.
default_compiler= {'fortran': 'gfortran',
                       'f2py': 'f2py',
                       'cpp':'g++'}
class VirtualExporter(object):
    """Base interface describing how MadGraph interacts with an exporter.

    Daughter classes override the class attributes below and the hook
    methods to customise the generated output.
    """

    # How 'generate_subprocess_directory' groups matrix elements:
    #   False      - no grouping (only identical matrix elements are merged)
    #   'madevent' - group the massless quarks and the massless leptons
    #   'madweight'- group the gluon with the massless quarks
    grouped_mode = 'madevent'

    # If grouped_mode is False, u u~ and u~ u are generated independently;
    # setting sa_symmetry generates only one of the two matrix elements.
    sa_symmetry = False

    # Ask MadGraph to check whether the output directory already exists and
    # propose to the user to remove it first if this is the case.
    check = True

    # Initialisation of the output directory:
    #   'Template' - madgraph will call copy_template
    #   'dir'      - madgraph will just create an empty directory
    #   None       - madgraph does nothing for initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        return

    def copy_template(self, model):
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # For ungrouped output the effective signature is
        # generate_subprocess_directory(matrix_element, helicity_model, me_number).
        # Return an integer: the number of calls to helicity routines.
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # The call form raise Exception(...) is valid in both Python 2 and
        # Python 3, unlike the old 'raise Exception, "..."' statement form,
        # which also left an unreachable 'return' behind it.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Default construction options; __init__ copies this dict and overlays
    # the user-supplied 'opt' on top of it.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True
                   }
    # The plain Fortran exporter does not group subprocesses by default.
    grouped_mode = False
145 - def __init__(self, dir_path = "", opt=None):
146 """Initiate the ProcessExporterFortran with directory information""" 147 self.mgme_dir = MG5DIR 148 self.dir_path = dir_path 149 self.model = None 150 151 self.opt = dict(self.default_opt) 152 if opt: 153 self.opt.update(opt) 154 155 #place holder to pass information to the run_interface 156 self.proc_characteristic = banner_mod.ProcCharacteristic()
157 158 159 #=========================================================================== 160 # process exporter fortran switch between group and not grouped 161 #===========================================================================
162 - def export_processes(self, matrix_elements, fortran_model):
163 """Make the switch between grouped and not grouped output""" 164 165 calls = 0 166 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 167 for (group_number, me_group) in enumerate(matrix_elements): 168 calls = calls + self.generate_subprocess_directory(\ 169 me_group, fortran_model, group_number) 170 else: 171 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 172 calls = calls + self.generate_subprocess_directory(\ 173 me, fortran_model, me_number) 174 175 return calls
176 177 178 #=========================================================================== 179 # create the run_card 180 #===========================================================================
181 - def create_run_card(self, matrix_elements, history):
182 """ """ 183 184 185 # bypass this for the loop-check 186 import madgraph.loop.loop_helas_objects as loop_helas_objects 187 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 188 matrix_elements = None 189 190 run_card = banner_mod.RunCard() 191 192 193 default=True 194 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 195 processes = [me.get('processes') for megroup in matrix_elements 196 for me in megroup['matrix_elements']] 197 elif matrix_elements: 198 processes = [me.get('processes') 199 for me in matrix_elements['matrix_elements']] 200 else: 201 default =False 202 203 if default: 204 run_card.create_default_for_process(self.proc_characteristic, 205 history, 206 processes) 207 208 209 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 210 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
211 212 213 #=========================================================================== 214 # copy the Template in a new directory. 215 #===========================================================================
    def copy_template(self, model):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory

        Three situations are handled:
          - dir_path does not exist yet: copy Template/LO then Template/Common;
          - we are running from inside dir_path: refresh it in place;
          - dir_path exists but has no TemplateVersion.txt: only copy the
            version file.
        Afterwards the template is optionally cleaned, version info is written
        and the Source makefile / DiscreteSampler / PDF open_data are set up.
        """

        #First copy the full template tree if dir_path doesn't exit
        if not os.path.isdir(self.dir_path):
            assert self.mgme_dir, \
                  "No valid MG_ME path given for MG4 run directory creation."
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(self.dir_path))
            shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                            self.dir_path, True)
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'),
                               self.dir_path)
            # copy plot_card, keeping a pristine '_default' copy next to it
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif os.getcwd() == os.path.realpath(self.dir_path):
            # Refresh the template of the directory we are standing in.
            logger.info('working in local directory: %s' % \
                        os.path.realpath(self.dir_path))
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'),
                               self.dir_path)
            # for name in misc.glob('Template/LO/*', self.mgme_dir):
            #     name = os.path.basename(name)
            #     filname = pjoin(self.mgme_dir, 'Template','LO',name)
            #     if os.path.isfile(filename):
            #         files.cp(filename, pjoin(self.dir_path,name))
            #     elif os.path.isdir(filename):
            #         shutil.copytree(filename, pjoin(self.dir_path,name), True)
            # distutils.dir_util.copy_tree since dir_path already exists
            dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'),
                               self.dir_path)
            # Copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
            assert self.mgme_dir, \
                  "No valid MG_ME path given for MG4 run directory creation."
            try:
                shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
            except IOError:
                # No MGMEVersion.txt in the source tree: fall back on the
                # installed package version.
                MG5_version = misc.get_pkg_info()
                open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version'])

        #Ensure that the Template is clean
        if self.opt['clean']:
            logger.info('remove old information in %s' % \
                        os.path.basename(self.dir_path))
            # NOTE: os.environ.has_key / 'except Exception, why' are
            # Python-2-only constructs kept as-is for consistency with the file.
            if os.environ.has_key('MADGRAPH_BASE'):
                misc.call([pjoin('bin', 'internal', 'clean_template'),
                           '--web'], cwd=self.dir_path)
            else:
                try:
                    misc.call([pjoin('bin', 'internal', 'clean_template')], \
                              cwd=self.dir_path)
                except Exception, why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(self.dir_path),why))

        #Write version info
        MG_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
            MG_version['version'])

        # add the makefile in Source directory
        filename = pjoin(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FileWriter(filename))

        # add the DiscreteSampler information
        files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
                 pjoin(self.dir_path, 'Source'))
        files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'),
                 pjoin(self.dir_path, 'Source'))

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
305 306 307 #=========================================================================== 308 # Call MadAnalysis5 to generate the default cards for this process 309 #===========================================================================
310 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 311 ma5_path, output_dir, levels = ['parton','hadron']):
312 """ Call MA5 so that it writes default cards for both parton and 313 post-shower levels, tailored for this particular process.""" 314 315 if len(levels)==0: 316 return 317 318 logger.info('Generating MadAnalysis5 default cards tailored to this process') 319 try: 320 MA5_interpreter = common_run_interface.CommonRunCmd.\ 321 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 322 except (Exception, SystemExit) as e: 323 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.') 324 return 325 if MA5_interpreter is None: 326 return 327 328 MA5_main = MA5_interpreter.main 329 330 for lvl in ['parton','hadron']: 331 if lvl in levels: 332 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 333 try: 334 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 335 except (Exception, SystemExit) as e: 336 # keep the default card (skip only) 337 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 338 ' default analysis card for this process.') 339 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 340 error=StringIO() 341 traceback.print_exc(file=error) 342 logger.debug('MadAnalysis5 error was:') 343 logger.debug('-'*60) 344 logger.debug(error.getvalue()[:-1]) 345 logger.debug('-'*60) 346 else: 347 open(card_to_generate,'w').write(text)
348 349 #=========================================================================== 350 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 351 #===========================================================================
352 - def write_procdef_mg5(self, file_pos, modelname, process_str):
353 """ write an equivalent of the MG4 proc_card in order that all the Madevent 354 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 355 356 proc_card_template = template_files.mg4_proc_card.mg4_template 357 process_template = template_files.mg4_proc_card.process_template 358 process_text = '' 359 coupling = '' 360 new_process_content = [] 361 362 363 # First find the coupling and suppress the coupling from process_str 364 #But first ensure that coupling are define whithout spaces: 365 process_str = process_str.replace(' =', '=') 366 process_str = process_str.replace('= ', '=') 367 process_str = process_str.replace(',',' , ') 368 #now loop on the element and treat all the coupling 369 for info in process_str.split(): 370 if '=' in info: 371 coupling += info + '\n' 372 else: 373 new_process_content.append(info) 374 # Recombine the process_str (which is the input process_str without coupling 375 #info) 376 process_str = ' '.join(new_process_content) 377 378 #format the SubProcess 379 replace_dict = {'process': process_str, 380 'coupling': coupling} 381 process_text += process_template.substitute(replace_dict) 382 383 replace_dict = {'process': process_text, 384 'model': modelname, 385 'multiparticle':''} 386 text = proc_card_template.substitute(replace_dict) 387 388 if file_pos: 389 ff = open(file_pos, 'w') 390 ff.write(text) 391 ff.close() 392 else: 393 return replace_dict
394 395
396 - def pass_information_from_cmd(self, cmd):
397 """Pass information for MA5""" 398 399 self.proc_defs = cmd._curr_proc_defs
400 401 #=========================================================================== 402 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 403 #===========================================================================
    def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
        """Function to finalize v4 directory, for inheritance.

        Base behaviour: create the run_card and the MadAnalysis5 cards.
        Daughter classes extend this (html pages, diagrams, tarball, ...).
        """
        # NOTE: the mutable defaults (mg5options={}, flaglist=[]) are never
        # mutated here, so the shared-default pitfall does not apply.
        self.create_run_card(matrix_elements, history)
        self.create_MA5_cards(matrix_elements, history)
409
410 - def create_MA5_cards(self,matrix_elements,history):
411 """ A wrapper around the creation of the MA5 cards so that it can be 412 bypassed by daughter classes (i.e. in standalone).""" 413 if 'madanalysis5_path' in self.opt and not \ 414 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 415 processes = None 416 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 417 processes = [me.get('processes') for megroup in matrix_elements 418 for me in megroup['matrix_elements']] 419 elif matrix_elements: 420 processes = [me.get('processes') 421 for me in matrix_elements['matrix_elements']] 422 423 self.create_default_madanalysis5_cards( 424 history, self.proc_defs, processes, 425 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 426 levels = ['hadron','parton']) 427 428 for level in ['hadron','parton']: 429 # Copying these cards turn on the use of MadAnalysis5 by default. 430 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 431 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 432 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
433 434 #=========================================================================== 435 # Create the proc_characteristic file passing information to the run_interface 436 #===========================================================================
    def create_proc_charac(self, matrix_elements=None, history="", **opts):
        """Dump the accumulated process characteristics to
        SubProcesses/proc_characteristics for the run_interface to read.

        'matrix_elements', 'history' and any extra options are accepted for
        interface compatibility with daughter classes but are unused here.
        """
        self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
440 441 #=========================================================================== 442 # write_matrix_element_v4 443 #===========================================================================
    def write_matrix_element_v4(self):
        """Function to write a matrix.f file, for inheritance.

        The base implementation is a no-op; concrete exporters override it
        to produce the actual Fortran matrix element.
        """
        pass
448 449 #=========================================================================== 450 # write_pdf_opendata 451 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure

        Fills the 'pdf_systemwide' / 'cluster_specific_path' slots of the
        opendata.f and pdfwrap_lhapdf.f templates with Fortran snippets that
        probe the cluster-local PDF locations first.
        """

        if not self.opt["cluster_local_path"]:
            # No cluster-local repository configured: keep the default lookup.
            changer = {"pdf_systemwide": ""}
        else:
            # Fortran snippet trying, in order, several cluster-local
            # locations for the PDF table (numeric labels 1-5 chain the
            # ERR= fallbacks).
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
""" % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        # FortranWriter re-formats the Fortran source as it writes it.
        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            to_add="""
              LHAPath='%(path)s/PDFsets'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='./PDFsets'
""" % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)

        return
509 510 511 512 #=========================================================================== 513 # write_maxparticles_file 514 #===========================================================================
515 - def write_maxparticles_file(self, writer, matrix_elements):
516 """Write the maxparticles.inc file for MadEvent""" 517 518 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 519 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 520 matrix_elements.get('matrix_elements')]) 521 else: 522 maxparticles = max([me.get_nexternal_ninitial()[0] \ 523 for me in matrix_elements]) 524 525 lines = "integer max_particles\n" 526 lines += "parameter(max_particles=%d)" % maxparticles 527 528 # Write the file 529 writer.writelines(lines) 530 531 return True
532 533 534 #=========================================================================== 535 # export the model 536 #===========================================================================
    def export_model_files(self, model_path):
        """Configure the files/link of the process according to the model"""

        # Import the model: copy every plain file of the model directory
        # into Source/MODEL (sub-directories are skipped).
        # NOTE(review): 'file' shadows the Python 2 builtin; harmless here.
        for file in os.listdir(model_path):
            if os.path.isfile(pjoin(model_path, file)):
                shutil.copy2(pjoin(model_path, file), \
                             pjoin(self.dir_path, 'Source', 'MODEL'))
545 546 560 568 569 570 #=========================================================================== 571 # export the helas routine 572 #===========================================================================
573 - def export_helas(self, helas_path):
574 """Configure the files/link of the process according to the model""" 575 576 # Import helas routine 577 for filename in os.listdir(helas_path): 578 filepos = pjoin(helas_path, filename) 579 if os.path.isfile(filepos): 580 if filepos.endswith('Makefile.template'): 581 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 582 elif filepos.endswith('Makefile'): 583 pass 584 else: 585 cp(filepos, self.dir_path + '/Source/DHELAS')
586 # following lines do the same but whithout symbolic link 587 # 588 #def export_helas(mgme_dir, dir_path): 589 # 590 # # Copy the HELAS directory 591 # helas_dir = pjoin(mgme_dir, 'HELAS') 592 # for filename in os.listdir(helas_dir): 593 # if os.path.isfile(pjoin(helas_dir, filename)): 594 # shutil.copy2(pjoin(helas_dir, filename), 595 # pjoin(dir_path, 'Source', 'DHELAS')) 596 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 597 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 598 # 599 600 #=========================================================================== 601 # generate_subprocess_directory 602 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Routine to generate a subprocess directory (for inheritance)

        The base implementation does nothing; concrete exporters create the
        P* directory and return the number of helicity-routine calls.
        """

        pass
609 610 #=========================================================================== 611 # get_source_libraries_list 612 #===========================================================================
613 - def get_source_libraries_list(self):
614 """ Returns the list of libraries to be compiling when compiling the 615 SOURCE directory. It is different for loop_induced processes and 616 also depends on the value of the 'output_dependencies' option""" 617 618 return ['$(LIBDIR)libdhelas.$(libext)', 619 '$(LIBDIR)libpdf.$(libext)', 620 '$(LIBDIR)libmodel.$(libext)', 621 '$(LIBDIR)libcernlib.$(libext)', 622 '$(LIBDIR)libbias.$(libext)']
623 624 #=========================================================================== 625 # write_source_makefile 626 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source directory makefile for MG4.

        (The original docstring said "nexternal.inc"; this method actually
        fills the madevent_makefile_source template.)
        """

        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list())
        # MSSM models need an intermediate MG5_param.dat translation step
        # ('../bin/madevent treatcards param') before libmodel can be built.
        if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
        else:
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''
        text = open(path).read() % {'libraries': set_of_lib, 'model':model_line}
        writer.write(text)

        return True
643 644 #=========================================================================== 645 # write_nexternal_madspin 646 #===========================================================================
647 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
648 """Write the nexternal_prod.inc file for madspin""" 649 650 replace_dict = {} 651 652 replace_dict['nexternal'] = nexternal 653 replace_dict['ninitial'] = ninitial 654 655 file = """ \ 656 integer nexternal_prod 657 parameter (nexternal_prod=%(nexternal)d) 658 integer nincoming_prod 659 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 660 661 # Write the file 662 if writer: 663 writer.writelines(file) 664 return True 665 else: 666 return replace_dict
667 668 #=========================================================================== 669 # write_helamp_madspin 670 #===========================================================================
671 - def write_helamp_madspin(self, writer, ncomb):
672 """Write the helamp.inc file for madspin""" 673 674 replace_dict = {} 675 676 replace_dict['ncomb'] = ncomb 677 678 file = """ \ 679 integer ncomb1 680 parameter (ncomb1=%(ncomb)d) 681 double precision helamp(ncomb1) 682 common /to_helamp/helamp """ % replace_dict 683 684 # Write the file 685 if writer: 686 writer.writelines(file) 687 return True 688 else: 689 return replace_dict
690 691 692 693 #=========================================================================== 694 # write_nexternal_file 695 #===========================================================================
696 - def write_nexternal_file(self, writer, nexternal, ninitial):
697 """Write the nexternal.inc file for MG4""" 698 699 replace_dict = {} 700 701 replace_dict['nexternal'] = nexternal 702 replace_dict['ninitial'] = ninitial 703 704 file = """ \ 705 integer nexternal 706 parameter (nexternal=%(nexternal)d) 707 integer nincoming 708 parameter (nincoming=%(ninitial)d)""" % replace_dict 709 710 # Write the file 711 if writer: 712 writer.writelines(file) 713 return True 714 else: 715 return replace_dict
716 #=========================================================================== 717 # write_pmass_file 718 #===========================================================================
719 - def write_pmass_file(self, writer, matrix_element):
720 """Write the pmass.inc file for MG4""" 721 722 model = matrix_element.get('processes')[0].get('model') 723 724 lines = [] 725 for wf in matrix_element.get_external_wavefunctions(): 726 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 727 if mass.lower() != "zero": 728 mass = "abs(%s)" % mass 729 730 lines.append("pmass(%d)=%s" % \ 731 (wf.get('number_external'), mass)) 732 733 # Write the file 734 writer.writelines(lines) 735 736 return True
737 738 #=========================================================================== 739 # write_ngraphs_file 740 #===========================================================================
741 - def write_ngraphs_file(self, writer, nconfigs):
742 """Write the ngraphs.inc file for MG4. Needs input from 743 write_configs_file.""" 744 745 file = " integer n_max_cg\n" 746 file = file + "parameter (n_max_cg=%d)" % nconfigs 747 748 # Write the file 749 writer.writelines(file) 750 751 return True
752 753 #=========================================================================== 754 # write_leshouche_file 755 #===========================================================================
756 - def write_leshouche_file(self, writer, matrix_element):
757 """Write the leshouche.inc file for MG4""" 758 759 # Write the file 760 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 761 762 return True
763 764 #=========================================================================== 765 # get_leshouche_lines 766 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Return the DATA statements (IDUP/MOTHUP/ICOLUP) that make up the
        MG4 leshouche.inc file for this matrix element."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG ids of all external legs of this (sub)process.
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mother information: initial-state legs have mother 0, every
                # final-state leg points back to the two initial legs.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1,nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    # (sign flips for initial-state legs via (-1)**(1+state)).
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
816 817 818 819 820 #=========================================================================== 821 # write_maxamps_file 822 #===========================================================================
823 - def write_maxamps_file(self, writer, maxamps, maxflows, 824 maxproc,maxsproc):
825 """Write the maxamps.inc file for MG4.""" 826 827 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 828 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 829 (maxamps, maxflows) 830 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 831 (maxproc, maxsproc) 832 833 # Write the file 834 writer.writelines(file) 835 836 return True
837 838 839 #=========================================================================== 840 # Routines to output UFO models in MG4 format 841 #=========================================================================== 842
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO)

        Writes Source/MODEL via UFO_model_to_mg4 and Source/DHELAS via aloha,
        then links the result into the process directories. Temporarily
        mutates the aloha module globals (mp_precision, loop_mode) and
        restores them before returning.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']

        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        # NOTE(review): 'self.opt + self.proc_characteristic' relies on the
        # operands supporting '+' as an options merge -- confirm against
        # banner_mod.ProcCharacteristic.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # Loop Lorentz structures (tags containing 'L') need the loop-capable
        # aloha helper functions.
        if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
900 901 902 #=========================================================================== 903 # Helper functions 904 #===========================================================================
905 - def modify_grouping(self, matrix_element):
906 """allow to modify the grouping (if grouping is in place) 907 return two value: 908 - True/False if the matrix_element was modified 909 - the new(or old) matrix element""" 910 911 return False, matrix_element
912 913 #=========================================================================== 914 # Helper functions 915 #===========================================================================
916 - def get_mg5_info_lines(self):
917 """Return info lines for MG5, suitable to place at beginning of 918 Fortran files""" 919 920 info = misc.get_pkg_info() 921 info_lines = "" 922 if info and info.has_key('version') and info.has_key('date'): 923 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 924 (info['version'], info['date']) 925 info_lines = info_lines + \ 926 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 927 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 928 else: 929 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 930 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 931 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 932 933 return info_lines
934
935 - def get_process_info_lines(self, matrix_element):
936 """Return info lines describing the processes for this matrix element""" 937 938 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 939 for process in matrix_element.get('processes')])
940 941
942 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
943 """Return the Helicity matrix definition lines for this matrix element""" 944 945 helicity_line_list = [] 946 i = 0 947 for helicities in matrix_element.get_helicity_matrix(): 948 i = i + 1 949 int_list = [i, len(helicities)] 950 int_list.extend(helicities) 951 helicity_line_list.append(\ 952 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 953 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 954 955 return "\n".join(helicity_line_list)
956
957 - def get_ic_line(self, matrix_element):
958 """Return the IC definition line coming after helicities, required by 959 switchmom in madevent""" 960 961 nexternal = matrix_element.get_nexternal_ninitial()[0] 962 int_list = range(1, nexternal + 1) 963 964 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 965 ",".join([str(i) for \ 966 i in int_list]))
967
968 - def set_chosen_SO_index(self, process, squared_orders):
969 """ From the squared order constraints set by the user, this function 970 finds what indices of the squared_orders list the user intends to pick. 971 It returns this as a string of comma-separated successive '.true.' or 972 '.false.' for each index.""" 973 974 user_squared_orders = process.get('squared_orders') 975 split_orders = process.get('split_orders') 976 977 if len(user_squared_orders)==0: 978 return ','.join(['.true.']*len(squared_orders)) 979 980 res = [] 981 for sqsos in squared_orders: 982 is_a_match = True 983 for user_sqso, value in user_squared_orders.items(): 984 if (process.get_squared_order_type(user_sqso) =='==' and \ 985 value!=sqsos[split_orders.index(user_sqso)]) or \ 986 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 987 value<sqsos[split_orders.index(user_sqso)]) or \ 988 (process.get_squared_order_type(user_sqso) == '>' and \ 989 value>=sqsos[split_orders.index(user_sqso)]): 990 is_a_match = False 991 break 992 res.append('.true.' if is_a_match else '.false.') 993 994 return ','.join(res)
995
996 - def get_split_orders_lines(self, orders, array_name, n=5):
997 """ Return the split orders definition as defined in the list orders and 998 for the name of the array 'array_name'. Split rows in chunks of size n.""" 999 1000 ret_list = [] 1001 for index, order in enumerate(orders): 1002 for k in xrange(0, len(order), n): 1003 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1004 (array_name,index + 1, k + 1, min(k + n, len(order)), 1005 ','.join(["%5r" % i for i in order[k:k + n]]))) 1006 return ret_list
1007
1008 - def format_integer_list(self, list, name, n=5):
1009 """ Return an initialization of the python list in argument following 1010 the fortran syntax using the data keyword assignment, filling an array 1011 of name 'name'. It splits rows in chunks of size n.""" 1012 1013 ret_list = [] 1014 for k in xrange(0, len(list), n): 1015 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1016 (name, k + 1, min(k + n, len(list)), 1017 ','.join(["%5r" % i for i in list[k:k + n]]))) 1018 return ret_list
1019
1020 - def get_color_data_lines(self, matrix_element, n=6):
1021 """Return the color matrix definition lines for this matrix element. Split 1022 rows in chunks of size n.""" 1023 1024 if not matrix_element.get('color_matrix'): 1025 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1026 else: 1027 ret_list = [] 1028 my_cs = color.ColorString() 1029 for index, denominator in \ 1030 enumerate(matrix_element.get('color_matrix').\ 1031 get_line_denominators()): 1032 # First write the common denominator for this color matrix line 1033 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1034 # Then write the numerators for the matrix elements 1035 num_list = matrix_element.get('color_matrix').\ 1036 get_line_numerators(index, denominator) 1037 1038 for k in xrange(0, len(num_list), n): 1039 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1040 (index + 1, k + 1, min(k + n, len(num_list)), 1041 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 1042 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1043 ret_list.append("C %s" % repr(my_cs)) 1044 return ret_list
1045 1046
1047 - def get_den_factor_line(self, matrix_element):
1048 """Return the denominator factor line for this matrix element""" 1049 1050 return "DATA IDEN/%2r/" % \ 1051 matrix_element.get_denominator_factor()
1052
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        mapconfigs: diagram number for each config (0 meaning no diagram).
        num_matrix_element: index of this matrix element in the group,
        used in the generated array subscripts."""

        ret_list = []

        # Maps Python booleans to Fortran logical literals.
        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Create dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # NOTE(review): relies on the loop variable `ijamp` leaking out of the
        # loop above; the color basis is non-empty here (checked at the top),
        # so the loop always ran at least once.
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            # NOTE(review): raises KeyError if a config's diagram has no
            # leading-color JAMP at all — presumably guaranteed not to happen
            # by construction upstream; confirm with the callers.
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                            (iconfig+1, num_matrix_element, colamps,
                             ','.join(["%s" % booldict[b] for b in \
                                       bool_list])))

        return ret_list
1111
1112 - def get_amp2_lines(self, matrix_element, config_map = []):
1113 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1114 1115 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1116 # Get minimum legs in a vertex 1117 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1118 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1119 minvert = min(vert_list) if vert_list!=[] else 0 1120 1121 ret_lines = [] 1122 if config_map: 1123 # In this case, we need to sum up all amplitudes that have 1124 # identical topologies, as given by the config_map (which 1125 # gives the topology/config for each of the diagrams 1126 diagrams = matrix_element.get('diagrams') 1127 # Combine the diagrams with identical topologies 1128 config_to_diag_dict = {} 1129 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1130 if config_map[idiag] == 0: 1131 continue 1132 try: 1133 config_to_diag_dict[config_map[idiag]].append(idiag) 1134 except KeyError: 1135 config_to_diag_dict[config_map[idiag]] = [idiag] 1136 # Write out the AMP2s summing squares of amplitudes belonging 1137 # to eiher the same diagram or different diagrams with 1138 # identical propagator properties. Note that we need to use 1139 # AMP2 number corresponding to the first diagram number used 1140 # for that AMP2. 1141 for config in sorted(config_to_diag_dict.keys()): 1142 1143 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1144 {"num": (config_to_diag_dict[config][0] + 1)} 1145 1146 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1147 sum([diagrams[idiag].get('amplitudes') for \ 1148 idiag in config_to_diag_dict[config]], [])]) 1149 1150 # Not using \sum |M|^2 anymore since this creates troubles 1151 # when ckm is not diagonal due to the JIM mechanism. 
1152 if '+' in amp: 1153 line += "(%s)*dconjg(%s)" % (amp, amp) 1154 else: 1155 line += "%s*dconjg(%s)" % (amp, amp) 1156 ret_lines.append(line) 1157 else: 1158 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1159 # Ignore any diagrams with 4-particle vertices. 1160 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1161 continue 1162 # Now write out the expression for AMP2, meaning the sum of 1163 # squared amplitudes belonging to the same diagram 1164 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1165 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1166 {"num": a.get('number')} for a in \ 1167 diag.get('amplitudes')]) 1168 ret_lines.append(line) 1169 1170 return ret_lines
1171  1172  #=========================================================================== 1173  # Returns the data statements initializing the coefficients for the JAMP 1174  # decomposition. It is used when the JAMP initialization is decided to be 1175  # done through big arrays containing the projection coefficients. 1176  #===========================================================================
1177 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1178 n=50, Nc_value=3):
1179 """This functions return the lines defining the DATA statement setting 1180 the coefficients building the JAMPS out of the AMPS. Split rows in 1181 bunches of size n. 1182 One can specify the color_basis from which the color amplitudes originates 1183 so that there are commentaries telling what color structure each JAMP 1184 corresponds to.""" 1185 1186 if(not isinstance(color_amplitudes,list) or 1187 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1188 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1189 1190 res_list = [] 1191 my_cs = color.ColorString() 1192 for index, coeff_list in enumerate(color_amplitudes): 1193 # Create the list of the complete numerical coefficient. 1194 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1195 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1196 coefficient in coeff_list] 1197 # Create the list of the numbers of the contributing amplitudes. 1198 # Mutliply by -1 for those which have an imaginary coefficient. 1199 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1200 for coefficient in coeff_list] 1201 # Find the common denominator. 
1202 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1203 num_list=[(coefficient*commondenom).numerator \ 1204 for coefficient in coefs_list] 1205 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1206 index+1,len(num_list))) 1207 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1208 index+1,commondenom)) 1209 if color_basis: 1210 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1211 res_list.append("C %s" % repr(my_cs)) 1212 for k in xrange(0, len(num_list), n): 1213 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1214 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1215 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1216 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1217 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1218 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1219 pass 1220 return res_list
1221 1222
1223 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1224 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1225 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1226 defined as a matrix element or directly as a color_amplitudes dictionary. 1227 The split_order_amps specifies the group of amplitudes sharing the same 1228 amplitude orders which should be put in together in a given set of JAMPS. 1229 The split_order_amps is supposed to have the format of the second output 1230 of the function get_split_orders_mapping function in helas_objects.py. 1231 The split_order_names is optional (it should correspond to the process 1232 'split_orders' attribute) and only present to provide comments in the 1233 JAMP definitions in the code.""" 1234 1235 # Let the user call get_JAMP_lines_split_order directly from a 1236 error_msg="Malformed '%s' argument passed to the "+\ 1237 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1238 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1239 color_amplitudes=col_amps.get_color_amplitudes() 1240 elif(isinstance(col_amps,list)): 1241 if(col_amps and isinstance(col_amps[0],list)): 1242 color_amplitudes=col_amps 1243 else: 1244 raise MadGraph5Error, error_msg%'col_amps' 1245 else: 1246 raise MadGraph5Error, error_msg%'col_amps' 1247 1248 # Verify the sanity of the split_order_amps and split_order_names args 1249 if isinstance(split_order_amps,list): 1250 for elem in split_order_amps: 1251 if len(elem)!=2: 1252 raise MadGraph5Error, error_msg%'split_order_amps' 1253 # Check the first element of the two lists to make sure they are 1254 # integers, although in principle they should all be integers. 
1255 if not isinstance(elem[0],tuple) or \ 1256 not isinstance(elem[1],tuple) or \ 1257 not isinstance(elem[0][0],int) or \ 1258 not isinstance(elem[1][0],int): 1259 raise MadGraph5Error, error_msg%'split_order_amps' 1260 else: 1261 raise MadGraph5Error, error_msg%'split_order_amps' 1262 1263 if not split_order_names is None: 1264 if isinstance(split_order_names,list): 1265 # Should specify the same number of names as there are elements 1266 # in the key of the split_order_amps. 1267 if len(split_order_names)!=len(split_order_amps[0][0]): 1268 raise MadGraph5Error, error_msg%'split_order_names' 1269 # Check the first element of the list to be a string 1270 if not isinstance(split_order_names[0],str): 1271 raise MadGraph5Error, error_msg%'split_order_names' 1272 else: 1273 raise MadGraph5Error, error_msg%'split_order_names' 1274 1275 # Now scan all contributing orders to be individually computed and 1276 # construct the list of color_amplitudes for JAMP to be constructed 1277 # accordingly. 1278 res_list=[] 1279 for i, amp_order in enumerate(split_order_amps): 1280 col_amps_order = [] 1281 for jamp in color_amplitudes: 1282 col_amps_order.append(filter(lambda col_amp: 1283 col_amp[1] in amp_order[1],jamp)) 1284 if split_order_names: 1285 res_list.append('C JAMPs contributing to orders '+' '.join( 1286 ['%s=%i'%order for order in zip(split_order_names, 1287 amp_order[0])])) 1288 if self.opt['export_format'] in ['madloop_matchbox']: 1289 res_list.extend(self.get_JAMP_lines(col_amps_order, 1290 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1291 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1292 else: 1293 res_list.extend(self.get_JAMP_lines(col_amps_order, 1294 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1295 1296 return res_list
1297 1298
1299 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1300 split=-1):
1301 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1302 defined as a matrix element or directly as a color_amplitudes dictionary, 1303 Jamp_formatLC should be define to allow to add LeadingColor computation 1304 (usefull for MatchBox) 1305 The split argument defines how the JAMP lines should be split in order 1306 not to be too long.""" 1307 1308 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1309 # the color amplitudes lists. 1310 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1311 color_amplitudes=col_amps.get_color_amplitudes() 1312 elif(isinstance(col_amps,list)): 1313 if(col_amps and isinstance(col_amps[0],list)): 1314 color_amplitudes=col_amps 1315 else: 1316 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1317 else: 1318 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1319 1320 1321 res_list = [] 1322 for i, coeff_list in enumerate(color_amplitudes): 1323 # It might happen that coeff_list is empty if this function was 1324 # called from get_JAMP_lines_split_order (i.e. if some color flow 1325 # does not contribute at all for a given order). 1326 # In this case we simply set it to 0. 1327 if coeff_list==[]: 1328 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1329 continue 1330 # Break the JAMP definition into 'n=split' pieces to avoid having 1331 # arbitrarly long lines. 
1332 first=True 1333 n = (len(coeff_list)+1 if split<=0 else split) 1334 while coeff_list!=[]: 1335 coefs=coeff_list[:n] 1336 coeff_list=coeff_list[n:] 1337 res = ((JAMP_format+"=") % str(i + 1)) + \ 1338 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1339 1340 first=False 1341 # Optimization: if all contributions to that color basis element have 1342 # the same coefficient (up to a sign), put it in front 1343 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1344 common_factor = False 1345 diff_fracs = list(set(list_fracs)) 1346 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1347 common_factor = True 1348 global_factor = diff_fracs[0] 1349 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1350 1351 # loop for JAMP 1352 for (coefficient, amp_number) in coefs: 1353 if not coefficient: 1354 continue 1355 if common_factor: 1356 res = (res + "%s" + AMP_format) % \ 1357 (self.coeff(coefficient[0], 1358 coefficient[1] / abs(coefficient[1]), 1359 coefficient[2], 1360 coefficient[3]), 1361 str(amp_number)) 1362 else: 1363 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1364 coefficient[1], 1365 coefficient[2], 1366 coefficient[3]), 1367 str(amp_number)) 1368 1369 if common_factor: 1370 res = res + ')' 1371 1372 res_list.append(res) 1373 1374 return res_list
1375
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file

        Returns a triple of Fortran code fragments (each without trailing
        newline): the DOUBLE PRECISION declarations of the per-beam PDF
        variables, their DATA initializations, and the code evaluating the
        PDFs and filling the PD array — one entry per process, plus the
        running total in PD(0)."""

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDF convolution, every process gets weight 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            # Sanitize particle names into valid Fortran identifiers.
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                        ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                  for pdg in \
                                                  initial_states[i]]) + \
                                        "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                  "/%d*1D0/" % len(initial_states[i]) + \
                                  "\n"

            # Get PDF lines for all different initial states
            for i, init_states in enumerate(initial_states):
                # The PDG2PDF call is only emitted inside an IF guarding on a
                # hadronic beam (LPP != 0); subprocess groups index the beams
                # indirectly through IB().
                if subproc_group:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for initial_state in init_states:
                    if initial_state in pdf_codes.keys():
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in pdf_codes.keys():
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        # Non-partonic initial state: unit weight.
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1481 1482 #=========================================================================== 1483 # write_props_file 1484 #===========================================================================
1485 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1486 """Write the props.inc file for MadEvent. Needs input from 1487 write_configs_file.""" 1488 1489 lines = [] 1490 1491 particle_dict = matrix_element.get('processes')[0].get('model').\ 1492 get('particle_dict') 1493 1494 for iconf, configs in enumerate(s_and_t_channels): 1495 for vertex in configs[0] + configs[1][:-1]: 1496 leg = vertex.get('legs')[-1] 1497 if leg.get('id') not in particle_dict: 1498 # Fake propagator used in multiparticle vertices 1499 mass = 'zero' 1500 width = 'zero' 1501 pow_part = 0 1502 else: 1503 particle = particle_dict[leg.get('id')] 1504 # Get mass 1505 if particle.get('mass').lower() == 'zero': 1506 mass = particle.get('mass') 1507 else: 1508 mass = "abs(%s)" % particle.get('mass') 1509 # Get width 1510 if particle.get('width').lower() == 'zero': 1511 width = particle.get('width') 1512 else: 1513 width = "abs(%s)" % particle.get('width') 1514 1515 pow_part = 1 + int(particle.is_boson()) 1516 1517 lines.append("prmass(%d,%d) = %s" % \ 1518 (leg.get('number'), iconf + 1, mass)) 1519 lines.append("prwidth(%d,%d) = %s" % \ 1520 (leg.get('number'), iconf + 1, width)) 1521 lines.append("pow(%d,%d) = %d" % \ 1522 (leg.get('number'), iconf + 1, pow_part)) 1523 1524 # Write the file 1525 writer.writelines(lines) 1526 1527 return True
1528 1529 #=========================================================================== 1530 # write_configs_file 1531 #===========================================================================
1532 - def write_configs_file(self, writer, matrix_element):
1533 """Write the configs.inc file for MadEvent""" 1534 1535 # Extract number of external particles 1536 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1537 1538 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1539 mapconfigs = [c[0] for c in configs] 1540 model = matrix_element.get('processes')[0].get('model') 1541 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1542 [[c[1]] for c in configs], 1543 mapconfigs, 1544 nexternal, ninitial, 1545 model)
1546 1547 #=========================================================================== 1548 # write_configs_file_from_diagrams 1549 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the s- and t-channels of the first contributing subprocess
        for each written config, for later use by write_props_file."""

        lines = []

        s_and_t_channels = []

        # Smallest maximal vertex size over all configs: configs containing
        # any larger vertex are dropped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fake PDG code used for multiparticle/shrunk-loop propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            # NOTE: empty_verts is a shared sentinel; it is resized in place
            # below so that every (empty_verts, None) entry in stchannels
            # tracks the real vertex count.
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondence between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are per-subprocess tuples of vertices;
                # t-channel entries are single vertices.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess (0 when the
                    # subprocess does not contribute to this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1668 1669 #=========================================================================== 1670 # Global helper methods 1671 #=========================================================================== 1672
1673 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1674 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1675 1676 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1677 1678 if total_coeff == 1: 1679 if is_imaginary: 1680 return '+imag1*' 1681 else: 1682 return '+' 1683 elif total_coeff == -1: 1684 if is_imaginary: 1685 return '-imag1*' 1686 else: 1687 return '-' 1688 1689 res_str = '%+iD0' % total_coeff.numerator 1690 1691 if total_coeff.denominator != 1: 1692 # Check if total_coeff is an integer 1693 res_str = res_str + '/%iD0' % total_coeff.denominator 1694 1695 if is_imaginary: 1696 res_str = res_str + '*imag1' 1697 1698 return res_str + '*'
1699 1700
1701 - def set_fortran_compiler(self, default_compiler, force=False):
1702 """Set compiler based on what's available on the system""" 1703 1704 # Check for compiler 1705 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1706 f77_compiler = default_compiler['fortran'] 1707 elif misc.which('gfortran'): 1708 f77_compiler = 'gfortran' 1709 elif misc.which('g77'): 1710 f77_compiler = 'g77' 1711 elif misc.which('f77'): 1712 f77_compiler = 'f77' 1713 elif default_compiler['fortran']: 1714 logger.warning('No Fortran Compiler detected! Please install one') 1715 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1716 else: 1717 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1718 logger.info('Use Fortran compiler ' + f77_compiler) 1719 1720 1721 # Check for compiler. 1. set default. 1722 if default_compiler['f2py']: 1723 f2py_compiler = default_compiler['f2py'] 1724 else: 1725 f2py_compiler = '' 1726 # Try to find the correct one. 1727 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1728 f2py_compiler = default_compiler 1729 elif misc.which('f2py'): 1730 f2py_compiler = 'f2py' 1731 elif sys.version_info[1] == 6: 1732 if misc.which('f2py-2.6'): 1733 f2py_compiler = 'f2py-2.6' 1734 elif misc.which('f2py2.6'): 1735 f2py_compiler = 'f2py2.6' 1736 elif sys.version_info[1] == 7: 1737 if misc.which('f2py-2.7'): 1738 f2py_compiler = 'f2py-2.7' 1739 elif misc.which('f2py2.7'): 1740 f2py_compiler = 'f2py2.7' 1741 1742 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1743 1744 1745 self.replace_make_opt_f_compiler(to_replace) 1746 # Replace also for Template but not for cluster 1747 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1748 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1749 1750 return f77_compiler
1751 1752 # an alias for backward compatibility 1753 set_compiler = set_fortran_compiler 1754 1755
1756 - def set_cpp_compiler(self, default_compiler, force=False):
1757 """Set compiler based on what's available on the system""" 1758 1759 # Check for compiler 1760 if default_compiler and misc.which(default_compiler): 1761 compiler = default_compiler 1762 elif misc.which('g++'): 1763 #check if clang version 1764 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1765 stderr=subprocess.PIPE) 1766 out, _ = p.communicate() 1767 if 'clang' in out and misc.which('clang'): 1768 compiler = 'clang' 1769 else: 1770 compiler = 'g++' 1771 elif misc.which('c++'): 1772 compiler = 'c++' 1773 elif misc.which('clang'): 1774 compiler = 'clang' 1775 elif default_compiler: 1776 logger.warning('No c++ Compiler detected! Please install one') 1777 compiler = default_compiler # maybe misc fail so try with it 1778 else: 1779 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1780 logger.info('Use c++ compiler ' + compiler) 1781 self.replace_make_opt_c_compiler(compiler) 1782 # Replace also for Template but not for cluster 1783 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1784 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1785 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1786 1787 return compiler
1788 1789
1790 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1791 """Set FC=compiler in Source/make_opts""" 1792 1793 assert isinstance(compilers, dict) 1794 1795 mod = False #avoid to rewrite the file if not needed 1796 if not root_dir: 1797 root_dir = self.dir_path 1798 1799 compiler= compilers['fortran'] 1800 f2py_compiler = compilers['f2py'] 1801 if not f2py_compiler: 1802 f2py_compiler = 'f2py' 1803 for_update= {'DEFAULT_F_COMPILER':compiler, 1804 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1805 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1806 1807 try: 1808 common_run_interface.CommonRunCmd.update_make_opts_full( 1809 make_opts, for_update) 1810 except IOError: 1811 if root_dir == self.dir_path: 1812 logger.info('Fail to set compiler. Trying to continue anyway.')
1813
1814 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1815 """Set CXX=compiler in Source/make_opts. 1816 The version is also checked, in order to set some extra flags 1817 if the compiler is clang (on MACOS)""" 1818 1819 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1820 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1821 1822 # list of the variable to set in the make_opts file 1823 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1824 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1825 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1826 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1827 } 1828 1829 if not root_dir: 1830 root_dir = self.dir_path 1831 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1832 1833 try: 1834 common_run_interface.CommonRunCmd.update_make_opts_full( 1835 make_opts, for_update) 1836 except IOError: 1837 if root_dir == self.dir_path: 1838 logger.info('Fail to set compiler. Trying to continue anyway.') 1839 1840 return
1841
#===============================================================================
# ProcessExporterFortranSA
#===============================================================================
class ProcessExporterFortranSA(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 StandAlone format."""

    # Fortran template used when writing matrix.f (subclasses override it).
    matrix_template = "matrix_standalone_v4.inc"

    def __init__(self, *args, **opts):
        """add the format information compare to standard init"""

        # 'format' is consumed here so the parent __init__ never sees it.
        if 'format' in opts:
            self.format = opts['format']
            del opts['format']
        else:
            self.format = 'standalone'
        ProcessExporterFortran.__init__(self, *args, **opts)

    def copy_template(self, model):
        """Additional actions needed for setup of Template
        """

        #First copy the full template tree if dir_path doesn't exit
        if os.path.isdir(self.dir_path):
            return

        logger.info('initialize a new standalone directory: %s' % \
                        os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')

        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))

        # Information at top-level
        #Write version info
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # No MGMEVersion.txt in a pure MG5 install: synthesize one.
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])

        # Add file in SubProcesses
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefile'))

        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                        pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FileWriter(filename))

    #===========================================================================
    # export model files
    #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model"""

        super(ProcessExporterFortranSA,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Patch check_sa.f so that setpara explicitly re-reads the param card.
        text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()

    #===========================================================================
    # Make the Helas and Model directories for Standalone directory
    #===========================================================================
    def make(self):
        """Run make in the DHELAS and MODEL directories, to set up
        everything for running standalone
        """

        source_dir = pjoin(self.dir_path, "Source")
        logger.info("Running make for Helas")
        misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran')
        logger.info("Running make for Model")
        misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')

    #===========================================================================
    # Create proc_card_mg5.dat for Standalone directory
    #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
           generation proc_card_mg5.dat
           generate a global makefile
        """

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
        # Touch __init__.py files so the output is importable as a package.
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        if 'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        # create a single makefile to compile all the subprocesses
        text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
        deppython = ''
        for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
            if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}

        text+='all: %s\n\techo \'done\'' % deppython

        ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
        ff.write(text)
        ff.close()

    def create_MA5_cards(self,*args,**opts):
        """ Overload the function of the mother so as to bypass this in StandAlone."""
        pass

    def compiler_choice(self, compiler):
        """ Different daughter classes might want different compilers.
        So this function is meant to be overloaded if desired."""

        self.set_compiler(compiler)

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files"""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if a directory for a permutation of the
            # final-state legs already exists, skip generating this one.
            for i,proc in enumerate(matrix_element.get('processes')):

                initial = [] #filled in the next line
                final = [l.get('id') for l in proc.get('legs')\
                      if l.get('state') or initial.append(l.get('id'))]
                decay_finals = proc.get_final_ids_after_decay()
                decay_finals.sort()
                tag = (tuple(initial), tuple(decay_finals))
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                    flegs = proc.get('legs')[2:]
                    for perm in itertools.permutations(flegs):
                        for i,p in enumerate(perm):
                            proc.get('legs')[i+2] = p
                        dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                                   "P%s" % proc.shell_string())
                        #restore original order
                        proc.get('legs')[2:] = legs[2:]
                        if os.path.exists(dirpath2):
                            proc.get('legs')[:] = legs
                            return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')
        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model)

        if self.opt['export_format'] == 'standalone_msP':
            # MadSpin production variant needs extra include files.
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                             matrix_element,
                             s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                             nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb=matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                             ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                           len(matrix_element.get_all_amplitudes()))

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        # Symlink shared files from the parent SubProcesses directory.
        linkfiles = ['check_sa.f', 'coupl.inc', 'makefile']

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)

        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls

    #===========================================================================
    # write_source_makefile
    #===========================================================================
    def write_source_makefile(self, writer):
        """Write the nexternal.inc file for MG4"""

        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)'
        model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n'''
        text = open(path).read() % {'libraries': set_of_lib, 'model':model_line}
        writer.write(text)

        return True

    #===========================================================================
    # write_matrix_element_v4
    #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format
        if write is on False, just return the replace_dict and not write anything."""


        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                    "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        if not self.opt.has_key('sa_symmetry'):
            self.opt['sa_symmetry']=False



        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)

            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            # NOTE(review): this writer targets the current working directory,
            # not dirpath — confirm callers chdir appropriately.
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
                    nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                  'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
                writer.writelines(content)
            return len(filter(lambda call: call.find('#') != 0, helas_calls))
        else:
            replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls))
            return replace_dict # for subclass update

    def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal,
                                        nincoming, proc_prefix, writer):
        """ Write out a more advanced version of the check_sa drivers that
        individually returns the matrix element for each contributing squared
        order."""

        check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \
                             'template_files', 'check_sa_splitOrders.f')).read()
        printout_sq_orders=[]
        for i, squared_order in enumerate(squared_orders):
            sq_orders=[]
            for j, sqo in enumerate(squared_order):
                sq_orders.append('%s=%d'%(split_orders[j],sqo))
            printout_sq_orders.append(\
                 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\
                                                 %(i+1,' '.join(sq_orders),i+1))
        printout_sq_orders='\n'.join(printout_sq_orders)
        replace_dict = {'printout_sqorders':printout_sq_orders,
                        'nSplitOrders':len(squared_orders),
                        'nexternal':nexternal,
                        'nincoming':nincoming,
                        'proc_prefix':proc_prefix}

        if writer:
            writer.writelines(check_sa_content % replace_dict)
        else:
            return replace_dict
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # Matchbox always enables the symmetric-output shortcut.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born


    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n."""

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                             get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    continue
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype
                # '0' terminates each factor's index list in the flat arg array.
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # Emit one Fortran branch per (row, position) entry.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
              out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
              out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self,*args,**opts):
        # Matchbox output is compiled externally: skip the standalone build.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):

        """Adding leading color part of the colorflow"""

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        # Full-color JAMPs first, using the standard format.
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                      JAMP_format=JAMP_format,
                                      AMP_format=AMP_format,
                                      split=-1)


        # Filter the col_ampls to generate only those without any 1/NC terms

        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                # coefficient[3] is the Nc power; 0 means leading color.
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                      JAMP_format=JAMP_formatLC,
                                      AMP_format=AMP_format,
                                      split=-1)

        return text
#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # Fortran template used when writing matrix.f for MadWeight.
    matrix_file="matrix_standalone_v4.inc"
    def copy_template(self, model):
        """Additional actions needed for setup of Template
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'),
                        pjoin(self.dir_path, 'Source','MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'),
                        pjoin(self.dir_path, 'bin','internal','madweight'), True)
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'),
                 pjoin(self.dir_path, 'Source','setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'),
                 pjoin(self.dir_path, 'Source','run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        # NOTE(review): the two os.open(devnull) descriptors below are never
        # closed — consider closing them after the call; confirm intent.
        try:
            subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')],
                            stdout = os.open(os.devnull, os.O_RDWR),
                            stderr = os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2516 2517 2518 2519 2520 #=========================================================================== 2521 # convert_model 2522 #===========================================================================
2523 - def convert_model(self, model, wanted_lorentz = [], 2524 wanted_couplings = []):
2525 2526 super(ProcessExporterFortranMW,self).convert_model(model, 2527 wanted_lorentz, wanted_couplings) 2528 2529 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2530 try: 2531 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2532 except OSError as error: 2533 pass 2534 model_path = model.get('modelpath') 2535 # This is not safe if there is a '##' or '-' in the path. 2536 shutil.copytree(model_path, 2537 pjoin(self.dir_path,'bin','internal','ufomodel'), 2538 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2539 if hasattr(model, 'restrict_card'): 2540 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2541 'restrict_default.dat') 2542 if isinstance(model.restrict_card, check_param_card.ParamCard): 2543 model.restrict_card.write(out_path) 2544 else: 2545 files.cp(model.restrict_card, out_path)
2546 2547 #=========================================================================== 2548 # generate_subprocess_directory 2549 #===========================================================================
2550 - def copy_python_file(self):
2551 """copy the python file require for the Template""" 2552 2553 # madevent interface 2554 cp(_file_path+'/interface/madweight_interface.py', 2555 self.dir_path+'/bin/internal/madweight_interface.py') 2556 cp(_file_path+'/interface/extended_cmd.py', 2557 self.dir_path+'/bin/internal/extended_cmd.py') 2558 cp(_file_path+'/interface/common_run_interface.py', 2559 self.dir_path+'/bin/internal/common_run_interface.py') 2560 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2561 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2562 #cp(_file_path+'/iolibs/save_load_object.py', 2563 # self.dir_path+'/bin/internal/save_load_object.py') 2564 cp(_file_path+'/iolibs/file_writers.py', 2565 self.dir_path+'/bin/internal/file_writers.py') 2566 #model file 2567 cp(_file_path+'../models/check_param_card.py', 2568 self.dir_path+'/bin/internal/check_param_card.py') 2569 2570 #madevent file 2571 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2572 cp(_file_path+'/various/lhe_parser.py', 2573 self.dir_path+'/bin/internal/lhe_parser.py') 2574 2575 cp(_file_path+'/various/banner.py', 2576 self.dir_path+'/bin/internal/banner.py') 2577 cp(_file_path+'/various/shower_card.py', 2578 self.dir_path+'/bin/internal/shower_card.py') 2579 cp(_file_path+'/various/cluster.py', 2580 self.dir_path+'/bin/internal/cluster.py') 2581 2582 # logging configuration 2583 cp(_file_path+'/interface/.mg5_logging.conf', 2584 self.dir_path+'/bin/internal/me5_logging.conf') 2585 cp(_file_path+'/interface/coloring_logging.py', 2586 self.dir_path+'/bin/internal/coloring_logging.py')
2587 2588 2589 #=========================================================================== 2590 # Change the version of cuts.f to the one compatible with MW 2591 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        If outpath is None the result goes to
        <dir_path>/SubProcesses/cuts.f; a string is treated as a file name
        to open; anything else is assumed to be an already-open writer.
        """

        # Start from the standard LO cuts.f shipped with the Template
        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # Drop the whole 'if(xqcut.gt.0d0...' block by tracking the if/endif
        # nesting depth; lines are only copied through while the depth is 0.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        # NOTE(review): the else branch hard-stops the fortran program
        # ('stop 1') when bw_cut is false -- presumably replaced by the real
        # implementation later; confirm against the MadWeight workflow.
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
        cut_bw = .true.
      else
        stop 1
      endif
      return
      end
""")

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        # MadWeight only needs maxparticles.inc, not the full genps.inc
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            # assume an already-open file/writer object; caller owns closing it
            fsock = outpath
        fsock.write(template)
2641 2642 2643 2644 #=========================================================================== 2645 # Make the Helas and Model directories for Standalone directory 2646 #===========================================================================
2647 - def make(self):
2648 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2649 everything for running madweight 2650 """ 2651 2652 source_dir = os.path.join(self.dir_path, "Source") 2653 logger.info("Running make for Helas") 2654 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2655 logger.info("Running make for Model") 2656 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2657 logger.info("Running make for PDF") 2658 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2659 logger.info("Running make for CERNLIB") 2660 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2661 logger.info("Running make for GENERIC") 2662 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2663 logger.info("Running make for blocks") 2664 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2665 logger.info("Running make for tools") 2666 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2667 2668 #=========================================================================== 2669 # Create proc_card_mg5.dat for MadWeight directory 2670 #===========================================================================
2671 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2672 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2673 2674 compiler = {'fortran': mg5options['fortran_compiler'], 2675 'cpp': mg5options['cpp_compiler'], 2676 'f2py': mg5options['f2py_compiler']} 2677 2678 2679 2680 #proc_charac 2681 self.create_proc_charac() 2682 2683 # Write maxparticles.inc based on max of ME's/subprocess groups 2684 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2685 self.write_maxparticles_file(writers.FortranWriter(filename), 2686 matrix_elements) 2687 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2688 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2689 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2690 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2691 2692 self.set_compiler(compiler) 2693 self.make() 2694 2695 # Write command history as proc_card_mg5 2696 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2697 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2698 history.write(output_file) 2699 2700 ProcessExporterFortran.finalize(self, matrix_elements, 2701 history, mg5options, flaglist)
2702 2703 2704 2705 #=========================================================================== 2706 # create the run_card for MW 2707 #===========================================================================
2708 - def create_run_card(self, matrix_elements, history):
2709 """ """ 2710 2711 run_card = banner_mod.RunCard() 2712 2713 # pass to default for MW 2714 run_card["run_tag"] = "\'not_use\'" 2715 run_card["fixed_ren_scale"] = "T" 2716 run_card["fixed_fac_scale"] = "T" 2717 run_card.remove_all_cut() 2718 2719 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2720 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2721 python_template=True) 2722 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2723 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2724 python_template=True)
2725 2726 #=========================================================================== 2727 # export model files 2728 #===========================================================================
2729 - def export_model_files(self, model_path):
2730 """export the model dependent files for V4 model""" 2731 2732 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2733 # Add the routine update_as_param in v4 model 2734 # This is a function created in the UFO 2735 text=""" 2736 subroutine update_as_param() 2737 call setpara('param_card.dat',.false.) 2738 return 2739 end 2740 """ 2741 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2742 ff.write(text) 2743 ff.close() 2744 2745 # Modify setrun.f 2746 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2747 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2748 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2749 fsock.write(text) 2750 fsock.close() 2751 2752 # Modify initialization.f 2753 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2754 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2755 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2756 fsock.write(text) 2757 fsock.close() 2758 2759 2760 self.make_model_symbolic_link()
2761 2762 #=========================================================================== 2763 # generate_subprocess_directory 2764 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of helas calls written for matrix.f (0 if none).
        """

        cwd = os.getcwd()
        misc.sprint(type(matrix_element))
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # directory probably exists already; warn and keep going
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath, 'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                                  writers.FortranWriter(filename),
                                  matrix_element,
                                  fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s-/t-channel info reused for props.inc
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        # phasespace.inc fixes max_configs to the number of diagrams
        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude='')
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): these links are created relative to *cwd*, not
        # dirpath -- this relies on the caller's working directory being the
        # new P* directory; confirm intended (the chdir above is commented out)
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2872 2873 #=========================================================================== 2874 # write_matrix_element_v4 2875 #===========================================================================
2876 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
2877 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 2878 2879 if not matrix_element.get('processes') or \ 2880 not matrix_element.get('diagrams'): 2881 return 0 2882 2883 if writer: 2884 if not isinstance(writer, writers.FortranWriter): 2885 raise writers.FortranWriter.FortranWriterError(\ 2886 "writer not FortranWriter") 2887 2888 # Set lowercase/uppercase Fortran code 2889 writers.FortranWriter.downcase = False 2890 2891 replace_dict = {} 2892 2893 # Extract version number and date from VERSION file 2894 info_lines = self.get_mg5_info_lines() 2895 replace_dict['info_lines'] = info_lines 2896 2897 # Extract process info lines 2898 process_lines = self.get_process_info_lines(matrix_element) 2899 replace_dict['process_lines'] = process_lines 2900 2901 # Set proc_id 2902 replace_dict['proc_id'] = proc_id 2903 2904 # Extract number of external particles 2905 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2906 replace_dict['nexternal'] = nexternal 2907 2908 # Extract ncomb 2909 ncomb = matrix_element.get_helicity_combinations() 2910 replace_dict['ncomb'] = ncomb 2911 2912 # Extract helicity lines 2913 helicity_lines = self.get_helicity_lines(matrix_element) 2914 replace_dict['helicity_lines'] = helicity_lines 2915 2916 # Extract overall denominator 2917 # Averaging initial state color, spin, and identical FS particles 2918 den_factor_line = self.get_den_factor_line(matrix_element) 2919 replace_dict['den_factor_line'] = den_factor_line 2920 2921 # Extract ngraphs 2922 ngraphs = matrix_element.get_number_of_amplitudes() 2923 replace_dict['ngraphs'] = ngraphs 2924 2925 # Extract nwavefuncs 2926 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2927 replace_dict['nwavefuncs'] = nwavefuncs 2928 2929 # Extract ncolor 2930 ncolor = max(1, len(matrix_element.get('color_basis'))) 2931 replace_dict['ncolor'] = ncolor 2932 2933 # Extract color data lines 2934 color_data_lines = self.get_color_data_lines(matrix_element) 2935 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2936 2937 # Extract helas calls 2938 helas_calls = fortran_model.get_matrix_element_calls(\ 2939 matrix_element) 2940 2941 replace_dict['helas_calls'] = "\n".join(helas_calls) 2942 2943 # Extract JAMP lines 2944 jamp_lines = self.get_JAMP_lines(matrix_element) 2945 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2946 2947 replace_dict['template_file'] = os.path.join(_file_path, \ 2948 'iolibs/template_files/%s' % self.matrix_file) 2949 replace_dict['template_file2'] = '' 2950 2951 if writer: 2952 file = open(replace_dict['template_file']).read() 2953 file = file % replace_dict 2954 # Write the file 2955 writer.writelines(file) 2956 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 2957 else: 2958 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
2959 2960 #=========================================================================== 2961 # write_source_makefile 2962 #===========================================================================
2963 - def write_source_makefile(self, writer):
2964 """Write the nexternal.inc file for madweight""" 2965 2966 2967 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 2968 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 2969 text = open(path).read() % {'libraries': set_of_lib} 2970 writer.write(text) 2971 2972 return True
2973
2974 - def write_phasespace_file(self, writer, nb_diag):
2975 """ """ 2976 2977 template = """ include 'maxparticles.inc' 2978 integer max_branches 2979 parameter (max_branches=max_particles-1) 2980 integer max_configs 2981 parameter (max_configs=%(nb_diag)s) 2982 2983 c channel position 2984 integer config_pos,perm_pos 2985 common /to_config/config_pos,perm_pos 2986 2987 """ 2988 2989 writer.write(template % {'nb_diag': nb_diag})
2990 2991 2992 #=========================================================================== 2993 # write_auto_dsig_file 2994 #===========================================================================
2995 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
2996 """Write the auto_dsig.f file for the differential cross section 2997 calculation, includes pdf call information (MadWeight format)""" 2998 2999 if not matrix_element.get('processes') or \ 3000 not matrix_element.get('diagrams'): 3001 return 0 3002 3003 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3004 3005 if ninitial < 1 or ninitial > 2: 3006 raise writers.FortranWriter.FortranWriterError, \ 3007 """Need ninitial = 1 or 2 to write auto_dsig file""" 3008 3009 replace_dict = {} 3010 3011 # Extract version number and date from VERSION file 3012 info_lines = self.get_mg5_info_lines() 3013 replace_dict['info_lines'] = info_lines 3014 3015 # Extract process info lines 3016 process_lines = self.get_process_info_lines(matrix_element) 3017 replace_dict['process_lines'] = process_lines 3018 3019 # Set proc_id 3020 replace_dict['proc_id'] = proc_id 3021 replace_dict['numproc'] = 1 3022 3023 # Set dsig_line 3024 if ninitial == 1: 3025 # No conversion, since result of decay should be given in GeV 3026 dsig_line = "pd(0)*dsiguu" 3027 else: 3028 # Convert result (in GeV) to pb 3029 dsig_line = "pd(0)*conv*dsiguu" 3030 3031 replace_dict['dsig_line'] = dsig_line 3032 3033 # Extract pdf lines 3034 pdf_vars, pdf_data, pdf_lines = \ 3035 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3036 replace_dict['pdf_vars'] = pdf_vars 3037 replace_dict['pdf_data'] = pdf_data 3038 replace_dict['pdf_lines'] = pdf_lines 3039 3040 # Lines that differ between subprocess group and regular 3041 if proc_id: 3042 replace_dict['numproc'] = int(proc_id) 3043 replace_dict['passcuts_begin'] = "" 3044 replace_dict['passcuts_end'] = "" 3045 # Set lines for subprocess group version 3046 # Set define_iconfigs_lines 3047 replace_dict['define_subdiag_lines'] = \ 3048 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3049 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3050 else: 3051 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3052 replace_dict['passcuts_end'] = "ENDIF" 3053 
replace_dict['define_subdiag_lines'] = "" 3054 3055 if writer: 3056 file = open(os.path.join(_file_path, \ 3057 'iolibs/template_files/auto_dsig_mw.inc')).read() 3058 3059 file = file % replace_dict 3060 # Write the file 3061 writer.writelines(file) 3062 else: 3063 return replace_dict
3064 #=========================================================================== 3065 # write_configs_file 3066 #===========================================================================
3067 - def write_configs_file(self, writer, matrix_element):
3068 """Write the configs.inc file for MadEvent""" 3069 3070 # Extract number of external particles 3071 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3072 3073 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3074 mapconfigs = [c[0] for c in configs] 3075 model = matrix_element.get('processes')[0].get('model') 3076 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3077 [[c[1]] for c in configs], 3078 mapconfigs, 3079 nexternal, ninitial,matrix_element, model)
3080 3081 #=========================================================================== 3082 # write_run_configs_file 3083 #===========================================================================
3084 - def write_run_config_file(self, writer):
3085 """Write the run_configs.inc file for MadWeight""" 3086 3087 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3088 text = open(path).read() % {'chanperjob':'5'} 3089 writer.write(text) 3090 return True
3091 3092 #=========================================================================== 3093 # write_configs_file_from_diagrams 3094 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element,
                                         model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns s_and_t_channels, the (s-channels, t-channels) pair kept
        for each written config (first contributing subprocess only).
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Max vertex size per config (first contributing diagram of each),
        # ignoring configs whose diagram reports no vertex leg numbers
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # placeholder PDG code for fake propagators of multiparticle vertices
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial,model,new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel entry: one vertex per subprocess, pick the
                    # first non-None one as representative
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #             (last_leg.get('number'), nconfigs, len(daughters),
                #              ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    # collect the s-channel PDG per subprocess (0 when the
                    # subprocess has no corresponding vertex)
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #             (last_leg.get('number'), nconfigs, nsubprocs,
                    #              ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #             (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #             (last_leg.get('number'), nconfigs,
                    #              abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #             (last_leg.get('number'), nconfigs, nsubprocs,
                    #              ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3237
3238 3239 #=============================================================================== 3240 # ProcessExporterFortranME 3241 #=============================================================================== 3242 -class ProcessExporterFortranME(ProcessExporterFortran):
3243 """Class to take care of exporting a set of matrix elements to 3244 MadEvent format.""" 3245 3246 matrix_file = "matrix_madevent_v4.inc" 3247
3248 - def copy_template(self, model):
3249 """Additional actions needed for setup of Template 3250 """ 3251 3252 super(ProcessExporterFortranME, self).copy_template(model) 3253 3254 # File created from Template (Different in some child class) 3255 filename = pjoin(self.dir_path,'Source','run_config.inc') 3256 self.write_run_config_file(writers.FortranWriter(filename)) 3257 3258 # The next file are model dependant (due to SLAH convention) 3259 self.model_name = model.get('name') 3260 # Add the symmetry.f 3261 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3262 self.write_symmetry(writers.FortranWriter(filename)) 3263 # 3264 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3265 self.write_addmothers(writers.FortranWriter(filename)) 3266 # Copy the different python file in the Template 3267 self.copy_python_file()
3268 3269 3270 3271 3272 3273 #=========================================================================== 3274 # generate_subprocess_directory 3275 #===========================================================================
3276 - def copy_python_file(self):
3277 """copy the python file require for the Template""" 3278 3279 # madevent interface 3280 cp(_file_path+'/interface/madevent_interface.py', 3281 self.dir_path+'/bin/internal/madevent_interface.py') 3282 cp(_file_path+'/interface/extended_cmd.py', 3283 self.dir_path+'/bin/internal/extended_cmd.py') 3284 cp(_file_path+'/interface/common_run_interface.py', 3285 self.dir_path+'/bin/internal/common_run_interface.py') 3286 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3287 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3288 cp(_file_path+'/iolibs/save_load_object.py', 3289 self.dir_path+'/bin/internal/save_load_object.py') 3290 cp(_file_path+'/iolibs/file_writers.py', 3291 self.dir_path+'/bin/internal/file_writers.py') 3292 #model file 3293 cp(_file_path+'../models/check_param_card.py', 3294 self.dir_path+'/bin/internal/check_param_card.py') 3295 3296 #copy all the file present in madevent directory 3297 for name in os.listdir(pjoin(_file_path, 'madevent')): 3298 if name not in ['__init__.py'] and name.endswith('.py'): 3299 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3300 3301 #madevent file 3302 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3303 cp(_file_path+'/various/lhe_parser.py', 3304 self.dir_path+'/bin/internal/lhe_parser.py') 3305 cp(_file_path+'/various/banner.py', 3306 self.dir_path+'/bin/internal/banner.py') 3307 cp(_file_path+'/various/histograms.py', 3308 self.dir_path+'/bin/internal/histograms.py') 3309 cp(_file_path+'/various/plot_djrs.py', 3310 self.dir_path+'/bin/internal/plot_djrs.py') 3311 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3312 3313 cp(_file_path+'/various/cluster.py', 3314 self.dir_path+'/bin/internal/cluster.py') 3315 cp(_file_path+'/madevent/combine_runs.py', 3316 self.dir_path+'/bin/internal/combine_runs.py') 3317 # logging configuration 3318 cp(_file_path+'/interface/.mg5_logging.conf', 3319 
self.dir_path+'/bin/internal/me5_logging.conf') 3320 cp(_file_path+'/interface/coloring_logging.py', 3321 self.dir_path+'/bin/internal/coloring_logging.py') 3322 # shower card and FO_analyse_card. 3323 # Although not needed, it is imported by banner.py 3324 cp(_file_path+'/various/shower_card.py', 3325 self.dir_path+'/bin/internal/shower_card.py') 3326 cp(_file_path+'/various/FO_analyse_card.py', 3327 self.dir_path+'/bin/internal/FO_analyse_card.py')
3328 3329
3330 - def convert_model(self, model, wanted_lorentz = [], 3331 wanted_couplings = []):
3332 3333 super(ProcessExporterFortranME,self).convert_model(model, 3334 wanted_lorentz, wanted_couplings) 3335 3336 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3337 try: 3338 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3339 except OSError as error: 3340 pass 3341 model_path = model.get('modelpath') 3342 # This is not safe if there is a '##' or '-' in the path. 3343 shutil.copytree(model_path, 3344 pjoin(self.dir_path,'bin','internal','ufomodel'), 3345 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3346 if hasattr(model, 'restrict_card'): 3347 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3348 'restrict_default.dat') 3349 if isinstance(model.restrict_card, check_param_card.ParamCard): 3350 model.restrict_card.write(out_path) 3351 else: 3352 files.cp(model.restrict_card, out_path)
3353 3354 #=========================================================================== 3355 # export model files 3356 #===========================================================================
3357 - def export_model_files(self, model_path):
3358 """export the model dependent files""" 3359 3360 super(ProcessExporterFortranME,self).export_model_files(model_path) 3361 3362 # Add the routine update_as_param in v4 model 3363 # This is a function created in the UFO 3364 text=""" 3365 subroutine update_as_param() 3366 call setpara('param_card.dat',.false.) 3367 return 3368 end 3369 """ 3370 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3371 ff.write(text) 3372 ff.close() 3373 3374 # Add the symmetry.f 3375 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3376 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3377 3378 # Modify setrun.f 3379 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3380 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3381 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3382 fsock.write(text) 3383 fsock.close() 3384 3385 self.make_model_symbolic_link()
3386 3387 #=========================================================================== 3388 # generate_subprocess_directory 3389 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                         fortran_model,
                                         me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        matrix_element -- the helas matrix element to export.
        fortran_model  -- helas call writer used to produce matrix.f.
        me_number      -- index of this matrix element; written to
                          iproc.dat and forwarded to write_matrix_element_v4.

        Returns the number of helas calls written (0 when none).
        """

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        # Cache the model on the exporter so later write_* calls can use it.
        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # The directory may already exist: warn but keep going.
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
             self.write_matrix_element_v4(writers.FortranWriter(filename),
                                          matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t channel decomposition and the
        # per-config QCD coupling counts reused by several files below.
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams
        filename = pjoin(Ppath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)

        # Normalise the return value: callers expect an integer count.
        if not calls:
            calls = 0
        return calls
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize ME v4 directory by creating jpeg diagrams, html
        pages, proc_card_mg5.dat and madevent.tar.gz.

        matrix_elements -- the generated matrix elements (a multiprocess
                           object or a list of subprocess groups).
        history         -- the command history, written as proc_card_mg5.dat.
        mg5options      -- dict of mg5 options (compilers, complex mass
                           scheme, ...).
        flaglist        -- flags controlling the finalisation ('nojpeg',
                           'online').
        """

        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        if 'online' in flaglist:
            online = True
        else:
            online = False

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        # indicate that the output type is not grouped
        if not isinstance(self, ProcessExporterFortranMEGroup):
            self.proc_characteristic['grouped_matrix'] = False
        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        # MSSM param cards need conversion to the MG5 format before use.
        modelname = self.opt['model']
        if modelname == 'mssm' or modelname.startswith('mssm-'):
            param_card = pjoin(self.dir_path, 'Cards','param_card.dat')
            mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
            check_param_card.convert_to_mg5card(param_card, mg5_param)
            check_param_card.check_valid_param_card(mg5_param)

        # Add the combine_events.f modify param_card path/number of @X
        filename = pjoin(self.dir_path,'Source','combine_events.f')
        # Count the distinct process ids; matrix_elements may be a list of
        # groups (first form) or a single multiprocess (AttributeError path).
        try:
            nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
        except AttributeError:
            nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
        nb_proc = len(set(nb_proc))
        self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements)

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)

        # Touch "done" file
        os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses'))

        # Check for compiler
        self.set_compiler(compiler)
        self.set_cpp_compiler(compiler['cpp'])

        old_pos = os.getcwd()
        subpath = pjoin(self.dir_path, 'SubProcesses')

        P_dir_list = [proc for proc in os.listdir(subpath)
                      if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P']

        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the poscript in jpg files (if authorize)
        if makejpg:
            # Best-effort removal of any stale placeholder image.
            try:
                os.remove(pjoin(self.dir_path,'HTML','card.jpg'))
            except Exception, error:
                pass

            # Only attempt the conversion if ghostscript is available.
            if misc.which('gs'):
                logger.info("Generate jpeg diagrams")
                for Pdir in P_dir_list:
                    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                              stdout = devnull, cwd=pjoin(subpath, Pdir))

        logger.info("Generate web pages")
        # Create the WebPage using perl script

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                  stdout = devnull,cwd=pjoin(self.dir_path))

        #os.chdir(os.path.pardir)

        obj = gen_infohtml.make_info_html(self.dir_path)

        if online:
            nb_channel = obj.rep_rule['nb_gen_diag']
            open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel))
        #add the information to proc_charac
        self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

        # Write command history as proc_card_mg5
        if os.path.isdir(pjoin(self.dir_path,'Cards')):
            output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull)

        #crate the proc_characteristic file
        self.create_proc_charac(matrix_elements, history)

        # create the run_card
        ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')):
            if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')):
                os.remove(pjoin(self.dir_path,'madevent.tar.gz'))
            misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                      stdout = devnull, cwd=self.dir_path)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull, cwd=self.dir_path)

        #return to the initial dir
        #os.chdir(old_pos)

    #===========================================================================
    # write_matrix_element_v4
    #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                proc_id = "", config_map = [], subproc_number = ""):
        """Export a matrix element to a matrix.f file in MG4 madevent format.

        writer         -- a FortranWriter, or a false value to get back the
                          raw replace_dict instead of writing the file.
        matrix_element -- the HelasMatrixElement to export.
        fortran_model  -- helas call writer producing the amplitude calls.
        proc_id        -- non-empty for grouped-subprocess output; selects
                          the SUBDIAG-based amp2 normalisation.
        config_map     -- forwarded to get_amp2_lines.
                          NOTE(review): mutable default argument; it appears
                          to be only read here, but confirm no callee
                          mutates it.

        Returns (number of non-comment helas calls, ncolor) when writing,
        otherwise the filled replace_dict (with 'return_value' set).
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                    "writer not FortranWriter")
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False

        # The proc prefix is not used for MadEvent output so it can safely be set
        # to an empty string.
        replace_dict = {'proc_prefix':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id

        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract IC line
        ic_line = self.get_ic_line(matrix_element)
        replace_dict['ic_line'] = ic_line

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        den_factor_line = self.get_den_factor_line(matrix_element)
        replace_dict['den_factor_line'] = den_factor_line

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract ndiags
        ndiags = len(matrix_element.get('diagrams'))
        replace_dict['ndiags'] = ndiags

        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] = \
             """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
             COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

        if proc_id:
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_iconfigs_lines'] += \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                            proc_id
        else:
            # Standard running
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        # Set the size of Wavefunction
        # Spin 4/5 particles (or an unknown model) need the large size.
        if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
            replace_dict['wavefunctionsize'] = 18
        else:
            replace_dict['wavefunctionsize'] = 6

        # Extract amp2 lines
        amp2_lines = self.get_amp2_lines(matrix_element, config_map)
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

        # The JAMP definition depends on the splitting order
        split_orders=matrix_element.get('processes')[0].get('split_orders')
        if len(split_orders)>0:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)
        else:
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'

        replace_dict['nAmpSplitOrders']=len(amp_orders)
        replace_dict['nSqAmpSplitOrders']=len(squared_orders)
        replace_dict['split_order_str_list']=str(split_orders)
        replace_dict['nSplitOrders']=max(len(split_orders),1)
        amp_so = self.get_split_orders_lines(
            [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
        sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
        replace_dict['ampsplitorders']='\n'.join(amp_so)
        replace_dict['sqsplitorders']='\n'.join(sqamp_so)

        # Extract JAMP lines
        # If no split_orders then artificiall add one entry called 'ALL_ORDERS'
        jamp_lines = self.get_JAMP_lines_split_order(\
            matrix_element,amp_orders,split_order_names=
            split_orders if len(split_orders)>0 else ['ALL_ORDERS'])
        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        replace_dict['template_file'] = pjoin(_file_path, \
                             'iolibs/template_files/%s' % self.matrix_file)
        replace_dict['template_file2'] = pjoin(_file_path, \
                             'iolibs/template_files/split_orders_helping_functions.inc')
        if writer:
            file = open(replace_dict['template_file']).read()
            file = file % replace_dict
            # Add the split orders helper functions.
            file = file + '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
            # Write the file
            writer.writelines(file)
            # Count only real helas calls, skipping '#'-comment lines.
            return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
        else:
            replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor)
            return replace_dict

    #===========================================================================
    # write_auto_dsig_file
    #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information.

        writer  -- FortranWriter, or a false value to get back
                   (replace_dict, context) instead of writing.
        proc_id -- non-empty for grouped-subprocess output; switches on
                   the SUBDIAG common block and disables the cut wrappers.

        Also updates self.proc_characteristic (ninitial, nexternal,
        max_n_matched_jets, colored_pdgs) as a side effect.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        self.proc_characteristic['ninitial'] = ninitial
        self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

        # Add information relevant for MLM matching:
        # Maximum QCD power in all the contributions
        max_qcd_order = 0
        for diag in matrix_element.get('diagrams'):
            orders = diag.calculate_orders()
            if 'QCD' in orders:
                max_qcd_order = max(max_qcd_order,orders['QCD'])
        # Count massless colored final-state particles per process.
        max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
            if proc.get('model').get_particle(id).get('mass')=='ZERO' and
               proc.get('model').get_particle(id).get('color')>1])
                                    for proc in matrix_element.get('processes'))
        # Maximum number of final state light jets to be matched
        self.proc_characteristic['max_n_matched_jets'] = max(
                               self.proc_characteristic['max_n_matched_jets'],
                               min(max_qcd_order,max_n_light_final_partons))

        # List of default pdgs to be considered for the CKKWl merging cut
        self.proc_characteristic['colored_pdgs'] = \
          sorted(list(set([abs(p.get('pdg_code')) for p in
          matrix_element.get('processes')[0].get('model').get('particles') if
          p.get('color')>1])))

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            replace_dict['cutsdone'] = ""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""
            replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

        # Grouped output provides its own good-helicity bookkeeping.
        if not isinstance(self, ProcessExporterFortranMEGroup):
            ncomb=matrix_element.get_helicity_combinations()
            replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
        else:
            replace_dict['read_write_good_hel'] = ""

        context = {'read_write_good_hel':True}

        if writer:
            file = open(pjoin(_file_path, \
                          'iolibs/template_files/auto_dsig_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file, context=context)
        else:
            return replace_dict, context

    #===========================================================================
    # write_coloramps_file
    #===========================================================================
3987 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
3988 """Write the coloramps.inc file for MadEvent""" 3989 3990 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 3991 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 3992 (max(len(matrix_element.get('color_basis').keys()), 1), 3993 len(mapconfigs))) 3994 3995 3996 # Write the file 3997 writer.writelines(lines) 3998 3999 return True
4000 4001 #=========================================================================== 4002 # write_colors_file 4003 #===========================================================================
    def write_colors_file(self, writer, matrix_elements):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        matrix_elements may be a single HelasMatrixElement or a list of
        them; the emitted Fortran function maps a PDG code to the color
        representation stored in the model.
        """

        # Accept a single matrix element as well as a list.
        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                    for wf in d.get('wavefunctions')],[]) \
                               for d in me.get('diagrams')], []) \
                          for me in matrix_elements], []))

        # Same for the external legs (including decay chains).
        leg_ids = set(sum([sum([sum([[l.get('id'),
                                      model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                     for l in p.get_legs_with_decays()], []) \
                                for p in me.get('processes')], []) \
                           for me in matrix_elements], []))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # Build the Fortran if/else-if chain, one branch per PDG code.
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
            get_color=%d
            return
            """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c           This is dummy particle used in multiparticle vertices
            get_color=2
            return
            """ % model.get_first_non_pdg()
        lines += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

        # Write the file
        writer.writelines(lines)

        return True

    #===========================================================================
    # write_config_nqcd_file
    #===========================================================================
4064 - def write_config_nqcd_file(self, writer, nqcd_list):
4065 """Write the config_nqcd.inc with the number of QCD couplings 4066 for each config""" 4067 4068 lines = [] 4069 for iconf, n in enumerate(nqcd_list): 4070 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4071 4072 # Write the file 4073 writer.writelines(lines) 4074 4075 return True
4076 4077 #=========================================================================== 4078 # write_maxconfigs_file 4079 #===========================================================================
4080 - def write_maxconfigs_file(self, writer, matrix_elements):
4081 """Write the maxconfigs.inc file for MadEvent""" 4082 4083 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4084 maxconfigs = max([me.get_num_configs() for me in \ 4085 matrix_elements.get('matrix_elements')]) 4086 else: 4087 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4088 4089 lines = "integer lmaxconfigs\n" 4090 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4091 4092 # Write the file 4093 writer.writelines(lines) 4094 4095 return True
4096 4097 #=========================================================================== 4098 # read_write_good_hel 4099 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """return the code to read/write the good_hel common_block

        ncomb is the number of helicity combinations; it sizes the
        GOODHEL array in the generated Fortran subroutines
        (write_good_hel / read_good_hel / init_good_hel) plus the
        get_maxsproc helper, which is always 1 for ungrouped output.
        """

        convert = {'ncomb' : ncomb}
        # Plain Fortran source returned as a string; the %(ncomb)d
        # placeholders are filled from `convert` below.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I) = .false.
        enddo
        NTRY = 0
        end

        integer function get_maxsproc()
        implicit none
        get_maxsproc = 1
        return
        end

        """ % convert

        return output

    #===========================================================================
    # write_config_subproc_map_file
    #===========================================================================
4159 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4160 """Write a dummy config_subproc.inc file for MadEvent""" 4161 4162 lines = [] 4163 4164 for iconfig in range(len(s_and_t_channels)): 4165 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4166 (iconfig + 1)) 4167 4168 # Write the file 4169 writer.writelines(lines) 4170 4171 return True
4172 4173 #=========================================================================== 4174 # write_configs_file 4175 #===========================================================================
4176 - def write_configs_file(self, writer, matrix_element):
4177 """Write the configs.inc file for MadEvent""" 4178 4179 # Extract number of external particles 4180 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4181 4182 model = matrix_element.get('processes')[0].get('model') 4183 configs = [(i+1, d) for (i, d) in \ 4184 enumerate(matrix_element.get('diagrams'))] 4185 mapconfigs = [c[0] for c in configs] 4186 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4187 [[c[1]] for c in configs], 4188 mapconfigs, 4189 nexternal, ninitial, 4190 model)
4191 4192 #=========================================================================== 4193 # write_run_configs_file 4194 #===========================================================================
4195 - def write_run_config_file(self, writer):
4196 """Write the run_configs.inc file for MadEvent""" 4197 4198 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4199 4200 if self.proc_characteristic['loop_induced']: 4201 job_per_chan = 1 4202 else: 4203 job_per_chan = 5 4204 4205 if writer: 4206 text = open(path).read() % {'chanperjob': job_per_chan} 4207 writer.write(text) 4208 return True 4209 else: 4210 return {'chanperjob': job_per_chan}
4211 4212 #=========================================================================== 4213 # write_configs_file_from_diagrams 4214 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list): the channel decomposition
        kept for each written config and the number of QCD couplings of
        the first contributing diagram of each config.
        """

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # Smallest maximum vertex size over all configs; configs with
        # larger vertices are dropped below (only 3-vertices allowed).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # PDG code used for dummy (multiparticle-vertex) propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            # NOTE: empty_verts is shared (aliased) by every subprocess
            # without a diagram and is resized in place further down.
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            # Number of QCD couplings in this diagram
            # (taken from the first contributing subprocess diagram).
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                if verts in schannels:
                    # s-channel: tuple over subprocesses, pick the first
                    # non-None vertex for the topology.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess (0 when the
                    # subprocess has no diagram for this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list

    #===========================================================================
    # write_decayBW_file
    #===========================================================================
4352 - def write_decayBW_file(self, writer, s_and_t_channels):
4353 """Write the decayBW.inc file for MadEvent""" 4354 4355 lines = [] 4356 4357 booldict = {None: "0", True: "1", False: "2"} 4358 4359 for iconf, config in enumerate(s_and_t_channels): 4360 schannels = config[0] 4361 for vertex in schannels: 4362 # For the resulting leg, pick out whether it comes from 4363 # decay or not, as given by the onshell flag 4364 leg = vertex.get('legs')[-1] 4365 lines.append("data gForceBW(%d,%d)/%s/" % \ 4366 (leg.get('number'), iconf + 1, 4367 booldict[leg.get('onshell')])) 4368 4369 # Write the file 4370 writer.writelines(lines) 4371 4372 return True
4373 4374 #=========================================================================== 4375 # write_dname_file 4376 #===========================================================================
4377 - def write_dname_file(self, writer, dir_name):
4378 """Write the dname.mg file for MG4""" 4379 4380 line = "DIRNAME=%s" % dir_name 4381 4382 # Write the file 4383 writer.write(line + "\n") 4384 4385 return True
4386 4387 #=========================================================================== 4388 # write_driver 4389 #===========================================================================
def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
    """Write the SubProcess/driver.f file for MG4.

    When *writer* is false, return the template substitution dictionary
    instead of writing the file.
    """
    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_driver.f')

    # MSSM models read their parameters from the translated MG5 card.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # Require each helicity configuration to be probed by 10 matrix-element
    # points before trusting the grid for MC-over-helicity sampling; the
    # factor 2 accounts for the two IMIRROR calls of each grouped
    # subprocess.
    replace_dict = {
        'param_card_name': card,
        'ncomb': ncomb,
        'hel_init_points': n_grouped_proc * 10 * 2,
        'secondparam': '' if v5 else ',.true.',
    }

    if not writer:
        return replace_dict

    writer.write(open(template).read() % replace_dict)
    return True
4418 4419 #=========================================================================== 4420 # write_addmothers 4421 #===========================================================================
def write_addmothers(self, writer):
    """Write SubProcess/addmothers.f, substituting diag_number for the
    %(iconfig)s placeholder of the template."""
    template = pjoin(_file_path, 'iolibs', 'template_files', 'addmothers.f')
    writer.write(open(template).read() % {'iconfig': 'diag_number'})
    return True
4431 4432 4433 #=========================================================================== 4434 # write_combine_events 4435 #===========================================================================
def write_combine_events(self, writer, nb_proc=100):
    """Write the SubProcess combine_events file for MG4.

    nb_proc: number of @X entries in the process card (sets maxpup).
    """
    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_combine_events.f')

    # MSSM models read their parameters from the translated MG5 card.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # The +1 on maxpup is just a safety margin; not strictly needed
    # but the original author (OM) felt safer with it.
    writer.write(open(template).read() % {'param_card_name': card,
                                          'maxpup': nb_proc + 1})
    return True
4453 4454 4455 #=========================================================================== 4456 # write_symmetry 4457 #===========================================================================
def write_symmetry(self, writer, v5=True):
    """Write the SubProcess symmetry file for MadEvent.

    When *writer* is false, return the template substitution dictionary
    instead of writing the file.
    """
    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_symmetry.f')

    # MSSM models read their parameters from the translated MG5 card.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    replace_dict = {'param_card_name': card,
                    'setparasecondarg': '' if v5 else ',.true.'}

    if not writer:
        return replace_dict

    writer.write(open(template).read() % replace_dict)
    return True
4480 4481 4482 4483 #=========================================================================== 4484 # write_iproc_file 4485 #===========================================================================
def write_iproc_file(self, writer, me_number):
    """Write the iproc.dat file for MG4 (1-based matrix-element number)."""
    for formatted in writer.write_line("%d" % (me_number + 1)):
        writer.write(formatted)
    return True
4494 4495 #=========================================================================== 4496 # write_mg_sym_file 4497 #===========================================================================
def write_mg_sym_file(self, writer, matrix_element):
    """Write the mg.sym file for MadEvent.

    Lists, for each set of identical final-state particles of the process
    with all decays included, the (1-based) positions of those particles,
    so MadEvent can symmetrize over them.

    Fixes two fragile idioms of the original: len(filter(...)) (fails on
    Python 3 where filter is lazy) and deleting dict keys while iterating
    d.keys() (RuntimeError on Python 3).  Behaviour is unchanged.
    """
    lines = []

    # Extract process with all decays included
    process = matrix_element.get('processes')[0]
    final_legs = [leg for leg in process.get_legs_with_decays()
                  if leg.get('state') == True]

    ninitial = len([leg for leg in process.get('legs')
                    if leg.get('state') == False])

    # Group the (1-based) positions of identical final-state particles
    # by PDG id.
    identical_indices = {}
    for i, leg in enumerate(final_legs):
        identical_indices.setdefault(leg.get('id'), []).append(
            i + ninitial + 1)

    # Keep only ids appearing more than once; iterate over a snapshot of
    # the keys since entries are deleted during the loop.
    for key in list(identical_indices.keys()):
        if len(identical_indices[key]) < 2:
            del identical_indices[key]

    # File layout: number of groups, then for each group its size
    # followed by the particle positions.
    lines.append(str(len(identical_indices)))
    for key in identical_indices.keys():
        lines.append(str(len(identical_indices[key])))
        for number in identical_indices[key]:
            lines.append(str(number))

    writer.writelines(lines)

    return True
4536 4537 #=========================================================================== 4538 # write_mg_sym_file 4539 #===========================================================================
def write_default_mg_sym_file(self, writer):
    """Write a trivial mg.sym file (no identical-particle symmetry)."""
    writer.writelines("0")
    return True
4549 4550 #=========================================================================== 4551 # write_ncombs_file 4552 #===========================================================================
def write_ncombs_file(self, writer, nexternal):
    """Write the ncombs.inc file for MadEvent.

    n_max_cl, used for clustering, is 2**nexternal.
    """
    # Renamed the local from `file` to avoid shadowing the builtin.
    content = (" integer n_max_cl\n"
               "parameter (n_max_cl=%d)" % (2 ** nexternal))
    writer.writelines(content)
    return True
4564 4565 #=========================================================================== 4566 # write_processes_file 4567 #===========================================================================
def write_processes_file(self, writer, subproc_group):
    """Write the processes.dat file describing the subprocesses in this
    group.

    One numbered line per matrix element listing its processes, each
    followed by either the mirrored processes or "mirror none".
    """
    lines = []

    for ime, me in enumerate(subproc_group.get('matrix_elements')):
        label = str(ime + 1)
        label += " " * (7 - len(label))
        lines.append("%s %s" % (label,
                                ",".join(p.base_string() for p in
                                         me.get('processes'))))

        if not me.get('has_mirror_process'):
            lines.append("mirror none")
            continue

        # Build the mirror processes by swapping the two initial legs.
        mirror_procs = []
        for proc in me.get('processes'):
            mirror = copy.copy(proc)
            legs = copy.copy(mirror.get('legs_with_decays'))
            legs.insert(0, legs.pop(1))
            mirror.set("legs_with_decays", legs)
            mirror_procs.append(mirror)
        lines.append("mirror %s" %
                     ",".join(p.base_string() for p in mirror_procs))

    writer.write("\n".join(lines))

    return True
4594 4595 #=========================================================================== 4596 # write_symswap_file 4597 #===========================================================================
def write_symswap_file(self, writer, ident_perms):
    """Write symswap.inc for MG4, listing the external-momentum
    permutations relating identical diagrams, plus their number."""
    lines = ["data (isym(i,%d),i=1,nexternal)/%s/" %
             (iperm + 1, ",".join(str(i + 1) for i in perm))
             for iperm, perm in enumerate(ident_perms)]
    lines.append("data nsym/%d/" % len(ident_perms))

    writer.writelines(lines)

    return True
4615 4616 #=========================================================================== 4617 # write_symfact_file 4618 #===========================================================================
def write_symfact_file(self, writer, symmetry):
    """Write symfact.dat for MG4: one "<config> <factor>" line per
    non-zero symmetry entry, with column widths scaled to the number of
    configurations."""
    width = max(2, int(math.ceil(math.log10(len(symmetry)))))
    template = "%%%dr %%%dr" % (width, width + 1)

    lines = []
    for iconf, factor in enumerate(symmetry):
        if factor != 0:
            lines.append(template % (iconf + 1, factor))

    writer.write('\n'.join(lines))
    writer.write('\n')

    return True
4633 4634 #=========================================================================== 4635 # write_symperms_file 4636 #===========================================================================
def write_symperms_file(self, writer, perms):
    """Write symperms.inc for a subprocess group, listing the momentum
    permutations used for symmetric configurations."""
    lines = ["data (perms(i,%d),i=1,nexternal)/%s/" %
             (iperm + 1, ",".join(str(i + 1) for i in perm))
             for iperm, perm in enumerate(perms)]

    writer.writelines(lines)

    return True
4650 4651 #=========================================================================== 4652 # write_subproc 4653 #===========================================================================
def write_subproc(self, writer, subprocdir):
    """Append this subprocess directory name to the subproc.mg file."""
    writer.write("%s\n" % subprocdir)
    return True
4661
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Fortran template used for the grouped matrix<N>.f routines.
    matrix_file = "matrix_madevent_group_v4.inc"
    # Identifies this exporter's grouped output mode to the rest of the
    # export machinery.
    grouped_mode = 'madevent'

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                           fortran_model,
                                           group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helicity-amplitude calls summed over
        the group's matrix elements (0 when the Pn directory cannot be
        entered).  NOTE: temporarily chdirs into SubProcesses/Pn; the
        original working directory is restored before returning.
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        # Lazily record the model from the first process of the group.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0      # largest diagram count over the group
        maxflows = 0     # largest color-flow count over the group
        tot_calls = 0    # accumulated helicity-amplitude calls

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f; all grouped ME's must share the same number of
        # helicity configurations since they share one driver.
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb!=me.get_helicity_combinations():
                raise MadGraph5Error, "All grouped processes must share the "+\
                                      "same number of helicity configurations."

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename),ncomb,
                          n_grouped_proc=len(matrix_elements),
                          v5=self.opt['v5_model'])

        # One matrix<N>.f / auto_dsig<N>.f / diagram plot per matrix element.
        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime+1)
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             proc_id=str(ime+1),
                                             config_map=subproc_group.get('diagram_maps')[ime],
                                             subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = "matrix%d.ps" % (ime+1)
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                              matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # (matrix_element here is the last element of the loop above; all
        # grouped MEs share the same external multiplicity.)
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number.
        # If a subprocess has no diagrams for this config, the number is 0.
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        # symfact is written through a plain file, not a FortranWriter
        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
4899 4900 #=========================================================================== 4901 # write_super_auto_dsig_file 4902 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write the auto_dsig.f file dispatching between the subprocesses
    of a subprocess group.

    When *writer* is false, return the template substitution dictionary
    instead of writing the file.
    """
    matrix_elements = subproc_group.get('matrix_elements')

    replace_dict = {}

    # Version number and date banner from the VERSION file.
    replace_dict['info_lines'] = self.get_mg5_info_lines()

    # One process-description block per grouped matrix element.
    replace_dict['process_lines'] = '\n'.join(
        self.get_process_info_lines(me) for me in matrix_elements)

    nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal

    # Two entries per matrix element (the process and its mirror).
    replace_dict['nsprocs'] = 2 * len(matrix_elements)

    # Declaration of the per-subprocess DSIG<N> functions.
    replace_dict["dsig_def_line"] = "DOUBLE PRECISION " + ",".join(
        "DSIG%d" % (iproc + 1) for iproc in range(len(matrix_elements)))

    # Dispatch lines: IPROC selects which DSIG<N> to call.
    call_lines = []
    for iproc, me in enumerate(matrix_elements):
        call_lines.append(
            "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s"
            % {"num": iproc + 1,
               "proc": me.get('processes')[0].base_string()})
    replace_dict['call_dsig_proc_lines'] = "\n".join(call_lines)

    ncomb = matrix_elements[0].get_helicity_combinations()
    replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

    if not writer:
        return replace_dict

    template = open(pjoin(_file_path,
        'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()

    # Write the file
    writer.writelines(template % replace_dict)
4952 4953 #=========================================================================== 4954 # write_mirrorprocs 4955 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write mirrorprocs.inc, flagging which processes of the group have
    an initial-state mirror process."""
    flags = {True: '.true.', False: '.false.'}
    matrix_elements = subproc_group.get('matrix_elements')
    entries = ",".join(flags[me.get('has_mirror_process')]
                       for me in matrix_elements)

    writer.writelines(["DATA (MIRRORPROCS(I),I=1,%d)/%s/" %
                       (len(matrix_elements), entries)])
4969 4970 #=========================================================================== 4971 # write_addmothers 4972 #===========================================================================
def write_addmothers(self, writer):
    """Write SubProcess/addmothers.f for grouped subprocesses,
    substituting lconfig for the %(iconfig)s placeholder."""
    template = pjoin(_file_path, 'iolibs', 'template_files', 'addmothers.f')
    writer.write(open(template).read() % {'iconfig': 'lconfig'})
    return True
4982 4983 4984 #=========================================================================== 4985 # write_coloramps_file 4986 #===========================================================================
def write_coloramps_file(self, writer, diagrams_for_config, maxflows,
                         matrix_elements):
    """Write the coloramps.inc file for MadEvent in subprocess-group
    mode."""
    # Map each subprocess (matrix element) to its diagram number for
    # every configuration (0 when the subprocess does not contribute).
    diags_per_subproc = {}
    for config in diagrams_for_config:
        for isubproc, diag in enumerate(config):
            diags_per_subproc.setdefault(isubproc, []).append(diag)

    lines = []
    for isubproc in sorted(diags_per_subproc):
        lines.extend(self.get_icolamp_lines(diags_per_subproc[isubproc],
                                            matrix_elements[isubproc],
                                            isubproc + 1))

    lines.insert(0, "logical icolamp(%d,%d,%d)" %
                 (maxflows, len(diagrams_for_config),
                  len(matrix_elements)))

    writer.writelines(lines)

    return True
5018 5019 #=========================================================================== 5020 # write_config_subproc_map_file 5021 #===========================================================================
def write_config_subproc_map_file(self, writer, config_subproc_map):
    """Write config_subproc_map.inc for subprocess groups, skipping
    configurations with no contributing diagram in any subprocess."""
    lines = []
    iconfig = 0
    for config in config_subproc_map:
        if set(config) == set([0]):
            # No subprocess contributes to this configuration
            continue
        iconfig += 1
        lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" %
                     (iconfig, len(config),
                      ",".join(str(i) for i in config)))

    writer.writelines(lines)

    return True
5039 5040 #=========================================================================== 5041 # read_write_good_hel 5042 #===========================================================================
def read_write_good_hel(self, ncomb):
    """Return the Fortran code handling the good-helicity common block:
    write_good_hel/read_good_hel stream (de)serialisation,
    init_good_hel initialisation and the get_maxsproc helper."""

    return """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
            GOODHEL(I,1) = .false.
            GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % {'ncomb': ncomb}
5104 5105 5106 5107 #=========================================================================== 5108 # write_configs_file 5109 #===========================================================================
def write_configs_file(self, writer, subproc_group, diagrams_for_config):
    """Write the configs.inc file with topology information for a
    subprocess group.

    For each configuration with at least one contributing diagram,
    collect the corresponding diagram of every subprocess (None where a
    subprocess does not contribute), then delegate the actual writing to
    write_configs_file_from_diagrams.  Returns (number of configs,
    result of the delegate call).
    """
    matrix_elements = subproc_group.get('matrix_elements')
    model = matrix_elements[0].get('processes')[0].get('model')

    diagrams = []
    config_numbers = []
    for iconfig, config in enumerate(diagrams_for_config):
        if set(config) == set([0]):
            # No subprocess has a diagram for this configuration
            continue
        per_subproc = []
        for isub, idiag in enumerate(config):
            if idiag:
                per_subproc.append(
                    matrix_elements[isub].get('diagrams')[idiag - 1])
            else:
                per_subproc.append(None)
        diagrams.append(per_subproc)
        config_numbers.append(iconfig + 1)

    # Extract number of external particles
    (nexternal, ninitial) = subproc_group.get_nexternal_ninitial()

    return len(diagrams), \
           self.write_configs_file_from_diagrams(writer, diagrams,
                                                 config_numbers,
                                                 nexternal, ninitial,
                                                 model)
5142 5143 #=========================================================================== 5144 # write_run_configs_file 5145 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Loop-induced processes get one channel per job, all others two.
    """
    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_run_config.inc')
    job_per_chan = 1 if self.proc_characteristic['loop_induced'] else 2
    writer.write(open(template).read() % {'chanperjob': job_per_chan})
    return True
5157 5158 5159 #=========================================================================== 5160 # write_leshouche_file 5161 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Write the leshouche.inc file for MG4, one block of lines per
    matrix element of the group."""
    all_lines = []
    for iproc, matrix_element in enumerate(
            subproc_group.get('matrix_elements')):
        all_lines.extend(self.get_leshouche_lines(matrix_element, iproc))

    writer.writelines(all_lines)

    return True
5174 5175
    def finalize(self,*args, **opts):
        """Run the standard MadEvent finalisation, then record that this
        output was produced in grouped-matrix-element mode."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        # Ensure that the grouping information carries the correct value
        # for downstream consumers of proc_characteristic.
        self.proc_characteristic['grouped_matrix'] = True
#===============================================================================
# UFO_model_to_mg4
#===============================================================================

def python_to_fortran(x):
    """Translate a python/UFO expression string into Fortran syntax.

    Replaces the original ``python_to_fortran = lambda x: ...`` binding
    (PEP 8 discourages assigning a lambda to a name); a fresh
    UFOExpressionParserFortran is still instantiated on every call, so
    behaviour is unchanged.
    """
    return parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type names used for the multiple-precision (quadruple) output.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
def __init__(self, model, output_path, opt=None):
    """Store the model and the output directory, set up default export
    options and the double/quadruple precision expression parsers."""
    self.model = model
    self.model_name = model['name']
    self.dir_path = output_path

    # Default export options, overridden by whatever *opt* supplies.
    self.opt = {'complex_mass': False,
                'export_format': 'madevent',
                'mp': True,
                'loop_induced': False}
    if opt:
        self.opt.update(opt)

    # Parameters/couplings split by alpha_s dependence; entries are
    # (name, expression, type) objects.
    self.coups_dep = []
    self.coups_indep = []
    self.params_dep = []
    self.params_indep = []
    self.params_ext = []      # external (param_card) parameters

    # Double- and multiple-precision expression translators.
    self.p_to_f = parsers.UFOExpressionParserFortran()
    self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5223
5225 """modify the parameter if some of them are identical up to the case""" 5226 5227 lower_dict={} 5228 duplicate = set() 5229 keys = self.model['parameters'].keys() 5230 for key in keys: 5231 for param in self.model['parameters'][key]: 5232 lower_name = param.name.lower() 5233 if not lower_name: 5234 continue 5235 try: 5236 lower_dict[lower_name].append(param) 5237 except KeyError,error: 5238 lower_dict[lower_name] = [param] 5239 else: 5240 duplicate.add(lower_name) 5241 logger.debug('%s is define both as lower case and upper case.' 5242 % lower_name) 5243 if not duplicate: 5244 return 5245 5246 re_expr = r'''\b(%s)\b''' 5247 to_change = [] 5248 change={} 5249 for value in duplicate: 5250 for i, var in enumerate(lower_dict[value]): 5251 to_change.append(var.name) 5252 new_name = '%s%s' % (var.name.lower(), 5253 ('__%d'%(i+1) if i>0 else '')) 5254 change[var.name] = new_name 5255 var.name = new_name 5256 5257 # Apply the modification to the map_CTcoup_CTparam of the model 5258 # if it has one (giving for each coupling the CT parameters whcih 5259 # are necessary and which should be exported to the model. 
5260 if hasattr(self.model,'map_CTcoup_CTparam'): 5261 for coup, ctparams in self.model.map_CTcoup_CTparam: 5262 for i, ctparam in enumerate(ctparams): 5263 try: 5264 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5265 except KeyError: 5266 pass 5267 5268 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5269 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5270 5271 # change parameters 5272 for key in keys: 5273 if key == ('external',): 5274 continue 5275 for param in self.model['parameters'][key]: 5276 param.expr = rep_pattern.sub(replace, param.expr) 5277 5278 # change couplings 5279 for key in self.model['couplings'].keys(): 5280 for coup in self.model['couplings'][key]: 5281 coup.expr = rep_pattern.sub(replace, coup.expr) 5282 5283 # change mass/width 5284 for part in self.model['particles']: 5285 if str(part.get('mass')) in to_change: 5286 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5287 if str(part.get('width')) in to_change: 5288 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5289
    def refactorize(self, wanted_couplings = []):
        """Sort the model's parameters and couplings into the alpha_s
        dependent/independent/external lists used by the MG4 output.

        wanted_couplings: optional list of coupling names; when non-empty,
            only those couplings are kept.
        """

        # Keep only separation in alphaS.
        # NOTE(review): list-returning dict.keys() + in-place sort is
        # Python-2 only; confirm before porting to Python 3.
        keys = self.model['parameters'].keys()
        keys.sort(key=len)
        for key in keys:
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                # Depends on aS/MU_R -> must be recomputed per PS point.
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        # NOTE(review): this sorted key list is not actually used below —
        # the loop iterates .items() in dict order; looks like a latent
        # inconsistency, kept as-is to preserve behaviour.
        keys = self.model['couplings'].keys()
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related
        # computation. Pass G in the independant list.
        # NOTE(review): the membership test compares a string against
        # parameter objects — presumably ModelVariable compares equal to
        # its name; confirm against base_objects.
        if 'G' in self.params_dep:
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
        #    G.expr = '2*cmath.sqrt(as*pi)'
        #    self.params_indep.insert(0, self.params_dep.pop(index))
        # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real'))
            self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5332 - def build(self, wanted_couplings = [], full=True):
5333 """modify the couplings to fit with MG4 convention and creates all the 5334 different files""" 5335 5336 self.pass_parameter_to_case_insensitive() 5337 self.refactorize(wanted_couplings) 5338 5339 # write the files 5340 if full: 5341 if wanted_couplings: 5342 # extract the wanted ct parameters 5343 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5344 self.write_all()
5345 5346
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        name    -- file name, created under self.dir_path.
        comment -- comment character used to draw the header banner
                   ('c' for Fortran, '#' for makefiles, ...).
        format  -- 'fortran' wraps the file in a FortranWriter (which
                   reformats lines on write); anything else returns a plain
                   file object.
        Returns the open, writable file-like object.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            fsock = open(file_path, 'w')

        # NOTE(review): calling the unbound Python 2 builtin
        # ``file.writelines`` with fsock as first argument writes the header
        # verbatim — presumably deliberate, to bypass any writelines
        # override of FortranWriter.  Python 2 only (``file`` is gone in
        # Python 3); confirm before porting.
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                        {'comment': comment + (6 - len(comment)) * ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5363 5364
    def write_all(self):
        """ write all the files

        Drives the full export: each create_* helper writes one output file
        under self.dir_path.  The call order mirrors the logical dependency
        of the generated Fortran (parameters before couplings).
        """
        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            # multiple-precision variant only when requested
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()

        # All the standard files
        self.copy_standard_file()
5397 5398 ############################################################################ 5399 ## ROUTINE CREATING THE FILES ############################################ 5400 ############################################################################ 5401
5402 - def copy_standard_file(self):
5403 """Copy the standard files for the fortran model.""" 5404 5405 #copy the library files 5406 file_to_link = ['formats.inc','printout.f', \ 5407 'rw_para.f', 'testprog.f'] 5408 5409 for filename in file_to_link: 5410 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5411 self.dir_path) 5412 5413 file = open(os.path.join(MG5DIR,\ 5414 'models/template_files/fortran/rw_para.f')).read() 5415 5416 includes=["include \'coupl.inc\'","include \'input.inc\'", 5417 "include \'model_functions.inc\'"] 5418 if self.opt['mp']: 5419 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5420 # In standalone and madloop we do no use the compiled param card but 5421 # still parse the .dat one so we must load it. 5422 if self.opt['loop_induced']: 5423 #loop induced follow MadEvent way to handle the card. 5424 load_card = '' 5425 lha_read_filename='lha_read.f' 5426 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5427 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5428 lha_read_filename='lha_read_mp.f' 5429 elif self.opt['export_format'].startswith('standalone') \ 5430 or self.opt['export_format'] in ['madweight', 'plugin']\ 5431 or self.opt['export_format'].startswith('matchbox'): 5432 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5433 lha_read_filename='lha_read.f' 5434 else: 5435 load_card = '' 5436 lha_read_filename='lha_read.f' 5437 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5438 os.path.join(self.dir_path,'lha_read.f')) 5439 5440 file=file%{'includes':'\n '.join(includes), 5441 'load_card':load_card} 5442 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5443 writer.writelines(file) 5444 writer.close() 5445 5446 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5447 or self.opt['loop_induced']: 5448 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5449 self.dir_path + '/makefile') 5450 if 
self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5451 path = pjoin(self.dir_path, 'makefile') 5452 text = open(path).read() 5453 text = text.replace('madevent','aMCatNLO') 5454 open(path, 'w').writelines(text) 5455 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5456 'madloop','madloop_optimized', 'standalone_rw', 5457 'madweight','matchbox','madloop_matchbox', 'plugin']: 5458 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5459 self.dir_path + '/makefile') 5460 #elif self.opt['export_format'] in []: 5461 #pass 5462 else: 5463 raise MadGraph5Error('Unknown format')
5464
    def create_coupl_inc(self):
        """ write coupling.inc

        Emits coupl.inc (double precision declarations + COMMON blocks for
        G, gal, MU_R, Nf, masses, widths, couplings and — in the complex
        mass scheme — complex masses).  When self.opt['mp'] is set, the
        same declarations are also written in multiple precision, twice:
        mp_coupl.inc with the MP prefix on every symbol and
        mp_coupl_same_name.inc with the original names.
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """
            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                        'complex_mp_format':self.mp_complex_format,
                                        'mp_prefix':self.mp_prefix})
            # same header, but keeping the double-precision symbol names
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                        'complex_mp_format':self.mp_complex_format,
                                        'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # complex mass only for massive particles with a width
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                                    ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                                    ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                                ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                                    ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5584
5585 - def create_write_couplings(self):
5586 """ write the file coupl_write.inc """ 5587 5588 fsock = self.open('coupl_write.inc', format='fortran') 5589 5590 fsock.writelines("""write(*,*) ' Couplings of %s' 5591 write(*,*) ' ---------------------------------' 5592 write(*,*) ' '""" % self.model_name) 5593 def format(coupl): 5594 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5595 5596 # Write the Couplings 5597 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5598 fsock.writelines('\n'.join(lines)) 5599 5600
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares every real parameter in COMMON /params_R/ and every complex
        one in COMMON /params_C/, skipping masses/widths (already in
        coupl.inc), G, MU_R, ZERO, and unneeded CT parameters.  With
        self.opt['mp'], the MP-prefixed variants go into mp_input.inc.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # parameter names handled elsewhere must not be re-declared here
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real' and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n\n')

        # NOTE: unlike the real case, external complex parameters are not
        # collected here — only dependent/independent ones.
        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n\n')
5657
5658 - def check_needed_param(self, param):
5659 """ Returns whether the parameter in argument is needed for this 5660 specific computation or not.""" 5661 5662 # If this is a leading order model or if there was no CT parameter 5663 # employed in this NLO model, one can directly return that the 5664 # parameter is needed since only CTParameters are filtered. 5665 if not hasattr(self, 'allCTparameters') or \ 5666 self.allCTparameters is None or self.usedCTparameters is None or \ 5667 len(self.allCTparameters)==0: 5668 return True 5669 5670 # We must allow the conjugate shorthand for the complex parameter as 5671 # well so we check wether either the parameter name or its name with 5672 # 'conjg__' substituted with '' is present in the list. 5673 # This is acceptable even if some parameter had an original name 5674 # including 'conjg__' in it, because at worst we export a parameter 5675 # was not needed. 5676 param = param.lower() 5677 cjg_param = param.replace('conjg__','',1) 5678 5679 # First make sure it is a CTparameter 5680 if param not in self.allCTparameters and \ 5681 cjg_param not in self.allCTparameters: 5682 return True 5683 5684 # Now check if it is in the list of CTparameters actually used 5685 return (param in self.usedCTparameters or \ 5686 cjg_param in self.usedCTparameters)
5687
5688 - def extract_needed_CTparam(self,wanted_couplings=[]):
5689 """ Extract what are the needed CT parameters given the wanted_couplings""" 5690 5691 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5692 # Setting these lists to none wil disable the filtering in 5693 # check_needed_param 5694 self.allCTparameters = None 5695 self.usedCTparameters = None 5696 return 5697 5698 # All CTparameters appearin in all CT couplings 5699 allCTparameters=self.model.map_CTcoup_CTparam.values() 5700 # Define in this class the list of all CT parameters 5701 self.allCTparameters=list(\ 5702 set(itertools.chain.from_iterable(allCTparameters))) 5703 5704 # All used CT couplings 5705 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5706 allUsedCTCouplings = [coupl for coupl in 5707 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5708 5709 # Now define the list of all CT parameters that are actually used 5710 self.usedCTparameters=list(\ 5711 set(itertools.chain.from_iterable([ 5712 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5713 ]))) 5714 5715 # Now at last, make these list case insensitive 5716 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5717 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5718
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.

        NOTE(review): the independent-parameter loop uses ``if dp: ... if
        mp:`` but the dependent-parameter loop and the gal(1) blocks use
        ``if dp: ... elif mp:``, so dp and mp would not both be emitted
        there in a single call.  The call sites in write_all only ever set
        one flag at a time, so this asymmetry is currently harmless —
        confirm before calling with dp=True, mp=True.
        """

        fsock = self.open('%sintparam_definition.inc'%
                          ('mp_' if mp and not dp else ''), format='fortran')

        # Parameters evaluated once, guarded by the READLHA flag set by the
        # generated coup()/update_as_param() subroutines.
        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        # Parameters recomputed for every event (alphaS-dependent).
        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        # gal(1) drives the aqed value written to the LHE file; its
        # definition depends on the EW input scheme of the model.
        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            # alpha(mZ) scheme: gal(1) = sqrt(4*pi*alpha_EW)
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                                     gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                                     %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
                pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                                     gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                                     %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
                pass
        else:
            # neither scheme available: fall back on unit coupling
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
                                     gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                                     %(mp_prefix)sgal(2) = 1e0_16
                """%{'mp_prefix':self.mp_prefix})
5802 5803
5804 - def create_couplings(self):
5805 """ create couplings.f and all couplingsX.f """ 5806 5807 nb_def_by_file = 25 5808 5809 self.create_couplings_main(nb_def_by_file) 5810 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5811 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5812 5813 for i in range(nb_coup_indep): 5814 # For the independent couplings, we compute the double and multiple 5815 # precision ones together 5816 data = self.coups_indep[nb_def_by_file * i: 5817 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5818 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5819 5820 for i in range(nb_coup_dep): 5821 # For the dependent couplings, we compute the double and multiple 5822 # precision ones in separate subroutines. 5823 data = self.coups_dep[nb_def_by_file * i: 5824 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5825 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5826 dp=True,mp=False) 5827 if self.opt['mp']: 5828 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5829 dp=False,mp=True)
5830 5831
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Writes the driver subroutines:
          coup()               -- full initialisation (READLHA = .true.)
          update_as_param()    -- recompute only the alphaS-dependent part
          update_as_param2()   -- same, from explicit (mu_r2, as2) inputs
          mp_update_as_param() -- multiple-precision variant (opt['mp'])
        nb_def_by_file must match the chunking used in create_couplings.
        """

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

                            implicit none
                            double precision PI, ZERO
                            logical READLHA
                            parameter (PI=3.141592653589793d0)
                            parameter (ZERO=0d0)
                            include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
                        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .true.
                            include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        # call every independent-couplings chunk...
        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        # ...then every dependent-couplings chunk (dp and, if needed, mp)
        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param(): only re-evaluates the PS-dependent quantities
        # (READLHA = .false. skips the one-time block).
        fsock.writelines("""subroutine update_as_param()

                            implicit none
                            double precision PI, ZERO
                            logical READLHA
                            parameter (PI=3.141592653589793d0)
                            parameter (ZERO=0d0)
                            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .false.""")
        fsock.writelines("""
                            include \'intparam_definition.inc\'\n
                         """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2(): entry point taking the new scale and alphaS
        # value explicitly, then delegating to update_as_param().
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

                            implicit none
                            double precision PI
                            parameter (PI=3.141592653589793d0)
                            double precision mu_r2, as2
                            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'""")
        fsock.writelines("""
                            if (mu_r2.gt.0d0) MU_R = mu_r2
                            G = SQRT(4.0d0*PI*AS2)
                            AS = as2

                            CALL UPDATE_AS_PARAM()
                         """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            # multiple-precision twin of update_as_param()
            fsock.writelines("""subroutine mp_update_as_param()

                                implicit none
                                logical READLHA
                                include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                                parameter (MP__ZERO=0e0_16)
                                include \'mp_input.inc\'
                                include \'mp_coupl.inc\'
                            """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
                                include \'coupl.inc\'
                                include \'actualize_mp_ext_params.inc\'
                                READLHA = .false.
                                include \'mp_intparam_definition.inc\'\n
                             """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                        ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                          for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
5947
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.

        data -- slice of coupling objects (each with .name and .expr).
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                              nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

                            implicit none
                            include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
                             double precision PI, ZERO
                             parameter  (PI=3.141592653589793d0)
                             parameter  (ZERO=0d0)
                             include 'input.inc'
                             include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                                parameter (MP__ZERO=0e0_16)
                                include \'mp_input.inc\'
                                include \'mp_coupl.inc\'
                            """%self.mp_real_format)

        # one assignment per coupling, with the UFO expression translated
        # to (multi-precision) Fortran by the expression parsers
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                              self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                                self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
5985
5986 - def create_model_functions_inc(self):
5987 """ Create model_functions.inc which contains the various declarations 5988 of auxiliary functions which might be used in the couplings expressions 5989 """ 5990 5991 additional_fct = [] 5992 # check for functions define in the UFO model 5993 ufo_fct = self.model.get('functions') 5994 if ufo_fct: 5995 for fct in ufo_fct: 5996 # already handle by default 5997 if fct.name not in ["complexconjugate", "re", "im", "sec", 5998 "csc", "asec", "acsc", "theta_function", "cond", 5999 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]: 6000 additional_fct.append(fct.name) 6001 6002 6003 fsock = self.open('model_functions.inc', format='fortran') 6004 fsock.writelines("""double complex cond 6005 double complex condif 6006 double complex reglog 6007 double complex reglogp 6008 double complex reglogm 6009 double complex recms 6010 double complex arg 6011 %s 6012 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6013 6014 6015 if self.opt['mp']: 6016 fsock.writelines("""%(complex_mp_format)s mp_cond 6017 %(complex_mp_format)s mp_condif 6018 %(complex_mp_format)s mp_reglog 6019 %(complex_mp_format)s mp_reglogp 6020 %(complex_mp_format)s mp_reglogm 6021 %(complex_mp_format)s mp_recms 6022 %(complex_mp_format)s mp_arg 6023 %(additional)s 6024 """ %\ 6025 {"additional": "\n".join([" %s %s" % (self.mp_complex_format, i) for i in additional_fct]), 6026 'complex_mp_format':self.mp_complex_format 6027 })
6028
    def create_model_functions_def(self):
        """ Create model_functions.f which contains the various definitions
        of auxiliary functions which might be used in the couplings expressions
        Add the functions.f functions for formfactors support

        Writes, in order: the built-in helpers (cond, condif, recms, reglog,
        reglogp, reglogm, arg), their multiple-precision twins when
        opt['mp'] is set, the user's Fortran/functions.f file if the model
        ships one, and finally one Fortran function per extra UFO function.
        """

        fsock = self.open('model_functions.f', format='fortran')
        # cond(c,t,f) returns t when c == 0, f otherwise (NOT a boolean test)
        fsock.writelines("""double complex function cond(condition,truecase,falsecase)
                            implicit none
                            double complex condition,truecase,falsecase
                            if(condition.eq.(0.0d0,0.0d0)) then
                               cond=truecase
                            else
                               cond=falsecase
                            endif
                            end

                            double complex function condif(condition,truecase,falsecase)
                            implicit none
                            logical condition
                            double complex truecase,falsecase
                            if(condition) then
                               condif=truecase
                            else
                               condif=falsecase
                            endif
                            end

                            double complex function recms(condition,expr)
                            implicit none
                            logical condition
                            double complex expr
                            if(condition)then
                               recms=expr
                            else
                               recms=dcmplx(dble(expr))
                            endif
                            end

                            double complex function reglog(arg)
                            implicit none
                            double complex TWOPII
                            parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
                            double complex arg
                            if(arg.eq.(0.0d0,0.0d0)) then
                               reglog=(0.0d0,0.0d0)
                            else
                               reglog=log(arg)
                            endif
                            end

                            double complex function reglogp(arg)
                            implicit none
                            double complex TWOPII
                            parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
                            double complex arg
                            if(arg.eq.(0.0d0,0.0d0))then
                               reglogp=(0.0d0,0.0d0)
                            else
                               if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
                                  reglogp=log(arg) + TWOPII
                               else
                                  reglogp=log(arg)
                               endif
                            endif
                            end

                            double complex function reglogm(arg)
                            implicit none
                            double complex TWOPII
                            parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
                            double complex arg
                            if(arg.eq.(0.0d0,0.0d0))then
                               reglogm=(0.0d0,0.0d0)
                            else
                               if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
                                  reglogm=log(arg) - TWOPII
                               else
                                  reglogm=log(arg)
                               endif
                            endif
                            end

                            double complex function arg(comnum)
                            implicit none
                            double complex comnum
                            double complex iim
                            iim = (0.0d0,1.0d0)
                            if(comnum.eq.(0.0d0,0.0d0)) then
                               arg=(0.0d0,0.0d0)
                            else
                               arg=log(comnum/abs(comnum))/iim
                            endif
                            end""")
        if self.opt['mp']:
            # NOTE(review): the quad-precision TWOPII literal below reads
            # 3.14169258478796... which differs from pi (3.14159265...) in
            # the 5th decimal — looks like a mistyped/garbled constant;
            # verify against the upstream MadGraph source before relying
            # on it.
            fsock.writelines("""

                             %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
                             implicit none
                             %(complex_mp_format)s condition,truecase,falsecase
                             if(condition.eq.(0.0e0_16,0.0e0_16)) then
                                mp_cond=truecase
                             else
                                mp_cond=falsecase
                             endif
                             end

                             %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
                             implicit none
                             logical condition
                             %(complex_mp_format)s truecase,falsecase
                             if(condition) then
                                mp_condif=truecase
                             else
                                mp_condif=falsecase
                             endif
                             end

                             %(complex_mp_format)s function mp_recms(condition,expr)
                             implicit none
                             logical condition
                             %(complex_mp_format)s expr
                             if(condition)then
                                mp_recms=expr
                             else
                                mp_recms=cmplx(real(expr),kind=16)
                             endif
                             end

                             %(complex_mp_format)s function mp_reglog(arg)
                             implicit none
                             %(complex_mp_format)s TWOPII
                             parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
                             %(complex_mp_format)s arg
                             if(arg.eq.(0.0e0_16,0.0e0_16)) then
                                mp_reglog=(0.0e0_16,0.0e0_16)
                             else
                                mp_reglog=log(arg)
                             endif
                             end

                             %(complex_mp_format)s function mp_reglogp(arg)
                             implicit none
                             %(complex_mp_format)s TWOPII
                             parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
                             %(complex_mp_format)s arg
                             if(arg.eq.(0.0e0_16,0.0e0_16))then
                                mp_reglogp=(0.0e0_16,0.0e0_16)
                             else
                                if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
                                   mp_reglogp=log(arg) + TWOPII
                                else
                                   mp_reglogp=log(arg)
                                endif
                             endif
                             end

                             %(complex_mp_format)s function mp_reglogm(arg)
                             implicit none
                             %(complex_mp_format)s TWOPII
                             parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
                             %(complex_mp_format)s arg
                             if(arg.eq.(0.0e0_16,0.0e0_16))then
                                mp_reglogm=(0.0e0_16,0.0e0_16)
                             else
                                if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
                                   mp_reglogm=log(arg) - TWOPII
                                else
                                   mp_reglogm=log(arg)
                                endif
                             endif
                             end

                             %(complex_mp_format)s function mp_arg(comnum)
                             implicit none
                             %(complex_mp_format)s comnum
                             %(complex_mp_format)s imm
                             imm = (0.0e0_16,1.0e0_16)
                             if(comnum.eq.(0.0e0_16,0.0e0_16)) then
                                mp_arg=(0.0e0_16,0.0e0_16)
                             else
                                mp_arg=log(comnum/abs(comnum))/imm
                             endif
                             end"""%{'complex_mp_format':self.mp_complex_format})

        #check for the file functions.f
        model_path = self.model.get('modelpath')
        if os.path.exists(pjoin(model_path,'Fortran','functions.f')):
            fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
            input = pjoin(model_path,'Fortran','functions.f')
            # raw copy of the user's file, bypassing the Fortran reformatting
            # (Python 2 unbound file.writelines trick, cf. self.open)
            file.writelines(fsock, open(input).read())
            fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
            for fct in ufo_fct:
                # already handle by default
                if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                    "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg"]:
                    # every argument and the return value are double complex
                    ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(name)s = %(fct)s

          return
          end
          """
                    text = ufo_fct_template % {
                                'name': fct.name,
                                'args': ", ".join(fct.arguments),
                                'fct': self.p_to_f.parse(fct.expr)
                                }
                    fsock.writelines(text)
            if self.opt['mp']:
                fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
                for fct in ufo_fct:
                    # already handle by default
                    if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif",
                                        "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg"]:
                        # mp variant: both the function and its arguments
                        # carry the mp__ prefix
                        ufo_fct_template = """
          %(complex_mp_format)s function mp__%(name)s(mp__%(args)s)
          implicit none
          %(complex_mp_format)s mp__%(args)s
          mp__%(name)s = %(fct)s

          return
          end
          """
                        text = ufo_fct_template % {
                                    'name': fct.name,
                                    'args': ", mp__".join(fct.arguments),
                                    'fct': self.mp_p_to_f.parse(fct.expr),
                                    'complex_mp_format': self.mp_complex_format
                                    }
                        fsock.writelines(text)

            fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6272 6273 6274
def create_makeinc(self):
    """Create makeinc.inc listing the model object files to compile."""

    fsock = self.open('makeinc.inc', comment='#')
    text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o'
    text += ' model_functions.o '

    # One couplingsN.f file is generated per batch of 25 couplings; the
    # independent-coupling batches are numbered first, the PS-dependent
    # ones after them.
    # NOTE(review): the original code had these two counters swapped
    # (nb_coup_indep was computed from coups_dep and vice versa); the
    # arithmetic below is renamed for clarity but reproduces the exact
    # same file list.
    nb_files_indep = 1 + len(self.coups_indep) // 25
    nb_files_dep = 1 + len(self.coups_dep) // 25
    couplings_files = ['couplings%s.o' % (i + 1)
                       for i in range(nb_files_indep + nb_files_dep)]
    if self.opt['mp']:
        # Multiple-precision versions exist only for the PS-dependent
        # batches, which come after the independent ones.
        couplings_files += ['mp_couplings%s.o' % (i + 1) for i in
                            range(nb_files_indep, nb_files_indep + nb_files_dep)]
    text += ' '.join(couplings_files)
    fsock.writelines(text)
6291
def create_param_write(self):
    """Create param_write.inc, which prints every model parameter
    (external, internal, and point-by-point internal) at run time."""

    fsock = self.open('param_write.inc', format='fortran')

    fsock.writelines("""write(*,*) ' External Params'
                        write(*,*) ' ---------------------------------'
                        write(*,*) ' '""")

    # Renamed from 'format' in the original, which shadowed the builtin.
    def _write_stmt(name):
        """Return the Fortran statement printing one parameter."""
        return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}

    # Write the external parameters
    lines = [_write_stmt(param.name) for param in self.params_ext]
    fsock.writelines('\n'.join(lines))

    fsock.writelines("""write(*,*) ' Internal Params'
                        write(*,*) ' ---------------------------------'
                        write(*,*) ' '""")
    # ZERO is excluded (hard-coded elsewhere); check_needed_param drops
    # parameters not needed by this output.
    lines = [_write_stmt(data.name) for data in self.params_indep
             if data.name != 'ZERO' and self.check_needed_param(data.name)]
    fsock.writelines('\n'.join(lines))

    fsock.writelines("""write(*,*) ' Internal Params evaluated point by point'
                        write(*,*) ' ----------------------------------------'
                        write(*,*) ' '""")
    lines = [_write_stmt(data.name) for data in self.params_dep
             if self.check_needed_param(data.name)]
    fsock.writelines('\n'.join(lines))
def create_ident_card(self):
    """Write ident_card.dat, mapping each external parameter's LHA
    block and code onto its Fortran name."""

    def one_entry(parameter):
        """Return the ident_card line for a single external parameter."""
        fields = [parameter.lhablock.lower()] + \
                 [str(code) for code in parameter.lhacode] + \
                 [parameter.name]
        # Nameless parameters produce no entry.
        if not parameter.name:
            return ''
        return ' '.join(fields) + '\n'

    fsock = self.open('ident_card.dat')
    entries = [one_entry(param) for param in self.params_ext]
    fsock.writelines('\n'.join(entries))
def create_actualize_mp_ext_param_inc(self):
    """Write actualize_mp_ext_params.inc, copying the scale-dependent
    external parameters into their multiple-precision counterparts."""

    # Only the parameters that may change point by point (those listed in
    # PS_dependent_key, in practice AS and MU_R) are refreshed here; all
    # other external parameters keep their initialisation-time value.
    to_update = [param for param in self.params_ext
                 if param.name in self.PS_dependent_key]

    assignments = []
    for param in to_update:
        assignments.append('%(mp_prefix)s%(name)s=%(name)s'
                           % {'mp_prefix': self.mp_prefix, 'name': param.name})

    # When read_lha is false the input variable is G rather than AS, so G
    # must be refreshed whenever AS is.
    if any(param.name == 'aS' for param in to_update):
        assignments.append('%(mp_prefix)sG=G' % {'mp_prefix': self.mp_prefix})

    fsock = self.open('actualize_mp_ext_params.inc', format='fortran')
    fsock.writelines('\n'.join(assignments))
6361
def create_param_read(self):
    """Write param_read.inc, the Fortran code reading the external
    parameters from the param_card (via the LHA_get_real helpers), or a
    simple include of param_card.inc for the madevent/FKS5 formats."""

    # For the madevent-style outputs the card parsing is generated
    # elsewhere; only an include line is needed here.
    if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
           or self.opt['loop_induced']:
        fsock = self.open('param_read.inc', format='fortran')
        fsock.writelines(' include \'../param_card.inc\'')
        return

    def format_line(parameter):
        """return the line for the ident_card corresponding to this
        parameter"""
        # The third argument of LHA_get_real is the default value used when
        # the card does not provide one (parsed to Fortran syntax).
        template = \
            """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
            % {'name': parameter.name,
               'value': self.p_to_f.parse(str(parameter.value.real))}
        # Multiple-precision mode also fills the mp_-prefixed copy.
        if self.opt['mp']:
            template = template + \
                ("\n call MP_LHA_get_real(npara,param,value,'%(name)s'," +
                 "%(mp_prefix)s%(name)s,%(value)s)") \
                % {'name': parameter.name, 'mp_prefix': self.mp_prefix,
                   'value': self.mp_p_to_f.parse(str(parameter.value.real))}
        return template

    fsock = self.open('param_read.inc', format='fortran')
    res_strings = [format_line(param) \
                   for param in self.params_ext]

    # Correct width sign for Majorana particles (where the width
    # and mass need to have the same sign)
    for particle in self.model.get('particles'):
        if particle.is_fermion() and particle.get('self_antipart') and \
               particle.get('width').lower() != 'zero':

            res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                 {'width': particle.get('width'), 'mass': particle.get('mass')})
            if self.opt['mp']:
                res_strings.append(\
                    ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,' + \
                     '%(mp_pref)s%(mass)s)') % {'width': particle.get('width'), \
                     'mass': particle.get('mass'), 'mp_pref': self.mp_prefix})

    fsock.writelines('\n'.join(res_strings))


@staticmethod
def create_param_card_static(model, output_path, rule_card_path=False,
                             mssm_convert=True):
    """ create the param_card.dat for a givent model --static method-- """
    #1. Check if a default param_card is present:
    done = False
    if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str):
        # Restriction cards are named 'restrict_<name>.dat'; [9:-4]
        # presumably strips that prefix/suffix to recover <name> — verify.
        restrict_name = os.path.basename(model.restrict_card)[9:-4]
        model_path = model.get('modelpath')
        if os.path.exists(pjoin(model_path, 'paramcard_%s.dat' % restrict_name)):
            done = True
            files.cp(pjoin(model_path, 'paramcard_%s.dat' % restrict_name),
                     output_path)
    if not done:
        # No pre-made card shipped with the model: generate one.
        param_writer.ParamCardWriter(model, output_path)

    if rule_card_path:
        if hasattr(model, 'rule_card'):
            model.rule_card.write_file(rule_card_path)

    if mssm_convert:
        model_name = model.get('name')
        # IF MSSM convert the card to SLAH1
        if model_name == 'mssm' or model_name.startswith('mssm-'):
            import models.check_param_card as translator
            # Check the format of the param_card for Pythia and make it correct
            if rule_card_path:
                translator.make_valid_param_card(output_path, rule_card_path)
            translator.convert_to_slha1(output_path)
6436
def create_param_card(self):
    """Write param_card.dat (and the restriction rule card when the
    model carries one) into the output directory."""

    if hasattr(self.model, 'rule_card'):
        rule_card = pjoin(self.dir_path, 'param_card_rule.dat')
    else:
        # No restriction rules attached to this model.
        rule_card = False
    self.create_param_card_static(self.model,
                                  output_path=pjoin(self.dir_path, 'param_card.dat'),
                                  rule_card_path=rule_card,
                                  mssm_convert=True)
6447
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True):
    """ Determine which Export_v4 class is required. cmd is the command
        interface containing all potential usefull information.
        The output_type argument specifies from which context the output
        is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
        and 'default' for tree-level outputs."""

    opt = cmd.options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Pick a representative process to inspect its perturbation settings.
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
                 len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    # A reduction tool is needed only for genuine virtual (loop)
    # contributions, i.e. not for pure real/tree/LO runs.
    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
        'complex_mass': cmd.options['complex_mass_scheme'],
        'export_format': 'madloop',
        'mp': True,
        'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
        'cuttools_dir': cmd._cuttools_dir,
        'iregi_dir': cmd._iregi_dir,
        'pjfry_dir': cmd.options['pjfry'],
        'golem_dir': cmd.options['golem'],
        'samurai_dir': cmd.options['samurai'],
        'ninja_dir': cmd.options['ninja'],
        'collier_dir': cmd.options['collier'],
        'fortran_compiler': cmd.options['fortran_compiler'],
        'f2py_compiler': cmd.options['f2py_compiler'],
        'output_dependencies': cmd.options['output_dependencies'],
        'SubProc_prefix': 'P',
        'compute_color_flows': cmd.options['loop_color_flows'],
        'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
        'cluster_local_path': cmd.options['cluster_local_path']
        }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception, "output_type not recognize %s" % output_type
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # Multiple precision is only needed when virtual amplitudes exist.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        # Options used when the process turns out to be loop-induced.
        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                                            cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                                            cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:

            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception, 'Wrong export_v4 format'
    else:
        # NOTE(review): the message contains a %s placeholder but no
        # argument is supplied, so the raw '%s' is printed — verify intent.
        raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
6613
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Fortran template used to generate the matrix<N>.f files.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Tag identifying the grouped output flavour handled by this exporter.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
def generate_subprocess_directory(self, subproc_group,
                                  fortran_model,
                                  group_number):
    """Generate the Pn directory for a subprocess group in MadEvent,
    including the necessary matrix_N.f files, configs.inc and various
    other helper files.

    Returns the total number of helicity-amplitude calls written out."""

    if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
        raise base_objects.PhysicsObject.PhysicsObjectError,\
              "subproc_group object not SubProcessGroup"

    # Lazily bind the model from the first process of the group.
    if not self.model:
        self.model = subproc_group.get('matrix_elements')[0].\
                     get('processes')[0].get('model')

    pathdir = os.path.join(self.dir_path, 'SubProcesses')

    # Create the directory PN in the specified path
    subprocdir = "P%d_%s" % (subproc_group.get('number'),
                             subproc_group.get('name'))
    try:
        os.mkdir(pjoin(pathdir, subprocdir))
    except os.error as error:
        # Directory may already exist; warn and keep going.
        logger.warning(error.strerror + " " + subprocdir)

    logger.info('Creating files in directory %s' % subprocdir)
    Ppath = pjoin(pathdir, subprocdir)

    # Create the matrix.f files, auto_dsig.f files and all inc files
    # for all subprocesses in the group

    maxamps = 0
    maxflows = 0
    tot_calls = 0

    matrix_elements = subproc_group.get('matrix_elements')

    for ime, matrix_element in \
            enumerate(matrix_elements):
        filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                                         matrix_element,
                                         fortran_model,
                                         str(ime+1),
                                         subproc_group.get('diagram_maps')[\
                                                                          ime])

        filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  str(ime+1))

        # Keep track of needed quantities
        tot_calls += int(calls)
        maxflows = max(maxflows, ncolor)
        maxamps = max(maxamps, len(matrix_element.get('diagrams')))

        # Draw diagrams
        filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                          filename,
                                          model = \
                                              matrix_element.get('processes')[0].\
                                              get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

    # Extract number of external particles
    # (taken from the last matrix element of the loop above)
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Generate a list of diagrams corresponding to each configuration
    # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
    # If a subprocess has no diagrams for this config, the number is 0

    subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

    filename = pjoin(Ppath, 'auto_dsig.f')
    self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                    subproc_group)

    filename = pjoin(Ppath, 'configs.inc')
    nconfigs, s_and_t_channels = self.write_configs_file(\
        writers.FortranWriter(filename),
        subproc_group,
        subproc_diagrams_for_config)

    filename = pjoin(Ppath, 'leshouche.inc')
    self.write_leshouche_file(writers.FortranWriter(filename),
                              subproc_group)

    filename = pjoin(Ppath, 'phasespace.inc')
    self.write_phasespace_file(writers.FortranWriter(filename),
                               nconfigs)

    filename = pjoin(Ppath, 'maxamps.inc')
    self.write_maxamps_file(writers.FortranWriter(filename),
                            maxamps,
                            maxflows,
                            max([len(me.get('processes')) for me in \
                                 matrix_elements]),
                            len(matrix_elements))

    filename = pjoin(Ppath, 'mirrorprocs.inc')
    self.write_mirrorprocs(writers.FortranWriter(filename),
                           subproc_group)

    filename = pjoin(Ppath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = pjoin(Ppath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = pjoin(Ppath, 'props.inc')
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    # filename = pjoin(Ppath, 'processes.dat')
    # files.write_to_file(filename,
    #                     self.write_processes_file,
    #                     subproc_group)

    # Generate jpgs -> pass in make_html
    #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

    # Symlink the shared per-process files from the parent directory.
    linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f']

    for file in linkfiles:
        ln('../%s' % file, cwd=Ppath)

    ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
    ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
    ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
    ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
    ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
    ln('phasespace.inc', '../', log=True, cwd=Ppath)
    if not tot_calls:
        tot_calls = 0
    return tot_calls
6776 6777 6778 #=========================================================================== 6779 # Helper functions 6780 #===========================================================================
def modify_grouping(self, matrix_element):
    """Adjust an existing grouping by splitting lepton processes apart.

    Returns a pair: a flag telling the caller the matrix element was
    modified (always True here), and the resulting matrix element."""

    regrouped = matrix_element.split_lepton_grouping()
    return True, regrouped
6788 6789 #=========================================================================== 6790 # write_super_auto_dsig_file 6791 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write the auto_dsig.f file selecting between the subprocesses
    in subprocess group mode.

    If writer is None, return the template substitution dictionary
    instead of writing the file."""

    replace_dict = {}

    # Extract version number and date from VERSION file
    replace_dict['info_lines'] = self.get_mg5_info_lines()

    matrix_elements = subproc_group.get('matrix_elements')

    # Extract process info lines
    replace_dict['process_lines'] = '\n'.join(
        [self.get_process_info_lines(me) for me in matrix_elements])

    nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal

    # Twice the number of matrix elements — presumably to account for the
    # mirror processes (TODO confirm against the template).
    replace_dict['nsprocs'] = 2 * len(matrix_elements)

    # Generate dsig definition line
    replace_dict['dsig_def_line'] = "DOUBLE PRECISION " + \
        ",".join(["DSIG%d" % (iproc + 1)
                  for iproc in range(len(matrix_elements))])

    # Generate dsig process lines: one dispatch line per subprocess
    call_dsig_proc_lines = []
    for iproc in range(len(matrix_elements)):
        call_dsig_proc_lines.append(
            "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" %
            {"num": iproc + 1,
             "proc": matrix_elements[iproc].get('processes')[0].base_string()})
    replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

    if writer:
        # Renamed the local from 'file' (shadowed the py2 builtin) and close
        # the template handle instead of leaking it.
        template_path = os.path.join(_file_path, \
                        'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')
        with open(template_path) as template:
            text = template.read() % replace_dict
        # Write the file
        writer.writelines(text)
    else:
        return replace_dict
6837 6838 #=========================================================================== 6839 # write_mirrorprocs 6840 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write mirrorprocs.inc, a Fortran DATA statement flagging which
    processes of the group have an initial-state mirror process."""

    matrix_elements = subproc_group.get('matrix_elements')
    flag_map = {True: '.true.', False: '.false.'}
    flags = [flag_map[element.get('has_mirror_process')]
             for element in matrix_elements]
    data_line = "DATA (MIRRORPROCS(I),I=1,%d)/%s/" % (len(matrix_elements),
                                                      ",".join(flags))
    # Write the file
    writer.writelines([data_line])
6854 6855 #=========================================================================== 6856 # write_configs_file 6857 #===========================================================================
def write_configs_file(self, writer, subproc_group, diagrams_for_config):
    """Write the configs.inc file with the topology information of a
    subprocess group, keeping, for each configuration, the diagram of
    every subprocess that contributes to it (None otherwise).

    Returns (number_of_configs, result of write_configs_file_from_diagrams)."""

    matrix_elements = subproc_group.get('matrix_elements')
    model = matrix_elements[0].get('processes')[0].get('model')

    selected_diagrams = []
    selected_numbers = []
    for config_index, diag_numbers in enumerate(diagrams_for_config):
        # Skip configurations to which no subprocess contributes.
        if set(diag_numbers) == set([0]):
            continue
        # Diagram numbers are 1-based; 0 means "no diagram for this subproc".
        per_subproc = [matrix_elements[isub].get('diagrams')[idiag - 1]
                       if idiag else None
                       for isub, idiag in enumerate(diag_numbers)]
        selected_diagrams.append(per_subproc)
        selected_numbers.append(config_index + 1)

    # Extract number of external particles
    nexternal, ninitial = subproc_group.get_nexternal_ninitial()

    nconfigs = len(selected_diagrams)
    s_and_t = self.write_configs_file_from_diagrams(writer,
                                                    selected_diagrams,
                                                    selected_numbers,
                                                    nexternal, ninitial,
                                                    matrix_elements[0], model)
    return nconfigs, s_and_t
6890 6891 #=========================================================================== 6892 # write_run_configs_file 6893 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent (MadWeight flavour),
    filling the template with 2 channels per job."""

    path = os.path.join(_file_path, 'iolibs', 'template_files',
                        'madweight_run_config.inc')
    # Close the template handle instead of leaking it (the original used a
    # bare open(path).read()).
    with open(path) as template:
        text = template.read() % {'chanperjob': '2'}
    writer.write(text)
    return True
6901 6902 6903 #=========================================================================== 6904 # write_leshouche_file 6905 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Collect the leshouche lines of every matrix element in the group
    and write them out as leshouche.inc (MG4 format). Returns True."""

    collected = []
    matrix_elements = subproc_group.get('matrix_elements')
    for index, element in enumerate(matrix_elements):
        collected += self.get_leshouche_lines(element, index)

    # Write the file
    writer.writelines(collected)

    return True
6920