Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  16  """Methods and classes to export matrix elements to v4 format.""" 
  17   
  18  import copy 
  19  from cStringIO import StringIO 
  20  from distutils import dir_util 
  21  import itertools 
  22  import fractions 
  23  import glob 
  24  import logging 
  25  import math 
  26  import os 
  27  import re 
  28  import shutil 
  29  import subprocess 
  30  import sys 
  31  import time 
  32  import traceback 
  33   
  34  import aloha 
  35   
  36  import madgraph.core.base_objects as base_objects 
  37  import madgraph.core.color_algebra as color 
  38  import madgraph.core.helas_objects as helas_objects 
  39  import madgraph.iolibs.drawing_eps as draw 
  40  import madgraph.iolibs.files as files 
  41  import madgraph.iolibs.group_subprocs as group_subprocs 
  42  import madgraph.iolibs.file_writers as writers 
  43  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  44  import madgraph.iolibs.template_files as template_files 
  45  import madgraph.iolibs.ufo_expression_parsers as parsers 
  46  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  47  import madgraph.interface.common_run_interface as common_run_interface 
  48  import madgraph.various.diagram_symmetry as diagram_symmetry 
  49  import madgraph.various.misc as misc 
  50  import madgraph.various.banner as banner_mod 
  51  import madgraph.various.process_checks as process_checks 
  52  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  53  import aloha.create_aloha as create_aloha 
  54  import models.import_ufo as import_ufo 
  55  import models.write_param_card as param_writer 
  56  import models.check_param_card as check_param_card 
  57   
  58   
  59  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  60  from madgraph.iolibs.files import cp, ln, mv 
  61   
  62  from madgraph import InvalidCmd 
  63   
  64  pjoin = os.path.join 
  65   
  66  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  67  logger = logging.getLogger('madgraph.export_v4') 
  68   
  69  default_compiler= {'fortran': 'gfortran', 
  70                         'f2py': 'f2py', 
  71                         'cpp':'g++'} 
class VirtualExporter(object):
    """Base class defining the interface madgraph expects from an exporter.

    The class attributes below tune the way madgraph interacts with the
    exporter; daughter classes override them and the hook methods.
    """

    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    #   False      : avoid grouping (only identical matrix element are merged)
    #   'madevent' : group the massless quark and massless lepton
    #   'madweight': group the gluon with the massless quark
    grouped_mode = 'madevent'

    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Setting sa_symmetry generates only one of the two matrix-elements.
    sa_symmetry = False

    # Ask madgraph to check if the directory already exists and propose to
    # the user to remove it first if this is the case.
    check = True

    # One of [Template, None, dir]:
    # - 'Template': madgraph will call copy_template
    # - 'dir'     : madgraph will just create an empty directory for
    #               initialisation
    # - None      : madgraph does nothing for initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        """Install the helas-call customisation hook.

        opt is a dictionary with all the optional arguments passed at
        output time (unused by this base class).
        """
        # Activate some monkey patching for the helas call writer.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
                                                  self.helas_call_writer_custom

    # helper function for customising the helas writer
    @staticmethod
    def custom_helas_call(call, arg):
        """Static method to customise the way aloha function calls are written.

        call is the default template for the call;
        arg is the dictionary used for the call.
        """
        return call, arg

    # Hook installed by __init__ on HelasCallWriter; dispatches to the
    # (possibly overridden) custom_helas_call of the exporter instance.
    helas_call_writer_custom = lambda x, y, z: x.custom_helas_call(y, z)

    def copy_template(self, model):
        """Initialise the output directory (hook for daughter classes)."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        """Create one subprocess directory (hook for daughter classes).

        For ungrouped output the signature reads
        (matrix_element, helicity_model, me_number).
        Returns an integer stating the number of calls to helicity routines.
        """
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Export the model files (hook for daughter classes)."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Finalize the output directory (hook for daughter classes)."""
        return

    def pass_information_from_cmd(self, cmd):
        """Pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Allow daughter classes to regroup matrix elements.

        Returns (changed_flag, matrix_element).
        """
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # Python3-compatible raise (was the Python2-only 'raise E, msg' form);
        # the unreachable trailing 'return' has been dropped.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Default options; __init__ copies this dict and overlays the 'opt'
    # argument on top of it.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{}
                   }
    # The base Fortran exporter does not group subprocesses (see
    # VirtualExporter.grouped_mode for the meaning of the values).
    grouped_mode = False
165 - def __init__(self, dir_path = "", opt=None):
166 """Initiate the ProcessExporterFortran with directory information""" 167 self.mgme_dir = MG5DIR 168 self.dir_path = dir_path 169 self.model = None 170 171 self.opt = dict(self.default_opt) 172 if opt: 173 self.opt.update(opt) 174 175 self.cmd_options = self.opt['output_options'] 176 177 #place holder to pass information to the run_interface 178 self.proc_characteristic = banner_mod.ProcCharacteristic() 179 # call mother class 180 super(ProcessExporterFortran,self).__init__(dir_path, opt)
181 182 183 #=========================================================================== 184 # process exporter fortran switch between group and not grouped 185 #===========================================================================
186 - def export_processes(self, matrix_elements, fortran_model):
187 """Make the switch between grouped and not grouped output""" 188 189 calls = 0 190 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 191 for (group_number, me_group) in enumerate(matrix_elements): 192 calls = calls + self.generate_subprocess_directory(\ 193 me_group, fortran_model, group_number) 194 else: 195 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 196 calls = calls + self.generate_subprocess_directory(\ 197 me, fortran_model, me_number) 198 199 return calls
200 201 202 #=========================================================================== 203 # create the run_card 204 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write Cards/run_card_default.dat (tailored to the exported
        processes when possible) and copy it to Cards/run_card.dat."""

        # bypass this for the loop-check
        # (local import to avoid a circular dependency at module load time)
        import madgraph.loop.loop_helas_objects as loop_helas_objects
        if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
            matrix_elements = None

        run_card = banner_mod.RunCard()

        # Collect the process list so the run_card defaults can be tuned to
        # them; fall back to a generic card when no processes are available.
        default=True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            default =False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)

        # The default card is kept for reference; the working copy is
        # run_card.dat.
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                        pjoin(self.dir_path, 'Cards', 'run_card.dat'))
235 236 237 238 #=========================================================================== 239 # copy the Template in a new directory. 240 #===========================================================================
241 - def copy_template(self, model):
242 """create the directory run_name as a copy of the MadEvent 243 Template, and clean the directory 244 """ 245 246 #First copy the full template tree if dir_path doesn't exit 247 if not os.path.isdir(self.dir_path): 248 assert self.mgme_dir, \ 249 "No valid MG_ME path given for MG4 run directory creation." 250 logger.info('initialize a new directory: %s' % \ 251 os.path.basename(self.dir_path)) 252 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 253 self.dir_path, True) 254 # distutils.dir_util.copy_tree since dir_path already exists 255 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 256 self.dir_path) 257 # copy plot_card 258 for card in ['plot_card']: 259 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 260 try: 261 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 262 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 263 except IOError: 264 logger.warning("Failed to copy " + card + ".dat to default") 265 elif os.getcwd() == os.path.realpath(self.dir_path): 266 logger.info('working in local directory: %s' % \ 267 os.path.realpath(self.dir_path)) 268 # distutils.dir_util.copy_tree since dir_path already exists 269 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 270 self.dir_path) 271 # for name in misc.glob('Template/LO/*', self.mgme_dir): 272 # name = os.path.basename(name) 273 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 274 # if os.path.isfile(filename): 275 # files.cp(filename, pjoin(self.dir_path,name)) 276 # elif os.path.isdir(filename): 277 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 278 # distutils.dir_util.copy_tree since dir_path already exists 279 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 280 self.dir_path) 281 # Copy plot_card 282 for card in ['plot_card']: 283 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 284 try: 285 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 286 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 287 
except IOError: 288 logger.warning("Failed to copy " + card + ".dat to default") 289 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 290 assert self.mgme_dir, \ 291 "No valid MG_ME path given for MG4 run directory creation." 292 try: 293 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 294 except IOError: 295 MG5_version = misc.get_pkg_info() 296 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 297 298 #Ensure that the Template is clean 299 if self.opt['clean']: 300 logger.info('remove old information in %s' % \ 301 os.path.basename(self.dir_path)) 302 if os.environ.has_key('MADGRAPH_BASE'): 303 misc.call([pjoin('bin', 'internal', 'clean_template'), 304 '--web'], cwd=self.dir_path) 305 else: 306 try: 307 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 308 cwd=self.dir_path) 309 except Exception, why: 310 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 311 % (os.path.basename(self.dir_path),why)) 312 313 #Write version info 314 MG_version = misc.get_pkg_info() 315 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 316 MG_version['version']) 317 318 # add the makefile in Source directory 319 filename = pjoin(self.dir_path,'Source','makefile') 320 self.write_source_makefile(writers.FileWriter(filename)) 321 322 # add the DiscreteSampler information 323 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 324 pjoin(self.dir_path, 'Source')) 325 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 326 pjoin(self.dir_path, 'Source')) 327 328 # We need to create the correct open_data for the pdf 329 self.write_pdf_opendata()
330 331 332 #=========================================================================== 333 # Call MadAnalysis5 to generate the default cards for this process 334 #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels = ['parton','hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process.

        Failures are deliberately non-fatal: MA5 card generation is a
        convenience, so any error only results in empty default analyses.
        (NOTE: 'levels' has a mutable default, but it is never mutated here.)
        """

        if len(levels)==0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        try:
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                              get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100)
        except (Exception, SystemExit) as e:
            # Best effort only: fall back to an empty default analysis.
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton','hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only); log the traceback at
                    # debug level for post-mortem inspection.
                    logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl)
                    error=StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-'*60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-'*60)
                else:
                    open(card_to_generate,'w').write(text)
        stop = time.time()
        # Only report timing when the generation was noticeably slow.
        if stop-start >1:
            logger.info('Cards created in %.2fs' % (stop-start))
375 376 #=========================================================================== 377 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 378 #===========================================================================
379 - def write_procdef_mg5(self, file_pos, modelname, process_str):
380 """ write an equivalent of the MG4 proc_card in order that all the Madevent 381 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 382 383 proc_card_template = template_files.mg4_proc_card.mg4_template 384 process_template = template_files.mg4_proc_card.process_template 385 process_text = '' 386 coupling = '' 387 new_process_content = [] 388 389 390 # First find the coupling and suppress the coupling from process_str 391 #But first ensure that coupling are define whithout spaces: 392 process_str = process_str.replace(' =', '=') 393 process_str = process_str.replace('= ', '=') 394 process_str = process_str.replace(',',' , ') 395 #now loop on the element and treat all the coupling 396 for info in process_str.split(): 397 if '=' in info: 398 coupling += info + '\n' 399 else: 400 new_process_content.append(info) 401 # Recombine the process_str (which is the input process_str without coupling 402 #info) 403 process_str = ' '.join(new_process_content) 404 405 #format the SubProcess 406 replace_dict = {'process': process_str, 407 'coupling': coupling} 408 process_text += process_template.substitute(replace_dict) 409 410 replace_dict = {'process': process_text, 411 'model': modelname, 412 'multiparticle':''} 413 text = proc_card_template.substitute(replace_dict) 414 415 if file_pos: 416 ff = open(file_pos, 'w') 417 ff.write(text) 418 ff.close() 419 else: 420 return replace_dict
421 422
423 - def pass_information_from_cmd(self, cmd):
424 """Pass information for MA5""" 425 426 self.proc_defs = cmd._curr_proc_defs
427 428 #=========================================================================== 429 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 430 #===========================================================================
431 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
432 """Function to finalize v4 directory, for inheritance.""" 433 434 self.create_run_card(matrix_elements, history) 435 self.create_MA5_cards(matrix_elements, history)
436
    def create_MA5_cards(self,matrix_elements,history):
        """ A wrapper around the creation of the MA5 cards so that it can be
        bypassed by daughter classes (i.e. in standalone)."""
        # Only act when a MadAnalysis5 path is configured and process
        # definitions were recorded by pass_information_from_cmd.
        if 'madanalysis5_path' in self.opt and not \
                self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
            processes = None
            if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
                processes = [me.get('processes') for megroup in matrix_elements
                             for me in megroup['matrix_elements']]
            elif matrix_elements:
                processes = [me.get('processes')
                             for me in matrix_elements['matrix_elements']]

            self.create_default_madanalysis5_cards(
                history, self.proc_defs, processes,
                self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
                levels = ['hadron','parton'])

            for level in ['hadron','parton']:
                # Copying these cards turn on the use of MadAnalysis5 by default.
                if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)):
                    shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level),
                                pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
460 461 #=========================================================================== 462 # Create the proc_characteristic file passing information to the run_interface 463 #===========================================================================
464 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
465 466 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
467 468 #=========================================================================== 469 # write_matrix_element_v4 470 #===========================================================================
471 - def write_matrix_element_v4(self):
472 """Function to write a matrix.f file, for inheritance. 473 """ 474 pass
475 476 #=========================================================================== 477 # write_pdf_opendata 478 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configured (self.opt["cluster_local_path"]).

        Two Fortran sources are (re)generated from templates:
        Source/PDF/opendata.f and Source/PDF/pdfwrap_lhapdf.f.
        """

        if not self.opt["cluster_local_path"]:
            # No cluster-local repository: keep the system-wide lookup only.
            changer = {"pdf_systemwide": ""}
        else:
            # Fortran snippet trying a cascade of cluster-local locations for
            # the pdf table before falling back (labels 1-5 are ERR targets).
            to_add = """
          tempname='%(path)s'//Tablefile
          open(IU,file=tempname,status='old',ERR=1)
          return
 1        tempname='%(path)s/Pdfdata/'//Tablefile
          open(IU,file=tempname,status='old',ERR=2)
          return
 2        tempname='%(path)s/lhapdf'//Tablefile
          open(IU,file=tempname,status='old',ERR=3)
          return
 3        tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
          open(IU,file=tempname,status='old',ERR=4)
          return
 4        tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
          open(IU,file=tempname,status='old',ERR=5)
          return
          """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            # Cascade of cluster-local locations for the LHAPDF sets.
            to_add="""
          LHAPath='%(path)s/PDFsets'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='%(path)s/../lhapdf/pdfsets/'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='./PDFsets'
          """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)


        return
536 537 538 539 #=========================================================================== 540 # write_maxparticles_file 541 #===========================================================================
542 - def write_maxparticles_file(self, writer, matrix_elements):
543 """Write the maxparticles.inc file for MadEvent""" 544 545 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 546 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 547 matrix_elements.get('matrix_elements')]) 548 else: 549 maxparticles = max([me.get_nexternal_ninitial()[0] \ 550 for me in matrix_elements]) 551 552 lines = "integer max_particles\n" 553 lines += "parameter(max_particles=%d)" % maxparticles 554 555 # Write the file 556 writer.writelines(lines) 557 558 return True
559 560 561 #=========================================================================== 562 # export the model 563 #===========================================================================
564 - def export_model_files(self, model_path):
565 """Configure the files/link of the process according to the model""" 566 567 # Import the model 568 for file in os.listdir(model_path): 569 if os.path.isfile(pjoin(model_path, file)): 570 shutil.copy2(pjoin(model_path, file), \ 571 pjoin(self.dir_path, 'Source', 'MODEL'))
572 573 587 595 596 597 #=========================================================================== 598 # export the helas routine 599 #===========================================================================
600 - def export_helas(self, helas_path):
601 """Configure the files/link of the process according to the model""" 602 603 # Import helas routine 604 for filename in os.listdir(helas_path): 605 filepos = pjoin(helas_path, filename) 606 if os.path.isfile(filepos): 607 if filepos.endswith('Makefile.template'): 608 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 609 elif filepos.endswith('Makefile'): 610 pass 611 else: 612 cp(filepos, self.dir_path + '/Source/DHELAS')
613 # following lines do the same but whithout symbolic link 614 # 615 #def export_helas(mgme_dir, dir_path): 616 # 617 # # Copy the HELAS directory 618 # helas_dir = pjoin(mgme_dir, 'HELAS') 619 # for filename in os.listdir(helas_dir): 620 # if os.path.isfile(pjoin(helas_dir, filename)): 621 # shutil.copy2(pjoin(helas_dir, filename), 622 # pjoin(dir_path, 'Source', 'DHELAS')) 623 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 624 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 625 # 626 627 #=========================================================================== 628 # generate_subprocess_directory 629 #===========================================================================
630 - def generate_subprocess_directory(self, matrix_element, 631 fortran_model, 632 me_number):
633 """Routine to generate a subprocess directory (for inheritance)""" 634 635 pass
636 637 #=========================================================================== 638 # get_source_libraries_list 639 #===========================================================================
640 - def get_source_libraries_list(self):
641 """ Returns the list of libraries to be compiling when compiling the 642 SOURCE directory. It is different for loop_induced processes and 643 also depends on the value of the 'output_dependencies' option""" 644 645 return ['$(LIBDIR)libdhelas.$(libext)', 646 '$(LIBDIR)libpdf.$(libext)', 647 '$(LIBDIR)libmodel.$(libext)', 648 '$(LIBDIR)libcernlib.$(libext)', 649 '$(LIBDIR)libbias.$(libext)']
650 651 #=========================================================================== 652 # write_source_makefile 653 #===========================================================================
654 - def write_source_makefile(self, writer):
655 """Write the nexternal.inc file for MG4""" 656 657 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 658 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 659 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 660 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 661 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 662 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 663 else: 664 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 665 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 666 667 replace_dict= {'libraries': set_of_lib, 668 'model':model_line, 669 'additional_dsample': '', 670 'additional_dependencies':''} 671 672 if writer: 673 text = open(path).read() % replace_dict 674 writer.write(text) 675 676 return replace_dict
677 678 #=========================================================================== 679 # write_nexternal_madspin 680 #===========================================================================
681 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
682 """Write the nexternal_prod.inc file for madspin""" 683 684 replace_dict = {} 685 686 replace_dict['nexternal'] = nexternal 687 replace_dict['ninitial'] = ninitial 688 689 file = """ \ 690 integer nexternal_prod 691 parameter (nexternal_prod=%(nexternal)d) 692 integer nincoming_prod 693 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 694 695 # Write the file 696 if writer: 697 writer.writelines(file) 698 return True 699 else: 700 return replace_dict
701 702 #=========================================================================== 703 # write_helamp_madspin 704 #===========================================================================
705 - def write_helamp_madspin(self, writer, ncomb):
706 """Write the helamp.inc file for madspin""" 707 708 replace_dict = {} 709 710 replace_dict['ncomb'] = ncomb 711 712 file = """ \ 713 integer ncomb1 714 parameter (ncomb1=%(ncomb)d) 715 double precision helamp(ncomb1) 716 common /to_helamp/helamp """ % replace_dict 717 718 # Write the file 719 if writer: 720 writer.writelines(file) 721 return True 722 else: 723 return replace_dict
724 725 726 727 #=========================================================================== 728 # write_nexternal_file 729 #===========================================================================
730 - def write_nexternal_file(self, writer, nexternal, ninitial):
731 """Write the nexternal.inc file for MG4""" 732 733 replace_dict = {} 734 735 replace_dict['nexternal'] = nexternal 736 replace_dict['ninitial'] = ninitial 737 738 file = """ \ 739 integer nexternal 740 parameter (nexternal=%(nexternal)d) 741 integer nincoming 742 parameter (nincoming=%(ninitial)d)""" % replace_dict 743 744 # Write the file 745 if writer: 746 writer.writelines(file) 747 return True 748 else: 749 return replace_dict
750 #=========================================================================== 751 # write_pmass_file 752 #===========================================================================
753 - def write_pmass_file(self, writer, matrix_element):
754 """Write the pmass.inc file for MG4""" 755 756 model = matrix_element.get('processes')[0].get('model') 757 758 lines = [] 759 for wf in matrix_element.get_external_wavefunctions(): 760 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 761 if mass.lower() != "zero": 762 mass = "abs(%s)" % mass 763 764 lines.append("pmass(%d)=%s" % \ 765 (wf.get('number_external'), mass)) 766 767 # Write the file 768 writer.writelines(lines) 769 770 return True
771 772 #=========================================================================== 773 # write_ngraphs_file 774 #===========================================================================
775 - def write_ngraphs_file(self, writer, nconfigs):
776 """Write the ngraphs.inc file for MG4. Needs input from 777 write_configs_file.""" 778 779 file = " integer n_max_cg\n" 780 file = file + "parameter (n_max_cg=%d)" % nconfigs 781 782 # Write the file 783 writer.writelines(file) 784 785 return True
786 787 #=========================================================================== 788 # write_leshouche_file 789 #===========================================================================
790 - def write_leshouche_file(self, writer, matrix_element):
791 """Write the leshouche.inc file for MG4""" 792 793 # Write the file 794 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 795 796 return True
797 798 #=========================================================================== 799 # get_leshouche_lines 800 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Build the Fortran DATA lines of the leshouche.inc file for MG4:
        IDUP (PDG ids), MOTHUP (mother links) and ICOLUP (colour flows)."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG codes of all legs for this (sub)process.
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mothers: 0 for the initial state, the beam index for the
                # final state.  ('%2r'/'%3r' rely on repr(int) to render the
                # number right-padded like %2d/%3d would.)
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                             (i, nexternal,
                              ",".join([ "%3r" % 0 ] * ninitial + \
                                       [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                 (i, numproc+1,nexternal,
                                  ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    # (colour rep of each leg, sign-flipped for initial state)
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                     (i + 1, cf_i + 1, numproc+1, nexternal,
                                      ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                for l in legs])))

        return lines
850 851 852 853 854 #=========================================================================== 855 # write_maxamps_file 856 #===========================================================================
857 - def write_maxamps_file(self, writer, maxamps, maxflows, 858 maxproc,maxsproc):
859 """Write the maxamps.inc file for MG4.""" 860 861 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 862 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 863 (maxamps, maxflows) 864 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 865 (maxproc, maxsproc) 866 867 # Write the file 868 writer.writelines(file) 869 870 return True
871 872 873 #=========================================================================== 874 # Routines to output UFO models in MG4 format 875 #=========================================================================== 876
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO)

        Builds the Source/MODEL directory from the UFO model, generates the
        needed ALOHA helicity-amplitude routines into Source/DHELAS, and
        links the result into the process directory.

        NOTE: the mutable default arguments are never mutated here (they are
        only read), so the shared-default pitfall does not apply.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']

        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        # copy Helas Template; the loop variant of aloha_functions is used as
        # soon as any wanted Lorentz structure carries an 'L...' tag.
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
934 935 936 #=========================================================================== 937 # Helper functions 938 #===========================================================================
    def modify_grouping(self, matrix_element):
        """Hook allowing subclasses to modify the grouping (if grouping is in
        place).

        Returns two values:
          - True/False whether the matrix_element was modified
          - the new (or old) matrix element

        This base implementation never modifies anything.
        """

        return False, matrix_element
946 947 #=========================================================================== 948 # Helper functions 949 #===========================================================================
950 - def get_mg5_info_lines(self):
951 """Return info lines for MG5, suitable to place at beginning of 952 Fortran files""" 953 954 info = misc.get_pkg_info() 955 info_lines = "" 956 if info and info.has_key('version') and info.has_key('date'): 957 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 958 (info['version'], info['date']) 959 info_lines = info_lines + \ 960 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 961 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 962 else: 963 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 964 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 965 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 966 967 return info_lines
968
969 - def get_process_info_lines(self, matrix_element):
970 """Return info lines describing the processes for this matrix element""" 971 972 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 973 for process in matrix_element.get('processes')])
974 975
976 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
977 """Return the Helicity matrix definition lines for this matrix element""" 978 979 helicity_line_list = [] 980 i = 0 981 for helicities in matrix_element.get_helicity_matrix(): 982 i = i + 1 983 int_list = [i, len(helicities)] 984 int_list.extend(helicities) 985 helicity_line_list.append(\ 986 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 987 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 988 989 return "\n".join(helicity_line_list)
990
991 - def get_ic_line(self, matrix_element):
992 """Return the IC definition line coming after helicities, required by 993 switchmom in madevent""" 994 995 nexternal = matrix_element.get_nexternal_ninitial()[0] 996 int_list = range(1, nexternal + 1) 997 998 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 999 ",".join([str(i) for \ 1000 i in int_list]))
1001
1002 - def set_chosen_SO_index(self, process, squared_orders):
1003 """ From the squared order constraints set by the user, this function 1004 finds what indices of the squared_orders list the user intends to pick. 1005 It returns this as a string of comma-separated successive '.true.' or 1006 '.false.' for each index.""" 1007 1008 user_squared_orders = process.get('squared_orders') 1009 split_orders = process.get('split_orders') 1010 1011 if len(user_squared_orders)==0: 1012 return ','.join(['.true.']*len(squared_orders)) 1013 1014 res = [] 1015 for sqsos in squared_orders: 1016 is_a_match = True 1017 for user_sqso, value in user_squared_orders.items(): 1018 if (process.get_squared_order_type(user_sqso) =='==' and \ 1019 value!=sqsos[split_orders.index(user_sqso)]) or \ 1020 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1021 value<sqsos[split_orders.index(user_sqso)]) or \ 1022 (process.get_squared_order_type(user_sqso) == '>' and \ 1023 value>=sqsos[split_orders.index(user_sqso)]): 1024 is_a_match = False 1025 break 1026 res.append('.true.' if is_a_match else '.false.') 1027 1028 return ','.join(res)
1029
1030 - def get_split_orders_lines(self, orders, array_name, n=5):
1031 """ Return the split orders definition as defined in the list orders and 1032 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1033 1034 ret_list = [] 1035 for index, order in enumerate(orders): 1036 for k in xrange(0, len(order), n): 1037 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1038 (array_name,index + 1, k + 1, min(k + n, len(order)), 1039 ','.join(["%5r" % i for i in order[k:k + n]]))) 1040 return ret_list
1041
1042 - def format_integer_list(self, list, name, n=5):
1043 """ Return an initialization of the python list in argument following 1044 the fortran syntax using the data keyword assignment, filling an array 1045 of name 'name'. It splits rows in chunks of size n.""" 1046 1047 ret_list = [] 1048 for k in xrange(0, len(list), n): 1049 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1050 (name, k + 1, min(k + n, len(list)), 1051 ','.join(["%5r" % i for i in list[k:k + n]]))) 1052 return ret_list
1053
    def get_color_data_lines(self, matrix_element, n=6):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        For each row of the color matrix, emits one Denom DATA statement
        (common denominator), the CF numerator DATA statements, and a Fortran
        comment showing the corresponding color-basis element.
        """

        if not matrix_element.get('color_matrix'):
            # Color-less process: a single trivial 1x1 color matrix.
            return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
        else:
            ret_list = []
            my_cs = color.ColorString()
            for index, denominator in \
                enumerate(matrix_element.get('color_matrix').\
                                             get_line_denominators()):
                # First write the common denominator for this color matrix line
                ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                             get_line_numerators(index, denominator)

                for k in xrange(0, len(num_list), n):
                    ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                    (index + 1, k + 1, min(k + n, len(num_list)),
                                     ','.join(["%5r" % i for i in num_list[k:k + n]])))
                # Document which color structure this row corresponds to;
                # rows follow the sorted order of the color-basis keys.
                my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
                ret_list.append("C %s" % repr(my_cs))
            return ret_list
1079 1080
1081 - def get_den_factor_line(self, matrix_element):
1082 """Return the denominator factor line for this matrix element""" 1083 1084 return "DATA IDEN/%2r/" % \ 1085 matrix_element.get_denominator_factor()
1086
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        Only leading-color JAMPs (those reaching the maximal Nc power,
        excluding any Nc power coming from a loop color trace) are marked
        .true. for the configs they contribute to.
        """

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Create dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # NOTE(review): relies on `ijamp` surviving the loop above, i.e. the
        # color basis being non-empty; and on every non-zero entry of
        # mapconfigs having at least one leading-color JAMP (otherwise the
        # diag_jamp lookup below would raise KeyError) — presumably
        # guaranteed by construction upstream.
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                            (iconfig+1, num_matrix_element, colamps,
                             ','.join(["%s" % booldict[b] for b in \
                                       bool_list])))

        return ret_list
1145
    def get_amp2_lines(self, matrix_element, config_map = []):
        """Return the amp2(i) = sum(amp for diag(i))^2 lines.

        If config_map is given, amplitudes from diagrams sharing the same
        config are summed coherently into the AMP2 slot of the first diagram
        of the config; otherwise one AMP2 line is produced per diagram
        (skipping diagrams with more than the minimal number of vertex legs).

        NOTE: the mutable default for config_map is never mutated here.
        """

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        # Get minimum legs in a vertex
        vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                     matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        ret_lines = []
        if config_map:
            # In this case, we need to sum up all amplitudes that have
            # identical topologies, as given by the config_map (which
            # gives the topology/config for each of the diagrams
            diagrams = matrix_element.get('diagrams')
            # Combine the diagrams with identical topologies
            config_to_diag_dict = {}
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                if config_map[idiag] == 0:
                    continue
                try:
                    config_to_diag_dict[config_map[idiag]].append(idiag)
                except KeyError:
                    config_to_diag_dict[config_map[idiag]] = [idiag]
            # Write out the AMP2s summing squares of amplitudes belonging
            # to either the same diagram or different diagrams with
            # identical propagator properties. Note that we need to use
            # AMP2 number corresponding to the first diagram number used
            # for that AMP2.
            for config in sorted(config_to_diag_dict.keys()):

                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                       {"num": (config_to_diag_dict[config][0] + 1)}

                amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                                sum([diagrams[idiag].get('amplitudes') for \
                                     idiag in config_to_diag_dict[config]], [])])

                # Not using \sum |M|^2 anymore since this creates troubles
                # when ckm is not diagonal due to the JIM mechanism.
                if '+' in amp:
                    line += "(%s)*dconjg(%s)" % (amp, amp)
                else:
                    line += "%s*dconjg(%s)" % (amp, amp)
                ret_lines.append(line)
        else:
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # Ignore any diagrams with 4-particle vertices.
                if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                    continue
                # Now write out the expression for AMP2, meaning the sum of
                # squared amplitudes belonging to the same diagram
                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
                line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                                  {"num": a.get('number')} for a in \
                                  diag.get('amplitudes')])
                ret_lines.append(line)

        return ret_lines
    #===========================================================================
    # Returns the data statements initializing the coefficients for the JAMP
    # decomposition. It is used when the JAMP initialization is decided to be
    # done through big arrays containing the projection coefficients.
    #===========================================================================
    def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                       n=50, Nc_value=3):
        """This functions return the lines defining the DATA statement setting
        the coefficients building the JAMPS out of the AMPS. Split rows in
        bunches of size n.
        One can specify the color_basis from which the color amplitudes originates
        so that there are commentaries telling what color structure each JAMP
        corresponds to.

        Each coefficient entry has the form ((ff_number, frac, is_imaginary,
        Nc_power), amp_number); the numeric coefficient is reduced over a
        common denominator per JAMP and amplitude numbers are negated when
        the coefficient is imaginary.
        """

        if(not isinstance(color_amplitudes,list) or
           not (color_amplitudes and isinstance(color_amplitudes[0],list))):
                raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

        res_list = []
        my_cs = color.ColorString()
        for index, coeff_list in enumerate(color_amplitudes):
            # Create the list of the complete numerical coefficient.
            coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                        (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                        coefficient in coeff_list]
            # Create the list of the numbers of the contributing amplitudes.
            # Multiply by -1 for those which have an imaginary coefficient.
            ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                             for coefficient in coeff_list]
            # Find the common denominator.
            commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
            num_list=[(coefficient*commondenom).numerator \
                      for coefficient in coefs_list]
            res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                            index+1,len(num_list)))
            res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                            index+1,commondenom))
            if color_basis:
                # Comment line documenting the color structure of this JAMP.
                my_cs.from_immutable(sorted(color_basis.keys())[index])
                res_list.append("C %s" % repr(my_cs))
            for k in xrange(0, len(num_list), n):
                res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join(["%6r" % i for i in num_list[k:k + n]])))
                res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
            pass
        return res_list
1255 1256
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
          split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code.

        Raises MadGraph5Error for malformed col_amps, split_order_amps or
        split_order_names arguments.
        """

        # Let the user call get_JAMP_lines_split_order directly from a
        # MatrixElement or from the color amplitudes lists.
        error_msg="Malformed '%s' argument passed to the "+\
                 "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, error_msg%'col_amps'
        else:
            raise MadGraph5Error, error_msg%'col_amps'

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error, error_msg%'split_order_amps'
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error, error_msg%'split_order_amps'
        else:
            raise MadGraph5Error, error_msg%'split_order_amps'

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error, error_msg%'split_order_names'
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error, error_msg%'split_order_names'
            else:
                raise MadGraph5Error, error_msg%'split_order_names'

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        for i, amp_order in enumerate(split_order_amps):
            # Keep, for each JAMP, only the amplitudes belonging to this
            # contributing order group.
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append(filter(lambda col_amp:
                                             col_amp[1] in amp_order[1],jamp))
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                              ['%s=%i'%order for order in zip(split_order_names,
                                                              amp_order[0])]))
            # matchbox output additionally needs the leading-color JAMPs.
            if self.opt['export_format'] in ['madloop_matchbox']:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                   JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                   JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
            else:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                   JAMP_format="JAMP(%s,{0})".format(str(i+1))))

        return res_list
1331 1332
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes
        dictionary.
        The split argument defines how the JAMP lines should be split in order
        not to be too long: split<=0 means no splitting, otherwise each
        emitted line carries at most `split` coefficients and accumulates
        onto the previous JAMP value.

        Raises MadGraph5Error for a malformed col_amps argument.
        """

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation lines start with JAMP(i)=JAMP(i)+... so the
                # partial sums accumulate.
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    if common_factor:
                        # The common fraction was factored out above, so only
                        # the sign of the fraction is kept here.
                        res = (res + "%s" + AMP_format) % \
                                                   (self.coeff(coefficient[0],
                                                   coefficient[1] / abs(coefficient[1]),
                                                   coefficient[2],
                                                   coefficient[3]),
                                                   str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                   coefficient[1],
                                                   coefficient[2],
                                                   coefficient[3]),
                                                   str(amp_number))

                if common_factor:
                    res = res + ')'

                res_list.append(res)

        return res_list
1409
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns a 3-tuple of Fortran code fragments (each without trailing
        newline): variable declarations, DATA initializations, and the PDF
        evaluation / per-process weight lines. For decays (ninitial == 1)
        the PDF weights are trivially 1.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDFs, every subprocess gets unit weight.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            # Sanitize particle names into valid Fortran identifiers.
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in \
                                                 initial_states[i]]) + \
                                                 "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in initial_states[i]]) + \
                                       "/%d*1D0/" % len(initial_states[i]) + \
                                       "\n"

            # Get PDF lines for all different initial states
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                 % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                 % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in pdf_codes.keys():
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, i + 1, pdgtopdf[initial_state],
                                          i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, i + 1, pdgtopdf[initial_state],
                                          i + 1,
                                          i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in pdf_codes.keys():
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1516 1517 #=========================================================================== 1518 # write_props_file 1519 #===========================================================================
    def write_props_file(self, writer, matrix_element, s_and_t_channels):
        """Write the props.inc file for MadEvent. Needs input from
        write_configs_file.

        For every propagator of every config, emits prmass, prwidth and pow
        (propagator power: 1 for fermions, 2 for bosons; 0 for the fake
        propagators of multiparticle vertices). Returns True on success.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        for iconf, configs in enumerate(s_and_t_channels):
            # configs[0] = s-channel vertices, configs[1] = t-channel
            # vertices (last t-channel entry is skipped: no propagator).
            for vertex in configs[0] + configs[1][:-1]:
                leg = vertex.get('legs')[-1]
                if leg.get('id') not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    particle = particle_dict[leg.get('id')]
                    # Get mass
                    if particle.get('mass').lower() == 'zero':
                        mass = particle.get('mass')
                    else:
                        mass = "abs(%s)" % particle.get('mass')
                    # Get width
                    if particle.get('width').lower() == 'zero':
                        width = particle.get('width')
                    else:
                        width = "abs(%s)" % particle.get('width')

                    pow_part = 1 + int(particle.is_boson())

                lines.append("prmass(%d,%d)  = %s" % \
                             (leg.get('number'), iconf + 1, mass))
                lines.append("prwidth(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, width))
                lines.append("pow(%d,%d) = %d" % \
                             (leg.get('number'), iconf + 1, pow_part))

        # Write the file
        writer.writelines(lines)

        return True
1563 1564 #=========================================================================== 1565 # write_configs_file 1566 #===========================================================================
1567 - def write_configs_file(self, writer, matrix_element):
1568 """Write the configs.inc file for MadEvent""" 1569 1570 # Extract number of external particles 1571 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1572 1573 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1574 mapconfigs = [c[0] for c in configs] 1575 model = matrix_element.get('processes')[0].get('model') 1576 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1577 [[c[1]] for c in configs], 1578 mapconfigs, 1579 nexternal, ninitial, 1580 model)
1581 1582 #=========================================================================== 1583 # write_configs_file_from_diagrams 1584 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channel, t-channel) vertices per config,
        later consumed by write_props_file.
        """

        lines = []

        s_and_t_channels = []

        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used to label propagators of shrunk loops /
        # multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # s-channel: one PDG per subprocess (0 when the
                    # subprocess has no corresponding diagram).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1703 1704 #=========================================================================== 1705 # Global helper methods 1706 #=========================================================================== 1707
1708 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1709 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1710 1711 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1712 1713 if total_coeff == 1: 1714 if is_imaginary: 1715 return '+imag1*' 1716 else: 1717 return '+' 1718 elif total_coeff == -1: 1719 if is_imaginary: 1720 return '-imag1*' 1721 else: 1722 return '-' 1723 1724 res_str = '%+iD0' % total_coeff.numerator 1725 1726 if total_coeff.denominator != 1: 1727 # Check if total_coeff is an integer 1728 res_str = res_str + '/%iD0' % total_coeff.denominator 1729 1730 if is_imaginary: 1731 res_str = res_str + '*imag1' 1732 1733 return res_str + '*'
1734 1735
1736 - def set_fortran_compiler(self, default_compiler, force=False):
1737 """Set compiler based on what's available on the system""" 1738 1739 # Check for compiler 1740 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1741 f77_compiler = default_compiler['fortran'] 1742 elif misc.which('gfortran'): 1743 f77_compiler = 'gfortran' 1744 elif misc.which('g77'): 1745 f77_compiler = 'g77' 1746 elif misc.which('f77'): 1747 f77_compiler = 'f77' 1748 elif default_compiler['fortran']: 1749 logger.warning('No Fortran Compiler detected! Please install one') 1750 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1751 else: 1752 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1753 logger.info('Use Fortran compiler ' + f77_compiler) 1754 1755 1756 # Check for compiler. 1. set default. 1757 if default_compiler['f2py']: 1758 f2py_compiler = default_compiler['f2py'] 1759 else: 1760 f2py_compiler = '' 1761 # Try to find the correct one. 1762 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1763 f2py_compiler = default_compiler['f2py'] 1764 elif misc.which('f2py'): 1765 f2py_compiler = 'f2py' 1766 elif sys.version_info[1] == 6: 1767 if misc.which('f2py-2.6'): 1768 f2py_compiler = 'f2py-2.6' 1769 elif misc.which('f2py2.6'): 1770 f2py_compiler = 'f2py2.6' 1771 elif sys.version_info[1] == 7: 1772 if misc.which('f2py-2.7'): 1773 f2py_compiler = 'f2py-2.7' 1774 elif misc.which('f2py2.7'): 1775 f2py_compiler = 'f2py2.7' 1776 1777 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1778 1779 1780 self.replace_make_opt_f_compiler(to_replace) 1781 # Replace also for Template but not for cluster 1782 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1783 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1784 1785 return f77_compiler
    # Backward-compatibility alias: older callers use set_compiler() instead
    # of set_fortran_compiler().
    set_compiler = set_fortran_compiler
1791 - def set_cpp_compiler(self, default_compiler, force=False):
1792 """Set compiler based on what's available on the system""" 1793 1794 # Check for compiler 1795 if default_compiler and misc.which(default_compiler): 1796 compiler = default_compiler 1797 elif misc.which('g++'): 1798 #check if clang version 1799 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1800 stderr=subprocess.PIPE) 1801 out, _ = p.communicate() 1802 if 'clang' in out and misc.which('clang'): 1803 compiler = 'clang' 1804 else: 1805 compiler = 'g++' 1806 elif misc.which('c++'): 1807 compiler = 'c++' 1808 elif misc.which('clang'): 1809 compiler = 'clang' 1810 elif default_compiler: 1811 logger.warning('No c++ Compiler detected! Please install one') 1812 compiler = default_compiler # maybe misc fail so try with it 1813 else: 1814 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1815 logger.info('Use c++ compiler ' + compiler) 1816 self.replace_make_opt_c_compiler(compiler) 1817 # Replace also for Template but not for cluster 1818 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1819 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1820 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1821 1822 return compiler
1823 1824
1825 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1826 """Set FC=compiler in Source/make_opts""" 1827 1828 assert isinstance(compilers, dict) 1829 1830 mod = False #avoid to rewrite the file if not needed 1831 if not root_dir: 1832 root_dir = self.dir_path 1833 1834 compiler= compilers['fortran'] 1835 f2py_compiler = compilers['f2py'] 1836 if not f2py_compiler: 1837 f2py_compiler = 'f2py' 1838 for_update= {'DEFAULT_F_COMPILER':compiler, 1839 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1840 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1841 1842 try: 1843 common_run_interface.CommonRunCmd.update_make_opts_full( 1844 make_opts, for_update) 1845 except IOError: 1846 if root_dir == self.dir_path: 1847 logger.info('Fail to set compiler. Trying to continue anyway.')
1848
1849 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1850 """Set CXX=compiler in Source/make_opts. 1851 The version is also checked, in order to set some extra flags 1852 if the compiler is clang (on MACOS)""" 1853 1854 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1855 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1856 1857 1858 # list of the variable to set in the make_opts file 1859 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1860 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1861 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1862 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1863 } 1864 1865 # for MOJAVE remove the MACFLAG: 1866 if is_clang: 1867 import platform 1868 version, _, _ = platform.mac_ver() 1869 if not version:# not linux 1870 version = 14 # set version to remove MACFLAG 1871 else: 1872 version = int(version.split('.')[1]) 1873 if version >= 14: 1874 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1875 1876 if not root_dir: 1877 root_dir = self.dir_path 1878 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1879 1880 try: 1881 common_run_interface.CommonRunCmd.update_make_opts_full( 1882 make_opts, for_update) 1883 except IOError: 1884 if root_dir == self.dir_path: 1885 logger.info('Fail to set compiler. Trying to continue anyway.') 1886 1887 return
1888
#===============================================================================
# ProcessExporterFortranSA
#===============================================================================
class ProcessExporterFortranSA(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 StandAlone format."""

    # Fortran template used to generate matrix.f; subclasses (e.g. MatchBox)
    # override this with their own template file.
    matrix_template = "matrix_standalone_v4.inc"
1898 - def __init__(self, *args,**opts):
1899 """add the format information compare to standard init""" 1900 1901 if 'format' in opts: 1902 self.format = opts['format'] 1903 del opts['format'] 1904 else: 1905 self.format = 'standalone' 1906 1907 self.prefix_info = {} 1908 ProcessExporterFortran.__init__(self, *args, **opts)
1909
1910 - def copy_template(self, model):
1911 """Additional actions needed for setup of Template 1912 """ 1913 1914 #First copy the full template tree if dir_path doesn't exit 1915 if os.path.isdir(self.dir_path): 1916 return 1917 1918 logger.info('initialize a new standalone directory: %s' % \ 1919 os.path.basename(self.dir_path)) 1920 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1921 1922 # Create the directory structure 1923 os.mkdir(self.dir_path) 1924 os.mkdir(pjoin(self.dir_path, 'Source')) 1925 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1926 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1927 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1928 os.mkdir(pjoin(self.dir_path, 'bin')) 1929 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1930 os.mkdir(pjoin(self.dir_path, 'lib')) 1931 os.mkdir(pjoin(self.dir_path, 'Cards')) 1932 1933 # Information at top-level 1934 #Write version info 1935 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1936 try: 1937 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1938 except IOError: 1939 MG5_version = misc.get_pkg_info() 1940 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1941 "5." + MG5_version['version']) 1942 1943 1944 # Add file in SubProcesses 1945 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1946 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 1947 1948 if self.format == 'standalone': 1949 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1950 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1951 1952 # Add file in Source 1953 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1954 pjoin(self.dir_path, 'Source')) 1955 # add the makefile 1956 filename = pjoin(self.dir_path,'Source','makefile') 1957 self.write_source_makefile(writers.FileWriter(filename))
1958 1959 #=========================================================================== 1960 # export model files 1961 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model"""

        super(ProcessExporterFortranSA, self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text = """
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        # Appended (mode 'a') to the UFO-generated couplings.f.
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a')
        ff.write(text)
        ff.close()

        # Patch check_sa.f so that setpara is called with the 'read the
        # param_card' flag set to .true. on the first call.
        text = open(pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
1985 1986 #=========================================================================== 1987 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1988 #===========================================================================
1989 - def write_procdef_mg5(self, file_pos, modelname, process_str):
1990 """ write an equivalent of the MG4 proc_card in order that all the Madevent 1991 Perl script of MadEvent4 are still working properly for pure MG5 run. 1992 Not needed for StandAlone so just return 1993 """ 1994 1995 return
1996 1997 1998 #=========================================================================== 1999 # Make the Helas and Model directories for Standalone directory 2000 #===========================================================================
2001 - def make(self):
2002 """Run make in the DHELAS and MODEL directories, to set up 2003 everything for running standalone 2004 """ 2005 2006 source_dir = pjoin(self.dir_path, "Source") 2007 logger.info("Running make for Helas") 2008 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2009 logger.info("Running make for Model") 2010 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2011 2012 #=========================================================================== 2013 # Create proc_card_mg5.dat for Standalone directory 2014 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
           generation proc_card_mg5.dat
           generate a global makefile
        """

        # Collect the compiler choices from the MG5 options dictionary.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
        # Touch empty __init__.py files so the output tree is importable
        # as a Python package (the handles are dropped immediately).
        open(pjoin(self.dir_path, '__init__.py'), 'w')
        open(pjoin(self.dir_path, 'SubProcesses', '__init__.py'), 'w')

        if 'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        # prefix_info is filled when subprocesses were generated with a
        # prefix: in that case expose everything through f2py.
        if self.prefix_info:
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                                     pjoin(self.dir_path, 'SubProcesses', 'check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            for Pdir in os.listdir(pjoin(self.dir_path, 'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n' % {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text += 'all: %s\n\techo \'done\'' % deppython

            # Appended (mode 'a') to the makefile copied from the template.
            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'), 'a')
            ff.write(text)
            ff.close()
2064
2065 - def write_f2py_splitter(self):
2066 """write a function to call the correct matrix element""" 2067 2068 template = """ 2069 %(python_information)s 2070 subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS) 2071 IMPLICIT NONE 2072 2073 CF2PY double precision, intent(in), dimension(0:3,npdg) :: p 2074 CF2PY integer, intent(in), dimension(npdg) :: pdgs 2075 CF2PY integer, intent(in) :: npdg 2076 CF2PY double precision, intent(out) :: ANS 2077 CF2PY double precision, intent(in) :: ALPHAS 2078 CF2PY double precision, intent(in) :: SCALE2 2079 integer pdgs(*) 2080 integer npdg, nhel 2081 double precision p(*) 2082 double precision ANS, ALPHAS, PI,SCALE2 2083 include 'coupl.inc' 2084 2085 PI = 3.141592653589793D0 2086 G = 2* DSQRT(ALPHAS*PI) 2087 CALL UPDATE_AS_PARAM() 2088 if (scale2.ne.0d0) stop 1 2089 2090 %(smatrixhel)s 2091 2092 return 2093 end 2094 2095 SUBROUTINE INITIALISE(PATH) 2096 C ROUTINE FOR F2PY to read the benchmark point. 2097 IMPLICIT NONE 2098 CHARACTER*512 PATH 2099 CF2PY INTENT(IN) :: PATH 2100 CALL SETPARA(PATH) !first call to setup the paramaters 2101 RETURN 2102 END 2103 2104 subroutine get_pdg_order(PDG) 2105 IMPLICIT NONE 2106 CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i) 2107 INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i) 2108 DATA PDGS/ %(pdgs)s / 2109 PDG = PDGS 2110 RETURN 2111 END 2112 2113 subroutine get_prefix(PREFIX) 2114 IMPLICIT NONE 2115 CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i) 2116 character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i) 2117 DATA PREF / '%(prefix)s'/ 2118 PREFIX = PREF 2119 RETURN 2120 END 2121 2122 2123 """ 2124 2125 allids = self.prefix_info.keys() 2126 allprefix = [self.prefix_info[key][0] for key in allids] 2127 min_nexternal = min([len(ids) for ids in allids]) 2128 max_nexternal = max([len(ids) for ids in allids]) 2129 2130 info = [] 2131 for key, (prefix, tag) in self.prefix_info.items(): 2132 info.append('#PY %s : %s # %s' % (tag, key, prefix)) 2133 2134 2135 text = [] 2136 for n_ext in 
range(min_nexternal, max_nexternal+1): 2137 current = [ids for ids in allids if len(ids)==n_ext] 2138 if not current: 2139 continue 2140 if min_nexternal != max_nexternal: 2141 if n_ext == min_nexternal: 2142 text.append(' if (npdg.eq.%i)then' % n_ext) 2143 else: 2144 text.append(' else if (npdg.eq.%i)then' % n_ext) 2145 for ii,pdgs in enumerate(current): 2146 condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)]) 2147 if ii==0: 2148 text.append( ' if(%s) then ! %i' % (condition, i)) 2149 else: 2150 text.append( ' else if(%s) then ! %i' % (condition,i)) 2151 text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0]) 2152 text.append(' endif') 2153 #close the function 2154 if min_nexternal != max_nexternal: 2155 text.append('endif') 2156 2157 formatting = {'python_information':'\n'.join(info), 2158 'smatrixhel': '\n'.join(text), 2159 'maxpart': max_nexternal, 2160 'nb_me': len(allids), 2161 'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0' 2162 for i in range(max_nexternal) for pdg in allids), 2163 'prefix':'\',\''.join(allprefix) 2164 } 2165 formatting['lenprefix'] = len(formatting['prefix']) 2166 text = template % formatting 2167 fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w') 2168 fsock.writelines(text) 2169 fsock.close()
2170
2171 - def write_f2py_check_sa(self, matrix_element, writer):
2172 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2173 # To be implemented. It is just an example file, i.e. not crucial. 2174 return
2175
2176 - def write_f2py_makefile(self):
2177 """ """ 2178 # Add file in SubProcesses 2179 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2180 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2181
2182 - def create_MA5_cards(self,*args,**opts):
2183 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2184 pass
2185
    def compiler_choice(self, compiler):
        """ Different daughter classes might want different compilers.
        So this function is meant to be overloaded if desired."""

        # Delegates to set_compiler (alias of set_fortran_compiler); 'compiler'
        # is the {'fortran', 'cpp', 'f2py'} dict assembled in finalize().
        self.set_compiler(compiler)
2191 2192 #=========================================================================== 2193 # generate_subprocess_directory 2194 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.
        Returns the number of generated helas calls (0 when the directory is
        skipped because a mirror-symmetric one already exists)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses',
                        "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if a directory for any permutation of
            # the final-state legs (with initial legs possibly swapped)
            # already exists, skip this subprocess entirely.
            for i, proc in enumerate(matrix_element.get('processes')):

                tag = proc.get_tag()
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                # Put an initial-state leg first ('state' False = initial).
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                flegs = proc.get('legs')[2:]
                for perm in itertools.permutations(flegs):
                    for i, p in enumerate(perm):
                        proc.get('legs')[i + 2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses',
                                     "P%s" % proc.shell_string())
                    # restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        # A symmetric directory already exists: restore the
                        # legs fully and bail out without generating anything.
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional per-process routine prefix (from the --prefix option).
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_', 1)[1]
            else:
                raise Exception, '--prefix options supports only \'int\' and \'proc\''
            # Record pdg-ids -> (prefix, tag) so the f2py splitter can
            # dispatch to the right smatrixhel at run time.
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        # MadSpin production-mode extras.
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath, 'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath, 'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  s_and_t_channels)

            filename = pjoin(dirpath, 'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                                         nexternal, ninitial)

        # MadSpin full-mode extras.
        if self.opt['export_format'] == 'standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb = matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                                      ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        # Generate diagrams (unless explicitly disabled via output options)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " +
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # With a prefix, check_sa.f cannot simply be linked: every 'smatrix'
        # reference has to be rewritten with the prefix.
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            pat = re.compile('smatrix', re.I)
            new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
            with open(pjoin(dirpath, 'check_sa.f'), 'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2334 2335 2336 #=========================================================================== 2337 # write_source_makefile 2338 #===========================================================================
2339 - def write_source_makefile(self, writer):
2340 """Write the nexternal.inc file for MG4""" 2341 2342 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2343 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2344 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2345 2346 replace_dict= {'libraries': set_of_lib, 2347 'model':model_line, 2348 'additional_dsample': '', 2349 'additional_dependencies':''} 2350 2351 text = open(path).read() % replace_dict 2352 2353 if writer: 2354 writer.write(text) 2355 2356 return replace_dict
2357 2358 #=========================================================================== 2359 # write_matrix_element_v4 2360 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format
        if write is on False, just return the replace_dict and not write anything."""


        # Nothing to export for an empty matrix element.
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(
                    "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        # Default sa_symmetry when the option was never set.
        if not self.opt.has_key('sa_symmetry'):
            self.opt['sa_symmetry'] = False


        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable': '', 'amp2_lines': '',
                        'proc_prefix': proc_prefix, 'proc_id': ''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(
            matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 so array declarations stay valid)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] = \
            matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format'] == 'standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [])
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                "      Double Precision amp2(NGRAPHS)\n      common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders = matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders) == 0:
            replace_dict['nSplitOrders'] = ''
            # Extract JAMP lines
            jamp_lines = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,), ]
            amp_orders = [((1,), tuple(range(1, ngraphs + 1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders'] = 1
            replace_dict['split_order_str_list'] = ''
        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders'] = len(amp_orders)
            replace_dict['nSqAmpSplitOrders'] = len(squared_orders)
            replace_dict['nSplitOrders'] = len(split_orders)
            replace_dict['split_order_str_list'] = str(split_orders)
            amp_so = self.get_split_orders_lines(
                [amp_order[0] for amp_order in amp_orders], 'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders, 'SQSPLITORDERS')
            replace_dict['ampsplitorders'] = '\n'.join(amp_so)
            replace_dict['sqsplitorders'] = '\n'.join(sqamp_so)
            jamp_lines = self.get_JAMP_lines_split_order(
                matrix_element, amp_orders, split_order_names=split_orders)

            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs'] = self.set_chosen_SO_index(
                matrix_element.get('processes')[0], squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            check_sa_writer = writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders, split_orders,
                                            nexternal, ninitial, proc_prefix, check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)""" % replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Select which Fortran template to fill.
        matrix_template = self.matrix_template
        if self.opt['export_format'] == 'standalone_msP':
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format'] == 'standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format'] == 'matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders) > 0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not " +
                             " available for individual ME evaluation of given coupl. orders." +
                             " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path,
                                               'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders) > 0:
                content = '\n' + open(replace_dict['template_file2'])\
                    .read() % replace_dict
                writer.writelines(content)
            # Count only real helas calls (lines starting with '#' are comments).
            return len(filter(lambda call: call.find('#') != 0, helas_calls))
        else:
            replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls))
            return replace_dict  # for subclass update
2531
    def write_check_sa_splitOrders(self, squared_orders, split_orders, nexternal,
                                   nincoming, proc_prefix, writer):
        """ Write out a more advanced version of the check_sa drivers that
        individually returns the matrix element for each contributing squared
        order.

        squared_orders: list of tuples of exponents, one entry per contributing
            squared-order combination (parallel to split_orders).
        split_orders: list of coupling-order names (e.g. ['QCD','QED']).
        nexternal/nincoming: number of external / incoming particles, passed
            straight into the template substitution.
        proc_prefix: prefix used to namespace the generated Fortran symbols.
        writer: a FortranWriter; if None, the substitution dictionary is
            returned instead of writing the file (hook for subclasses).
        """

        # Template shipped with the MG5 installation (self.mgme_dir).
        check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \
                             'template_files', 'check_sa_splitOrders.f')).read()
        printout_sq_orders = []
        for i, squared_order in enumerate(squared_orders):
            # Render one 'NAME=power' token per split order, e.g. 'QCD=2 QED=0'.
            sq_orders = []
            for j, sqo in enumerate(squared_order):
                sq_orders.append('%s=%d' % (split_orders[j], sqo))
            # One Fortran write statement per squared-order combination;
            # MATELEMS is 1-indexed, hence i+1.
            printout_sq_orders.append(\
                "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\
                % (i+1, ' '.join(sq_orders), i+1))
        # Rebind from list to the final newline-joined string.
        printout_sq_orders = '\n'.join(printout_sq_orders)
        replace_dict = {'printout_sqorders': printout_sq_orders,
                        'nSplitOrders': len(squared_orders),
                        'nexternal': nexternal,
                        'nincoming': nincoming,
                        'proc_prefix': proc_prefix}

        if writer:
            writer.writelines(check_sa_content % replace_dict)
        else:
            # No writer: hand back the substitutions for subclass reuse.
            return replace_dict
2559
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # Default exporter options: Matchbox output never cleans the directory,
    # uses no complex-mass scheme / multiple precision, and keeps the
    # standalone symmetry treatment.
    default_opt = {'clean': False, 'complex_mass': False,
                   'export_format': 'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Emits a Fortran if/elseif chain mapping (in1, in2) index pairs to the
        flattened color-string arguments; returns 'out = 1' when the matrix
        element carries no color matrix.
        """

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                     get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str = repr(my_cs)
            t_match = re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg = []
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne']:
                    # Trivial color factor: contributes no indices.
                    continue
                if ctype not in ['T', 'Tr']:
                    # Matchbox only understands fundamental/trace structures.
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype
                # '0' acts as a separator between consecutive structures.
                tmparg += ['0']
                arg += tmparg
            for j, v in enumerate(arg):
                output[(i_color, j)] = v

            # NOTE(review): dict iteration order is arbitrary in Python 2;
            # correctness relies only on the (in1,in2) guards, not on order.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        # Close the chain with a catch-all; -1 signals "no such entry".
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self, *args, **opts):
        # Matchbox output is compiled externally: no make step needed.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Adding leading color part of the colorflow

        Emits the usual JAMP definitions, then a second set (LNJAMP by
        default) restricted to contributions without 1/NC suppression.
        """

        if not JAMP_formatLC:
            JAMP_formatLC = "LN%s" % JAMP_format

        error_msg = "Malformed '%s' argument passed to the get_JAMP_lines"
        if (isinstance(col_amps, helas_objects.HelasMatrixElement)):
            col_amps = col_amps.get_color_amplitudes()
        elif (isinstance(col_amps, list)):
            if (col_amps and isinstance(col_amps[0], list)):
                col_amps = col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        # Full-color JAMPs from the parent standalone exporter.
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                          JAMP_format=JAMP_format,
                                          AMP_format=AMP_format,
                                          split=-1)

        # Filter the col_ampls to generate only those without any 1/NC terms
        # (coefficient[3] is the 1/NC power; keep only power 0).
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3] == 0:
                    to_add.append((coefficient, amp_number))
            LC_col_amps.append(to_add)

        # Leading-color JAMPs appended under the LC naming scheme.
        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                          JAMP_format=JAMP_formatLC,
                                          AMP_format=AMP_format,
                                          split=-1)

        return text
2670
#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # Fortran template consumed by write_matrix_element_v4 to produce matrix.f
    matrix_file = "matrix_standalone_v4.inc"
    def copy_template(self, model):
        """Additional actions needed for setup of Template

        Extends the generic Fortran template copy with the MadWeight-specific
        pieces: the Template/MadWeight tree, the madweight python package,
        run/setrun files, run_config.inc, the python helpers, the MW cuts.f
        and the Source makefile.
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        # (third argument True: preserve symlinks while copying the trees)
        shutil.copytree(pjoin(MG5DIR, 'Template', 'MadWeight'),
                        pjoin(self.dir_path, 'Source', 'MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR, 'madgraph', 'madweight'),
                        pjoin(self.dir_path, 'bin', 'internal', 'madweight'), True)
        # MadWeight ships its own setrun.f/run.inc which replace the defaults.
        files.mv(pjoin(self.dir_path, 'Source', 'MadWeight', 'src', 'setrun.f'),
                 pjoin(self.dir_path, 'Source', 'setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source', 'MadWeight', 'src', 'run.inc'),
                 pjoin(self.dir_path, 'Source', 'run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path, 'Source', 'run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        try:
            # Rearranges the template for MadWeight use; output is discarded.
            subprocess.call([os.path.join(self.dir_path, 'Source', 'MadWeight', 'bin', 'internal', 'pass_to_madweight')],
                            stdout=os.open(os.devnull, os.O_RDWR),
                            stderr=os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2719 2720 2721 2722 2723 #=========================================================================== 2724 # convert_model 2725 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert the UFO model for this output and mirror a copy of the
        model directory under bin/internal/ufomodel (with the restriction
        card, if any) so the exported process is self-contained.

        wanted_lorentz / wanted_couplings are forwarded untouched to the
        parent implementation.  NOTE(review): mutable defaults — harmless
        here since the lists are never mutated.
        """

        super(ProcessExporterFortranMW, self).convert_model(model,
                                                wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc', '*.dat', '*.py~')
        try:
            # Drop any stale copy from a previous export.
            shutil.rmtree(pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'))
        except OSError as error:
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                        pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal', 'ufomodel',
                             'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
2749 2750 #=========================================================================== 2751 # generate_subprocess_directory 2752 #===========================================================================
2753 - def copy_python_file(self):
2754 """copy the python file require for the Template""" 2755 2756 # madevent interface 2757 cp(_file_path+'/interface/madweight_interface.py', 2758 self.dir_path+'/bin/internal/madweight_interface.py') 2759 cp(_file_path+'/interface/extended_cmd.py', 2760 self.dir_path+'/bin/internal/extended_cmd.py') 2761 cp(_file_path+'/interface/common_run_interface.py', 2762 self.dir_path+'/bin/internal/common_run_interface.py') 2763 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2764 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2765 cp(_file_path+'/iolibs/save_load_object.py', 2766 self.dir_path+'/bin/internal/save_load_object.py') 2767 cp(_file_path+'/madevent/gen_crossxhtml.py', 2768 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2769 cp(_file_path+'/madevent/sum_html.py', 2770 self.dir_path+'/bin/internal/sum_html.py') 2771 cp(_file_path+'/various/FO_analyse_card.py', 2772 self.dir_path+'/bin/internal/FO_analyse_card.py') 2773 cp(_file_path+'/iolibs/file_writers.py', 2774 self.dir_path+'/bin/internal/file_writers.py') 2775 #model file 2776 cp(_file_path+'../models/check_param_card.py', 2777 self.dir_path+'/bin/internal/check_param_card.py') 2778 2779 #madevent file 2780 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2781 cp(_file_path+'/various/lhe_parser.py', 2782 self.dir_path+'/bin/internal/lhe_parser.py') 2783 2784 cp(_file_path+'/various/banner.py', 2785 self.dir_path+'/bin/internal/banner.py') 2786 cp(_file_path+'/various/shower_card.py', 2787 self.dir_path+'/bin/internal/shower_card.py') 2788 cp(_file_path+'/various/cluster.py', 2789 self.dir_path+'/bin/internal/cluster.py') 2790 2791 # logging configuration 2792 cp(_file_path+'/interface/.mg5_logging.conf', 2793 self.dir_path+'/bin/internal/me5_logging.conf') 2794 cp(_file_path+'/interface/coloring_logging.py', 2795 self.dir_path+'/bin/internal/coloring_logging.py')
2796 2797 2798 #=========================================================================== 2799 # Change the version of cuts.f to the one compatible with MW 2800 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None -> write SubProcesses/cuts.f in the export dir;
                 a string -> open and write that path;
                 anything else -> treated as an already-open file object.
        """

        template = open(pjoin(MG5DIR, 'Template', 'LO', 'SubProcesses', 'cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # nb_if tracks the nesting depth of the xqcut-guarded block; lines are
        # only copied through while nb_if == 0.
        # NOTE(review): assumes the 'if(xqcut.gt.0d0' line and each nested
        # 'if(...)then'/'endif' balance back to zero — confirm against the
        # actual Template/LO cuts.f layout.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
        cut_bw = .true.
      else
        stop 1
      endif
      return
      end
""")

        final = text.getvalue()
        #3) remove the call to initcluster:
        # (variable is rebound from the input file handle to the output text)
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)
2850 2851 2852 2853 #=========================================================================== 2854 # Make the Helas and Model directories for Standalone directory 2855 #===========================================================================
2856 - def make(self):
2857 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2858 everything for running madweight 2859 """ 2860 2861 source_dir = os.path.join(self.dir_path, "Source") 2862 logger.info("Running make for Helas") 2863 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2864 logger.info("Running make for Model") 2865 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2866 logger.info("Running make for PDF") 2867 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2868 logger.info("Running make for CERNLIB") 2869 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2870 logger.info("Running make for GENERIC") 2871 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2872 logger.info("Running make for blocks") 2873 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2874 logger.info("Running make for tools") 2875 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2876 2877 #=========================================================================== 2878 # Create proc_card_mg5.dat for MadWeight directory 2879 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat

        Also writes maxparticles.inc (linked into the MadWeight blocks/tools
        dirs), sets the compilers, builds the libraries and delegates the
        common finalization to ProcessExporterFortran.
        """

        # Compiler choices come straight from the mg5 options.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path, 'Source', 'maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # The MadWeight blocks/tools code includes the same file via links.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source', 'MadWeight', 'blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source', 'MadWeight', 'tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
2911 2912 2913 2914 #=========================================================================== 2915 # create the run_card for MW 2916 #===========================================================================
2917 - def create_run_card(self, matrix_elements, history):
2918 """ """ 2919 2920 run_card = banner_mod.RunCard() 2921 2922 # pass to default for MW 2923 run_card["run_tag"] = "\'not_use\'" 2924 run_card["fixed_ren_scale"] = "T" 2925 run_card["fixed_fac_scale"] = "T" 2926 run_card.remove_all_cut() 2927 2928 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2929 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2930 python_template=True) 2931 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2932 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2933 python_template=True)
2934 2935 #=========================================================================== 2936 # export model files 2937 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model

        After the generic export, appends the UFO-style update_as_param
        routine to couplings.f and patches setrun.f / initialization.f so
        their setpara calls use the two-argument form.
        """

        super(ProcessExporterFortranMW, self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text = """
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a')
        ff.write(text)
        ff.close()

        # Modify setrun.f
        text = open(os.path.join(self.dir_path, 'Source', 'setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(os.path.join(self.dir_path, 'Source', 'setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        # Modify initialization.f
        text = open(os.path.join(self.dir_path, 'SubProcesses', 'initialization.f')).read()
        text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)')
        fsock = open(os.path.join(self.dir_path, 'SubProcesses', 'initialization.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
2970 2971 #=========================================================================== 2972 # generate_subprocess_directory 2973 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of helas calls written for matrix.f (0 if none).
        `number` is accepted for interface compatibility and not used here.
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; keep going and reuse it.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath, 'matrix.f')
        calls, ncolor = self.write_matrix_element_v4(
                                    writers.FortranWriter(filename),
                                    matrix_element,
                                    fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t-channel topology reused below.
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams
        # (skipped when output_options carries noeps='True')
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): links are created relative to the original working
        # directory (the chdir above is commented out) — confirm `ln` with
        # starting_dir=cwd resolves into the subprocess directory as intended.
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3081 3082 #=========================================================================== 3083 # write_matrix_element_v4 3084 #===========================================================================
3085 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3086 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3087 3088 if not matrix_element.get('processes') or \ 3089 not matrix_element.get('diagrams'): 3090 return 0 3091 3092 if writer: 3093 if not isinstance(writer, writers.FortranWriter): 3094 raise writers.FortranWriter.FortranWriterError(\ 3095 "writer not FortranWriter") 3096 3097 # Set lowercase/uppercase Fortran code 3098 writers.FortranWriter.downcase = False 3099 3100 replace_dict = {} 3101 3102 # Extract version number and date from VERSION file 3103 info_lines = self.get_mg5_info_lines() 3104 replace_dict['info_lines'] = info_lines 3105 3106 # Extract process info lines 3107 process_lines = self.get_process_info_lines(matrix_element) 3108 replace_dict['process_lines'] = process_lines 3109 3110 # Set proc_id 3111 replace_dict['proc_id'] = proc_id 3112 3113 # Extract number of external particles 3114 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3115 replace_dict['nexternal'] = nexternal 3116 3117 # Extract ncomb 3118 ncomb = matrix_element.get_helicity_combinations() 3119 replace_dict['ncomb'] = ncomb 3120 3121 # Extract helicity lines 3122 helicity_lines = self.get_helicity_lines(matrix_element) 3123 replace_dict['helicity_lines'] = helicity_lines 3124 3125 # Extract overall denominator 3126 # Averaging initial state color, spin, and identical FS particles 3127 den_factor_line = self.get_den_factor_line(matrix_element) 3128 replace_dict['den_factor_line'] = den_factor_line 3129 3130 # Extract ngraphs 3131 ngraphs = matrix_element.get_number_of_amplitudes() 3132 replace_dict['ngraphs'] = ngraphs 3133 3134 # Extract nwavefuncs 3135 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3136 replace_dict['nwavefuncs'] = nwavefuncs 3137 3138 # Extract ncolor 3139 ncolor = max(1, len(matrix_element.get('color_basis'))) 3140 replace_dict['ncolor'] = ncolor 3141 3142 # Extract color data lines 3143 color_data_lines = self.get_color_data_lines(matrix_element) 3144 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3145 3146 # Extract helas calls 3147 helas_calls = fortran_model.get_matrix_element_calls(\ 3148 matrix_element) 3149 3150 replace_dict['helas_calls'] = "\n".join(helas_calls) 3151 3152 # Extract JAMP lines 3153 jamp_lines = self.get_JAMP_lines(matrix_element) 3154 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3155 3156 replace_dict['template_file'] = os.path.join(_file_path, \ 3157 'iolibs/template_files/%s' % self.matrix_file) 3158 replace_dict['template_file2'] = '' 3159 3160 if writer: 3161 file = open(replace_dict['template_file']).read() 3162 file = file % replace_dict 3163 # Write the file 3164 writer.writelines(file) 3165 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3166 else: 3167 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3168 3169 #=========================================================================== 3170 # write_source_makefile 3171 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source makefile for MadWeight from its template,
        substituting the list of libraries to build.

        (Original docstring incorrectly said "nexternal.inc".)
        Always returns True.
        """

        path = os.path.join(_file_path, 'iolibs', 'template_files', 'madweight_makefile_source')
        # Libraries the MadWeight Source build must produce.
        set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)'
        text = open(path).read() % {'libraries': set_of_lib}
        writer.write(text)

        return True
3182
3183 - def write_phasespace_file(self, writer, nb_diag):
3184 """ """ 3185 3186 template = """ include 'maxparticles.inc' 3187 integer max_branches 3188 parameter (max_branches=max_particles-1) 3189 integer max_configs 3190 parameter (max_configs=%(nb_diag)s) 3191 3192 c channel position 3193 integer config_pos,perm_pos 3194 common /to_config/config_pos,perm_pos 3195 3196 """ 3197 3198 writer.write(template % {'nb_diag': nb_diag})
3199 3200 3201 #=========================================================================== 3202 # write_auto_dsig_file 3203 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id=""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format)

        Returns 0 for an empty matrix element; returns the substitution dict
        when no writer is supplied (subclass hook); otherwise writes the file.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        # Decay (1) or scattering (2) only; anything else is unsupported.
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            file = open(os.path.join(_file_path, \
                         'iolibs/template_files/auto_dsig_mw.inc')).read()
            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3273 #=========================================================================== 3274 # write_configs_file 3275 #===========================================================================
3276 - def write_configs_file(self, writer, matrix_element):
3277 """Write the configs.inc file for MadEvent""" 3278 3279 # Extract number of external particles 3280 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3281 3282 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3283 mapconfigs = [c[0] for c in configs] 3284 model = matrix_element.get('processes')[0].get('model') 3285 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3286 [[c[1]] for c in configs], 3287 mapconfigs, 3288 nexternal, ninitial,matrix_element, model)
3289 3290 #=========================================================================== 3291 # write_run_configs_file 3292 #===========================================================================
3293 - def write_run_config_file(self, writer):
3294 """Write the run_configs.inc file for MadWeight""" 3295 3296 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3297 text = open(path).read() % {'chanperjob':'5'} 3298 writer.write(text) 3299 return True
3300 3301 #=========================================================================== 3302 # write_configs_file_from_diagrams 3303 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of [s_channels, t_channels] per emitted config.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Largest vertex order per config (first non-None diagram is
        # representative); used to keep only pure 3-point configs below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers() != []]

        minvert = min(vert_list) if vert_list != [] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fresh PDG code for fake propagators in multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    # Placeholder shared list; resized once lengths are known.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s, t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s, t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s, t in stchannels]):
                empty_verts[:] = [None] * max([len(s) for s, t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s, t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel entry: a tuple across subprocesses; pick the
                    # first non-None representative.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                line = str(last_leg.get('number')) + " " + str(daughters[0]) + " " + str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #     (last_leg.get('number'), nconfigs, len(daughters),
                #      ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id') != 7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else:  # fake propagator used in multiparticle vertices
                        mass = 'zero'
                        width = 'zero'

                line = line + " " + mass + " " + width + " "

                if verts in schannels:
                    # Collect one PDG per subprocess (0 where absent).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line + " S " + str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #     (last_leg.get('number'), nconfigs, nsubprocs,
                    #      ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #     (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line + " T " + str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #     (last_leg.get('number'), nconfigs,
                    #      abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #     (last_leg.get('number'), nconfigs, nsubprocs,
                    #      ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ")  # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3446
3447 3448 3449 #=============================================================================== 3450 # ProcessExporterFortranME 3451 #=============================================================================== 3452 -class ProcessExporterFortranME(ProcessExporterFortran):
3453 """Class to take care of exporting a set of matrix elements to 3454 MadEvent format.""" 3455 3456 matrix_file = "matrix_madevent_v4.inc" 3457 3458 # helper function for customise helas writter 3459 @staticmethod
3460 - def custom_helas_call(call, arg):
3461 if arg['mass'] == '%(M)s,%(W)s,': 3462 arg['mass'] = '%(M)s, fk_%(W)s,' 3463 elif '%(W)s' in arg['mass']: 3464 raise Exception 3465 return call, arg
3466
3467 - def copy_template(self, model):
3468 """Additional actions needed for setup of Template 3469 """ 3470 3471 super(ProcessExporterFortranME, self).copy_template(model) 3472 3473 # File created from Template (Different in some child class) 3474 filename = pjoin(self.dir_path,'Source','run_config.inc') 3475 self.write_run_config_file(writers.FortranWriter(filename)) 3476 3477 # The next file are model dependant (due to SLAH convention) 3478 self.model_name = model.get('name') 3479 # Add the symmetry.f 3480 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3481 self.write_symmetry(writers.FortranWriter(filename)) 3482 # 3483 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3484 self.write_addmothers(writers.FortranWriter(filename)) 3485 # Copy the different python file in the Template 3486 self.copy_python_file()
3487 3488 3489 3490 3491 3492 3493 #=========================================================================== 3494 # generate_subprocess_directory 3495 #===========================================================================
3496 - def copy_python_file(self):
3497 """copy the python file require for the Template""" 3498 3499 # madevent interface 3500 cp(_file_path+'/interface/madevent_interface.py', 3501 self.dir_path+'/bin/internal/madevent_interface.py') 3502 cp(_file_path+'/interface/extended_cmd.py', 3503 self.dir_path+'/bin/internal/extended_cmd.py') 3504 cp(_file_path+'/interface/common_run_interface.py', 3505 self.dir_path+'/bin/internal/common_run_interface.py') 3506 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3507 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3508 cp(_file_path+'/iolibs/save_load_object.py', 3509 self.dir_path+'/bin/internal/save_load_object.py') 3510 cp(_file_path+'/iolibs/file_writers.py', 3511 self.dir_path+'/bin/internal/file_writers.py') 3512 #model file 3513 cp(_file_path+'../models/check_param_card.py', 3514 self.dir_path+'/bin/internal/check_param_card.py') 3515 3516 #copy all the file present in madevent directory 3517 for name in os.listdir(pjoin(_file_path, 'madevent')): 3518 if name not in ['__init__.py'] and name.endswith('.py'): 3519 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3520 3521 #madevent file 3522 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3523 cp(_file_path+'/various/lhe_parser.py', 3524 self.dir_path+'/bin/internal/lhe_parser.py') 3525 cp(_file_path+'/various/banner.py', 3526 self.dir_path+'/bin/internal/banner.py') 3527 cp(_file_path+'/various/histograms.py', 3528 self.dir_path+'/bin/internal/histograms.py') 3529 cp(_file_path+'/various/plot_djrs.py', 3530 self.dir_path+'/bin/internal/plot_djrs.py') 3531 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3532 3533 cp(_file_path+'/various/cluster.py', 3534 self.dir_path+'/bin/internal/cluster.py') 3535 cp(_file_path+'/madevent/combine_runs.py', 3536 self.dir_path+'/bin/internal/combine_runs.py') 3537 # logging configuration 3538 cp(_file_path+'/interface/.mg5_logging.conf', 3539 
self.dir_path+'/bin/internal/me5_logging.conf') 3540 cp(_file_path+'/interface/coloring_logging.py', 3541 self.dir_path+'/bin/internal/coloring_logging.py') 3542 # shower card and FO_analyse_card. 3543 # Although not needed, it is imported by banner.py 3544 cp(_file_path+'/various/shower_card.py', 3545 self.dir_path+'/bin/internal/shower_card.py') 3546 cp(_file_path+'/various/FO_analyse_card.py', 3547 self.dir_path+'/bin/internal/FO_analyse_card.py')
3548 3549
3550 - def convert_model(self, model, wanted_lorentz = [], 3551 wanted_couplings = []):
3552 3553 super(ProcessExporterFortranME,self).convert_model(model, 3554 wanted_lorentz, wanted_couplings) 3555 3556 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3557 try: 3558 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3559 except OSError as error: 3560 pass 3561 model_path = model.get('modelpath') 3562 # This is not safe if there is a '##' or '-' in the path. 3563 shutil.copytree(model_path, 3564 pjoin(self.dir_path,'bin','internal','ufomodel'), 3565 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3566 if hasattr(model, 'restrict_card'): 3567 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3568 'restrict_default.dat') 3569 if isinstance(model.restrict_card, check_param_card.ParamCard): 3570 model.restrict_card.write(out_path) 3571 else: 3572 files.cp(model.restrict_card, out_path)
3573 3574 #=========================================================================== 3575 # export model files 3576 #===========================================================================
    def export_model_files(self, model_path):
        """Export the model dependent files: delegate to the parent class,
        then patch couplings.f with update_as_param(), regenerate
        symmetry.f for a v4 model, adjust setrun.f and create the model
        symbolic links."""

        super(ProcessExporterFortranME,self).export_model_files(model_path)

        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        # Append (mode 'a') the helper routine to the generated couplings.f
        ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Add the symmetry.f (v5=False: this is the v4-model variant)
        filename = pjoin(self.dir_path,'SubProcesses','symmetry.f')
        self.write_symmetry(writers.FortranWriter(filename), v5=False)

        # Modify setrun.f in place so that setpara is called with the
        # extra .true. flag expected by v4 models
        text = open(pjoin(self.dir_path,'Source','setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
3606 3607 #=========================================================================== 3608 # generate_subprocess_directory 3609 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        matrix_element -- the HelasMatrixElement to export
        fortran_model  -- the helas call writer used for matrix.f
        me_number      -- sequential number of this matrix element,
                          used for iproc.dat and the subprocess number

        Returns the number of non-comment helas calls written to matrix.f
        (0 when nothing was written)."""

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        # Cache the model on first use so later exports can reuse it
        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # Directory probably already exists: warn and keep going,
            # existing files will simply be overwritten below
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
               self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t channel structure and QCD power
        # per config, reused by several of the files written below
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        # dname.mg uses a plain FileWriter (not Fortran formatted)
        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams (skipped when output option noeps == 'True')
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(Ppath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)

        # write_matrix_element_v4 may return a false-ish value when nothing
        # was written: normalise to 0 for the caller
        if not calls:
            calls = 0
        return calls
3781 3782 link_Sub_files = ['addmothers.f', 3783 'cluster.f', 3784 'cluster.inc', 3785 'coupl.inc', 3786 'cuts.f', 3787 'cuts.inc', 3788 'genps.f', 3789 'genps.inc', 3790 'idenparts.f', 3791 'initcluster.f', 3792 'makefile', 3793 'message.inc', 3794 'myamp.f', 3795 'reweight.f', 3796 'run.inc', 3797 'maxconfigs.inc', 3798 'maxparticles.inc', 3799 'run_config.inc', 3800 'lhe_event_infos.inc', 3801 'setcuts.f', 3802 'setscales.f', 3803 'sudakov.inc', 3804 'symmetry.f', 3805 'unwgt.f', 3806 'dummy_fct.f' 3807 ] 3808 3822 3823
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize ME v4 directory by creating jpeg diagrams, html
        pages,proc_card_mg5.dat and madevent.tar.gz.

        matrix_elements -- either a container with get('matrix_elements')
            or an iterable of such containers (both shapes handled below
            via the AttributeError fallbacks)
        history    -- command history, written out as proc_card_mg5.dat
        mg5options -- dict of mg5 options (compilers, complex_mass_scheme)
        flaglist   -- list of flags; 'nojpeg' and 'online' are recognised
        """

        # Translate flags into booleans
        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        if 'online' in flaglist:
            online = True
        else:
            online = False

        compiler =  {'fortran': mg5options['fortran_compiler'],
                     'cpp': mg5options['cpp_compiler'],
                     'f2py': mg5options['f2py_compiler']}

        # indicate that the output type is not grouped
        if not isinstance(self, ProcessExporterFortranMEGroup):
            self.proc_characteristic['grouped_matrix'] = False

        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        # set limitation linked to the model

        # indicate the PDG of all initial particle
        # (try block: matrix_elements is a list of containers;
        #  except block: it is a single container)
        try:
            pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        except AttributeError:
            pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        self.proc_characteristic['pdg_initial1'] = pdgs1
        self.proc_characteristic['pdg_initial2'] = pdgs2

        # mssm models need their param_card converted to mg5 conventions
        modelname = self.opt['model']
        if modelname == 'mssm' or modelname.startswith('mssm-'):
            param_card = pjoin(self.dir_path, 'Cards','param_card.dat')
            mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
            check_param_card.convert_to_mg5card(param_card, mg5_param)
            check_param_card.check_valid_param_card(mg5_param)

        # Add the combine_events.f modify param_card path/number of @X
        filename = pjoin(self.dir_path,'Source','combine_events.f')
        # Count the number of distinct process ids (same dual-shape
        # handling as for the pdg lists above)
        try:
            nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
        except AttributeError:
            nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
        nb_proc = len(set(nb_proc))
        self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements)

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)

        # Touch "done" file
        os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses'))

        # Check for compiler
        self.set_compiler(compiler)
        self.set_cpp_compiler(compiler['cpp'])

        old_pos = os.getcwd()
        subpath = pjoin(self.dir_path, 'SubProcesses')

        P_dir_list = [proc for proc in os.listdir(subpath)
                      if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P']

        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the poscript in jpg files (if authorize)
        if makejpg:
            try:
                # remove any stale card thumbnail first (best effort)
                os.remove(pjoin(self.dir_path,'HTML','card.jpg'))
            except Exception, error:
                pass

            # ghostscript is required for the conversion
            if misc.which('gs'):
                logger.info("Generate jpeg diagrams")
                for Pdir in P_dir_list:
                    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                              stdout = devnull, cwd=pjoin(subpath, Pdir))

        logger.info("Generate web pages")
        # Create the WebPage using perl script

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                  stdout = devnull,cwd=pjoin(self.dir_path))

        #os.chdir(os.path.pardir)

        obj = gen_infohtml.make_info_html(self.dir_path)

        if online:
            nb_channel = obj.rep_rule['nb_gen_diag']
            open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel))
        #add the information to proc_charac
        self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

        # Write command history as proc_card_mg5
        if os.path.isdir(pjoin(self.dir_path,'Cards')):
            output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull)

        #crate the proc_characteristic file
        self.create_proc_charac(matrix_elements, history)

        # create the run_card
        ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')):
            if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')):
                os.remove(pjoin(self.dir_path,'madevent.tar.gz'))
            misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                      stdout = devnull, cwd=self.dir_path)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull, cwd=self.dir_path)

        #return to the initial dir
        #os.chdir(old_pos)

    #===========================================================================
    # write_matrix_element_v4
    #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                proc_id = "", config_map = [], subproc_number = ""):
        """Export a matrix element to a matrix.f file in MG4 madevent format.

        writer         -- FortranWriter, or a false-ish value to get back
                          the replace_dict instead of writing the file
        matrix_element -- the HelasMatrixElement to export
        fortran_model  -- the helas call writer producing the helas calls
        proc_id        -- non-empty string when writing the subprocess-group
                          variant (changes the iconfig/amp2 lines)
        config_map     -- forwarded to get_amp2_lines
        subproc_number -- accepted for interface compatibility; not used
                          directly in this body

        Returns (number of helas calls, ncolor) when writing, the filled
        replace_dict otherwise, or 0 when there is nothing to export."""

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                    "writer not FortranWriter")
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False

        # check if MLM/.../ is supported for this matrix-element and update associate flag
        if self.model and 'MLM' in self.model["limitations"]:
            if 'MLM' not in self.proc_characteristic["limitations"]:
                used_couplings = matrix_element.get_used_couplings(output="set")
                for vertex in self.model.get('interactions'):
                    particles = [p for p in vertex.get('particles')]
                    # only vertices involving a gluon can break MLM
                    if 21 in [p.get('pdg_code') for p in particles]:
                        colors = [par.get('color') for par in particles]
                        if 1 in colors:
                            continue
                        elif 'QCD' not in vertex.get('orders'):
                            for bad_coup in vertex.get('couplings').values():
                                if bad_coup in used_couplings:
                                    self.proc_characteristic["limitations"].append('MLM')
                                    break

        # The proc prefix is not used for MadEvent output so it can safely be set
        # to an empty string.
        replace_dict = {'proc_prefix':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        #adding the support for the fake width (forbidding too small width)
        mass_width = matrix_element.get_all_mass_widths()
        width_list = set([e[1] for e in mass_width])

        replace_dict['fake_width_declaration'] = \
            (' double precision fk_%s \n' * len(width_list)) % tuple(width_list)
        replace_dict['fake_width_declaration'] += \
            (' save fk_%s \n' * len(width_list)) % tuple(width_list)
        fk_w_defs = []
        one_def = ' fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)'
        for m, w in mass_width:
            if w == 'zero':
                # only one fk_zero definition regardless of multiplicity
                if ' fk_zero = 0d0' not in fk_w_defs:
                    fk_w_defs.append(' fk_zero = 0d0')
                continue
            fk_w_defs.append(one_def %{'m':m, 'w':w})
        replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id

        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract IC line
        ic_line = self.get_ic_line(matrix_element)
        replace_dict['ic_line'] = ic_line

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        den_factor_line = self.get_den_factor_line(matrix_element)
        replace_dict['den_factor_line'] = den_factor_line

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract ndiags
        ndiags = len(matrix_element.get('diagrams'))
        replace_dict['ndiags'] = ndiags

        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] = \
             """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
             COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

        if proc_id:
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_iconfigs_lines'] += \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                            proc_id
        else:
            # Standard running
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        # Set the size of Wavefunction
        # (18 needed as soon as spin 3/2 or spin 2 particles are present)
        if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
            replace_dict['wavefunctionsize'] = 18
        else:
            replace_dict['wavefunctionsize'] = 6

        # Extract amp2 lines
        amp2_lines = self.get_amp2_lines(matrix_element, config_map)
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

        # The JAMP definition depends on the splitting order
        split_orders=matrix_element.get('processes')[0].get('split_orders')
        if len(split_orders)>0:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)
        else:
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'

        replace_dict['nAmpSplitOrders']=len(amp_orders)
        replace_dict['nSqAmpSplitOrders']=len(squared_orders)
        replace_dict['split_order_str_list']=str(split_orders)
        replace_dict['nSplitOrders']=max(len(split_orders),1)
        amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
        sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
        replace_dict['ampsplitorders']='\n'.join(amp_so)
        replace_dict['sqsplitorders']='\n'.join(sqamp_so)

        # Extract JAMP lines
        # If no split_orders then artificiall add one entry called 'ALL_ORDERS'
        jamp_lines = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=
                       split_orders if len(split_orders)>0 else ['ALL_ORDERS'])
        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        replace_dict['template_file'] = pjoin(_file_path, \
                          'iolibs/template_files/%s' % self.matrix_file)
        replace_dict['template_file2'] = pjoin(_file_path, \
                          'iolibs/template_files/split_orders_helping_functions.inc')

        s1,s2 = matrix_element.get_spin_state_initial()
        replace_dict['nb_spin_state1'] = s1
        replace_dict['nb_spin_state2'] = s2

        if writer:
            file = open(replace_dict['template_file']).read()
            file = file % replace_dict
            # Add the split orders helper functions.
            file = file + '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
            # Write the file
            writer.writelines(file)
            # count only real calls, not commented-out lines
            return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
        else:
            replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor)
            return replace_dict
4157 4158 #=========================================================================== 4159 # write_auto_dsig_file 4160 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information.

        writer  -- FortranWriter, or a false-ish value to get back
                   (replace_dict, context) instead of writing the file
        proc_id -- non-empty string selects the subprocess-group variant
        Returns 0 when there is nothing to export."""

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        self.proc_characteristic['ninitial'] = ninitial
        # keep the maximum over all exported matrix elements
        self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

        # Add information relevant for MLM matching:
        # Maximum QCD power in all the contributions
        max_qcd_order = 0
        for diag in matrix_element.get('diagrams'):
            orders = diag.calculate_orders()
            if 'QCD' in orders:
                max_qcd_order = max(max_qcd_order,orders['QCD'])
        # count massless coloured final-state particles per process
        max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
        if proc.get('model').get_particle(id).get('mass')=='ZERO' and
           proc.get('model').get_particle(id).get('color')>1])
                                    for proc in matrix_element.get('processes'))
        # Maximum number of final state light jets to be matched
        self.proc_characteristic['max_n_matched_jets'] = max(
                               self.proc_characteristic['max_n_matched_jets'],
                               min(max_qcd_order,max_n_light_final_partons))

        # List of default pdgs to be considered for the CKKWl merging cut
        self.proc_characteristic['colored_pdgs'] = \
          sorted(list(set([abs(p.get('pdg_code')) for p in
          matrix_element.get('processes')[0].get('model').get('particles') if
          p.get('color')>1])))

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            replace_dict['cutsdone'] = ""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""
            replace_dict['cutsdone'] = "      cutsdone=.false.\n       cutspassed=.false."

        # good-helicity bookkeeping is handled by the group exporter itself
        if not isinstance(self, ProcessExporterFortranMEGroup):
            ncomb=matrix_element.get_helicity_combinations()
            replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
        else:
            replace_dict['read_write_good_hel'] = ""

        context = {'read_write_good_hel':True}

        if writer:
            file = open(pjoin(_file_path, \
                          'iolibs/template_files/auto_dsig_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file, context=context)
        else:
            return replace_dict, context
4264 #=========================================================================== 4265 # write_coloramps_file 4266 #===========================================================================
4267 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4268 """Write the coloramps.inc file for MadEvent""" 4269 4270 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4271 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4272 (max(len(matrix_element.get('color_basis').keys()), 1), 4273 len(mapconfigs))) 4274 4275 4276 # Write the file 4277 writer.writelines(lines) 4278 4279 return True
4280 4281 #=========================================================================== 4282 # write_colors_file 4283 #===========================================================================
    def write_colors_file(self, writer, matrix_elements):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        Emits a Fortran function get_color(ipdg) made of a chained
        if/else-if over every PDG code appearing either as an internal
        wavefunction or as an external leg (with decays)."""

        # Accept a single matrix element as well as a list of them.
        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                              for wf in d.get('wavefunctions')],[]) \
                              for d in me.get('diagrams')], []) \
                          for me in matrix_elements], []))

        # Same for the external legs (decay products included), adding the
        # antiparticle code for each leg id.
        leg_ids = set(sum([sum([sum([[l.get('id'),
                                      model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                     for l in p.get_legs_with_decays()], []) \
                                for p in me.get('processes')], []) \
                           for me in matrix_elements], []))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # First branch of the if/else-if chain (first sorted PDG code).
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        # One else-if branch per remaining PDG code.
        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
            get_color=%d
            return
            """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
        c This is dummy particle used in multiparticle vertices
        get_color=2
        return
        """ % model.get_first_non_pdg()
        # Fallback branch: unknown PDG code (prints an error at run time).
        lines += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

        # Write the file
        writer.writelines(lines)

        return True
4340 4341 #=========================================================================== 4342 # write_config_nqcd_file 4343 #===========================================================================
4344 - def write_config_nqcd_file(self, writer, nqcd_list):
4345 """Write the config_nqcd.inc with the number of QCD couplings 4346 for each config""" 4347 4348 lines = [] 4349 for iconf, n in enumerate(nqcd_list): 4350 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4351 4352 # Write the file 4353 writer.writelines(lines) 4354 4355 return True
4356 4357 #=========================================================================== 4358 # write_maxconfigs_file 4359 #===========================================================================
4360 - def write_maxconfigs_file(self, writer, matrix_elements):
4361 """Write the maxconfigs.inc file for MadEvent""" 4362 4363 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4364 maxconfigs = max([me.get_num_configs() for me in \ 4365 matrix_elements.get('matrix_elements')]) 4366 else: 4367 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4368 4369 lines = "integer lmaxconfigs\n" 4370 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4371 4372 # Write the file 4373 writer.writelines(lines) 4374 4375 return True
4376 4377 #=========================================================================== 4378 # read_write_good_hel 4379 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """return the code to read/write the good_hel common_block

        Returns Fortran source for four routines sharing the
        BLOCK_GOODHEL common block: write_good_hel / read_good_hel
        (stream I/O of the GOODHEL flags), init_good_hel (reset), and
        get_maxsproc (always 1 in the non-grouped case)."""

        convert = {'ncomb' : ncomb}
        # NOTE: read_good_hel sets NTRY past MAXTRIES (from genps.inc) so
        # that the restored helicity grid is used immediately.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I) = .false.
        enddo
        NTRY = 0
        end

        integer function get_maxsproc()
        implicit none
        get_maxsproc = 1
        return
        end

        """ % convert

        return output
4435 4436 #=========================================================================== 4437 # write_config_subproc_map_file 4438 #===========================================================================
4439 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4440 """Write a dummy config_subproc.inc file for MadEvent""" 4441 4442 lines = [] 4443 4444 for iconfig in range(len(s_and_t_channels)): 4445 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4446 (iconfig + 1)) 4447 4448 # Write the file 4449 writer.writelines(lines) 4450 4451 return True
4452 4453 #=========================================================================== 4454 # write_configs_file 4455 #===========================================================================
4456 - def write_configs_file(self, writer, matrix_element):
4457 """Write the configs.inc file for MadEvent""" 4458 4459 # Extract number of external particles 4460 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4461 4462 model = matrix_element.get('processes')[0].get('model') 4463 configs = [(i+1, d) for (i, d) in \ 4464 enumerate(matrix_element.get('diagrams'))] 4465 mapconfigs = [c[0] for c in configs] 4466 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4467 [[c[1]] for c in configs], 4468 mapconfigs, 4469 nexternal, ninitial, 4470 model)
4471 4472 #=========================================================================== 4473 # write_run_configs_file 4474 #===========================================================================
4475 - def write_run_config_file(self, writer):
4476 """Write the run_configs.inc file for MadEvent""" 4477 4478 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4479 4480 if self.proc_characteristic['loop_induced']: 4481 job_per_chan = 1 4482 else: 4483 job_per_chan = 5 4484 4485 if writer: 4486 text = open(path).read() % {'chanperjob': job_per_chan} 4487 writer.write(text) 4488 return True 4489 else: 4490 return {'chanperjob': job_per_chan}
4491 4492 #=========================================================================== 4493 # write_configs_file_from_diagrams 4494 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list): the channel decomposition
        kept for each written config, and the QCD coupling power of each
        written config."""

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # Maximum vertex size per config (first non-None diagram); used to
        # drop configs containing higher-point vertices below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        # Counter of configs actually written (some are skipped below).
        nconfigs = 0

        # PDG code used for fictitious propagators of multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    # Placeholder shared by all non-contributing
                    # subprocesses; filled in below once the real length
                    # is known.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            # Number of QCD couplings in this diagram (taken from the
            # first contributing subprocess; 0 if no QCD order present).
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                if verts in schannels:
                    # s-channel: one vertex per subprocess, pick the first
                    # non-None one as representative.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess (0 where the
                    # subprocess does not contribute to this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list
4628 4629 #=========================================================================== 4630 # write_decayBW_file 4631 #===========================================================================
4632 - def write_decayBW_file(self, writer, s_and_t_channels):
4633 """Write the decayBW.inc file for MadEvent""" 4634 4635 lines = [] 4636 4637 booldict = {None: "0", True: "1", False: "2"} 4638 4639 for iconf, config in enumerate(s_and_t_channels): 4640 schannels = config[0] 4641 for vertex in schannels: 4642 # For the resulting leg, pick out whether it comes from 4643 # decay or not, as given by the onshell flag 4644 leg = vertex.get('legs')[-1] 4645 lines.append("data gForceBW(%d,%d)/%s/" % \ 4646 (leg.get('number'), iconf + 1, 4647 booldict[leg.get('onshell')])) 4648 4649 # Write the file 4650 writer.writelines(lines) 4651 4652 return True
4653 4654 #=========================================================================== 4655 # write_dname_file 4656 #===========================================================================
4657 - def write_dname_file(self, writer, dir_name):
4658 """Write the dname.mg file for MG4""" 4659 4660 line = "DIRNAME=%s" % dir_name 4661 4662 # Write the file 4663 writer.write(line + "\n") 4664 4665 return True
4666 4667 #=========================================================================== 4668 # write_driver 4669 #===========================================================================
4670 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4671 """Write the SubProcess/driver.f file for MG4""" 4672 4673 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4674 4675 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4676 card = 'Source/MODEL/MG5_param.dat' 4677 else: 4678 card = 'param_card.dat' 4679 # Requiring each helicity configuration to be probed by 10 points for 4680 # matrix element before using the resulting grid for MC over helicity 4681 # sampling. 4682 # We multiply this by 2 because each grouped subprocess is called at most 4683 # twice for each IMIRROR. 4684 replace_dict = {'param_card_name':card, 4685 'ncomb':ncomb, 4686 'hel_init_points':n_grouped_proc*10*2} 4687 if not v5: 4688 replace_dict['secondparam']=',.true.' 4689 else: 4690 replace_dict['secondparam']='' 4691 4692 if writer: 4693 text = open(path).read() % replace_dict 4694 writer.write(text) 4695 return True 4696 else: 4697 return replace_dict
4698 4699 #=========================================================================== 4700 # write_addmothers 4701 #===========================================================================
4702 - def write_addmothers(self, writer):
4703 """Write the SubProcess/addmothers.f""" 4704 4705 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4706 4707 text = open(path).read() % {'iconfig': 'diag_number'} 4708 writer.write(text) 4709 4710 return True
4711 4712 4713 #=========================================================================== 4714 # write_combine_events 4715 #===========================================================================
4716 - def write_combine_events(self, writer, nb_proc=100):
4717 """Write the SubProcess/driver.f file for MG4""" 4718 4719 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4720 4721 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4722 card = 'Source/MODEL/MG5_param.dat' 4723 else: 4724 card = 'param_card.dat' 4725 4726 #set maxpup (number of @X in the process card) 4727 4728 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4729 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4730 writer.write(text) 4731 4732 return True
4733 4734 4735 #=========================================================================== 4736 # write_symmetry 4737 #===========================================================================
4738 - def write_symmetry(self, writer, v5=True):
4739 """Write the SubProcess/driver.f file for ME""" 4740 4741 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4742 4743 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4744 card = 'Source/MODEL/MG5_param.dat' 4745 else: 4746 card = 'param_card.dat' 4747 4748 if v5: 4749 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4750 else: 4751 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4752 4753 if writer: 4754 text = open(path).read() 4755 text = text % replace_dict 4756 writer.write(text) 4757 return True 4758 else: 4759 return replace_dict
4760 4761 4762 4763 #=========================================================================== 4764 # write_iproc_file 4765 #===========================================================================
4766 - def write_iproc_file(self, writer, me_number):
4767 """Write the iproc.dat file for MG4""" 4768 line = "%d" % (me_number + 1) 4769 4770 # Write the file 4771 for line_to_write in writer.write_line(line): 4772 writer.write(line_to_write) 4773 return True
4774 4775 #=========================================================================== 4776 # write_mg_sym_file 4777 #===========================================================================
4778 - def write_mg_sym_file(self, writer, matrix_element):
4779 """Write the mg.sym file for MadEvent.""" 4780 4781 lines = [] 4782 4783 # Extract process with all decays included 4784 final_legs = filter(lambda leg: leg.get('state') == True, 4785 matrix_element.get('processes')[0].get_legs_with_decays()) 4786 4787 ninitial = len(filter(lambda leg: leg.get('state') == False, 4788 matrix_element.get('processes')[0].get('legs'))) 4789 4790 identical_indices = {} 4791 4792 # Extract identical particle info 4793 for i, leg in enumerate(final_legs): 4794 if leg.get('id') in identical_indices: 4795 identical_indices[leg.get('id')].append(\ 4796 i + ninitial + 1) 4797 else: 4798 identical_indices[leg.get('id')] = [i + ninitial + 1] 4799 4800 # Remove keys which have only one particle 4801 for key in identical_indices.keys(): 4802 if len(identical_indices[key]) < 2: 4803 del identical_indices[key] 4804 4805 # Write mg.sym file 4806 lines.append(str(len(identical_indices.keys()))) 4807 for key in identical_indices.keys(): 4808 lines.append(str(len(identical_indices[key]))) 4809 for number in identical_indices[key]: 4810 lines.append(str(number)) 4811 4812 # Write the file 4813 writer.writelines(lines) 4814 4815 return True
4816 4817 #=========================================================================== 4818 # write_mg_sym_file 4819 #===========================================================================
4820 - def write_default_mg_sym_file(self, writer):
4821 """Write the mg.sym file for MadEvent.""" 4822 4823 lines = "0" 4824 4825 # Write the file 4826 writer.writelines(lines) 4827 4828 return True
4829 4830 #=========================================================================== 4831 # write_ncombs_file 4832 #===========================================================================
4833 - def write_ncombs_file(self, writer, nexternal):
4834 """Write the ncombs.inc file for MadEvent.""" 4835 4836 # ncomb (used for clustering) is 2^nexternal 4837 file = " integer n_max_cl\n" 4838 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4839 4840 # Write the file 4841 writer.writelines(file) 4842 4843 return True
4844 4845 #=========================================================================== 4846 # write_processes_file 4847 #===========================================================================
4848 - def write_processes_file(self, writer, subproc_group):
4849 """Write the processes.dat file with info about the subprocesses 4850 in this group.""" 4851 4852 lines = [] 4853 4854 for ime, me in \ 4855 enumerate(subproc_group.get('matrix_elements')): 4856 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4857 ",".join(p.base_string() for p in \ 4858 me.get('processes')))) 4859 if me.get('has_mirror_process'): 4860 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4861 for proc in mirror_procs: 4862 legs = copy.copy(proc.get('legs_with_decays')) 4863 legs.insert(0, legs.pop(1)) 4864 proc.set("legs_with_decays", legs) 4865 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4866 mirror_procs)) 4867 else: 4868 lines.append("mirror none") 4869 4870 # Write the file 4871 writer.write("\n".join(lines)) 4872 4873 return True
4874 4875 #=========================================================================== 4876 # write_symswap_file 4877 #===========================================================================
4878 - def write_symswap_file(self, writer, ident_perms):
4879 """Write the file symswap.inc for MG4 by comparing diagrams using 4880 the internal matrix element value functionality.""" 4881 4882 lines = [] 4883 4884 # Write out lines for symswap.inc file (used to permute the 4885 # external leg momenta 4886 for iperm, perm in enumerate(ident_perms): 4887 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4888 (iperm+1, ",".join([str(i+1) for i in perm]))) 4889 lines.append("data nsym/%d/" % len(ident_perms)) 4890 4891 # Write the file 4892 writer.writelines(lines) 4893 4894 return True
4895 4896 #=========================================================================== 4897 # write_symfact_file 4898 #===========================================================================
4899 - def write_symfact_file(self, writer, symmetry):
4900 """Write the files symfact.dat for MG4 by comparing diagrams using 4901 the internal matrix element value functionality.""" 4902 4903 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4904 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4905 # Write out lines for symswap.inc file (used to permute the 4906 # external leg momenta 4907 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4908 # Write the file 4909 writer.write('\n'.join(lines)) 4910 writer.write('\n') 4911 4912 return True
4913 4914 #=========================================================================== 4915 # write_symperms_file 4916 #===========================================================================
4917 - def write_symperms_file(self, writer, perms):
4918 """Write the symperms.inc file for subprocess group, used for 4919 symmetric configurations""" 4920 4921 lines = [] 4922 for iperm, perm in enumerate(perms): 4923 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4924 (iperm+1, ",".join([str(i+1) for i in perm]))) 4925 4926 # Write the file 4927 writer.writelines(lines) 4928 4929 return True
4930 4931 #=========================================================================== 4932 # write_subproc 4933 #===========================================================================
4934 - def write_subproc(self, writer, subprocdir):
4935 """Append this subprocess to the subproc.mg file for MG4""" 4936 4937 # Write line to file 4938 writer.write(subprocdir + "\n") 4939 4940 return True
4941
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used for the grouped matrix<i>.f files
    matrix_file = "matrix_madevent_group_v4.inc"
    # Tag identifying the grouped output mode
    grouped_mode = 'madevent'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls, or 0 if the directory
        could not be entered."""

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        # NOTE(review): this method works by chdir-ing into the subprocess
        # directory and back; it is not safe against concurrent callers.
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory may already exist; warn and reuse it.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb != me.get_helicity_combinations():
                raise MadGraph5Error, "All grouped processes must share the "+\
                                      "same number of helicity configurations."

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename), ncomb,
                          n_grouped_proc=len(matrix_elements),
                          v5=self.opt['v5_model'])

        # One matrix<i>.f and auto_dsig<i>.f per matrix element in the group
        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime+1)
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams (skipped when the 'noeps' output option is 'True')
            if not 'noeps' in self.opt['output_options'] or \
                             self.opt['output_options']['noeps'] != 'True':
                filename = "matrix%d.ps" % (ime+1)
                plot = draw.MultiEpsDiagramDrawer(matrix_element.\
                                                  get('base_amplitude').\
                                                  get('diagrams'),
                                                  filename,
                                                  model = \
                                                  matrix_element.get('processes')[0].\
                                                  get('model'),
                                                  amplitude=True)
                logger.info("Generating Feynman diagrams for " + \
                            matrix_element.get('processes')[0].nice_string())
                plot.draw()

        # Extract number of external particles
        # (taken from the last matrix element of the loop above)
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # check consistency: configs identified as symmetric must carry
        # the same QCD coupling power
        for i, sym_fact in enumerate(symmetry):
            if sym_fact > 0:
                continue
            if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]:
                misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)])
                raise Exception, "identical diagram with different QCD powwer"

        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir, subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir)
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5190 5191 #=========================================================================== 5192 # write_super_auto_dsig_file 5193 #===========================================================================
    def write_super_auto_dsig_file(self, writer, subproc_group):
        """Write the auto_dsig.f file selecting between the subprocesses
        in subprocess group mode.

        Fills the super_auto_dsig_group_v4.inc template with the process
        info, DSIG declarations and per-process dispatch lines; returns
        the substitution dict when no writer is given."""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        matrix_elements = subproc_group.get('matrix_elements')

        # Extract process info lines
        process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                                   matrix_elements])
        replace_dict['process_lines'] = process_lines

        nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal

        # Two slots per matrix element — presumably one per IMIRROR
        # direction; TODO(review) confirm against the template.
        replace_dict['nsprocs'] = 2*len(matrix_elements)

        # Generate dsig definition line
        dsig_def_line = "DOUBLE PRECISION " + \
                        ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                                  range(len(matrix_elements))])
        replace_dict["dsig_def_line"] = dsig_def_line

        # Generate dsig process lines: dispatch on IPROC to the matching
        # DSIG<i> function, annotated with the process string.
        call_dsig_proc_lines = []
        for iproc in range(len(matrix_elements)):
            call_dsig_proc_lines.append(\
                "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
                {"num": iproc + 1,
                 "proc": matrix_elements[iproc].get('processes')[0].base_string()})
        replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

        # All MEs of a group share the same helicity combination count.
        ncomb = matrix_elements[0].get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

        # Spin states of the two initial particles
        s1, s2 = matrix_elements[0].get_spin_state_initial()
        replace_dict['nb_spin_state1'] = s1
        replace_dict['nb_spin_state2'] = s2

        if writer:
            file = open(pjoin(_file_path, \
                       'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
5247 5248 #=========================================================================== 5249 # write_mirrorprocs 5250 #===========================================================================
5251 - def write_mirrorprocs(self, writer, subproc_group):
5252 """Write the mirrorprocs.inc file determining which processes have 5253 IS mirror process in subprocess group mode.""" 5254 5255 lines = [] 5256 bool_dict = {True: '.true.', False: '.false.'} 5257 matrix_elements = subproc_group.get('matrix_elements') 5258 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5259 (len(matrix_elements), 5260 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5261 me in matrix_elements]))) 5262 # Write the file 5263 writer.writelines(lines)
5264 5265 #=========================================================================== 5266 # write_addmothers 5267 #===========================================================================
5268 - def write_addmothers(self, writer):
5269 """Write the SubProcess/addmothers.f""" 5270 5271 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5272 5273 text = open(path).read() % {'iconfig': 'lconfig'} 5274 writer.write(text) 5275 5276 return True
5277 5278 5279 #=========================================================================== 5280 # write_coloramps_file 5281 #===========================================================================
5282 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5283 matrix_elements):
5284 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5285 5286 # Create a map from subprocess (matrix element) to a list of 5287 # the diagrams corresponding to each config 5288 5289 lines = [] 5290 5291 subproc_to_confdiag = {} 5292 for config in diagrams_for_config: 5293 for subproc, diag in enumerate(config): 5294 try: 5295 subproc_to_confdiag[subproc].append(diag) 5296 except KeyError: 5297 subproc_to_confdiag[subproc] = [diag] 5298 5299 for subproc in sorted(subproc_to_confdiag.keys()): 5300 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5301 matrix_elements[subproc], 5302 subproc + 1)) 5303 5304 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5305 (maxflows, 5306 len(diagrams_for_config), 5307 len(matrix_elements))) 5308 5309 # Write the file 5310 writer.writelines(lines) 5311 5312 return True
5313 5314 #=========================================================================== 5315 # write_config_subproc_map_file 5316 #===========================================================================
5317 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5318 """Write the config_subproc_map.inc file for subprocess groups""" 5319 5320 lines = [] 5321 # Output only configs that have some corresponding diagrams 5322 iconfig = 0 5323 for config in config_subproc_map: 5324 if set(config) == set([0]): 5325 continue 5326 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5327 (iconfig + 1, len(config), 5328 ",".join([str(i) for i in config]))) 5329 iconfig += 1 5330 # Write the file 5331 writer.writelines(lines) 5332 5333 return True
5334 5335 #=========================================================================== 5336 # read_write_good_hel 5337 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """return the code to read/write the good_hel common_block

        ncomb: number of helicity combinations, fixed at generation time
            and baked into the generated Fortran as the NCOMB parameter.
        """

        convert = {'ncomb' : ncomb}

        # The generated Fortran provides:
        #  - write_good_hel/read_good_hel: (de)serialize GOODHEL to a stream;
        #    after reading, NTRY is pushed past MAXTRIES so the restored
        #    flags are used as-is instead of being re-explored.
        #  - init_good_hel: reset all flags and counters.
        #  - get_maxsproc: accessor for the maxsproc constant of maxamps.inc.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
            GOODHEL(I,1) = .false.
            GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
5399 5400 5401 5402 #=========================================================================== 5403 # write_configs_file 5404 #===========================================================================
5405 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5406 """Write the configs.inc file with topology information for a 5407 subprocess group. Use the first subprocess with a diagram for each 5408 configuration.""" 5409 5410 matrix_elements = subproc_group.get('matrix_elements') 5411 model = matrix_elements[0].get('processes')[0].get('model') 5412 5413 diagrams = [] 5414 config_numbers = [] 5415 for iconfig, config in enumerate(diagrams_for_config): 5416 # Check if any diagrams correspond to this config 5417 if set(config) == set([0]): 5418 continue 5419 subproc_diags = [] 5420 for s,d in enumerate(config): 5421 if d: 5422 subproc_diags.append(matrix_elements[s].\ 5423 get('diagrams')[d-1]) 5424 else: 5425 subproc_diags.append(None) 5426 diagrams.append(subproc_diags) 5427 config_numbers.append(iconfig + 1) 5428 5429 # Extract number of external particles 5430 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5431 5432 return len(diagrams), \ 5433 self.write_configs_file_from_diagrams(writer, diagrams, 5434 config_numbers, 5435 nexternal, ninitial, 5436 model)
5437 5438 #=========================================================================== 5439 # write_run_configs_file 5440 #===========================================================================
5441 - def write_run_config_file(self, writer):
5442 """Write the run_configs.inc file for MadEvent""" 5443 5444 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5445 if self.proc_characteristic['loop_induced']: 5446 job_per_chan = 1 5447 else: 5448 job_per_chan = 2 5449 text = open(path).read() % {'chanperjob':job_per_chan} 5450 writer.write(text) 5451 return True
5452 5453 5454 #=========================================================================== 5455 # write_leshouche_file 5456 #===========================================================================
5457 - def write_leshouche_file(self, writer, subproc_group):
5458 """Write the leshouche.inc file for MG4""" 5459 5460 all_lines = [] 5461 5462 for iproc, matrix_element in \ 5463 enumerate(subproc_group.get('matrix_elements')): 5464 all_lines.extend(self.get_leshouche_lines(matrix_element, 5465 iproc)) 5466 # Write the file 5467 writer.writelines(all_lines) 5468 return True
5469 5470
    def finalize(self,*args, **opts):
        """Finalize the grouped MadEvent output.

        Delegates the whole finalization to the mother class, then makes
        sure the process characteristics flag the output as grouped.
        """

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        #ensure that the grouping information is on the correct value
        self.proc_characteristic['grouped_matrix'] = True
5476 5477 5478 #=============================================================================== 5479 # UFO_model_to_mg4 5480 #=============================================================================== 5481 5482 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type names used for the multiple-precision (quadruple) output.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
5499 - def __init__(self, model, output_path, opt=None):
5500 """ initialization of the objects """ 5501 5502 self.model = model 5503 self.model_name = model['name'] 5504 self.dir_path = output_path 5505 5506 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5507 'loop_induced': False} 5508 if opt: 5509 self.opt.update(opt) 5510 5511 self.coups_dep = [] # (name, expression, type) 5512 self.coups_indep = [] # (name, expression, type) 5513 self.params_dep = [] # (name, expression, type) 5514 self.params_indep = [] # (name, expression, type) 5515 self.params_ext = [] # external parameter 5516 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5517 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
    def pass_parameter_to_case_insensitive(self):
        """modify the parameter if some of them are identical up to the case"""

        # Group all parameters of the model by their lower-cased name to
        # detect names that differ only by case.
        lower_dict={}
        duplicate = set()
        keys = self.model['parameters'].keys()
        for key in keys:
            for param in self.model['parameters'][key]:
                lower_name = param.name.lower()
                if not lower_name:
                    continue
                try:
                    lower_dict[lower_name].append(param)
                except KeyError,error:
                    lower_dict[lower_name] = [param]
                else:
                    # append succeeded -> the lower-cased name was already
                    # present, i.e. a case-only duplicate.
                    duplicate.add(lower_name)
                    logger.debug('%s is define both as lower case and upper case.'
                                 % lower_name)
        if not duplicate:
            return

        # Rename every clashing parameter to its lower-case form, suffixing
        # '__2', '__3', ... for the second and later occurrences.
        re_expr = r'''\b(%s)\b'''
        to_change = []
        change={}
        for value in duplicate:
            for i, var in enumerate(lower_dict[value]):
                to_change.append(var.name)
                new_name = '%s%s' % (var.name.lower(),
                                     ('__%d'%(i+1) if i>0 else ''))
                change[var.name] = new_name
                var.name = new_name

        # Apply the modification to the map_CTcoup_CTparam of the model
        # if it has one (giving for each coupling the CT parameters which
        # are necessary and which should be exported to the model).
        # NOTE(review): iterating the mapping directly and unpacking into
        # (coup, ctparams) only works if it yields key/value pairs; for a
        # plain dict this would unpack the keys — looks like .items() was
        # intended. TODO confirm the actual container type.
        if hasattr(self.model,'map_CTcoup_CTparam'):
            for coup, ctparams in self.model.map_CTcoup_CTparam:
                for i, ctparam in enumerate(ctparams):
                    try:
                        self.model.map_CTcoup_CTparam[coup][i] = change[ctparam]
                    except KeyError:
                        pass

        # Rewrite every expression to use the new names (word-boundary
        # anchored so substrings of longer names are untouched).
        replace = lambda match_pattern: change[match_pattern.groups()[0]]
        rep_pattern = re.compile(re_expr % '|'.join(to_change))

        # change parameters
        for key in keys:
            if key == ('external',):
                continue
            for param in self.model['parameters'][key]:
                param.expr = rep_pattern.sub(replace, param.expr)

        # change couplings
        for key in self.model['couplings'].keys():
            for coup in self.model['couplings'][key]:
                coup.expr = rep_pattern.sub(replace, coup.expr)

        # change mass/width
        for part in self.model['particles']:
            if str(part.get('mass')) in to_change:
                part.set('mass', rep_pattern.sub(replace, str(part.get('mass'))))
            if str(part.get('width')) in to_change:
                part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5584
    def refactorize(self, wanted_couplings = []):
        """modify the couplings to fit with MG4 convention

        Split the model parameters into params_ext / params_dep /
        params_indep and the couplings into coups_dep / coups_indep,
        where "dep" means dependent on a PS-point quantity (aS, MU_R).
        When wanted_couplings is non-empty, only those couplings are kept.
        """

        # Keep only separation in alphaS
        # (keys sorted by length so shorter dependency tuples come first;
        # Python 2: dict.keys() returns a list, sorted in place)
        keys = self.model['parameters'].keys()
        keys.sort(key=len)
        for key in keys:
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        keys = self.model['couplings'].keys()
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related computation
        #Pass G in the independant list
        if 'G' in self.params_dep:
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
            # G.expr = '2*cmath.sqrt(as*pi)'
            # self.params_indep.insert(0, self.params_dep.pop(index))
            # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real'))
            self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5627 - def build(self, wanted_couplings = [], full=True):
5628 """modify the couplings to fit with MG4 convention and creates all the 5629 different files""" 5630 5631 self.pass_parameter_to_case_insensitive() 5632 self.refactorize(wanted_couplings) 5633 5634 # write the files 5635 if full: 5636 if wanted_couplings: 5637 # extract the wanted ct parameters 5638 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5639 self.write_all()
5640 5641
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        name: file name created inside self.dir_path.
        comment: comment character used to draw the banner (padded to the
            Fortran comment column width).
        format: 'fortran' returns a FortranWriter, anything else a plain
            file object.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            fsock = open(file_path, 'w')

        # Calling the builtin file type's writelines unbound on fsock
        # writes the banner verbatim, bypassing any writelines override
        # (e.g. FortranWriter's reformatting).
        # NOTE(review): Python-2-only idiom (no `file` builtin in py3),
        # and presumably relies on FortranWriter subclassing file — confirm.
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) * ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5658 5659
    def write_all(self):
        """ write all the files

        Drives the full MG4 model export; each create_* call emits one
        file into self.dir_path.
        """
        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()


        # All the standard files
        self.copy_standard_file()
5692 5693 ############################################################################ 5694 ## ROUTINE CREATING THE FILES ############################################ 5695 ############################################################################ 5696
    def copy_standard_file(self):
        """Copy the standard files for the fortran model.

        Links the fixed library files, instantiates the rw_para.f template
        (include list and param_card loading depend on the export format),
        picks the proper lha_read implementation and installs the makefile
        matching self.opt['export_format'].
        """

        #copy the library files
        file_to_link = ['formats.inc','printout.f', \
                        'rw_para.f', 'testprog.f']

        for filename in file_to_link:
            cp( MG5DIR + '/models/template_files/fortran/' + filename, \
                                                                self.dir_path)

        file = open(os.path.join(MG5DIR,\
                              'models/template_files/fortran/rw_para.f')).read()

        includes=["include \'coupl.inc\'","include \'input.inc\'",
                  "include \'model_functions.inc\'"]
        if self.opt['mp']:
            includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
        # In standalone and madloop we do no use the compiled param card but
        # still parse the .dat one so we must load it.
        if self.opt['loop_induced']:
            #loop induced follow MadEvent way to handle the card.
            load_card = ''
            lha_read_filename='lha_read.f'
        elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read_mp.f'
        elif self.opt['export_format'].startswith('standalone') \
            or self.opt['export_format'] in ['madweight', 'plugin']\
            or self.opt['export_format'].startswith('matchbox'):
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read.f'
        else:
            load_card = ''
            lha_read_filename='lha_read.f'
        cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
            os.path.join(self.dir_path,'lha_read.f'))

        file=file%{'includes':'\n '.join(includes),
                   'load_card':load_card}
        writer=open(os.path.join(self.dir_path,'rw_para.f'),'w')
        writer.writelines(file)
        writer.close()

        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
                self.dir_path + '/makefile')
            if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
                # aMC@NLO reuses the MadEvent makefile with renamed targets
                path = pjoin(self.dir_path, 'makefile')
                text = open(path).read()
                text = text.replace('madevent','aMCatNLO')
                open(path, 'w').writelines(text)
        elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                                  'madloop','madloop_optimized', 'standalone_rw',
                                  'madweight','matchbox','madloop_matchbox', 'plugin']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
                self.dir_path + '/makefile')
        #elif self.opt['export_format'] in []:
        #pass
        else:
            raise MadGraph5Error('Unknown format')
5759
    def create_coupl_inc(self):
        """ write coupling.inc

        Emits coupl.inc (double precision) and, when multiple precision is
        enabled, mp_coupl.inc (mp_prefix'ed names) plus
        mp_coupl_same_name.inc (same names as the DP file, for inclusion in
        routines that shadow the DP symbols).
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """




            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # complex-mass-scheme mass only for particles with a width
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                          ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                          ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                      ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                      ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5879
5880 - def create_write_couplings(self):
5881 """ write the file coupl_write.inc """ 5882 5883 fsock = self.open('coupl_write.inc', format='fortran') 5884 5885 fsock.writelines("""write(*,*) ' Couplings of %s' 5886 write(*,*) ' ---------------------------------' 5887 write(*,*) ' '""" % self.model_name) 5888 def format(coupl): 5889 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5890 5891 # Write the Couplings 5892 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5893 fsock.writelines('\n'.join(lines)) 5894 5895
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Real and complex internal/external parameters go into separate
        common blocks (params_R / params_C); masses and widths are skipped
        here because coupl.inc already declares them.  With mp enabled the
        same declarations are emitted mp_prefix'ed into mp_input.inc.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # G, MU_R and ZERO are declared elsewhere (coupl.inc header)
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real'and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n\n')
5952
5953 - def check_needed_param(self, param):
5954 """ Returns whether the parameter in argument is needed for this 5955 specific computation or not.""" 5956 5957 # If this is a leading order model or if there was no CT parameter 5958 # employed in this NLO model, one can directly return that the 5959 # parameter is needed since only CTParameters are filtered. 5960 if not hasattr(self, 'allCTparameters') or \ 5961 self.allCTparameters is None or self.usedCTparameters is None or \ 5962 len(self.allCTparameters)==0: 5963 return True 5964 5965 # We must allow the conjugate shorthand for the complex parameter as 5966 # well so we check wether either the parameter name or its name with 5967 # 'conjg__' substituted with '' is present in the list. 5968 # This is acceptable even if some parameter had an original name 5969 # including 'conjg__' in it, because at worst we export a parameter 5970 # was not needed. 5971 param = param.lower() 5972 cjg_param = param.replace('conjg__','',1) 5973 5974 # First make sure it is a CTparameter 5975 if param not in self.allCTparameters and \ 5976 cjg_param not in self.allCTparameters: 5977 return True 5978 5979 # Now check if it is in the list of CTparameters actually used 5980 return (param in self.usedCTparameters or \ 5981 cjg_param in self.usedCTparameters)
5982
5983 - def extract_needed_CTparam(self,wanted_couplings=[]):
5984 """ Extract what are the needed CT parameters given the wanted_couplings""" 5985 5986 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5987 # Setting these lists to none wil disable the filtering in 5988 # check_needed_param 5989 self.allCTparameters = None 5990 self.usedCTparameters = None 5991 return 5992 5993 # All CTparameters appearin in all CT couplings 5994 allCTparameters=self.model.map_CTcoup_CTparam.values() 5995 # Define in this class the list of all CT parameters 5996 self.allCTparameters=list(\ 5997 set(itertools.chain.from_iterable(allCTparameters))) 5998 5999 # All used CT couplings 6000 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6001 allUsedCTCouplings = [coupl for coupl in 6002 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6003 6004 # Now define the list of all CT parameters that are actually used 6005 self.usedCTparameters=list(\ 6006 set(itertools.chain.from_iterable([ 6007 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6008 ]))) 6009 6010 # Now at last, make these list case insensitive 6011 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6012 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6013
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                          ('mp_' if mp and not dp else ''), format='fortran')

        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            # NOTE(review): 'elif mp' here (vs 'if mp' for params_indep)
            # means no mp output when dp and mp are both True; the two
            # callers in write_all never pass both, so presumably harmless
            # — confirm before relying on dp=mp=True.
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
            pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
            pass
        else:
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
                gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                %(mp_prefix)sgal(2) = 1e0_16
                """%{'mp_prefix':self.mp_prefix})
6097 6098
6099 - def create_couplings(self):
6100 """ create couplings.f and all couplingsX.f """ 6101 6102 nb_def_by_file = 25 6103 6104 self.create_couplings_main(nb_def_by_file) 6105 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6106 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6107 6108 for i in range(nb_coup_indep): 6109 # For the independent couplings, we compute the double and multiple 6110 # precision ones together 6111 data = self.coups_indep[nb_def_by_file * i: 6112 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6113 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6114 6115 for i in range(nb_coup_dep): 6116 # For the dependent couplings, we compute the double and multiple 6117 # precision ones in separate subroutines. 6118 data = self.coups_dep[nb_def_by_file * i: 6119 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6120 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6121 dp=True,mp=False) 6122 if self.opt['mp']: 6123 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6124 dp=False,mp=True)
6125 6126
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f """
        # couplings.f contains four driver subroutines:
        #   coup()              - full evaluation (READLHA=.true.), calls every
        #                         per-chunk coup<N>() / mp_coup<N>() routine.
        #   update_as_param()   - re-evaluates only the scale-dependent
        #                         couplings (READLHA=.false.).
        #   update_as_param2()  - same, but taking mu_r2/as2 as arguments.
        #   mp_update_as_param()- quadruple-precision variant (only if mp).
        # nb_def_by_file must match the chunking used in create_couplings().

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include \'model_functions.inc\'""")
        if self.opt['mp']:
            # Quadruple-precision declarations mirror the double ones with
            # the MP__ prefix; self.mp_real_format is e.g. 'real*16'.
            fsock.writelines("""%s MP__PI, MP__ZERO
          parameter (MP__PI=3.1415926535897932384626433832795e0_16)
          parameter (MP__ZERO=0e0_16)
          include \'mp_input.inc\'
          include \'mp_coupl.inc\'
          """ % self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .true.
          include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        # Independent-coupling chunks first (coup1..coupN)...
        fsock.writelines('\n'.join(
            ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        # ...then the scale-dependent chunks, numbered after them.
        fsock.writelines('\n'.join(
            ['call coup%s()' % (nb_coup_indep + i + 1)
             for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(
                ['call mp_coup%s()' % (nb_coup_indep + i + 1)
                 for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        fsock.writelines("""subroutine update_as_param()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .false.""")
        fsock.writelines("""
          include \'intparam_definition.inc\'\n
          """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        # Only the scale-dependent chunks need re-evaluation here.
        fsock.writelines('\n'.join(
            ['call coup%s()' % (nb_coup_indep + i + 1)
             for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

          implicit none
          double precision PI
          parameter (PI=3.141592653589793d0)
          double precision mu_r2, as2
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'""")
        # mu_r2 <= 0 means "keep the current renormalization scale".
        fsock.writelines("""
          if (mu_r2.gt.0d0) MU_R = mu_r2
          G = SQRT(4.0d0*PI*AS2)
          AS = as2

          CALL UPDATE_AS_PARAM()
          """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            fsock.writelines("""subroutine mp_update_as_param()

          implicit none
          logical READLHA
          include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
          parameter (MP__PI=3.1415926535897932384626433832795e0_16)
          parameter (MP__ZERO=0e0_16)
          include \'mp_input.inc\'
          include \'mp_coupl.inc\'
          """ % self.mp_real_format)
            # actualize_mp_ext_params.inc copies the PS-dependent external
            # parameters into their mp images before re-deriving couplings.
            fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          include \'actualize_mp_ext_params.inc\'
          READLHA = .false.
          include \'mp_intparam_definition.inc\'\n
          """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(
                ['call mp_coup%s()' % (nb_coup_indep + i + 1)
                 for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6242
6243 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
6244 """ create couplings[nb_file].f containing information coming from data. 6245 Outputs the computation of the double precision and/or the multiple 6246 precision couplings depending on the parameters dp and mp. 6247 If mp is True and dp is False, then the prefix 'MP_' is appended to the 6248 filename and subroutine name. 6249 """ 6250 6251 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 6252 nb_file), format='fortran') 6253 fsock.writelines("""subroutine %scoup%s() 6254 6255 implicit none 6256 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 6257 if dp: 6258 fsock.writelines(""" 6259 double precision PI, ZERO 6260 parameter (PI=3.141592653589793d0) 6261 parameter (ZERO=0d0) 6262 include 'input.inc' 6263 include 'coupl.inc'""") 6264 if mp: 6265 fsock.writelines("""%s MP__PI, MP__ZERO 6266 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6267 parameter (MP__ZERO=0e0_16) 6268 include \'mp_input.inc\' 6269 include \'mp_coupl.inc\' 6270 """%self.mp_real_format) 6271 6272 for coupling in data: 6273 if dp: 6274 fsock.writelines('%s = %s' % (coupling.name, 6275 self.p_to_f.parse(coupling.expr))) 6276 if mp: 6277 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 6278 self.mp_p_to_f.parse(coupling.expr))) 6279 fsock.writelines('end')
6280
6281 - def create_model_functions_inc(self):
6282 """ Create model_functions.inc which contains the various declarations 6283 of auxiliary functions which might be used in the couplings expressions 6284 """ 6285 6286 additional_fct = [] 6287 # check for functions define in the UFO model 6288 ufo_fct = self.model.get('functions') 6289 if ufo_fct: 6290 for fct in ufo_fct: 6291 # already handle by default 6292 if fct.name not in ["complexconjugate", "re", "im", "sec", 6293 "csc", "asec", "acsc", "theta_function", "cond", 6294 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot", 6295 "grreglog","regsqrt"]: 6296 additional_fct.append(fct.name) 6297 6298 6299 fsock = self.open('model_functions.inc', format='fortran') 6300 fsock.writelines("""double complex cond 6301 double complex condif 6302 double complex reglog 6303 double complex reglogp 6304 double complex reglogm 6305 double complex recms 6306 double complex arg 6307 double complex grreglog 6308 double complex regsqrt 6309 %s 6310 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6311 6312 6313 if self.opt['mp']: 6314 fsock.writelines("""%(complex_mp_format)s mp_cond 6315 %(complex_mp_format)s mp_condif 6316 %(complex_mp_format)s mp_reglog 6317 %(complex_mp_format)s mp_reglogp 6318 %(complex_mp_format)s mp_reglogm 6319 %(complex_mp_format)s mp_recms 6320 %(complex_mp_format)s mp_arg 6321 %(complex_mp_format)s mp_grreglog 6322 %(complex_mp_format)s mp_regsqrt 6323 %(additional)s 6324 """ %\ 6325 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6326 'complex_mp_format':self.mp_complex_format 6327 })
6328
6329 - def create_model_functions_def(self):
6330 """ Create model_functions.f which contains the various definitions 6331 of auxiliary functions which might be used in the couplings expressions 6332 Add the functions.f functions for formfactors support 6333 """ 6334 6335 fsock = self.open('model_functions.f', format='fortran') 6336 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6337 implicit none 6338 double complex condition,truecase,falsecase 6339 if(condition.eq.(0.0d0,0.0d0)) then 6340 cond=truecase 6341 else 6342 cond=falsecase 6343 endif 6344 end 6345 6346 double complex function condif(condition,truecase,falsecase) 6347 implicit none 6348 logical condition 6349 double complex truecase,falsecase 6350 if(condition) then 6351 condif=truecase 6352 else 6353 condif=falsecase 6354 endif 6355 end 6356 6357 double complex function recms(condition,expr) 6358 implicit none 6359 logical condition 6360 double complex expr 6361 if(condition)then 6362 recms=expr 6363 else 6364 recms=dcmplx(dble(expr)) 6365 endif 6366 end 6367 6368 double complex function reglog(arg) 6369 implicit none 6370 double complex TWOPII 6371 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6372 double complex arg 6373 if(arg.eq.(0.0d0,0.0d0)) then 6374 reglog=(0.0d0,0.0d0) 6375 else 6376 reglog=log(arg) 6377 endif 6378 end 6379 6380 double complex function reglogp(arg) 6381 implicit none 6382 double complex TWOPII 6383 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6384 double complex arg 6385 if(arg.eq.(0.0d0,0.0d0))then 6386 reglogp=(0.0d0,0.0d0) 6387 else 6388 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6389 reglogp=log(arg) + TWOPII 6390 else 6391 reglogp=log(arg) 6392 endif 6393 endif 6394 end 6395 6396 double complex function reglogm(arg) 6397 implicit none 6398 double complex TWOPII 6399 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6400 double complex arg 6401 if(arg.eq.(0.0d0,0.0d0))then 6402 reglogm=(0.0d0,0.0d0) 6403 else 6404 
if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6405 reglogm=log(arg) - TWOPII 6406 else 6407 reglogm=log(arg) 6408 endif 6409 endif 6410 end 6411 6412 double complex function regsqrt(arg_in) 6413 implicit none 6414 double complex arg_in 6415 double complex arg 6416 arg=arg_in 6417 if(dabs(dimag(arg)).eq.0.0d0)then 6418 arg=dcmplx(dble(arg),0.0d0) 6419 endif 6420 if(dabs(dble(arg)).eq.0.0d0)then 6421 arg=dcmplx(0.0d0,dimag(arg)) 6422 endif 6423 regsqrt=sqrt(arg) 6424 end 6425 6426 double complex function grreglog(logsw,expr1_in,expr2_in) 6427 implicit none 6428 double complex TWOPII 6429 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6430 double complex expr1_in,expr2_in 6431 double complex expr1,expr2 6432 double precision logsw 6433 double precision imagexpr 6434 logical firstsheet 6435 expr1=expr1_in 6436 expr2=expr2_in 6437 if(dabs(dimag(expr1)).eq.0.0d0)then 6438 expr1=dcmplx(dble(expr1),0.0d0) 6439 endif 6440 if(dabs(dble(expr1)).eq.0.0d0)then 6441 expr1=dcmplx(0.0d0,dimag(expr1)) 6442 endif 6443 if(dabs(dimag(expr2)).eq.0.0d0)then 6444 expr2=dcmplx(dble(expr2),0.0d0) 6445 endif 6446 if(dabs(dble(expr2)).eq.0.0d0)then 6447 expr2=dcmplx(0.0d0,dimag(expr2)) 6448 endif 6449 if(expr1.eq.(0.0d0,0.0d0))then 6450 grreglog=(0.0d0,0.0d0) 6451 else 6452 imagexpr=dimag(expr1)*dimag(expr2) 6453 firstsheet=imagexpr.ge.0.0d0 6454 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6455 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6456 if(firstsheet)then 6457 grreglog=log(expr1) 6458 else 6459 if(dimag(expr1).gt.0.0d0)then 6460 grreglog=log(expr1) - logsw*TWOPII 6461 else 6462 grreglog=log(expr1) + logsw*TWOPII 6463 endif 6464 endif 6465 endif 6466 end 6467 6468 double complex function arg(comnum) 6469 implicit none 6470 double complex comnum 6471 double complex iim 6472 iim = (0.0d0,1.0d0) 6473 if(comnum.eq.(0.0d0,0.0d0)) then 6474 arg=(0.0d0,0.0d0) 6475 else 6476 arg=log(comnum/abs(comnum))/iim 6477 endif 6478 end""") 6479 if self.opt['mp']: 6480 
fsock.writelines(""" 6481 6482 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6483 implicit none 6484 %(complex_mp_format)s condition,truecase,falsecase 6485 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6486 mp_cond=truecase 6487 else 6488 mp_cond=falsecase 6489 endif 6490 end 6491 6492 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6493 implicit none 6494 logical condition 6495 %(complex_mp_format)s truecase,falsecase 6496 if(condition) then 6497 mp_condif=truecase 6498 else 6499 mp_condif=falsecase 6500 endif 6501 end 6502 6503 %(complex_mp_format)s function mp_recms(condition,expr) 6504 implicit none 6505 logical condition 6506 %(complex_mp_format)s expr 6507 if(condition)then 6508 mp_recms=expr 6509 else 6510 mp_recms=cmplx(real(expr),kind=16) 6511 endif 6512 end 6513 6514 %(complex_mp_format)s function mp_reglog(arg) 6515 implicit none 6516 %(complex_mp_format)s TWOPII 6517 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6518 %(complex_mp_format)s arg 6519 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6520 mp_reglog=(0.0e0_16,0.0e0_16) 6521 else 6522 mp_reglog=log(arg) 6523 endif 6524 end 6525 6526 %(complex_mp_format)s function mp_reglogp(arg) 6527 implicit none 6528 %(complex_mp_format)s TWOPII 6529 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6530 %(complex_mp_format)s arg 6531 if(arg.eq.(0.0e0_16,0.0e0_16))then 6532 mp_reglogp=(0.0e0_16,0.0e0_16) 6533 else 6534 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6535 mp_reglogp=log(arg) + TWOPII 6536 else 6537 mp_reglogp=log(arg) 6538 endif 6539 endif 6540 end 6541 6542 %(complex_mp_format)s function mp_reglogm(arg) 6543 implicit none 6544 %(complex_mp_format)s TWOPII 6545 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6546 %(complex_mp_format)s arg 6547 if(arg.eq.(0.0e0_16,0.0e0_16))then 6548 mp_reglogm=(0.0e0_16,0.0e0_16) 
6549 else 6550 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6551 mp_reglogm=log(arg) - TWOPII 6552 else 6553 mp_reglogm=log(arg) 6554 endif 6555 endif 6556 end 6557 6558 %(complex_mp_format)s function mp_regsqrt(arg_in) 6559 implicit none 6560 %(complex_mp_format)s arg_in 6561 %(complex_mp_format)s arg 6562 arg=arg_in 6563 if(abs(imagpart(arg)).eq.0.0e0_16)then 6564 arg=cmplx(real(arg,kind=16),0.0e0_16) 6565 endif 6566 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6567 arg=cmplx(0.0e0_16,imagpart(arg)) 6568 endif 6569 mp_regsqrt=sqrt(arg) 6570 end 6571 6572 6573 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6574 implicit none 6575 %(complex_mp_format)s TWOPII 6576 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6577 %(complex_mp_format)s expr1_in,expr2_in 6578 %(complex_mp_format)s expr1,expr2 6579 %(real_mp_format)s logsw 6580 %(real_mp_format)s imagexpr 6581 logical firstsheet 6582 expr1=expr1_in 6583 expr2=expr2_in 6584 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6585 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6586 endif 6587 if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6588 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6589 endif 6590 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6591 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6592 endif 6593 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6594 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6595 endif 6596 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6597 mp_grreglog=(0.0e0_16,0.0e0_16) 6598 else 6599 imagexpr=imagpart(expr1)*imagpart(expr2) 6600 firstsheet=imagexpr.ge.0.0e0_16 6601 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6602 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6603 if(firstsheet)then 6604 mp_grreglog=log(expr1) 6605 else 6606 if(imagpart(expr1).gt.0.0e0_16)then 6607 mp_grreglog=log(expr1) - logsw*TWOPII 6608 else 6609 mp_grreglog=log(expr1) + logsw*TWOPII 6610 endif 6611 endif 6612 endif 6613 end 6614 6615 
%(complex_mp_format)s function mp_arg(comnum) 6616 implicit none 6617 %(complex_mp_format)s comnum 6618 %(complex_mp_format)s imm 6619 imm = (0.0e0_16,1.0e0_16) 6620 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6621 mp_arg=(0.0e0_16,0.0e0_16) 6622 else 6623 mp_arg=log(comnum/abs(comnum))/imm 6624 endif 6625 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6626 6627 6628 #check for the file functions.f 6629 model_path = self.model.get('modelpath') 6630 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6631 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6632 input = pjoin(model_path,'Fortran','functions.f') 6633 file.writelines(fsock, open(input).read()) 6634 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6635 6636 # check for functions define in the UFO model 6637 ufo_fct = self.model.get('functions') 6638 if ufo_fct: 6639 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6640 for fct in ufo_fct: 6641 # already handle by default 6642 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6643 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6644 "grreglog","regsqrt"]: 6645 ufo_fct_template = """ 6646 double complex function %(name)s(%(args)s) 6647 implicit none 6648 double complex %(args)s 6649 %(definitions)s 6650 %(name)s = %(fct)s 6651 6652 return 6653 end 6654 """ 6655 str_fct = self.p_to_f.parse(fct.expr) 6656 if not self.p_to_f.to_define: 6657 definitions = [] 6658 else: 6659 definitions=[] 6660 for d in self.p_to_f.to_define: 6661 if d == 'pi': 6662 definitions.append(' double precision pi') 6663 definitions.append(' data pi /3.1415926535897932d0/') 6664 else: 6665 definitions.append(' double complex %s' % d) 6666 6667 text = ufo_fct_template % { 6668 'name': fct.name, 6669 'args': ", ".join(fct.arguments), 6670 'fct': str_fct, 6671 'definitions': '\n'.join(definitions) 6672 } 6673 6674 fsock.writelines(text) 6675 if self.opt['mp']: 
6676 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6677 for fct in ufo_fct: 6678 # already handle by default 6679 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6680 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6681 "grreglog","regsqrt"]: 6682 ufo_fct_template = """ 6683 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6684 implicit none 6685 %(complex_mp_format)s mp__%(args)s 6686 %(definitions)s 6687 mp_%(name)s = %(fct)s 6688 6689 return 6690 end 6691 """ 6692 str_fct = self.mp_p_to_f.parse(fct.expr) 6693 if not self.mp_p_to_f.to_define: 6694 definitions = [] 6695 else: 6696 definitions=[] 6697 for d in self.mp_p_to_f.to_define: 6698 if d == 'pi': 6699 definitions.append(' %s mp__pi' % self.mp_real_format) 6700 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6701 else: 6702 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6703 text = ufo_fct_template % { 6704 'name': fct.name, 6705 'args': ", mp__".join(fct.arguments), 6706 'fct': str_fct, 6707 'definitions': '\n'.join(definitions), 6708 'complex_mp_format': self.mp_complex_format 6709 } 6710 fsock.writelines(text) 6711 6712 6713 6714 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6715 6716 6717
6718 - def create_makeinc(self):
6719 """create makeinc.inc containing the file to compile """ 6720 6721 fsock = self.open('makeinc.inc', comment='#') 6722 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6723 text += ' model_functions.o ' 6724 6725 nb_coup_indep = 1 + len(self.coups_dep) // 25 6726 nb_coup_dep = 1 + len(self.coups_indep) // 25 6727 couplings_files=['couplings%s.o' % (i+1) \ 6728 for i in range(nb_coup_dep + nb_coup_indep) ] 6729 if self.opt['mp']: 6730 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6731 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6732 text += ' '.join(couplings_files) 6733 fsock.writelines(text)
6734
6735 - def create_param_write(self):
6736 """ create param_write """ 6737 6738 fsock = self.open('param_write.inc', format='fortran') 6739 6740 fsock.writelines("""write(*,*) ' External Params' 6741 write(*,*) ' ---------------------------------' 6742 write(*,*) ' '""") 6743 def format(name): 6744 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6745 6746 # Write the external parameter 6747 lines = [format(param.name) for param in self.params_ext] 6748 fsock.writelines('\n'.join(lines)) 6749 6750 fsock.writelines("""write(*,*) ' Internal Params' 6751 write(*,*) ' ---------------------------------' 6752 write(*,*) ' '""") 6753 lines = [format(data.name) for data in self.params_indep 6754 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6755 fsock.writelines('\n'.join(lines)) 6756 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6757 write(*,*) ' ----------------------------------------' 6758 write(*,*) ' '""") 6759 lines = [format(data.name) for data in self.params_dep \ 6760 if self.check_needed_param(data.name)] 6761 6762 fsock.writelines('\n'.join(lines)) 6763 6764 6765
6766 - def create_ident_card(self):
6767 """ create the ident_card.dat """ 6768 6769 def format(parameter): 6770 """return the line for the ident_card corresponding to this parameter""" 6771 colum = [parameter.lhablock.lower()] + \ 6772 [str(value) for value in parameter.lhacode] + \ 6773 [parameter.name] 6774 if not parameter.name: 6775 return '' 6776 return ' '.join(colum)+'\n'
6777 6778 fsock = self.open('ident_card.dat') 6779 6780 external_param = [format(param) for param in self.params_ext] 6781 fsock.writelines('\n'.join(external_param)) 6782
6783 - def create_actualize_mp_ext_param_inc(self):
6784 """ create the actualize_mp_ext_params.inc code """ 6785 6786 # In principle one should actualize all external, but for now, it is 6787 # hardcoded that only AS and MU_R can by dynamically changed by the user 6788 # so that we only update those ones. 6789 # Of course, to be on the safe side, one could decide to update all 6790 # external parameters. 6791 update_params_list=[p for p in self.params_ext if p.name in 6792 self.PS_dependent_key] 6793 6794 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6795 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6796 for param in update_params_list] 6797 # When read_lha is false, it is G which is taken in input and not AS, so 6798 # this is what should be reset here too. 6799 if 'aS' in [param.name for param in update_params_list]: 6800 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6801 6802 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6803 fsock.writelines('\n'.join(res_strings))
6804
    def create_param_read(self):
        """create param_read"""
        # Writes param_read.inc: either a simple include of the generated
        # param_card.inc (madevent/FKS/loop-induced outputs), or explicit
        # LHA_get_real calls reading each external parameter from the card.

        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
           or self.opt['loop_induced']:
            # These outputs read the card via an auto-generated include.
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """return the line for the ident_card corresponding to this
            parameter"""
            # The last argument of LHA_get_real is the default used when the
            # entry is absent from the card.
            template = \
                """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
                % {'name': parameter.name,
                   'value': self.p_to_f.parse(str(parameter.value.real))}
            if self.opt['mp']:
                # Append the quadruple-precision read of the same entry.
                template = template + \
                    ("\n call MP_LHA_get_real(npara,param,value,'%(name)s'," +
                     "%(mp_prefix)s%(name)s,%(value)s)") \
                    % {'name': parameter.name, 'mp_prefix': self.mp_prefix,
                       'value': self.mp_p_to_f.parse(str(parameter.value.real))}
            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                       for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
               particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                    {'width': particle.get('width'), 'mass': particle.get('mass')})
                if self.opt['mp']:
                    res_strings.append(\
                        ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,' + \
                         '%(mp_pref)s%(mass)s)') % {'width': particle.get('width'), \
                         'mass': particle.get('mass'), 'mp_pref': self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))


    @staticmethod
6851 - def create_param_card_static(model, output_path, rule_card_path=False, 6852 mssm_convert=True):
6853 """ create the param_card.dat for a givent model --static method-- """ 6854 #1. Check if a default param_card is present: 6855 done = False 6856 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6857 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6858 model_path = model.get('modelpath') 6859 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6860 done = True 6861 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6862 output_path) 6863 if not done: 6864 param_writer.ParamCardWriter(model, output_path) 6865 6866 if rule_card_path: 6867 if hasattr(model, 'rule_card'): 6868 model.rule_card.write_file(rule_card_path) 6869 6870 if mssm_convert: 6871 model_name = model.get('name') 6872 # IF MSSM convert the card to SLAH1 6873 if model_name == 'mssm' or model_name.startswith('mssm-'): 6874 import models.check_param_card as translator 6875 # Check the format of the param_card for Pythia and make it correct 6876 if rule_card_path: 6877 translator.make_valid_param_card(output_path, rule_card_path) 6878 translator.convert_to_slha1(output_path)
6879
6880 - def create_param_card(self):
6881 """ create the param_card.dat """ 6882 6883 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6884 if not hasattr(self.model, 'rule_card'): 6885 rule_card=False 6886 self.create_param_card_static(self.model, 6887 output_path=pjoin(self.dir_path, 'param_card.dat'), 6888 rule_card_path=rule_card, 6889 mssm_convert=True)
6890
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
    """ Determine which Export_v4 class is required. cmd is the command
        interface containing all potential usefull information.
        The output_type argument specifies from which context the output
        is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
        and 'default' for tree-level outputs."""
    # NOTE(review): cmd_options={} is a mutable default argument; it is only
    # read here (copied into the option dictionaries), so this is harmless
    # in practice, but callers should not mutate it.

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Identify the current process (tree amplitudes first, FKS process
    # definitions as a fallback).
    if len(cmd._curr_amps)>0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd,'_fks_multi_proc') and \
                       len(cmd._fks_multi_proc.get('process_definitions'))>0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    # A reduction tool is needed only for genuine loop runs, not for
    # real/tree/LO-only NLO modes.
    requires_reduction_tool = opt['loop_optimized_output'] and \
                              (not curr_proc is None) and \
                              (curr_proc.get('perturbation_couplings') != [] and \
                               not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass':cmd.options['complex_mass_scheme'],
      'export_format':'madloop',
      'mp':True,
      'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir':cmd._iregi_dir,
      'pjfry_dir':cmd.options['pjfry'],
      'golem_dir':cmd.options['golem'],
      'samurai_dir':cmd.options['samurai'],
      'ninja_dir':cmd.options['ninja'],
      'collier_dir':cmd.options['collier'],
      'fortran_compiler':cmd.options['fortran_compiler'],
      'f2py_compiler':cmd.options['f2py_compiler'],
      'output_dependencies':cmd.options['output_dependencies'],
      'SubProc_prefix':'P',
      'compute_color_flows':cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path'],
      'output_options': cmd_options
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass=None
            if not cmd.options['loop_optimized_output']:
                ExporterClass=loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception, "output_type not recognize %s" % output_type
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s'%str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type=='amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass=None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # Multiple precision is only needed when virtual amplitudes exist.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format']='FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format']='FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type=='default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format':cmd._export_format,
                    'mp': False,
                    'sa_symmetry':False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True })

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        # Options used when the process turns out to be loop-induced.
        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir,loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir,opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir,loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir,opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir,opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:

            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)

        else:
            raise Exception, 'Wrong export_v4 format'
    else:
        raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
7062
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Name of the template used when writing the grouped matrix_N.f files.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Tag identifying the grouped output mode handled by this exporter.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Parameters:
            subproc_group: a group_subprocs.SubProcessGroup holding the
                matrix elements of the group (checked explicitly below).
            fortran_model: helas call writer passed through to
                write_matrix_element_v4.
            group_number: group index (not used directly in this body;
                the directory name is derived from subproc_group itself).

        Returns the accumulated number of calls reported by
        write_matrix_element_v4 over all matrix elements of the group.
        Raises PhysicsObjectError if subproc_group has the wrong type.
        """

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "subproc_group object not SubProcessGroup"

        # Lazily bind the model from the first process of the first
        # matrix element if it was not set on the exporter yet.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + subprocdir)

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             str(ime+1),
                                             subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                  get('diagrams'),
                                              filename,
                                              model = \
                                                  matrix_element.get('processes')[0].\
                                                  get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE(review): uses the last matrix_element of the loop above —
        # presumably all group members share the same multiplicity; confirm.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath,'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

        # Symlink the shared per-process files into the Pn directory.
        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        # Normalize a falsy accumulator to an explicit integer zero.
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7225 7226 7227 #=========================================================================== 7228 # Helper functions 7229 #===========================================================================
7230 - def modify_grouping(self, matrix_element):
7231 """allow to modify the grouping (if grouping is in place) 7232 return two value: 7233 - True/False if the matrix_element was modified 7234 - the new(or old) matrix element""" 7235 7236 return True, matrix_element.split_lepton_grouping()
7237 7238 #=========================================================================== 7239 # write_super_auto_dsig_file 7240 #===========================================================================
7241 - def write_super_auto_dsig_file(self, writer, subproc_group):
7242 """Write the auto_dsig.f file selecting between the subprocesses 7243 in subprocess group mode""" 7244 7245 replace_dict = {} 7246 7247 # Extract version number and date from VERSION file 7248 info_lines = self.get_mg5_info_lines() 7249 replace_dict['info_lines'] = info_lines 7250 7251 matrix_elements = subproc_group.get('matrix_elements') 7252 7253 # Extract process info lines 7254 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7255 matrix_elements]) 7256 replace_dict['process_lines'] = process_lines 7257 7258 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7259 replace_dict['nexternal'] = nexternal 7260 7261 replace_dict['nsprocs'] = 2*len(matrix_elements) 7262 7263 # Generate dsig definition line 7264 dsig_def_line = "DOUBLE PRECISION " + \ 7265 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7266 range(len(matrix_elements))]) 7267 replace_dict["dsig_def_line"] = dsig_def_line 7268 7269 # Generate dsig process lines 7270 call_dsig_proc_lines = [] 7271 for iproc in range(len(matrix_elements)): 7272 call_dsig_proc_lines.append(\ 7273 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7274 {"num": iproc + 1, 7275 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7276 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7277 7278 if writer: 7279 file = open(os.path.join(_file_path, \ 7280 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7281 file = file % replace_dict 7282 # Write the file 7283 writer.writelines(file) 7284 else: 7285 return replace_dict
7286 7287 #=========================================================================== 7288 # write_mirrorprocs 7289 #===========================================================================
7290 - def write_mirrorprocs(self, writer, subproc_group):
7291 """Write the mirrorprocs.inc file determining which processes have 7292 IS mirror process in subprocess group mode.""" 7293 7294 lines = [] 7295 bool_dict = {True: '.true.', False: '.false.'} 7296 matrix_elements = subproc_group.get('matrix_elements') 7297 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7298 (len(matrix_elements), 7299 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7300 me in matrix_elements]))) 7301 # Write the file 7302 writer.writelines(lines)
7303 7304 #=========================================================================== 7305 # write_configs_file 7306 #===========================================================================
7307 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7308 """Write the configs.inc file with topology information for a 7309 subprocess group. Use the first subprocess with a diagram for each 7310 configuration.""" 7311 7312 matrix_elements = subproc_group.get('matrix_elements') 7313 model = matrix_elements[0].get('processes')[0].get('model') 7314 7315 diagrams = [] 7316 config_numbers = [] 7317 for iconfig, config in enumerate(diagrams_for_config): 7318 # Check if any diagrams correspond to this config 7319 if set(config) == set([0]): 7320 continue 7321 subproc_diags = [] 7322 for s,d in enumerate(config): 7323 if d: 7324 subproc_diags.append(matrix_elements[s].\ 7325 get('diagrams')[d-1]) 7326 else: 7327 subproc_diags.append(None) 7328 diagrams.append(subproc_diags) 7329 config_numbers.append(iconfig + 1) 7330 7331 # Extract number of external particles 7332 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7333 7334 return len(diagrams), \ 7335 self.write_configs_file_from_diagrams(writer, diagrams, 7336 config_numbers, 7337 nexternal, ninitial, 7338 matrix_elements[0],model)
7339 7340 #=========================================================================== 7341 # write_run_configs_file 7342 #===========================================================================
7343 - def write_run_config_file(self, writer):
7344 """Write the run_configs.inc file for MadEvent""" 7345 7346 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7347 text = open(path).read() % {'chanperjob':'2'} 7348 writer.write(text) 7349 return True
7350 7351 7352 #=========================================================================== 7353 # write_leshouche_file 7354 #===========================================================================
7355 - def write_leshouche_file(self, writer, subproc_group):
7356 """Write the leshouche.inc file for MG4""" 7357 7358 all_lines = [] 7359 7360 for iproc, matrix_element in \ 7361 enumerate(subproc_group.get('matrix_elements')): 7362 all_lines.extend(self.get_leshouche_lines(matrix_element, 7363 iproc)) 7364 7365 # Write the file 7366 writer.writelines(all_lines) 7367 7368 return True
7369