Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import, division 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  from fractions import Fraction 
  20  """Methods and classes to export matrix elements to v4 format.""" 
  21   
  22  import copy 
  23  from six import StringIO 
  24  import itertools 
  25  import fractions 
  26  import glob 
  27  import logging 
  28  import math 
  29  import os 
  30  import io 
  31  import re 
  32  import shutil 
  33  import subprocess 
  34  import sys 
  35  import time 
  36  import traceback 
  37  import  collections 
  38   
  39  import aloha 
  40   
  41  import madgraph.core.base_objects as base_objects 
  42  import madgraph.core.color_algebra as color 
  43  import madgraph.core.helas_objects as helas_objects 
  44  import madgraph.iolibs.drawing_eps as draw 
  45  import madgraph.iolibs.files as files 
  46  import madgraph.iolibs.group_subprocs as group_subprocs 
  47  import madgraph.iolibs.file_writers as writers 
  48  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  49  import madgraph.iolibs.template_files as template_files 
  50  import madgraph.iolibs.ufo_expression_parsers as parsers 
  51  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  52  import madgraph.interface.common_run_interface as common_run_interface 
  53  import madgraph.various.diagram_symmetry as diagram_symmetry 
  54  import madgraph.various.misc as misc 
  55  import madgraph.various.banner as banner_mod 
  56  import madgraph.various.process_checks as process_checks 
  57  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  58  import aloha.create_aloha as create_aloha 
  59  import models.import_ufo as import_ufo 
  60  import models.write_param_card as param_writer 
  61  import models.check_param_card as check_param_card 
  62  from models import UFOError 
  63   
  64   
  65  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  66  from madgraph.iolibs.files import cp, ln, mv 
  67   
  68  from madgraph import InvalidCmd 
  69   
# Shorthand used throughout this module for building filesystem paths.
pjoin = os.path.join

# Root of the madgraph installation (parent of this file's package), with a
# trailing slash; used to locate template files shipped with the code.
_file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
logger = logging.getLogger('madgraph.export_v4')

# Compilers used when the user configuration does not specify any.
default_compiler= {'fortran': 'gfortran',
                       'f2py': 'f2py',
                       'cpp':'g++'}
class VirtualExporter(object):
    """Abstract interface between madgraph and a concrete output exporter.

    The class-level flags below tell the madgraph machinery how to drive an
    exporter; daughter classes override the flags and the hook methods.
    """

    # How matrix elements are grouped before calling
    # 'generate_subprocess_directory':
    #   False       -> no grouping (only identical matrix elements merged)
    #   'madevent'  -> group the massless quarks and massless leptons
    #   'madweight' -> additionally group the gluon with the massless quarks
    grouped_mode = 'madevent'

    # When grouped_mode is False, uu~ and u~u are generated independently;
    # setting sa_symmetry generates only one of the two matrix elements.
    sa_symmetry = False

    # Ask madgraph to check whether the output directory already exists and
    # propose to the user to remove it first if this is the case.
    check = True

    # Initialisation mode of the output directory:
    #   'Template' -> madgraph calls copy_template
    #   'dir'      -> madgraph just creates an empty directory
    #   None       -> madgraph does nothing for initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        # opt is a dictionary with all the optional arguments passed at
        # output time.  Monkey-patch the helas call writer so that every
        # helas call is routed through this exporter's customisation hook.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    @staticmethod
    def custom_helas_call(call, arg):
        """Customisation hook for aloha function calls.

        call -- the default template for the call
        arg  -- the dictionary used for the call
        The default implementation is the identity.
        """
        return call, arg

    # Dispatcher kept as a lambda class attribute so daughter classes can
    # replace either the dispatcher or the static hook independently.
    helas_call_writer_custom = lambda x, y, z: x.custom_helas_call(y, z)

    def copy_template(self, model):
        """Hook called when output == 'Template'; default does nothing."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # Ungrouped signature is (self, matrix_element, helicity_model,
        # me_number).  Must return the number of calls to helicity routines.
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Hook writing the model files; default does nothing."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Hook run at the very end of the output; default does nothing."""
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Return (changed, matrix_element); default leaves it untouched."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Options used when the caller does not override them at construction
    # time; 'output_options' collects the command-line options given at
    # output time.
    default_opt = {'clean': False, 'complex_mass':False,
                        'export_format':'madevent', 'mp': False,
                        'v5_model': True,
                        'output_options':{}
                        }
    # Overrides VirtualExporter: by default do not group subprocesses.
    grouped_mode = False
    # Whether the JAMP optimisation is applied (off for the base exporter).
    jamp_optim = False
def __init__(self, dir_path = "", opt=None):
    """Initialise the Fortran (v4) exporter.

    dir_path -- target directory of the output
    opt      -- optional dict whose entries override default_opt
    """
    self.mgme_dir = MG5DIR
    self.dir_path = dir_path
    self.model = None

    # Start from the class defaults and layer the caller's options on top.
    options = dict(self.default_opt)
    if opt:
        options.update(opt)
    self.opt = options
    self.cmd_options = options['output_options']

    # Placeholder used to pass information to the run_interface.
    self.proc_characteristic = banner_mod.ProcCharacteristic()

    # Let the mother class install its helas-writer customisation.
    super(ProcessExporterFortran, self).__init__(dir_path, opt)
187 188 189 #=========================================================================== 190 # process exporter fortran switch between group and not grouped 191 #===========================================================================
#===========================================================================
# process exporter fortran switch between group and not grouped
#===========================================================================
def export_processes(self, matrix_elements, fortran_model):
    """Dispatch subprocess-directory generation for grouped or ungrouped
    matrix elements and return the total number of helicity-routine calls."""

    # Grouped output iterates the groups themselves; ungrouped output
    # iterates the individual matrix elements.
    if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
        numbered = enumerate(matrix_elements)
    else:
        numbered = enumerate(matrix_elements.get_matrix_elements())

    total = 0
    for number, element in numbered:
        total += self.generate_subprocess_directory(element, fortran_model, number)
    return total
206 207 208 #=========================================================================== 209 # create the run_card 210 #===========================================================================
#===========================================================================
# create the run_card
#===========================================================================
def create_run_card(self, matrix_elements, history):
    """Write Cards/run_card_default.dat, tailored to the generated
    processes when possible, and copy it to Cards/run_card.dat."""

    # bypass this for the loop-check: a bare loop matrix element carries no
    # process list usable for tailoring the card
    import madgraph.loop.loop_helas_objects as loop_helas_objects
    if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
        matrix_elements = None

    run_card = banner_mod.RunCard()

    # Collect the processes so that the defaults can be tuned to them;
    # grouped output stores them one level deeper than ungrouped output.
    default=True
    if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
        processes = [me.get('processes') for megroup in matrix_elements
                     for me in megroup['matrix_elements']]
    elif matrix_elements:
        processes = [me.get('processes')
                     for me in matrix_elements['matrix_elements']]
    else:
        # No process information available: keep the generic defaults.
        default =False

    if default:
        run_card.create_default_for_process(self.proc_characteristic,
                                            history,
                                            processes)

    run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
    shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                    pjoin(self.dir_path, 'Cards', 'run_card.dat'))
241 242 243 244 #=========================================================================== 245 # copy the Template in a new directory. 246 #===========================================================================
#===========================================================================
# copy the Template in a new directory.
#===========================================================================
def copy_template(self, model):
    """create the directory run_name as a copy of the MadEvent
    Template, and clean the directory

    Three initialisation situations are handled:
      1. dir_path does not exist yet      -> copy the full Template tree;
      2. we are running from inside dir_path -> refresh it in place;
      3. dir_path exists but has no TemplateVersion.txt -> add version info.
    """

    #First copy the full template tree if dir_path doesn't exit
    if not os.path.isdir(self.dir_path):
        assert self.mgme_dir, \
               "No valid MG_ME path given for MG4 run directory creation."
        logger.info('initialize a new directory: %s' % \
                    os.path.basename(self.dir_path))
        shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                        self.dir_path, True)
        # misc.copytree since dir_path already exists
        misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                      self.dir_path)
        # copy plot_card (keep a pristine *_default.dat next to the live card)
        for card in ['plot_card']:
            if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                try:
                    shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                except IOError:
                    logger.warning("Failed to copy " + card + ".dat to default")
    elif os.getcwd() == os.path.realpath(self.dir_path):
        # Re-initialising the directory we are currently sitting in.
        logger.info('working in local directory: %s' % \
                    os.path.realpath(self.dir_path))
        # misc.copytree since dir_path already exists
        misc.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                      self.dir_path)
        # for name in misc.glob('Template/LO/*', self.mgme_dir):
        #     name = os.path.basename(name)
        #     filname = pjoin(self.mgme_dir, 'Template', 'LO', name)
        #     if os.path.isfile(filename):
        #         files.cp(filename, pjoin(self.dir_path, name))
        #     elif os.path.isdir(filename):
        #         shutil.copytree(filename, pjoin(self.dir_path, name), True)
        # misc.copytree since dir_path already exists
        misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                      self.dir_path)
        # Copy plot_card
        for card in ['plot_card']:
            if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                try:
                    shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                except IOError:
                    logger.warning("Failed to copy " + card + ".dat to default")
    elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
        assert self.mgme_dir, \
               "No valid MG_ME path given for MG4 run directory creation."
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # No MGMEVersion.txt in the source tree: fall back on the MG5
            # package version instead.
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(
                MG5_version['version'])

    #Ensure that the Template is clean
    if self.opt['clean']:
        logger.info('remove old information in %s' % \
                    os.path.basename(self.dir_path))
        if 'MADGRAPH_BASE' in os.environ:
            # Web-server deployment: use the web-safe cleaning mode.
            misc.call([pjoin('bin', 'internal', 'clean_template'),
                       '--web'], cwd=self.dir_path)
        else:
            try:
                misc.call([pjoin('bin', 'internal', 'clean_template')], \
                          cwd=self.dir_path)
            except Exception as why:
                raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                     % (os.path.basename(self.dir_path), why))

    #Write version info
    MG_version = misc.get_pkg_info()
    open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
        MG_version['version'])

    # add the makefile in Source directory
    filename = pjoin(self.dir_path, 'Source', 'makefile')
    self.write_source_makefile(writers.FileWriter(filename))

    # add the DiscreteSampler information
    files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
             pjoin(self.dir_path, 'Source'))
    files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'StringCast.f'),
             pjoin(self.dir_path, 'Source'))

    # We need to create the correct open_data for the pdf
    self.write_pdf_opendata()
336 337 338 #=========================================================================== 339 # Call MadAnalysis5 to generate the default cards for this process 340 #===========================================================================
#===========================================================================
# Call MadAnalysis5 to generate the default cards for this process
#===========================================================================
def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                      ma5_path, output_dir, levels = ['parton','hadron']):
    """ Call MA5 so that it writes default cards for both parton and
    post-shower levels, tailored for this particular process.

    Failures are deliberately non-fatal: MA5 cards are a convenience, so
    any error only produces a warning and an empty default analysis.
    """

    if len(levels) == 0:
        return
    start = time.time()
    logger.info('Generating MadAnalysis5 default cards tailored to this process')
    try:
        MA5_interpreter = common_run_interface.CommonRunCmd.\
                          get_MadAnalysis5_interpreter(MG5DIR, ma5_path, loglevel=100)
    except (Exception, SystemExit) as e:
        # Best effort only: a broken MA5 installation must not stop output.
        logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty')
        return
    if MA5_interpreter is None:
        return

    MA5_main = MA5_interpreter.main
    for lvl in ['parton', 'hadron']:
        if lvl in levels:
            card_to_generate = pjoin(output_dir, 'madanalysis5_%s_card_default.dat' % lvl)
            try:
                text = MA5_main.madgraph.generate_card(history, proc_defs, processes, lvl)
            except (Exception, SystemExit) as e:
                # keep the default card (skip only)
                logger.warning('MadAnalysis5 failed to write a %s-level' % lvl +
                               ' default analysis card for this process.')
                logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.' % lvl)
                # Capture the traceback for debug-level logging only.
                error = StringIO()
                traceback.print_exc(file=error)
                logger.debug('MadAnalysis5 error was:')
                logger.debug('-' * 60)
                logger.debug(error.getvalue()[:-1])
                logger.debug('-' * 60)
            else:
                open(card_to_generate, 'w').write(text)
    stop = time.time()
    if stop - start > 1:
        logger.info('Cards created in %.2fs' % (stop - start))
381 382 #=========================================================================== 383 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 384 #===========================================================================
#===========================================================================
# write a procdef_mg5 (an equivalent of the MG4 proc_card.dat)
#===========================================================================
def write_procdef_mg5(self, file_pos, modelname, process_str):
    """ write an equivalent of the MG4 proc_card in order that all the Madevent
    Perl script of MadEvent4 are still working properly for pure MG5 run.

    file_pos    -- output path; if false, the substitution dict is returned
    modelname   -- model name written in the card
    process_str -- MG5 process definition, possibly with coupling requests
    """

    proc_card_template = template_files.mg4_proc_card.mg4_template
    process_template = template_files.mg4_proc_card.process_template
    process_text = ''
    coupling = ''
    new_process_content = []

    # First find the coupling and suppress the coupling from process_str
    #But first ensure that coupling are define whithout spaces:
    process_str = process_str.replace(' =', '=')
    process_str = process_str.replace('= ', '=')
    process_str = process_str.replace(',', ' , ')
    #now loop on the element and treat all the coupling
    for info in process_str.split():
        if '=' in info:
            # Tokens containing '=' are coupling restrictions (e.g. QED=2).
            coupling += info + '\n'
        else:
            new_process_content.append(info)
    # Recombine the process_str (which is the input process_str without coupling
    #info)
    process_str = ' '.join(new_process_content)

    #format the SubProcess
    replace_dict = {'process': process_str,
                    'coupling': coupling}
    process_text += process_template.substitute(replace_dict)

    replace_dict = {'process': process_text,
                    'model': modelname,
                    'multiparticle': ''}
    text = proc_card_template.substitute(replace_dict)

    if file_pos:
        ff = open(file_pos, 'w')
        ff.write(text)
        ff.close()
    else:
        return replace_dict
427 428
def pass_information_from_cmd(self, cmd):
    """Record the current process definitions from the command interface.

    Needed later by the MadAnalysis5 card generation; the interface object
    itself must not be modified here.
    """
    self.proc_defs = cmd._curr_proc_defs
433 434 #=========================================================================== 435 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 436 #===========================================================================
#===========================================================================
# Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz
#===========================================================================
def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
    """Finalize the v4 directory: write the run_card and the MadAnalysis5
    cards.  Daughter classes extend this with format-specific steps."""
    self.create_run_card(matrix_elements, history)
    self.create_MA5_cards(matrix_elements, history)
442
def create_MA5_cards(self, matrix_elements, history):
    """ A wrapper around the creation of the MA5 cards so that it can be
    bypassed by daughter classes (i.e. in standalone)."""
    # Only act when an MA5 path is configured and process definitions were
    # recorded by pass_information_from_cmd.
    if 'madanalysis5_path' in self.opt and not \
            self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
        processes = None
        # Grouped output stores the matrix elements one level deeper.
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]

        self.create_default_madanalysis5_cards(
            history, self.proc_defs, processes,
            self.opt['madanalysis5_path'], pjoin(self.dir_path, 'Cards'),
            levels = ['hadron','parton'])

        for level in ['hadron', 'parton']:
            # Copying these cards turn on the use of MadAnalysis5 by default.
            if os.path.isfile(pjoin(self.dir_path, 'Cards', 'madanalysis5_%s_card_default.dat' % level)):
                shutil.copy(pjoin(self.dir_path, 'Cards', 'madanalysis5_%s_card_default.dat' % level),
                            pjoin(self.dir_path, 'Cards', 'madanalysis5_%s_card.dat' % level))
466 467 #=========================================================================== 468 # Create the proc_characteristic file passing information to the run_interface 469 #===========================================================================
#===========================================================================
# Create the proc_characteristic file passing information to the run_interface
#===========================================================================
def create_proc_charac(self, matrix_elements=None, history="", **opts):
    """Write the proc_characteristics file in SubProcesses so that the
    run_interface can read back the features of the generated process.
    The matrix_elements/history arguments are unused here but kept for
    daughter-class overrides."""

    self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
473 474 #=========================================================================== 475 # write_matrix_element_v4 476 #===========================================================================
#===========================================================================
# write_matrix_element_v4
#===========================================================================
def write_matrix_element_v4(self):
    """Hook for writing a matrix.f file; the real work is done in the
    daughter classes (kept here for inheritance)."""
    return None
481 482 #=========================================================================== 483 # write_pdf_opendata 484 #===========================================================================
#===========================================================================
# write_pdf_opendata
#===========================================================================
def write_pdf_opendata(self):
    """ modify the pdf opendata file, to allow direct access to cluster node
    repository if configure

    Two Fortran sources are regenerated from templates: opendata.f (raw pdf
    tables) and pdfwrap_lhapdf.f (LHAPDF sets).  When cluster_local_path is
    set, a cascade of cluster-local locations is tried before the default.
    NOTE(review): the exact column layout of the Fortran snippets below was
    reconstructed from a flattened dump -- confirm against upstream since
    fixed-form Fortran is column sensitive.
    """

    if not self.opt["cluster_local_path"]:
        changer = {"pdf_systemwide": ""}
    else:
        # Each numbered label is the ERR= target of the previous open().
        to_add = """
          tempname='%(path)s'//Tablefile
          open(IU,file=tempname,status='old',ERR=1)
          return
 1        tempname='%(path)s/Pdfdata/'//Tablefile
          open(IU,file=tempname,status='old',ERR=2)
          return
 2        tempname='%(path)s/lhapdf'//Tablefile
          open(IU,file=tempname,status='old',ERR=3)
          return
 3        tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
          open(IU,file=tempname,status='old',ERR=4)
          return
 4        tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
          open(IU,file=tempname,status='old',ERR=5)
          return
""" % {"path": self.opt["cluster_local_path"]}

        changer = {"pdf_systemwide": to_add}

    ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
    template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"), "r").read()
    ff.writelines(template % changer)

    # Do the same for lhapdf set
    if not self.opt["cluster_local_path"]:
        changer = {"cluster_specific_path": ""}
    else:
        to_add = """
          LHAPath='%(path)s/PDFsets'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='%(path)s/../lhapdf/pdfsets/'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='./PDFsets'
""" % {"path": self.opt["cluster_local_path"]}
        changer = {"cluster_specific_path": to_add}

    ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
    #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
    template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"), "r").read()
    ff.writelines(template % changer)

    return
542 543 544 545 #=========================================================================== 546 # write_maxparticles_file 547 #===========================================================================
#===========================================================================
# write_maxparticles_file
#===========================================================================
def write_maxparticles_file(self, writer, matrix_elements):
    """Write the maxparticles.inc file for MadEvent: the largest number of
    external particles over all matrix elements."""

    # Accept either a HelasMultiProcess or a plain list of matrix elements.
    if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
        all_me = matrix_elements.get('matrix_elements')
    else:
        all_me = matrix_elements
    maxparticles = max([me.get_nexternal_ninitial()[0] for me in all_me])

    content = ("integer max_particles\n"
               "parameter(max_particles=%d)") % maxparticles

    # Write the file
    writer.writelines(content)

    return True
565 566 567 #=========================================================================== 568 # export the model 569 #===========================================================================
570 - def export_model_files(self, model_path):
571 """Configure the files/link of the process according to the model""" 572 573 # Import the model 574 for file in os.listdir(model_path): 575 if os.path.isfile(pjoin(model_path, file)): 576 shutil.copy2(pjoin(model_path, file), \ 577 pjoin(self.dir_path, 'Source', 'MODEL'))
578 579 593 601 602 603 #=========================================================================== 604 # export the helas routine 605 #===========================================================================
#===========================================================================
# export the helas routine
#===========================================================================
def export_helas(self, helas_path):
    """Copy the HELAS routines from helas_path into Source/DHELAS,
    renaming Makefile.template to Makefile and skipping any plain
    Makefile."""

    for name in os.listdir(helas_path):
        source = pjoin(helas_path, name)
        if not os.path.isfile(source):
            continue
        if source.endswith('Makefile.template'):
            # The template becomes the actual DHELAS Makefile.
            cp(source, self.dir_path + '/Source/DHELAS/Makefile')
        elif source.endswith('Makefile'):
            # Never overwrite with a stray pre-built Makefile.
            continue
        else:
            cp(source, self.dir_path + '/Source/DHELAS')
619 # following lines do the same but whithout symbolic link 620 # 621 #def export_helas(mgme_dir, dir_path): 622 # 623 # # Copy the HELAS directory 624 # helas_dir = pjoin(mgme_dir, 'HELAS') 625 # for filename in os.listdir(helas_dir): 626 # if os.path.isfile(pjoin(helas_dir, filename)): 627 # shutil.copy2(pjoin(helas_dir, filename), 628 # pjoin(dir_path, 'Source', 'DHELAS')) 629 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 630 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 631 # 632 633 #=========================================================================== 634 # generate_subprocess_directory 635 #===========================================================================
#===========================================================================
# generate_subprocess_directory
#===========================================================================
def generate_subprocess_directory(self, matrix_element,
                                  fortran_model,
                                  me_number):
    """Routine to generate a subprocess directory (for inheritance).
    The base class does nothing; daughters create the actual directory."""
    return None
642 643 #=========================================================================== 644 # get_source_libraries_list 645 #===========================================================================
#===========================================================================
# get_source_libraries_list
#===========================================================================
def get_source_libraries_list(self):
    """ Returns the list of libraries to be compiling when compiling the
    SOURCE directory. It is different for loop_induced processes and
    also depends on the value of the 'output_dependencies' option"""

    stems = ['dhelas', 'pdf', 'model', 'cernlib', 'bias']
    return ['$(LIBDIR)lib%s.$(libext)' % stem for stem in stems]
656 657 #=========================================================================== 658 # write_source_makefile 659 #===========================================================================
#===========================================================================
# write_source_makefile
#===========================================================================
def write_source_makefile(self, writer):
    """Write the Source directory makefile for MG4/MadEvent from the
    madevent_makefile_source template and return the substitution dict.
    (The original docstring wrongly said nexternal.inc.)"""

    path = pjoin(_file_path, 'iolibs', 'template_files', 'madevent_makefile_source')
    set_of_lib = ' '.join(['$(LIBRARIES)'] + self.get_source_libraries_list())
    # mssm models need an extra param_card translation step before the
    # model library can be built.
    if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
        model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
    else:
        model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''

    replace_dict= {'libraries': set_of_lib,
                   'model':model_line,
                   'additional_dsample': '',
                   'additional_dependencies':''}

    if writer:
        text = open(path).read() % replace_dict
        writer.write(text)

    return replace_dict
683 684 #=========================================================================== 685 # write_nexternal_madspin 686 #===========================================================================
#===========================================================================
# write_nexternal_madspin
#===========================================================================
def write_nexternal_madspin(self, writer, nexternal, ninitial):
    """Write the nexternal_prod.inc include file used by MadSpin.

    Returns True after writing when a writer is given; otherwise returns
    the substitution dictionary."""

    replace_dict = {'nexternal': nexternal, 'ninitial': ninitial}

    content = """ \
          integer nexternal_prod
          parameter (nexternal_prod=%(nexternal)d)
          integer nincoming_prod
          parameter (nincoming_prod=%(ninitial)d)""" % replace_dict

    if writer:
        writer.writelines(content)
        return True
    return replace_dict
707 708 #=========================================================================== 709 # write_helamp_madspin 710 #===========================================================================
#===========================================================================
# write_helamp_madspin
#===========================================================================
def write_helamp_madspin(self, writer, ncomb):
    """Write the helamp.inc include file used by MadSpin.

    Returns True after writing when a writer is given; otherwise returns
    the substitution dictionary."""

    replace_dict = {'ncomb': ncomb}

    content = """ \
          integer ncomb1
          parameter (ncomb1=%(ncomb)d)
          double precision helamp(ncomb1)
          common /to_helamp/helamp """ % replace_dict

    if writer:
        writer.writelines(content)
        return True
    return replace_dict
730 731 732 733 #=========================================================================== 734 # write_nexternal_file 735 #===========================================================================
#===========================================================================
# write_nexternal_file
#===========================================================================
def write_nexternal_file(self, writer, nexternal, ninitial):
    """Write the nexternal.inc file for MG4.

    Returns True after writing when a writer is given; otherwise returns
    the substitution dictionary."""

    replace_dict = {'nexternal': nexternal, 'ninitial': ninitial}

    content = """ \
          integer nexternal
          parameter (nexternal=%(nexternal)d)
          integer nincoming
          parameter (nincoming=%(ninitial)d)""" % replace_dict

    if writer:
        writer.writelines(content)
        return True
    return replace_dict
756 #=========================================================================== 757 # write_pmass_file 758 #===========================================================================
#===========================================================================
# write_pmass_file
#===========================================================================
def write_pmass_file(self, writer, matrix_element):
    """Write the pmass.inc file for MG4: one line per external particle
    assigning its mass parameter (wrapped in abs() unless it is ZERO)."""

    model = matrix_element.get('processes')[0].get('model')

    def _mass_expr(wavefunction):
        # Mass parameter of the particle; non-zero masses may be negative
        # parameters, hence the abs().
        name = model.get('particle_dict')[wavefunction.get('pdg_code')].get('mass')
        if name.lower() == "zero":
            return name
        return "abs(%s)" % name

    lines = ["pmass(%d)=%s" % (wf.get('number_external'), _mass_expr(wf))
             for wf in matrix_element.get_external_wavefunctions()]

    writer.writelines(lines)
    return True
777 778 #=========================================================================== 779 # write_ngraphs_file 780 #===========================================================================
#===========================================================================
# write_ngraphs_file
#===========================================================================
def write_ngraphs_file(self, writer, nconfigs):
    """Write the ngraphs.inc file for MG4. Needs input from
    write_configs_file (the number of configurations)."""

    content = " integer n_max_cg\n" + "parameter (n_max_cg=%d)" % nconfigs
    writer.writelines(content)
    return True
792 793 #=========================================================================== 794 # write_leshouche_file 795 #===========================================================================
#===========================================================================
# write_leshouche_file
#===========================================================================
def write_leshouche_file(self, writer, matrix_element):
    """Write the leshouche.inc file for MG4 by delegating line generation
    to get_leshouche_lines with numproc=0."""

    lines = self.get_leshouche_lines(matrix_element, 0)
    writer.writelines(lines)
    return True
803 804 #=========================================================================== 805 # get_leshouche_lines 806 #===========================================================================
#===========================================================================
# get_leshouche_lines
#===========================================================================
def get_leshouche_lines(self, matrix_element, numproc):
    """Build the DATA statements of leshouche.inc for MG4: particle ids
    (IDUP), mother information (MOTHUP) and colour connections (ICOLUP)
    for every process sharing this matrix element."""

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    lines = []
    for iproc, proc in enumerate(matrix_element.get('processes')):
        legs = proc.get_legs_with_decays()
        # IDUP: PDG codes of the external legs.  ("%3r" on an int renders
        # like a width-3 %d, since repr(int) is its decimal form.)
        lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                     (iproc + 1, numproc+1, nexternal,
                      ",".join([str(l.get('id')) for l in legs])))
        if iproc == 0 and numproc == 0:
            # MOTHUP: initial legs have no mother (0); final legs point to
            # mothers 1 and 2.  Written only once, for the first process.
            for i in [1, 2]:
                lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                             (i, nexternal,
                              ",".join([ "%3r" % 0 ] * ninitial + \
                                       [ "%3r" % i ] * (nexternal - ninitial))))

        # Here goes the color connections corresponding to the JAMPs
        # Only one output, for the first subproc!
        if iproc == 0:
            # If no color basis, just output trivial color flow
            if not matrix_element.get('color_basis'):
                for i in [1, 2]:
                    lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                 (i, numproc+1, nexternal,
                                  ",".join([ "%3r" % 0 ] * nexternal)))
            else:
                # First build a color representation dictionnary
                repr_dict = {}
                for l in legs:
                    # Colour representation of each leg, sign-flipped for
                    # the initial state via (-1)**(1+state).
                    repr_dict[l.get('number')] = \
                        proc.get('model').get_particle(l.get('id')).get_color() \
                        * (-1)**(1+l.get('state'))
                # Get the list of color flows
                color_flow_list = \
                    matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                               ninitial)
                # And output them properly
                for cf_i, color_flow_dict in enumerate(color_flow_list):
                    for i in [0, 1]:
                        lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                     (i + 1, cf_i + 1, numproc+1, nexternal,
                                      ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                for l in legs])))

    return lines
856 857 858 859 860 #=========================================================================== 861 # write_maxamps_file 862 #===========================================================================
863 - def write_maxamps_file(self, writer, maxamps, maxflows, 864 maxproc,maxsproc):
865 """Write the maxamps.inc file for MG4.""" 866 867 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 868 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 869 (maxamps, maxflows) 870 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 871 (maxproc, maxsproc) 872 873 # Write the file 874 writer.writelines(file) 875 876 return True
877 878 879 #=========================================================================== 880 # Routines to output UFO models in MG4 format 881 #=========================================================================== 882
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO).

        Side effects: writes Source/MODEL and Source/DHELAS under
        self.dir_path, copies the HELAS makefile/function templates, and
        creates the model symbolic links.  aloha global state
        (mp_precision, loop_mode) is saved and restored around the build.
        NOTE(review): the default arguments are mutable lists; they are
        only read here, never mutated.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']
        self.model = model
        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        self.opt['exporter'] = self.__class__
        # NOTE(review): relies on the project's option containers supporting
        # '+' to merge self.opt with self.proc_characteristic — confirm.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            try:
                # Silence the model logger while trying the short model name;
                # fall back to the full path on failure.
                with misc.MuteLogger(['madgraph.models'], [60]):
                    aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
            except (ImportError, UFOError):
                aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # Loop-capable aloha functions are needed as soon as any wanted
        # lorentz structure carries an 'L' tag.
        if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
945 946 947 #=========================================================================== 948 # Helper functions 949 #===========================================================================
950 - def modify_grouping(self, matrix_element):
951 """allow to modify the grouping (if grouping is in place) 952 return two value: 953 - True/False if the matrix_element was modified 954 - the new(or old) matrix element""" 955 956 return False, matrix_element
957 958 #=========================================================================== 959 # Helper functions 960 #===========================================================================
961 - def get_mg5_info_lines(self):
962 """Return info lines for MG5, suitable to place at beginning of 963 Fortran files""" 964 965 info = misc.get_pkg_info() 966 info_lines = "" 967 if info and 'version' in info and 'date' in info: 968 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 969 (info['version'], info['date']) 970 info_lines = info_lines + \ 971 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 972 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 973 else: 974 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 975 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 976 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 977 978 return info_lines
979
980 - def get_process_info_lines(self, matrix_element):
981 """Return info lines describing the processes for this matrix element""" 982 983 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 984 for process in matrix_element.get('processes')])
985 986
987 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
988 """Return the Helicity matrix definition lines for this matrix element""" 989 990 helicity_line_list = [] 991 i = 0 992 for helicities in matrix_element.get_helicity_matrix(): 993 i = i + 1 994 int_list = [i, len(helicities)] 995 int_list.extend(helicities) 996 helicity_line_list.append(\ 997 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 998 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 999 1000 return "\n".join(helicity_line_list)
1001
1002 - def get_ic_line(self, matrix_element):
1003 """Return the IC definition line coming after helicities, required by 1004 switchmom in madevent""" 1005 1006 nexternal = matrix_element.get_nexternal_ninitial()[0] 1007 int_list = list(range(1, nexternal + 1)) 1008 1009 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 1010 ",".join([str(i) for \ 1011 i in int_list]))
1012
1013 - def set_chosen_SO_index(self, process, squared_orders):
1014 """ From the squared order constraints set by the user, this function 1015 finds what indices of the squared_orders list the user intends to pick. 1016 It returns this as a string of comma-separated successive '.true.' or 1017 '.false.' for each index.""" 1018 1019 user_squared_orders = process.get('squared_orders') 1020 split_orders = process.get('split_orders') 1021 1022 if len(user_squared_orders)==0: 1023 return ','.join(['.true.']*len(squared_orders)) 1024 1025 res = [] 1026 for sqsos in squared_orders: 1027 is_a_match = True 1028 for user_sqso, value in user_squared_orders.items(): 1029 if (process.get_squared_order_type(user_sqso) =='==' and \ 1030 value!=sqsos[split_orders.index(user_sqso)]) or \ 1031 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1032 value<sqsos[split_orders.index(user_sqso)]) or \ 1033 (process.get_squared_order_type(user_sqso) == '>' and \ 1034 value>=sqsos[split_orders.index(user_sqso)]): 1035 is_a_match = False 1036 break 1037 res.append('.true.' if is_a_match else '.false.') 1038 1039 return ','.join(res)
1040
1041 - def get_split_orders_lines(self, orders, array_name, n=5):
1042 """ Return the split orders definition as defined in the list orders and 1043 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1044 1045 ret_list = [] 1046 for index, order in enumerate(orders): 1047 for k in range(0, len(order), n): 1048 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1049 (array_name,index + 1, k + 1, min(k + n, len(order)), 1050 ','.join(["%5r" % i for i in order[k:k + n]]))) 1051 return ret_list
1052
1053 - def format_integer_list(self, list, name, n=5):
1054 """ Return an initialization of the python list in argument following 1055 the fortran syntax using the data keyword assignment, filling an array 1056 of name 'name'. It splits rows in chunks of size n.""" 1057 1058 ret_list = [] 1059 for k in range(0, len(list), n): 1060 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1061 (name, k + 1, min(k + n, len(list)), 1062 ','.join(["%5r" % i for i in list[k:k + n]]))) 1063 return ret_list
1064
    def get_color_data_lines(self, matrix_element, n=6):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Each row of the CF matrix is emitted as DATA statements whose entries
        are the row numerators divided by the row denominator, written as
        double-precision literals; each row is followed by a Fortran comment
        showing the color-basis element it corresponds to.
        """

        if not matrix_element.get('color_matrix'):
            # Colorless case: trivial 1x1 color matrix.
            return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
        else:
            ret_list = []
            my_cs = color.ColorString()
            for index, denominator in \
                enumerate(matrix_element.get('color_matrix').\
                                                 get_line_denominators()):
                # First write the common denominator for this color matrix line
                #ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                            get_line_numerators(index, denominator)

                # Numerators are expected to be integer-valued.
                assert all([int(i)==i for i in num_list])

                for k in range(0, len(num_list), n):
                    # Format each ratio as a Fortran 'd' double literal.
                    ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                    (index + 1, k + 1, min(k + n, len(num_list)),
                                     ','.join([("%.15e" % (int(i)/denominator)).replace('e','d') for i in num_list[k:k + n]])))

                # Comment identifying the color-basis element of this row.
                my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
                ret_list.append("C %s" % repr(my_cs))
            return ret_list
1093 1094
1095 - def get_den_factor_line(self, matrix_element):
1096 """Return the denominator factor line for this matrix element""" 1097 1098 return "DATA IDEN/%2r/" % \ 1099 matrix_element.get_denominator_factor()
1100
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        Side effect: sets self.proc_characteristic['single_color'] = False
        whenever a subleading-color (Nc < max_Nc) flow is encountered.
        """

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Crate dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]
                else:
                    # Subleading-color contribution found: remember that this
                    # process is not single-color.
                    self.proc_characteristic['single_color'] = False

        # NOTE(review): relies on ijamp from the loop above, so the color
        # basis is assumed non-empty here.
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                            (iconfig+1, num_matrix_element, colamps,
                             ','.join(["%s" % booldict[b] for b in \
                                       bool_list])))

        return ret_list
1162
1163 - def get_amp2_lines(self, matrix_element, config_map = [], replace_dict=None):
1164 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1165 1166 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1167 # Get minimum legs in a vertex 1168 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1169 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1170 minvert = min(vert_list) if vert_list!=[] else 0 1171 1172 ret_lines = [] 1173 if config_map: 1174 # In this case, we need to sum up all amplitudes that have 1175 # identical topologies, as given by the config_map (which 1176 # gives the topology/config for each of the diagrams 1177 diagrams = matrix_element.get('diagrams') 1178 # Combine the diagrams with identical topologies 1179 config_to_diag_dict = {} 1180 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1181 if config_map[idiag] == 0: 1182 continue 1183 try: 1184 config_to_diag_dict[config_map[idiag]].append(idiag) 1185 except KeyError: 1186 config_to_diag_dict[config_map[idiag]] = [idiag] 1187 # Write out the AMP2s summing squares of amplitudes belonging 1188 # to eiher the same diagram or different diagrams with 1189 # identical propagator properties. Note that we need to use 1190 # AMP2 number corresponding to the first diagram number used 1191 # for that AMP2. 1192 for config in sorted(config_to_diag_dict.keys()): 1193 1194 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1195 {"num": (config_to_diag_dict[config][0] + 1)} 1196 1197 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1198 sum([diagrams[idiag].get('amplitudes') for \ 1199 idiag in config_to_diag_dict[config]], [])]) 1200 1201 # Not using \sum |M|^2 anymore since this creates troubles 1202 # when ckm is not diagonal due to the JIM mechanism. 
1203 if '+' in amp: 1204 amp = "(%s)*dconjg(%s)" % (amp, amp) 1205 else: 1206 amp = "%s*dconjg(%s)" % (amp, amp) 1207 1208 line = line + "%s" % (amp) 1209 #line += " * get_channel_cut(p, %s) " % (config) 1210 ret_lines.append(line) 1211 else: 1212 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1213 # Ignore any diagrams with 4-particle vertices. 1214 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1215 continue 1216 # Now write out the expression for AMP2, meaning the sum of 1217 # squared amplitudes belonging to the same diagram 1218 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1219 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1220 {"num": a.get('number')} for a in \ 1221 diag.get('amplitudes')]) 1222 ret_lines.append(line) 1223 1224 return ret_lines
1225 1226 #=========================================================================== 1227 # Returns the data statements initializing the coeffictients for the JAMP 1228 # decomposition. It is used when the JAMP initialization is decided to be 1229 # done through big arrays containing the projection coefficients. 1230 #===========================================================================
1231 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1232 n=50, Nc_value=3):
1233 """This functions return the lines defining the DATA statement setting 1234 the coefficients building the JAMPS out of the AMPS. Split rows in 1235 bunches of size n. 1236 One can specify the color_basis from which the color amplitudes originates 1237 so that there are commentaries telling what color structure each JAMP 1238 corresponds to.""" 1239 1240 if(not isinstance(color_amplitudes,list) or 1241 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1242 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs") 1243 1244 res_list = [] 1245 my_cs = color.ColorString() 1246 for index, coeff_list in enumerate(color_amplitudes): 1247 # Create the list of the complete numerical coefficient. 1248 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1249 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1250 coefficient in coeff_list] 1251 # Create the list of the numbers of the contributing amplitudes. 1252 # Mutliply by -1 for those which have an imaginary coefficient. 1253 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1254 for coefficient in coeff_list] 1255 # Find the common denominator. 
1256 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1257 num_list=[(coefficient*commondenom).numerator \ 1258 for coefficient in coefs_list] 1259 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1260 index+1,len(num_list))) 1261 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1262 index+1,commondenom)) 1263 if color_basis: 1264 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1265 res_list.append("C %s" % repr(my_cs)) 1266 for k in range(0, len(num_list), n): 1267 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1268 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1269 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1270 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1271 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1272 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1273 pass 1274 return res_list
1275 1276
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                                   split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code.

        Returns a pair (lines, max_tmp) where max_tmp is the largest number
        of temporary JAMP contractions introduced by any get_JAMP_lines call.
        """

        # Let the user call get_JAMP_lines_split_order directly from a
        # NOTE(review): only the second string literal is %-formatted here;
        # the leading '%s' is filled in later via error_msg % 'argname'.
        error_msg="Malformed '%s' argument passed to the "+\
                  "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error(error_msg%'col_amps')
        else:
            raise MadGraph5Error(error_msg%'col_amps')

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error(error_msg%'split_order_amps')
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error(error_msg%'split_order_amps')
        else:
            raise MadGraph5Error(error_msg%'split_order_amps')

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error(error_msg%'split_order_names')
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error(error_msg%'split_order_names')
            else:
                raise MadGraph5Error(error_msg%'split_order_names')

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        max_tmp = 0
        for i, amp_order in enumerate(split_order_amps):
            # Keep only the color amplitudes whose amplitude numbers belong
            # to this order group.
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]])
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                    ['%s=%i'%order for order in zip(split_order_names,
                                                    amp_order[0])]))
            # MatchBox also needs the leading-color JAMP variant.
            if self.opt['export_format'] in ['madloop_matchbox']:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))[0])
            else:
                toadd, nb_tmp = self.get_JAMP_lines(col_amps_order,
                                JAMP_format="JAMP(%s,{0})".format(str(i+1)))
                res_list.extend(toadd)
                max_tmp = max(max_tmp, nb_tmp)

        return res_list, max_tmp
1353 1354
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary,
        Jamp_formatLC should be define to allow to add LeadingColor computation
        (usefull for MatchBox)
        The split argument defines how the JAMP lines should be split in order
        not to be too long.

        Returns (lines, ndefs) where ndefs is the number of TMP_JAMP
        contractions introduced by the optional color-flow optimization
        (0 when the optimization is disabled).
        """

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")
        else:
            raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")

        # (jamp index, amp number) -> accumulated numeric coefficient; also
        # fed to the color-flow optimization below.
        all_element = {}
        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation chunks re-add the partially-built JAMP.
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    # Numeric value of this contribution (Nc fixed to 3).
                    value = (1j if coefficient[2] else 1)* coefficient[0] * coefficient[1] * fractions.Fraction(3)**coefficient[3]
                    if (i+1, amp_number) not in all_element:
                        all_element[(i+1, amp_number)] = value
                    else:
                        all_element[(i+1, amp_number)] += value
                    if common_factor:
                        res = (res + "%s" + AMP_format) % \
                                                   (self.coeff(coefficient[0],
                                                   coefficient[1] / abs(coefficient[1]),
                                                   coefficient[2],
                                                   coefficient[3]),
                                                   str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                   coefficient[1],
                                                   coefficient[2],
                                                   coefficient[3]),
                                                   str(amp_number))

                if common_factor:
                    res = res + ')'
                res_list.append(res)

        # Decide whether to run the color-flow optimization: run-time option
        # takes precedence over the class default.
        if 'jamp_optim' in self.cmd_options:
            jamp_optim = banner_mod.ConfigFile.format_variable(self.cmd_options['jamp_optim'], bool, 'jamp_optim')
        else:
            # class default
            jamp_optim = self.jamp_optim

        if not jamp_optim:
            return res_list, 0
        else:
            saved = list(res_list)

        if len(all_element) > 1000:
            logger.info("Computing Color-Flow optimization [%s term]", len(all_element))
            start_time = time.time()
        else:
            start_time = 0

        res_list = []
        #misc.sprint(len(all_element))

        self.myjamp_count = 0
        for key in all_element:
            all_element[key] = complex(all_element[key])
        new_mat, defs = self.optimise_jamp(all_element)
        if start_time:
            logger.info("Color-Flow passed to %s term in %ss. Introduce %i contraction", len(new_mat), int(time.time()-start_time), len(defs))

        #misc.sprint("number of iteration", self.myjamp_count)
        # Helper formatting a numeric coefficient as a Fortran literal
        # (exact Fractions as rationals, otherwise double/complex literals).
        def format(frac):
            if isinstance(frac, Fraction):
                if frac.denominator == 1:
                    return str(frac.numerator)
                else:
                    return "%id0/%id0" % (frac.numerator, frac.denominator)
            elif frac.real == frac:
                #misc.sprint(frac.real, frac)
                return ('%.15e' % frac.real).replace('e','d')
                #str(float(frac.real)).replace('e','d')
            else:
                return ('(%.15e,%.15e)' % (frac.real, frac.imag)).replace('e','d')
                #str(frac).replace('e','d').replace('j','*imag1')

        # Emit one TMP_JAMP definition per contraction found by
        # optimise_jamp; negative indices refer to earlier TMP_JAMPs.
        for i, amp1, amp2, frac, nb in defs:
            if amp1 > 0:
                amp1 = AMP_format % amp1
            else:
                amp1 = "TMP_JAMP(%d)" % -amp1
            if amp2 > 0:
                amp2 = AMP_format % amp2
            else:
                amp2 = "TMP_JAMP(%d)" % -amp2

            if frac not in [1., -1]:
                res_list.append(' TMP_JAMP(%d) = %s + (%s) * %s ! used %d times' % (i,amp1, format(frac), amp2, nb))
            elif frac == 1.:
                res_list.append(' TMP_JAMP(%d) = %s + %s ! used %d times' % (i,amp1, amp2, nb))
            else:
                res_list.append(' TMP_JAMP(%d) = %s - %s ! used %d times' % (i,amp1, amp2, nb))

        # misc.sprint(new_mat)
        # Finally rebuild each JAMP as a sum over remaining AMP/TMP_JAMP terms.
        jamp_res = collections.defaultdict(list)
        max_jamp=0
        for (jamp, var), factor in new_mat.items():
            if var > 0:
                name = AMP_format % var
            else:
                name = "TMP_JAMP(%d)" % -var
            if factor not in [1.]:
                jamp_res[jamp].append("(%s)*%s" % (format(factor), name))
            elif factor ==1:
                jamp_res[jamp].append("%s" % (name))
            max_jamp = max(max_jamp, jamp)

        for i in range(1,max_jamp+1):
            name = JAMP_format % i
            if not jamp_res[i]:
                res_list.append(" %s = 0d0" %(name))
            else:
                res_list.append(" %s = %s" %(name, '+'.join(jamp_res[i])))

        return res_list, len(defs)
1520
    def optimise_jamp(self, all_element, nb_line=0, nb_col=0, added=0):
        """ optimise problem of type Y = A X
        A is a matrix (all_element)
        X is the fortran name of the input.
        The code iteratively add sub-expression jtemp[sub_add]
        and recall itself (this is add to the X size)

        all_element maps (row, col) -> coefficient and is MUTATED in place:
        contracted pairs of columns are replaced by a new negative column
        index -added.  Returns (new_matrix, defs) where each def is a tuple
        (tmp_index, col1, col2, ratio, use_count).
        """
        # Count recursion depth for diagnostics (read by get_JAMP_lines).
        self.myjamp_count +=1

        # On the first call, derive the matrix dimensions from the keys.
        if not nb_line:
            for i,j in all_element:
                if i+1 > nb_line:
                    nb_line = i+1
                if j+1> nb_col:
                    nb_col = j+1

        # Find the pair of columns (j1, j2) and ratio R such that
        # row[j2]/row[j1] == R for the largest number of rows.
        max_count = 0
        all_index = []
        operation = collections.defaultdict(lambda: collections.defaultdict(int))
        for i in range(nb_line):
            # Negative columns are the TMP_JAMPs added in earlier passes.
            for j1 in range(-added, nb_col):
                v1 = all_element.get((i,j1), 0)
                if not v1:
                    continue
                for j2 in range(j1+1, nb_col):
                    R = all_element.get((i,j2), 0)/v1
                    if not R:
                        continue

                    operation[(j1,j2)][R] +=1
                    if operation[(j1,j2)][R] > max_count:
                        max_count = operation[(j1,j2)][R]
                        all_index = [(j1,j2, R)]
                    elif operation[(j1,j2)][R] == max_count:
                        all_index.append((j1,j2, R))
        # No pair occurs more than once: nothing left to contract.
        if max_count <= 1:
            return all_element, []
        #added += 1
        #misc.sprint(max_count, len(all_index))
        #misc.sprint(operation)
        # Contract every best-scoring pair: rows matching the ratio have
        # their two entries replaced by a single entry in a new column.
        to_add = []
        for index in all_index:
            j1,j2,R = index
            first = True
            for i in range(nb_line):
                v1 = all_element.get((i,j1), 0)
                v2 = all_element.get((i,j2), 0)
                if not v1 or not v2:
                    continue
                if v2/v1 == R:
                    if first:
                        # Allocate the TMP_JAMP index lazily, only if at
                        # least one row actually matches.
                        first = False
                        added +=1
                        to_add.append((added,j1,j2,R, max_count))

                    all_element[(i,-added)] = v1
                    del all_element[(i,j1)] #= 0
                    del all_element[(i,j2)] #= 0

        logger.log(5,"Define %d new shortcut reused %d times", len(to_add), max_count)
        # Recurse on the reduced matrix; new defs are prepended so that
        # TMP_JAMPs are emitted in definition order.
        new_element, new_def = self.optimise_jamp(all_element, nb_line=nb_line, nb_col=nb_col, added=added)
        for one_def in to_add:
            new_def.insert(0, one_def)
        return new_element, new_def
1585 1586 1587 1588 1589
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns a triple of Fortran code strings (each without its final
        newline): variable declarations, DATA initialisations, and the
        PDF evaluation/summation code.  Decay processes (ninitial == 1)
        get trivial unit weights.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDFs, every subprocess gets weight 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            # Sanitise particle names into valid Fortran identifiers.
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in \
                                                 initial_states[i]]) + \
                                                 "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                            "/%d*1D0/" % len(initial_states[i]) + \
                                            "\n"

            # Get PDF lines for all different initial states.
            # Subprocess groups index the beams through the IB() mapping.
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in list(pdf_codes.keys()):
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1,
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in list(pdf_codes.keys()):
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1696 1697 #=========================================================================== 1698 # write_props_file 1699 #===========================================================================
def write_props_file(self, writer, matrix_element, s_and_t_channels):
    """Write the props.inc file for MadEvent. Needs input from
    write_configs_file."""

    model = matrix_element.get('processes')[0].get('model')
    particle_dict = model.get('particle_dict')

    out = []
    for iconf, channels in enumerate(s_and_t_channels):
        # Propagators: every s-channel vertex plus every t-channel
        # vertex except the last one
        for vertex in channels[0] + channels[1][:-1]:
            prop_leg = vertex.get('legs')[-1]
            pdg = prop_leg.get('id')
            if pdg not in particle_dict:
                # Fake propagator used in multiparticle vertices
                mass, width, pow_part = 'zero', 'zero', 0
            else:
                particle = particle_dict[pdg]
                # Mass: keep the literal 'zero', wrap anything else in abs()
                mass = particle.get('mass')
                if mass.lower() != 'zero':
                    mass = "abs(%s)" % mass
                # Width: same convention as the mass
                width = particle.get('width')
                if width.lower() != 'zero':
                    width = "abs(%s)" % width
                # Propagator power: 2 for bosons, 1 for fermions
                pow_part = 1 + int(particle.is_boson())

            number = prop_leg.get('number')
            out.append("prmass(%d,%d) = %s" % (number, iconf + 1, mass))
            out.append("prwidth(%d,%d) = %s" % (number, iconf + 1, width))
            out.append("pow(%d,%d) = %d" % (number, iconf + 1, pow_part))

    # Write the file
    writer.writelines(out)

    return True
1743 1744 #=========================================================================== 1745 # write_configs_file 1746 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent"""

    # Extract number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    diagrams = matrix_element.get('diagrams')
    # One config per diagram, numbered from 1
    mapconfigs = list(range(1, len(diagrams) + 1))
    model = matrix_element.get('processes')[0].get('model')

    s_and_t = self.write_configs_file_from_diagrams(
        writer,
        [[diag] for diag in diagrams],
        mapconfigs,
        nexternal, ninitial,
        model)
    return mapconfigs, s_and_t
1761 1762 #=========================================================================== 1763 # write_configs_file_from_diagrams 1764 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns the list of [s-channels, t-channels] per written config,
    as needed later by write_props_file."""

    lines = []

    s_and_t_channels = []

    # Largest vertex size (number of legs) per config, taken from the
    # first contributing diagram of each config
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # Fictitious PDG code for propagators coming from shrunk loops
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any(vert > minvert for vert in [d for d in helas_diags if d]\
               [0].get_vertex_leg_numbers()) :
            # Only 3-vertices allowed in configs.inc except for vertices
            # which originate from a shrunk loop.
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model, new_pdg))
            else:
                # NOTE: empty_verts is shared by reference and resized
                # in place below, once the real vertex count is known
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = list(zip(*[s for s,t in stchannels]))
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))

        for verts in allchannels:
            # s-channel entries are tuples across subprocesses; pick the
            # first non-None vertex as representative
            if verts in schannels:
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 when the
                # subprocess does not contribute to this config)
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels
1883 1884 #=========================================================================== 1885 # Global helper methods 1886 #=========================================================================== 1887
def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
    """Return a nicely formatted Fortran coefficient string for JAMP lines."""

    # Fold the colour factor Nc**power into the overall rational value
    value = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power

    # Unit coefficients collapse to a bare sign (times imag1 if imaginary)
    if value == 1:
        return '+imag1*' if is_imaginary else '+'
    if value == -1:
        return '-imag1*' if is_imaginary else '-'

    # General case: signed numerator, optional denominator, optional imag1
    result = '%+iD0' % value.numerator
    if value.denominator != 1:
        result += '/%iD0' % value.denominator
    if is_imaginary:
        result += '*imag1'
    return result + '*'
1914 1915
def set_fortran_compiler(self, default_compiler, force=False):
    """Set compiler based on what's available on the system"""

    requested = default_compiler['fortran']

    # Probe the requested compiler first, then the common fallbacks, in
    # this fixed order of preference.
    f77_compiler = None
    if requested and misc.which(requested):
        f77_compiler = requested
    else:
        for candidate in ('gfortran', 'g77', 'f77'):
            if misc.which(candidate):
                f77_compiler = candidate
                break
    if f77_compiler is None:
        if requested:
            logger.warning('No Fortran Compiler detected! Please install one')
            f77_compiler = requested # maybe misc fail so try with it
        else:
            raise MadGraph5Error('No Fortran Compiler detected! Please install one')
    logger.info('Use Fortran compiler ' + f77_compiler)

    # f2py: start from the configured default, then try to locate a
    # working executable.
    f2py_compiler = default_compiler['f2py'] or ''
    if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
        f2py_compiler = default_compiler['f2py']
    elif misc.which('f2py'):
        f2py_compiler = 'f2py'
    else:
        # Version-suffixed names used by some python 2.6/2.7 installs
        minor = sys.version_info[1]
        if minor in (6, 7):
            for name in ('f2py-2.%d' % minor, 'f2py2.%d' % minor):
                if misc.which(name):
                    f2py_compiler = name
                    break

    to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}

    self.replace_make_opt_f_compiler(to_replace)
    # Replace also for Template but not for cluster
    if 'MADGRAPH_DATA' not in os.environ and ReadWrite:
        self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

    return f77_compiler
1966 1967 # an alias for backward compatibility 1968 set_compiler = set_fortran_compiler 1969 1970
def set_cpp_compiler(self, default_compiler, force=False):
    """Set compiler based on what's available on the system"""

    compiler = None
    if default_compiler and misc.which(default_compiler):
        compiler = default_compiler
    elif misc.which('g++'):
        # On some platforms 'g++' is a thin wrapper around clang;
        # in that case prefer calling clang directly.
        proc = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
        out, _ = proc.communicate()
        out = out.decode()
        compiler = 'clang' if 'clang' in str(out) and misc.which('clang') \
                   else 'g++'
    elif misc.which('c++'):
        compiler = 'c++'
    elif misc.which('clang'):
        compiler = 'clang'
    elif default_compiler:
        logger.warning('No c++ Compiler detected! Please install one')
        compiler = default_compiler # maybe misc fail so try with it
    else:
        raise MadGraph5Error('No c++ Compiler detected! Please install one')

    logger.info('Use c++ compiler ' + compiler)
    self.replace_make_opt_c_compiler(compiler)
    # Replace also for Template but not for cluster, not in debug mode
    # and not inside a release tree
    if ('MADGRAPH_DATA' not in os.environ and ReadWrite and
            not __debug__ and
            not os.path.exists(pjoin(MG5DIR, 'bin', 'create_release.py'))):
        self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO'))

    return compiler
2004 2005
def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
    """Set FC=compiler in Source/make_opts.

    compilers: dict with keys 'fortran' and 'f2py' giving the compiler
        names to write into make_opts.
    root_dir: base directory of the output; defaults to self.dir_path.
    """

    assert isinstance(compilers, dict)

    # Removed dead local 'mod = False' which was never read.
    if not root_dir:
        root_dir = self.dir_path

    compiler = compilers['fortran']
    # Fall back to plain 'f2py' when no f2py compiler was configured
    f2py_compiler = compilers['f2py'] or 'f2py'
    for_update = {'DEFAULT_F_COMPILER': compiler,
                  'DEFAULT_F2PY_COMPILER': f2py_compiler}
    make_opts = pjoin(root_dir, 'Source', 'make_opts')

    try:
        common_run_interface.CommonRunCmd.update_make_opts_full(
            make_opts, for_update)
    except IOError:
        # Only report for the main output directory; updating the
        # Template copy is best-effort.
        if root_dir == self.dir_path:
            logger.info('Fail to set compiler. Trying to continue anyway.')
2029
def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
    """Set CXX=compiler in Source/make_opts.
    The version is also checked, in order to set some extra flags
    if the compiler is clang (on MACOS)"""

    is_clang = misc.detect_if_cpp_compiler_is_clang(compiler)
    is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++'

    # list of the variable to set in the make_opts file
    for_update= {'DEFAULT_CPP_COMPILER':compiler,
                 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '',
                 'STDLIB': '-lc++' if is_lc else '-lstdc++',
                 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else ''
                 }

    # for MOJAVE (and later) remove the MACFLAG:
    if is_clang:
        import platform
        version, _, _ = platform.mac_ver()
        if not version: # not mac (e.g. clang on linux)
            # Bug fix: majversion was previously left unassigned on this
            # branch, raising a NameError at the comparison below. Use
            # values that trigger the "remove MACFLAG" branch, as the
            # original comment intended.
            majversion, version = 10, 14
        else:
            majversion, version = [int(x) for x in version.split('.',3)[:2]]

        if majversion >= 11 or (majversion == 10 and version >= 14):
            for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else ''

    if not root_dir:
        root_dir = self.dir_path
    make_opts = pjoin(root_dir, 'Source', 'make_opts')

    try:
        common_run_interface.CommonRunCmd.update_make_opts_full(
            make_opts, for_update)
    except IOError:
        # Best-effort when updating the Template copy
        if root_dir == self.dir_path:
            logger.info('Fail to set compiler. Trying to continue anyway.')

    return
2070
2071 #=============================================================================== 2072 # ProcessExporterFortranSA 2073 #=============================================================================== 2074 -class ProcessExporterFortranSA(ProcessExporterFortran):
2075 """Class to take care of exporting a set of matrix elements to 2076 MadGraph v4 StandAlone format.""" 2077 2078 matrix_template = "matrix_standalone_v4.inc" 2079
def __init__(self, *args, **opts):
    """add the format information compare to standard init"""

    # Consume the extra 'format' keyword before forwarding the options
    # to the mother class.
    self.format = opts.pop('format', 'standalone')
    # Maps (pdg tuple, process id) -> [proc_prefix, tag], filled during
    # subprocess generation and used for the f2py output.
    self.prefix_info = {}
    ProcessExporterFortran.__init__(self, *args, **opts)
2091
def copy_template(self, model):
    """Additional actions needed for setup of Template.

    Creates the standalone directory skeleton and copies the static
    template files (version info, makefiles, check_sa.f, make_opts).
    Does nothing if the output directory already exists.
    """

    #First copy the full template tree if dir_path doesn't exit
    if os.path.isdir(self.dir_path):
        return

    logger.info('initialize a new standalone directory: %s' % \
                    os.path.basename(self.dir_path))
    temp_dir = pjoin(self.mgme_dir, 'Template/LO')

    # Create the directory structure
    os.mkdir(self.dir_path)
    os.mkdir(pjoin(self.dir_path, 'Source'))
    os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
    os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
    os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
    os.mkdir(pjoin(self.dir_path, 'bin'))
    os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
    os.mkdir(pjoin(self.dir_path, 'lib'))
    os.mkdir(pjoin(self.dir_path, 'Cards'))

    # Information at top-level
    #Write version info
    shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
    try:
        shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
    except IOError:
        # No MGMEVersion.txt in a pure MG5 install: synthesize one from
        # the package info
        MG5_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
            "5." + MG5_version['version'])

    # Add file in SubProcesses
    shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                pjoin(self.dir_path, 'SubProcesses', 'makefileP'))

    if self.format == 'standalone':
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                    pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

    # Add file in Source
    shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                pjoin(self.dir_path, 'Source'))
    # add the makefile
    filename = pjoin(self.dir_path, 'Source', 'makefile')
    self.write_source_makefile(writers.FileWriter(filename))
2140 2141 #=========================================================================== 2142 # export model files 2143 #===========================================================================
def export_model_files(self, model_path):
    """export the model dependent files for V4 model"""

    super(ProcessExporterFortranSA, self).export_model_files(model_path)

    # Add the routine update_as_param in v4 model
    # This is a function created in the UFO
    # NOTE(review): exact indentation inside this Fortran snippet could
    # not be recovered from the rendered source -- confirm against the
    # repository before relying on column positions.
    text = """
          subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
          end
"""
    # Append (mode 'a') to the generated couplings.f
    ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a')
    ff.write(text)
    ff.close()

    # Make check_sa.f use the updated signature of setpara
    text = open(pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')).read()
    text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
    fsock = open(pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'), 'w')
    fsock.write(text)
    fsock.close()

    self.make_model_symbolic_link()
2167 2168 #=========================================================================== 2169 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 2170 #===========================================================================
def write_procdef_mg5(self, file_pos, modelname, process_str):
    """Produce the MG4-style proc_card that keeps the MadEvent4 Perl
    scripts working on a pure MG5 run. StandAlone output has no use
    for it, so this implementation is a deliberate no-op."""

    return
2178 2179 2180 #=========================================================================== 2181 # Make the Helas and Model directories for Standalone directory 2182 #===========================================================================
def make(self):
    """Run make in the DHELAS and MODEL directories, to set up
    everything for running standalone
    """

    source_dir = pjoin(self.dir_path, "Source")
    # Build the two static libraries needed by the standalone driver
    for label, target in (("Helas", '../lib/libdhelas.a'),
                          ("Model", '../lib/libmodel.a')):
        logger.info("Running make for %s" % label)
        misc.compile(arg=[target], cwd=source_dir, mode='fortran')
2193 2194 #=========================================================================== 2195 # Create proc_card_mg5.dat for Standalone directory 2196 #===========================================================================
def finalize(self, matrix_elements, history, mg5options, flaglist):
    """Finalize Standalone MG4 directory by
    generation proc_card_mg5.dat
    generate a global makefile
    """

    # Compiler choices come from the user options
    compiler = {'fortran': mg5options['fortran_compiler'],
                'cpp': mg5options['cpp_compiler'],
                'f2py': mg5options['f2py_compiler']}

    self.compiler_choice(compiler)
    self.make()

    # Write command history as proc_card_mg5
    if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
        output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
        history.write(output_file)

    ProcessExporterFortran.finalize(self, matrix_elements,
                                    history, mg5options, flaglist)
    # Make the output importable as a python package
    open(pjoin(self.dir_path, '__init__.py'), 'w')
    open(pjoin(self.dir_path, 'SubProcesses', '__init__.py'), 'w')

    # Deliberately disabled branch, kept for reference
    if False:#'mode' in self.opt and self.opt['mode'] == "reweight":
        #add the module to hande the NLO weight
        files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                       pjoin(self.dir_path, 'Source'))
        files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                       pjoin(self.dir_path, 'Source', 'PDF'))
        self.write_pdf_opendata()

    # prefix_info is filled when the --prefix option was used; in that
    # case produce the f2py glue instead of the plain makefile
    if self.prefix_info:
        self.write_f2py_splitter()
        self.write_f2py_makefile()
        self.write_f2py_check_sa(matrix_elements,
                                 pjoin(self.dir_path, 'SubProcesses', 'check_sa.py'))
    else:
        # create a single makefile to compile all the subprocesses
        text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
        deppython = ''
        for Pdir in os.listdir(pjoin(self.dir_path, 'SubProcesses')):
            if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n' % {'0': Pdir}
                deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
        text += 'all: %s\n\techo \'done\'' % deppython

        # Append (mode 'a') to the existing SubProcesses makefile
        ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'), 'a')
        ff.write(text)
        ff.close()
2246
def write_f2py_splitter(self):
    """write a function to call the correct matrix element

    Produces SubProcesses/all_matrix.f: a Fortran dispatcher exposed to
    f2py that routes (pdgs, procid) to the per-process prefixed
    smatrixhel routines recorded in self.prefix_info.
    NOTE(review): the exact internal whitespace of this Fortran template
    could not be recovered from the rendered source; FortranWriter
    re-formats the lines on output, but confirm against the repository.
    """

    # %-template for all_matrix.f; the %(...)s slots are filled from
    # 'formatting' below. The template body is runtime data: do not
    # edit it casually.
    template = """
%(python_information)s
      subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE
C     ALPHAS is given at scale2 (SHOULD be different of 0 for loop induced, ignore for LO)
CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in):: procid
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel, procid
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
c     if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END

      subroutine CHANGE_PARA(name, value)
      implicit none
CF2PY intent(in) :: name
CF2PY intent(in) :: value

      character*512 name
      double precision value

%(helreset_def)s

      include '../Source/MODEL/input.inc'
      include '../Source/MODEL/coupl.inc'

%(helreset_setup)s

      SELECT CASE (name)
%(parameter_setup)s
      CASE DEFAULT
          write(*,*) 'no parameter matching', name, value
      END SELECT

      return
      end

      subroutine update_all_coup()
      implicit none
      call coup()
      return
      end

      subroutine get_pdg_order(PDG, ALLPROC)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i)
      DATA PDGS/ %(pdgs)s /
      DATA PIDS/ %(pids)s /
      PDG = PDGS
      ALLPROC = PIDS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END
"""

    # prefix_info: {(pdg tuple, proc id): [proc_prefix, tag]}
    allids = list(self.prefix_info.keys())
    allprefix = [self.prefix_info[key][0] for key in allids]
    min_nexternal = min([len(ids[0]) for ids in allids])
    max_nexternal = max([len(ids[0]) for ids in allids])

    # Human/machine readable mapping written at the top of the file
    info = []
    for (key, pid), (prefix, tag) in self.prefix_info.items():
        info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid))

    # Build the nested if/else dispatch on npdg, then on the pdg list
    # and the process id
    text = []
    for n_ext in range(min_nexternal, max_nexternal + 1):
        current_id = [ids[0] for ids in allids if len(ids[0]) == n_ext]
        current_pid = [ids[1] for ids in allids if len(ids[0]) == n_ext]
        if not current_id:
            continue
        if min_nexternal != max_nexternal:
            if n_ext == min_nexternal:
                text.append(' if (npdg.eq.%i)then' % n_ext)
            else:
                text.append(' else if (npdg.eq.%i)then' % n_ext)
        for ii, pdgs in enumerate(current_id):
            pid = current_pid[ii]
            condition = '.and.'.join(['%i.eq.pdgs(%i)' % (pdg, i+1) for i, pdg in enumerate(pdgs)])
            if ii == 0:
                text.append(' if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
            else:
                text.append(' else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
            text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs, pid)][0])
        text.append(' endif')
    #close the function
    if min_nexternal != max_nexternal:
        text.append('endif')

    # SELECT CASE entries for CHANGE_PARA, one per model parameter
    params = self.get_model_parameter(self.model)
    parameter_setup = []
    for key, var in params.items():
        parameter_setup.append(' CASE ("%s")\n %s = value'
                               % (key, var))

    # part for the resetting of the helicity
    helreset_def = []
    helreset_setup = []
    for prefix in set(allprefix):
        helreset_setup.append(' %shelreset = .true. ' % prefix)
        helreset_def.append(' logical %shelreset \n common /%shelreset/ %shelreset' % (prefix, prefix, prefix))

    # Note the loop order in 'pdgs': Fortran DATA fills column-major,
    # so the outer loop runs over the particle index
    formatting = {'python_information': '\n'.join(info),
                  'smatrixhel': '\n'.join(text),
                  'maxpart': max_nexternal,
                  'nb_me': len(allids),
                  'pdgs': ','.join(str(pdg[i]) if i < len(pdg) else '0'
                                   for i in range(max_nexternal) for (pdg, pid) in allids),
                  'prefix': '\',\''.join(allprefix),
                  'pids': ','.join(str(pid) for (pdg, pid) in allids),
                  'parameter_setup': '\n'.join(parameter_setup),
                  'helreset_def': '\n'.join(helreset_def),
                  'helreset_setup': '\n'.join(helreset_setup),
                  }
    formatting['lenprefix'] = len(formatting['prefix'])
    text = template % formatting
    fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'), 'w')
    fsock.writelines(text)
    fsock.close()
2410
2411 - def get_model_parameter(self, model):
2412 """ returns all the model parameter 2413 """ 2414 params = {} 2415 for p in model.get('parameters')[('external',)]: 2416 name = p.name 2417 nopref = name[4:] if name.startswith('mdl_') else name 2418 params[nopref] = name 2419 2420 block = p.lhablock 2421 lha = '_'.join([str(i) for i in p.lhacode]) 2422 params['%s_%s' % (block.upper(), lha)] = name 2423 2424 return params
2425 2426 2427 2428 2429
def write_f2py_check_sa(self, matrix_element, writer):
    """Write the general check_sa.py in SubProcesses that calls all
    processes successively.

    Still to be implemented -- the file is only an example and not
    crucial, so nothing is written for now."""
    return
2434
def write_f2py_makefile(self):
    """Install the f2py makefile template into SubProcesses."""
    # Add file in SubProcesses
    src = pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files',
                'makefile_sa_f2py')
    dest = pjoin(self.dir_path, 'SubProcesses', 'makefile')
    shutil.copy(src, dest)
2440
def create_MA5_cards(self, *args, **opts):
    """Overload of the mother-class method: StandAlone output needs no
    MadAnalysis5 cards, so this is intentionally a no-op."""
    pass
2444
def compiler_choice(self, compiler):
    """Hook for daughter classes that want a different compiler setup;
    the default simply forwards to set_compiler."""
    self.set_compiler(compiler)
2450 2451 #=========================================================================== 2452 # generate_subprocess_directory 2453 #===========================================================================
def generate_subprocess_directory(self, matrix_element,
                                  fortran_model, number):
    """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
    including the necessary matrix.f and nexternal.inc files.

    Returns the number of helas calls written (0 when the directory was
    skipped because a mirror-symmetric one already exists)."""

    cwd = os.getcwd()
    # Create the directory PN_xx_xxxxx in the specified path
    dirpath = pjoin(self.dir_path, 'SubProcesses', \
                   "P%s" % matrix_element.get('processes')[0].shell_string())

    if self.opt['sa_symmetry']:
        # avoid symmetric output: if any final-state permutation (after
        # possibly swapping the two initial legs) maps onto an already
        # generated directory, skip this one.
        for i, proc in enumerate(matrix_element.get('processes')):

            tag = proc.get_tag()
            # Keep a copy of the legs so they can be restored after the
            # in-place permutations below
            legs = proc.get('legs')[:]
            leg0 = proc.get('legs')[0]
            leg1 = proc.get('legs')[1]
            if not leg1.get('state'):
                proc.get('legs')[0] = leg1
                proc.get('legs')[1] = leg0
            flegs = proc.get('legs')[2:]
            for perm in itertools.permutations(flegs):
                for i, p in enumerate(perm):
                    proc.get('legs')[i+2] = p
                dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                           "P%s" % proc.shell_string())
                #restore original order
                proc.get('legs')[2:] = legs[2:]
                if os.path.exists(dirpath2):
                    proc.get('legs')[:] = legs
                    return 0
            proc.get('legs')[:] = legs

    try:
        os.mkdir(dirpath)
    except os.error as error:
        logger.warning(error.strerror + " " + dirpath)

    #try:
    #    os.chdir(dirpath)
    #except os.error:
    #    logger.error('Could not cd to directory %s' % dirpath)
    #    return 0

    logger.info('Creating files in directory %s' % dirpath)

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Create the matrix.f file and the nexternal.inc file
    if self.opt['export_format'] == 'standalone_msP':
        filename = pjoin(dirpath, 'matrix_prod.f')
    else:
        filename = pjoin(dirpath, 'matrix.f')

    # Optional per-process routine prefix (set via the --prefix option);
    # also records the information used later by write_f2py_splitter
    proc_prefix = ''
    if 'prefix' in self.cmd_options:
        if self.cmd_options['prefix'] == 'int':
            proc_prefix = 'M%s_' % number
        elif self.cmd_options['prefix'] == 'proc':
            proc_prefix = matrix_element.get('processes')[0].shell_string().split('_', 1)[1]
        else:
            raise Exception('--prefix options supports only \'int\' and \'proc\'')
        for proc in matrix_element.get('processes'):
            ids = [l.get('id') for l in proc.get('legs_with_decays')]
            self.prefix_info[(tuple(ids), proc.get('id'))] = [proc_prefix, proc.get_tag()]

    calls = self.write_matrix_element_v4(
        writers.FortranWriter(filename),
        matrix_element,
        fortran_model,
        proc_prefix=proc_prefix)

    # Extra include files needed by the MadSpin production variants
    if self.opt['export_format'] == 'standalone_msP':
        filename = pjoin(dirpath, 'configs_production.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'props_production.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'nexternal_prod.inc')
        self.write_nexternal_madspin(writers.FortranWriter(filename),
                                     nexternal, ninitial)

    if self.opt['export_format'] == 'standalone_msF':
        filename = pjoin(dirpath, 'helamp.inc')
        ncomb = matrix_element.get_helicity_combinations()
        self.write_helamp_madspin(writers.FortranWriter(filename),
                                  ncomb)

    filename = pjoin(dirpath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = pjoin(dirpath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = pjoin(dirpath, 'ngraphs.inc')
    self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(matrix_element.get_all_amplitudes()))

    # Generate diagrams (unless disabled through the 'noeps' option)
    if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

    linkfiles = ['check_sa.f', 'coupl.inc']

    # With a routine prefix, check_sa.f must call the prefixed smatrix:
    # write a patched local copy instead of linking the shared one
    if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
        text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
        pat = re.compile('smatrix', re.I)
        new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
        with open(pjoin(dirpath, 'check_sa.f'), 'w') as f:
            f.write(new_text)
        linkfiles.pop(0)

    for file in linkfiles:
        ln('../%s' % file, cwd=dirpath)
    ln('../makefileP', name='makefile', cwd=dirpath)
    # Return to original PWD
    #os.chdir(cwd)

    if not calls:
        calls = 0
    return calls
2593 2594 2595 #=========================================================================== 2596 # write_source_makefile 2597 #===========================================================================
2598 - def write_source_makefile(self, writer):
2599 """Write the nexternal.inc file for MG4""" 2600 2601 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2602 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2603 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2604 2605 replace_dict= {'libraries': set_of_lib, 2606 'model':model_line, 2607 'additional_dsample': '', 2608 'additional_dependencies':''} 2609 2610 text = open(path).read() % replace_dict 2611 2612 if writer: 2613 writer.write(text) 2614 2615 return replace_dict
2616 2617 #=========================================================================== 2618 # write_matrix_element_v4 2619 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format
        if write is on False, just return the replace_dict and not write anything.

        Parameters:
            writer: a writers.FortranWriter (checked below) or None.
            matrix_element: the HelasMatrixElement to export.
            fortran_model: helas call writer used to produce the HELAS calls.
            write: when False, the filled replace_dict is returned instead of
                   writing the file.
            proc_prefix: string prepended to the generated routine names.

        Returns 0 for an empty matrix element, the number of non-comment
        HELAS calls when the file is written, or the replace_dict (with
        'return_value' set) when write/writer is not set.
        """


        # Nothing to export for an empty process/diagram list.
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        # Make sure the option exists even when the exporter was built with
        # a partial option dictionary.
        if 'sa_symmetry' not in self.opt:
            self.opt['sa_symmetry']=False


        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 so the Fortran arrays are well formed)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
         "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp

        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp
            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
                              nexternal,ninitial,proc_prefix,check_sa_writer)

            # NOTE(review): these two writers create files in the current
            # working directory — presumably the subprocess directory is the
            # cwd at this point; confirm against the caller.
            if write:
                writers.FortranWriter('nsqso_born.inc').writelines(
"""INTEGER NSQSO_BORN
PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Select the Fortran template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                  'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                           .read()%replace_dict
                writer.writelines(content)
            # Count only the actual HELAS calls, not comment lines.
            return len([call for call in helas_calls if call.find('#') != 0])
        else:
            replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0])
            return replace_dict # for subclass update
2792
2793 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2794 nincoming, proc_prefix, writer):
2795 """ Write out a more advanced version of the check_sa drivers that 2796 individually returns the matrix element for each contributing squared 2797 order.""" 2798 2799 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2800 'template_files', 'check_sa_splitOrders.f')).read() 2801 printout_sq_orders=[] 2802 for i, squared_order in enumerate(squared_orders): 2803 sq_orders=[] 2804 for j, sqo in enumerate(squared_order): 2805 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2806 printout_sq_orders.append(\ 2807 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2808 %(i+1,' '.join(sq_orders),i+1)) 2809 printout_sq_orders='\n'.join(printout_sq_orders) 2810 replace_dict = {'printout_sqorders':printout_sq_orders, 2811 'nSplitOrders':len(squared_orders), 2812 'nexternal':nexternal, 2813 'nincoming':nincoming, 2814 'proc_prefix':proc_prefix} 2815 2816 if writer: 2817 writer.writelines(check_sa_content % replace_dict) 2818 else: 2819 return replace_dict
2820
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # MatchBox keeps subprocess symmetry and never cleans the output dir.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born


    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Produces a Fortran if/elseif cascade mapping (in1, in2) index pairs
        to the color-string arguments; returns "out = 1" / "out = - 1"
        fallbacks when there is no color matrix / no entries.
        """

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in range(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    continue
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype)
                # '0' acts as a separator between successive color factors.
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
             out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
             out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self,*args,**opts):
        # MatchBox output is not compiled here; deliberately a no-op.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):

        """Adding leading color part of the colorflow"""

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error(error_msg % 'col_amps')
        else:
            raise MadGraph5Error(error_msg % 'col_amps')

        # Full-color JAMPs from the parent exporter.
        # NOTE(review): the incoming `split` argument is not forwarded; both
        # super calls hard-code split=-1 — presumably intentional, confirm.
        text, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                      JAMP_format=JAMP_format,
                                      AMP_format=AMP_format,
                                      split=-1)


        # Filter the col_ampls to generate only those without any 1/NC terms

        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                # coefficient[3] is the power of 1/NC; keep leading color only.
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text2, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                      JAMP_format=JAMP_formatLC,
                                      AMP_format=AMP_format,
                                      split=-1)
        text += text2

        return text, 0
2932
2933 2934 2935 2936 #=============================================================================== 2937 # ProcessExporterFortranMW 2938 #=============================================================================== 2939 -class ProcessExporterFortranMW(ProcessExporterFortran):
2940 """Class to take care of exporting a set of matrix elements to 2941 MadGraph v4 - MadWeight format.""" 2942 2943 matrix_file="matrix_standalone_v4.inc" 2944 jamp_optim = False 2945
2946 - def copy_template(self, model):
2947 """Additional actions needed for setup of Template 2948 """ 2949 2950 super(ProcessExporterFortranMW, self).copy_template(model) 2951 2952 # Add the MW specific file 2953 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2954 pjoin(self.dir_path, 'Source','MadWeight'), True) 2955 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2956 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2957 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2958 pjoin(self.dir_path, 'Source','setrun.f')) 2959 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2960 pjoin(self.dir_path, 'Source','run.inc')) 2961 # File created from Template (Different in some child class) 2962 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2963 self.write_run_config_file(writers.FortranWriter(filename)) 2964 2965 try: 2966 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2967 stdout = os.open(os.devnull, os.O_RDWR), 2968 stderr = os.open(os.devnull, os.O_RDWR), 2969 cwd=self.dir_path) 2970 except OSError: 2971 # Probably madweight already called 2972 pass 2973 2974 # Copy the different python file in the Template 2975 self.copy_python_file() 2976 # create the appropriate cuts.f 2977 self.get_mw_cuts_version() 2978 2979 # add the makefile in Source directory 2980 filename = os.path.join(self.dir_path,'Source','makefile') 2981 self.write_source_makefile(writers.FortranWriter(filename))
2982 2983 2984 2985 2986 #=========================================================================== 2987 # convert_model 2988 #===========================================================================
2989 - def convert_model(self, model, wanted_lorentz = [], 2990 wanted_couplings = []):
2991 2992 super(ProcessExporterFortranMW,self).convert_model(model, 2993 wanted_lorentz, wanted_couplings) 2994 2995 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2996 try: 2997 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2998 except OSError as error: 2999 pass 3000 model_path = model.get('modelpath') 3001 # This is not safe if there is a '##' or '-' in the path. 3002 shutil.copytree(model_path, 3003 pjoin(self.dir_path,'bin','internal','ufomodel'), 3004 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3005 if hasattr(model, 'restrict_card'): 3006 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3007 'restrict_default.dat') 3008 if isinstance(model.restrict_card, check_param_card.ParamCard): 3009 model.restrict_card.write(out_path) 3010 else: 3011 files.cp(model.restrict_card, out_path)
3012 3013 #=========================================================================== 3014 # generate_subprocess_directory 3015 #===========================================================================
3016 - def copy_python_file(self):
3017 """copy the python file require for the Template""" 3018 3019 # madevent interface 3020 cp(_file_path+'/interface/madweight_interface.py', 3021 self.dir_path+'/bin/internal/madweight_interface.py') 3022 cp(_file_path+'/interface/extended_cmd.py', 3023 self.dir_path+'/bin/internal/extended_cmd.py') 3024 cp(_file_path+'/interface/common_run_interface.py', 3025 self.dir_path+'/bin/internal/common_run_interface.py') 3026 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3027 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3028 cp(_file_path+'/iolibs/save_load_object.py', 3029 self.dir_path+'/bin/internal/save_load_object.py') 3030 cp(_file_path+'/madevent/gen_crossxhtml.py', 3031 self.dir_path+'/bin/internal/gen_crossxhtml.py') 3032 cp(_file_path+'/madevent/sum_html.py', 3033 self.dir_path+'/bin/internal/sum_html.py') 3034 cp(_file_path+'/various/FO_analyse_card.py', 3035 self.dir_path+'/bin/internal/FO_analyse_card.py') 3036 cp(_file_path+'/iolibs/file_writers.py', 3037 self.dir_path+'/bin/internal/file_writers.py') 3038 #model file 3039 cp(_file_path+'../models/check_param_card.py', 3040 self.dir_path+'/bin/internal/check_param_card.py') 3041 3042 #madevent file 3043 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3044 cp(_file_path+'/various/lhe_parser.py', 3045 self.dir_path+'/bin/internal/lhe_parser.py') 3046 3047 cp(_file_path+'/various/banner.py', 3048 self.dir_path+'/bin/internal/banner.py') 3049 cp(_file_path+'/various/shower_card.py', 3050 self.dir_path+'/bin/internal/shower_card.py') 3051 cp(_file_path+'/various/cluster.py', 3052 self.dir_path+'/bin/internal/cluster.py') 3053 3054 # logging configuration 3055 cp(_file_path+'/interface/.mg5_logging.conf', 3056 self.dir_path+'/bin/internal/me5_logging.conf') 3057 cp(_file_path+'/interface/coloring_logging.py', 3058 self.dir_path+'/bin/internal/coloring_logging.py')
3059 3060 3061 #=========================================================================== 3062 # Change the version of cuts.f to the one compatible with MW 3063 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None (write to SubProcesses/cuts.f), a path string, or an
        already-open file-like object.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # nb_if tracks the nesting depth of the skipped Fortran if-block:
        # the 'if(xqcut.gt.0d0' line itself matches the if/then regex below,
        # so it bumps the counter to 2 and matching endifs walk it back down.
        # Lines are only copied while nb_if == 0.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
         cut_bw = .true.
      else
         stop 1
      endif
      return
      end
""")

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        # MW only needs maxparticles.inc, not the full genps.inc
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)
3113 3114 3115 3116 #=========================================================================== 3117 # Make the Helas and Model directories for Standalone directory 3118 #===========================================================================
3119 - def make(self):
3120 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 3121 everything for running madweight 3122 """ 3123 3124 source_dir = os.path.join(self.dir_path, "Source") 3125 logger.info("Running make for Helas") 3126 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 3127 logger.info("Running make for Model") 3128 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 3129 logger.info("Running make for PDF") 3130 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 3131 logger.info("Running make for CERNLIB") 3132 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 3133 logger.info("Running make for GENERIC") 3134 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 3135 logger.info("Running make for blocks") 3136 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 3137 logger.info("Running make for tools") 3138 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
3139 3140 #=========================================================================== 3141 # Create proc_card_mg5.dat for MadWeight directory 3142 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat

        Also writes maxparticles.inc (linked into the MadWeight blocks/tools
        directories), sets the compilers and compiles all libraries before
        delegating to the generic Fortran finalize.  Statement order matters:
        maxparticles.inc and the compiler setup must precede self.make().
        """

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}



        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
3174 3175 3176 3177 #=========================================================================== 3178 # create the run_card for MW 3179 #===========================================================================
3180 - def create_run_card(self, matrix_elements, history):
3181 """ """ 3182 3183 run_card = banner_mod.RunCard() 3184 3185 # pass to default for MW 3186 run_card["run_tag"] = "\'not_use\'" 3187 run_card["fixed_ren_scale"] = "T" 3188 run_card["fixed_fac_scale"] = "T" 3189 run_card.remove_all_cut() 3190 3191 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 3192 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3193 python_template=True) 3194 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 3195 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3196 python_template=True)
3197 3198 #=========================================================================== 3199 # export model files 3200 #===========================================================================
3201 - def export_model_files(self, model_path):
3202 """export the model dependent files for V4 model""" 3203 3204 super(ProcessExporterFortranMW,self).export_model_files(model_path) 3205 # Add the routine update_as_param in v4 model 3206 # This is a function created in the UFO 3207 text=""" 3208 subroutine update_as_param() 3209 call setpara('param_card.dat',.false.) 3210 return 3211 end 3212 """ 3213 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3214 ff.write(text) 3215 ff.close() 3216 3217 # Modify setrun.f 3218 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 3219 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3220 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 3221 fsock.write(text) 3222 fsock.close() 3223 3224 # Modify initialization.f 3225 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 3226 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 3227 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 3228 fsock.write(text) 3229 fsock.close() 3230 3231 3232 self.make_model_symbolic_link()
3233 3234 #=========================================================================== 3235 # generate_subprocess_directory 3236 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of HELAS calls written into matrix.f (0 when the
        matrix element is empty).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath,'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                                writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                             matrix_element)

        # props.inc needs the s/t channel decomposition from configs.inc
        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                         matrix_element,
                         s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                           len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                           len(matrix_element.get('diagrams')),
                           ncolor,
                           len(matrix_element.get('processes')),
                           1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                           len(matrix_element.get('diagrams')),
                           )

        # Generate diagrams (skipped when output option noeps == 'True')
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): these links use starting_dir=cwd rather than
        # cwd=dirpath as elsewhere in this file — presumably relying on the
        # process cwd; confirm this is intended.
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3344 3345 #=========================================================================== 3346 # write_matrix_element_v4 3347 #===========================================================================
3348 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3349 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3350 3351 if not matrix_element.get('processes') or \ 3352 not matrix_element.get('diagrams'): 3353 return 0 3354 3355 if writer: 3356 if not isinstance(writer, writers.FortranWriter): 3357 raise writers.FortranWriter.FortranWriterError(\ 3358 "writer not FortranWriter") 3359 3360 # Set lowercase/uppercase Fortran code 3361 writers.FortranWriter.downcase = False 3362 3363 replace_dict = {} 3364 3365 # Extract version number and date from VERSION file 3366 info_lines = self.get_mg5_info_lines() 3367 replace_dict['info_lines'] = info_lines 3368 3369 # Extract process info lines 3370 process_lines = self.get_process_info_lines(matrix_element) 3371 replace_dict['process_lines'] = process_lines 3372 3373 # Set proc_id 3374 replace_dict['proc_id'] = proc_id 3375 3376 # Extract number of external particles 3377 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3378 replace_dict['nexternal'] = nexternal 3379 3380 # Extract ncomb 3381 ncomb = matrix_element.get_helicity_combinations() 3382 replace_dict['ncomb'] = ncomb 3383 3384 # Extract helicity lines 3385 helicity_lines = self.get_helicity_lines(matrix_element) 3386 replace_dict['helicity_lines'] = helicity_lines 3387 3388 # Extract overall denominator 3389 # Averaging initial state color, spin, and identical FS particles 3390 den_factor_line = self.get_den_factor_line(matrix_element) 3391 replace_dict['den_factor_line'] = den_factor_line 3392 3393 # Extract ngraphs 3394 ngraphs = matrix_element.get_number_of_amplitudes() 3395 replace_dict['ngraphs'] = ngraphs 3396 3397 # Extract nwavefuncs 3398 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3399 replace_dict['nwavefuncs'] = nwavefuncs 3400 3401 # Extract ncolor 3402 ncolor = max(1, len(matrix_element.get('color_basis'))) 3403 replace_dict['ncolor'] = ncolor 3404 3405 # Extract color data lines 3406 color_data_lines = self.get_color_data_lines(matrix_element) 3407 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3408 3409 # Extract helas calls 3410 helas_calls = fortran_model.get_matrix_element_calls(\ 3411 matrix_element) 3412 3413 replace_dict['helas_calls'] = "\n".join(helas_calls) 3414 3415 # Extract JAMP lines 3416 jamp_lines, nb = self.get_JAMP_lines(matrix_element) 3417 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3418 3419 replace_dict['template_file'] = os.path.join(_file_path, \ 3420 'iolibs/template_files/%s' % self.matrix_file) 3421 replace_dict['template_file2'] = '' 3422 3423 if writer: 3424 file = open(replace_dict['template_file']).read() 3425 file = file % replace_dict 3426 # Write the file 3427 writer.writelines(file) 3428 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3429 else: 3430 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3431 3432 #=========================================================================== 3433 # write_source_makefile 3434 #===========================================================================
3435 - def write_source_makefile(self, writer):
3436 """Write the nexternal.inc file for madweight""" 3437 3438 3439 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3440 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3441 text = open(path).read() % {'libraries': set_of_lib} 3442 writer.write(text) 3443 3444 return True
3445
3446 - def write_phasespace_file(self, writer, nb_diag):
3447 """ """ 3448 3449 template = """ include 'maxparticles.inc' 3450 integer max_branches 3451 parameter (max_branches=max_particles-1) 3452 integer max_configs 3453 parameter (max_configs=%(nb_diag)s) 3454 3455 c channel position 3456 integer config_pos,perm_pos 3457 common /to_config/config_pos,perm_pos 3458 3459 """ 3460 3461 writer.write(template % {'nb_diag': nb_diag})
3462 3463 3464 #=========================================================================== 3465 # write_auto_dsig_file 3466 #===========================================================================
3467 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3468 """Write the auto_dsig.f file for the differential cross section 3469 calculation, includes pdf call information (MadWeight format)""" 3470 3471 if not matrix_element.get('processes') or \ 3472 not matrix_element.get('diagrams'): 3473 return 0 3474 3475 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3476 3477 if ninitial < 1 or ninitial > 2: 3478 raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""") 3479 3480 replace_dict = {} 3481 3482 # Extract version number and date from VERSION file 3483 info_lines = self.get_mg5_info_lines() 3484 replace_dict['info_lines'] = info_lines 3485 3486 # Extract process info lines 3487 process_lines = self.get_process_info_lines(matrix_element) 3488 replace_dict['process_lines'] = process_lines 3489 3490 # Set proc_id 3491 replace_dict['proc_id'] = proc_id 3492 replace_dict['numproc'] = 1 3493 3494 # Set dsig_line 3495 if ninitial == 1: 3496 # No conversion, since result of decay should be given in GeV 3497 dsig_line = "pd(0)*dsiguu" 3498 else: 3499 # Convert result (in GeV) to pb 3500 dsig_line = "pd(0)*conv*dsiguu" 3501 3502 replace_dict['dsig_line'] = dsig_line 3503 3504 # Extract pdf lines 3505 pdf_vars, pdf_data, pdf_lines = \ 3506 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3507 replace_dict['pdf_vars'] = pdf_vars 3508 replace_dict['pdf_data'] = pdf_data 3509 replace_dict['pdf_lines'] = pdf_lines 3510 3511 # Lines that differ between subprocess group and regular 3512 if proc_id: 3513 replace_dict['numproc'] = int(proc_id) 3514 replace_dict['passcuts_begin'] = "" 3515 replace_dict['passcuts_end'] = "" 3516 # Set lines for subprocess group version 3517 # Set define_iconfigs_lines 3518 replace_dict['define_subdiag_lines'] = \ 3519 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3520 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3521 else: 3522 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3523 replace_dict['passcuts_end'] = "ENDIF" 3524 
replace_dict['define_subdiag_lines'] = "" 3525 3526 if writer: 3527 file = open(os.path.join(_file_path, \ 3528 'iolibs/template_files/auto_dsig_mw.inc')).read() 3529 3530 file = file % replace_dict 3531 # Write the file 3532 writer.writelines(file) 3533 else: 3534 return replace_dict
3535 #=========================================================================== 3536 # write_configs_file 3537 #===========================================================================
3538 - def write_configs_file(self, writer, matrix_element):
3539 """Write the configs.inc file for MadEvent""" 3540 3541 # Extract number of external particles 3542 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3543 3544 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3545 mapconfigs = [c[0] for c in configs] 3546 model = matrix_element.get('processes')[0].get('model') 3547 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3548 [[c[1]] for c in configs], 3549 mapconfigs, 3550 nexternal, ninitial,matrix_element, model)
3551 3552 #=========================================================================== 3553 # write_run_configs_file 3554 #===========================================================================
3555 - def write_run_config_file(self, writer):
3556 """Write the run_configs.inc file for MadWeight""" 3557 3558 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3559 text = open(path).read() % {'chanperjob':'5'} 3560 writer.write(text) 3561 return True
3562 3563 #=========================================================================== 3564 # write_configs_file_from_diagrams 3565 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of [s-channels, t-channels] pairs (one entry per
        written config) for later use by the caller."""

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Maximum number of legs per vertex for each config (taken from the
        # first contributing diagram of the config).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fresh PDG code used for fake propagators in multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    # Placeholder shared by all non-contributing subprocesses;
                    # it is resized in place below once real vertices are known.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel: one vertex per subprocess; take the first
                    # non-None one as representative.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #              (last_leg.get('number'), nconfigs, len(daughters),
                #               ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    # NOTE(review): pdgs is collected but only used by the
                    # commented-out sprop data lines below.
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #              (last_leg.get('number'), nconfigs, nsubprocs,
                    #               ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #              (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #              (last_leg.get('number'), nconfigs,
                    #               abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #              (last_leg.get('number'), nconfigs, nsubprocs,
                    #               ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3708
#===============================================================================
# ProcessExporterFortranME
#===============================================================================
class ProcessExporterFortranME(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadEvent format."""

    # Template used to generate each subprocess matrix.f
    matrix_file = "matrix_madevent_v4.inc"
    # presumably a once-per-run warning latch for t-channel handling —
    # TODO confirm where it is set
    done_warning_tchannel = False

    # Default exporter options; child classes and callers may override.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{},
                   'hel_recycling': False
                   }
    # Use the optimized JAMP (color amplitude) construction by default.
    jamp_optim = True
3729 - def __init__(self, dir_path = "", opt=None):
3730 3731 super(ProcessExporterFortranME, self).__init__(dir_path, opt) 3732 3733 # check and format the hel_recycling options as it should if provided 3734 if opt and isinstance(opt['output_options'], dict) and \ 3735 'hel_recycling' in opt['output_options']: 3736 self.opt['hel_recycling'] = banner_mod.ConfigFile.format_variable( 3737 opt['output_options']['hel_recycling'], bool, 'hel_recycling') 3738 3739 if opt and isinstance(opt['output_options'], dict) and \ 3740 't_strategy' in opt['output_options']: 3741 self.opt['t_strategy'] = banner_mod.ConfigFile.format_variable( 3742 opt['output_options']['t_strategy'], int, 't_strategy')
3743 3744 # helper function for customise helas writter 3745 @staticmethod
3746 - def custom_helas_call(call, arg):
3747 if arg['mass'] == '%(M)s,%(W)s,': 3748 arg['mass'] = '%(M)s, fk_%(W)s,' 3749 elif '%(W)s' in arg['mass']: 3750 raise Exception 3751 return call, arg
3752
3753 - def copy_template(self, model):
3754 """Additional actions needed for setup of Template 3755 """ 3756 3757 super(ProcessExporterFortranME, self).copy_template(model) 3758 3759 # File created from Template (Different in some child class) 3760 filename = pjoin(self.dir_path,'Source','run_config.inc') 3761 self.write_run_config_file(writers.FortranWriter(filename)) 3762 3763 # The next file are model dependant (due to SLAH convention) 3764 self.model_name = model.get('name') 3765 # Add the symmetry.f 3766 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3767 self.write_symmetry(writers.FortranWriter(filename)) 3768 # 3769 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3770 self.write_addmothers(writers.FortranWriter(filename)) 3771 # Copy the different python file in the Template 3772 self.copy_python_file()
3773 3774 3775 3776 3777 3778 3779 #=========================================================================== 3780 # generate_subprocess_directory 3781 #===========================================================================
3782 - def copy_python_file(self):
3783 """copy the python file require for the Template""" 3784 3785 # madevent interface 3786 cp(_file_path+'/interface/madevent_interface.py', 3787 self.dir_path+'/bin/internal/madevent_interface.py') 3788 cp(_file_path+'/interface/extended_cmd.py', 3789 self.dir_path+'/bin/internal/extended_cmd.py') 3790 cp(_file_path+'/interface/common_run_interface.py', 3791 self.dir_path+'/bin/internal/common_run_interface.py') 3792 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3793 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3794 cp(_file_path+'/iolibs/save_load_object.py', 3795 self.dir_path+'/bin/internal/save_load_object.py') 3796 cp(_file_path+'/iolibs/file_writers.py', 3797 self.dir_path+'/bin/internal/file_writers.py') 3798 #model file 3799 cp(_file_path+'../models/check_param_card.py', 3800 self.dir_path+'/bin/internal/check_param_card.py') 3801 3802 #copy all the file present in madevent directory 3803 for name in os.listdir(pjoin(_file_path, 'madevent')): 3804 if name not in ['__init__.py'] and name.endswith('.py'): 3805 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3806 3807 #madevent file 3808 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3809 cp(_file_path+'/various/lhe_parser.py', 3810 self.dir_path+'/bin/internal/lhe_parser.py') 3811 cp(_file_path+'/various/banner.py', 3812 self.dir_path+'/bin/internal/banner.py') 3813 cp(_file_path+'/various/histograms.py', 3814 self.dir_path+'/bin/internal/histograms.py') 3815 cp(_file_path+'/various/plot_djrs.py', 3816 self.dir_path+'/bin/internal/plot_djrs.py') 3817 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3818 3819 cp(_file_path+'/various/cluster.py', 3820 self.dir_path+'/bin/internal/cluster.py') 3821 cp(_file_path+'/madevent/combine_runs.py', 3822 self.dir_path+'/bin/internal/combine_runs.py') 3823 # logging configuration 3824 cp(_file_path+'/interface/.mg5_logging.conf', 3825 
self.dir_path+'/bin/internal/me5_logging.conf') 3826 cp(_file_path+'/interface/coloring_logging.py', 3827 self.dir_path+'/bin/internal/coloring_logging.py') 3828 # shower card and FO_analyse_card. 3829 # Although not needed, it is imported by banner.py 3830 cp(_file_path+'/various/shower_card.py', 3831 self.dir_path+'/bin/internal/shower_card.py') 3832 cp(_file_path+'/various/FO_analyse_card.py', 3833 self.dir_path+'/bin/internal/FO_analyse_card.py')
3834 3835
3836 - def convert_model(self, model, wanted_lorentz = [], 3837 wanted_couplings = []):
3838 3839 super(ProcessExporterFortranME,self).convert_model(model, 3840 wanted_lorentz, wanted_couplings) 3841 3842 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3843 try: 3844 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3845 except OSError as error: 3846 pass 3847 model_path = model.get('modelpath') 3848 # This is not safe if there is a '##' or '-' in the path. 3849 shutil.copytree(model_path, 3850 pjoin(self.dir_path,'bin','internal','ufomodel'), 3851 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3852 if hasattr(model, 'restrict_card'): 3853 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3854 'restrict_default.dat') 3855 if isinstance(model.restrict_card, check_param_card.ParamCard): 3856 model.restrict_card.write(out_path) 3857 else: 3858 files.cp(model.restrict_card, out_path)
3859 3860 #=========================================================================== 3861 # export model files 3862 #===========================================================================
3863 - def export_model_files(self, model_path):
3864 """export the model dependent files""" 3865 3866 super(ProcessExporterFortranME,self).export_model_files(model_path) 3867 3868 # Add the routine update_as_param in v4 model 3869 # This is a function created in the UFO 3870 text=""" 3871 subroutine update_as_param() 3872 call setpara('param_card.dat',.false.) 3873 return 3874 end 3875 """ 3876 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3877 ff.write(text) 3878 ff.close() 3879 3880 # Add the symmetry.f 3881 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3882 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3883 3884 # Modify setrun.f 3885 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3886 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3887 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3888 fsock.write(text) 3889 fsock.close() 3890 3891 self.make_model_symbolic_link()
3892 3893 #=========================================================================== 3894 # generate_subprocess_directory 3895 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                         fortran_model,
                                         me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        :param matrix_element: HelasMatrixElement for this subprocess
        :param fortran_model: helas call writer used to emit matrix.f
        :param me_number: sequential matrix-element number (used for
            iproc.dat and matrix.f bookkeeping)
        :return: number of (non-comment) helas calls written, 0 if none
        """

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')


        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # Directory may already exist (e.g. on re-run): warn and reuse.
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])


        # Create the matrix.f file, auto_dsig.f file and all inc files
        # With helicity recycling the raw element goes to matrix_orig.f
        # (matrix.f is derived from it later).
        if self.opt['hel_recycling']:
            filename = pjoin(Ppath, 'matrix_orig.f')
        else:
            filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
               self.write_matrix_element_v4(writers.FortranWriter(filename),
                            matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))


        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams (unless the 'noeps' output option disables it)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(Ppath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        # write_matrix_element_v4 may return 0 when there was nothing to
        # write; normalise to an integer call count.
        if not calls:
            calls = 0
        return calls
    # Shared SubProcesses-level files that every P* subprocess directory
    # links to (common Fortran sources, include files and the makefile).
    link_Sub_files = ['addmothers.f',
                      'cluster.f',
                      'cluster.inc',
                      'coupl.inc',
                      'cuts.f',
                      'cuts.inc',
                      'genps.f',
                      'genps.inc',
                      'idenparts.f',
                      'initcluster.f',
                      'makefile',
                      'message.inc',
                      'myamp.f',
                      'reweight.f',
                      'run.inc',
                      'maxconfigs.inc',
                      'maxparticles.inc',
                      'run_config.inc',
                      'lhe_event_infos.inc',
                      'setcuts.f',
                      'setscales.f',
                      'sudakov.inc',
                      'symmetry.f',
                      'unwgt.f',
                      'dummy_fct.f'
                      ]
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize ME v4 directory by creating jpeg diagrams, html
        pages, proc_card_mg5.dat and madevent.tar.gz.

        :param matrix_elements: the generated matrix elements (either an
            object exposing get('matrix_elements') or an iterable of such
            objects — both layouts are handled below)
        :param history: command history, written out as proc_card_mg5.dat
        :param mg5options: MG5 option dict (compilers, complex mass scheme)
        :param flaglist: list of flags; 'nojpeg' and 'online' are honoured
        """

        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        if 'online' in flaglist:
            online = True
        else:
            online = False

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        # indicate that the output type is not grouped
        if not isinstance(self, ProcessExporterFortranMEGroup):
            self.proc_characteristic['grouped_matrix'] = False

        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        # set limitation linked to the model


        # indicate the PDG of all initial particle
        # (try the grouped layout first, fall back to the flat one)
        try:
            pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        except AttributeError:
            pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        self.proc_characteristic['pdg_initial1'] = pdgs1
        self.proc_characteristic['pdg_initial2'] = pdgs2


        # mssm param cards need conversion to the MG5 convention first
        modelname = self.opt['model']
        if modelname == 'mssm' or modelname.startswith('mssm-'):
            param_card = pjoin(self.dir_path, 'Cards','param_card.dat')
            mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
            check_param_card.convert_to_mg5card(param_card, mg5_param)
            check_param_card.check_valid_param_card(mg5_param)

        # Add the combine_events.f modify param_card path/number of @X
        filename = pjoin(self.dir_path,'Source','combine_events.f')
        try:
            nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
        except AttributeError:
            nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
        nb_proc = len(set(nb_proc))
        self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements)

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)

        # Touch "done" file
        os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses'))

        # Check for compiler
        self.set_compiler(compiler)
        self.set_cpp_compiler(compiler['cpp'])


        old_pos = os.getcwd()
        subpath = pjoin(self.dir_path, 'SubProcesses')

        P_dir_list = [proc for proc in os.listdir(subpath)
                      if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P']

        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the postscript in jpg files (if authorized)
        if makejpg:
            try:
                os.remove(pjoin(self.dir_path,'HTML','card.jpg'))
            except Exception as error:
                pass

            # ghostscript is required for the conversion
            if misc.which('gs'):
                logger.info("Generate jpeg diagrams")
                for Pdir in P_dir_list:
                    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                              stdout = devnull, cwd=pjoin(subpath, Pdir))

        logger.info("Generate web pages")
        # Create the WebPage using perl script

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                  stdout = devnull,cwd=pjoin(self.dir_path))

        #os.chdir(os.path.pardir)

        obj = gen_infohtml.make_info_html(self.dir_path)

        if online:
            nb_channel = obj.rep_rule['nb_gen_diag']
            open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel))
        #add the information to proc_charac
        self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

        # Write command history as proc_card_mg5
        if os.path.isdir(pjoin(self.dir_path,'Cards')):
            output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull)

        #crate the proc_characteristic file
        self.create_proc_charac(matrix_elements, history)

        # create the run_card
        ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')):
            if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')):
                os.remove(pjoin(self.dir_path,'madevent.tar.gz'))
            misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                      stdout = devnull, cwd=self.dir_path)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull, cwd=self.dir_path)
4241 4242 4243 4244 4245 4246 4247 #return to the initial dir 4248 #os.chdir(old_pos) 4249 4250 #=========================================================================== 4251 # write_matrix_element_v4 4252 #===========================================================================
def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                            proc_id = "", config_map = None, subproc_number = ""):
    """Export a matrix element to a matrix.f file in MG4 madevent format.

    writer: a writers.FortranWriter instance, or a false value to skip
        writing and instead return the template replacement dictionary.
    matrix_element: the helas matrix element to export.
    fortran_model: helas call writer providing get_matrix_element_calls.
    proc_id: subprocess id (string) when running in subprocess-group
        mode; empty string for standalone running.
    config_map: configuration map forwarded to get_amp2_lines. Defaults
        to a fresh empty list per call (previously a shared mutable
        default argument).
    subproc_number: unused here; kept for interface compatibility.

    Returns (number_of_helas_calls, ncolor) when a writer is given,
    otherwise the replacement dictionary with 'return_value' filled in.
    Returns 0 when the matrix element has no processes or diagrams.
    """

    # Fix for the mutable-default-argument pitfall: never share one list
    # across calls (get_amp2_lines receives it and could mutate it).
    if config_map is None:
        config_map = []

    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    if writer:
        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter")
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

    # check if MLM/.../ is supported for this matrix-element and update associate flag
    if self.model and 'MLM' in self.model["limitations"]:
        if 'MLM' not in self.proc_characteristic["limitations"]:
            used_couplings = matrix_element.get_used_couplings(output="set")
            for vertex in self.model.get('interactions'):
                particles = [p for p in vertex.get('particles')]
                # only gluon vertices without QCD order can break MLM
                if 21 in [p.get('pdg_code') for p in particles]:
                    colors = [par.get('color') for par in particles]
                    if 1 in colors:
                        continue
                    elif 'QCD' not in vertex.get('orders'):
                        for bad_coup in vertex.get('couplings').values():
                            if bad_coup in used_couplings:
                                self.proc_characteristic["limitations"].append('MLM')
                                break

    # The proc prefix is not used for MadEvent output so it can safely be set
    # to an empty string.
    replace_dict = {'proc_prefix': ''}

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(\
                matrix_element)
    if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel:
        logger.info("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False", '$MG:BOLD')
        ProcessExporterFortranME.done_warning_tchannel = True

    replace_dict['helas_calls'] = "\n".join(helas_calls)

    # adding the support for the fake width (forbidding too small width)
    mass_width = matrix_element.get_all_mass_widths()
    mass_width = sorted(list(mass_width))
    width_list = set([e[1] for e in mass_width])

    replace_dict['fake_width_declaration'] = \
        (' double precision fk_%s \n' * len(width_list)) % tuple(width_list)
    replace_dict['fake_width_declaration'] += \
        (' save fk_%s \n' * len(width_list)) % tuple(width_list)
    fk_w_defs = []
    one_def = ' IF(%(w)s.ne.0d0) fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)'
    for m, w in mass_width:
        if w == 'zero':
            if ' fk_zero = 0d0' not in fk_w_defs:
                fk_w_defs.append(' fk_zero = 0d0')
            continue
        fk_w_defs.append(one_def % {'m': m, 'w': w})
    replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs)

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id

    # Extract ncomb
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    den_factor_line = self.get_den_factor_line(matrix_element)
    replace_dict['den_factor_line'] = den_factor_line

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract ndiags
    ndiags = len(matrix_element.get('diagrams'))
    replace_dict['ndiags'] = ndiags

    # Set define_iconfigs_lines
    replace_dict['define_iconfigs_lines'] = \
        """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
        COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

    if proc_id:
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] += \
            """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
            COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                        proc_id
    else:
        # Standard running
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Set the size of Wavefunction: 18 is needed as soon as the model
    # contains spin-3/2 or spin-2 particles (spin codes 4 and 5).
    if not self.model or any([p.get('spin') in [4, 5] for p in self.model.get('particles') if p]):
        replace_dict['wavefunctionsize'] = 18
    else:
        replace_dict['wavefunctionsize'] = 6

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element, config_map, replace_dict)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # The JAMP definition depends on the splitting order
    split_orders = matrix_element.get('processes')[0].get('split_orders')
    if len(split_orders) > 0:
        squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
        replace_dict['chosen_so_configs'] = self.set_chosen_SO_index(
            matrix_element.get('processes')[0], squared_orders)
        replace_dict['select_configs_if'] = ' IF (CHOSEN_SO_CONFIGS(SQSOINDEX%(proc_id)s(M,N))) THEN' % replace_dict
        replace_dict['select_configs_endif'] = ' endif'
    else:
        # Consider the output of a dummy order 'ALL_ORDERS' for which we
        # set all amplitude order to weight 1 and only one squared order
        # contribution which is of course ALL_ORDERS=2.
        squared_orders = [(2,), ]
        amp_orders = [((1,), tuple(range(1, ngraphs + 1)))]
        replace_dict['chosen_so_configs'] = '.TRUE.'
        # additionally set the function to NOT be called
        replace_dict['select_configs_if'] = ''
        replace_dict['select_configs_endif'] = ''

    replace_dict['nAmpSplitOrders'] = len(amp_orders)
    replace_dict['nSqAmpSplitOrders'] = len(squared_orders)
    replace_dict['split_order_str_list'] = str(split_orders)
    replace_dict['nSplitOrders'] = max(len(split_orders), 1)
    amp_so = self.get_split_orders_lines(
        [amp_order[0] for amp_order in amp_orders], 'AMPSPLITORDERS')
    sqamp_so = self.get_split_orders_lines(squared_orders, 'SQSPLITORDERS')
    replace_dict['ampsplitorders'] = '\n'.join(amp_so)
    replace_dict['sqsplitorders'] = '\n'.join(sqamp_so)

    # Extract JAMP lines
    # If no split_orders then artificially add one entry called 'ALL_ORDERS'
    jamp_lines, nb_temp = self.get_JAMP_lines_split_order(\
        matrix_element, amp_orders, split_order_names=
        split_orders if len(split_orders) > 0 else ['ALL_ORDERS'])
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
    replace_dict['nb_temp_jamp'] = nb_temp

    replace_dict['template_file'] = pjoin(_file_path, \
                     'iolibs/template_files/%s' % self.matrix_file)
    replace_dict['template_file2'] = pjoin(_file_path, \
                     'iolibs/template_files/split_orders_helping_functions.inc')

    s1, s2 = matrix_element.get_spin_state_initial()
    replace_dict['nb_spin_state1'] = s1
    replace_dict['nb_spin_state2'] = s2

    if writer:
        # Use context managers so the template file handles are closed
        # promptly (they previously leaked); also avoid shadowing the
        # builtin name 'file'.
        with open(replace_dict['template_file']) as tmpl:
            text = tmpl.read() % replace_dict
        # Add the split orders helper functions.
        with open(replace_dict['template_file2']) as tmpl2:
            text = text + '\n' + tmpl2.read() % replace_dict
        # Write the file
        writer.writelines(text)
        return len([call for call in helas_calls if call.find('#') != 0]), ncolor
    else:
        replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor)
        return replace_dict
4455 4456 #=========================================================================== 4457 # write_auto_dsig_file 4458 #===========================================================================
def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
    """Write the auto_dsig.f file for the differential cross section
    calculation, includes pdf call information.

    writer: a FortranWriter, or a false value to return the replacement
        dictionary and context instead of writing the file.
    matrix_element: the helas matrix element being exported; also used
        to update self.proc_characteristic (ninitial, nexternal,
        max_n_matched_jets, colored_pdgs).
    proc_id: subprocess id (string) in subprocess-group mode, else "".

    Returns 0 for empty matrix elements, (replace_dict, context) when no
    writer is given; writes the file (and returns None) otherwise.
    Raises FortranWriterError unless ninitial is 1 or 2.
    """

    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    self.proc_characteristic['ninitial'] = ninitial
    self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

    # Add information relevant for MLM matching:
    # Maximum QCD power in all the contributions
    max_qcd_order = 0
    for diag in matrix_element.get('diagrams'):
        orders = diag.calculate_orders()
        if 'QCD' in orders:
            max_qcd_order = max(max_qcd_order, orders['QCD'])
    # light = massless and colored final-state particle
    max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
        if proc.get('model').get_particle(id).get('mass') == 'ZERO' and
           proc.get('model').get_particle(id).get('color') > 1])
                                    for proc in matrix_element.get('processes'))
    # Maximum number of final state light jets to be matched
    self.proc_characteristic['max_n_matched_jets'] = max(
        self.proc_characteristic['max_n_matched_jets'],
        min(max_qcd_order, max_n_light_final_partons))

    # List of default pdgs to be considered for the CKKWl merging cut
    self.proc_characteristic['colored_pdgs'] = \
        sorted(list(set([abs(p.get('pdg_code')) for p in
            matrix_element.get('processes')[0].get('model').get('particles') if
            p.get('color') > 1])))

    if ninitial < 1 or ninitial > 2:
        raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id
    replace_dict['numproc'] = 1

    # Set dsig_line
    if ninitial == 1:
        # No conversion, since result of decay should be given in GeV
        dsig_line = "pd(0)*dsiguu"
    else:
        # Convert result (in GeV) to pb
        dsig_line = "pd(0)*conv*dsiguu"

    replace_dict['dsig_line'] = dsig_line

    # Extract pdf lines
    pdf_vars, pdf_data, pdf_lines = \
        self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # Lines that differ between subprocess group and regular
    if proc_id:
        replace_dict['numproc'] = int(proc_id)
        replace_dict['passcuts_begin'] = ""
        replace_dict['passcuts_end'] = ""
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_subdiag_lines'] = \
            """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
            COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        replace_dict['cutsdone'] = ""
    else:
        replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
        replace_dict['passcuts_end'] = "ENDIF"
        replace_dict['define_subdiag_lines'] = ""
        replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

    # Grouped output provides its own good-helicity bookkeeping
    if not isinstance(self, ProcessExporterFortranMEGroup):
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
    else:
        replace_dict['read_write_good_hel'] = ""

    context = {'read_write_good_hel': True}

    if writer:
        # Context-managed read: the template handle previously leaked;
        # also avoid shadowing the builtin name 'file'.
        with open(pjoin(_file_path, \
                        'iolibs/template_files/auto_dsig_v4.inc')) as tmpl:
            text = tmpl.read() % replace_dict

        # Write the file
        writer.writelines(text, context=context)
    else:
        return replace_dict, context
4561 #=========================================================================== 4562 # write_coloramps_file 4563 #===========================================================================
def write_coloramps_file(self, writer, mapconfigs, matrix_element):
    """Write the coloramps.inc include file for MadEvent.

    The file declares the icolamp array, sized by the number of color
    basis entries (at least 1) and the number of configs, followed by
    the icolamp data lines."""

    n_basis = len(list(matrix_element.get('color_basis').keys()))
    header = "logical icolamp(%d,%d,1)" % (max(n_basis, 1), len(mapconfigs))

    body = self.get_icolamp_lines(mapconfigs, matrix_element, 1)

    # Write the file: declaration first, then the data lines
    writer.writelines([header] + body)

    return True
4577 4578 #=========================================================================== 4579 # write_colors_file 4580 #===========================================================================
def write_colors_file(self, writer, matrix_elements):
    """Write the get_color.f file for MadEvent, which returns color
    for all particles used in the matrix element."""

    # A single matrix element is handled as a one-element list
    if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
        matrix_elements = [matrix_elements]

    model = matrix_elements[0].get('processes')[0].get('model')

    # We need both particle and antiparticle pdg codes, since the
    # identity depends on the direction of the wavefunction; collect
    # the codes from all wavefunctions and all (decayed) legs.
    pdg_codes = set()
    for me in matrix_elements:
        for diagram in me.get('diagrams'):
            for wf in diagram.get('wavefunctions'):
                pdg_codes.add(wf.get_pdg_code())
                pdg_codes.add(wf.get_anti_pdg_code())
        for process in me.get('processes'):
            for leg in process.get_legs_with_decays():
                pdg_codes.add(leg.get('id'))
                pdg_codes.add(model.get_particle(leg.get('id')).get_anti_pdg_code())
    particle_ids = sorted(pdg_codes)

    text = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

    text += "".join("""else if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (part_id, model.get_particle(part_id).get_color())
                    for part_id in particle_ids[1:])

    # Dummy particle for multiparticle vertices with pdg given by
    # first code not in the model
    text += """else if(ipdg.eq.%d)then
        c This is dummy particle used in multiparticle vertices
        get_color=2
        return
        """ % model.get_first_non_pdg()

    text += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

    # Write the file
    writer.writelines(text)

    return True
4637 4638 #=========================================================================== 4639 # write_config_nqcd_file 4640 #===========================================================================
def write_config_nqcd_file(self, writer, nqcd_list):
    """Write the config_nqcd.inc file, holding the number of QCD
    couplings for each config (1-based Fortran indexing)."""

    lines = ["data nqcd(%d)/%d/" % (pos, nqcd)
             for pos, nqcd in enumerate(nqcd_list, 1)]

    # Write the file
    writer.writelines(lines)

    return True
4653 4654 #=========================================================================== 4655 # write_maxconfigs_file 4656 #===========================================================================
def write_maxconfigs_file(self, writer, matrix_elements):
    """Write the maxconfigs.inc file for MadEvent.

    Accepts either a HelasMultiProcess (whose contained matrix elements
    are scanned) or a plain list of matrix elements; lmaxconfigs is set
    to the largest number of configurations found."""

    if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
        me_list = matrix_elements.get('matrix_elements')
    else:
        me_list = matrix_elements
    maxconfigs = max(me.get_num_configs() for me in me_list)

    # Write the file
    writer.writelines("integer lmaxconfigs\n"
                      + "parameter(lmaxconfigs=%d)" % maxconfigs)

    return True
4673 4674 #=========================================================================== 4675 # read_write_good_hel 4676 #===========================================================================
def read_write_good_hel(self, ncomb):
    """Return the Fortran source implementing the helpers that read,
    write and initialise the good_hel common block for ncomb helicity
    combinations (plus a trivial get_maxsproc)."""

    template = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I) = .false.
        enddo
        NTRY = 0
        end

        integer function get_maxsproc()
        implicit none
        get_maxsproc = 1
        return
        end

        """

    return template % {'ncomb': ncomb}
4732 4733 #=========================================================================== 4734 # write_config_subproc_map_file 4735 #===========================================================================
def write_config_subproc_map_file(self, writer, s_and_t_channels):
    """Write a dummy config_subproc.inc file for MadEvent: every
    config is mapped to subprocess 1."""

    lines = ["DATA CONFSUB(1,%d)/1/" % (pos + 1)
             for pos in range(len(s_and_t_channels))]

    # Write the file
    writer.writelines(lines)

    return True
4749 4750 #=========================================================================== 4751 # write_configs_file 4752 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Each diagram becomes its own single-diagram config; returns the
    list of config-to-diagram numbers together with the result of
    write_configs_file_from_diagrams."""

    # Extract number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    model = matrix_element.get('processes')[0].get('model')

    diagrams = matrix_element.get('diagrams')
    # One config per diagram, numbered 1..n
    mapconfigs = list(range(1, len(diagrams) + 1))

    return mapconfigs, self.write_configs_file_from_diagrams(
        writer,
        [[diagram] for diagram in diagrams],
        mapconfigs,
        nexternal, ninitial,
        model)
4768 4769 #=========================================================================== 4770 # write_run_configs_file 4771 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Loop-induced processes are assigned one channel per job, all other
    processes five. With a writer the filled template is written and
    True is returned; without one the substitution dictionary
    {'chanperjob': n} is returned instead.
    """

    path = pjoin(_file_path, 'iolibs', 'template_files', 'madevent_run_config.inc')

    if self.proc_characteristic['loop_induced']:
        job_per_chan = 1
    else:
        job_per_chan = 5

    if writer:
        # Context-managed read: the template file handle previously
        # leaked (open(path).read() was never closed).
        with open(path) as tmpl:
            text = tmpl.read() % {'chanperjob': job_per_chan}
        writer.write(text)
        return True
    else:
        return {'chanperjob': job_per_chan}
4788 4789 #=========================================================================== 4790 # write_configs_file_from_diagrams 4791 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns (s_and_t_channels, nqcd_list):
    s_and_t_channels holds, per written config, the s-channel vertices
    of the first contributing subprocess, the (reordered) t-channel
    vertices and the chosen t-channel strategy code; nqcd_list holds
    the number of QCD couplings of each written config.
    Configs containing vertices with more than the minimal number of
    legs (i.e. non-3-point vertices) are skipped entirely.
    """

    lines = []

    s_and_t_channels = []

    nqcd_list = []

    # Smallest maximal vertex size over all configs; any config using a
    # larger vertex is dropped below.
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers() != []]
    minvert = min(vert_list) if vert_list != [] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # Fictitious PDG code used for multiparticle-vertex propagators
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model,
                                                       new_pdg))
            else:
                # Placeholder shared by all non-contributing
                # subprocesses; resized in-place further down.
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s, t in stchannels if t != None][0]

        # pass to ping-pong strategy for t-channel for 3 or more T-channel
        # this is directly related to change in genps.f
        tstrat = self.opt.get('t_strategy', 0)
        tchannels, tchannels_strategy = ProcessExporterFortranME.reorder_tchannels(tchannels, tstrat, self.model)

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s, t in stchannels if t != None][0],
                                 tchannels, tchannels_strategy])

        # Make sure empty_verts is same length as real vertices
        if any([s for s, t in stchannels]):
            empty_verts[:] = [None] * max([len(s) for s, t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = list(zip(*[s for s, t in stchannels]))
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))
        lines.append("data tstrategy(%d)/%d/" % (nconfigs, tchannels_strategy))
        # Number of QCD couplings in this diagram: taken from the first
        # contributing subprocess diagram only.
        nqcd = 0
        for h in helas_diags:
            if h:
                try:
                    nqcd = h.calculate_orders()['QCD']
                except KeyError:
                    # diagram has no QCD order at all -> keep 0
                    pass
                break
            else:
                continue

        nqcd_list.append(nqcd)

        for verts in allchannels:
            # s-channel entries are tuples over subprocesses; t-channel
            # entries are single vertices.
            if verts in schannels:
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 when the
                # subprocess does not contribute to this config).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                # Last t-channel vertex is not written (it closes on the
                # second incoming particle).
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels, nqcd_list
4932 4933 4934 4935 #=========================================================================== 4936 # reoder t-channels 4937 #=========================================================================== 4938 4939 #ordering = 0 4940 @staticmethod
def reorder_tchannels(tchannels, tstrat, model):
    """Choose and apply the T-channel ordering strategy for one config.

    tchannels: list of t-channel vertices (as returned by
        get_s_and_t_channels); note the helpers called below consume
        this list in place.
    tstrat: user-requested strategy (from the 't_strategy' option):
        2 keep the ordering, 1 flip side, -1/-2 the two ping-pong
        variants, any other value (typically 0) selects automatically.
    model: used to look up propagator masses for the automatic choice;
        a false value disables it.

    Returns (reordered_tchannels, strategy_code) where strategy_code is
    the value written to tstrategy(...) in configs.inc and matches the
    conventions used in genps.f.

    The automatic choice is a heuristic based on whether the outermost
    (and next-to-outermost) t-channel propagators are massless
    (mass == 'ZERO') — presumably to put the strongest collinear
    enhancement on the sampled side; TODO confirm against genps.f.
    """
    # no need to modified anything if 1 or less T-Channel
    # Note that this counts the number of vertex (one more vertex compare to T)
    # ProcessExporterFortranME.ordering +=1

    if len(tchannels) < 3 or tstrat == 2 or not model:
        return tchannels, 2
    elif tstrat == 1:
        return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
    elif tstrat == -2:
        return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
    elif tstrat == -1:
        return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels, 1), -1
    elif len(tchannels) < 4:
        # Exactly two T propagators: decide from their masses, breaking
        # ties with the external leg numbers.
        first = tchannels[0]['legs'][1]['number']
        t1 = tchannels[0]['legs'][-1]['id']
        last = tchannels[-1]['legs'][1]['number']
        t2 = tchannels[-1]['legs'][0]['id']
        m1 = model.get_particle(t1).get('mass') == 'ZERO'
        m2 = model.get_particle(t2).get('mass') == 'ZERO'
        if m2 and not m1:
            return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
        elif m1 and not m2:
            return tchannels, 2
        elif first < last:
            return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
        else:
            return tchannels, 2
    else:
        # Three or more T propagators: also inspect the second
        # propagator from each end (t12/t22).
        first = tchannels[0]['legs'][1]['number']
        t1 = tchannels[0]['legs'][-1]['id']
        last = tchannels[-1]['legs'][1]['number']
        t2 = tchannels[-1]['legs'][0]['id']
        m1 = model.get_particle(t1).get('mass') == 'ZERO'
        m2 = model.get_particle(t2).get('mass') == 'ZERO'

        t12 = tchannels[1]['legs'][-1]['id']
        m12 = model.get_particle(t12).get('mass') == 'ZERO'
        t22 = tchannels[-2]['legs'][0]['id']
        m22 = model.get_particle(t22).get('mass') == 'ZERO'
        if m2 and not m1:
            if m22:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            else:
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
        elif m1 and not m2:
            if m12:
                return tchannels, 2
            else:
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
        elif m1 and m2 and len(tchannels) == 4 and not m12:  # 3 T propa
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
        # this case seems quite sensitive we tested method 2 specifically and this was not helping in general
        elif not m1 and not m2 and len(tchannels) == 4 and m12:
            if first < last:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            return tchannels, 2
        else:
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
5001 5002 5003 5004 5005 @staticmethod
def reorder_tchannels_flipside(tchannels):
    """change the tchannel ordering to pass to a ping-pong strategy.
    assume ninitial == 2

    NOTE(review): this CONSUMES the input list (pop in the loop) and
    returns a new list of freshly copied vertices.

    We assume that we receive something like this

    1 ----- X ------- -2
            |
            | (-X)
            |
            X -------- 4
            |
            | (-X-1)
            |
            X --------- -1

            X---------- 3
            |
            | (-N+2)
            |
            X --------- L
            |
            | (-N+1)
            |
    -N ----- X ------- P

    coded as
    (1 -2 > -X) (-X 4 > -X-1) (-X-1 -1 > -X-2) ...
    ((-N+3) 3 > (-N+2)) ((-n+2) L > (-n+1)) ((-n+1) P > -N)

    we want to convert this as:
    -N ----- X ------- -2
            |
            | (-N+1)
            |
            X -------- 4
            |
            | (-N+2)
            |
            X --------- -1

            X---------- 3
            |
            | (-X-1)
            |
            X --------- L
            |
            | (-X)
            |
    2 ----- X ------- P

    coded as
    ( 2 P > -X) (-X L > -X-1) (-X-1 3 > -X-2)... (-X-L -2 > -N)
    """

    # no need to modified anything if 1 or less T-Channel
    # Note that this counts the number of vertex (one more vertex compare to T)
    if len(tchannels) < 2:
        return tchannels

    out = []
    # maps old propagator/leg numbers to their relabelled values
    oldid2new = {}

    # initialisation
    # id of the first T-channel (-X)
    propa_id = tchannels[0]['legs'][-1]['number']
    #
    # Setup the last vertex to reference the second beam id
    # -N (need to setup it to 2).
    initialid = tchannels[-1]['legs'][-1]['number']
    oldid2new[initialid] = 2
    oldid2new[1] = initialid

    i = 0
    while tchannels:
        # walk from the bottom of the original chain
        old_vert = tchannels.pop()

        # copy the vertex/leglist to avoid side effects
        new_vert = copy.copy(old_vert)
        new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
        # vertex taken from the bottom we have
        # (-N+1 X > -N) we need to flip to pass to
        # -N X > -N+1 (and then relabel -N and -N+1)
        legs = new_vert['legs']  # shortcut
        id1 = legs[0]['number']
        id2 = legs[1]['number']
        id3 = legs[2]['number']
        # to be secure we also support (X -N+1 > -N)
        if id3 == id2 - 1 and id1 != 1:
            legs[0], legs[1] = legs[1], legs[0]
        # flipping side
        legs[0], legs[2] = legs[2], legs[0]

        # the only new relabelling is the last element of the list
        # always thanks to the above flipping
        old_propa_id = new_vert['legs'][-1]['number']
        oldid2new[old_propa_id] = propa_id

        # pass to new convention for leg numbering:
        for l in new_vert['legs']:
            if l['number'] in oldid2new:
                l['number'] = oldid2new[l['number']]

        # new_vert is now ready
        out.append(new_vert)
        # prepare next iteration
        propa_id -= 1
        i += 1

    return out
    @staticmethod
    def reorder_tchannels_pingpong(tchannels, id=2):
        """Reorder the t-channel vertex list into a "ping-pong" strategy
        (assumes ninitial == 2).

        Input chain (coded bottom-up from beam 1):
            (1 -2 > -X) (-X 4 > -X-1) (-X-1 -1 > -X-2) ...
            ((-N+3) 3 > (-N+2)) ((-N+2) L > (-N+1)) ((-N+1) P > -N)

        i.e. graphically:

            1 ----- X ------- -2
                    |
                    | (-X)
                    |
                    X -------- 4
                    |
                    | (-X-1)
                    |
                    X --------- -1

                    X---------- 3
                    |
                    | (-N+2)
                    |
                    X --------- L
                    |
                    | (-N+1)
                    |
            -N ----- X ------- P

        Output chain alternates ends, starting from beam `id`:
            (1 -2 > -X) (2 P > -X-1) (-X 4 > -X-2) (-X-1 L > -X-3) ...

            1 ----- X ------- -2
                    |
                    | (-X)
                    |
                    X -------- 4
                    |
                    | (-X-2)
                    |
                    X --------- -1

                    X---------- 3
                    |
                    | (-X-3)
                    |
                    X --------- L
                    |
                    | (-X-1)
                    |
            2 ----- X ------- P

        NOTE: `tchannels` is consumed (popped empty) by this routine;
        a relabelled list of fresh Vertex copies is returned.
        The parameter name `id` shadows the builtin but is kept for
        interface compatibility: id==2 starts the alternation from the
        top of the chain, id==1 from the bottom.
        """

        # no need to modified anything if 1 or less T-Channel
        #Note that this counts the number of vertex (one more vertex compare to T)
        if len(tchannels) < 2:
            return tchannels

        out = []
        oldid2new = {}  # old propagator number -> new propagator number

        # initialisation
        # id of the first T-channel (-X)
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Setup the last vertex to reference the second id beam
        # -N (need to setup it to 2.
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = id

        i = 0
        while tchannels:
            #ping pong by taking first/last element in alternance
            if id ==2:
                if i % 2 == 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()
            else:
                if i % 2 != 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()

            #copy the vertex /leglist to avoid side effects
            new_vert = base_objects.Vertex(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # if vertex taken from the bottom we have
            # (-N+1 X > -N) we need to flip to pass to
            # -N X > -N+1 (and then relabel -N and -N+1
            # to be secure we also support (X -N+1 > -N)
            if (i % 2 ==1 and id ==2) or (i %2 == 0 and id ==1):
                legs = new_vert['legs'] # shortcut
                id1 = legs[0]['number']
                id2 = legs[1]['number']
                if id1 > id2:
                    legs[0], legs[1] = legs[1], legs[0]
                else:
                    legs[0], legs[2] = legs[2], legs[0]

            # the only new relabelling is the last element of the list
            # always thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            # when starting from beam 1, the first bottom vertex must point
            # to the second beam explicitly (legs is bound: the flip branch
            # above always runs when i==0 and id==1)
            if i==0 and id==1:
                legs[0]['number'] = 2

            #pass to new convention for leg numbering:
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # prepare next iteration
            propa_id -=1
            i +=1

        return out
5243 5244 5245 5246 5247 5248 #=========================================================================== 5249 # write_decayBW_file 5250 #===========================================================================
5251 - def write_decayBW_file(self, writer, s_and_t_channels):
5252 """Write the decayBW.inc file for MadEvent""" 5253 5254 lines = [] 5255 5256 booldict = {None: "0", True: "1", False: "2"} 5257 5258 for iconf, config in enumerate(s_and_t_channels): 5259 schannels = config[0] 5260 for vertex in schannels: 5261 # For the resulting leg, pick out whether it comes from 5262 # decay or not, as given by the onshell flag 5263 leg = vertex.get('legs')[-1] 5264 lines.append("data gForceBW(%d,%d)/%s/" % \ 5265 (leg.get('number'), iconf + 1, 5266 booldict[leg.get('onshell')])) 5267 5268 # Write the file 5269 writer.writelines(lines) 5270 5271 return True
5272 5273 #=========================================================================== 5274 # write_dname_file 5275 #===========================================================================
5276 - def write_dname_file(self, writer, dir_name):
5277 """Write the dname.mg file for MG4""" 5278 5279 line = "DIRNAME=%s" % dir_name 5280 5281 # Write the file 5282 writer.write(line + "\n") 5283 5284 return True
5285 5286 #=========================================================================== 5287 # write_driver 5288 #===========================================================================
5289 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
5290 """Write the SubProcess/driver.f file for MG4""" 5291 5292 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 5293 5294 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5295 card = 'Source/MODEL/MG5_param.dat' 5296 else: 5297 card = 'param_card.dat' 5298 # Requiring each helicity configuration to be probed by 10 points for 5299 # matrix element before using the resulting grid for MC over helicity 5300 # sampling. 5301 # We multiply this by 2 because each grouped subprocess is called at most 5302 # twice for each IMIRROR. 5303 replace_dict = {'param_card_name':card, 5304 'ncomb':ncomb, 5305 'hel_init_points':n_grouped_proc*10*2} 5306 if not v5: 5307 replace_dict['secondparam']=',.true.' 5308 else: 5309 replace_dict['secondparam']='' 5310 5311 if writer: 5312 text = open(path).read() % replace_dict 5313 writer.write(text) 5314 return True 5315 else: 5316 return replace_dict
5317 5318 #=========================================================================== 5319 # write_addmothers 5320 #===========================================================================
5321 - def write_addmothers(self, writer):
5322 """Write the SubProcess/addmothers.f""" 5323 5324 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5325 5326 text = open(path).read() % {'iconfig': 'diag_number'} 5327 writer.write(text) 5328 5329 return True
5330 5331 5332 #=========================================================================== 5333 # write_combine_events 5334 #===========================================================================
5335 - def write_combine_events(self, writer, nb_proc=100):
5336 """Write the SubProcess/driver.f file for MG4""" 5337 5338 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 5339 5340 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5341 card = 'Source/MODEL/MG5_param.dat' 5342 else: 5343 card = 'param_card.dat' 5344 5345 #set maxpup (number of @X in the process card) 5346 5347 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 5348 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 5349 writer.write(text) 5350 5351 return True
5352 5353 5354 #=========================================================================== 5355 # write_symmetry 5356 #===========================================================================
5357 - def write_symmetry(self, writer, v5=True):
5358 """Write the SubProcess/driver.f file for ME""" 5359 5360 5361 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 5362 5363 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5364 card = 'Source/MODEL/MG5_param.dat' 5365 else: 5366 card = 'param_card.dat' 5367 5368 if v5: 5369 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 5370 else: 5371 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 5372 5373 if writer: 5374 text = open(path).read() 5375 text = text % replace_dict 5376 writer.write(text) 5377 return True 5378 else: 5379 return replace_dict
5380 5381 5382 5383 #=========================================================================== 5384 # write_iproc_file 5385 #===========================================================================
5386 - def write_iproc_file(self, writer, me_number):
5387 """Write the iproc.dat file for MG4""" 5388 line = "%d" % (me_number + 1) 5389 5390 # Write the file 5391 for line_to_write in writer.write_line(line): 5392 writer.write(line_to_write) 5393 return True
5394 5395 #=========================================================================== 5396 # write_mg_sym_file 5397 #===========================================================================
5398 - def write_mg_sym_file(self, writer, matrix_element):
5399 """Write the mg.sym file for MadEvent.""" 5400 5401 lines = [] 5402 5403 # Extract process with all decays included 5404 final_legs = [leg for leg in matrix_element.get('processes')[0].get_legs_with_decays() if leg.get('state') == True] 5405 5406 ninitial = len([leg for leg in matrix_element.get('processes')[0].get('legs') if leg.get('state') == False]) 5407 5408 identical_indices = {} 5409 5410 # Extract identical particle info 5411 for i, leg in enumerate(final_legs): 5412 if leg.get('id') in identical_indices: 5413 identical_indices[leg.get('id')].append(\ 5414 i + ninitial + 1) 5415 else: 5416 identical_indices[leg.get('id')] = [i + ninitial + 1] 5417 5418 # Remove keys which have only one particle 5419 for key in list(identical_indices.keys()): 5420 if len(identical_indices[key]) < 2: 5421 del identical_indices[key] 5422 5423 # Write mg.sym file 5424 lines.append(str(len(list(identical_indices.keys())))) 5425 for key in identical_indices.keys(): 5426 lines.append(str(len(identical_indices[key]))) 5427 for number in identical_indices[key]: 5428 lines.append(str(number)) 5429 5430 # Write the file 5431 writer.writelines(lines) 5432 5433 return True
5434 5435 #=========================================================================== 5436 # write_mg_sym_file 5437 #===========================================================================
5438 - def write_default_mg_sym_file(self, writer):
5439 """Write the mg.sym file for MadEvent.""" 5440 5441 lines = "0" 5442 5443 # Write the file 5444 writer.writelines(lines) 5445 5446 return True
5447 5448 #=========================================================================== 5449 # write_ncombs_file 5450 #===========================================================================
5451 - def write_ncombs_file(self, writer, nexternal):
5452 """Write the ncombs.inc file for MadEvent.""" 5453 5454 # ncomb (used for clustering) is 2^nexternal 5455 file = " integer n_max_cl\n" 5456 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 5457 5458 # Write the file 5459 writer.writelines(file) 5460 5461 return True
5462 5463 #=========================================================================== 5464 # write_processes_file 5465 #===========================================================================
5466 - def write_processes_file(self, writer, subproc_group):
5467 """Write the processes.dat file with info about the subprocesses 5468 in this group.""" 5469 5470 lines = [] 5471 5472 for ime, me in \ 5473 enumerate(subproc_group.get('matrix_elements')): 5474 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 5475 ",".join(p.base_string() for p in \ 5476 me.get('processes')))) 5477 if me.get('has_mirror_process'): 5478 mirror_procs = [copy.copy(p) for p in me.get('processes')] 5479 for proc in mirror_procs: 5480 legs = copy.copy(proc.get('legs_with_decays')) 5481 legs.insert(0, legs.pop(1)) 5482 proc.set("legs_with_decays", legs) 5483 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 5484 mirror_procs)) 5485 else: 5486 lines.append("mirror none") 5487 5488 # Write the file 5489 writer.write("\n".join(lines)) 5490 5491 return True
5492 5493 #=========================================================================== 5494 # write_symswap_file 5495 #===========================================================================
5496 - def write_symswap_file(self, writer, ident_perms):
5497 """Write the file symswap.inc for MG4 by comparing diagrams using 5498 the internal matrix element value functionality.""" 5499 5500 lines = [] 5501 5502 # Write out lines for symswap.inc file (used to permute the 5503 # external leg momenta 5504 for iperm, perm in enumerate(ident_perms): 5505 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 5506 (iperm+1, ",".join([str(i+1) for i in perm]))) 5507 lines.append("data nsym/%d/" % len(ident_perms)) 5508 5509 # Write the file 5510 writer.writelines(lines) 5511 5512 return True
5513 5514 #=========================================================================== 5515 # write_symfact_file 5516 #===========================================================================
5517 - def write_symfact_file(self, writer, symmetry):
5518 """Write the files symfact.dat for MG4 by comparing diagrams using 5519 the internal matrix element value functionality.""" 5520 5521 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 5522 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 5523 # Write out lines for symswap.inc file (used to permute the 5524 # external leg momenta 5525 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 5526 # Write the file 5527 writer.write('\n'.join(lines)) 5528 writer.write('\n') 5529 5530 return True
5531 5532 #=========================================================================== 5533 # write_symperms_file 5534 #===========================================================================
5535 - def write_symperms_file(self, writer, perms):
5536 """Write the symperms.inc file for subprocess group, used for 5537 symmetric configurations""" 5538 5539 lines = [] 5540 for iperm, perm in enumerate(perms): 5541 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 5542 (iperm+1, ",".join([str(i+1) for i in perm]))) 5543 5544 # Write the file 5545 writer.writelines(lines) 5546 5547 return True
5548 5549 #=========================================================================== 5550 # write_subproc 5551 #===========================================================================
5552 - def write_subproc(self, writer, subprocdir):
5553 """Append this subprocess to the subproc.mg file for MG4""" 5554 5555 # Write line to file 5556 writer.write(subprocdir + "\n") 5557 5558 return True
5559
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Name of the matrix-element template used for grouped output
    # (consumed by the parent exporter's matrix-element writer).
    matrix_file = "matrix_madevent_group_v4.inc"
    # Output flavour tag for grouped mode.
    grouped_mode = 'madevent'
    # Default export options; callers may override entries at run time.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{},
                   'hel_recycling': True
                   }

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls, or 0 when the directory
        could not be entered.
        NOTE(review): this routine chdir()s into the subprocess directory
        and writes many files there; it is not safe to run concurrently.
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory may already exist; warn and reuse it.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb!=me.get_helicity_combinations():
                raise MadGraph5Error("All grouped processes must share the "+\
                                     "same number of helicity configurations.")

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename),ncomb,
                          n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

        # Propagate the helicity-recycling option; default to off if unset.
        try:
            self.proc_characteristic['hel_recycling'] = self.opt['hel_recycling']
        except KeyError:
            self.proc_characteristic['hel_recycling'] = False
            self.opt['hel_recycling'] = False

        for ime, matrix_element in \
                enumerate(matrix_elements):
            if self.opt['hel_recycling']:
                # Helicity recycling: write the original matrix element plus
                # a dedicated template used to regenerate an optimised one.
                filename = 'matrix%d_orig.f' % (ime+1)
                replace_dict = self.write_matrix_element_v4(None,
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)
                calls,ncolor = replace_dict['return_value']
                tfile = open(replace_dict['template_file']).read()
                file = tfile % replace_dict
                # Add the split orders helper functions.
                file = file + '\n' + open(replace_dict['template_file2'])\
                                     .read()%replace_dict
                # Write the file
                writer = writers.FortranWriter(filename)
                writer.writelines(file)

                #
                # write the dedicated template for helicity recycling
                #
                tfile = open(replace_dict['template_file'].replace('.inc',"_hel.inc")).read()
                file = tfile % replace_dict
                # Add the split orders helper functions.
                file = file + '\n' + open(replace_dict['template_file2'])\
                                     .read()%replace_dict
                # Write the file
                writer = writers.FortranWriter('template_matrix%d.f' % (ime+1))
                writer.uniformcase = False
                writer.writelines(file)

            else:
                filename = 'matrix%d.f' % (ime+1)
                calls, ncolor = \
                    self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
                filename = "matrix%d.ps" % (ime+1)
                plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                  get('diagrams'),
                                                  filename,
                                                  model = \
                                                  matrix_element.get('processes')[0].\
                                                  get('model'),
                                                  amplitude=True)
                logger.info("Generating Feynman diagrams for " + \
                            matrix_element.get('processes')[0].nice_string())
                plot.draw()

        # Extract number of external particles
        # (uses the last matrix_element of the loop; all grouped processes
        # are presumed to share the same external structure — TODO confirm)
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # check consistency: configurations mapped onto one another by a
        # symmetry must carry the same QCD power
        for i, sym_fact in enumerate(symmetry):

            if sym_fact >= 0:
                continue
            if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]:
                misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)])
                raise Exception("identical diagram with different QCD powwer")

        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir)
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5858 5859 #=========================================================================== 5860 # write_super_auto_dsig_file 5861 #===========================================================================
    def write_super_auto_dsig_file(self, writer, subproc_group):
        """Write the auto_dsig.f file selecting between the subprocesses
        in subprocess group mode.

        Fills a replacement dictionary for the
        super_auto_dsig_group_v4.inc template; returns it when no writer
        is given, otherwise writes the formatted file.
        """

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        matrix_elements = subproc_group.get('matrix_elements')

        # Extract process info lines
        process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                                   matrix_elements])
        replace_dict['process_lines'] = process_lines

        nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal

        # Twice the process count: one slot per process and per IMIRROR.
        replace_dict['nsprocs'] = 2*len(matrix_elements)

        # Generate dsig definition line
        dsig_def_line = "DOUBLE PRECISION " + \
                        ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                                  range(len(matrix_elements))])
        replace_dict["dsig_def_line"] = dsig_def_line

        # Generate dsig process lines
        call_dsig_proc_lines = []
        for iproc in range(len(matrix_elements)):
            call_dsig_proc_lines.append(\
                "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
                {"num": iproc + 1,
                 "proc": matrix_elements[iproc].get('processes')[0].base_string()})
        replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

        # Fortran routines to save/restore the helicity grid.
        ncomb=matrix_elements[0].get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

        s1,s2 = matrix_elements[0].get_spin_state_initial()
        replace_dict['nb_spin_state1'] = s1
        replace_dict['nb_spin_state2'] = s2

        # One diagnostic call per subprocess.
        printzeroamp = []
        for iproc in range(len(matrix_elements)):
            printzeroamp.append(\
                " call print_zero_amp_%i()" % ( iproc + 1))
        replace_dict['print_zero_amp'] = "\n".join(printzeroamp)

        if writer:
            file = open(pjoin(_file_path, \
                       'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
5922 5923 #=========================================================================== 5924 # write_mirrorprocs 5925 #===========================================================================
5926 - def write_mirrorprocs(self, writer, subproc_group):
5927 """Write the mirrorprocs.inc file determining which processes have 5928 IS mirror process in subprocess group mode.""" 5929 5930 lines = [] 5931 bool_dict = {True: '.true.', False: '.false.'} 5932 matrix_elements = subproc_group.get('matrix_elements') 5933 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5934 (len(matrix_elements), 5935 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5936 me in matrix_elements]))) 5937 # Write the file 5938 writer.writelines(lines)
5939 5940 #=========================================================================== 5941 # write_addmothers 5942 #===========================================================================
5943 - def write_addmothers(self, writer):
5944 """Write the SubProcess/addmothers.f""" 5945 5946 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5947 5948 text = open(path).read() % {'iconfig': 'lconfig'} 5949 writer.write(text) 5950 5951 return True
5952 5953 5954 #=========================================================================== 5955 # write_coloramps_file 5956 #===========================================================================
5957 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5958 matrix_elements):
5959 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5960 5961 # Create a map from subprocess (matrix element) to a list of 5962 # the diagrams corresponding to each config 5963 5964 lines = [] 5965 5966 subproc_to_confdiag = {} 5967 for config in diagrams_for_config: 5968 for subproc, diag in enumerate(config): 5969 try: 5970 subproc_to_confdiag[subproc].append(diag) 5971 except KeyError: 5972 subproc_to_confdiag[subproc] = [diag] 5973 5974 for subproc in sorted(subproc_to_confdiag.keys()): 5975 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5976 matrix_elements[subproc], 5977 subproc + 1)) 5978 5979 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5980 (maxflows, 5981 len(diagrams_for_config), 5982 len(matrix_elements))) 5983 5984 # Write the file 5985 writer.writelines(lines) 5986 5987 return True
5988 5989 #=========================================================================== 5990 # write_config_subproc_map_file 5991 #===========================================================================
5992 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5993 """Write the config_subproc_map.inc file for subprocess groups""" 5994 5995 lines = [] 5996 # Output only configs that have some corresponding diagrams 5997 iconfig = 0 5998 for config in config_subproc_map: 5999 if set(config) == set([0]): 6000 continue 6001 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 6002 (iconfig + 1, len(config), 6003 ",".join([str(i) for i in config]))) 6004 iconfig += 1 6005 # Write the file 6006 writer.writelines(lines) 6007 6008 return True
6009 6010 #=========================================================================== 6011 # read_write_good_hel 6012 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """Return the Fortran source for the good_hel common-block helpers.

        The snippet defines write_good_hel/read_good_hel (stream the
        GOODHEL array to/from a file), init_good_hel (reset the grid) and
        get_maxsproc; it is substituted into the auto_dsig template.
        """

        convert = {'ncomb' : ncomb}

        # NOTE: the literal below is emitted verbatim into generated
        # Fortran; only %(ncomb)d is substituted.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER                 NCOMB
        PARAMETER (             NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
            GOODHEL(I,1) = .false.
            GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
6074 6075 6076 6077 #=========================================================================== 6078 # write_configs_file 6079 #===========================================================================
6080 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6081 """Write the configs.inc file with topology information for a 6082 subprocess group. Use the first subprocess with a diagram for each 6083 configuration.""" 6084 6085 matrix_elements = subproc_group.get('matrix_elements') 6086 model = matrix_elements[0].get('processes')[0].get('model') 6087 6088 diagrams = [] 6089 config_numbers = [] 6090 for iconfig, config in enumerate(diagrams_for_config): 6091 # Check if any diagrams correspond to this config 6092 if set(config) == set([0]): 6093 continue 6094 subproc_diags = [] 6095 for s,d in enumerate(config): 6096 if d: 6097 subproc_diags.append(matrix_elements[s].\ 6098 get('diagrams')[d-1]) 6099 else: 6100 subproc_diags.append(None) 6101 diagrams.append(subproc_diags) 6102 config_numbers.append(iconfig + 1) 6103 6104 # Extract number of external particles 6105 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6106 6107 return len(diagrams), \ 6108 self.write_configs_file_from_diagrams(writer, diagrams, 6109 config_numbers, 6110 nexternal, ninitial, 6111 model)
6112 6113 #=========================================================================== 6114 # write_run_configs_file 6115 #===========================================================================
6116 - def write_run_config_file(self, writer):
6117 """Write the run_configs.inc file for MadEvent""" 6118 6119 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 6120 if self.proc_characteristic['loop_induced']: 6121 job_per_chan = 1 6122 else: 6123 job_per_chan = 2 6124 text = open(path).read() % {'chanperjob':job_per_chan} 6125 writer.write(text) 6126 return True
6127 6128 6129 #=========================================================================== 6130 # write_leshouche_file 6131 #===========================================================================
6132 - def write_leshouche_file(self, writer, subproc_group):
6133 """Write the leshouche.inc file for MG4""" 6134 6135 all_lines = [] 6136 6137 for iproc, matrix_element in \ 6138 enumerate(subproc_group.get('matrix_elements')): 6139 all_lines.extend(self.get_leshouche_lines(matrix_element, 6140 iproc)) 6141 # Write the file 6142 writer.writelines(all_lines) 6143 return True
6144 6145
    def finalize(self,*args, **opts):
        """Finalize the export: delegate to the parent exporter, then record
        in the process characteristics that matrix elements were grouped."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        #ensure that the grouping information is on the correct value
        self.proc_characteristic['grouped_matrix'] = True
#===============================================================================
# UFO_model_to_mg4
#===============================================================================

def python_to_fortran(x):
    """Translate a python/UFO expression string into its Fortran form.

    A plain function instead of a lambda assigned to a name (PEP 8 E731);
    behavior is unchanged: a fresh parser is built for each call.
    """
    return parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type names used when writing the multiple- (quadruple-)
    # precision versions of the parameter/coupling files
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
6174 - def __init__(self, model, output_path, opt=None):
6175 """ initialization of the objects """ 6176 6177 self.model = model 6178 self.model_name = model['name'] 6179 self.dir_path = output_path 6180 6181 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 6182 'loop_induced': False} 6183 if opt: 6184 self.opt.update(opt) 6185 6186 self.coups_dep = [] # (name, expression, type) 6187 self.coups_indep = [] # (name, expression, type) 6188 self.params_dep = [] # (name, expression, type) 6189 self.params_indep = [] # (name, expression, type) 6190 self.params_ext = [] # external parameter 6191 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 6192 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
6193 6194
6196 """modify the parameter if some of them are identical up to the case""" 6197 6198 lower_dict={} 6199 duplicate = set() 6200 keys = list(self.model['parameters'].keys()) 6201 keys.sort() 6202 for key in keys: 6203 for param in self.model['parameters'][key]: 6204 lower_name = param.name.lower() 6205 if not lower_name: 6206 continue 6207 try: 6208 lower_dict[lower_name].append(param) 6209 except KeyError as error: 6210 lower_dict[lower_name] = [param] 6211 else: 6212 duplicate.add(lower_name) 6213 logger.debug('%s is define both as lower case and upper case.' 6214 % lower_name) 6215 if not duplicate: 6216 return 6217 6218 re_expr = r'''\b(%s)\b''' 6219 to_change = [] 6220 change={} 6221 for value in duplicate: 6222 for i, var in enumerate(lower_dict[value]): 6223 to_change.append(var.name) 6224 new_name = '%s%s' % (var.name.lower(), 6225 ('__%d'%(i+1) if i>0 else '')) 6226 change[var.name] = new_name 6227 var.name = new_name 6228 6229 # Apply the modification to the map_CTcoup_CTparam of the model 6230 # if it has one (giving for each coupling the CT parameters whcih 6231 # are necessary and which should be exported to the model. 
6232 if hasattr(self.model,'map_CTcoup_CTparam'): 6233 for coup, ctparams in self.model.map_CTcoup_CTparam: 6234 for i, ctparam in enumerate(ctparams): 6235 try: 6236 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 6237 except KeyError: 6238 pass 6239 6240 replace = lambda match_pattern: change[match_pattern.groups()[0]] 6241 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 6242 6243 # change parameters 6244 for key in keys: 6245 if key == ('external',): 6246 continue 6247 for param in self.model['parameters'][key]: 6248 param.expr = rep_pattern.sub(replace, param.expr) 6249 6250 # change couplings 6251 for key in self.model['couplings'].keys(): 6252 for coup in self.model['couplings'][key]: 6253 coup.expr = rep_pattern.sub(replace, coup.expr) 6254 6255 # change mass/width 6256 for part in self.model['particles']: 6257 if str(part.get('mass')) in to_change: 6258 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 6259 if str(part.get('width')) in to_change: 6260 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
6261
    def refactorize(self, wanted_couplings = []):
        """modify the couplings to fit with MG4 convention """

        # Keep only separation in alphaS: parameters whose key mentions a
        # PS-dependent quantity (aS, MU_R) must be recomputed per PS point
        keys = list(self.model['parameters'].keys())
        keys.sort(key=len)
        for key in keys:
            # drop unnamed entries
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        # NOTE(review): 'keys' is re-sorted here but the loop below iterates
        # the dict directly, so the sorted list is unused — TODO confirm
        # whether the ordering was meant to apply.
        keys = list(self.model['couplings'].keys())
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related computation
        #Pass G in the independant list
        # (membership test against the string presumably relies on
        # ModelVariable comparing equal to its name — TODO confirm)
        if 'G' in self.params_dep:
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
        #    G.expr = '2*cmath.sqrt(as*pi)'
        #    self.params_indep.insert(0, self.params_dep.pop(index))
        # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append( base_objects.ModelVariable('aS', '0.138','real'))
            self.params_indep.append( base_objects.ModelVariable('G', '4.1643','real'))
6304 - def build(self, wanted_couplings = [], full=True):
6305 """modify the couplings to fit with MG4 convention and creates all the 6306 different files""" 6307 6308 self.pass_parameter_to_case_insensitive() 6309 self.refactorize(wanted_couplings) 6310 6311 # write the files 6312 if full: 6313 if wanted_couplings: 6314 # extract the wanted ct parameters 6315 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 6316 self.write_all()
6317 6318
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        format='fortran' returns a writers.FortranWriter; anything else a
        plain file handle. In both cases a banner of 77 comment characters
        plus a 'written by the UFO converter' line is written first.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
            # Call io.FileIO.writelines unbound on the writer, presumably to
            # bypass FortranWriter's own line reformatting so the banner is
            # written verbatim — TODO confirm
            write_class = io.FileIO

            write_class.writelines(fsock, comment * 77 + '\n')
            write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) * ' '})
            write_class.writelines(fsock, comment * 77 + '\n\n')
        else:
            fsock = open(file_path, 'w')
            fsock.writelines(comment * 77 + '\n')
            fsock.writelines('%(comment)s written by the UFO converter\n' % \
                             {'comment': comment + (6 - len(comment)) * ' '})
            fsock.writelines(comment * 77 + '\n\n')
        return fsock
6340 6341
    def write_all(self):
        """ write all the files

        Driver that emits every file of the MG4 model directory; each helper
        writes one (or a family of) output file(s)."""

        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            # multiple-precision variant of the internal-parameter definitions
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()


        # All the standard files
        self.copy_standard_file()
6374 6375 ############################################################################ 6376 ## ROUTINE CREATING THE FILES ############################################ 6377 ############################################################################ 6378
    def copy_standard_file(self):
        """Copy the standard files for the fortran model.

        Links the fixed library files, specializes rw_para.f (includes and
        param-card loading call) for the chosen export format, and installs
        the appropriate makefile."""

        #copy the library files
        file_to_link = ['formats.inc','printout.f', \
                        'rw_para.f', 'testprog.f']

        for filename in file_to_link:
            cp( MG5DIR + '/models/template_files/fortran/' + filename, \
                                                                self.dir_path)

        file = open(os.path.join(MG5DIR,\
                              'models/template_files/fortran/rw_para.f')).read()

        # includes substituted into the rw_para.f template
        includes=["include \'coupl.inc\'","include \'input.inc\'",
                  "include \'model_functions.inc\'"]
        if self.opt['mp']:
            includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
        # In standalone and madloop we do no use the compiled param card but
        # still parse the .dat one so we must load it.
        if self.opt['loop_induced']:
            #loop induced follow MadEvent way to handle the card.
            load_card = ''
            lha_read_filename='lha_read.f'
        elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read_mp.f'
        elif self.opt['export_format'].startswith('standalone') \
            or self.opt['export_format'] in ['madweight', 'plugin']\
            or self.opt['export_format'].startswith('matchbox'):
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read.f'
        else:
            load_card = ''
            lha_read_filename='lha_read.f'
        cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
                                       os.path.join(self.dir_path,'lha_read.f'))

        file=file%{'includes':'\n      '.join(includes),
                   'load_card':load_card}
        writer=open(os.path.join(self.dir_path,'rw_para.f'),'w')
        writer.writelines(file)
        writer.close()

        # pick the makefile matching the export format
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
                self.dir_path + '/makefile')
            if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
                # aMC@NLO re-brands the MadEvent makefile
                path = pjoin(self.dir_path, 'makefile')
                text = open(path).read()
                text = text.replace('madevent','aMCatNLO')
                open(path, 'w').writelines(text)
        elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                                  'madloop','madloop_optimized', 'standalone_rw',
                                  'madweight','matchbox','madloop_matchbox', 'plugin']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
                self.dir_path + '/makefile')
        #elif self.opt['export_format'] in []:
        #    pass
        else:
            raise MadGraph5Error('Unknown format')
6441
    def create_coupl_inc(self):
        """ write coupl.inc

        Declarations and common blocks for G/gal/MU_R/Nf, every non-zero mass
        and width, all (wanted) couplings, and — in complex-mass scheme — the
        CMASS_* variables. When self.opt['mp'] is set, the multiple-precision
        twins mp_coupl.inc (prefixed names) and mp_coupl_same_name.inc
        (original names) are written as well."""

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # a complex mass only exists for a massive particle with a
                # non-zero width
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                            ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                            ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                    ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                    ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
6561
6562 - def create_write_couplings(self):
6563 """ write the file coupl_write.inc """ 6564 6565 fsock = self.open('coupl_write.inc', format='fortran') 6566 6567 fsock.writelines("""write(*,*) ' Couplings of %s' 6568 write(*,*) ' ---------------------------------' 6569 write(*,*) ' '""" % self.model_name) 6570 def format(coupl): 6571 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
6572 6573 # Write the Couplings 6574 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 6575 fsock.writelines('\n'.join(lines)) 6576 6577
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares the real and complex model parameters (excluding G, MU_R,
        zero, and masses/widths already declared in coupl.inc) in common
        blocks params_R / params_C, plus their multiple-precision twins when
        self.opt['mp'] is set."""

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real'and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                              self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                            self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                           self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                         self.mp_prefix+p for p in complex_parameters])+'\n\n')
6634
6635 - def check_needed_param(self, param):
6636 """ Returns whether the parameter in argument is needed for this 6637 specific computation or not.""" 6638 6639 # If this is a leading order model or if there was no CT parameter 6640 # employed in this NLO model, one can directly return that the 6641 # parameter is needed since only CTParameters are filtered. 6642 if not hasattr(self, 'allCTparameters') or \ 6643 self.allCTparameters is None or self.usedCTparameters is None or \ 6644 len(self.allCTparameters)==0: 6645 return True 6646 6647 # We must allow the conjugate shorthand for the complex parameter as 6648 # well so we check wether either the parameter name or its name with 6649 # 'conjg__' substituted with '' is present in the list. 6650 # This is acceptable even if some parameter had an original name 6651 # including 'conjg__' in it, because at worst we export a parameter 6652 # was not needed. 6653 param = param.lower() 6654 cjg_param = param.replace('conjg__','',1) 6655 6656 # First make sure it is a CTparameter 6657 if param not in self.allCTparameters and \ 6658 cjg_param not in self.allCTparameters: 6659 return True 6660 6661 # Now check if it is in the list of CTparameters actually used 6662 return (param in self.usedCTparameters or \ 6663 cjg_param in self.usedCTparameters)
6664
6665 - def extract_needed_CTparam(self,wanted_couplings=[]):
6666 """ Extract what are the needed CT parameters given the wanted_couplings""" 6667 6668 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6669 # Setting these lists to none wil disable the filtering in 6670 # check_needed_param 6671 self.allCTparameters = None 6672 self.usedCTparameters = None 6673 return 6674 6675 # All CTparameters appearin in all CT couplings 6676 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6677 # Define in this class the list of all CT parameters 6678 self.allCTparameters=list(\ 6679 set(itertools.chain.from_iterable(allCTparameters))) 6680 6681 # All used CT couplings 6682 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6683 allUsedCTCouplings = [coupl for coupl in 6684 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6685 6686 # Now define the list of all CT parameters that are actually used 6687 self.usedCTparameters=list(\ 6688 set(itertools.chain.from_iterable([ 6689 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6690 ]))) 6691 6692 # Now at last, make these list case insensitive 6693 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6694 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6695
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                          ('mp_' if mp and not dp else ''), format='fortran')

        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        # alphaS-independent parameters: computed only when (re)reading the card
        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                              self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        # alphaS-dependent parameters: recomputed for every PS point.
        # NOTE(review): 'elif mp' (vs 'if mp' above) is harmless because the
        # method is only ever called with dp and mp mutually exclusive.
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                              self.mp_p_to_f.parse(param.expr)))

        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                                     gal(2) = 1d0
                                 """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                                     %(mp_prefix)sgal(2) = 1d0
                                 """ %{'mp_prefix':self.mp_prefix})
                pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                                     gal(2) = 1d0
                                 """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                                     %(mp_prefix)sgal(2) = 1d0
                                 """ %{'mp_prefix':self.mp_prefix})
                pass
        else:
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
                                     gal(2) = 1d0
                                 """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                                     %(mp_prefix)sgal(2) = 1e0_16
                                 """%{'mp_prefix':self.mp_prefix})
6779 6780
6781 - def create_couplings(self):
6782 """ create couplings.f and all couplingsX.f """ 6783 6784 nb_def_by_file = 25 6785 6786 self.create_couplings_main(nb_def_by_file) 6787 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6788 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6789 6790 for i in range(nb_coup_indep): 6791 # For the independent couplings, we compute the double and multiple 6792 # precision ones together 6793 data = self.coups_indep[nb_def_by_file * i: 6794 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6795 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6796 6797 for i in range(nb_coup_dep): 6798 # For the dependent couplings, we compute the double and multiple 6799 # precision ones in separate subroutines. 6800 data = self.coups_dep[nb_def_by_file * i: 6801 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6802 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6803 dp=True,mp=False) 6804 if self.opt['mp']: 6805 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6806 dp=False,mp=True)
6807 6808
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Writes the driver subroutines coup(), update_as_param(),
        update_as_param2() and (when mp is enabled) mp_update_as_param(),
        which dispatch to the per-file coupN()/mp_coupN() routines generated
        by create_couplings_part."""

        fsock = self.open('couplings.f', format='fortran')

        # coup(): full initialization, run when (re)reading the param_card
        fsock.writelines("""subroutine coup()

                            implicit none
                            double precision PI, ZERO
                            logical READLHA
                            parameter (PI=3.141592653589793d0)
                            parameter (ZERO=0d0)
                            include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
                        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .true.
                            include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param(): recompute only the aS-dependent quantities
        fsock.writelines("""subroutine update_as_param()

                            implicit none
                            double precision PI, ZERO
                            logical READLHA
                            parameter (PI=3.141592653589793d0)
                            parameter (ZERO=0d0)
                            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .false.""")
        fsock.writelines("""
                            include \'intparam_definition.inc\'\n
                         """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2(mu_r2,as2): set MU_R/G/AS then refresh couplings
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

                            implicit none
                            double precision PI
                            parameter (PI=3.141592653589793d0)
                            double precision mu_r2, as2
                            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'""")
        fsock.writelines("""
                            if (mu_r2.gt.0d0) MU_R = mu_r2
                            G = SQRT(4.0d0*PI*AS2)
                            AS = as2

                            CALL UPDATE_AS_PARAM()
                         """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            # mp_update_as_param(): multiple-precision analogue
            fsock.writelines("""subroutine mp_update_as_param()

                                implicit none
                                logical READLHA
                                include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
                        """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
                                include \'coupl.inc\'
                                include \'actualize_mp_ext_params.inc\'
                                READLHA = .false.
                                include \'mp_intparam_definition.inc\'\n
                             """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6924
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                              nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

                            implicit none
                            include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
                            double precision PI, ZERO
                            parameter (PI=3.141592653589793d0)
                            parameter (ZERO=0d0)
                            include 'input.inc'
                            include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
                        """%self.mp_real_format)

        # One assignment per coupling, translated by the matching parser
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                              self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                          self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
6962
    def create_model_functions_inc(self):
        """ Create model_functions.inc which contains the various declarations
        of auxiliary functions which might be used in the couplings expressions
        """

        additional_fct = []
        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            for fct in ufo_fct:
                # already handle by default
                if str(fct.name) not in ["complexconjugate", "re", "im", "sec",
                       "csc", "asec", "acsc", "theta_function", "cond",
                       "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot",
                       "grreglog","regsqrt"]:
                    additional_fct.append(fct.name)

        # double-precision declarations (built-ins + UFO-defined extras)
        fsock = self.open('model_functions.inc', format='fortran')
        fsock.writelines("""double complex cond
                            double complex condif
                            double complex reglog
                            double complex reglogp
                            double complex reglogm
                            double complex recms
                            double complex arg
                            double complex grreglog
                            double complex regsqrt
                            %s
                         """ % "\n".join(["          double complex %s" % i for i in additional_fct]))


        # multiple-precision twins, prefixed with mp_
        if self.opt['mp']:
            fsock.writelines("""%(complex_mp_format)s mp_cond
                            %(complex_mp_format)s mp_condif
                            %(complex_mp_format)s mp_reglog
                            %(complex_mp_format)s mp_reglogp
                            %(complex_mp_format)s mp_reglogm
                            %(complex_mp_format)s mp_recms
                            %(complex_mp_format)s mp_arg
                            %(complex_mp_format)s mp_grreglog
                            %(complex_mp_format)s mp_regsqrt
                            %(additional)s
                         """ %\
                         {"additional": "\n".join(["          %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]),
                          'complex_mp_format':self.mp_complex_format
                         })
7009
7010 - def create_model_functions_def(self):
7011 """ Create model_functions.f which contains the various definitions 7012 of auxiliary functions which might be used in the couplings expressions 7013 Add the functions.f functions for formfactors support 7014 """ 7015 7016 fsock = self.open('model_functions.f', format='fortran') 7017 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 7018 implicit none 7019 double complex condition,truecase,falsecase 7020 if(condition.eq.(0.0d0,0.0d0)) then 7021 cond=truecase 7022 else 7023 cond=falsecase 7024 endif 7025 end 7026 7027 double complex function condif(condition,truecase,falsecase) 7028 implicit none 7029 logical condition 7030 double complex truecase,falsecase 7031 if(condition) then 7032 condif=truecase 7033 else 7034 condif=falsecase 7035 endif 7036 end 7037 7038 double complex function recms(condition,expr) 7039 implicit none 7040 logical condition 7041 double complex expr 7042 if(condition)then 7043 recms=expr 7044 else 7045 recms=dcmplx(dble(expr)) 7046 endif 7047 end 7048 7049 double complex function reglog(arg) 7050 implicit none 7051 double complex TWOPII 7052 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 7053 double complex arg 7054 if(arg.eq.(0.0d0,0.0d0)) then 7055 reglog=(0.0d0,0.0d0) 7056 else 7057 reglog=log(arg) 7058 endif 7059 end 7060 7061 double complex function reglogp(arg) 7062 implicit none 7063 double complex TWOPII 7064 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 7065 double complex arg 7066 if(arg.eq.(0.0d0,0.0d0))then 7067 reglogp=(0.0d0,0.0d0) 7068 else 7069 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 7070 reglogp=log(arg) + TWOPII 7071 else 7072 reglogp=log(arg) 7073 endif 7074 endif 7075 end 7076 7077 double complex function reglogm(arg) 7078 implicit none 7079 double complex TWOPII 7080 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 7081 double complex arg 7082 if(arg.eq.(0.0d0,0.0d0))then 7083 reglogm=(0.0d0,0.0d0) 7084 else 7085 
if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 7086 reglogm=log(arg) - TWOPII 7087 else 7088 reglogm=log(arg) 7089 endif 7090 endif 7091 end 7092 7093 double complex function regsqrt(arg_in) 7094 implicit none 7095 double complex arg_in 7096 double complex arg 7097 arg=arg_in 7098 if(dabs(dimag(arg)).eq.0.0d0)then 7099 arg=dcmplx(dble(arg),0.0d0) 7100 endif 7101 if(dabs(dble(arg)).eq.0.0d0)then 7102 arg=dcmplx(0.0d0,dimag(arg)) 7103 endif 7104 regsqrt=sqrt(arg) 7105 end 7106 7107 double complex function grreglog(logsw,expr1_in,expr2_in) 7108 implicit none 7109 double complex TWOPII 7110 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 7111 double complex expr1_in,expr2_in 7112 double complex expr1,expr2 7113 double precision logsw 7114 double precision imagexpr 7115 logical firstsheet 7116 expr1=expr1_in 7117 expr2=expr2_in 7118 if(dabs(dimag(expr1)).eq.0.0d0)then 7119 expr1=dcmplx(dble(expr1),0.0d0) 7120 endif 7121 if(dabs(dble(expr1)).eq.0.0d0)then 7122 expr1=dcmplx(0.0d0,dimag(expr1)) 7123 endif 7124 if(dabs(dimag(expr2)).eq.0.0d0)then 7125 expr2=dcmplx(dble(expr2),0.0d0) 7126 endif 7127 if(dabs(dble(expr2)).eq.0.0d0)then 7128 expr2=dcmplx(0.0d0,dimag(expr2)) 7129 endif 7130 if(expr1.eq.(0.0d0,0.0d0))then 7131 grreglog=(0.0d0,0.0d0) 7132 else 7133 imagexpr=dimag(expr1)*dimag(expr2) 7134 firstsheet=imagexpr.ge.0.0d0 7135 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 7136 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 7137 if(firstsheet)then 7138 grreglog=log(expr1) 7139 else 7140 if(dimag(expr1).gt.0.0d0)then 7141 grreglog=log(expr1) - logsw*TWOPII 7142 else 7143 grreglog=log(expr1) + logsw*TWOPII 7144 endif 7145 endif 7146 endif 7147 end 7148 7149 double complex function arg(comnum) 7150 implicit none 7151 double complex comnum 7152 double complex iim 7153 iim = (0.0d0,1.0d0) 7154 if(comnum.eq.(0.0d0,0.0d0)) then 7155 arg=(0.0d0,0.0d0) 7156 else 7157 arg=log(comnum/abs(comnum))/iim 7158 endif 7159 end""") 7160 if self.opt['mp']: 7161 
fsock.writelines(""" 7162 7163 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 7164 implicit none 7165 %(complex_mp_format)s condition,truecase,falsecase 7166 if(condition.eq.(0.0e0_16,0.0e0_16)) then 7167 mp_cond=truecase 7168 else 7169 mp_cond=falsecase 7170 endif 7171 end 7172 7173 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 7174 implicit none 7175 logical condition 7176 %(complex_mp_format)s truecase,falsecase 7177 if(condition) then 7178 mp_condif=truecase 7179 else 7180 mp_condif=falsecase 7181 endif 7182 end 7183 7184 %(complex_mp_format)s function mp_recms(condition,expr) 7185 implicit none 7186 logical condition 7187 %(complex_mp_format)s expr 7188 if(condition)then 7189 mp_recms=expr 7190 else 7191 mp_recms=cmplx(real(expr),kind=16) 7192 endif 7193 end 7194 7195 %(complex_mp_format)s function mp_reglog(arg) 7196 implicit none 7197 %(complex_mp_format)s TWOPII 7198 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 7199 %(complex_mp_format)s arg 7200 if(arg.eq.(0.0e0_16,0.0e0_16)) then 7201 mp_reglog=(0.0e0_16,0.0e0_16) 7202 else 7203 mp_reglog=log(arg) 7204 endif 7205 end 7206 7207 %(complex_mp_format)s function mp_reglogp(arg) 7208 implicit none 7209 %(complex_mp_format)s TWOPII 7210 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 7211 %(complex_mp_format)s arg 7212 if(arg.eq.(0.0e0_16,0.0e0_16))then 7213 mp_reglogp=(0.0e0_16,0.0e0_16) 7214 else 7215 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 7216 mp_reglogp=log(arg) + TWOPII 7217 else 7218 mp_reglogp=log(arg) 7219 endif 7220 endif 7221 end 7222 7223 %(complex_mp_format)s function mp_reglogm(arg) 7224 implicit none 7225 %(complex_mp_format)s TWOPII 7226 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 7227 %(complex_mp_format)s arg 7228 if(arg.eq.(0.0e0_16,0.0e0_16))then 7229 mp_reglogm=(0.0e0_16,0.0e0_16) 
7230 else 7231 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 7232 mp_reglogm=log(arg) - TWOPII 7233 else 7234 mp_reglogm=log(arg) 7235 endif 7236 endif 7237 end 7238 7239 %(complex_mp_format)s function mp_regsqrt(arg_in) 7240 implicit none 7241 %(complex_mp_format)s arg_in 7242 %(complex_mp_format)s arg 7243 arg=arg_in 7244 if(abs(imagpart(arg)).eq.0.0e0_16)then 7245 arg=cmplx(real(arg,kind=16),0.0e0_16) 7246 endif 7247 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 7248 arg=cmplx(0.0e0_16,imagpart(arg)) 7249 endif 7250 mp_regsqrt=sqrt(arg) 7251 end 7252 7253 7254 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 7255 implicit none 7256 %(complex_mp_format)s TWOPII 7257 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 7258 %(complex_mp_format)s expr1_in,expr2_in 7259 %(complex_mp_format)s expr1,expr2 7260 %(real_mp_format)s logsw 7261 %(real_mp_format)s imagexpr 7262 logical firstsheet 7263 expr1=expr1_in 7264 expr2=expr2_in 7265 if(abs(imagpart(expr1)).eq.0.0e0_16)then 7266 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 7267 endif 7268 if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 7269 expr1=cmplx(0.0e0_16,imagpart(expr1)) 7270 endif 7271 if(abs(imagpart(expr2)).eq.0.0e0_16)then 7272 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 7273 endif 7274 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 7275 expr2=cmplx(0.0e0_16,imagpart(expr2)) 7276 endif 7277 if(expr1.eq.(0.0e0_16,0.0e0_16))then 7278 mp_grreglog=(0.0e0_16,0.0e0_16) 7279 else 7280 imagexpr=imagpart(expr1)*imagpart(expr2) 7281 firstsheet=imagexpr.ge.0.0e0_16 7282 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 7283 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 7284 if(firstsheet)then 7285 mp_grreglog=log(expr1) 7286 else 7287 if(imagpart(expr1).gt.0.0e0_16)then 7288 mp_grreglog=log(expr1) - logsw*TWOPII 7289 else 7290 mp_grreglog=log(expr1) + logsw*TWOPII 7291 endif 7292 endif 7293 endif 7294 end 7295 7296 
%(complex_mp_format)s function mp_arg(comnum) 7297 implicit none 7298 %(complex_mp_format)s comnum 7299 %(complex_mp_format)s imm 7300 imm = (0.0e0_16,1.0e0_16) 7301 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 7302 mp_arg=(0.0e0_16,0.0e0_16) 7303 else 7304 mp_arg=log(comnum/abs(comnum))/imm 7305 endif 7306 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 7307 7308 7309 #check for the file functions.f 7310 model_path = self.model.get('modelpath') 7311 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 7312 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 7313 input = pjoin(model_path,'Fortran','functions.f') 7314 fsock.writelines(open(input).read()) 7315 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 7316 7317 # check for functions define in the UFO model 7318 ufo_fct = self.model.get('functions') 7319 if ufo_fct: 7320 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 7321 done = [] 7322 for fct in ufo_fct: 7323 # already handle by default 7324 if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 7325 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 7326 "grreglog","regsqrt"] + done: 7327 done.append(str(fct.name.lower())) 7328 ufo_fct_template = """ 7329 double complex function %(name)s(%(args)s) 7330 implicit none 7331 double complex %(args)s 7332 %(definitions)s 7333 %(name)s = %(fct)s 7334 7335 return 7336 end 7337 """ 7338 str_fct = self.p_to_f.parse(fct.expr) 7339 if not self.p_to_f.to_define: 7340 definitions = [] 7341 else: 7342 definitions=[] 7343 for d in self.p_to_f.to_define: 7344 if d == 'pi': 7345 definitions.append(' double precision pi') 7346 definitions.append(' data pi /3.1415926535897932d0/') 7347 else: 7348 definitions.append(' double complex %s' % d) 7349 7350 text = ufo_fct_template % { 7351 'name': fct.name, 7352 'args': ", ".join(fct.arguments), 7353 'fct': str_fct, 7354 'definitions': 
'\n'.join(definitions) 7355 } 7356 7357 fsock.writelines(text) 7358 if self.opt['mp']: 7359 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 7360 for fct in ufo_fct: 7361 # already handle by default 7362 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 7363 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 7364 "grreglog","regsqrt"]: 7365 ufo_fct_template = """ 7366 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 7367 implicit none 7368 %(complex_mp_format)s mp__%(args)s 7369 %(definitions)s 7370 mp_%(name)s = %(fct)s 7371 7372 return 7373 end 7374 """ 7375 str_fct = self.mp_p_to_f.parse(fct.expr) 7376 if not self.mp_p_to_f.to_define: 7377 definitions = [] 7378 else: 7379 definitions=[] 7380 for d in self.mp_p_to_f.to_define: 7381 if d == 'pi': 7382 definitions.append(' %s mp__pi' % self.mp_real_format) 7383 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 7384 else: 7385 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 7386 text = ufo_fct_template % { 7387 'name': fct.name, 7388 'args': ", mp__".join(fct.arguments), 7389 'fct': str_fct, 7390 'definitions': '\n'.join(definitions), 7391 'complex_mp_format': self.mp_complex_format 7392 } 7393 fsock.writelines(text) 7394 7395 7396 7397 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
7398 7399 7400
7401 - def create_makeinc(self):
7402 """create makeinc.inc containing the file to compile """ 7403 7404 fsock = self.open('makeinc.inc', comment='#') 7405 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 7406 text += ' model_functions.o ' 7407 7408 nb_coup_indep = 1 + len(self.coups_dep) // 25 7409 nb_coup_dep = 1 + len(self.coups_indep) // 25 7410 couplings_files=['couplings%s.o' % (i+1) \ 7411 for i in range(nb_coup_dep + nb_coup_indep) ] 7412 if self.opt['mp']: 7413 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 7414 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 7415 text += ' '.join(couplings_files) 7416 fsock.writelines(text)
7417
7418 - def create_param_write(self):
7419 """ create param_write """ 7420 7421 fsock = self.open('param_write.inc', format='fortran') 7422 7423 fsock.writelines("""write(*,*) ' External Params' 7424 write(*,*) ' ---------------------------------' 7425 write(*,*) ' '""") 7426 def format(name): 7427 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
7428 7429 # Write the external parameter 7430 lines = [format(param.name) for param in self.params_ext] 7431 fsock.writelines('\n'.join(lines)) 7432 7433 fsock.writelines("""write(*,*) ' Internal Params' 7434 write(*,*) ' ---------------------------------' 7435 write(*,*) ' '""") 7436 lines = [format(data.name) for data in self.params_indep 7437 if data.name != 'ZERO' and self.check_needed_param(data.name)] 7438 fsock.writelines('\n'.join(lines)) 7439 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 7440 write(*,*) ' ----------------------------------------' 7441 write(*,*) ' '""") 7442 lines = [format(data.name) for data in self.params_dep \ 7443 if self.check_needed_param(data.name)] 7444 7445 fsock.writelines('\n'.join(lines)) 7446 7447 7448
7449 - def create_ident_card(self):
7450 """ create the ident_card.dat """ 7451 7452 def format(parameter): 7453 """return the line for the ident_card corresponding to this parameter""" 7454 colum = [parameter.lhablock.lower()] + \ 7455 [str(value) for value in parameter.lhacode] + \ 7456 [parameter.name] 7457 if not parameter.name: 7458 return '' 7459 return ' '.join(colum)+'\n'
7460 7461 fsock = self.open('ident_card.dat') 7462 7463 external_param = [format(param) for param in self.params_ext] 7464 fsock.writelines('\n'.join(external_param)) 7465
7466 - def create_actualize_mp_ext_param_inc(self):
7467 """ create the actualize_mp_ext_params.inc code """ 7468 7469 # In principle one should actualize all external, but for now, it is 7470 # hardcoded that only AS and MU_R can by dynamically changed by the user 7471 # so that we only update those ones. 7472 # Of course, to be on the safe side, one could decide to update all 7473 # external parameters. 7474 update_params_list=[p for p in self.params_ext if p.name in 7475 self.PS_dependent_key] 7476 7477 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 7478 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 7479 for param in update_params_list] 7480 # When read_lha is false, it is G which is taken in input and not AS, so 7481 # this is what should be reset here too. 7482 if 'aS' in [param.name for param in update_params_list]: 7483 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 7484 7485 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 7486 fsock.writelines('\n'.join(res_strings))
7487
7488 - def create_param_read(self):
7489 """create param_read""" 7490 7491 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 7492 or self.opt['loop_induced']: 7493 fsock = self.open('param_read.inc', format='fortran') 7494 fsock.writelines(' include \'../param_card.inc\'') 7495 return 7496 7497 def format_line(parameter): 7498 """return the line for the ident_card corresponding to this 7499 parameter""" 7500 template = \ 7501 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 7502 % {'name': parameter.name, 7503 'value': self.p_to_f.parse(str(parameter.value.real))} 7504 if self.opt['mp']: 7505 template = template+ \ 7506 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 7507 "%(mp_prefix)s%(name)s,%(value)s)") \ 7508 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 7509 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 7510 7511 if parameter.lhablock.lower() == 'loop': 7512 template = template.replace('LHA_get_real', 'LHA_get_real_silent') 7513 7514 return template 7515 7516 fsock = self.open('param_read.inc', format='fortran') 7517 res_strings = [format_line(param) \ 7518 for param in self.params_ext] 7519 7520 # Correct width sign for Majorana particles (where the width 7521 # and mass need to have the same sign) 7522 for particle in self.model.get('particles'): 7523 if particle.is_fermion() and particle.get('self_antipart') and \ 7524 particle.get('width').lower() != 'zero': 7525 7526 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 7527 {'width': particle.get('width'), 'mass': particle.get('mass')}) 7528 if self.opt['mp']: 7529 res_strings.append(\ 7530 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 7531 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 7532 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 7533 7534 fsock.writelines('\n'.join(res_strings)) 7535 7536 7537 @staticmethod
    def create_param_card_static(model, output_path, rule_card_path=False,
                                 mssm_convert=True, write_special=True):
        """Create the param_card.dat for a given model (static method).

        model: the model object; if it carries a restrict_card, the matching
            pre-made paramcard_<name>.dat is copied instead of being rewritten.
        output_path: destination path of the param_card.dat.
        rule_card_path: if set, the model's rule card is written there.
        mssm_convert: if True and the model is (a flavour of) mssm, the card
            is converted to the SLHA1 convention.
        write_special: forwarded to ParamCardWriter when the card is generated.
        """
        #1. Check if a default param_card is present:
        done = False
        if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str):
            # Strip the leading 'restrict_' prefix and the '.dat' suffix —
            # assumes the restriction card follows that naming scheme
            # (TODO confirm against the restriction-card writer).
            restrict_name = os.path.basename(model.restrict_card)[9:-4]
            model_path = model.get('modelpath')
            if os.path.exists(pjoin(model_path, 'paramcard_%s.dat' % restrict_name)):
                # A ready-made card ships with the model: just copy it.
                done = True
                files.cp(pjoin(model_path, 'paramcard_%s.dat' % restrict_name),
                         output_path)
        if not done:
            # No pre-made card found: generate one from the model content.
            param_writer.ParamCardWriter(model, output_path, write_special=write_special)

        if rule_card_path:
            if hasattr(model, 'rule_card'):
                model.rule_card.write_file(rule_card_path)

        if mssm_convert:
            model_name = model.get('name')
            # If MSSM, convert the card to SLHA1.
            if model_name == 'mssm' or model_name.startswith('mssm-'):
                import models.check_param_card as translator
                # Check the format of the param_card for Pythia and make it correct
                if rule_card_path:
                    translator.make_valid_param_card(output_path, rule_card_path)
                translator.convert_to_slha1(output_path)
7566
7567 - def create_param_card(self, write_special=True):
7568 """ create the param_card.dat """ 7569 7570 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 7571 if not hasattr(self.model, 'rule_card'): 7572 rule_card=False 7573 write_special = True 7574 if 'exporter' in self.opt: 7575 import madgraph.loop.loop_exporters as loop_exporters 7576 import madgraph.iolibs.export_fks as export_fks 7577 write_special = False 7578 if issubclass(self.opt['exporter'], loop_exporters.LoopProcessExporterFortranSA): 7579 write_special = True 7580 if issubclass(self.opt['exporter'],(loop_exporters.LoopInducedExporterME,export_fks.ProcessExporterFortranFKS)): 7581 write_special = False 7582 7583 self.create_param_card_static(self.model, 7584 output_path=pjoin(self.dir_path, 'param_card.dat'), 7585 rule_card_path=rule_card, 7586 mssm_convert=True, 7587 write_special=write_special)
7588
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True,
                    cmd_options=None):
    """Determine which Export_v4 class is required and instantiate it.

    cmd: the command interface containing all potentially useful information.
    noclean: if True, the output directory is not cleaned first.
    output_type: the context the output is called from — 'madloop' (or
        'madloop_matchbox') for MadLoop5, 'amcatnlo' for FKS5 output and
        'default' for tree-level outputs.
    group_subprocesses: whether subprocesses are grouped (tree-level only).
    cmd_options: extra output options forwarded to the exporter (defaults to
        an empty dict; a None default avoids the shared-mutable-default trap).

    Fixes in this revision: the final error message now actually interpolates
    the offending output_type (the '%s' placeholder was previously left
    unfilled) and the mutable default for cmd_options has been removed.
    """
    if cmd_options is None:
        cmd_options = {}

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
                len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
        'complex_mass': cmd.options['complex_mass_scheme'],
        'export_format': 'madloop',
        'mp': True,
        'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
        'cuttools_dir': cmd._cuttools_dir,
        'iregi_dir': cmd._iregi_dir,
        'golem_dir': cmd.options['golem'],
        'samurai_dir': cmd.options['samurai'],
        'ninja_dir': cmd.options['ninja'],
        'collier_dir': cmd.options['collier'],
        'fortran_compiler': cmd.options['fortran_compiler'],
        'f2py_compiler': cmd.options['f2py_compiler'],
        'output_dependencies': cmd.options['output_dependencies'],
        'SubProc_prefix': 'P',
        'compute_color_flows': cmd.options['loop_color_flows'],
        'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
        'cluster_local_path': cmd.options['cluster_local_path'],
        'output_options': cmd_options
        }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception("output_type not recognize %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory' + \
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # Quad precision is only needed when virtual amplitudes are present.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        export_format = cmd._export_format  # shortcut (avoids shadowing format())

        if export_format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif export_format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if export_format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if export_format == 'matrix' or export_format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt,
                                            format=export_format)
        elif export_format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif export_format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif export_format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif export_format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception('Wrong export_v4 format')
    else:
        # Bug fix: the '%s' placeholder was previously never interpolated.
        raise MadGraph5Error('Output type %s not recognized in ExportV4Factory.'
                             % output_type)
7759
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used for the grouped MadWeight matrix_N.f files.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Marks this exporter as operating in grouped-subprocess MadWeight mode.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        subproc_group: the group_subprocs.SubProcessGroup to export.
        fortran_model: the Fortran helas-call writer used for matrix_N.f.
        group_number: group index (unused in the body; kept for interface
            compatibility with the other exporters).
        Returns the total number of helas calls written.
        """

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup")

        # Lazily pick up the model from the first process if not set yet.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + subprocdir)

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0    # largest number of diagrams over the group
        maxflows = 0   # largest number of color flows over the group
        tot_calls = 0  # total helas calls written

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             str(ime+1),
                                             subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                  get('diagrams'),
                                              filename,
                                              model = \
                                                  matrix_element.get('processes')[0].\
                                                  get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Link the shared per-process sources into the Pn directory.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7921 7922 7923 #=========================================================================== 7924 # Helper functions 7925 #===========================================================================
def modify_grouping(self, matrix_element):
    """Allow the grouping to be modified (when grouping is in place).

    Returns a pair:
      - True/False, whether the matrix_element was modified
      - the new (or old) matrix element
    """
    regrouped = matrix_element.split_lepton_grouping()
    return True, regrouped
7933 7934 #=========================================================================== 7935 # write_super_auto_dsig_file 7936 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write the auto_dsig.f file selecting between the subprocesses
    in subprocess group mode.

    If writer is falsy, return the template substitution dictionary
    instead of writing the file."""

    replace_dict = {}

    # Extract version number and date from VERSION file
    replace_dict['info_lines'] = self.get_mg5_info_lines()

    matrix_elements = subproc_group.get('matrix_elements')

    # Extract process info lines for every matrix element of the group
    replace_dict['process_lines'] = '\n'.join(
        [self.get_process_info_lines(me) for me in matrix_elements])

    nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal

    # NOTE(review): factor 2 presumably accounts for mirror processes —
    # confirm against the Fortran template.
    replace_dict['nsprocs'] = 2 * len(matrix_elements)

    # Generate dsig definition line (one DSIGi per subprocess)
    replace_dict["dsig_def_line"] = "DOUBLE PRECISION " + \
        ",".join(["DSIG%d" % (iproc + 1)
                  for iproc in range(len(matrix_elements))])

    # Generate one dispatch line per subprocess
    call_dsig_proc_lines = []
    for iproc in range(len(matrix_elements)):
        call_dsig_proc_lines.append(
            "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
            {"num": iproc + 1,
             "proc": matrix_elements[iproc].get('processes')[0].base_string()})
    replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

    if writer:
        # Read the template under a context manager so the handle is
        # closed deterministically (the original leaked it), and avoid
        # shadowing the `file` builtin.
        template_path = os.path.join(_file_path, \
            'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')
        with open(template_path) as template:
            text = template.read() % replace_dict
        # Write the file
        writer.writelines(text)
    else:
        return replace_dict
7982 7983 #=========================================================================== 7984 # write_mirrorprocs 7985 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write the mirrorprocs.inc file flagging which processes have an
    initial-state mirror process, in subprocess group mode."""

    matrix_elements = subproc_group.get('matrix_elements')
    # Fortran logical literal for each matrix element's mirror flag
    flags = ",".join('.true.' if me.get('has_mirror_process') else '.false.'
                     for me in matrix_elements)
    # Write the single DATA statement
    writer.writelines(["DATA (MIRRORPROCS(I),I=1,%d)/%s/" %
                       (len(matrix_elements), flags)])
7999 8000 #=========================================================================== 8001 # write_configs_file 8002 #===========================================================================
def write_configs_file(self, writer, subproc_group, diagrams_for_config):
    """Write the configs.inc file with topology information for a
    subprocess group. Use the first subprocess with a diagram for each
    configuration."""

    matrix_elements = subproc_group.get('matrix_elements')
    model = matrix_elements[0].get('processes')[0].get('model')

    diagrams = []
    config_numbers = []
    for iconfig, config in enumerate(diagrams_for_config):
        # Skip configurations with no contributing diagram in any subprocess
        if set(config) == set([0]):
            continue
        # For each subprocess, pick its diagram for this config (1-based
        # index) or None when the subprocess does not contribute.
        row = [matrix_elements[isub].get('diagrams')[idiag - 1] if idiag
               else None
               for isub, idiag in enumerate(config)]
        diagrams.append(row)
        config_numbers.append(iconfig + 1)

    # Extract number of external particles
    (nexternal, ninitial) = subproc_group.get_nexternal_ninitial()

    return len(diagrams), \
           self.write_configs_file_from_diagrams(writer, diagrams,
                                                 config_numbers,
                                                 nexternal, ninitial,
                                                 matrix_elements[0], model)
8035 8036 #=========================================================================== 8037 # write_run_configs_file 8038 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Reads the madweight template, substitutes the number of channels
    per job, and writes the result through `writer`. Returns True."""

    path = os.path.join(_file_path, 'iolibs', 'template_files',
                        'madweight_run_config.inc')
    # Use a context manager so the template file handle is closed
    # deterministically (the original `open(path).read()` leaked it).
    with open(path) as template:
        text = template.read() % {'chanperjob': '2'}
    writer.write(text)
    return True
8046 8047 8048 #=========================================================================== 8049 # write_leshouche_file 8050 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Write the leshouche.inc file for MG4."""

    # Collect the leshouche lines of every matrix element in the group,
    # flattened in subprocess order.
    all_lines = [line
                 for iproc, matrix_element in
                 enumerate(subproc_group.get('matrix_elements'))
                 for line in self.get_leshouche_lines(matrix_element,
                                                      iproc)]

    # Write the file
    writer.writelines(all_lines)

    return True
8065