Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import, division 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  from fractions import Fraction 
  20  """Methods and classes to export matrix elements to v4 format.""" 
  21   
  22  import copy 
  23  from six import StringIO 
  24  import itertools 
  25  import fractions 
  26  import glob 
  27  import logging 
  28  import math 
  29  import os 
  30  import io 
  31  import re 
  32  import shutil 
  33  import subprocess 
  34  import sys 
  35  import time 
  36  import traceback 
  37  import  collections 
  38   
  39  import aloha 
  40   
  41  import madgraph.core.base_objects as base_objects 
  42  import madgraph.core.color_algebra as color 
  43  import madgraph.core.helas_objects as helas_objects 
  44  import madgraph.iolibs.drawing_eps as draw 
  45  import madgraph.iolibs.files as files 
  46  import madgraph.iolibs.group_subprocs as group_subprocs 
  47  import madgraph.iolibs.file_writers as writers 
  48  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  49  import madgraph.iolibs.template_files as template_files 
  50  import madgraph.iolibs.ufo_expression_parsers as parsers 
  51  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  52  import madgraph.interface.common_run_interface as common_run_interface 
  53  import madgraph.various.diagram_symmetry as diagram_symmetry 
  54  import madgraph.various.misc as misc 
  55  import madgraph.various.banner as banner_mod 
  56  import madgraph.various.process_checks as process_checks 
  57  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  58  import aloha.create_aloha as create_aloha 
  59  import models.import_ufo as import_ufo 
  60  import models.write_param_card as param_writer 
  61  import models.check_param_card as check_param_card 
  62   
  63   
  64  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  65  from madgraph.iolibs.files import cp, ln, mv 
  66   
  67  from madgraph import InvalidCmd 
  68   
# Shorthand used throughout this module.
pjoin = os.path.join

# Root of the madgraph installation (parent directory of iolibs/), with a
# trailing slash so template paths can be appended directly.
_file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
logger = logging.getLogger('madgraph.export_v4')

# Fallback compilers used when the user configuration does not specify one.
default_compiler= {'fortran': 'gfortran',
                       'f2py': 'f2py',
                       'cpp':'g++'}
class VirtualExporter(object):
    """Base interface through which madgraph drives an exporter.

    Daughter classes override the hooks below; the class-level flags modify
    the way madgraph interacts with this class.
    """

    #exporter variable who modified the way madgraph interacts with this class

    grouped_mode = 'madevent'
    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    # False to avoid grouping (only identical matrix element are merged)
    # 'madevent' group the massless quark and massless lepton
    # 'madweight' group the gluon with the massless quark
    sa_symmetry = False
    # If no grouped_mode=False, uu~ and u~u will be called independently.
    # Putting sa_symmetry generates only one of the two matrix-element.
    check = True
    # Ask madgraph to check if the directory already exists and propose to the
    # user to remove it first if this is the case
    output = 'Template'
    # [Template, None, dir]
    # - Template, madgraph will call copy_template
    # - dir, madgraph will just create an empty directory for initialisation
    # - None, madgraph do nothing for initialisation
    exporter = 'v4'
    # language of the output 'v4' for Fortran output
    # 'cpp' for C++ output

    def __init__(self, dir_path = "", opt=None):
        # cmd_options is a dictionary with all the optional argument passed at output time

        # Activate some monkey patching for the helas call writer.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    # helper function for customise helas writer
    @staticmethod
    def custom_helas_call(call, arg):
        """static method to customise the way aloha function call are written
        call is the default template for the call
        arg are the dictionary used for the call
        """
        return call, arg

    # Bound indirection so daughter classes can override custom_helas_call.
    helas_call_writer_custom = lambda x,y,z: x.custom_helas_call(y,z)

    def copy_template(self, model):
        # Hook: populate the output directory from a template (no-op here).
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # generate_subprocess_directory(self, matrix_element, helicity_model, me_number) [for ungrouped]
        return 0 # return an integer stating the number of call to helicity routine

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        # Hook: write the model files for the output format (no-op here).
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        # Hook: last step of the output process (no-op here).
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        # Hook: allow the exporter to regroup matrix elements; returns
        # (modified_flag, matrix_element).
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # v4 (non-UFO) models are not supported by this generic exporter.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
        return

    def export_helas(self, HELAS_PATH):
        # v4 HELAS export is likewise unsupported at this level.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
        return
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Default options; __init__ copies this dict and overlays user options.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{}
                   }
    grouped_mode = False  # see VirtualExporter.grouped_mode for the semantics
    jamp_optim = False
    def __init__(self, dir_path = "", opt=None):
        """Initiate the ProcessExporterFortran with directory information

        dir_path: target directory of the process output.
        opt: optional dict overriding entries of default_opt.
        """
        self.mgme_dir = MG5DIR
        self.dir_path = dir_path
        self.model = None

        # Copy the class defaults so instances never mutate default_opt.
        self.opt = dict(self.default_opt)
        if opt:
            self.opt.update(opt)
        self.cmd_options = self.opt['output_options']

        #place holder to pass information to the run_interface
        self.proc_characteristic = banner_mod.ProcCharacteristic()
        # call mother class
        super(ProcessExporterFortran,self).__init__(dir_path, opt)
186 187 188 #=========================================================================== 189 # process exporter fortran switch between group and not grouped 190 #===========================================================================
191 - def export_processes(self, matrix_elements, fortran_model):
192 """Make the switch between grouped and not grouped output""" 193 194 calls = 0 195 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 196 for (group_number, me_group) in enumerate(matrix_elements): 197 calls = calls + self.generate_subprocess_directory(\ 198 me_group, fortran_model, group_number) 199 else: 200 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 201 calls = calls + self.generate_subprocess_directory(\ 202 me, fortran_model, me_number) 203 204 return calls
205 206 207 #=========================================================================== 208 # create the run_card 209 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write Cards/run_card_default.dat, tailored to the exported
        processes when they are available, and copy it to run_card.dat."""

        # bypass this for the loop-check
        import madgraph.loop.loop_helas_objects as loop_helas_objects
        if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
            matrix_elements = None

        run_card = banner_mod.RunCard()

        default = True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            # grouped output: flatten the per-group matrix elements
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            # no matrix element available (loop-check case): keep the
            # generic run_card defaults
            default = False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                        pjoin(self.dir_path, 'Cards', 'run_card.dat'))
240 241 242 243 #=========================================================================== 244 # copy the Template in a new directory. 245 #===========================================================================
    def copy_template(self, model):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory
        """

        #First copy the full template tree if dir_path doesn't exit
        if not os.path.isdir(self.dir_path):
            assert self.mgme_dir, \
                "No valid MG_ME path given for MG4 run directory creation."
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(self.dir_path))
            shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                            self.dir_path, True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                          self.dir_path)
            # copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif os.getcwd() == os.path.realpath(self.dir_path):
            # Running from inside the output directory itself.
            logger.info('working in local directory: %s' % \
                        os.path.realpath(self.dir_path))
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                          self.dir_path)
            # historical variant kept for reference:
            # for name in misc.glob('Template/LO/*', self.mgme_dir):
            #     name = os.path.basename(name)
            #     filname = pjoin(self.mgme_dir, 'Template','LO',name)
            #     if os.path.isfile(filename):
            #         files.cp(filename, pjoin(self.dir_path,name))
            #     elif os.path.isdir(filename):
            #         shutil.copytree(filename, pjoin(self.dir_path,name), True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                          self.dir_path)
            # Copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
            # Directory exists but was not initialised by us: only stamp it.
            assert self.mgme_dir, \
                "No valid MG_ME path given for MG4 run directory creation."
            try:
                shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
            except IOError:
                MG5_version = misc.get_pkg_info()
                open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version'])

        #Ensure that the Template is clean
        if self.opt['clean']:
            logger.info('remove old information in %s' % \
                        os.path.basename(self.dir_path))
            if 'MADGRAPH_BASE' in os.environ:
                misc.call([pjoin('bin', 'internal', 'clean_template'),
                           '--web'], cwd=self.dir_path)
            else:
                try:
                    misc.call([pjoin('bin', 'internal', 'clean_template')], \
                              cwd=self.dir_path)
                except Exception as why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(self.dir_path),why))

        #Write version info
        MG_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
            MG_version['version'])

        # add the makefile in Source directory
        filename = pjoin(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FileWriter(filename))

        # add the DiscreteSampler information
        files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
                 pjoin(self.dir_path, 'Source'))
        files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'),
                 pjoin(self.dir_path, 'Source'))

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
335 336 337 #=========================================================================== 338 # Call MadAnalysis5 to generate the default cards for this process 339 #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels = ['parton','hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process."""

        if len(levels)==0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        try:
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100)
        except (Exception, SystemExit) as e:
            # Best effort only: a failing MA5 must not abort the export.
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton','hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only); log the traceback at
                    # debug level for later inspection
                    logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl)
                    error=StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-'*60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-'*60)
                else:
                    open(card_to_generate,'w').write(text)
        stop = time.time()
        if stop-start >1:
            logger.info('Cards created in %.2fs' % (stop-start))
380 381 #=========================================================================== 382 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 383 #===========================================================================
384 - def write_procdef_mg5(self, file_pos, modelname, process_str):
385 """ write an equivalent of the MG4 proc_card in order that all the Madevent 386 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 387 388 proc_card_template = template_files.mg4_proc_card.mg4_template 389 process_template = template_files.mg4_proc_card.process_template 390 process_text = '' 391 coupling = '' 392 new_process_content = [] 393 394 395 # First find the coupling and suppress the coupling from process_str 396 #But first ensure that coupling are define whithout spaces: 397 process_str = process_str.replace(' =', '=') 398 process_str = process_str.replace('= ', '=') 399 process_str = process_str.replace(',',' , ') 400 #now loop on the element and treat all the coupling 401 for info in process_str.split(): 402 if '=' in info: 403 coupling += info + '\n' 404 else: 405 new_process_content.append(info) 406 # Recombine the process_str (which is the input process_str without coupling 407 #info) 408 process_str = ' '.join(new_process_content) 409 410 #format the SubProcess 411 replace_dict = {'process': process_str, 412 'coupling': coupling} 413 process_text += process_template.substitute(replace_dict) 414 415 replace_dict = {'process': process_text, 416 'model': modelname, 417 'multiparticle':''} 418 text = proc_card_template.substitute(replace_dict) 419 420 if file_pos: 421 ff = open(file_pos, 'w') 422 ff.write(text) 423 ff.close() 424 else: 425 return replace_dict
426 427
428 - def pass_information_from_cmd(self, cmd):
429 """Pass information for MA5""" 430 431 self.proc_defs = cmd._curr_proc_defs
432 433 #=========================================================================== 434 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 435 #===========================================================================
    def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
        """Function to finalize v4 directory, for inheritance."""
        # Write the default run_card and the MadAnalysis5 cards; daughter
        # classes extend this (html pages, diagrams, tarball, ...).
        self.create_run_card(matrix_elements, history)
        self.create_MA5_cards(matrix_elements, history)
441
    def create_MA5_cards(self,matrix_elements,history):
        """ A wrapper around the creation of the MA5 cards so that it can be
        bypassed by daughter classes (i.e. in standalone)."""
        # Only act when a MadAnalysis5 path is configured and process
        # definitions were provided via pass_information_from_cmd.
        if 'madanalysis5_path' in self.opt and not \
                self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
            processes = None
            if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
                processes = [me.get('processes') for megroup in matrix_elements
                             for me in megroup['matrix_elements']]
            elif matrix_elements:
                processes = [me.get('processes')
                             for me in matrix_elements['matrix_elements']]

            self.create_default_madanalysis5_cards(
                history, self.proc_defs, processes,
                self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
                levels = ['hadron','parton'])

            for level in ['hadron','parton']:
                # Copying these cards turn on the use of MadAnalysis5 by default.
                if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)):
                    shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level),
                                pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
465 466 #=========================================================================== 467 # Create the proc_characteristic file passing information to the run_interface 468 #===========================================================================
    def create_proc_charac(self, matrix_elements=None, history="", **opts):
        """Dump self.proc_characteristic to SubProcesses/proc_characteristics
        so that the run_interface can read the process information back."""
        self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
472 473 #=========================================================================== 474 # write_matrix_element_v4 475 #===========================================================================
476 - def write_matrix_element_v4(self):
477 """Function to write a matrix.f file, for inheritance. 478 """ 479 pass
480 481 #=========================================================================== 482 # write_pdf_opendata 483 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure"""

        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            # Fortran snippet trying a sequence of cluster-local locations for
            # the PDF table before falling back to the default search path.
            # NOTE(review): leading whitespace/label columns reconstructed --
            # confirm against madgraph/iolibs/template_files/pdf_opendata.f
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
              """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            to_add="""
          LHAPath='%(path)s/PDFsets'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='%(path)s/../lhapdf/pdfsets/'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='./PDFsets'
          """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)

        return
541 542 543 544 #=========================================================================== 545 # write_maxparticles_file 546 #===========================================================================
547 - def write_maxparticles_file(self, writer, matrix_elements):
548 """Write the maxparticles.inc file for MadEvent""" 549 550 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 551 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 552 matrix_elements.get('matrix_elements')]) 553 else: 554 maxparticles = max([me.get_nexternal_ninitial()[0] \ 555 for me in matrix_elements]) 556 557 lines = "integer max_particles\n" 558 lines += "parameter(max_particles=%d)" % maxparticles 559 560 # Write the file 561 writer.writelines(lines) 562 563 return True
564 565 566 #=========================================================================== 567 # export the model 568 #===========================================================================
569 - def export_model_files(self, model_path):
570 """Configure the files/link of the process according to the model""" 571 572 # Import the model 573 for file in os.listdir(model_path): 574 if os.path.isfile(pjoin(model_path, file)): 575 shutil.copy2(pjoin(model_path, file), \ 576 pjoin(self.dir_path, 'Source', 'MODEL'))
577 578 592 600 601 602 #=========================================================================== 603 # export the helas routine 604 #===========================================================================
605 - def export_helas(self, helas_path):
606 """Configure the files/link of the process according to the model""" 607 608 # Import helas routine 609 for filename in os.listdir(helas_path): 610 filepos = pjoin(helas_path, filename) 611 if os.path.isfile(filepos): 612 if filepos.endswith('Makefile.template'): 613 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 614 elif filepos.endswith('Makefile'): 615 pass 616 else: 617 cp(filepos, self.dir_path + '/Source/DHELAS')
618 # following lines do the same but whithout symbolic link 619 # 620 #def export_helas(mgme_dir, dir_path): 621 # 622 # # Copy the HELAS directory 623 # helas_dir = pjoin(mgme_dir, 'HELAS') 624 # for filename in os.listdir(helas_dir): 625 # if os.path.isfile(pjoin(helas_dir, filename)): 626 # shutil.copy2(pjoin(helas_dir, filename), 627 # pjoin(dir_path, 'Source', 'DHELAS')) 628 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 629 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 630 # 631 632 #=========================================================================== 633 # generate_subprocess_directory 634 #===========================================================================
635 - def generate_subprocess_directory(self, matrix_element, 636 fortran_model, 637 me_number):
638 """Routine to generate a subprocess directory (for inheritance)""" 639 640 pass
641 642 #=========================================================================== 643 # get_source_libraries_list 644 #===========================================================================
645 - def get_source_libraries_list(self):
646 """ Returns the list of libraries to be compiling when compiling the 647 SOURCE directory. It is different for loop_induced processes and 648 also depends on the value of the 'output_dependencies' option""" 649 650 return ['$(LIBDIR)libdhelas.$(libext)', 651 '$(LIBDIR)libpdf.$(libext)', 652 '$(LIBDIR)libmodel.$(libext)', 653 '$(LIBDIR)libcernlib.$(libext)', 654 '$(LIBDIR)libbias.$(libext)']
655 656 #=========================================================================== 657 # write_source_makefile 658 #===========================================================================
659 - def write_source_makefile(self, writer):
660 """Write the nexternal.inc file for MG4""" 661 662 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 663 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 664 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 665 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 666 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 667 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 668 else: 669 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 670 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 671 672 replace_dict= {'libraries': set_of_lib, 673 'model':model_line, 674 'additional_dsample': '', 675 'additional_dependencies':''} 676 677 if writer: 678 text = open(path).read() % replace_dict 679 writer.write(text) 680 681 return replace_dict
682 683 #=========================================================================== 684 # write_nexternal_madspin 685 #===========================================================================
686 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
687 """Write the nexternal_prod.inc file for madspin""" 688 689 replace_dict = {} 690 691 replace_dict['nexternal'] = nexternal 692 replace_dict['ninitial'] = ninitial 693 694 file = """ \ 695 integer nexternal_prod 696 parameter (nexternal_prod=%(nexternal)d) 697 integer nincoming_prod 698 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 699 700 # Write the file 701 if writer: 702 writer.writelines(file) 703 return True 704 else: 705 return replace_dict
706 707 #=========================================================================== 708 # write_helamp_madspin 709 #===========================================================================
710 - def write_helamp_madspin(self, writer, ncomb):
711 """Write the helamp.inc file for madspin""" 712 713 replace_dict = {} 714 715 replace_dict['ncomb'] = ncomb 716 717 file = """ \ 718 integer ncomb1 719 parameter (ncomb1=%(ncomb)d) 720 double precision helamp(ncomb1) 721 common /to_helamp/helamp """ % replace_dict 722 723 # Write the file 724 if writer: 725 writer.writelines(file) 726 return True 727 else: 728 return replace_dict
729 730 731 732 #=========================================================================== 733 # write_nexternal_file 734 #===========================================================================
735 - def write_nexternal_file(self, writer, nexternal, ninitial):
736 """Write the nexternal.inc file for MG4""" 737 738 replace_dict = {} 739 740 replace_dict['nexternal'] = nexternal 741 replace_dict['ninitial'] = ninitial 742 743 file = """ \ 744 integer nexternal 745 parameter (nexternal=%(nexternal)d) 746 integer nincoming 747 parameter (nincoming=%(ninitial)d)""" % replace_dict 748 749 # Write the file 750 if writer: 751 writer.writelines(file) 752 return True 753 else: 754 return replace_dict
755 #=========================================================================== 756 # write_pmass_file 757 #===========================================================================
758 - def write_pmass_file(self, writer, matrix_element):
759 """Write the pmass.inc file for MG4""" 760 761 model = matrix_element.get('processes')[0].get('model') 762 763 lines = [] 764 for wf in matrix_element.get_external_wavefunctions(): 765 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 766 if mass.lower() != "zero": 767 mass = "abs(%s)" % mass 768 769 lines.append("pmass(%d)=%s" % \ 770 (wf.get('number_external'), mass)) 771 772 # Write the file 773 writer.writelines(lines) 774 775 return True
776 777 #=========================================================================== 778 # write_ngraphs_file 779 #===========================================================================
780 - def write_ngraphs_file(self, writer, nconfigs):
781 """Write the ngraphs.inc file for MG4. Needs input from 782 write_configs_file.""" 783 784 file = " integer n_max_cg\n" 785 file = file + "parameter (n_max_cg=%d)" % nconfigs 786 787 # Write the file 788 writer.writelines(file) 789 790 return True
791 792 #=========================================================================== 793 # write_leshouche_file 794 #===========================================================================
795 - def write_leshouche_file(self, writer, matrix_element):
796 """Write the leshouche.inc file for MG4""" 797 798 # Write the file 799 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 800 801 return True
802 803 #=========================================================================== 804 # get_leshouche_lines 805 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Build the Fortran DATA statements (IDUP/MOTHUP/ICOLUP) for the
        leshouche.inc file of MG4 and return them as a list of lines."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG ids of all external legs (IDUP)
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mother information (MOTHUP): initial legs have no mother,
                # final legs point to the initial ones.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1,nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))
                else:
                    # First build a color representation dictionnary
                    repr_dict = {}
                    for l in legs:
                        # sign flip for initial-state legs
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
855 856 857 858 859 #=========================================================================== 860 # write_maxamps_file 861 #===========================================================================
862 - def write_maxamps_file(self, writer, maxamps, maxflows, 863 maxproc,maxsproc):
864 """Write the maxamps.inc file for MG4.""" 865 866 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 867 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 868 (maxamps, maxflows) 869 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 870 (maxproc, maxsproc) 871 872 # Write the file 873 writer.writelines(file) 874 875 return True
876 877 878 #=========================================================================== 879 # Routines to output UFO models in MG4 format 880 #=========================================================================== 881
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO).

        Writes Source/MODEL (via UFO_model_to_mg4) and Source/DHELAS (via
        aloha), copies the helas makefile/function templates and finally
        creates the symbolic links in the Process directory.  aloha's global
        mp_precision and loop_mode are saved and restored around the work.

        NOTE(review): wanted_lorentz/wanted_couplings use mutable default
        arguments; they are only read here, so this is harmless, but callers
        should not rely on the default objects."""

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']
        self.model = model
        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        self.opt['exporter'] = self.__class__
        # NOTE(review): self.opt + self.proc_characteristic — assumes these
        # option containers support '+' merging (custom dict types); confirm.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            try:
                # Silence the madgraph.models logger while loading the model.
                with misc.MuteLogger(['madgraph.models'], [60]):
                    aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
            except ImportError:
                # Fall back to the full model path if the basename import fails.
                aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # Pick the loop-capable helas functions if any wanted lorentz
        # structure carries an 'L...' tag.
        if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
944 945 946 #=========================================================================== 947 # Helper functions 948 #===========================================================================
949 - def modify_grouping(self, matrix_element):
950 """allow to modify the grouping (if grouping is in place) 951 return two value: 952 - True/False if the matrix_element was modified 953 - the new(or old) matrix element""" 954 955 return False, matrix_element
956 957 #=========================================================================== 958 # Helper functions 959 #===========================================================================
960 - def get_mg5_info_lines(self):
961 """Return info lines for MG5, suitable to place at beginning of 962 Fortran files""" 963 964 info = misc.get_pkg_info() 965 info_lines = "" 966 if info and 'version' in info and 'date' in info: 967 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 968 (info['version'], info['date']) 969 info_lines = info_lines + \ 970 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 971 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 972 else: 973 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 974 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 975 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 976 977 return info_lines
978
979 - def get_process_info_lines(self, matrix_element):
980 """Return info lines describing the processes for this matrix element""" 981 982 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 983 for process in matrix_element.get('processes')])
984 985
986 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
987 """Return the Helicity matrix definition lines for this matrix element""" 988 989 helicity_line_list = [] 990 i = 0 991 for helicities in matrix_element.get_helicity_matrix(): 992 i = i + 1 993 int_list = [i, len(helicities)] 994 int_list.extend(helicities) 995 helicity_line_list.append(\ 996 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 997 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 998 999 return "\n".join(helicity_line_list)
1000
1001 - def get_ic_line(self, matrix_element):
1002 """Return the IC definition line coming after helicities, required by 1003 switchmom in madevent""" 1004 1005 nexternal = matrix_element.get_nexternal_ninitial()[0] 1006 int_list = list(range(1, nexternal + 1)) 1007 1008 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 1009 ",".join([str(i) for \ 1010 i in int_list]))
1011
    def set_chosen_SO_index(self, process, squared_orders):
        """ From the squared order constraints set by the user, this function
        finds what indices of the squared_orders list the user intends to pick.
        It returns this as a string of comma-separated successive '.true.' or
        '.false.' for each index."""

        user_squared_orders = process.get('squared_orders')
        split_orders = process.get('split_orders')

        # No user constraint: every squared-order combination is selected.
        if len(user_squared_orders)==0:
            return ','.join(['.true.']*len(squared_orders))

        res = []
        for sqsos in squared_orders:
            is_a_match = True
            for user_sqso, value in user_squared_orders.items():
                # A combination is rejected as soon as one user constraint
                # fails.  For '==' the order must equal the user value; for
                # '<='/'=' it must not exceed it; for '>' it must be strictly
                # above it.
                if (process.get_squared_order_type(user_sqso) =='==' and \
                    value!=sqsos[split_orders.index(user_sqso)]) or \
                   (process.get_squared_order_type(user_sqso) in ['<=','='] and \
                    value<sqsos[split_orders.index(user_sqso)]) or \
                   (process.get_squared_order_type(user_sqso) == '>' and \
                    value>=sqsos[split_orders.index(user_sqso)]):
                    is_a_match = False
                    break
            res.append('.true.' if is_a_match else '.false.')

        return ','.join(res)
1039
1040 - def get_split_orders_lines(self, orders, array_name, n=5):
1041 """ Return the split orders definition as defined in the list orders and 1042 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1043 1044 ret_list = [] 1045 for index, order in enumerate(orders): 1046 for k in range(0, len(order), n): 1047 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1048 (array_name,index + 1, k + 1, min(k + n, len(order)), 1049 ','.join(["%5r" % i for i in order[k:k + n]]))) 1050 return ret_list
1051
1052 - def format_integer_list(self, list, name, n=5):
1053 """ Return an initialization of the python list in argument following 1054 the fortran syntax using the data keyword assignment, filling an array 1055 of name 'name'. It splits rows in chunks of size n.""" 1056 1057 ret_list = [] 1058 for k in range(0, len(list), n): 1059 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1060 (name, k + 1, min(k + n, len(list)), 1061 ','.join(["%5r" % i for i in list[k:k + n]]))) 1062 return ret_list
1063
    def get_color_data_lines(self, matrix_element, n=6):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Each CF entry is written as a double-precision value
        numerator/denominator; a comment with the repr of the corresponding
        color-basis element follows each row."""

        if not matrix_element.get('color_matrix'):
            # No color matrix: trivial 1x1 identity.
            return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
        else:
            ret_list = []
            my_cs = color.ColorString()
            for index, denominator in \
                enumerate(matrix_element.get('color_matrix').\
                                         get_line_denominators()):
                # First write the common denominator for this color matrix line
                #ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                        get_line_numerators(index, denominator)

                # All numerators are expected to be exact integers.
                assert all([int(i)==i for i in num_list])

                # CF entries are emitted as Fortran doubles ('d' exponent).
                for k in range(0, len(num_list), n):
                    ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                    (index + 1, k + 1, min(k + n, len(num_list)),
                                     ','.join([("%.15e" % (int(i)/denominator)).replace('e','d') for i in num_list[k:k + n]])))

                # Comment identifying the color structure of this row.
                my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
                ret_list.append("C %s" % repr(my_cs))
            return ret_list
1092 1093
1094 - def get_den_factor_line(self, matrix_element):
1095 """Return the denominator factor line for this matrix element""" 1096 1097 return "DATA IDEN/%2r/" % \ 1098 matrix_element.get_denominator_factor()
1099
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        Only leading-color flows (those reaching the maximal Nc power,
        excluding any Nc power coming from a closed loop trace) are marked
        .true..  Side effect: sets self.proc_characteristic['single_color']
        to False whenever a subleading flow is encountered."""

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list


        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Crate dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]
                else:
                    # A subleading-color flow exists for this process.
                    self.proc_characteristic['single_color'] = False

        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                            (iconfig+1, num_matrix_element, colamps,
                             ','.join(["%s" % booldict[b] for b in \
                                       bool_list])))

        return ret_list
1161
    def get_amp2_lines(self, matrix_element, config_map = [], replace_dict=None):
        """Return the amp2(i) = sum(amp for diag(i))^2 lines.

        With a config_map, amplitudes of diagrams sharing the same topology
        are first summed, then squared (AMP2 indexed by the first diagram of
        each config).  Without it, each diagram gets its own |sum AMP|^2
        line, skipping diagrams containing vertices with more legs than the
        minimal vertex size.  replace_dict is accepted for interface
        compatibility but not used here."""

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        # Get minimum legs in a vertex
        vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                     matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        ret_lines = []
        if config_map:
            # In this case, we need to sum up all amplitudes that have
            # identical topologies, as given by the config_map (which
            # gives the topology/config for each of the diagrams
            diagrams = matrix_element.get('diagrams')
            # Combine the diagrams with identical topologies
            config_to_diag_dict = {}
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                if config_map[idiag] == 0:
                    continue
                try:
                    config_to_diag_dict[config_map[idiag]].append(idiag)
                except KeyError:
                    config_to_diag_dict[config_map[idiag]] = [idiag]
            # Write out the AMP2s summing squares of amplitudes belonging
            # to eiher the same diagram or different diagrams with
            # identical propagator properties. Note that we need to use
            # AMP2 number corresponding to the first diagram number used
            # for that AMP2.
            for config in sorted(config_to_diag_dict.keys()):

                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                       {"num": (config_to_diag_dict[config][0] + 1)}

                amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                                sum([diagrams[idiag].get('amplitudes') for \
                                     idiag in config_to_diag_dict[config]], [])])

                # Not using \sum |M|^2 anymore since this creates troubles
                # when ckm is not diagonal due to the JIM mechanism.
                if '+' in amp:
                    amp = "(%s)*dconjg(%s)" % (amp, amp)
                else:
                    amp = "%s*dconjg(%s)" % (amp, amp)

                line = line + "%s" % (amp)
                #line += " * get_channel_cut(p, %s) " % (config)
                ret_lines.append(line)
        else:
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # Ignore any diagrams with 4-particle vertices.
                if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                    continue
                # Now write out the expression for AMP2, meaning the sum of
                # squared amplitudes belonging to the same diagram
                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
                line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                                  {"num": a.get('number')} for a in \
                                  diag.get('amplitudes')])
                ret_lines.append(line)

        return ret_lines
1224 1225 #=========================================================================== 1226 # Returns the data statements initializing the coeffictients for the JAMP 1227 # decomposition. It is used when the JAMP initialization is decided to be 1228 # done through big arrays containing the projection coefficients. 1229 #===========================================================================
1230 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1231 n=50, Nc_value=3):
1232 """This functions return the lines defining the DATA statement setting 1233 the coefficients building the JAMPS out of the AMPS. Split rows in 1234 bunches of size n. 1235 One can specify the color_basis from which the color amplitudes originates 1236 so that there are commentaries telling what color structure each JAMP 1237 corresponds to.""" 1238 1239 if(not isinstance(color_amplitudes,list) or 1240 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1241 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs") 1242 1243 res_list = [] 1244 my_cs = color.ColorString() 1245 for index, coeff_list in enumerate(color_amplitudes): 1246 # Create the list of the complete numerical coefficient. 1247 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1248 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1249 coefficient in coeff_list] 1250 # Create the list of the numbers of the contributing amplitudes. 1251 # Mutliply by -1 for those which have an imaginary coefficient. 1252 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1253 for coefficient in coeff_list] 1254 # Find the common denominator. 
1255 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1256 num_list=[(coefficient*commondenom).numerator \ 1257 for coefficient in coefs_list] 1258 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1259 index+1,len(num_list))) 1260 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1261 index+1,commondenom)) 1262 if color_basis: 1263 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1264 res_list.append("C %s" % repr(my_cs)) 1265 for k in range(0, len(num_list), n): 1266 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1267 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1268 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1269 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1270 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1271 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1272 pass 1273 return res_list
1274 1275
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                                   split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code.

        Returns (lines, max_tmp) where max_tmp is the largest number of
        TMP_JAMP contractions reported by the underlying get_JAMP_lines
        calls."""

        # Let the user call get_JAMP_lines_split_order directly from a
        # NOTE: only the second %s is filled in here; the first one is filled
        # in later via error_msg % '<argname>'.
        error_msg="Malformed '%s' argument passed to the "+\
                  "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error(error_msg%'col_amps')
        else:
            raise MadGraph5Error(error_msg%'col_amps')

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error(error_msg%'split_order_amps')
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error(error_msg%'split_order_amps')
        else:
            raise MadGraph5Error(error_msg%'split_order_amps')

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error(error_msg%'split_order_names')
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error(error_msg%'split_order_names')
            else:
                raise MadGraph5Error(error_msg%'split_order_names')

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        max_tmp = 0
        for i, amp_order in enumerate(split_order_amps):
            # Keep only the color amplitudes whose amplitude number belongs
            # to this order group.
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]])
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                    ['%s=%i'%order for order in zip(split_order_names,
                                                    amp_order[0])]))
            if self.opt['export_format'] in ['madloop_matchbox']:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))[0])
            else:
                toadd, nb_tmp = self.get_JAMP_lines(col_amps_order,
                                JAMP_format="JAMP(%s,{0})".format(str(i+1)))
                res_list.extend(toadd)
                max_tmp = max(max_tmp, nb_tmp)

        return res_list, max_tmp
1352 1353
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split argument defines how the JAMP lines should be split in order
        not to be too long.

        Returns (lines, n_defs): the Fortran lines and the number of
        TMP_JAMP contraction definitions introduced by the optional
        color-flow optimization (0 when the optimization is disabled)."""

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")
        else:
            raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")

        # all_element maps (jamp_number, amp_number) -> complex coefficient;
        # it is used by the optional optimization pass below.
        all_element = {}
        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation chunks start with "JAMP(i)=JAMP(i)...".
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    # Full numerical coefficient (Nc fixed to 3 here).
                    value = (1j if coefficient[2] else 1)* coefficient[0] * coefficient[1] * fractions.Fraction(3)**coefficient[3]
                    if (i+1, amp_number) not in all_element:
                        all_element[(i+1, amp_number)] = value
                    else:
                        all_element[(i+1, amp_number)] += value
                    if common_factor:
                        res = (res + "%s" + AMP_format) % \
                              (self.coeff(coefficient[0],
                                          coefficient[1] / abs(coefficient[1]),
                                          coefficient[2],
                                          coefficient[3]),
                               str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                      coefficient[1],
                                                                      coefficient[2],
                                                                      coefficient[3]),
                                                           str(amp_number))

                if common_factor:
                    res = res + ')'

                res_list.append(res)

        # Decide whether to run the color-flow optimization (command-line
        # option overrides the class default).
        if 'jamp_optim' in self.cmd_options:
            jamp_optim = banner_mod.ConfigFile.format_variable(self.cmd_options['jamp_optim'], bool, 'jamp_optim')
        else:
            # class default
            jamp_optim = self.jamp_optim

        if not jamp_optim:
            return res_list, 0
        else:
            saved = list(res_list)

        if len(all_element) > 1000:
            logger.info("Computing Color-Flow optimization [%s term]", len(all_element))
            start_time = time.time()
        else:
            start_time = 0

        res_list = []

        self.myjamp_count = 0
        new_mat, defs = self.optimise_jamp(all_element)
        if start_time:
            logger.info("Color-Flow passed to %s term in %ss. Introduce %i contraction", len(new_mat), int(time.time()-start_time), len(defs))

        def format(frac):
            # Render a coefficient as a Fortran double-precision literal.
            if isinstance(frac, Fraction):
                if frac.denominator == 1:
                    return str(frac.numerator)
                else:
                    return "%id0/%id0" % (frac.numerator, frac.denominator)
            elif frac.real == frac:
                return str(float(frac.real)).replace('e','d')
            else:
                return str(frac).replace('e','d').replace('j','*imag1')

        # Emit the TMP_JAMP contraction definitions; negative indices refer
        # to previously defined TMP_JAMP entries, positive ones to AMPs.
        for i, amp1, amp2, frac, nb in defs:
            if amp1 > 0:
                amp1 = AMP_format % amp1
            else:
                amp1 = "TMP_JAMP(%d)" % -amp1
            if amp2 > 0:
                amp2 = AMP_format % amp2
            else:
                amp2 = "TMP_JAMP(%d)" % -amp2

            res_list.append(' TMP_JAMP(%d) = %s + (%s) * %s ! used %d times' % (i,amp1, format(frac), amp2, nb))

        # Assemble each JAMP from the remaining matrix entries.
        jamp_res = collections.defaultdict(list)
        max_jamp=0
        for (jamp, var), factor in new_mat.items():
            if var > 0:
                name = AMP_format % var
            else:
                name = "TMP_JAMP(%d)" % -var
            jamp_res[jamp].append("(%s)*%s" % (format(factor), name))
            max_jamp = max(max_jamp, jamp)

        for i in range(1,max_jamp+1):
            name = JAMP_format % i
            if not jamp_res[i]:
                res_list.append(" %s = 0d0" %(name))
            else:
                res_list.append(" %s = %s" %(name, '+'.join(jamp_res[i])))

        return res_list, len(defs)
1508
    def optimise_jamp(self, all_element, nb_line=0, nb_col=0, added=0):
        """ optimise problem of type Y = A X
        A is a matrix (all_element)
        X is the fortran name of the input.
        The code iteratively add sub-expression jtemp[sub_add]
        and recall itself (this is add to the X size)

        all_element maps (line, column) -> coefficient; negative column
        indices denote previously introduced sub-expressions.  Returns
        (new_all_element, defs) where defs is a list of tuples
        (tmp_index, col1, col2, ratio, use_count).  NOTE: all_element is
        modified in place."""
        self.myjamp_count +=1

        # Determine the matrix dimensions on the first (non-recursive) call.
        if not nb_line:
            for i,j in all_element:
                if i+1 > nb_line:
                    nb_line = i+1
                if j+1> nb_col:
                    nb_col = j+1

        # Count, for every column pair (j1, j2), how often the same ratio
        # R = A[i,j2]/A[i,j1] occurs across lines; the most frequent pair(s)
        # are the best candidates for factoring out a sub-expression.
        max_count = 0
        all_index = []
        operation = collections.defaultdict(lambda: collections.defaultdict(int))
        for i in range(nb_line):
            for j1 in range(-added, nb_col):
                v1 = all_element.get((i,j1), 0)
                if not v1:
                    continue
                for j2 in range(j1+1, nb_col):
                    R = all_element.get((i,j2), 0)/v1
                    if not R:
                        continue

                    operation[(j1,j2)][R] +=1
                    if operation[(j1,j2)][R] > max_count:
                        max_count = operation[(j1,j2)][R]
                        all_index = [(j1,j2, R)]
                    elif operation[(j1,j2)][R] == max_count:
                        all_index.append((j1,j2, R))
        # No pair occurs more than once: nothing left to factor out.
        if max_count <= 1:
            return all_element, []

        # Replace every matching (j1, j2) pair by a reference to a new
        # sub-expression column (negative index -added).
        to_add = []
        for index in all_index:
            j1,j2,R = index
            first = True
            for i in range(nb_line):
                v1 = all_element.get((i,j1), 0)
                v2 = all_element.get((i,j2), 0)
                if not v1 or not v2:
                    continue
                if v2/v1 == R:
                    if first:
                        first = False
                        added +=1
                        to_add.append((added,j1,j2,R, max_count))

                    all_element[(i,-added)] = v1
                    del all_element[(i,j1)] #= 0
                    del all_element[(i,j2)] #= 0

        logger.log(5,"Define %d new shortcut reused %d times", len(to_add), max_count)
        # Recurse on the reduced matrix, then prepend this round's
        # definitions so they appear in dependency order.
        new_element, new_def = self.optimise_jamp(all_element, nb_line=nb_line, nb_col=nb_col, added=added)
        for one_def in to_add:
            new_def.insert(0, one_def)
        return new_element, new_def
1573 1574 1575 1576 1577
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns a 3-tuple of Fortran snippets (each without trailing
        newline): variable declarations, DATA initializations, and the code
        evaluating the PDFs and combining them into PD(IPROC).  For
        ninitial == 1 (decay processes) the PDFs are trivially 1."""

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDF convolution, each subprocess weight is 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            # Sanitize particle names into valid Fortran identifiers.
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                        ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                  for pdg in \
                                                  initial_states[i]]) + \
                                        "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                  "/%d*1D0/" % len(initial_states[i]) + \
                                  "\n"

            # Get PDF lines for all different initial states
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    # Beam index goes through the IB() mapping in grouped mode.
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in list(pdf_codes.keys()):
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1,
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in list(pdf_codes.keys()):
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1684 1685 #=========================================================================== 1686 # write_props_file 1687 #===========================================================================
1688 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1689 """Write the props.inc file for MadEvent. Needs input from 1690 write_configs_file.""" 1691 1692 lines = [] 1693 1694 particle_dict = matrix_element.get('processes')[0].get('model').\ 1695 get('particle_dict') 1696 1697 for iconf, configs in enumerate(s_and_t_channels): 1698 for vertex in configs[0] + configs[1][:-1]: 1699 leg = vertex.get('legs')[-1] 1700 if leg.get('id') not in particle_dict: 1701 # Fake propagator used in multiparticle vertices 1702 mass = 'zero' 1703 width = 'zero' 1704 pow_part = 0 1705 else: 1706 particle = particle_dict[leg.get('id')] 1707 # Get mass 1708 if particle.get('mass').lower() == 'zero': 1709 mass = particle.get('mass') 1710 else: 1711 mass = "abs(%s)" % particle.get('mass') 1712 # Get width 1713 if particle.get('width').lower() == 'zero': 1714 width = particle.get('width') 1715 else: 1716 width = "abs(%s)" % particle.get('width') 1717 1718 pow_part = 1 + int(particle.is_boson()) 1719 1720 lines.append("prmass(%d,%d) = %s" % \ 1721 (leg.get('number'), iconf + 1, mass)) 1722 lines.append("prwidth(%d,%d) = %s" % \ 1723 (leg.get('number'), iconf + 1, width)) 1724 lines.append("pow(%d,%d) = %d" % \ 1725 (leg.get('number'), iconf + 1, pow_part)) 1726 1727 # Write the file 1728 writer.writelines(lines) 1729 1730 return True
1731 1732 #=========================================================================== 1733 # write_configs_file 1734 #===========================================================================
1735 - def write_configs_file(self, writer, matrix_element):
1736 """Write the configs.inc file for MadEvent""" 1737 1738 # Extract number of external particles 1739 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1740 1741 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1742 mapconfigs = [c[0] for c in configs] 1743 model = matrix_element.get('processes')[0].get('model') 1744 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1745 [[c[1]] for c in configs], 1746 mapconfigs, 1747 nexternal, ninitial, 1748 model)
1749 1750 #=========================================================================== 1751 # write_configs_file_from_diagrams 1752 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the s- and t-channel decomposition (first contributing
        subprocess only) for each written config, as consumed by
        write_props_file."""

        lines = []

        s_and_t_channels = []

        # Smallest maximal vertex order over all configs; configs containing
        # a larger vertex (e.g. from a shrunk loop) are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fake PDG code used for multiparticle-vertex propagators
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            # NOTE: empty_verts is shared (aliased) between all non-contributing
            # subprocesses; it is resized in place further down.
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            # (in-place slice assignment so every alias sees the padding)
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are tuples of per-subprocess vertices
                # (possibly None); t-channel entries are plain vertices
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One sprop PDG per subprocess; 0 marks "not contributing"
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    # t-channel propagator id; sprop zeroed for all subprocesses
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1871 1872 #=========================================================================== 1873 # Global helper methods 1874 #=========================================================================== 1875
1876 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1877 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1878 1879 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1880 1881 if total_coeff == 1: 1882 if is_imaginary: 1883 return '+imag1*' 1884 else: 1885 return '+' 1886 elif total_coeff == -1: 1887 if is_imaginary: 1888 return '-imag1*' 1889 else: 1890 return '-' 1891 1892 res_str = '%+iD0' % total_coeff.numerator 1893 1894 if total_coeff.denominator != 1: 1895 # Check if total_coeff is an integer 1896 res_str = res_str + '/%iD0' % total_coeff.denominator 1897 1898 if is_imaginary: 1899 res_str = res_str + '*imag1' 1900 1901 return res_str + '*'
1902 1903
    def set_fortran_compiler(self, default_compiler, force=False):
        """Set compiler based on what's available on the system.

        default_compiler: dict with keys 'fortran' and 'f2py' holding the
        user-requested compiler names (may be empty/None).
        force: accepted for interface compatibility; not used in this body.
        Returns the fortran compiler name that was selected."""

        # Check for compiler: requested one first, then common fallbacks
        # in order of preference
        if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
            f77_compiler = default_compiler['fortran']
        elif misc.which('gfortran'):
            f77_compiler = 'gfortran'
        elif misc.which('g77'):
            f77_compiler = 'g77'
        elif misc.which('f77'):
            f77_compiler = 'f77'
        elif default_compiler['fortran']:
            logger.warning('No Fortran Compiler detected! Please install one')
            f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it
        else:
            raise MadGraph5Error('No Fortran Compiler detected! Please install one')
        logger.info('Use Fortran compiler ' + f77_compiler)


        # Check for compiler. 1. set default.
        if default_compiler['f2py']:
            f2py_compiler = default_compiler['f2py']
        else:
            f2py_compiler = ''
        # Try to find the correct one.
        # NOTE(review): the branches below test only sys.version_info[1]
        # (the minor version) against 6/7 — presumably a python2-era check
        # for f2py-2.6/f2py-2.7 binaries; confirm still intended on python3.
        if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
            f2py_compiler = default_compiler['f2py']
        elif misc.which('f2py'):
            f2py_compiler = 'f2py'
        elif sys.version_info[1] == 6:
            if misc.which('f2py-2.6'):
                f2py_compiler = 'f2py-2.6'
            elif misc.which('f2py2.6'):
                f2py_compiler = 'f2py2.6'
        elif sys.version_info[1] == 7:
            if misc.which('f2py-2.7'):
                f2py_compiler = 'f2py-2.7'
            elif misc.which('f2py2.7'):
                f2py_compiler = 'f2py2.7'

        to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}

        # Record the choice in this output directory's Source/make_opts
        self.replace_make_opt_f_compiler(to_replace)
        # Replace also for Template but not for cluster
        if 'MADGRAPH_DATA' not in os.environ and ReadWrite:
            self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

        return f77_compiler
1954 1955 # an alias for backward compatibility 1956 set_compiler = set_fortran_compiler 1957 1958
1959 - def set_cpp_compiler(self, default_compiler, force=False):
1960 """Set compiler based on what's available on the system""" 1961 1962 # Check for compiler 1963 if default_compiler and misc.which(default_compiler): 1964 compiler = default_compiler 1965 elif misc.which('g++'): 1966 #check if clang version 1967 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1968 stderr=subprocess.PIPE) 1969 out, _ = p.communicate() 1970 out = out.decode() 1971 if 'clang' in str(out) and misc.which('clang'): 1972 compiler = 'clang' 1973 else: 1974 compiler = 'g++' 1975 elif misc.which('c++'): 1976 compiler = 'c++' 1977 elif misc.which('clang'): 1978 compiler = 'clang' 1979 elif default_compiler: 1980 logger.warning('No c++ Compiler detected! Please install one') 1981 compiler = default_compiler # maybe misc fail so try with it 1982 else: 1983 raise MadGraph5Error('No c++ Compiler detected! Please install one') 1984 logger.info('Use c++ compiler ' + compiler) 1985 self.replace_make_opt_c_compiler(compiler) 1986 # Replace also for Template but not for cluster 1987 if 'MADGRAPH_DATA' not in os.environ and ReadWrite and \ 1988 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1989 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1990 1991 return compiler
1992 1993
1994 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1995 """Set FC=compiler in Source/make_opts""" 1996 1997 assert isinstance(compilers, dict) 1998 1999 mod = False #avoid to rewrite the file if not needed 2000 if not root_dir: 2001 root_dir = self.dir_path 2002 2003 compiler= compilers['fortran'] 2004 f2py_compiler = compilers['f2py'] 2005 if not f2py_compiler: 2006 f2py_compiler = 'f2py' 2007 for_update= {'DEFAULT_F_COMPILER':compiler, 2008 'DEFAULT_F2PY_COMPILER':f2py_compiler} 2009 make_opts = pjoin(root_dir, 'Source', 'make_opts') 2010 2011 try: 2012 common_run_interface.CommonRunCmd.update_make_opts_full( 2013 make_opts, for_update) 2014 except IOError: 2015 if root_dir == self.dir_path: 2016 logger.info('Fail to set compiler. Trying to continue anyway.')
2017
2018 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
2019 """Set CXX=compiler in Source/make_opts. 2020 The version is also checked, in order to set some extra flags 2021 if the compiler is clang (on MACOS)""" 2022 2023 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 2024 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 2025 2026 2027 # list of the variable to set in the make_opts file 2028 for_update= {'DEFAULT_CPP_COMPILER':compiler, 2029 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 2030 'STDLIB': '-lc++' if is_lc else '-lstdc++', 2031 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 2032 } 2033 2034 # for MOJAVE remove the MACFLAG: 2035 if is_clang: 2036 import platform 2037 version, _, _ = platform.mac_ver() 2038 if not version:# not linux 2039 version = 14 # set version to remove MACFLAG 2040 else: 2041 majversion, version = [int(x) for x in version.split('.',3)[:2]] 2042 2043 if majversion >= 11 or (majversion ==10 and version >= 14): 2044 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 2045 2046 if not root_dir: 2047 root_dir = self.dir_path 2048 make_opts = pjoin(root_dir, 'Source', 'make_opts') 2049 2050 try: 2051 common_run_interface.CommonRunCmd.update_make_opts_full( 2052 make_opts, for_update) 2053 except IOError: 2054 if root_dir == self.dir_path: 2055 logger.info('Fail to set compiler. Trying to continue anyway.') 2056 2057 return
2058
2059 #=============================================================================== 2060 # ProcessExporterFortranSA 2061 #=============================================================================== 2062 -class ProcessExporterFortranSA(ProcessExporterFortran):
2063 """Class to take care of exporting a set of matrix elements to 2064 MadGraph v4 StandAlone format.""" 2065 2066 matrix_template = "matrix_standalone_v4.inc" 2067
2068 - def __init__(self, *args,**opts):
2069 """add the format information compare to standard init""" 2070 2071 if 'format' in opts: 2072 self.format = opts['format'] 2073 del opts['format'] 2074 else: 2075 self.format = 'standalone' 2076 2077 self.prefix_info = {} 2078 ProcessExporterFortran.__init__(self, *args, **opts)
2079
2080 - def copy_template(self, model):
2081 """Additional actions needed for setup of Template 2082 """ 2083 2084 #First copy the full template tree if dir_path doesn't exit 2085 if os.path.isdir(self.dir_path): 2086 return 2087 2088 logger.info('initialize a new standalone directory: %s' % \ 2089 os.path.basename(self.dir_path)) 2090 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 2091 2092 # Create the directory structure 2093 os.mkdir(self.dir_path) 2094 os.mkdir(pjoin(self.dir_path, 'Source')) 2095 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 2096 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 2097 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 2098 os.mkdir(pjoin(self.dir_path, 'bin')) 2099 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 2100 os.mkdir(pjoin(self.dir_path, 'lib')) 2101 os.mkdir(pjoin(self.dir_path, 'Cards')) 2102 2103 # Information at top-level 2104 #Write version info 2105 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 2106 try: 2107 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 2108 except IOError: 2109 MG5_version = misc.get_pkg_info() 2110 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 2111 "5." + MG5_version['version']) 2112 2113 2114 # Add file in SubProcesses 2115 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 2116 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 2117 2118 if self.format == 'standalone': 2119 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 2120 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 2121 2122 # Add file in Source 2123 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 2124 pjoin(self.dir_path, 'Source')) 2125 # add the makefile 2126 filename = pjoin(self.dir_path,'Source','makefile') 2127 self.write_source_makefile(writers.FileWriter(filename))
2128 2129 #=========================================================================== 2130 # export model files 2131 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model"""

        super(ProcessExporterFortranSA,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
        call setpara('param_card.dat',.false.)
        return
        end
        """
        # Append (mode 'a') the helper routine to the model couplings file
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Patch check_sa.f so that setpara re-reads the full card
        # (second argument .true.)
        text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
2155 2156 #=========================================================================== 2157 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 2158 #===========================================================================
2159 - def write_procdef_mg5(self, file_pos, modelname, process_str):
2160 """ write an equivalent of the MG4 proc_card in order that all the Madevent 2161 Perl script of MadEvent4 are still working properly for pure MG5 run. 2162 Not needed for StandAlone so just return 2163 """ 2164 2165 return
2166 2167 2168 #=========================================================================== 2169 # Make the Helas and Model directories for Standalone directory 2170 #===========================================================================
2171 - def make(self):
2172 """Run make in the DHELAS and MODEL directories, to set up 2173 everything for running standalone 2174 """ 2175 2176 source_dir = pjoin(self.dir_path, "Source") 2177 logger.info("Running make for Helas") 2178 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2179 logger.info("Running make for Model") 2180 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2181 2182 #=========================================================================== 2183 # Create proc_card_mg5.dat for Standalone directory 2184 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
           generation proc_card_mg5.dat
           generate a global makefile
        """

        # Compiler names requested in the MG5 options
        compiler =  {'fortran': mg5options['fortran_compiler'],
                     'cpp': mg5options['cpp_compiler'],
                     'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        # Build the helas and model libraries right away
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
        # Make the output directory importable as a python package
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        # Deliberately disabled branch (reweight mode); kept for reference
        if False:#'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # Prefixed matrix elements: emit the f2py dispatcher and makefile
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                                     pjoin(self.dir_path,'SubProcesses','check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text+='all: %s\n\techo \'done\'' % deppython

            # Append to the existing SubProcesses makefile
            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
            ff.write(text)
            ff.close()
2234
    def write_f2py_splitter(self):
        """write a function to call the correct matrix element

        Emits SubProcesses/all_matrix.f containing a smatrixhel dispatcher
        that routes a (pdgs, procid) request to the appropriate prefixed
        matrix element, plus f2py helper routines (INITIALISE, CHANGE_PARA,
        get_pdg_order, get_prefix)."""

        # Fortran template; %(...)s slots are filled from self.prefix_info
        # and the model parameters below.
        template = """
%(python_information)s
      subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE
C     ALPHAS is given at scale2 (SHOULD be different of 0 for loop induced, ignore for LO)

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in):: procid
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel, procid
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
c     if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END

      subroutine CHANGE_PARA(name, value)
      implicit none
CF2PY intent(in) :: name
CF2PY intent(in) :: value

      character*512 name
      double precision value

%(helreset_def)s

      include '../Source/MODEL/input.inc'
      include '../Source/MODEL/coupl.inc'

%(helreset_setup)s

      SELECT CASE (name)
%(parameter_setup)s
      CASE DEFAULT
      write(*,*) 'no parameter matching', name, value
      END SELECT

      return
      end

      subroutine update_all_coup()
      implicit none
      call coup()
      return
      end

      subroutine get_pdg_order(PDG, ALLPROC)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i)
      DATA PDGS/ %(pdgs)s /
      DATA PIDS/ %(pids)s /
      PDG = PDGS
      ALLPROC = PIDS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END
      """

        # prefix_info keys are ((pdg tuple), process id); values are
        # [fortran prefix, process tag]
        allids = list(self.prefix_info.keys())
        allprefix = [self.prefix_info[key][0] for key in allids]
        min_nexternal = min([len(ids[0]) for ids in allids])
        max_nexternal = max([len(ids[0]) for ids in allids])

        # Human/machine readable mapping, embedded as comments in the output
        info = []
        for (key, pid), (prefix, tag) in self.prefix_info.items():
            info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid))

        # Build the nested if/else dispatch over npdg, then over the exact
        # pdg list and process id
        text = []
        for n_ext in range(min_nexternal, max_nexternal+1):
            current_id = [ids[0] for ids in allids if len(ids[0])==n_ext]
            current_pid = [ids[1] for ids in allids if len(ids[0])==n_ext]
            if not current_id:
                continue
            if min_nexternal != max_nexternal:
                if n_ext == min_nexternal:
                    text.append('       if (npdg.eq.%i)then' % n_ext)
                else:
                    text.append('       else if (npdg.eq.%i)then' % n_ext)
            for ii,pdgs in enumerate(current_id):
                pid = current_pid[ii]
                condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)])
                if ii==0:
                    text.append( '         if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
                else:
                    text.append( '         else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition,pid,ii))
                text.append('           call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs,pid)][0])
            text.append('         endif')
        #close the function
        if min_nexternal != max_nexternal:
            text.append('       endif')

        # One Fortran CASE per model parameter name
        params = self.get_model_parameter(self.model)
        parameter_setup = []
        for key, var in params.items():
            parameter_setup.append('        CASE ("%s")\n          %s = value'
                                   % (key, var))

        # part for the resetting of the helicity
        helreset_def = []
        helreset_setup = []
        for prefix in set(allprefix):
            helreset_setup.append('      %shelreset = .true. ' % prefix)
            helreset_def.append('      logical %shelreset \n      common /%shelreset/ %shelreset' % (prefix, prefix, prefix))

        formatting = {'python_information':'\n'.join(info),
                      'smatrixhel': '\n'.join(text),
                      'maxpart': max_nexternal,
                      'nb_me': len(allids),
                      # Per-process pdg table, zero-padded to max_nexternal
                      'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0'
                                       for i in range(max_nexternal) for (pdg,pid) in allids),
                      'prefix':'\',\''.join(allprefix),
                      'pids': ','.join(str(pid) for (pdg,pid) in allids),
                      'parameter_setup': '\n'.join(parameter_setup),
                      'helreset_def' : '\n'.join(helreset_def),
                      'helreset_setup' : '\n'.join(helreset_setup),
                      }
        formatting['lenprefix'] = len(formatting['prefix'])
        text = template % formatting
        # FortranWriter takes care of line wrapping/casing for the output
        fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w')
        fsock.writelines(text)
        fsock.close()
2398
2399 - def get_model_parameter(self, model):
2400 """ returns all the model parameter 2401 """ 2402 params = {} 2403 for p in model.get('parameters')[('external',)]: 2404 name = p.name 2405 nopref = name[4:] if name.startswith('mdl_') else name 2406 params[nopref] = name 2407 2408 block = p.lhablock 2409 lha = '_'.join([str(i) for i in p.lhacode]) 2410 params['%s_%s' % (block.upper(), lha)] = name 2411 2412 return params
2413 2414 2415 2416 2417
2418 - def write_f2py_check_sa(self, matrix_element, writer):
2419 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2420 # To be implemented. It is just an example file, i.e. not crucial. 2421 return
2422
2423 - def write_f2py_makefile(self):
2424 """ """ 2425 # Add file in SubProcesses 2426 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2427 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2428
2429 - def create_MA5_cards(self,*args,**opts):
2430 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2431 pass
2432
2433 - def compiler_choice(self, compiler):
2434 """ Different daughter classes might want different compilers. 2435 So this function is meant to be overloaded if desired.""" 2436 2437 self.set_compiler(compiler)
2438 2439 #=========================================================================== 2440 # generate_subprocess_directory 2441 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.

        Returns the number of helas calls written (0 when the directory is
        skipped because a symmetric one already exists)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                        "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if a directory for a mirrored/permuted
            # version of this process already exists, skip this one
            for i,proc in enumerate(matrix_element.get('processes')):

                tag = proc.get_tag()
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                # Swap the initial-state legs when leg1 is initial-state
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                # Try every permutation of the final-state legs
                flegs = proc.get('legs')[2:]
                for perm in itertools.permutations(flegs):
                    for i,p in enumerate(perm):
                        proc.get('legs')[i+2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                                     "P%s" % proc.shell_string())
                    #restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; warn and keep going
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional routine prefix: 'int' -> M<number>_, 'proc' -> from the
        # process shell string; prefixed processes are registered in
        # self.prefix_info for the f2py dispatcher
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1]
            else:
                raise Exception('--prefix options supports only \'int\' and \'proc\'')
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[(tuple(ids), proc.get('id'))] = [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        # Extra include files for MadSpin production output
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                                         nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb=matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                                      ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        # Generate diagrams (unless the 'noeps' output option disables it)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # With a routine prefix, check_sa.f must call the prefixed smatrix:
        # write a patched copy instead of symlinking the shared one
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            pat = re.compile('smatrix', re.I)
            new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
            with open(pjoin(dirpath, 'check_sa.f'),'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2581 2582 2583 #=========================================================================== 2584 # write_source_makefile 2585 #===========================================================================
2586 - def write_source_makefile(self, writer):
2587 """Write the nexternal.inc file for MG4""" 2588 2589 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2590 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2591 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2592 2593 replace_dict= {'libraries': set_of_lib, 2594 'model':model_line, 2595 'additional_dsample': '', 2596 'additional_dependencies':''} 2597 2598 text = open(path).read() % replace_dict 2599 2600 if writer: 2601 writer.write(text) 2602 2603 return replace_dict
2604 2605 #=========================================================================== 2606 # write_matrix_element_v4 2607 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format
        if write is on False, just return the replace_dict and not write anything.

        Returns 0 for an empty matrix element, the number of non-comment
        helas calls when writing, or the filled replace_dict when
        write=False (for subclass update).

        NOTE(review): when split orders are present this also writes
        check_sa_born_splitOrders.f and (if write) nsqso_born.inc in the
        current working directory as side effects.
        """


        # Nothing to export for an empty matrix element.
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        # Default the standalone-symmetry option if the caller did not set it.
        if 'sa_symmetry' not in self.opt:
            self.opt['sa_symmetry']=False


        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp

        else:
            # Real split-order bookkeeping: one JAMP set per amplitude order.
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp
            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
                                  nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            # NOTE(review): written in the current working directory,
            # presumably the subprocess directory at call time -- confirm.
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Pick the matrix.f template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                 'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
                writer.writelines(content)
            # Count only the helas calls that are not Fortran comments.
            return len([call for call in helas_calls if call.find('#') != 0])
        else:
            replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0])
            return replace_dict # for subclass update
2780
2781 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2782 nincoming, proc_prefix, writer):
2783 """ Write out a more advanced version of the check_sa drivers that 2784 individually returns the matrix element for each contributing squared 2785 order.""" 2786 2787 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2788 'template_files', 'check_sa_splitOrders.f')).read() 2789 printout_sq_orders=[] 2790 for i, squared_order in enumerate(squared_orders): 2791 sq_orders=[] 2792 for j, sqo in enumerate(squared_order): 2793 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2794 printout_sq_orders.append(\ 2795 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2796 %(i+1,' '.join(sq_orders),i+1)) 2797 printout_sq_orders='\n'.join(printout_sq_orders) 2798 replace_dict = {'printout_sqorders':printout_sq_orders, 2799 'nSplitOrders':len(squared_orders), 2800 'nexternal':nexternal, 2801 'nincoming':nincoming, 2802 'proc_prefix':proc_prefix} 2803 2804 if writer: 2805 writer.writelines(check_sa_content % replace_dict) 2806 else: 2807 return replace_dict
2808
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # Matchbox output never cleans the directory and always uses the
    # standalone-symmetry mode.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born


    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Emits a Fortran if/elseif chain mapping (in1, in2) index pairs to
        the flattened color-structure arguments; -1 marks "no entry".
        """

        # Trivial color structure: a single unit entry.
        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in range(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    continue
                # Matchbox only understands fundamental generators and traces.
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype)
                # '0' acts as a separator between consecutive structures.
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # NOTE(review): dict iteration order determines the emitted
            # if/elseif order -- insertion order on py3.7+, confirm if
            # older interpreters must be supported.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
              out = %s
              """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
              out = %s
              """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self,*args,**opts):
        # Matchbox output is compiled externally: building here is a no-op.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):

        """Adding leading color part of the colorflow

        Emits the standard JAMP definitions plus a second 'LNJAMP' set
        restricted to the leading-color (no 1/NC) contributions.
        """

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error(error_msg % 'col_amps')
        else:
            raise MadGraph5Error(error_msg % 'col_amps')

        # Full-color JAMPs from the parent standalone exporter.
        text, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                                      JAMP_format=JAMP_format,
                                                      AMP_format=AMP_format,
                                                      split=-1)


        # Filter the col_ampls to generate only those without any 1/NC terms

        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                # coefficient[3] is presumably the power of 1/Nc; keep only
                # the Nc^0 (leading-color) pieces -- TODO confirm.
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text2, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                                      JAMP_format=JAMP_formatLC,
                                                      AMP_format=AMP_format,
                                                      split=-1)
        text += text2

        return text, 0
2920
2921 2922 2923 2924 #=============================================================================== 2925 # ProcessExporterFortranMW 2926 #=============================================================================== 2927 -class ProcessExporterFortranMW(ProcessExporterFortran):
2928 """Class to take care of exporting a set of matrix elements to 2929 MadGraph v4 - MadWeight format.""" 2930 2931 matrix_file="matrix_standalone_v4.inc" 2932 jamp_optim = False 2933
2934 - def copy_template(self, model):
2935 """Additional actions needed for setup of Template 2936 """ 2937 2938 super(ProcessExporterFortranMW, self).copy_template(model) 2939 2940 # Add the MW specific file 2941 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2942 pjoin(self.dir_path, 'Source','MadWeight'), True) 2943 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2944 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2945 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2946 pjoin(self.dir_path, 'Source','setrun.f')) 2947 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2948 pjoin(self.dir_path, 'Source','run.inc')) 2949 # File created from Template (Different in some child class) 2950 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2951 self.write_run_config_file(writers.FortranWriter(filename)) 2952 2953 try: 2954 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2955 stdout = os.open(os.devnull, os.O_RDWR), 2956 stderr = os.open(os.devnull, os.O_RDWR), 2957 cwd=self.dir_path) 2958 except OSError: 2959 # Probably madweight already called 2960 pass 2961 2962 # Copy the different python file in the Template 2963 self.copy_python_file() 2964 # create the appropriate cuts.f 2965 self.get_mw_cuts_version() 2966 2967 # add the makefile in Source directory 2968 filename = os.path.join(self.dir_path,'Source','makefile') 2969 self.write_source_makefile(writers.FortranWriter(filename))
2970 2971 2972 2973 2974 #=========================================================================== 2975 # convert_model 2976 #===========================================================================
2977 - def convert_model(self, model, wanted_lorentz = [], 2978 wanted_couplings = []):
2979 2980 super(ProcessExporterFortranMW,self).convert_model(model, 2981 wanted_lorentz, wanted_couplings) 2982 2983 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2984 try: 2985 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2986 except OSError as error: 2987 pass 2988 model_path = model.get('modelpath') 2989 # This is not safe if there is a '##' or '-' in the path. 2990 shutil.copytree(model_path, 2991 pjoin(self.dir_path,'bin','internal','ufomodel'), 2992 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2993 if hasattr(model, 'restrict_card'): 2994 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2995 'restrict_default.dat') 2996 if isinstance(model.restrict_card, check_param_card.ParamCard): 2997 model.restrict_card.write(out_path) 2998 else: 2999 files.cp(model.restrict_card, out_path)
3000 3001 #=========================================================================== 3002 # generate_subprocess_directory 3003 #===========================================================================
3004 - def copy_python_file(self):
3005 """copy the python file require for the Template""" 3006 3007 # madevent interface 3008 cp(_file_path+'/interface/madweight_interface.py', 3009 self.dir_path+'/bin/internal/madweight_interface.py') 3010 cp(_file_path+'/interface/extended_cmd.py', 3011 self.dir_path+'/bin/internal/extended_cmd.py') 3012 cp(_file_path+'/interface/common_run_interface.py', 3013 self.dir_path+'/bin/internal/common_run_interface.py') 3014 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3015 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3016 cp(_file_path+'/iolibs/save_load_object.py', 3017 self.dir_path+'/bin/internal/save_load_object.py') 3018 cp(_file_path+'/madevent/gen_crossxhtml.py', 3019 self.dir_path+'/bin/internal/gen_crossxhtml.py') 3020 cp(_file_path+'/madevent/sum_html.py', 3021 self.dir_path+'/bin/internal/sum_html.py') 3022 cp(_file_path+'/various/FO_analyse_card.py', 3023 self.dir_path+'/bin/internal/FO_analyse_card.py') 3024 cp(_file_path+'/iolibs/file_writers.py', 3025 self.dir_path+'/bin/internal/file_writers.py') 3026 #model file 3027 cp(_file_path+'../models/check_param_card.py', 3028 self.dir_path+'/bin/internal/check_param_card.py') 3029 3030 #madevent file 3031 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3032 cp(_file_path+'/various/lhe_parser.py', 3033 self.dir_path+'/bin/internal/lhe_parser.py') 3034 3035 cp(_file_path+'/various/banner.py', 3036 self.dir_path+'/bin/internal/banner.py') 3037 cp(_file_path+'/various/shower_card.py', 3038 self.dir_path+'/bin/internal/shower_card.py') 3039 cp(_file_path+'/various/cluster.py', 3040 self.dir_path+'/bin/internal/cluster.py') 3041 3042 # logging configuration 3043 cp(_file_path+'/interface/.mg5_logging.conf', 3044 self.dir_path+'/bin/internal/me5_logging.conf') 3045 cp(_file_path+'/interface/coloring_logging.py', 3046 self.dir_path+'/bin/internal/coloring_logging.py')
3047 3048 3049 #=========================================================================== 3050 # Change the version of cuts.f to the one compatible with MW 3051 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None (write to SubProcesses/cuts.f), a path string, or an
        open file-like object.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # Skip the whole 'if(xqcut.gt.0d0...' block by counting nested
        # if/endif pairs; lines are only copied while nb_if == 0.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            # NOTE(review): the opening 'if(xqcut...' line itself also
            # matches the 'if(...)then' pattern below, so it bumps nb_if
            # again -- presumably balanced by the LO template's layout;
            # confirm against Template/LO/SubProcesses/cuts.f.
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
          cut_bw = .true.
      else
          stop 1
      endif
      return
      end
        """)

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        # MadWeight only needs the particle-count include, not full genps.
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)
3101 3102 3103 3104 #=========================================================================== 3105 # Make the Helas and Model directories for Standalone directory 3106 #===========================================================================
3107 - def make(self):
3108 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 3109 everything for running madweight 3110 """ 3111 3112 source_dir = os.path.join(self.dir_path, "Source") 3113 logger.info("Running make for Helas") 3114 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 3115 logger.info("Running make for Model") 3116 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 3117 logger.info("Running make for PDF") 3118 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 3119 logger.info("Running make for CERNLIB") 3120 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 3121 logger.info("Running make for GENERIC") 3122 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 3123 logger.info("Running make for blocks") 3124 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 3125 logger.info("Running make for tools") 3126 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
3127 3128 #=========================================================================== 3129 # Create proc_card_mg5.dat for MadWeight directory 3130 #===========================================================================
3131 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3132 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 3133 3134 compiler = {'fortran': mg5options['fortran_compiler'], 3135 'cpp': mg5options['cpp_compiler'], 3136 'f2py': mg5options['f2py_compiler']} 3137 3138 3139 3140 #proc_charac 3141 self.create_proc_charac() 3142 3143 # Write maxparticles.inc based on max of ME's/subprocess groups 3144 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3145 self.write_maxparticles_file(writers.FortranWriter(filename), 3146 matrix_elements) 3147 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 3148 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 3149 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 3150 pjoin(self.dir_path, 'Source','MadWeight','tools')) 3151 3152 self.set_compiler(compiler) 3153 self.make() 3154 3155 # Write command history as proc_card_mg5 3156 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 3157 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 3158 history.write(output_file) 3159 3160 ProcessExporterFortran.finalize(self, matrix_elements, 3161 history, mg5options, flaglist)
3162 3163 3164 3165 #=========================================================================== 3166 # create the run_card for MW 3167 #===========================================================================
3168 - def create_run_card(self, matrix_elements, history):
3169 """ """ 3170 3171 run_card = banner_mod.RunCard() 3172 3173 # pass to default for MW 3174 run_card["run_tag"] = "\'not_use\'" 3175 run_card["fixed_ren_scale"] = "T" 3176 run_card["fixed_fac_scale"] = "T" 3177 run_card.remove_all_cut() 3178 3179 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 3180 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3181 python_template=True) 3182 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 3183 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3184 python_template=True)
3185 3186 #=========================================================================== 3187 # export model files 3188 #===========================================================================
3189 - def export_model_files(self, model_path):
3190 """export the model dependent files for V4 model""" 3191 3192 super(ProcessExporterFortranMW,self).export_model_files(model_path) 3193 # Add the routine update_as_param in v4 model 3194 # This is a function created in the UFO 3195 text=""" 3196 subroutine update_as_param() 3197 call setpara('param_card.dat',.false.) 3198 return 3199 end 3200 """ 3201 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3202 ff.write(text) 3203 ff.close() 3204 3205 # Modify setrun.f 3206 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 3207 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3208 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 3209 fsock.write(text) 3210 fsock.close() 3211 3212 # Modify initialization.f 3213 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 3214 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 3215 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 3216 fsock.write(text) 3217 fsock.close() 3218 3219 3220 self.make_model_symbolic_link()
3221 3222 #=========================================================================== 3223 # generate_subprocess_directory 3224 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                         fortran_model,number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of helas calls generated for matrix.f.
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; warn and reuse it.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath,'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                                          writers.FortranWriter(filename),
                                          matrix_element,
                                          fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                             matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                         matrix_element,
                         s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                           len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                           len(matrix_element.get('diagrams')),
                           ncolor,
                           len(matrix_element.get('processes')),
                           1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                           len(matrix_element.get('diagrams')),
                           )

        # Generate diagrams (unless suppressed via the 'noeps' output option)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): links are created relative to the process' original
        # working directory (starting_dir=cwd), not dirpath -- confirm this
        # matches the caller's cwd expectations.
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3332 3333 #=========================================================================== 3334 # write_matrix_element_v4 3335 #===========================================================================
3336 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3337 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3338 3339 if not matrix_element.get('processes') or \ 3340 not matrix_element.get('diagrams'): 3341 return 0 3342 3343 if writer: 3344 if not isinstance(writer, writers.FortranWriter): 3345 raise writers.FortranWriter.FortranWriterError(\ 3346 "writer not FortranWriter") 3347 3348 # Set lowercase/uppercase Fortran code 3349 writers.FortranWriter.downcase = False 3350 3351 replace_dict = {} 3352 3353 # Extract version number and date from VERSION file 3354 info_lines = self.get_mg5_info_lines() 3355 replace_dict['info_lines'] = info_lines 3356 3357 # Extract process info lines 3358 process_lines = self.get_process_info_lines(matrix_element) 3359 replace_dict['process_lines'] = process_lines 3360 3361 # Set proc_id 3362 replace_dict['proc_id'] = proc_id 3363 3364 # Extract number of external particles 3365 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3366 replace_dict['nexternal'] = nexternal 3367 3368 # Extract ncomb 3369 ncomb = matrix_element.get_helicity_combinations() 3370 replace_dict['ncomb'] = ncomb 3371 3372 # Extract helicity lines 3373 helicity_lines = self.get_helicity_lines(matrix_element) 3374 replace_dict['helicity_lines'] = helicity_lines 3375 3376 # Extract overall denominator 3377 # Averaging initial state color, spin, and identical FS particles 3378 den_factor_line = self.get_den_factor_line(matrix_element) 3379 replace_dict['den_factor_line'] = den_factor_line 3380 3381 # Extract ngraphs 3382 ngraphs = matrix_element.get_number_of_amplitudes() 3383 replace_dict['ngraphs'] = ngraphs 3384 3385 # Extract nwavefuncs 3386 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3387 replace_dict['nwavefuncs'] = nwavefuncs 3388 3389 # Extract ncolor 3390 ncolor = max(1, len(matrix_element.get('color_basis'))) 3391 replace_dict['ncolor'] = ncolor 3392 3393 # Extract color data lines 3394 color_data_lines = self.get_color_data_lines(matrix_element) 3395 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3396 3397 # Extract helas calls 3398 helas_calls = fortran_model.get_matrix_element_calls(\ 3399 matrix_element) 3400 3401 replace_dict['helas_calls'] = "\n".join(helas_calls) 3402 3403 # Extract JAMP lines 3404 jamp_lines, nb = self.get_JAMP_lines(matrix_element) 3405 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3406 3407 replace_dict['template_file'] = os.path.join(_file_path, \ 3408 'iolibs/template_files/%s' % self.matrix_file) 3409 replace_dict['template_file2'] = '' 3410 3411 if writer: 3412 file = open(replace_dict['template_file']).read() 3413 file = file % replace_dict 3414 # Write the file 3415 writer.writelines(file) 3416 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3417 else: 3418 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3419 3420 #=========================================================================== 3421 # write_source_makefile 3422 #===========================================================================
3423 - def write_source_makefile(self, writer):
3424 """Write the nexternal.inc file for madweight""" 3425 3426 3427 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3428 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3429 text = open(path).read() % {'libraries': set_of_lib} 3430 writer.write(text) 3431 3432 return True
3433
3434 - def write_phasespace_file(self, writer, nb_diag):
3435 """ """ 3436 3437 template = """ include 'maxparticles.inc' 3438 integer max_branches 3439 parameter (max_branches=max_particles-1) 3440 integer max_configs 3441 parameter (max_configs=%(nb_diag)s) 3442 3443 c channel position 3444 integer config_pos,perm_pos 3445 common /to_config/config_pos,perm_pos 3446 3447 """ 3448 3449 writer.write(template % {'nb_diag': nb_diag})
3450 3451 3452 #=========================================================================== 3453 # write_auto_dsig_file 3454 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format).

        writer: FortranWriter-like object (if falsy, the substitution dict
            is returned instead of the file being written).
        matrix_element: helas matrix element providing processes/diagrams.
        proc_id: subprocess-group id as a string; empty for the regular
            (non-grouped) output.

        Returns 0 when the matrix element is empty, the replace_dict when
        no writer is given, and None after a successful write.
        """

        # Nothing to write for an empty matrix element
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        # Only decay (1 -> N) and scattering (2 -> N) topologies are supported
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError(\
                 """Need ninitial = 1 or 2 to write auto_dsig file""")

        # Substitution dictionary used to fill the Fortran template
        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id (numproc defaults to 1; overridden below for groups)
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines (third argument flags subprocess-group mode)
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular output
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            # Cuts are handled elsewhere in grouped mode, so no guard here
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            # Regular mode wraps the computation in the PASSCUTS guard
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            # NOTE(review): template file handle is left to the GC to close
            file = open(os.path.join(_file_path, \
                             'iolibs/template_files/auto_dsig_mw.inc')).read()

            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3523 #=========================================================================== 3524 # write_configs_file 3525 #===========================================================================
3526 - def write_configs_file(self, writer, matrix_element):
3527 """Write the configs.inc file for MadEvent""" 3528 3529 # Extract number of external particles 3530 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3531 3532 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3533 mapconfigs = [c[0] for c in configs] 3534 model = matrix_element.get('processes')[0].get('model') 3535 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3536 [[c[1]] for c in configs], 3537 mapconfigs, 3538 nexternal, ninitial,matrix_element, model)
3539 3540 #=========================================================================== 3541 # write_run_configs_file 3542 #===========================================================================
3543 - def write_run_config_file(self, writer):
3544 """Write the run_configs.inc file for MadWeight""" 3545 3546 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3547 text = open(path).read() % {'chanperjob':'5'} 3548 writer.write(text) 3549 return True
3550 3551 #=========================================================================== 3552 # write_configs_file_from_diagrams 3553 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns s_and_t_channels: per written config, a pair
        [s-channel vertices of the first contributing subprocess,
        shared t-channel vertices]."""

        lines = []

        # PDG -> particle lookup for mass/width of each propagator
        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Largest vertex size per config (first non-None diagram of each)
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fake PDG used for multiparticle vertices (not a real particle id)
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial,model,new_pdg))
                else:
                    # Placeholder shared by all non-contributing subprocesses;
                    # it is resized in place below once real sizes are known
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are tuples over subprocesses; pick the
                # first real vertex.  t-channel entries are single vertices.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # propagator number followed by its two daughter leg numbers
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    # One PDG per subprocess (0 when it does not contribute)
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                elif verts in tchannels[:-1]:
                    # Last t-channel vertex is implicit, so it is skipped
                    lines.append(line+" T "+str(last_leg.get('id')))

        # Write out number of configs
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3696
3697 3698 3699 #=============================================================================== 3700 # ProcessExporterFortranME 3701 #=============================================================================== 3702 -class ProcessExporterFortranME(ProcessExporterFortran):
3703 """Class to take care of exporting a set of matrix elements to 3704 MadEvent format.""" 3705 3706 matrix_file = "matrix_madevent_v4.inc" 3707 done_warning_tchannel = False 3708 3709 default_opt = {'clean': False, 'complex_mass':False, 3710 'export_format':'madevent', 'mp': False, 3711 'v5_model': True, 3712 'output_options':{}, 3713 'hel_recycling': False 3714 } 3715 jamp_optim = True 3716
3717 - def __init__(self, dir_path = "", opt=None):
3718 3719 super(ProcessExporterFortranME, self).__init__(dir_path, opt) 3720 3721 # check and format the hel_recycling options as it should if provided 3722 if opt and isinstance(opt['output_options'], dict) and \ 3723 'hel_recycling' in opt['output_options']: 3724 self.opt['hel_recycling'] = banner_mod.ConfigFile.format_variable( 3725 opt['output_options']['hel_recycling'], bool, 'hel_recycling') 3726 3727 if opt and isinstance(opt['output_options'], dict) and \ 3728 't_strategy' in opt['output_options']: 3729 self.opt['t_strategy'] = banner_mod.ConfigFile.format_variable( 3730 opt['output_options']['t_strategy'], int, 't_strategy')
    # Helper function to customise the helas call writer.
    @staticmethod
3734 - def custom_helas_call(call, arg):
3735 if arg['mass'] == '%(M)s,%(W)s,': 3736 arg['mass'] = '%(M)s, fk_%(W)s,' 3737 elif '%(W)s' in arg['mass']: 3738 raise Exception 3739 return call, arg
3740
3741 - def copy_template(self, model):
3742 """Additional actions needed for setup of Template 3743 """ 3744 3745 super(ProcessExporterFortranME, self).copy_template(model) 3746 3747 # File created from Template (Different in some child class) 3748 filename = pjoin(self.dir_path,'Source','run_config.inc') 3749 self.write_run_config_file(writers.FortranWriter(filename)) 3750 3751 # The next file are model dependant (due to SLAH convention) 3752 self.model_name = model.get('name') 3753 # Add the symmetry.f 3754 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3755 self.write_symmetry(writers.FortranWriter(filename)) 3756 # 3757 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3758 self.write_addmothers(writers.FortranWriter(filename)) 3759 # Copy the different python file in the Template 3760 self.copy_python_file()
3761 3762 3763 3764 3765 3766 3767 #=========================================================================== 3768 # generate_subprocess_directory 3769 #===========================================================================
3770 - def copy_python_file(self):
3771 """copy the python file require for the Template""" 3772 3773 # madevent interface 3774 cp(_file_path+'/interface/madevent_interface.py', 3775 self.dir_path+'/bin/internal/madevent_interface.py') 3776 cp(_file_path+'/interface/extended_cmd.py', 3777 self.dir_path+'/bin/internal/extended_cmd.py') 3778 cp(_file_path+'/interface/common_run_interface.py', 3779 self.dir_path+'/bin/internal/common_run_interface.py') 3780 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3781 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3782 cp(_file_path+'/iolibs/save_load_object.py', 3783 self.dir_path+'/bin/internal/save_load_object.py') 3784 cp(_file_path+'/iolibs/file_writers.py', 3785 self.dir_path+'/bin/internal/file_writers.py') 3786 #model file 3787 cp(_file_path+'../models/check_param_card.py', 3788 self.dir_path+'/bin/internal/check_param_card.py') 3789 3790 #copy all the file present in madevent directory 3791 for name in os.listdir(pjoin(_file_path, 'madevent')): 3792 if name not in ['__init__.py'] and name.endswith('.py'): 3793 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3794 3795 #madevent file 3796 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3797 cp(_file_path+'/various/lhe_parser.py', 3798 self.dir_path+'/bin/internal/lhe_parser.py') 3799 cp(_file_path+'/various/banner.py', 3800 self.dir_path+'/bin/internal/banner.py') 3801 cp(_file_path+'/various/histograms.py', 3802 self.dir_path+'/bin/internal/histograms.py') 3803 cp(_file_path+'/various/plot_djrs.py', 3804 self.dir_path+'/bin/internal/plot_djrs.py') 3805 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3806 3807 cp(_file_path+'/various/cluster.py', 3808 self.dir_path+'/bin/internal/cluster.py') 3809 cp(_file_path+'/madevent/combine_runs.py', 3810 self.dir_path+'/bin/internal/combine_runs.py') 3811 # logging configuration 3812 cp(_file_path+'/interface/.mg5_logging.conf', 3813 
self.dir_path+'/bin/internal/me5_logging.conf') 3814 cp(_file_path+'/interface/coloring_logging.py', 3815 self.dir_path+'/bin/internal/coloring_logging.py') 3816 # shower card and FO_analyse_card. 3817 # Although not needed, it is imported by banner.py 3818 cp(_file_path+'/various/shower_card.py', 3819 self.dir_path+'/bin/internal/shower_card.py') 3820 cp(_file_path+'/various/FO_analyse_card.py', 3821 self.dir_path+'/bin/internal/FO_analyse_card.py')
3822 3823
3824 - def convert_model(self, model, wanted_lorentz = [], 3825 wanted_couplings = []):
3826 3827 super(ProcessExporterFortranME,self).convert_model(model, 3828 wanted_lorentz, wanted_couplings) 3829 3830 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3831 try: 3832 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3833 except OSError as error: 3834 pass 3835 model_path = model.get('modelpath') 3836 # This is not safe if there is a '##' or '-' in the path. 3837 shutil.copytree(model_path, 3838 pjoin(self.dir_path,'bin','internal','ufomodel'), 3839 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3840 if hasattr(model, 'restrict_card'): 3841 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3842 'restrict_default.dat') 3843 if isinstance(model.restrict_card, check_param_card.ParamCard): 3844 model.restrict_card.write(out_path) 3845 else: 3846 files.cp(model.restrict_card, out_path)
3847 3848 #=========================================================================== 3849 # export model files 3850 #===========================================================================
3851 - def export_model_files(self, model_path):
3852 """export the model dependent files""" 3853 3854 super(ProcessExporterFortranME,self).export_model_files(model_path) 3855 3856 # Add the routine update_as_param in v4 model 3857 # This is a function created in the UFO 3858 text=""" 3859 subroutine update_as_param() 3860 call setpara('param_card.dat',.false.) 3861 return 3862 end 3863 """ 3864 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3865 ff.write(text) 3866 ff.close() 3867 3868 # Add the symmetry.f 3869 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3870 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3871 3872 # Modify setrun.f 3873 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3874 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3875 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3876 fsock.write(text) 3877 fsock.close() 3878 3879 self.make_model_symbolic_link()
3880 3881 #=========================================================================== 3882 # generate_subprocess_directory 3883 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        matrix_element: the helas matrix element to export.
        fortran_model: helas call writer used for matrix.f generation.
        me_number: sequential number of this matrix element (used for
            iproc.dat and passed to write_matrix_element_v4).

        Returns the number of helas calls written (0 if none).
        """

        cwd = os.getcwd()  # NOTE(review): kept but unused since chdir calls were commented out
        path = pjoin(self.dir_path, 'SubProcesses')


        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # Directory may already exist; warn and keep going
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])


        # Create the matrix.f file, auto_dsig.f file and all inc files
        # With helicity recycling the raw file is named matrix_orig.f and
        # post-processed elsewhere
        if self.opt['hel_recycling']:
            filename = pjoin(Ppath, 'matrix_orig.f')
        else:
            filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
               self.write_matrix_element_v4(writers.FortranWriter(filename),
                        matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the channel data reused by several of the
        # include files written below
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))


        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        # NOTE(review): file handle passed in open mode, closed only by GC
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams (unless disabled via output_options['noeps'])
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(Ppath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        if not calls:
            calls = 0
        return calls
4057 4058 link_Sub_files = ['addmothers.f', 4059 'cluster.f', 4060 'cluster.inc', 4061 'coupl.inc', 4062 'cuts.f', 4063 'cuts.inc', 4064 'genps.f', 4065 'genps.inc', 4066 'idenparts.f', 4067 'initcluster.f', 4068 'makefile', 4069 'message.inc', 4070 'myamp.f', 4071 'reweight.f', 4072 'run.inc', 4073 'maxconfigs.inc', 4074 'maxparticles.inc', 4075 'run_config.inc', 4076 'lhe_event_infos.inc', 4077 'setcuts.f', 4078 'setscales.f', 4079 'sudakov.inc', 4080 'symmetry.f', 4081 'unwgt.f', 4082 'dummy_fct.f' 4083 ] 4084 4098 4099
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize ME v4 directory by creating jpeg diagrams, html
        pages, proc_card_mg5.dat and madevent.tar.gz.

        matrix_elements: either a list of grouped objects or a single
            container exposing get('matrix_elements') (both shapes are
            handled via the AttributeError fallbacks below).
        history: command history, written out as proc_card_mg5.dat.
        mg5options: dict providing compiler choices and
            'complex_mass_scheme'.
        flaglist: iterable of flags; 'nojpeg' disables diagram jpegs,
            'online' records the channel count in an Online file.
        """

        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        if 'online' in flaglist:
            online = True
        else:
            online = False

        compiler =  {'fortran': mg5options['fortran_compiler'],
                     'cpp': mg5options['cpp_compiler'],
                     'f2py': mg5options['f2py_compiler']}

        # indicate that the output type is not grouped
        if not isinstance(self, ProcessExporterFortranMEGroup):
            self.proc_characteristic['grouped_matrix'] = False

        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        # set limitation linked to the model


        # indicate the PDG of all initial particle
        # (try the grouped shape first, fall back to the flat container)
        try:
            pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        except AttributeError:
            pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        self.proc_characteristic['pdg_initial1'] = pdgs1
        self.proc_characteristic['pdg_initial2'] = pdgs2


        # mssm models need their param_card converted to MG5 conventions
        modelname = self.opt['model']
        if modelname == 'mssm' or modelname.startswith('mssm-'):
            param_card = pjoin(self.dir_path, 'Cards','param_card.dat')
            mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
            check_param_card.convert_to_mg5card(param_card, mg5_param)
            check_param_card.check_valid_param_card(mg5_param)

        # Add the combine_events.f modify param_card path/number of @X
        filename = pjoin(self.dir_path,'Source','combine_events.f')
        try:
            nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
        except AttributeError:
            nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
        nb_proc = len(set(nb_proc))
        self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements)

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)

        # Touch "done" file
        os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses'))

        # Check for compiler
        self.set_compiler(compiler)
        self.set_cpp_compiler(compiler['cpp'])


        old_pos = os.getcwd()
        subpath = pjoin(self.dir_path, 'SubProcesses')

        P_dir_list = [proc for proc in os.listdir(subpath)
                      if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P']

        # NOTE(review): this file descriptor is never closed in this method
        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the postscript in jpg files (if authorized)
        if makejpg:
            try:
                os.remove(pjoin(self.dir_path,'HTML','card.jpg'))
            except Exception as error:
                # best effort: a missing card.jpg is not a problem
                pass

            if misc.which('gs'):
                logger.info("Generate jpeg diagrams")
                for Pdir in P_dir_list:
                    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                              stdout = devnull, cwd=pjoin(subpath, Pdir))

        logger.info("Generate web pages")
        # Create the WebPage using perl script

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                  stdout = devnull,cwd=pjoin(self.dir_path))

        #os.chdir(os.path.pardir)

        obj = gen_infohtml.make_info_html(self.dir_path)

        if online:
            nb_channel = obj.rep_rule['nb_gen_diag']
            # NOTE(review): file handle closed only by GC
            open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel))
        #add the information to proc_charac
        self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

        # Write command history as proc_card_mg5
        if os.path.isdir(pjoin(self.dir_path,'Cards')):
            output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull)

        #create the proc_characteristic file
        self.create_proc_charac(matrix_elements, history)

        # create the run_card
        ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')):
            if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')):
                os.remove(pjoin(self.dir_path,'madevent.tar.gz'))
            misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                      stdout = devnull, cwd=self.dir_path)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull, cwd=self.dir_path)
4229 4230 4231 4232 4233 4234 4235 #return to the initial dir 4236 #os.chdir(old_pos) 4237 4238 #=========================================================================== 4239 # write_matrix_element_v4 4240 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                proc_id = "", config_map = [], subproc_number = ""):
        """Export a matrix element to a matrix.f file in MG4 madevent format.

        Parameters:
            writer: writers.FortranWriter instance, or a false value. When
                false, nothing is written and the filled replacement
                dictionary is returned instead.
            matrix_element: helas matrix element to export.
            fortran_model: helas call writer providing
                get_matrix_element_calls.
            proc_id: process id string; non-empty for grouped subprocess
                output (switches to the SUBDIAG bookkeeping lines).
            config_map: forwarded to get_amp2_lines
                (NOTE(review): mutable default, only read here).
            subproc_number: not used in this method -- presumably kept for
                signature compatibility with other exporters; TODO confirm.

        Returns:
            (number of non-comment helas calls, ncolor) when writer is given,
            the replacement dictionary otherwise, or 0 for an empty
            matrix element.
        """

        # Nothing to export for an empty matrix element.
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter")
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False

        # Check if MLM matching is supported for this matrix element and
        # update the associated limitation flag: a gluon vertex that is
        # colored, carries no QCD order, and whose coupling is actually used
        # invalidates MLM.
        if self.model and 'MLM' in self.model["limitations"]:
            if 'MLM' not in self.proc_characteristic["limitations"]:
                used_couplings = matrix_element.get_used_couplings(output="set")
                for vertex in self.model.get('interactions'):
                    particles = [p for p in vertex.get('particles')]
                    if 21 in [p.get('pdg_code') for p in particles]:
                        colors = [par.get('color') for par in particles]
                        if 1 in colors:
                            continue
                        elif 'QCD' not in vertex.get('orders'):
                            for bad_coup in vertex.get('couplings').values():
                                if bad_coup in used_couplings:
                                    self.proc_characteristic["limitations"].append('MLM')
                                    break

        # The proc prefix is not used for MadEvent output so it can safely be
        # set to an empty string.
        replace_dict = {'proc_prefix':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)
        # Warn only once per run (class-level flag) about zeroed T-channel widths.
        if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel:
            logger.info("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False", '$MG:BOLD')
            ProcessExporterFortranME.done_warning_tchannel = True

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Adding the support for the fake width (forbidding too small width)
        mass_width = matrix_element.get_all_mass_widths()
        mass_width = sorted(list(mass_width))
        width_list = set([e[1] for e in mass_width])

        replace_dict['fake_width_declaration'] = \
            (' double precision fk_%s \n' * len(width_list)) % tuple(width_list)
        replace_dict['fake_width_declaration'] += \
            (' save fk_%s \n' * len(width_list)) % tuple(width_list)
        fk_w_defs = []
        # fk_W clamps |W| from below by |M*small_width_treatment|, keeping the sign.
        one_def = ' IF(%(w)s.ne.0d0) fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)'
        for m, w in mass_width:
            if w == 'zero':
                # A literal zero width gets a single fk_zero = 0d0 definition.
                if ' fk_zero = 0d0' not in fk_w_defs:
                    fk_w_defs.append(' fk_zero = 0d0')
                continue
            fk_w_defs.append(one_def %{'m':m, 'w':w})
        replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract IC line
        ic_line = self.get_ic_line(matrix_element)
        replace_dict['ic_line'] = ic_line

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        den_factor_line = self.get_den_factor_line(matrix_element)
        replace_dict['den_factor_line'] = den_factor_line

        # Extract ngraphs (number of amplitudes)
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract ndiags
        ndiags = len(matrix_element.get('diagrams'))
        replace_dict['ndiags'] = ndiags

        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] = \
             """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
             COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

        if proc_id:
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_iconfigs_lines'] += \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                            proc_id
        else:
            # Standard running
            # Set set_amp2_line
            replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        # Set the size of the wavefunction arrays: 18 components when the
        # model contains spin-3/2 or spin-2 particles (or is unknown), 6
        # otherwise.
        if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
            replace_dict['wavefunctionsize'] = 18
        else:
            replace_dict['wavefunctionsize'] = 6

        # Extract amp2 lines
        amp2_lines = self.get_amp2_lines(matrix_element, config_map, replace_dict)
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

        # The JAMP definition depends on the splitting order
        split_orders=matrix_element.get('processes')[0].get('split_orders')
        if len(split_orders)>0:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)
            replace_dict['select_configs_if'] = ' IF (CHOSEN_SO_CONFIGS(SQSOINDEX%(proc_id)s(M,N))) THEN' % replace_dict
            replace_dict['select_configs_endif'] = ' endif'
        else:
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            # additionally set the function to NOT be called
            replace_dict['select_configs_if'] = ''
            replace_dict['select_configs_endif'] = ''

        replace_dict['nAmpSplitOrders']=len(amp_orders)
        replace_dict['nSqAmpSplitOrders']=len(squared_orders)
        replace_dict['split_order_str_list']=str(split_orders)
        replace_dict['nSplitOrders']=max(len(split_orders),1)
        amp_so = self.get_split_orders_lines(
            [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
        sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
        replace_dict['ampsplitorders']='\n'.join(amp_so)
        replace_dict['sqsplitorders']='\n'.join(sqamp_so)

        # Extract JAMP lines
        # If no split_orders then artificially add one entry called 'ALL_ORDERS'
        jamp_lines, nb_temp = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=
                       split_orders if len(split_orders)>0 else ['ALL_ORDERS'])
        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
        replace_dict['nb_temp_jamp'] = nb_temp

        replace_dict['template_file'] = pjoin(_file_path, \
                         'iolibs/template_files/%s' % self.matrix_file)
        replace_dict['template_file2'] = pjoin(_file_path, \
                         'iolibs/template_files/split_orders_helping_functions.inc')

        # Spin states of the two initial particles.
        s1,s2 = matrix_element.get_spin_state_initial()
        replace_dict['nb_spin_state1'] = s1
        replace_dict['nb_spin_state2'] = s2

        if writer:
            file = open(replace_dict['template_file']).read()
            file = file % replace_dict
            # Add the split orders helper functions.
            file = file + '\n' + open(replace_dict['template_file2'])\
                                                   .read()%replace_dict
            # Write the file
            writer.writelines(file)
            # Count only actual calls (lines not starting with a '#' comment).
            return len([call for call in helas_calls if call.find('#') != 0]), ncolor
        else:
            replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor)
            return replace_dict
4443 4444 #=========================================================================== 4445 # write_auto_dsig_file 4446 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information.

        Side effects: updates self.proc_characteristic entries 'ninitial',
        'nexternal', 'max_n_matched_jets' and 'colored_pdgs'.

        Returns 0 for an empty matrix element, (replace_dict, context) when
        writer is None, and nothing after writing the file otherwise.
        """

        # Nothing to do for an empty matrix element.
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        self.proc_characteristic['ninitial'] = ninitial
        # Keep the running maximum over all exported subprocesses.
        self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

        # Add information relevant for MLM matching:
        # Maximum QCD power in all the contributions
        max_qcd_order = 0
        for diag in matrix_element.get('diagrams'):
            orders = diag.calculate_orders()
            if 'QCD' in orders:
                max_qcd_order = max(max_qcd_order,orders['QCD'])
        # Count massless colored final-state particles per process and keep
        # the maximum over processes.
        max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
            if proc.get('model').get_particle(id).get('mass')=='ZERO' and
               proc.get('model').get_particle(id).get('color')>1])
                                    for proc in matrix_element.get('processes'))
        # Maximum number of final state light jets to be matched
        self.proc_characteristic['max_n_matched_jets'] = max(
                               self.proc_characteristic['max_n_matched_jets'],
                               min(max_qcd_order,max_n_light_final_partons))

        # List of default pdgs to be considered for the CKKWl merging cut
        self.proc_characteristic['colored_pdgs'] = \
           sorted(list(set([abs(p.get('pdg_code')) for p in
             matrix_element.get('processes')[0].get('model').get('particles') if
                                                           p.get('color')>1])))

        # Only decays (1 initial particle) or collisions (2) are supported.
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            # Cuts are handled at the group level, not per subprocess.
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            replace_dict['cutsdone'] = ""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""
            replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

        # Grouped output provides its own good-helicity I/O routines.
        if not isinstance(self, ProcessExporterFortranMEGroup):
            ncomb=matrix_element.get_helicity_combinations()
            replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
        else:
            replace_dict['read_write_good_hel'] = ""

        context = {'read_write_good_hel':True}

        if writer:
            file = open(pjoin(_file_path, \
                          'iolibs/template_files/auto_dsig_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file, context=context)
        else:
            return replace_dict, context
4549 #=========================================================================== 4550 # write_coloramps_file 4551 #===========================================================================
4552 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4553 """Write the coloramps.inc file for MadEvent""" 4554 4555 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4556 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4557 (max(len(list(matrix_element.get('color_basis').keys())), 1), 4558 len(mapconfigs))) 4559 4560 4561 # Write the file 4562 writer.writelines(lines) 4563 4564 return True
4565 4566 #=========================================================================== 4567 # write_colors_file 4568 #===========================================================================
    def write_colors_file(self, writer, matrix_elements):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        matrix_elements may be a single HelasMatrixElement or a list of
        them; a single element is wrapped in a list.  Returns True after
        writing.
        """

        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                    for wf in d.get('wavefunctions')],[]) \
                               for d in me.get('diagrams')], []) \
                          for me in matrix_elements], []))

        # Same for external legs (including decay chains).
        leg_ids = set(sum([sum([sum([[l.get('id'),
                          model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                     for l in p.get_legs_with_decays()], []) \
                                for p in me.get('processes')], []) \
                           for me in matrix_elements], []))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # Build the Fortran if/else-if chain mapping pdg code -> color rep.
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
            get_color=%d
            return
            """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c           This is dummy particle used in multiparticle vertices
            get_color=2
            return
            """ % model.get_first_non_pdg()
        # Fallback branch: report the unknown pdg code at run time.
        lines += """else
            write(*,*)'Error: No color given for pdg ',ipdg
            get_color=0
            return
            endif
            end
            """

        # Write the file
        writer.writelines(lines)

        return True
4625 4626 #=========================================================================== 4627 # write_config_nqcd_file 4628 #===========================================================================
4629 - def write_config_nqcd_file(self, writer, nqcd_list):
4630 """Write the config_nqcd.inc with the number of QCD couplings 4631 for each config""" 4632 4633 lines = [] 4634 for iconf, n in enumerate(nqcd_list): 4635 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4636 4637 # Write the file 4638 writer.writelines(lines) 4639 4640 return True
4641 4642 #=========================================================================== 4643 # write_maxconfigs_file 4644 #===========================================================================
4645 - def write_maxconfigs_file(self, writer, matrix_elements):
4646 """Write the maxconfigs.inc file for MadEvent""" 4647 4648 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4649 maxconfigs = max([me.get_num_configs() for me in \ 4650 matrix_elements.get('matrix_elements')]) 4651 else: 4652 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4653 4654 lines = "integer lmaxconfigs\n" 4655 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4656 4657 # Write the file 4658 writer.writelines(lines) 4659 4660 return True
4661 4662 #=========================================================================== 4663 # read_write_good_hel 4664 #===========================================================================
4665 - def read_write_good_hel(self, ncomb):
4666 """return the code to read/write the good_hel common_block""" 4667 4668 convert = {'ncomb' : ncomb} 4669 output = """ 4670 subroutine write_good_hel(stream_id) 4671 implicit none 4672 integer stream_id 4673 INTEGER NCOMB 4674 PARAMETER ( NCOMB=%(ncomb)d) 4675 LOGICAL GOODHEL(NCOMB) 4676 INTEGER NTRY 4677 common/BLOCK_GOODHEL/NTRY,GOODHEL 4678 write(stream_id,*) GOODHEL 4679 return 4680 end 4681 4682 4683 subroutine read_good_hel(stream_id) 4684 implicit none 4685 include 'genps.inc' 4686 integer stream_id 4687 INTEGER NCOMB 4688 PARAMETER ( NCOMB=%(ncomb)d) 4689 LOGICAL GOODHEL(NCOMB) 4690 INTEGER NTRY 4691 common/BLOCK_GOODHEL/NTRY,GOODHEL 4692 read(stream_id,*) GOODHEL 4693 NTRY = MAXTRIES + 1 4694 return 4695 end 4696 4697 subroutine init_good_hel() 4698 implicit none 4699 INTEGER NCOMB 4700 PARAMETER ( NCOMB=%(ncomb)d) 4701 LOGICAL GOODHEL(NCOMB) 4702 INTEGER NTRY 4703 INTEGER I 4704 4705 do i=1,NCOMB 4706 GOODHEL(I) = .false. 4707 enddo 4708 NTRY = 0 4709 end 4710 4711 integer function get_maxsproc() 4712 implicit none 4713 get_maxsproc = 1 4714 return 4715 end 4716 4717 """ % convert 4718 4719 return output
4720 4721 #=========================================================================== 4722 # write_config_subproc_map_file 4723 #===========================================================================
4724 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4725 """Write a dummy config_subproc.inc file for MadEvent""" 4726 4727 lines = [] 4728 4729 for iconfig in range(len(s_and_t_channels)): 4730 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4731 (iconfig + 1)) 4732 4733 # Write the file 4734 writer.writelines(lines) 4735 4736 return True
4737 4738 #=========================================================================== 4739 # write_configs_file 4740 #===========================================================================
4741 - def write_configs_file(self, writer, matrix_element):
4742 """Write the configs.inc file for MadEvent""" 4743 4744 # Extract number of external particles 4745 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4746 4747 model = matrix_element.get('processes')[0].get('model') 4748 configs = [(i+1, d) for (i, d) in \ 4749 enumerate(matrix_element.get('diagrams'))] 4750 mapconfigs = [c[0] for c in configs] 4751 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4752 [[c[1]] for c in configs], 4753 mapconfigs, 4754 nexternal, ninitial, 4755 model)
4756 4757 #=========================================================================== 4758 # write_run_configs_file 4759 #===========================================================================
4760 - def write_run_config_file(self, writer):
4761 """Write the run_configs.inc file for MadEvent""" 4762 4763 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4764 4765 if self.proc_characteristic['loop_induced']: 4766 job_per_chan = 1 4767 else: 4768 job_per_chan = 5 4769 4770 if writer: 4771 text = open(path).read() % {'chanperjob': job_per_chan} 4772 writer.write(text) 4773 return True 4774 else: 4775 return {'chanperjob': job_per_chan}
4776 4777 #=========================================================================== 4778 # write_configs_file_from_diagrams 4779 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list): per config, the
        [s-channels, t-channels, t-strategy] triple and the number of QCD
        couplings.
        """

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # Minimal maximal-vertex size over all configs; configs containing
        # larger vertices are skipped below (only 3-point vertices are
        # representable in configs.inc).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used for multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    # Placeholder shared list, resized once the real length
                    # is known (see empty_verts[:] assignment below).
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # Pass to ping-pong strategy for t-channel for 3 or more T-channel
            # vertices; this is directly related to a change in genps.f
            tstrat = self.opt.get('t_strategy', 0)
            tchannels, tchannels_strategy = ProcessExporterFortranME.reorder_tchannels(tchannels, tstrat, self.model)

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels, tchannels_strategy])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondence between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            lines.append("data tstrategy(%d)/%d/" % (nconfigs, tchannels_strategy))
            # Number of QCD couplings in this diagram, taken from the first
            # contributing subprocess (0 if no QCD order is present).
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                if verts in schannels:
                    # s-channel entry: one vertex per subprocess, pick the
                    # first non-empty one for the topology.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess (0 when the
                    # subprocess does not contribute to this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list
    #===========================================================================
    # reorder t-channels
    #===========================================================================

    #ordering = 0
    @staticmethod
    def reorder_tchannels(tchannels, tstrat, model):
        """Choose and apply a t-channel ordering strategy.

        Returns (possibly reordered tchannels, strategy code), where the
        code is written to configs.inc (tstrategy) and must stay in sync
        with genps.f:
             2 -> keep the original ordering,
             1 -> flip-side ordering (reorder_tchannels_flipside),
            -1 -> ping-pong ordering starting from the other beam,
            -2 -> ping-pong ordering (reorder_tchannels_pingpong).
        tstrat forces a strategy; 0 lets the mass-based heuristic below
        decide.
        """
        # No need to modify anything with 2 or fewer T-channel vertices.
        # Note that len(tchannels) counts vertices (one more vertex compared
        # to the number of T propagators).
        #ProcessExporterFortranME.ordering +=1
        if len(tchannels) < 3 or tstrat == 2 or not model:
            return tchannels, 2
        elif tstrat == 1:
            # User-forced flip-side ordering.
            return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
        elif tstrat == -2:
            # User-forced ping-pong ordering.
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
        elif tstrat == -1:
            # User-forced ping-pong ordering starting from the other side.
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels, 1), -1
        elif len(tchannels) < 4:
            # Exactly two T propagators: decide from which end carries a
            # massless propagator, breaking ties on leg numbering.
            first = tchannels[0]['legs'][1]['number']
            t1 = tchannels[0]['legs'][-1]['id']
            last = tchannels[-1]['legs'][1]['number']
            t2 = tchannels[-1]['legs'][0]['id']
            m1 = model.get_particle(t1).get('mass') == 'ZERO'
            m2 = model.get_particle(t2).get('mass') == 'ZERO'
            if m2 and not m1:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            elif m1 and not m2:
                return tchannels, 2
            elif first < last:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            else:
                return tchannels, 2
        else:
            # Three or more T propagators: also inspect the second propagator
            # from each end (t12/t22) to pick between keep/flip/ping-pong.
            first = tchannels[0]['legs'][1]['number']
            t1 = tchannels[0]['legs'][-1]['id']
            last = tchannels[-1]['legs'][1]['number']
            t2 = tchannels[-1]['legs'][0]['id']
            m1 = model.get_particle(t1).get('mass') == 'ZERO'
            m2 = model.get_particle(t2).get('mass') == 'ZERO'

            t12 = tchannels[1]['legs'][-1]['id']
            m12 = model.get_particle(t12).get('mass') == 'ZERO'
            t22 = tchannels[-2]['legs'][0]['id']
            m22 = model.get_particle(t22).get('mass') == 'ZERO'
            if m2 and not m1:
                if m22:
                    return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
                else:
                    return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            elif m1 and not m2:
                if m12:
                    return tchannels, 2
                else:
                    return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            elif m1 and m2 and len(tchannels) == 4 and not m12: # 3 T propa
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            # This case seems quite sensitive; we tested method 2 specifically
            # and this was not helping in general.
            elif not m1 and not m2 and len(tchannels) == 4 and m12:
                if first < last:
                    return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
                return tchannels, 2
            else:
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
    @staticmethod
    def reorder_tchannels_flipside(tchannels):
        """change the tchannel ordering to start from the other beam side.
        assume ninitial == 2

        NOTE: consumes the input list (vertices are popped off it).

        We assume that we receive something like this

        1 ----- X ------- -2
                |
                | (-X)
                |
                X -------- 4
                |
                | (-X-1)
                |
                X --------- -1

                X---------- 3
                |
                | (-N+2)
                |
                X --------- L
                |
                | (-N+1)
                |
        -N ----- X ------- P

        coded as
        (1 -2 > -X) (-X 4 > -X-1) (-X-1 -1 > -X-2) ...
        ((-N+3) 3 > (-N+2)) ((-n+2) L > (-n+1)) ((-n+1) P > -N)

        we want to convert this as:
        -N ----- X ------- -2
                |
                | (-N+1)
                |
                X -------- 4
                |
                | (-N+2)
                |
                X --------- -1

                X---------- 3
                |
                | (-X-1)
                |
                X --------- L
                |
                | (-X)
                |
        2 ----- X ------- P

        coded as
        ( 2 P > -X) (-X L > -X-1) (-X-1 3 > -X-2)... (-X-L -2 > -N)
        """

        # No need to modify anything with 1 or fewer T-channel vertices.
        # Note that len(tchannels) counts vertices (one more than T propagators).
        if len(tchannels) < 2:
            return tchannels

        out = []
        oldid2new = {}   # old propagator/leg number -> new number

        # initialisation
        # id of the first T-channel (-X); new propagator numbers count down
        # from here as we walk the chain from the other end.
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Set up the last vertex to reference the second beam:
        # -N needs to be relabelled to 2 (and beam 1 takes over -N).
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = 2
        oldid2new[1] = initialid

        i = 0
        while tchannels:
            # Walk the chain from the bottom (last vertex first).
            old_vert = tchannels.pop()

            # copy the vertex/leglist to avoid side effects
            new_vert = base_objects.Vertex(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # vertex taken from the bottom: we have
            # (-N+1 X > -N) and we need to flip it to pass to
            # -N X > -N+1 (and then relabel -N and -N+1)
            legs = new_vert['legs'] # shortcut
            id1 = legs[0]['number']
            id2 = legs[1]['number']
            id3 = legs[2]['number']
            # to be secure we also support (X -N+1 > -N)
            if id3 == id2 -1 and id1 !=1:
                legs[0], legs[1] = legs[1], legs[0]
            # flipping side: incoming and outgoing propagator swap roles
            legs[0], legs[2] = legs[2], legs[0]

            # the only new relabelling is the last element of the list,
            # always, thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            # pass to new convention for leg numbering:
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # prepare next iteration
            propa_id -=1
            i +=1

        return out
5104 5105 @staticmethod
    def reorder_tchannels_pingpong(tchannels, id=2):
        """Change the t-channel ordering to pass to a ping-pong strategy.
        Assumes ninitial == 2.

        We assume that we receive something like this

        1 ----- X ------- -2
                |
                | (-X)
                |
                X -------- 4
                |
                | (-X-1)
                |
                X --------- -1

                X---------- 3
                |
                | (-N+2)
                |
                X --------- L
                |
                | (-N+1)
                |
       -N ----- X ------- P

        coded as
        (1 -2 > -X) (-X 4 > -X-1) (-X-1 -1 > -X-2) ...
        ((-N+3) 3 > (-N+2)) ((-n+2) L > (-n+1)) ((-n+1) P > -N)

        we want to convert this as:
        1 ----- X ------- -2
                |
                | (-X)
                |
                X -------- 4
                |
                | (-X-2)
                |
                X --------- -1

                X---------- 3
                |
                | (-X-3)
                |
                X --------- L
                |
                | (-X-1)
                |
        2 ----- X ------- P

        coded as
        (1 -2 > -X) (2 P > -X-1) (-X 4 > -X-2) (-X-1 L > -X-3) ...

        `id` (1 or 2) selects which end of the chain the ping-pong starts
        from; with id == 2 the first vertex popped is the top one.
        WARNING: the input list `tchannels` is consumed (emptied) by this
        routine; the reordered vertices are returned as a new list of
        copied vertices.
        """

        # No need to modify anything if 1 or less t-channel.
        # Note that this counts the number of vertices (one more vertex
        # compared to the number of true t-channel propagators).
        if len(tchannels) < 2:
            return tchannels

        out = []          # reordered (copied) vertices
        oldid2new = {}    # old propagator number -> new propagator number

        # initialisation
        # id of the first T-channel (-X)
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Setup the last vertex to reference the chosen beam id:
        # -N is relabelled to `id` (i.e. the second incoming beam).
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = id



        i = 0
        while tchannels:
            # ping-pong by taking the first/last element in alternation;
            # id == 1 starts from the bottom, id == 2 from the top
            if id == 2:
                if i % 2 == 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()
            else:
                if i % 2 != 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()

            # copy the vertex / leglist to avoid side effects on the input
            new_vert = base_objects.Vertex(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # if the vertex is taken from the bottom we have
            # (-N+1 X > -N): we need to flip it to pass to
            # (-N X > -N+1) (and then relabel -N and -N+1).
            # To be secure we also support (X -N+1 > -N).
            if (i % 2 == 1 and id == 2) or (i % 2 == 0 and id == 1):
                legs = new_vert['legs']  # shortcut
                id1 = legs[0]['number']
                id2 = legs[1]['number']
                if id1 > id2:
                    # (X -N+1 > -N) layout: bring the propagator leg first
                    legs[0], legs[1] = legs[1], legs[0]
                    # NOTE(review): in this branch legs[0]/legs[2] are NOT
                    # swapped, unlike the unconditional side flip done in
                    # reorder_tchannels — presumably intentional; confirm.
                else:
                    # flipping side
                    legs[0], legs[2] = legs[2], legs[0]

            # the only new relabelling is the last element of the list,
            # always, thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            if i == 0 and id == 1:
                # first popped vertex is the bottom one: attach it to beam 2.
                # (`legs` is bound here since the flip branch above runs
                # whenever i % 2 == 0 and id == 1)
                legs[0]['number'] = 2

            # pass to the new convention for leg numbering:
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # prepare next iteration
            propa_id -= 1
            i += 1

        return out
5230 5231 5232 5233 5234 5235 #=========================================================================== 5236 # write_decayBW_file 5237 #===========================================================================
5238 - def write_decayBW_file(self, writer, s_and_t_channels):
5239 """Write the decayBW.inc file for MadEvent""" 5240 5241 lines = [] 5242 5243 booldict = {None: "0", True: "1", False: "2"} 5244 5245 for iconf, config in enumerate(s_and_t_channels): 5246 schannels = config[0] 5247 for vertex in schannels: 5248 # For the resulting leg, pick out whether it comes from 5249 # decay or not, as given by the onshell flag 5250 leg = vertex.get('legs')[-1] 5251 lines.append("data gForceBW(%d,%d)/%s/" % \ 5252 (leg.get('number'), iconf + 1, 5253 booldict[leg.get('onshell')])) 5254 5255 # Write the file 5256 writer.writelines(lines) 5257 5258 return True
5259 5260 #=========================================================================== 5261 # write_dname_file 5262 #===========================================================================
5263 - def write_dname_file(self, writer, dir_name):
5264 """Write the dname.mg file for MG4""" 5265 5266 line = "DIRNAME=%s" % dir_name 5267 5268 # Write the file 5269 writer.write(line + "\n") 5270 5271 return True
5272 5273 #=========================================================================== 5274 # write_driver 5275 #===========================================================================
5276 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
5277 """Write the SubProcess/driver.f file for MG4""" 5278 5279 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 5280 5281 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5282 card = 'Source/MODEL/MG5_param.dat' 5283 else: 5284 card = 'param_card.dat' 5285 # Requiring each helicity configuration to be probed by 10 points for 5286 # matrix element before using the resulting grid for MC over helicity 5287 # sampling. 5288 # We multiply this by 2 because each grouped subprocess is called at most 5289 # twice for each IMIRROR. 5290 replace_dict = {'param_card_name':card, 5291 'ncomb':ncomb, 5292 'hel_init_points':n_grouped_proc*10*2} 5293 if not v5: 5294 replace_dict['secondparam']=',.true.' 5295 else: 5296 replace_dict['secondparam']='' 5297 5298 if writer: 5299 text = open(path).read() % replace_dict 5300 writer.write(text) 5301 return True 5302 else: 5303 return replace_dict
5304 5305 #=========================================================================== 5306 # write_addmothers 5307 #===========================================================================
5308 - def write_addmothers(self, writer):
5309 """Write the SubProcess/addmothers.f""" 5310 5311 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5312 5313 text = open(path).read() % {'iconfig': 'diag_number'} 5314 writer.write(text) 5315 5316 return True
5317 5318 5319 #=========================================================================== 5320 # write_combine_events 5321 #===========================================================================
5322 - def write_combine_events(self, writer, nb_proc=100):
5323 """Write the SubProcess/driver.f file for MG4""" 5324 5325 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 5326 5327 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5328 card = 'Source/MODEL/MG5_param.dat' 5329 else: 5330 card = 'param_card.dat' 5331 5332 #set maxpup (number of @X in the process card) 5333 5334 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 5335 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 5336 writer.write(text) 5337 5338 return True
5339 5340 5341 #=========================================================================== 5342 # write_symmetry 5343 #===========================================================================
5344 - def write_symmetry(self, writer, v5=True):
5345 """Write the SubProcess/driver.f file for ME""" 5346 5347 5348 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 5349 5350 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5351 card = 'Source/MODEL/MG5_param.dat' 5352 else: 5353 card = 'param_card.dat' 5354 5355 if v5: 5356 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 5357 else: 5358 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 5359 5360 if writer: 5361 text = open(path).read() 5362 text = text % replace_dict 5363 writer.write(text) 5364 return True 5365 else: 5366 return replace_dict
5367 5368 5369 5370 #=========================================================================== 5371 # write_iproc_file 5372 #===========================================================================
5373 - def write_iproc_file(self, writer, me_number):
5374 """Write the iproc.dat file for MG4""" 5375 line = "%d" % (me_number + 1) 5376 5377 # Write the file 5378 for line_to_write in writer.write_line(line): 5379 writer.write(line_to_write) 5380 return True
5381 5382 #=========================================================================== 5383 # write_mg_sym_file 5384 #===========================================================================
5385 - def write_mg_sym_file(self, writer, matrix_element):
5386 """Write the mg.sym file for MadEvent.""" 5387 5388 lines = [] 5389 5390 # Extract process with all decays included 5391 final_legs = [leg for leg in matrix_element.get('processes')[0].get_legs_with_decays() if leg.get('state') == True] 5392 5393 ninitial = len([leg for leg in matrix_element.get('processes')[0].get('legs') if leg.get('state') == False]) 5394 5395 identical_indices = {} 5396 5397 # Extract identical particle info 5398 for i, leg in enumerate(final_legs): 5399 if leg.get('id') in identical_indices: 5400 identical_indices[leg.get('id')].append(\ 5401 i + ninitial + 1) 5402 else: 5403 identical_indices[leg.get('id')] = [i + ninitial + 1] 5404 5405 # Remove keys which have only one particle 5406 for key in list(identical_indices.keys()): 5407 if len(identical_indices[key]) < 2: 5408 del identical_indices[key] 5409 5410 # Write mg.sym file 5411 lines.append(str(len(list(identical_indices.keys())))) 5412 for key in identical_indices.keys(): 5413 lines.append(str(len(identical_indices[key]))) 5414 for number in identical_indices[key]: 5415 lines.append(str(number)) 5416 5417 # Write the file 5418 writer.writelines(lines) 5419 5420 return True
5421 5422 #=========================================================================== 5423 # write_mg_sym_file 5424 #===========================================================================
5425 - def write_default_mg_sym_file(self, writer):
5426 """Write the mg.sym file for MadEvent.""" 5427 5428 lines = "0" 5429 5430 # Write the file 5431 writer.writelines(lines) 5432 5433 return True
5434 5435 #=========================================================================== 5436 # write_ncombs_file 5437 #===========================================================================
5438 - def write_ncombs_file(self, writer, nexternal):
5439 """Write the ncombs.inc file for MadEvent.""" 5440 5441 # ncomb (used for clustering) is 2^nexternal 5442 file = " integer n_max_cl\n" 5443 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 5444 5445 # Write the file 5446 writer.writelines(file) 5447 5448 return True
5449 5450 #=========================================================================== 5451 # write_processes_file 5452 #===========================================================================
5453 - def write_processes_file(self, writer, subproc_group):
5454 """Write the processes.dat file with info about the subprocesses 5455 in this group.""" 5456 5457 lines = [] 5458 5459 for ime, me in \ 5460 enumerate(subproc_group.get('matrix_elements')): 5461 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 5462 ",".join(p.base_string() for p in \ 5463 me.get('processes')))) 5464 if me.get('has_mirror_process'): 5465 mirror_procs = [copy.copy(p) for p in me.get('processes')] 5466 for proc in mirror_procs: 5467 legs = copy.copy(proc.get('legs_with_decays')) 5468 legs.insert(0, legs.pop(1)) 5469 proc.set("legs_with_decays", legs) 5470 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 5471 mirror_procs)) 5472 else: 5473 lines.append("mirror none") 5474 5475 # Write the file 5476 writer.write("\n".join(lines)) 5477 5478 return True
5479 5480 #=========================================================================== 5481 # write_symswap_file 5482 #===========================================================================
5483 - def write_symswap_file(self, writer, ident_perms):
5484 """Write the file symswap.inc for MG4 by comparing diagrams using 5485 the internal matrix element value functionality.""" 5486 5487 lines = [] 5488 5489 # Write out lines for symswap.inc file (used to permute the 5490 # external leg momenta 5491 for iperm, perm in enumerate(ident_perms): 5492 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 5493 (iperm+1, ",".join([str(i+1) for i in perm]))) 5494 lines.append("data nsym/%d/" % len(ident_perms)) 5495 5496 # Write the file 5497 writer.writelines(lines) 5498 5499 return True
5500 5501 #=========================================================================== 5502 # write_symfact_file 5503 #===========================================================================
5504 - def write_symfact_file(self, writer, symmetry):
5505 """Write the files symfact.dat for MG4 by comparing diagrams using 5506 the internal matrix element value functionality.""" 5507 5508 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 5509 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 5510 # Write out lines for symswap.inc file (used to permute the 5511 # external leg momenta 5512 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 5513 # Write the file 5514 writer.write('\n'.join(lines)) 5515 writer.write('\n') 5516 5517 return True
5518 5519 #=========================================================================== 5520 # write_symperms_file 5521 #===========================================================================
5522 - def write_symperms_file(self, writer, perms):
5523 """Write the symperms.inc file for subprocess group, used for 5524 symmetric configurations""" 5525 5526 lines = [] 5527 for iperm, perm in enumerate(perms): 5528 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 5529 (iperm+1, ",".join([str(i+1) for i in perm]))) 5530 5531 # Write the file 5532 writer.writelines(lines) 5533 5534 return True
5535 5536 #=========================================================================== 5537 # write_subproc 5538 #===========================================================================
5539 - def write_subproc(self, writer, subprocdir):
5540 """Append this subprocess to the subproc.mg file for MG4""" 5541 5542 # Write line to file 5543 writer.write(subprocdir + "\n") 5544 5545 return True
5546
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Fortran template used to write the grouped matrix<i>.f files
    matrix_file = "matrix_madevent_group_v4.inc"
    # output-mode tag — presumably consumed by the exporter machinery; confirm
    grouped_mode = 'madevent'
    # default exporter options (overrides of the parent class defaults)
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{},
                   'hel_recycling': True
                   }


    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls written, or 0 when the
        subprocess directory cannot be entered.
        NOTE(review): works via os.chdir and restores the original working
        directory before returning.
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        # Lazily pick up the model from the first process if not set yet
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # directory may already exist; warn and keep going
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0    # max number of diagrams over the group
        maxflows = 0   # max number of color flows over the group
        tot_calls = 0  # total number of helas calls

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb != me.get_helicity_combinations():
                raise MadGraph5Error("All grouped processes must share the "+\
                                     "same number of helicity configurations.")

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename), ncomb,
                          n_grouped_proc=len(matrix_elements),
                          v5=self.opt['v5_model'])

        # default helicity recycling to off when the option is absent
        try:
            self.proc_characteristic['hel_recycling'] = self.opt['hel_recycling']
        except KeyError:
            self.proc_characteristic['hel_recycling'] = False
            self.opt['hel_recycling'] = False

        for ime, matrix_element in \
                enumerate(matrix_elements):
            if self.opt['hel_recycling']:
                # helicity recycling: write matrix<i>_orig.f plus a
                # dedicated template_matrix<i>.f
                filename = 'matrix%d_orig.f' % (ime+1)
                replace_dict = self.write_matrix_element_v4(None,
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)
                calls, ncolor = replace_dict['return_value']
                tfile = open(replace_dict['template_file']).read()
                file = tfile % replace_dict
                # Add the split orders helper functions.
                file = file + '\n' + open(replace_dict['template_file2'])\
                                                           .read() % replace_dict
                # Write the file
                writer = writers.FortranWriter(filename)
                writer.writelines(file)

                #
                # write the dedicated template for helicity recycling
                #
                tfile = open(replace_dict['template_file'].replace('.inc', "_hel.inc")).read()
                file = tfile % replace_dict
                # Add the split orders helper functions.
                file = file + '\n' + open(replace_dict['template_file2'])\
                                                           .read() % replace_dict
                # Write the file — uniformcase disabled, presumably to keep
                # the template's mixed-case tags intact; confirm
                writer = writers.FortranWriter('template_matrix%d.f' % (ime+1))
                writer.uniformcase = False
                writer.writelines(file)

            else:
                filename = 'matrix%d.f' % (ime+1)
                calls, ncolor = \
                    self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams (skipped when output option noeps == 'True')
            if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
                filename = "matrix%d.ps" % (ime+1)
                plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                    get('diagrams'),
                                                  filename,
                                                  model = \
                                                    matrix_element.get('processes')[0].\
                                                    get('model'),
                                                  amplitude=True)
                logger.info("Generating Feynman diagrams for " + \
                            matrix_element.get('processes')[0].nice_string())
                plot.draw()

        # Extract number of external particles
        # (note: uses the last matrix_element of the loop above)
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ..., dn], ...] where 1,2,...,n is the subprocess number.
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # check consistency: configurations marked identical (negative
        # symmetry factor) must carry the same QCD power
        for i, sym_fact in enumerate(symmetry):

            if sym_fact >= 0:
                continue
            if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]:
                # NOTE(review): the debug print uses abs(sym_fact), not
                # abs(sym_fact)-1 as the check above — possible off-by-one
                # (IndexError risk) in the log call; confirm
                misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)])
                raise Exception("identical diagram with different QCD powwer")


        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir, subprocdir))

        # import nexternal/leshouche in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5845 5846 #=========================================================================== 5847 # write_super_auto_dsig_file 5848 #===========================================================================
5849 - def write_super_auto_dsig_file(self, writer, subproc_group):
5850 """Write the auto_dsig.f file selecting between the subprocesses 5851 in subprocess group mode""" 5852 5853 replace_dict = {} 5854 5855 # Extract version number and date from VERSION file 5856 info_lines = self.get_mg5_info_lines() 5857 replace_dict['info_lines'] = info_lines 5858 5859 matrix_elements = subproc_group.get('matrix_elements') 5860 5861 # Extract process info lines 5862 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5863 matrix_elements]) 5864 replace_dict['process_lines'] = process_lines 5865 5866 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5867 replace_dict['nexternal'] = nexternal 5868 5869 replace_dict['nsprocs'] = 2*len(matrix_elements) 5870 5871 # Generate dsig definition line 5872 dsig_def_line = "DOUBLE PRECISION " + \ 5873 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5874 range(len(matrix_elements))]) 5875 replace_dict["dsig_def_line"] = dsig_def_line 5876 5877 # Generate dsig process lines 5878 call_dsig_proc_lines = [] 5879 for iproc in range(len(matrix_elements)): 5880 call_dsig_proc_lines.append(\ 5881 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! 
%(proc)s" % \ 5882 {"num": iproc + 1, 5883 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5884 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5885 5886 ncomb=matrix_elements[0].get_helicity_combinations() 5887 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5888 5889 s1,s2 = matrix_elements[0].get_spin_state_initial() 5890 replace_dict['nb_spin_state1'] = s1 5891 replace_dict['nb_spin_state2'] = s2 5892 5893 printzeroamp = [] 5894 for iproc in range(len(matrix_elements)): 5895 printzeroamp.append(\ 5896 " call print_zero_amp_%i()" % ( iproc + 1)) 5897 replace_dict['print_zero_amp'] = "\n".join(printzeroamp) 5898 5899 5900 if writer: 5901 file = open(pjoin(_file_path, \ 5902 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5903 file = file % replace_dict 5904 5905 # Write the file 5906 writer.writelines(file) 5907 else: 5908 return replace_dict
5909 5910 #=========================================================================== 5911 # write_mirrorprocs 5912 #===========================================================================
5913 - def write_mirrorprocs(self, writer, subproc_group):
5914 """Write the mirrorprocs.inc file determining which processes have 5915 IS mirror process in subprocess group mode.""" 5916 5917 lines = [] 5918 bool_dict = {True: '.true.', False: '.false.'} 5919 matrix_elements = subproc_group.get('matrix_elements') 5920 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5921 (len(matrix_elements), 5922 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5923 me in matrix_elements]))) 5924 # Write the file 5925 writer.writelines(lines)
5926 5927 #=========================================================================== 5928 # write_addmothers 5929 #===========================================================================
5930 - def write_addmothers(self, writer):
5931 """Write the SubProcess/addmothers.f""" 5932 5933 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5934 5935 text = open(path).read() % {'iconfig': 'lconfig'} 5936 writer.write(text) 5937 5938 return True
5939 5940 5941 #=========================================================================== 5942 # write_coloramps_file 5943 #===========================================================================
5944 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5945 matrix_elements):
5946 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5947 5948 # Create a map from subprocess (matrix element) to a list of 5949 # the diagrams corresponding to each config 5950 5951 lines = [] 5952 5953 subproc_to_confdiag = {} 5954 for config in diagrams_for_config: 5955 for subproc, diag in enumerate(config): 5956 try: 5957 subproc_to_confdiag[subproc].append(diag) 5958 except KeyError: 5959 subproc_to_confdiag[subproc] = [diag] 5960 5961 for subproc in sorted(subproc_to_confdiag.keys()): 5962 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5963 matrix_elements[subproc], 5964 subproc + 1)) 5965 5966 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5967 (maxflows, 5968 len(diagrams_for_config), 5969 len(matrix_elements))) 5970 5971 # Write the file 5972 writer.writelines(lines) 5973 5974 return True
5975 5976 #=========================================================================== 5977 # write_config_subproc_map_file 5978 #===========================================================================
5979 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5980 """Write the config_subproc_map.inc file for subprocess groups""" 5981 5982 lines = [] 5983 # Output only configs that have some corresponding diagrams 5984 iconfig = 0 5985 for config in config_subproc_map: 5986 if set(config) == set([0]): 5987 continue 5988 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5989 (iconfig + 1, len(config), 5990 ",".join([str(i) for i in config]))) 5991 iconfig += 1 5992 # Write the file 5993 writer.writelines(lines) 5994 5995 return True
5996 5997 #=========================================================================== 5998 # read_write_good_hel 5999 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """Return the Fortran source (as one string) implementing the
        write_good_hel / read_good_hel / init_good_hel routines and the
        get_maxsproc function for the good_hel common block.

        ncomb: number of helicity combinations; substituted into the
        NCOMB parameter of each routine.
        """

        convert = {'ncomb' : ncomb}

        # Raw Fortran template; %(ncomb)d is substituted below.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I,1) = .false.
        GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
6061 6062 6063 6064 #=========================================================================== 6065 # write_configs_file 6066 #===========================================================================
6067 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6068 """Write the configs.inc file with topology information for a 6069 subprocess group. Use the first subprocess with a diagram for each 6070 configuration.""" 6071 6072 matrix_elements = subproc_group.get('matrix_elements') 6073 model = matrix_elements[0].get('processes')[0].get('model') 6074 6075 diagrams = [] 6076 config_numbers = [] 6077 for iconfig, config in enumerate(diagrams_for_config): 6078 # Check if any diagrams correspond to this config 6079 if set(config) == set([0]): 6080 continue 6081 subproc_diags = [] 6082 for s,d in enumerate(config): 6083 if d: 6084 subproc_diags.append(matrix_elements[s].\ 6085 get('diagrams')[d-1]) 6086 else: 6087 subproc_diags.append(None) 6088 diagrams.append(subproc_diags) 6089 config_numbers.append(iconfig + 1) 6090 6091 # Extract number of external particles 6092 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6093 6094 return len(diagrams), \ 6095 self.write_configs_file_from_diagrams(writer, diagrams, 6096 config_numbers, 6097 nexternal, ninitial, 6098 model)
6099 6100 #=========================================================================== 6101 # write_run_configs_file 6102 #===========================================================================
6103 - def write_run_config_file(self, writer):
6104 """Write the run_configs.inc file for MadEvent""" 6105 6106 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 6107 if self.proc_characteristic['loop_induced']: 6108 job_per_chan = 1 6109 else: 6110 job_per_chan = 2 6111 text = open(path).read() % {'chanperjob':job_per_chan} 6112 writer.write(text) 6113 return True
6114 6115 6116 #=========================================================================== 6117 # write_leshouche_file 6118 #===========================================================================
6119 - def write_leshouche_file(self, writer, subproc_group):
6120 """Write the leshouche.inc file for MG4""" 6121 6122 all_lines = [] 6123 6124 for iproc, matrix_element in \ 6125 enumerate(subproc_group.get('matrix_elements')): 6126 all_lines.extend(self.get_leshouche_lines(matrix_element, 6127 iproc)) 6128 # Write the file 6129 writer.writelines(all_lines) 6130 return True
6131 6132
    def finalize(self,*args, **opts):
        """Run the parent class finalization, then record that this export
        used grouped subprocesses.

        All positional and keyword arguments are forwarded untouched to the
        parent finalize().
        """
        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        # ensure that the grouping information is on the correct value;
        # set after the super() call so it cannot be overwritten by it
        self.proc_characteristic['grouped_matrix'] = True
6138 6139 6140 #=============================================================================== 6141 # UFO_model_to_mg4 6142 #=============================================================================== 6143 6144 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type names used when emitting multiple-precision declarations.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
6161 - def __init__(self, model, output_path, opt=None):
6162 """ initialization of the objects """ 6163 6164 self.model = model 6165 self.model_name = model['name'] 6166 self.dir_path = output_path 6167 6168 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 6169 'loop_induced': False} 6170 if opt: 6171 self.opt.update(opt) 6172 6173 self.coups_dep = [] # (name, expression, type) 6174 self.coups_indep = [] # (name, expression, type) 6175 self.params_dep = [] # (name, expression, type) 6176 self.params_indep = [] # (name, expression, type) 6177 self.params_ext = [] # external parameter 6178 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 6179 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
6180 6181
6183 """modify the parameter if some of them are identical up to the case""" 6184 6185 lower_dict={} 6186 duplicate = set() 6187 keys = list(self.model['parameters'].keys()) 6188 keys.sort() 6189 for key in keys: 6190 for param in self.model['parameters'][key]: 6191 lower_name = param.name.lower() 6192 if not lower_name: 6193 continue 6194 try: 6195 lower_dict[lower_name].append(param) 6196 except KeyError as error: 6197 lower_dict[lower_name] = [param] 6198 else: 6199 duplicate.add(lower_name) 6200 logger.debug('%s is define both as lower case and upper case.' 6201 % lower_name) 6202 if not duplicate: 6203 return 6204 6205 re_expr = r'''\b(%s)\b''' 6206 to_change = [] 6207 change={} 6208 for value in duplicate: 6209 for i, var in enumerate(lower_dict[value]): 6210 to_change.append(var.name) 6211 new_name = '%s%s' % (var.name.lower(), 6212 ('__%d'%(i+1) if i>0 else '')) 6213 change[var.name] = new_name 6214 var.name = new_name 6215 6216 # Apply the modification to the map_CTcoup_CTparam of the model 6217 # if it has one (giving for each coupling the CT parameters whcih 6218 # are necessary and which should be exported to the model. 
6219 if hasattr(self.model,'map_CTcoup_CTparam'): 6220 for coup, ctparams in self.model.map_CTcoup_CTparam: 6221 for i, ctparam in enumerate(ctparams): 6222 try: 6223 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 6224 except KeyError: 6225 pass 6226 6227 replace = lambda match_pattern: change[match_pattern.groups()[0]] 6228 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 6229 6230 # change parameters 6231 for key in keys: 6232 if key == ('external',): 6233 continue 6234 for param in self.model['parameters'][key]: 6235 param.expr = rep_pattern.sub(replace, param.expr) 6236 6237 # change couplings 6238 for key in self.model['couplings'].keys(): 6239 for coup in self.model['couplings'][key]: 6240 coup.expr = rep_pattern.sub(replace, coup.expr) 6241 6242 # change mass/width 6243 for part in self.model['particles']: 6244 if str(part.get('mass')) in to_change: 6245 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 6246 if str(part.get('width')) in to_change: 6247 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
6248
    def refactorize(self, wanted_couplings = []):
        """modify the couplings to fit with MG4 convention

        Splits the model parameters into params_ext (external), params_dep
        (whose dict key mentions one of PS_dependent_key, i.e. aS/MU_R) and
        params_indep; splits the couplings likewise into coups_dep and
        coups_indep, keeping only the ones listed in wanted_couplings when
        that list is non-empty.
        NOTE: wanted_couplings has a mutable default but is only read here,
        never mutated, so the shared default list is harmless.
        """

        # Keep only separation in alphaS
        keys = list(self.model['parameters'].keys())
        keys.sort(key=len)
        for key in keys:
            # drop entries without a name
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                # key mentions aS or MU_R -> must be recomputed per PS point
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        # NOTE(review): this sorted `keys` list is recomputed but never used
        # by the loop below, which iterates the dict items directly; sorting
        # here has no effect on the output order.
        keys = list(self.model['couplings'].keys())
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related computation
        #Pass G in the independant list
        # NOTE(review): the membership test below presumably relies on the
        # parameter objects comparing equal to their name string -- confirm
        # against base_objects.ModelVariable.__eq__.
        if 'G' in self.params_dep:
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
            # G.expr = '2*cmath.sqrt(as*pi)'
            # self.params_indep.insert(0, self.params_dep.pop(index))
            # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append( base_objects.ModelVariable('aS', '0.138','real'))
            self.params_indep.append( base_objects.ModelVariable('G', '4.1643','real'))
6291 - def build(self, wanted_couplings = [], full=True):
6292 """modify the couplings to fit with MG4 convention and creates all the 6293 different files""" 6294 6295 self.pass_parameter_to_case_insensitive() 6296 self.refactorize(wanted_couplings) 6297 6298 # write the files 6299 if full: 6300 if wanted_couplings: 6301 # extract the wanted ct parameters 6302 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 6303 self.write_all()
6304 6305
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        format='fortran' returns a writers.FortranWriter; any other value
        returns a plain text file handle. In both cases a three-line banner
        ('comment' characters, a 'written by the UFO converter' line, and a
        closing banner) is written first. Caller is responsible for closing
        the returned handle.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
            # NOTE(review): the header is pushed through io.FileIO.writelines
            # directly, presumably to bypass FortranWriter's own line
            # formatting for the raw banner text -- confirm.
            write_class = io.FileIO

            write_class.writelines(fsock, comment * 77 + '\n')
            # pad the comment char to 6 columns before the banner text
            write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \
                                   {'comment': comment + (6 - len(comment)) * ' '})
            write_class.writelines(fsock, comment * 77 + '\n\n')
        else:
            fsock = open(file_path, 'w')
            fsock.writelines(comment * 77 + '\n')
            fsock.writelines('%(comment)s written by the UFO converter\n' % \
                             {'comment': comment + (6 - len(comment)) * ' '})
            fsock.writelines(comment * 77 + '\n\n')
        return fsock
6327 6328
    def write_all(self):
        """ write all the files

        Emits every MG4 model file in a fixed order. Expects refactorize()
        to have populated params_*/coups_* beforehand (build() guarantees
        this).
        """
        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            # multiple-precision variant of the internal parameter definitions
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()

        # All the standard files
        self.copy_standard_file()
6361 6362 ############################################################################ 6363 ## ROUTINE CREATING THE FILES ############################################ 6364 ############################################################################ 6365
6366 - def copy_standard_file(self):
6367 """Copy the standard files for the fortran model.""" 6368 6369 #copy the library files 6370 file_to_link = ['formats.inc','printout.f', \ 6371 'rw_para.f', 'testprog.f'] 6372 6373 for filename in file_to_link: 6374 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 6375 self.dir_path) 6376 6377 file = open(os.path.join(MG5DIR,\ 6378 'models/template_files/fortran/rw_para.f')).read() 6379 6380 includes=["include \'coupl.inc\'","include \'input.inc\'", 6381 "include \'model_functions.inc\'"] 6382 if self.opt['mp']: 6383 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 6384 # In standalone and madloop we do no use the compiled param card but 6385 # still parse the .dat one so we must load it. 6386 if self.opt['loop_induced']: 6387 #loop induced follow MadEvent way to handle the card. 6388 load_card = '' 6389 lha_read_filename='lha_read.f' 6390 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 6391 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 6392 lha_read_filename='lha_read_mp.f' 6393 elif self.opt['export_format'].startswith('standalone') \ 6394 or self.opt['export_format'] in ['madweight', 'plugin']\ 6395 or self.opt['export_format'].startswith('matchbox'): 6396 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 6397 lha_read_filename='lha_read.f' 6398 else: 6399 load_card = '' 6400 lha_read_filename='lha_read.f' 6401 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 6402 os.path.join(self.dir_path,'lha_read.f')) 6403 6404 file=file%{'includes':'\n '.join(includes), 6405 'load_card':load_card} 6406 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 6407 writer.writelines(file) 6408 writer.close() 6409 6410 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6411 or self.opt['loop_induced']: 6412 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 6413 self.dir_path + '/makefile') 6414 if 
self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 6415 path = pjoin(self.dir_path, 'makefile') 6416 text = open(path).read() 6417 text = text.replace('madevent','aMCatNLO') 6418 open(path, 'w').writelines(text) 6419 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 6420 'madloop','madloop_optimized', 'standalone_rw', 6421 'madweight','matchbox','madloop_matchbox', 'plugin']: 6422 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 6423 self.dir_path + '/makefile') 6424 #elif self.opt['export_format'] in []: 6425 #pass 6426 else: 6427 raise MadGraph5Error('Unknown format')
6428
    def create_coupl_inc(self):
        """ write coupl.inc

        In multiple-precision mode also writes mp_coupl.inc (names carrying
        self.mp_prefix) and mp_coupl_same_name.inc (same declarations, no
        prefix). Emits declarations plus common blocks for G/gal/MU_R/Nf,
        the particle masses and widths, all couplings, and -- when the
        complex mass scheme is active -- the CMASS_* variables.
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                              format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            # same header in multiple precision; written twice, once with
            # the mp prefix and once without
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':''})

        # Write the Mass definition/ common block
        # NOTE: sets, so the emitted order is not guaranteed to be stable
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
            if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                    ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                    ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                    ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
6548
6549 - def create_write_couplings(self):
6550 """ write the file coupl_write.inc """ 6551 6552 fsock = self.open('coupl_write.inc', format='fortran') 6553 6554 fsock.writelines("""write(*,*) ' Couplings of %s' 6555 write(*,*) ' ---------------------------------' 6556 write(*,*) ' '""" % self.model_name) 6557 def format(coupl): 6558 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
6559 6560 # Write the Couplings 6561 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 6562 fsock.writelines('\n'.join(lines)) 6563 6564
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares the real and complex model parameters and their common
        blocks (params_R / params_C), skipping G, MU_R, ZERO and any name
        already declared as a mass/width (or CMASS_*) in coupl.inc. In mp
        mode the same is written, prefixed, to mp_input.inc.
        NOTE(review): only *real* external parameters are added here;
        complex external parameters are not collected by this method.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # reject names handled elsewhere (strong coupling, scale, zero)
        # and names already declared as mass/width
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real' and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                    self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                    self.mp_prefix+p for p in complex_parameters])+'\n\n')
6621
6622 - def check_needed_param(self, param):
6623 """ Returns whether the parameter in argument is needed for this 6624 specific computation or not.""" 6625 6626 # If this is a leading order model or if there was no CT parameter 6627 # employed in this NLO model, one can directly return that the 6628 # parameter is needed since only CTParameters are filtered. 6629 if not hasattr(self, 'allCTparameters') or \ 6630 self.allCTparameters is None or self.usedCTparameters is None or \ 6631 len(self.allCTparameters)==0: 6632 return True 6633 6634 # We must allow the conjugate shorthand for the complex parameter as 6635 # well so we check wether either the parameter name or its name with 6636 # 'conjg__' substituted with '' is present in the list. 6637 # This is acceptable even if some parameter had an original name 6638 # including 'conjg__' in it, because at worst we export a parameter 6639 # was not needed. 6640 param = param.lower() 6641 cjg_param = param.replace('conjg__','',1) 6642 6643 # First make sure it is a CTparameter 6644 if param not in self.allCTparameters and \ 6645 cjg_param not in self.allCTparameters: 6646 return True 6647 6648 # Now check if it is in the list of CTparameters actually used 6649 return (param in self.usedCTparameters or \ 6650 cjg_param in self.usedCTparameters)
6651
6652 - def extract_needed_CTparam(self,wanted_couplings=[]):
6653 """ Extract what are the needed CT parameters given the wanted_couplings""" 6654 6655 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6656 # Setting these lists to none wil disable the filtering in 6657 # check_needed_param 6658 self.allCTparameters = None 6659 self.usedCTparameters = None 6660 return 6661 6662 # All CTparameters appearin in all CT couplings 6663 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6664 # Define in this class the list of all CT parameters 6665 self.allCTparameters=list(\ 6666 set(itertools.chain.from_iterable(allCTparameters))) 6667 6668 # All used CT couplings 6669 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6670 allUsedCTCouplings = [coupl for coupl in 6671 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6672 6673 # Now define the list of all CT parameters that are actually used 6674 self.usedCTparameters=list(\ 6675 set(itertools.chain.from_iterable([ 6676 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6677 ]))) 6678 6679 # Now at last, make these list case insensitive 6680 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6681 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6682
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                             ('mp_' if mp and not dp else ''), format='fortran')

        # Block 1: parameters computed once, guarded by readlha
        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix, param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        # Block 2: aS-dependent parameters, recomputed per event
        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix, param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        # Block 3: gal(1:2), the EW coupling used when writing out aqed;
        # its definition depends on which external parameter the model has
        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                                     gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                                     %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
            pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                                     gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                                     %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
            pass
        else:
            # neither scheme available: fall back to 1 and warn
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
                                     gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                                     %(mp_prefix)sgal(2) = 1e0_16
                """%{'mp_prefix':self.mp_prefix})
6766 6767
6768 - def create_couplings(self):
6769 """ create couplings.f and all couplingsX.f """ 6770 6771 nb_def_by_file = 25 6772 6773 self.create_couplings_main(nb_def_by_file) 6774 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6775 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6776 6777 for i in range(nb_coup_indep): 6778 # For the independent couplings, we compute the double and multiple 6779 # precision ones together 6780 data = self.coups_indep[nb_def_by_file * i: 6781 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6782 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6783 6784 for i in range(nb_coup_dep): 6785 # For the dependent couplings, we compute the double and multiple 6786 # precision ones in separate subroutines. 6787 data = self.coups_dep[nb_def_by_file * i: 6788 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6789 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6790 dp=True,mp=False) 6791 if self.opt['mp']: 6792 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6793 dp=False,mp=True)
6794 6795
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Writes four driver subroutines: coup() (full init, READLHA=.true.),
        update_as_param() (recompute only the aS-dependent couplings),
        update_as_param2(mu_r2,as2) (set MU_R/G/AS then delegate), and --
        in mp mode -- mp_update_as_param(). The actual coupX()/mp_coupX()
        workers are produced by create_couplings_part().
        """

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .true.
          include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        # call every alphaS-independent coupX()
        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        fsock.writelines("""subroutine update_as_param()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .false.""")
        fsock.writelines("""
          include \'intparam_definition.inc\'\n
        """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        # only the alphaS-dependent coupX() need re-evaluation here
        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

          implicit none
          double precision PI
          parameter (PI=3.141592653589793d0)
          double precision mu_r2, as2
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'""")
        fsock.writelines("""
          if (mu_r2.gt.0d0) MU_R = mu_r2
          G = SQRT(4.0d0*PI*AS2)
          AS = as2

          CALL UPDATE_AS_PARAM()
        """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            fsock.writelines("""subroutine mp_update_as_param()

          implicit none
          logical READLHA
          include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
        """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          include \'actualize_mp_ext_params.inc\'
          READLHA = .false.
          include \'mp_intparam_definition.inc\'\n
        """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6911
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                              nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

          implicit none
          include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
          double precision PI, ZERO
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include 'input.inc'
          include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
        """%self.mp_real_format)

        # one assignment per coupling, expression translated to Fortran by
        # the appropriate parser (double / multiple precision)
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                              self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix, coupling.name,
                                                self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
6949
    def create_model_functions_inc(self):
        """ Create model_functions.inc which contains the various declarations
        of auxiliary functions which might be used in the couplings expressions

        The functions listed in the filter below ship with the exporter by
        default; any extra function defined by the UFO model is declared as
        an additional double complex (and mp_ variant in mp mode).
        """

        additional_fct = []
        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            for fct in ufo_fct:
                # already handle by default
                if str(fct.name) not in ["complexconjugate", "re", "im", "sec",
                       "csc", "asec", "acsc", "theta_function", "cond",
                       "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot",
                       "grreglog","regsqrt"]:
                    additional_fct.append(fct.name)

        fsock = self.open('model_functions.inc', format='fortran')
        fsock.writelines("""double complex cond
          double complex condif
          double complex reglog
          double complex reglogp
          double complex reglogm
          double complex recms
          double complex arg
          double complex grreglog
          double complex regsqrt
          %s
          """ % "\n".join(["          double complex %s" % i for i in additional_fct]))

        if self.opt['mp']:
            # same declarations with the multiple-precision type and mp_ prefix
            fsock.writelines("""%(complex_mp_format)s mp_cond
          %(complex_mp_format)s mp_condif
          %(complex_mp_format)s mp_reglog
          %(complex_mp_format)s mp_reglogp
          %(complex_mp_format)s mp_reglogm
          %(complex_mp_format)s mp_recms
          %(complex_mp_format)s mp_arg
          %(complex_mp_format)s mp_grreglog
          %(complex_mp_format)s mp_regsqrt
          %(additional)s
          """ %\
          {"additional": "\n".join(["          %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]),
           'complex_mp_format':self.mp_complex_format
          })
6996
    def create_model_functions_def(self):
        """ Create model_functions.f which contains the various definitions 
        of auxiliary functions which might be used in the couplings expressions
        Add the functions.f functions for formfactors support

        Layout of the generated file:
          1. double-precision helper functions (cond, condif, recms, reglog,
             reglogp, reglogm, regsqrt, grreglog, arg);
          2. their multiple-precision twins when self.opt['mp'] is set;
          3. the model's Fortran/functions.f file, verbatim, if present;
          4. one Fortran function per UFO-defined python function (dp and mp).
        """

        fsock = self.open('model_functions.f', format='fortran')
        # --- 1. double-precision helpers --------------------------------------
        # NOTE(review): 'cond' returns its *truecase* branch when the condition
        # equals (0d0,0d0) -- this is the UFO convention, not C-style truthiness.
        # 'reglog' declares TWOPII but never uses it (harmless dead parameter).
        fsock.writelines("""double complex function cond(condition,truecase,falsecase)
          implicit none
          double complex condition,truecase,falsecase
          if(condition.eq.(0.0d0,0.0d0)) then
             cond=truecase
          else
             cond=falsecase
          endif
          end
          
          double complex function condif(condition,truecase,falsecase)
          implicit none
          logical condition
          double complex truecase,falsecase
          if(condition) then
             condif=truecase
          else
             condif=falsecase
          endif
          end
          
          double complex function recms(condition,expr)
          implicit none
          logical condition
          double complex expr
          if(condition)then
             recms=expr
          else
             recms=dcmplx(dble(expr))
          endif
          end
          
          double complex function reglog(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0)) then
             reglog=(0.0d0,0.0d0)
          else
             reglog=log(arg)
          endif
          end
          
          double complex function reglogp(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogp=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
                reglogp=log(arg) + TWOPII
             else
                reglogp=log(arg)
             endif
          endif
          end
          
          double complex function reglogm(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogm=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
                reglogm=log(arg) - TWOPII
             else
                reglogm=log(arg)
             endif
          endif
          end
          
          double complex function regsqrt(arg_in)
          implicit none
          double complex arg_in
          double complex arg
          arg=arg_in
          if(dabs(dimag(arg)).eq.0.0d0)then
             arg=dcmplx(dble(arg),0.0d0)
          endif
          if(dabs(dble(arg)).eq.0.0d0)then
             arg=dcmplx(0.0d0,dimag(arg))
          endif
          regsqrt=sqrt(arg)
          end
          
          double complex function grreglog(logsw,expr1_in,expr2_in)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex expr1_in,expr2_in
          double complex expr1,expr2
          double precision logsw
          double precision imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(dabs(dimag(expr1)).eq.0.0d0)then
             expr1=dcmplx(dble(expr1),0.0d0)
          endif
          if(dabs(dble(expr1)).eq.0.0d0)then
             expr1=dcmplx(0.0d0,dimag(expr1))
          endif
          if(dabs(dimag(expr2)).eq.0.0d0)then
             expr2=dcmplx(dble(expr2),0.0d0)
          endif
          if(dabs(dble(expr2)).eq.0.0d0)then
             expr2=dcmplx(0.0d0,dimag(expr2))
          endif
          if(expr1.eq.(0.0d0,0.0d0))then
             grreglog=(0.0d0,0.0d0)
          else
             imagexpr=dimag(expr1)*dimag(expr2)
             firstsheet=imagexpr.ge.0.0d0
             firstsheet=firstsheet.or.dble(expr1).ge.0.0d0
             firstsheet=firstsheet.or.dble(expr2).ge.0.0d0
             if(firstsheet)then
                grreglog=log(expr1)
             else
                if(dimag(expr1).gt.0.0d0)then
                   grreglog=log(expr1) - logsw*TWOPII
                else
                   grreglog=log(expr1) + logsw*TWOPII
                endif
             endif
          endif
          end
          
          double complex function arg(comnum)
          implicit none
          double complex comnum
          double complex iim
          iim = (0.0d0,1.0d0)
          if(comnum.eq.(0.0d0,0.0d0)) then
             arg=(0.0d0,0.0d0)
          else
             arg=log(comnum/abs(comnum))/iim
          endif
          end""")
        if self.opt['mp']:
            # --- 2. multiple-precision twins ----------------------------------
            # NOTE(review): the mp TWOPII constant uses 3.14169258... which
            # disagrees with pi=3.14159265... from the 4th decimal on -- confirm
            # against the upstream source before relying on this transcription.
            fsock.writelines("""
          
          %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
          implicit none
          %(complex_mp_format)s condition,truecase,falsecase
          if(condition.eq.(0.0e0_16,0.0e0_16)) then
             mp_cond=truecase
          else
             mp_cond=falsecase
          endif
          end
          
          %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
          implicit none
          logical condition
          %(complex_mp_format)s truecase,falsecase
          if(condition) then
             mp_condif=truecase
          else
             mp_condif=falsecase
          endif
          end
          
          %(complex_mp_format)s function mp_recms(condition,expr)
          implicit none
          logical condition
          %(complex_mp_format)s expr
          if(condition)then
             mp_recms=expr
          else
             mp_recms=cmplx(real(expr),kind=16)
          endif
          end
          
          %(complex_mp_format)s function mp_reglog(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16)) then
             mp_reglog=(0.0e0_16,0.0e0_16)
          else
             mp_reglog=log(arg)
          endif
          end
          
          %(complex_mp_format)s function mp_reglogp(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogp=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
                mp_reglogp=log(arg) + TWOPII
             else
                mp_reglogp=log(arg)
             endif
          endif
          end
          
          %(complex_mp_format)s function mp_reglogm(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogm=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
                mp_reglogm=log(arg) - TWOPII
             else
                mp_reglogm=log(arg)
             endif
          endif
          end
          
          %(complex_mp_format)s function mp_regsqrt(arg_in)
          implicit none
          %(complex_mp_format)s arg_in
          %(complex_mp_format)s arg
          arg=arg_in
          if(abs(imagpart(arg)).eq.0.0e0_16)then
             arg=cmplx(real(arg,kind=16),0.0e0_16)
          endif
          if(abs(real(arg,kind=16)).eq.0.0e0_16)then
             arg=cmplx(0.0e0_16,imagpart(arg))
          endif
          mp_regsqrt=sqrt(arg)
          end
          
          
          %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s expr1_in,expr2_in
          %(complex_mp_format)s expr1,expr2
          %(real_mp_format)s logsw
          %(real_mp_format)s imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(abs(imagpart(expr1)).eq.0.0e0_16)then
             expr1=cmplx(real(expr1,kind=16),0.0e0_16)
          endif
          if(abs(real(expr1,kind=16)).eq.0.0e0_16)then
             expr1=cmplx(0.0e0_16,imagpart(expr1))
          endif
          if(abs(imagpart(expr2)).eq.0.0e0_16)then
             expr2=cmplx(real(expr2,kind=16),0.0e0_16)
          endif
          if(abs(real(expr2,kind=16)).eq.0.0e0_16)then
             expr2=cmplx(0.0e0_16,imagpart(expr2))
          endif
          if(expr1.eq.(0.0e0_16,0.0e0_16))then
             mp_grreglog=(0.0e0_16,0.0e0_16)
          else
             imagexpr=imagpart(expr1)*imagpart(expr2)
             firstsheet=imagexpr.ge.0.0e0_16
             firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16
             firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16
             if(firstsheet)then
                mp_grreglog=log(expr1)
             else
                if(imagpart(expr1).gt.0.0e0_16)then
                   mp_grreglog=log(expr1) - logsw*TWOPII
                else
                   mp_grreglog=log(expr1) + logsw*TWOPII
                endif
             endif
          endif
          end
          
          %(complex_mp_format)s function mp_arg(comnum)
          implicit none
          %(complex_mp_format)s comnum
          %(complex_mp_format)s imm
          imm = (0.0e0_16,1.0e0_16)
          if(comnum.eq.(0.0e0_16,0.0e0_16)) then
             mp_arg=(0.0e0_16,0.0e0_16)
          else
             mp_arg=log(comnum/abs(comnum))/imm
          endif
          end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format})

        # --- 3. user-supplied form-factor functions ---------------------------
        #check for the file functions.f
        model_path = self.model.get('modelpath')
        if os.path.exists(pjoin(model_path,'Fortran','functions.f')):
            fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
            input = pjoin(model_path,'Fortran','functions.f')
            fsock.writelines(open(input).read())
            fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

        # --- 4. UFO-defined python functions translated to Fortran ------------
        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
            # 'done' deduplicates functions whose lower-cased names collide.
            done = []
            for fct in ufo_fct:
                # already handle by default
                if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                        "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg",
                        "grreglog","regsqrt"] + done:
                    done.append(str(fct.name.lower()))
                    ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s

          return
          end
          """
                    str_fct = self.p_to_f.parse(fct.expr)
                    # Names the parser flagged as needing a local declaration.
                    if not self.p_to_f.to_define:
                        definitions = []
                    else:
                        definitions=[]
                        for d in self.p_to_f.to_define:
                            if d == 'pi':
                                definitions.append(' double precision pi')
                                definitions.append(' data pi /3.1415926535897932d0/')
                            else:
                                definitions.append(' double complex %s' % d)

                    text = ufo_fct_template % {
                        'name': fct.name,
                        'args': ", ".join(fct.arguments),
                        'fct': str_fct,
                        'definitions': '\n'.join(definitions)
                        }

                    fsock.writelines(text)
            if self.opt['mp']:
                fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
                # NOTE(review): unlike the dp loop above, this filter compares
                # fct.name without .lower() and without the 'done' dedup list --
                # confirm whether the asymmetry is intentional.
                for fct in ufo_fct:
                    # already handle by default
                    if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif",
                            "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg",
                            "grreglog","regsqrt"]:
                        ufo_fct_template = """
          %(complex_mp_format)s function mp_%(name)s(mp__%(args)s)
          implicit none
          %(complex_mp_format)s mp__%(args)s
          %(definitions)s
          mp_%(name)s = %(fct)s

          return
          end
          """
                        str_fct = self.mp_p_to_f.parse(fct.expr)
                        if not self.mp_p_to_f.to_define:
                            definitions = []
                        else:
                            definitions=[]
                            for d in self.mp_p_to_f.to_define:
                                if d == 'pi':
                                    definitions.append(' %s mp__pi' % self.mp_real_format)
                                    definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/')
                                else:
                                    definitions.append(' %s mp_%s' % (self.mp_complex_format,d))
                        text = ufo_fct_template % {
                            'name': fct.name,
                            'args': ", mp__".join(fct.arguments),
                            'fct': str_fct,
                            'definitions': '\n'.join(definitions),
                            'complex_mp_format': self.mp_complex_format
                            }
                        fsock.writelines(text)

            fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
7388 - def create_makeinc(self):
7389 """create makeinc.inc containing the file to compile """ 7390 7391 fsock = self.open('makeinc.inc', comment='#') 7392 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 7393 text += ' model_functions.o ' 7394 7395 nb_coup_indep = 1 + len(self.coups_dep) // 25 7396 nb_coup_dep = 1 + len(self.coups_indep) // 25 7397 couplings_files=['couplings%s.o' % (i+1) \ 7398 for i in range(nb_coup_dep + nb_coup_indep) ] 7399 if self.opt['mp']: 7400 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 7401 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 7402 text += ' '.join(couplings_files) 7403 fsock.writelines(text)
7404
7405 - def create_param_write(self):
7406 """ create param_write """ 7407 7408 fsock = self.open('param_write.inc', format='fortran') 7409 7410 fsock.writelines("""write(*,*) ' External Params' 7411 write(*,*) ' ---------------------------------' 7412 write(*,*) ' '""") 7413 def format(name): 7414 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
7415 7416 # Write the external parameter 7417 lines = [format(param.name) for param in self.params_ext] 7418 fsock.writelines('\n'.join(lines)) 7419 7420 fsock.writelines("""write(*,*) ' Internal Params' 7421 write(*,*) ' ---------------------------------' 7422 write(*,*) ' '""") 7423 lines = [format(data.name) for data in self.params_indep 7424 if data.name != 'ZERO' and self.check_needed_param(data.name)] 7425 fsock.writelines('\n'.join(lines)) 7426 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 7427 write(*,*) ' ----------------------------------------' 7428 write(*,*) ' '""") 7429 lines = [format(data.name) for data in self.params_dep \ 7430 if self.check_needed_param(data.name)] 7431 7432 fsock.writelines('\n'.join(lines)) 7433 7434 7435
7436 - def create_ident_card(self):
7437 """ create the ident_card.dat """ 7438 7439 def format(parameter): 7440 """return the line for the ident_card corresponding to this parameter""" 7441 colum = [parameter.lhablock.lower()] + \ 7442 [str(value) for value in parameter.lhacode] + \ 7443 [parameter.name] 7444 if not parameter.name: 7445 return '' 7446 return ' '.join(colum)+'\n'
7447 7448 fsock = self.open('ident_card.dat') 7449 7450 external_param = [format(param) for param in self.params_ext] 7451 fsock.writelines('\n'.join(external_param)) 7452
7453 - def create_actualize_mp_ext_param_inc(self):
7454 """ create the actualize_mp_ext_params.inc code """ 7455 7456 # In principle one should actualize all external, but for now, it is 7457 # hardcoded that only AS and MU_R can by dynamically changed by the user 7458 # so that we only update those ones. 7459 # Of course, to be on the safe side, one could decide to update all 7460 # external parameters. 7461 update_params_list=[p for p in self.params_ext if p.name in 7462 self.PS_dependent_key] 7463 7464 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 7465 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 7466 for param in update_params_list] 7467 # When read_lha is false, it is G which is taken in input and not AS, so 7468 # this is what should be reset here too. 7469 if 'aS' in [param.name for param in update_params_list]: 7470 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 7471 7472 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 7473 fsock.writelines('\n'.join(res_strings))
7474
    def create_param_read(self):
        """create param_read

        Write param_read.inc.  For madevent/FKS (or loop-induced) outputs the
        file simply includes the auto-generated ../param_card.inc; otherwise
        one LHA_get_real call is emitted per external parameter (plus the
        MP_LHA_get_real twin in mp mode), followed by sign fixes tying the
        width sign to the mass sign for Majorana particles.
        """

        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """return the line for the ident_card corresponding to this
            parameter"""
            # Default value passed to LHA_get_real is the parameter's real
            # part, translated to Fortran syntax.
            template = \
            """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
                % {'name': parameter.name,
                   'value': self.p_to_f.parse(str(parameter.value.real))}
            if self.opt['mp']:
                template = template+ \
                ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+
                 "%(mp_prefix)s%(name)s,%(value)s)") \
                 % {'name': parameter.name,'mp_prefix': self.mp_prefix,
                    'value': self.mp_p_to_f.parse(str(parameter.value.real))}

            # Parameters from the 'loop' block are read silently (no warning
            # when absent from the card).
            if parameter.lhablock.lower() == 'loop':
                template = template.replace('LHA_get_real', 'LHA_get_real_silent')

            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                          for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
                   particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                 {'width': particle.get('width'), 'mass': particle.get('mass')})
                if self.opt['mp']:
                    res_strings.append(\
                      ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\
                       '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\
                       'mass': particle.get('mass'),'mp_pref':self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))


    # Decorator for create_param_card_static defined just below.
    @staticmethod
7525 - def create_param_card_static(model, output_path, rule_card_path=False, 7526 mssm_convert=True, write_special=True):
7527 """ create the param_card.dat for a givent model --static method-- """ 7528 #1. Check if a default param_card is present: 7529 done = False 7530 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 7531 restrict_name = os.path.basename(model.restrict_card)[9:-4] 7532 model_path = model.get('modelpath') 7533 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 7534 done = True 7535 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 7536 output_path) 7537 if not done: 7538 param_writer.ParamCardWriter(model, output_path, write_special=write_special) 7539 7540 if rule_card_path: 7541 if hasattr(model, 'rule_card'): 7542 model.rule_card.write_file(rule_card_path) 7543 7544 if mssm_convert: 7545 model_name = model.get('name') 7546 # IF MSSM convert the card to SLAH1 7547 if model_name == 'mssm' or model_name.startswith('mssm-'): 7548 import models.check_param_card as translator 7549 # Check the format of the param_card for Pythia and make it correct 7550 if rule_card_path: 7551 translator.make_valid_param_card(output_path, rule_card_path) 7552 translator.convert_to_slha1(output_path)
7553
7554 - def create_param_card(self, write_special=True):
7555 """ create the param_card.dat """ 7556 7557 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 7558 if not hasattr(self.model, 'rule_card'): 7559 rule_card=False 7560 write_special = True 7561 if 'exporter' in self.opt: 7562 import madgraph.loop.loop_exporters as loop_exporters 7563 import madgraph.iolibs.export_fks as export_fks 7564 write_special = False 7565 if issubclass(self.opt['exporter'], loop_exporters.LoopProcessExporterFortranSA): 7566 write_special = True 7567 if issubclass(self.opt['exporter'],(loop_exporters.LoopInducedExporterME,export_fks.ProcessExporterFortranFKS)): 7568 write_special = False 7569 7570 self.create_param_card_static(self.model, 7571 output_path=pjoin(self.dir_path, 'param_card.dat'), 7572 rule_card_path=rule_card, 7573 mssm_convert=True, 7574 write_special=write_special)
7575
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options=None):
    """ Determine which Export_v4 class is required. cmd is the command
        interface containing all potential usefull information.
        The output_type argument specifies from which context the output
        is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
        and 'default' for tree-level outputs.

        :param cmd: the (MadGraph) command interface driving the export.
        :param noclean: when True, the target directory is not cleaned.
        :param output_type: 'madloop', 'madloop_matchbox', 'amcatnlo' or
            'default'.
        :param group_subprocesses: whether madevent/madweight subprocesses
            are grouped (only meaningful for output_type='default').
        :param cmd_options: extra output options forwarded to the exporter
            (a fresh empty dict when omitted).
        :returns: an instantiated ProcessExporter of the appropriate class.
        :raises MadGraph5Error: for an unknown output_type or a missing
            'loop_material' template directory.
    """
    # Avoid the shared mutable-default pitfall: build a fresh dict per call.
    if cmd_options is None:
        cmd_options = {}

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Pick the first process available: either from the current amplitudes or,
    # for FKS runs, from the multi-process definitions.
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
            len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    # A loop-reduction tool is needed only for genuine virtual contributions.
    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass': cmd.options['complex_mass_scheme'],
      'export_format': 'madloop',
      'mp': True,
      'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir': cmd._iregi_dir,
      'golem_dir': cmd.options['golem'],
      'samurai_dir': cmd.options['samurai'],
      'ninja_dir': cmd.options['ninja'],
      'collier_dir': cmd.options['collier'],
      'fortran_compiler': cmd.options['fortran_compiler'],
      'f2py_compiler': cmd.options['f2py_compiler'],
      'output_dependencies': cmd.options['output_dependencies'],
      'SubProc_prefix': 'P',
      'compute_color_flows': cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path'],
      'output_options': cmd_options
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception("output_type not recognize %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # mp is needed only when virtual amplitudes are present.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format  # shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception('Wrong export_v4 format')
    else:
        # BUGFIX: the message previously contained a bare '%s' that was never
        # interpolated with output_type.
        raise MadGraph5Error('Output type %s not reckognized in ExportV4Factory.'
                             % output_type)
7746



#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used to write each matrix<i>.f file in grouped MadWeight mode.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Identifies the grouped output flavour handled by this exporter.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        :param subproc_group: a group_subprocs.SubProcessGroup instance.
        :param fortran_model: the helas-call writer used for matrix elements.
        :param group_number: group index (not used directly here; the
            directory name comes from the group's own number/name).
        :returns: the total number of helas calls written for the group.
        """

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup")

        # Lazily bind the model from the first matrix element if needed.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + subprocdir)


        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                   self.write_matrix_element_v4(writers.FortranWriter(filename),
                                                matrix_element,
                                                fortran_model,
                                                str(ime+1),
                                                subproc_group.get('diagram_maps')[\
                                                                              ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # (taken from the last matrix element of the loop above; all elements
        # of a group share the same multiplicity)
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)


        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared per-process files from the parent directory.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7908 7909 7910 #=========================================================================== 7911 # Helper functions 7912 #===========================================================================
def modify_grouping(self, matrix_element):
    """Hook allowing the grouping to be rearranged (when grouping is in
    place).

    Returns a pair:
      - True/False: whether the matrix element was modified
      - the resulting (new or unchanged) matrix element
    """
    regrouped = matrix_element.split_lepton_grouping()
    return True, regrouped
7920 7921 #=========================================================================== 7922 # write_super_auto_dsig_file 7923 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write the auto_dsig.f file selecting between the subprocesses
    in subprocess group mode.

    If writer is falsy, return the replacement dictionary used to fill
    the template instead of writing the file.
    """

    replace_dict = {}

    # Extract version number and date from VERSION file
    replace_dict['info_lines'] = self.get_mg5_info_lines()

    matrix_elements = subproc_group.get('matrix_elements')

    # Extract process info lines, one entry per matrix element
    replace_dict['process_lines'] = '\n'.join(
        [self.get_process_info_lines(me) for me in matrix_elements])

    # All matrix elements in a group share the same external legs
    nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal

    # Two entries (process and mirror) per matrix element
    replace_dict['nsprocs'] = 2 * len(matrix_elements)

    # Generate dsig definition line
    replace_dict['dsig_def_line'] = "DOUBLE PRECISION " + \
        ",".join(["DSIG%d" % (iproc + 1) for iproc in
                  range(len(matrix_elements))])

    # Generate dsig process lines: one IPROC dispatch per subprocess
    call_dsig_proc_lines = []
    for iproc in range(len(matrix_elements)):
        call_dsig_proc_lines.append(
            "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
            {"num": iproc + 1,
             "proc": matrix_elements[iproc].get('processes')[0].base_string()})
    replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

    if writer:
        # Use a context manager so the template file handle is closed
        # promptly (the original left it to the garbage collector).
        template_path = os.path.join(
            _file_path,
            'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')
        with open(template_path) as template_file:
            text = template_file.read() % replace_dict
        # Write the file
        writer.writelines(text)
    else:
        return replace_dict
7969 7970 #=========================================================================== 7971 # write_mirrorprocs 7972 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write the mirrorprocs.inc file flagging, for each process in the
    subprocess group, whether it has an initial-state mirror process."""

    matrix_elements = subproc_group.get('matrix_elements')
    # Map the python booleans onto their Fortran literals
    flags = ['.true.' if me.get('has_mirror_process') else '.false.'
             for me in matrix_elements]
    data_line = "DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \
        (len(matrix_elements), ",".join(flags))
    # Write the file
    writer.writelines([data_line])
7986 7987 #=========================================================================== 7988 # write_configs_file 7989 #===========================================================================
def write_configs_file(self, writer, subproc_group, diagrams_for_config):
    """Write the configs.inc file with topology information for a
    subprocess group. Use the first subprocess with a diagram for each
    configuration."""

    matrix_elements = subproc_group.get('matrix_elements')
    model = matrix_elements[0].get('processes')[0].get('model')

    diagrams = []
    config_numbers = []
    for iconfig, config in enumerate(diagrams_for_config):
        # Skip configurations to which no subprocess contributes
        if set(config) == set([0]):
            continue
        # For each subprocess pick its diagram for this configuration,
        # or None when the subprocess does not contribute
        row = [matrix_elements[isub].get('diagrams')[idiag - 1]
               if idiag else None
               for isub, idiag in enumerate(config)]
        diagrams.append(row)
        config_numbers.append(iconfig + 1)

    # Extract number of external particles
    nexternal, ninitial = subproc_group.get_nexternal_ninitial()

    s_and_t_channels = self.write_configs_file_from_diagrams(
        writer, diagrams, config_numbers,
        nexternal, ninitial,
        matrix_elements[0], model)
    return len(diagrams), s_and_t_channels
8022 8023 #=========================================================================== 8024 # write_run_configs_file 8025 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Fills the MadWeight run-config template with the number of channels
    per job and hands the resulting text to the writer. Always returns
    True.
    """

    path = os.path.join(_file_path, 'iolibs', 'template_files',
                        'madweight_run_config.inc')
    # Use a context manager so the template file handle is closed
    # promptly (the original relied on the garbage collector).
    with open(path) as template:
        text = template.read() % {'chanperjob': '2'}
    writer.write(text)
    return True
8033 8034 8035 #=========================================================================== 8036 # write_leshouche_file 8037 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Write the leshouche.inc file for MG4, concatenating the
    leshouche lines of every matrix element in the group."""

    line_groups = [self.get_leshouche_lines(matrix_element, iproc)
                   for iproc, matrix_element in
                   enumerate(subproc_group.get('matrix_elements'))]

    # Flatten the per-process groups and write the file
    writer.writelines([line for group in line_groups for line in group])

    return True
8052