Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import, division 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  from fractions import Fraction 
  20  """Methods and classes to export matrix elements to v4 format.""" 
  21   
  22  import copy 
  23  from six import StringIO 
  24  import itertools 
  25  import fractions 
  26  import glob 
  27  import logging 
  28  import math 
  29  import os 
  30  import io 
  31  import re 
  32  import shutil 
  33  import subprocess 
  34  import sys 
  35  import time 
  36  import traceback 
  37  import  collections 
  38   
  39  import aloha 
  40   
  41  import madgraph.core.base_objects as base_objects 
  42  import madgraph.core.color_algebra as color 
  43  import madgraph.core.helas_objects as helas_objects 
  44  import madgraph.iolibs.drawing_eps as draw 
  45  import madgraph.iolibs.files as files 
  46  import madgraph.iolibs.group_subprocs as group_subprocs 
  47  import madgraph.iolibs.file_writers as writers 
  48  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  49  import madgraph.iolibs.template_files as template_files 
  50  import madgraph.iolibs.ufo_expression_parsers as parsers 
  51  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  52  import madgraph.interface.common_run_interface as common_run_interface 
  53  import madgraph.various.diagram_symmetry as diagram_symmetry 
  54  import madgraph.various.misc as misc 
  55  import madgraph.various.banner as banner_mod 
  56  import madgraph.various.process_checks as process_checks 
  57  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  58  import aloha.create_aloha as create_aloha 
  59  import models.import_ufo as import_ufo 
  60  import models.write_param_card as param_writer 
  61  import models.check_param_card as check_param_card 
  62   
  63   
  64  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  65  from madgraph.iolibs.files import cp, ln, mv 
  66   
  67  from madgraph import InvalidCmd 
  68   
  69  pjoin = os.path.join 
  70   
  71  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  72  logger = logging.getLogger('madgraph.export_v4') 
  73   
  74  default_compiler= {'fortran': 'gfortran', 
  75                         'f2py': 'f2py', 
  76                         'cpp':'g++'} 
class VirtualExporter(object):
    """Abstract interface between madgraph and a concrete output format.

    The class attributes below change the way madgraph interacts with any
    exporter implementation.
    """

    # Grouping policy used when calling 'generate_subprocess_directory':
    #   False      -> no grouping (only identical matrix elements merged)
    #   'madevent' -> group the massless quarks and massless leptons
    #   'madweight'-> group the gluon with the massless quarks
    grouped_mode = 'madevent'

    # When grouped_mode is False, u u~ and u~ u are generated independently;
    # setting sa_symmetry keeps only one of the two matrix elements.
    sa_symmetry = False

    # Ask madgraph to check whether the output directory already exists and
    # propose to the user to remove it first when it does.
    check = True

    # Initialisation mode:
    #   'Template' -> madgraph calls copy_template
    #   'dir'      -> madgraph just creates an empty directory
    #   None       -> madgraph does nothing at initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran, 'cpp' for C++.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        # opt is a dictionary with every optional argument passed at output
        # time.  Monkey-patch the helas call writer so subclasses can
        # customise how aloha function calls are written.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    @staticmethod
    def custom_helas_call(call, arg):
        """Customise the way aloha function calls are written.

        'call' is the default template for the call, 'arg' the dictionary
        used for the call; both are returned (possibly modified).
        """
        return call, arg

    # Bound-method wrapper forwarding to the overridable static hook above.
    helas_call_writer_custom = lambda self, call, arg: self.custom_helas_call(call, arg)

    def copy_template(self, model):
        """Initialise the output directory (no-op by default)."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        """Create one subprocess directory.

        For ungrouped output the call reads
        (matrix_element, helicity_model, me_number).  Returns an integer:
        the number of calls to helicity routines (0 for this stub).
        """
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Write out the model content (no-op by default)."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Finalise the output directory (no-op by default)."""
        return

    def pass_information_from_cmd(self, cmd):
        """Pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Return (modified?, matrix_element); no regrouping by default."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        """v4 models are not supported by this kind of exporter."""
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
        return

    def export_helas(self, HELAS_PATH):
        """v4 HELAS routines are not supported by this kind of exporter."""
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
        return
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Options used when none are supplied at construction time; __init__
    # copies this dict and layers the user's options on top.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{}
                   }
    # Plain Fortran output does not group subprocesses by default.
    grouped_mode = False
    # JAMP optimisation flag (off by default).
    jamp_optim = False
171 - def __init__(self, dir_path = "", opt=None):
172 """Initiate the ProcessExporterFortran with directory information""" 173 self.mgme_dir = MG5DIR 174 self.dir_path = dir_path 175 self.model = None 176 177 self.opt = dict(self.default_opt) 178 if opt: 179 self.opt.update(opt) 180 self.cmd_options = self.opt['output_options'] 181 182 #place holder to pass information to the run_interface 183 self.proc_characteristic = banner_mod.ProcCharacteristic() 184 # call mother class 185 super(ProcessExporterFortran,self).__init__(dir_path, opt)
186 187 188 #=========================================================================== 189 # process exporter fortran switch between group and not grouped 190 #===========================================================================
191 - def export_processes(self, matrix_elements, fortran_model):
192 """Make the switch between grouped and not grouped output""" 193 194 calls = 0 195 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 196 for (group_number, me_group) in enumerate(matrix_elements): 197 calls = calls + self.generate_subprocess_directory(\ 198 me_group, fortran_model, group_number) 199 else: 200 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 201 calls = calls + self.generate_subprocess_directory(\ 202 me, fortran_model, me_number) 203 204 return calls
205 206 207 #=========================================================================== 208 # create the run_card 209 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write Cards/run_card_default.dat, tailored to the process content
        when available, and copy it to Cards/run_card.dat."""


        # bypass this for the loop-check
        import madgraph.loop.loop_helas_objects as loop_helas_objects
        if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
            matrix_elements = None

        run_card = banner_mod.RunCard()


        # Collect the process list so the default card can be adapted to it.
        default=True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            # No matrix elements available (e.g. the loop-check bypass above):
            # fall back to a fully generic run_card.
            default =False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                        pjoin(self.dir_path, 'Cards', 'run_card.dat'))
240 241 242 243 #=========================================================================== 244 # copy the Template in a new directory. 245 #===========================================================================
    def copy_template(self, model):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory
        """

        #First copy the full template tree if dir_path doesn't exit
        if not os.path.isdir(self.dir_path):
            assert self.mgme_dir, \
                     "No valid MG_ME path given for MG4 run directory creation."
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(self.dir_path))
            shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                            self.dir_path, True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                          self.dir_path)
            # copy plot_card, keeping a pristine *_default.dat copy
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif os.getcwd() == os.path.realpath(self.dir_path):
            # Output requested inside an already-initialised run directory.
            logger.info('working in local directory: %s' % \
                        os.path.realpath(self.dir_path))
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                          self.dir_path)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                          self.dir_path)
            # Copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
            assert self.mgme_dir, \
                      "No valid MG_ME path given for MG4 run directory creation."
            try:
                shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
            except IOError:
                # No MGMEVersion.txt shipped: record the MG5 version instead.
                MG5_version = misc.get_pkg_info()
                open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version'])

        #Ensure that the Template is clean
        if self.opt['clean']:
            logger.info('remove old information in %s' % \
                        os.path.basename(self.dir_path))
            if 'MADGRAPH_BASE' in os.environ:
                misc.call([pjoin('bin', 'internal', 'clean_template'),
                           '--web'], cwd=self.dir_path)
            else:
                try:
                    misc.call([pjoin('bin', 'internal', 'clean_template')], \
                              cwd=self.dir_path)
                except Exception as why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(self.dir_path),why))

        #Write version info
        MG_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
            MG_version['version'])

        # add the makefile in Source directory
        filename = pjoin(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FileWriter(filename))

        # add the DiscreteSampler information
        files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
                 pjoin(self.dir_path, 'Source'))
        files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'),
                 pjoin(self.dir_path, 'Source'))

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
335 336 337 #=========================================================================== 338 # Call MadAnalysis5 to generate the default cards for this process 339 #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels = ['parton','hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process.

        Any failure of MadAnalysis5 is downgraded to a warning: the run
        continues with empty default analyses.
        """

        if len(levels)==0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        try:
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                              get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100)
        except (Exception, SystemExit) as e:
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton','hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only)
                    logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl)
                    # Record the full traceback at debug level for diagnosis.
                    error=StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-'*60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-'*60)
                else:
                    open(card_to_generate,'w').write(text)
        stop = time.time()
        # Only advertise the timing when card generation was noticeably slow.
        if stop-start >1:
            logger.info('Cards created in %.2fs' % (stop-start))
380 381 #=========================================================================== 382 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 383 #===========================================================================
384 - def write_procdef_mg5(self, file_pos, modelname, process_str):
385 """ write an equivalent of the MG4 proc_card in order that all the Madevent 386 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 387 388 proc_card_template = template_files.mg4_proc_card.mg4_template 389 process_template = template_files.mg4_proc_card.process_template 390 process_text = '' 391 coupling = '' 392 new_process_content = [] 393 394 395 # First find the coupling and suppress the coupling from process_str 396 #But first ensure that coupling are define whithout spaces: 397 process_str = process_str.replace(' =', '=') 398 process_str = process_str.replace('= ', '=') 399 process_str = process_str.replace(',',' , ') 400 #now loop on the element and treat all the coupling 401 for info in process_str.split(): 402 if '=' in info: 403 coupling += info + '\n' 404 else: 405 new_process_content.append(info) 406 # Recombine the process_str (which is the input process_str without coupling 407 #info) 408 process_str = ' '.join(new_process_content) 409 410 #format the SubProcess 411 replace_dict = {'process': process_str, 412 'coupling': coupling} 413 process_text += process_template.substitute(replace_dict) 414 415 replace_dict = {'process': process_text, 416 'model': modelname, 417 'multiparticle':''} 418 text = proc_card_template.substitute(replace_dict) 419 420 if file_pos: 421 ff = open(file_pos, 'w') 422 ff.write(text) 423 ff.close() 424 else: 425 return replace_dict
426 427
428 - def pass_information_from_cmd(self, cmd):
429 """Pass information for MA5""" 430 431 self.proc_defs = cmd._curr_proc_defs
432 433 #=========================================================================== 434 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 435 #===========================================================================
436 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
437 """Function to finalize v4 directory, for inheritance.""" 438 439 self.create_run_card(matrix_elements, history) 440 self.create_MA5_cards(matrix_elements, history)
441
    def create_MA5_cards(self,matrix_elements,history):
        """ A wrapper around the creation of the MA5 cards so that it can be
        bypassed by daughter classes (i.e. in standalone)."""
        # Only act when a MadAnalysis5 path is configured and the process
        # definitions were recorded via pass_information_from_cmd.
        if 'madanalysis5_path' in self.opt and not \
                self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
            processes = None
            if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
                processes = [me.get('processes') for megroup in matrix_elements
                             for me in megroup['matrix_elements']]
            elif matrix_elements:
                processes = [me.get('processes')
                             for me in matrix_elements['matrix_elements']]

            self.create_default_madanalysis5_cards(
                history, self.proc_defs, processes,
                self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
                levels = ['hadron','parton'])

            for level in ['hadron','parton']:
                # Copying these cards turn on the use of MadAnalysis5 by default.
                if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)):
                    shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level),
                                pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
465 466 #=========================================================================== 467 # Create the proc_characteristic file passing information to the run_interface 468 #===========================================================================
469 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
470 471 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
472 473 #=========================================================================== 474 # write_matrix_element_v4 475 #===========================================================================
476 - def write_matrix_element_v4(self):
477 """Function to write a matrix.f file, for inheritance. 478 """ 479 pass
480 481 #=========================================================================== 482 # write_pdf_opendata 483 #===========================================================================
484 - def write_pdf_opendata(self):
485 """ modify the pdf opendata file, to allow direct access to cluster node 486 repository if configure""" 487 488 if not self.opt["cluster_local_path"]: 489 changer = {"pdf_systemwide": ""} 490 else: 491 to_add = """ 492 tempname='%(path)s'//Tablefile 493 open(IU,file=tempname,status='old',ERR=1) 494 return 495 1 tempname='%(path)s/Pdfdata/'//Tablefile 496 open(IU,file=tempname,status='old',ERR=2) 497 return 498 2 tempname='%(path)s/lhapdf'//Tablefile 499 open(IU,file=tempname,status='old',ERR=3) 500 return 501 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 502 open(IU,file=tempname,status='old',ERR=4) 503 return 504 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 505 open(IU,file=tempname,status='old',ERR=5) 506 return 507 """ % {"path" : self.opt["cluster_local_path"]} 508 509 changer = {"pdf_systemwide": to_add} 510 511 512 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f")) 513 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 514 ff.writelines(template % changer) 515 516 # Do the same for lhapdf set 517 if not self.opt["cluster_local_path"]: 518 changer = {"cluster_specific_path": ""} 519 else: 520 to_add=""" 521 LHAPath='%(path)s/PDFsets' 522 Inquire(File=LHAPath, exist=exists) 523 if(exists)return 524 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 525 Inquire(File=LHAPath, exist=exists) 526 if(exists)return 527 LHAPath='%(path)s/../lhapdf/pdfsets/' 528 Inquire(File=LHAPath, exist=exists) 529 if(exists)return 530 LHAPath='./PDFsets' 531 """ % {"path" : self.opt["cluster_local_path"]} 532 changer = {"cluster_specific_path": to_add} 533 534 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f")) 535 #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 536 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 537 ff.writelines(template % changer) 538 539 540 return
541 542 543 544 #=========================================================================== 545 # write_maxparticles_file 546 #===========================================================================
547 - def write_maxparticles_file(self, writer, matrix_elements):
548 """Write the maxparticles.inc file for MadEvent""" 549 550 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 551 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 552 matrix_elements.get('matrix_elements')]) 553 else: 554 maxparticles = max([me.get_nexternal_ninitial()[0] \ 555 for me in matrix_elements]) 556 557 lines = "integer max_particles\n" 558 lines += "parameter(max_particles=%d)" % maxparticles 559 560 # Write the file 561 writer.writelines(lines) 562 563 return True
564 565 566 #=========================================================================== 567 # export the model 568 #===========================================================================
    def export_model_files(self, model_path):
        """Configure the files/link of the process according to the model"""

        # Import the model: copy every regular file from the model directory
        # into Source/MODEL.
        for file in os.listdir(model_path):
            if os.path.isfile(pjoin(model_path, file)):
                shutil.copy2(pjoin(model_path, file), \
                                 pjoin(self.dir_path, 'Source', 'MODEL'))
577 578 592 600 601 602 #=========================================================================== 603 # export the helas routine 604 #===========================================================================
605 - def export_helas(self, helas_path):
606 """Configure the files/link of the process according to the model""" 607 608 # Import helas routine 609 for filename in os.listdir(helas_path): 610 filepos = pjoin(helas_path, filename) 611 if os.path.isfile(filepos): 612 if filepos.endswith('Makefile.template'): 613 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 614 elif filepos.endswith('Makefile'): 615 pass 616 else: 617 cp(filepos, self.dir_path + '/Source/DHELAS')
618 # following lines do the same but whithout symbolic link 619 # 620 #def export_helas(mgme_dir, dir_path): 621 # 622 # # Copy the HELAS directory 623 # helas_dir = pjoin(mgme_dir, 'HELAS') 624 # for filename in os.listdir(helas_dir): 625 # if os.path.isfile(pjoin(helas_dir, filename)): 626 # shutil.copy2(pjoin(helas_dir, filename), 627 # pjoin(dir_path, 'Source', 'DHELAS')) 628 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 629 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 630 # 631 632 #=========================================================================== 633 # generate_subprocess_directory 634 #===========================================================================
635 - def generate_subprocess_directory(self, matrix_element, 636 fortran_model, 637 me_number):
638 """Routine to generate a subprocess directory (for inheritance)""" 639 640 pass
641 642 #=========================================================================== 643 # get_source_libraries_list 644 #===========================================================================
645 - def get_source_libraries_list(self):
646 """ Returns the list of libraries to be compiling when compiling the 647 SOURCE directory. It is different for loop_induced processes and 648 also depends on the value of the 'output_dependencies' option""" 649 650 return ['$(LIBDIR)libdhelas.$(libext)', 651 '$(LIBDIR)libpdf.$(libext)', 652 '$(LIBDIR)libmodel.$(libext)', 653 '$(LIBDIR)libcernlib.$(libext)', 654 '$(LIBDIR)libbias.$(libext)']
655 656 #=========================================================================== 657 # write_source_makefile 658 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source-directory makefile for MG4 from its template.

        Returns the substitution dictionary, so subclasses can reuse or
        extend it.  NOTE(review): assumes self.opt['model'] is always set by
        the caller (it is not part of default_opt) — confirm for standalone use.
        """

        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list())
        # The MSSM model needs an extra card-translation step before the
        # model library can be built.
        if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
        else:
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''

        replace_dict= {'libraries': set_of_lib,
                       'model':model_line,
                       'additional_dsample': '',
                       'additional_dependencies':''}

        if writer:
            text = open(path).read() % replace_dict
            writer.write(text)

        return replace_dict
682 683 #=========================================================================== 684 # write_nexternal_madspin 685 #===========================================================================
686 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
687 """Write the nexternal_prod.inc file for madspin""" 688 689 replace_dict = {} 690 691 replace_dict['nexternal'] = nexternal 692 replace_dict['ninitial'] = ninitial 693 694 file = """ \ 695 integer nexternal_prod 696 parameter (nexternal_prod=%(nexternal)d) 697 integer nincoming_prod 698 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 699 700 # Write the file 701 if writer: 702 writer.writelines(file) 703 return True 704 else: 705 return replace_dict
706 707 #=========================================================================== 708 # write_helamp_madspin 709 #===========================================================================
710 - def write_helamp_madspin(self, writer, ncomb):
711 """Write the helamp.inc file for madspin""" 712 713 replace_dict = {} 714 715 replace_dict['ncomb'] = ncomb 716 717 file = """ \ 718 integer ncomb1 719 parameter (ncomb1=%(ncomb)d) 720 double precision helamp(ncomb1) 721 common /to_helamp/helamp """ % replace_dict 722 723 # Write the file 724 if writer: 725 writer.writelines(file) 726 return True 727 else: 728 return replace_dict
729 730 731 732 #=========================================================================== 733 # write_nexternal_file 734 #===========================================================================
735 - def write_nexternal_file(self, writer, nexternal, ninitial):
736 """Write the nexternal.inc file for MG4""" 737 738 replace_dict = {} 739 740 replace_dict['nexternal'] = nexternal 741 replace_dict['ninitial'] = ninitial 742 743 file = """ \ 744 integer nexternal 745 parameter (nexternal=%(nexternal)d) 746 integer nincoming 747 parameter (nincoming=%(ninitial)d)""" % replace_dict 748 749 # Write the file 750 if writer: 751 writer.writelines(file) 752 return True 753 else: 754 return replace_dict
755 #=========================================================================== 756 # write_pmass_file 757 #===========================================================================
758 - def write_pmass_file(self, writer, matrix_element):
759 """Write the pmass.inc file for MG4""" 760 761 model = matrix_element.get('processes')[0].get('model') 762 763 lines = [] 764 for wf in matrix_element.get_external_wavefunctions(): 765 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 766 if mass.lower() != "zero": 767 mass = "abs(%s)" % mass 768 769 lines.append("pmass(%d)=%s" % \ 770 (wf.get('number_external'), mass)) 771 772 # Write the file 773 writer.writelines(lines) 774 775 return True
776 777 #=========================================================================== 778 # write_ngraphs_file 779 #===========================================================================
780 - def write_ngraphs_file(self, writer, nconfigs):
781 """Write the ngraphs.inc file for MG4. Needs input from 782 write_configs_file.""" 783 784 file = " integer n_max_cg\n" 785 file = file + "parameter (n_max_cg=%d)" % nconfigs 786 787 # Write the file 788 writer.writelines(file) 789 790 return True
791 792 #=========================================================================== 793 # write_leshouche_file 794 #===========================================================================
795 - def write_leshouche_file(self, writer, matrix_element):
796 """Write the leshouche.inc file for MG4""" 797 798 # Write the file 799 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 800 801 return True
802 803 #=========================================================================== 804 # get_leshouche_lines 805 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Return the Fortran DATA statements (IDUP, MOTHUP, ICOLUP) that
        make up leshouche.inc for the given matrix element."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG codes of all external legs (after decays) for this process.
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mother information: initial-state legs have no mother (0),
                # final-state legs all point at the two incoming particles.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1,nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
855 856 857 858 859 #=========================================================================== 860 # write_maxamps_file 861 #===========================================================================
def write_maxamps_file(self, writer, maxamps, maxflows,
                       maxproc,maxsproc):
    """Write the maxamps.inc file for MG4.

    Emits the Fortran declaration of the four integer size constants
    (maxamps, maxflow, maxproc, maxsproc) and hands the text to the
    writer.  Always returns True.
    """

    contents = "\n".join([
        " integer maxamps, maxflow, maxproc, maxsproc",
        "parameter (maxamps=%d, maxflow=%d)" % (maxamps, maxflows),
        "parameter (maxproc=%d, maxsproc=%d)" % (maxproc, maxsproc)])

    # Write the file
    writer.writelines(contents)

    return True
876 877 878 #=========================================================================== 879 # Routines to output UFO models in MG4 format 880 #=========================================================================== 881
def convert_model(self, model, wanted_lorentz = [],
                  wanted_couplings = []):
    """ Create a full valid MG4 model from a MG5 model (coming from UFO).

    Writes the MODEL and DHELAS sources under self.dir_path/Source using
    aloha to generate the helicity-amplitude routines, then links the
    model into the process directory.

    NOTE(review): the mutable default arguments ([]) are only iterated,
    never mutated, so they are harmless here — but still an anti-pattern.
    """

    # Make sure aloha is in quadruple precision if needed
    old_aloha_mp=aloha.mp_precision
    aloha.mp_precision=self.opt['mp']
    self.model = model
    # create the MODEL
    write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
    self.opt['exporter'] = self.__class__
    # NOTE(review): 'self.opt + self.proc_characteristic' presumably relies
    # on these being dict-like project types that define '+' (plain dicts
    # do not); confirm against their class definitions.
    model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
    model_builder.build(wanted_couplings)

    # Backup the loop mode, because it can be changed in what follows.
    old_loop_mode = aloha.loop_mode

    # Create the aloha model or use the existing one (for loop exporters
    # this is useful as the aloha model will be used again in the
    # LoopHelasMatrixElements generated). We do not save the model generated
    # here if it didn't exist already because it would be a waste of
    # memory for tree level applications since aloha is only needed at the
    # time of creating the aloha fortran subroutines.
    if hasattr(self, 'aloha_model'):
        aloha_model = self.aloha_model
    else:
        try:
            with misc.MuteLogger(['madgraph.models'], [60]):
                aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        except ImportError:
            aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
    aloha_model.add_Lorentz_object(model.get('lorentz'))

    # Compute the subroutines: only the requested subset if one was given,
    # otherwise everything the model defines.
    if wanted_lorentz:
        aloha_model.compute_subset(wanted_lorentz)
    else:
        aloha_model.compute_all(save=False)

    # Write them out
    write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
    aloha_model.write(write_dir, 'Fortran')

    # Revert the original aloha loop mode
    aloha.loop_mode = old_loop_mode

    #copy Helas Template
    cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
    # Loop routines (lorentz tags starting with 'L') need the loop-aware
    # helas function library instead of the standard one.
    if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
        cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
           write_dir+'/aloha_functions.f')
        aloha_model.loop_mode = False
    else:
        cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
           write_dir+'/aloha_functions.f')
    create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

    # Make final link in the Process
    self.make_model_symbolic_link()

    # Re-establish original aloha mode
    aloha.mp_precision=old_aloha_mp
944 945 946 #=========================================================================== 947 # Helper functions 948 #===========================================================================
def modify_grouping(self, matrix_element):
    """Hook allowing subclasses to alter the subprocess grouping.

    Returns a pair:
      - True/False: whether the matrix_element was modified
      - the new (or old) matrix element
    This base implementation never modifies anything.
    """
    return False, matrix_element
956 957 #=========================================================================== 958 # Helper functions 959 #===========================================================================
def get_mg5_info_lines(self):
    """Return info lines for MG5, suitable to place at beginning of
    Fortran files.

    Uses the package version/date when available, otherwise a generic
    banner; the team/visit footer is common to both cases.
    """

    footer = "# By the MadGraph5_aMC@NLO Development Team\n" + \
             "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"

    info = misc.get_pkg_info()
    if info and 'version' in info and 'date' in info:
        header = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \
                 (info['version'], info['date'])
    else:
        header = "# Generated by MadGraph5_aMC@NLO\n"

    return header + footer
978
def get_process_info_lines(self, matrix_element):
    """Return info lines describing the processes for this matrix element.

    Each process becomes one Fortran comment line; multi-line process
    strings are continued with 'C * '.
    """

    comments = []
    for process in matrix_element.get('processes'):
        comments.append("C " + process.nice_string().replace('\n', '\nC * '))
    return "\n".join(comments)
984 985
def get_helicity_lines(self, matrix_element,array_name='NHEL'):
    """Return the Helicity matrix definition lines for this matrix element.

    One DATA statement per helicity combination, filling row `row` of the
    array named array_name with the helicity values.
    """

    lines = []
    for row, helicities in enumerate(matrix_element.get_helicity_matrix(), 1):
        values_fmt = ",".join(['%2r'] * len(helicities))
        template = "DATA (" + array_name + "(I,%4r),I=1,%d) /" + values_fmt + "/"
        lines.append(template % tuple([row, len(helicities)] + list(helicities)))

    return "\n".join(lines)
1000
def get_ic_line(self, matrix_element):
    """Return the IC definition line coming after helicities, required by
    switchmom in madevent.

    The IC array is simply the identity mapping 1..nexternal.
    """

    nexternal = matrix_element.get_nexternal_ninitial()[0]
    ic_values = ",".join(str(k) for k in range(1, nexternal + 1))
    return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, ic_values)
1011
def set_chosen_SO_index(self, process, squared_orders):
    """ From the squared order constraints set by the user, this function
    finds what indices of the squared_orders list the user intends to pick.
    It returns this as a string of comma-separated successive '.true.' or
    '.false.' for each index."""

    user_squared_orders = process.get('squared_orders')
    split_orders = process.get('split_orders')

    # No user constraint: every squared-order combination is selected.
    if not user_squared_orders:
        return ','.join(['.true.'] * len(squared_orders))

    def _is_selected(sqsos):
        # A combination is kept only if it satisfies every user constraint.
        for user_sqso, value in user_squared_orders.items():
            order_value = sqsos[split_orders.index(user_sqso)]
            constraint = process.get_squared_order_type(user_sqso)
            if constraint == '==' and value != order_value:
                return False
            if constraint in ['<=', '='] and value < order_value:
                return False
            if constraint == '>' and value >= order_value:
                return False
        return True

    return ','.join('.true.' if _is_selected(sqsos) else '.false.'
                    for sqsos in squared_orders)
1039
def get_split_orders_lines(self, orders, array_name, n=5):
    """ Return the split orders definition as defined in the list orders and
    for the name of the array 'array_name'. Split rows in chunks of size n."""

    data_lines = []
    for row, order in enumerate(orders, 1):
        # Emit each row of the array in chunks of at most n values.
        for start in range(0, len(order), n):
            chunk = order[start:start + n]
            formatted = ','.join("%5r" % value for value in chunk)
            data_lines.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" %
                              (array_name, row, start + 1,
                               min(start + n, len(order)), formatted))
    return data_lines
1051
def format_integer_list(self, list, name, n=5):
    """ Return an initialization of the python list in argument following
    the fortran syntax using the data keyword assignment, filling an array
    of name 'name'. It splits rows in chunks of size n."""

    # NOTE: the parameter is named 'list' (shadowing the builtin) to keep
    # the historical call interface; alias it locally for clarity.
    values = list
    data_lines = []
    for start in range(0, len(values), n):
        chunk = ','.join("%5r" % v for v in values[start:start + n])
        data_lines.append("DATA (%s(i),i=%3r,%3r) /%s/" %
                          (name, start + 1, min(start + n, len(values)), chunk))
    return data_lines
1063
def get_color_data_lines(self, matrix_element, n=6):
    """Return the color matrix definition lines for this matrix element. Split
    rows in chunks of size n.

    With no color matrix a trivial 1x1 matrix is returned; otherwise each
    row of CF is written as double-precision values (numerator/denominator)
    followed by a comment giving the corresponding color-basis structure.
    """

    if not matrix_element.get('color_matrix'):
        # Colorless process: single trivial color factor.
        return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
    else:
        ret_list = []
        my_cs = color.ColorString()
        for index, denominator in \
            enumerate(matrix_element.get('color_matrix').\
                                             get_line_denominators()):
            # First write the common denominator for this color matrix line
            #ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
            # Then write the numerators for the matrix elements
            num_list = matrix_element.get('color_matrix').\
                                        get_line_numerators(index, denominator)

            # Numerators must be exact integers before the float division.
            assert all([int(i)==i for i in num_list])

            for k in range(0, len(num_list), n):
                # Fortran double-precision literals use 'd' exponents,
                # hence the replace('e','d').
                ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                (index + 1, k + 1, min(k + n, len(num_list)),
                                 ','.join([("%.15e" % (int(i)/denominator)).replace('e','d') for i in num_list[k:k + n]])))

            # Comment line documenting which color structure this row is.
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
            ret_list.append("C %s" % repr(my_cs))
        return ret_list
1092 1093
def get_den_factor_line(self, matrix_element):
    """Return the denominator factor line for this matrix element.

    IDEN is the combined averaging/symmetry denominator factor.
    """

    iden = matrix_element.get_denominator_factor()
    return "DATA IDEN/%2r/" % iden
1099
def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
    """Return the ICOLAMP matrix, showing which JAMPs contribute to
    which configs (diagrams).

    mapconfigs: list of diagram numbers, one per config (0 = no diagram).
    num_matrix_element: matrix-element index used in the generated array.
    Only leading-color flows (Nc power == max_Nc) are marked .true.;
    finding a subleading flow also clears
    self.proc_characteristic['single_color'].
    """

    ret_list = []

    # Map Python booleans onto Fortran logical literals.
    booldict = {False: ".false.", True: ".true."}

    if not matrix_element.get('color_basis'):
        # No color, so only one color factor. Simply write a ".true."
        # for each config (i.e., each diagram with only 3 particle
        # vertices
        configs = len(mapconfigs)
        ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                        (num_matrix_element, configs,
                         ','.join([".true." for i in range(configs)])))
        return ret_list


    # There is a color basis - create a list showing which JAMPs have
    # contributions to which configs

    # Only want to include leading color flows, so find max_Nc
    color_basis = matrix_element.get('color_basis')

    # We don't want to include the power of Nc's which come from the potential
    # loop color trace (i.e. in the case of a closed fermion loop for example)
    # so we subtract it here when computing max_Nc
    max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                      color_basis.values()],[]))

    # Crate dictionary between diagram number and JAMP number
    diag_jamp = {}
    for ijamp, col_basis_elem in \
            enumerate(sorted(matrix_element.get('color_basis').keys())):
        for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
            # Only use color flows with Nc == max_Nc. However, notice that
            # we don't want to include the Nc power coming from the loop
            # in this counting.
            if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                diag_num = diag_tuple[0] + 1
                # Add this JAMP number to this diag_num
                diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                      [ijamp+1]
            else:
                # This flow is subleading in Nc.
                self.proc_characteristic['single_color'] = False

    colamps = ijamp + 1
    for iconfig, num_diag in enumerate(mapconfigs):
        if num_diag == 0:
            continue

        # List of True or False
        # NOTE(review): diag_jamp[num_diag] raises KeyError if a mapped
        # diagram has no leading-color flow at all — presumably this cannot
        # happen for valid color bases; confirm with the basis construction.
        bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
        # Add line
        ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                        (iconfig+1, num_matrix_element, colamps,
                         ','.join(["%s" % booldict[b] for b in \
                                   bool_list])))

    return ret_list
1161
def get_amp2_lines(self, matrix_element, config_map=None, replace_dict=None):
    """Return the amp2(i) = sum(amp for diag(i))^2 lines.

    If config_map is given (a list mapping each diagram index to a
    config/topology number, 0 meaning unmapped), amplitudes of diagrams
    sharing the same topology are summed coherently into the AMP2 entry
    of the first diagram of that topology.  Otherwise one AMP2 line is
    written per diagram, skipping diagrams containing vertices with more
    legs than the smallest vertex (e.g. 4-point vertices).

    replace_dict is accepted for interface compatibility and unused here.
    """

    # Fix the mutable-default-argument pitfall while keeping the historical
    # interface: both None and [] mean "no config map".
    if config_map is None:
        config_map = []

    # nexternal/ninitial are queried for interface parity with callers;
    # they are not used below.
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    diagrams = matrix_element.get('diagrams')

    # Get minimum legs in a vertex (over diagrams that have vertices).
    vert_list = [max(diag.get_vertex_leg_numbers()) for diag in
                 diagrams if diag.get_vertex_leg_numbers() != []]
    minvert = min(vert_list) if vert_list != [] else 0

    ret_lines = []
    if config_map:
        # In this case, we need to sum up all amplitudes that have
        # identical topologies, as given by the config_map (which
        # gives the topology/config for each of the diagrams).
        config_to_diag_dict = {}
        for idiag, diag in enumerate(diagrams):
            if config_map[idiag] == 0:
                continue
            config_to_diag_dict.setdefault(config_map[idiag], []).append(idiag)

        # Write out the AMP2s summing squares of amplitudes belonging
        # to either the same diagram or different diagrams with
        # identical propagator properties.  Note that we need to use
        # the AMP2 number corresponding to the first diagram number
        # used for that AMP2.
        for config in sorted(config_to_diag_dict.keys()):

            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                   {"num": (config_to_diag_dict[config][0] + 1)}

            amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in
                            sum([diagrams[idiag].get('amplitudes') for
                                 idiag in config_to_diag_dict[config]], [])])

            # Not using \sum |M|^2 anymore since this creates troubles
            # when ckm is not diagonal due to the JIM mechanism.
            if '+' in amp:
                amp = "(%s)*dconjg(%s)" % (amp, amp)
            else:
                amp = "%s*dconjg(%s)" % (amp, amp)

            ret_lines.append(line + amp)
    else:
        for idiag, diag in enumerate(diagrams):
            # Ignore any diagrams with 4-particle vertices.
            if diag.get_vertex_leg_numbers() != [] and \
                    max(diag.get_vertex_leg_numbers()) > minvert:
                continue
            # Now write out the expression for AMP2, meaning the sum of
            # squared amplitudes belonging to the same diagram.
            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
            line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" %
                              {"num": a.get('number')} for a in
                              diag.get('amplitudes')])
            ret_lines.append(line)

    return ret_lines
1224 1225 #=========================================================================== 1226 # Returns the data statements initializing the coeffictients for the JAMP 1227 # decomposition. It is used when the JAMP initialization is decided to be 1228 # done through big arrays containing the projection coefficients. 1229 #===========================================================================
def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                   n=50, Nc_value=3):
    """This functions return the lines defining the DATA statement setting
    the coefficients building the JAMPS out of the AMPS. Split rows in
    bunches of size n.
    One can specify the color_basis from which the color amplitudes originates
    so that there are commentaries telling what color structure each JAMP
    corresponds to."""

    # Local import: the builtin reduce does not exist in Python 3.
    from functools import reduce

    def _frac_gcd(a, b):
        # Euclidean gcd valid for fractions.Fraction operands;
        # fractions.gcd was removed in Python 3.9.
        while b:
            a, b = b, a % b
        return a

    if(not isinstance(color_amplitudes,list) or
       not (color_amplitudes and isinstance(color_amplitudes[0],list))):
        raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs")

    res_list = []
    # Only needed for the optional color-structure commentary, so create it
    # lazily (avoids touching the color module when no basis is given).
    my_cs = color.ColorString() if color_basis else None
    for index, coeff_list in enumerate(color_amplitudes):
        # Create the list of the complete numerical coefficient.
        coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                    (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                    coefficient in coeff_list]
        # Create the list of the numbers of the contributing amplitudes.
        # Multiply by -1 for those which have an imaginary coefficient.
        ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                         for coefficient in coeff_list]
        # Find the common denominator.
        commondenom=abs(reduce(_frac_gcd, coefs_list).denominator)
        num_list=[(coefficient*commondenom).numerator \
                  for coefficient in coefs_list]
        res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                        index+1,len(num_list)))
        res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                        index+1,commondenom))
        if color_basis:
            my_cs.from_immutable(sorted(color_basis.keys())[index])
            res_list.append("C %s" % repr(my_cs))
        for k in range(0, len(num_list), n):
            res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                            (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                             ','.join(["%6r" % i for i in num_list[k:k + n]])))
            res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                            (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                             ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
    return res_list
1274 1275
def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                               split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary.
    The split_order_amps specifies the group of amplitudes sharing the same
    amplitude orders which should be put in together in a given set of JAMPS.
    The split_order_amps is supposed to have the format of the second output
    of the function get_split_orders_mapping function in helas_objects.py.
    The split_order_names is optional (it should correspond to the process
    'split_orders' attribute) and only present to provide comments in the
    JAMP definitions in the code."""

    # Let the user call get_JAMP_lines_split_order directly from a
    # HelasMatrixElement or from the raw color-amplitude lists.
    # NOTE(review): because % binds tighter than +, the second %s below is
    # consumed immediately by str(split_order_amps); only the first %s
    # survives for the later error_msg % '...' calls. This works as long as
    # str(split_order_amps) contains no '%' character.
    error_msg="Malformed '%s' argument passed to the "+\
              "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error(error_msg%'col_amps')
    else:
        raise MadGraph5Error(error_msg%'col_amps')

    # Verify the sanity of the split_order_amps and split_order_names args
    if isinstance(split_order_amps,list):
        for elem in split_order_amps:
            if len(elem)!=2:
                raise MadGraph5Error(error_msg%'split_order_amps')
            # Check the first element of the two lists to make sure they are
            # integers, although in principle they should all be integers.
            if not isinstance(elem[0],tuple) or \
               not isinstance(elem[1],tuple) or \
               not isinstance(elem[0][0],int) or \
               not isinstance(elem[1][0],int):
                raise MadGraph5Error(error_msg%'split_order_amps')
    else:
        raise MadGraph5Error(error_msg%'split_order_amps')

    if not split_order_names is None:
        if isinstance(split_order_names,list):
            # Should specify the same number of names as there are elements
            # in the key of the split_order_amps.
            if len(split_order_names)!=len(split_order_amps[0][0]):
                raise MadGraph5Error(error_msg%'split_order_names')
            # Check the first element of the list to be a string
            if not isinstance(split_order_names[0],str):
                raise MadGraph5Error(error_msg%'split_order_names')
        else:
            raise MadGraph5Error(error_msg%'split_order_names')

    # Now scan all contributing orders to be individually computed and
    # construct the list of color_amplitudes for JAMP to be constructed
    # accordingly.
    res_list=[]
    max_tmp = 0
    for i, amp_order in enumerate(split_order_amps):
        # Keep, for each JAMP, only the contributions whose amplitude
        # number belongs to this order group.
        col_amps_order = []
        for jamp in color_amplitudes:
            col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]])
        if split_order_names:
            res_list.append('C JAMPs contributing to orders '+' '.join(
                ['%s=%i'%order for order in zip(split_order_names,
                                                amp_order[0])]))
        # matchbox output additionally needs the leading-color JAMPs.
        if self.opt['export_format'] in ['madloop_matchbox']:
            res_list.extend(self.get_JAMP_lines(col_amps_order,
                            JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                            JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))[0])
        else:
            toadd, nb_tmp = self.get_JAMP_lines(col_amps_order,
                            JAMP_format="JAMP(%s,{0})".format(str(i+1)))
            res_list.extend(toadd)
            max_tmp = max(max_tmp, nb_tmp)

    return res_list, max_tmp
1352 1353
def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                   split=-1):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary,
    Jamp_formatLC should be define to allow to add LeadingColor computation
    (usefull for MatchBox)
    The split argument defines how the JAMP lines should be split in order
    not to be too long.

    Returns (lines, n_tmp) where n_tmp is the number of TMP_JAMP shortcut
    definitions introduced by the optional color-flow optimization (0 when
    the optimization is disabled).
    """

    # Let the user call get_JAMP_lines directly from a MatrixElement or from
    # the color amplitudes lists.
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")
    else:
        raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")

    # all_element maps (jamp_number, amp_number) -> complex coefficient;
    # it feeds the optional optimise_jamp pass below.
    all_element = {}
    res_list = []
    for i, coeff_list in enumerate(color_amplitudes):
        # It might happen that coeff_list is empty if this function was
        # called from get_JAMP_lines_split_order (i.e. if some color flow
        # does not contribute at all for a given order).
        # In this case we simply set it to 0.
        if coeff_list==[]:
            res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
            continue
        # Break the JAMP definition into 'n=split' pieces to avoid having
        # arbitrarly long lines.
        first=True
        n = (len(coeff_list)+1 if split<=0 else split)
        while coeff_list!=[]:
            coefs=coeff_list[:n]
            coeff_list=coeff_list[n:]
            # Continuation chunks reference the partially-built JAMP.
            res = ((JAMP_format+"=") % str(i + 1)) + \
                  ((JAMP_format % str(i + 1)) if not first and split>0 else '')

            first=False
            # Optimization: if all contributions to that color basis element have
            # the same coefficient (up to a sign), put it in front
            list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
            common_factor = False
            diff_fracs = list(set(list_fracs))
            if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                common_factor = True
                global_factor = diff_fracs[0]
                res = res + '%s(' % self.coeff(1, global_factor, False, 0)

            # loop for JAMP
            for (coefficient, amp_number) in coefs:
                if not coefficient:
                    continue
                # Full numeric value of this contribution (Nc fixed to 3).
                value = (1j if coefficient[2] else 1)* coefficient[0] * coefficient[1] * fractions.Fraction(3)**coefficient[3]
                if (i+1, amp_number) not in all_element:
                    all_element[(i+1, amp_number)] = value
                else:
                    all_element[(i+1, amp_number)] += value
                if common_factor:
                    res = (res + "%s" + AMP_format) % \
                          (self.coeff(coefficient[0],
                                      coefficient[1] / abs(coefficient[1]),
                                      coefficient[2],
                                      coefficient[3]),
                           str(amp_number))
                else:
                    res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                  coefficient[1],
                                                                  coefficient[2],
                                                                  coefficient[3]),
                                                       str(amp_number))

            if common_factor:
                res = res + ')'

            res_list.append(res)

    # Decide whether to run the color-flow optimization pass (command-line
    # option overrides the class default).
    if 'jamp_optim' in self.cmd_options:
        jamp_optim = banner_mod.ConfigFile.format_variable(self.cmd_options['jamp_optim'], bool, 'jamp_optim')
    else:
        # class default
        jamp_optim = self.jamp_optim

    if not jamp_optim:
        return res_list, 0
    else:
        saved = list(res_list)

    if len(all_element) > 1000:
        logger.info("Computing Color-Flow optimization [%s term]", len(all_element))
        start_time = time.time()
    else:
        start_time = 0

    res_list = []

    self.myjamp_count = 0
    new_mat, defs = self.optimise_jamp(all_element)
    if start_time:
        logger.info("Color-Flow passed to %s term in %ss. Introduce %i contraction", len(new_mat), int(time.time()-start_time), len(defs))

    # NOTE: this nested helper shadows the builtin format(); it renders a
    # numeric coefficient as a Fortran literal ('d' exponents, imag1 for j).
    def format(frac):
        if isinstance(frac, Fraction):
            if frac.denominator == 1:
                return str(frac.numerator)
            else:
                return "%id0/%id0" % (frac.numerator, frac.denominator)
        elif frac.real == frac:
            return str(float(frac.real)).replace('e','d')
        else:
            return str(frac).replace('e','d').replace('j','*imag1')

    # Emit the TMP_JAMP shortcut definitions; positive indices refer to
    # AMPs, negative indices to previously-defined TMP_JAMPs.
    for i, amp1, amp2, frac, nb in defs:
        if amp1 > 0:
            amp1 = AMP_format % amp1
        else:
            amp1 = "TMP_JAMP(%d)" % -amp1
        if amp2 > 0:
            amp2 = AMP_format % amp2
        else:
            amp2 = "TMP_JAMP(%d)" % -amp2

        res_list.append(' TMP_JAMP(%d) = %s + (%s) * %s ! used %d times' % (i,amp1, format(frac), amp2, nb))

    # Rebuild each JAMP from the optimized (sparse) matrix.
    jamp_res = collections.defaultdict(list)
    max_jamp=0
    for (jamp, var), factor in new_mat.items():
        if var > 0:
            name = AMP_format % var
        else:
            name = "TMP_JAMP(%d)" % -var
        jamp_res[jamp].append("(%s)*%s" % (format(factor), name))
        max_jamp = max(max_jamp, jamp)

    for i in range(1,max_jamp+1):
        name = JAMP_format % i
        res_list.append(" %s = %s" %(name, '+'.join(jamp_res[i])))

    return res_list, len(defs)
1505
def optimise_jamp(self, all_element, nb_line=0, nb_col=0, added=0):
    """ optimise problem of type Y = A X
    A is a matrix (all_element)
    X is the fortran name of the input.
    The code iteratively add sub-expression jtemp[sub_add]
    and recall itself (this is add to the X size)

    all_element: dict mapping (row, col) -> coefficient; negative column
    indices refer to previously-introduced shortcuts.  Mutated in place.
    Returns (new_all_element, defs) where defs is a list of
    (shortcut_id, col1, col2, ratio, reuse_count) tuples, outermost first.
    """
    # Count the recursion depth (for diagnostics only).
    self.myjamp_count +=1

    # Determine the matrix dimensions on the first (outermost) call.
    if not nb_line:
        for i,j in all_element:
            if i > nb_line:
                nb_line = i+1
            if j> nb_col:
                nb_col = j+1

    # Find the pair of columns (j1, j2) whose entries most often appear in
    # a fixed ratio R across rows — each such coincidence can be collapsed
    # into a single shortcut X_new = X_j1 + R * X_j2.
    max_count = 0
    all_index = []
    operation = collections.defaultdict(lambda: collections.defaultdict(int))
    for i in range(nb_line):
        for j1 in range(-added, nb_col):
            v1 = all_element.get((i,j1), 0)
            if not v1:
                continue
            for j2 in range(j1+1, nb_col):
                R = all_element.get((i,j2), 0)/v1
                if not R:
                    continue

                operation[(j1,j2)][R] +=1
                if operation[(j1,j2)][R] > max_count:
                    max_count = operation[(j1,j2)][R]
                    all_index = [(j1,j2, R)]
                elif operation[(j1,j2)][R] == max_count:
                    all_index.append((j1,j2, R))
    # No ratio occurs more than once: nothing left to factor out.
    if max_count <= 1:
        return all_element, []
    # Replace every occurrence of each best pair by a new shortcut column
    # (negative index -added), removing the two original entries.
    to_add = []
    for index in all_index:
        j1,j2,R = index
        first = True
        for i in range(nb_line):
            v1 = all_element.get((i,j1), 0)
            v2 = all_element.get((i,j2), 0)
            if not v1 or not v2:
                continue
            if v2/v1 == R:
                if first:
                    first = False
                    added +=1
                    to_add.append((added,j1,j2,R, max_count))

                all_element[(i,-added)] = v1
                del all_element[(i,j1)] #= 0
                del all_element[(i,j2)] #= 0

    logger.log(5,"Define %d new shortcut reused %d times", len(to_add), max_count)
    # Recurse on the reduced matrix; prepend this level's definitions so
    # shortcuts are emitted in dependency order.
    new_element, new_def = self.optimise_jamp(all_element, nb_line=nb_line, nb_col=nb_col, added=added)
    for one_def in to_add:
        new_def.insert(0, one_def)
    return new_element, new_def
1574 1575 1576 1577 1578
def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
    """Generate the PDF lines for the auto_dsig.f file.

    Returns a 3-tuple of Fortran code fragments (each without a trailing
    newline): variable declarations, DATA initializations, and the PDF
    evaluation / process-weight accumulation code.  For decays
    (ninitial == 1) no PDFs are evaluated and all weights are 1.
    """

    processes = matrix_element.get('processes')
    model = processes[0].get('model')

    pdf_definition_lines = ""
    pdf_data_lines = ""
    pdf_lines = ""

    if ninitial == 1:
        # Decay process: weight each subprocess with 1 (no PDF).
        pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
        for i, proc in enumerate(processes):
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
            pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
    else:
        # Pick out all initial state particles for the two beams
        initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                           p in processes]))),
                          sorted(list(set([p.get_initial_pdg(2) for \
                                           p in processes])))]

        # Prepare all variable names (particle name, sanitized for Fortran)
        pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                          sum(initial_states,[])])
        for key,val in pdf_codes.items():
            pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

        # Set conversion from PDG code to number used in PDF calls
        pdgtopdf = {21: 0, 22: 7}

        # Fill in missing entries of pdgtopdf
        for pdg in sum(initial_states,[]):
            if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                pdgtopdf[pdg] = pdg
            elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                # If any particle has pdg code 7, we need to use something else
                pdgtopdf[pdg] = 6000000 + pdg

        # Get PDF variable declarations for all initial states
        for i in [0,1]:
            pdf_definition_lines += "DOUBLE PRECISION " + \
                                    ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                              for pdg in \
                                              initial_states[i]]) + \
                                    "\n"

        # Get PDF data lines for all initial states (default weight 1)
        for i in [0,1]:
            pdf_data_lines += "DATA " + \
                              ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                        for pdg in initial_states[i]]) + \
                              "/%d*1D0/" % len(initial_states[i]) + \
                              "\n"

        # Get PDF lines for all different initial states; PDFs are only
        # evaluated when the corresponding beam is a hadron (LPP != 0).
        for i, init_states in enumerate(initial_states):
            if subproc_group:
                pdf_lines = pdf_lines + \
                            "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                            % (i + 1, i + 1)
            else:
                pdf_lines = pdf_lines + \
                            "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                            % (i + 1, i + 1)

            for nbi,initial_state in enumerate(init_states):
                if initial_state in list(pdf_codes.keys()):
                    if subproc_group:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                     "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                    (pdf_codes[initial_state],
                                     i + 1, i + 1, pdgtopdf[initial_state],
                                     i + 1, i + 1)
                    else:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                     "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                    (pdf_codes[initial_state],
                                     i + 1, i + 1, pdgtopdf[initial_state],
                                     i + 1,
                                     i + 1, i + 1)
            pdf_lines = pdf_lines + "ENDIF\n"

        # Add up PDFs for the different initial state particles
        pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
        for proc in processes:
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)="
            # Product of the two beam PDFs (1d0 when no PDF applies).
            for ibeam in [1, 2]:
                initial_state = proc.get_initial_pdg(ibeam)
                if initial_state in list(pdf_codes.keys()):
                    pdf_lines = pdf_lines + "%s%d*" % \
                                (pdf_codes[initial_state], ibeam)
                else:
                    pdf_lines = pdf_lines + "1d0*"
            # Remove last "*" from pdf_lines
            pdf_lines = pdf_lines[:-1] + "\n"
            pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

    # Remove last line break from the return variables
    return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1685 1686 #=========================================================================== 1687 # write_props_file 1688 #===========================================================================
1689 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1690 """Write the props.inc file for MadEvent. Needs input from 1691 write_configs_file.""" 1692 1693 lines = [] 1694 1695 particle_dict = matrix_element.get('processes')[0].get('model').\ 1696 get('particle_dict') 1697 1698 for iconf, configs in enumerate(s_and_t_channels): 1699 for vertex in configs[0] + configs[1][:-1]: 1700 leg = vertex.get('legs')[-1] 1701 if leg.get('id') not in particle_dict: 1702 # Fake propagator used in multiparticle vertices 1703 mass = 'zero' 1704 width = 'zero' 1705 pow_part = 0 1706 else: 1707 particle = particle_dict[leg.get('id')] 1708 # Get mass 1709 if particle.get('mass').lower() == 'zero': 1710 mass = particle.get('mass') 1711 else: 1712 mass = "abs(%s)" % particle.get('mass') 1713 # Get width 1714 if particle.get('width').lower() == 'zero': 1715 width = particle.get('width') 1716 else: 1717 width = "abs(%s)" % particle.get('width') 1718 1719 pow_part = 1 + int(particle.is_boson()) 1720 1721 lines.append("prmass(%d,%d) = %s" % \ 1722 (leg.get('number'), iconf + 1, mass)) 1723 lines.append("prwidth(%d,%d) = %s" % \ 1724 (leg.get('number'), iconf + 1, width)) 1725 lines.append("pow(%d,%d) = %d" % \ 1726 (leg.get('number'), iconf + 1, pow_part)) 1727 1728 # Write the file 1729 writer.writelines(lines) 1730 1731 return True
1732 1733 #=========================================================================== 1734 # write_configs_file 1735 #===========================================================================
1736 - def write_configs_file(self, writer, matrix_element):
1737 """Write the configs.inc file for MadEvent""" 1738 1739 # Extract number of external particles 1740 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1741 1742 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1743 mapconfigs = [c[0] for c in configs] 1744 model = matrix_element.get('processes')[0].get('model') 1745 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1746 [[c[1]] for c in configs], 1747 mapconfigs, 1748 nexternal, ninitial, 1749 model)
1750 1751 #=========================================================================== 1752 # write_configs_file_from_diagrams 1753 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of [s_channels, t_channels] kept for each written
        config (consumed later by write_props_file)."""

        lines = []

        s_and_t_channels = []

        # Smallest "largest vertex size" over all configs: configs whose
        # diagrams only contain vertices up to this size are kept below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers() != []]
        minvert = min(vert_list) if vert_list != [] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        # Counter of configs actually written (some are skipped below).
        nconfigs = 0

        # Fake PDG code used for multi-particle (shrunk-loop) propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in
                   [d for d in helas_diags if d][0].get_vertex_leg_numbers()):
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    # Placeholder shared by all missing subprocesses; it is
                    # resized in-place once the real length is known below.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s, t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s, t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s, t in stchannels]):
                empty_verts[:] = [None] * max([len(s) for s, t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s, t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are per-subprocess tuples; pick the
                # first real vertex as representative.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One PDG per subprocess (0 where it does not contribute)
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1872 1873 #=========================================================================== 1874 # Global helper methods 1875 #=========================================================================== 1876
1877 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1878 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1879 1880 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1881 1882 if total_coeff == 1: 1883 if is_imaginary: 1884 return '+imag1*' 1885 else: 1886 return '+' 1887 elif total_coeff == -1: 1888 if is_imaginary: 1889 return '-imag1*' 1890 else: 1891 return '-' 1892 1893 res_str = '%+iD0' % total_coeff.numerator 1894 1895 if total_coeff.denominator != 1: 1896 # Check if total_coeff is an integer 1897 res_str = res_str + '/%iD0' % total_coeff.denominator 1898 1899 if is_imaginary: 1900 res_str = res_str + '*imag1' 1901 1902 return res_str + '*'
1903 1904
1905 - def set_fortran_compiler(self, default_compiler, force=False):
1906 """Set compiler based on what's available on the system""" 1907 1908 # Check for compiler 1909 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1910 f77_compiler = default_compiler['fortran'] 1911 elif misc.which('gfortran'): 1912 f77_compiler = 'gfortran' 1913 elif misc.which('g77'): 1914 f77_compiler = 'g77' 1915 elif misc.which('f77'): 1916 f77_compiler = 'f77' 1917 elif default_compiler['fortran']: 1918 logger.warning('No Fortran Compiler detected! Please install one') 1919 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1920 else: 1921 raise MadGraph5Error('No Fortran Compiler detected! Please install one') 1922 logger.info('Use Fortran compiler ' + f77_compiler) 1923 1924 1925 # Check for compiler. 1. set default. 1926 if default_compiler['f2py']: 1927 f2py_compiler = default_compiler['f2py'] 1928 else: 1929 f2py_compiler = '' 1930 # Try to find the correct one. 1931 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1932 f2py_compiler = default_compiler['f2py'] 1933 elif misc.which('f2py'): 1934 f2py_compiler = 'f2py' 1935 elif sys.version_info[1] == 6: 1936 if misc.which('f2py-2.6'): 1937 f2py_compiler = 'f2py-2.6' 1938 elif misc.which('f2py2.6'): 1939 f2py_compiler = 'f2py2.6' 1940 elif sys.version_info[1] == 7: 1941 if misc.which('f2py-2.7'): 1942 f2py_compiler = 'f2py-2.7' 1943 elif misc.which('f2py2.7'): 1944 f2py_compiler = 'f2py2.7' 1945 1946 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1947 1948 1949 self.replace_make_opt_f_compiler(to_replace) 1950 # Replace also for Template but not for cluster 1951 if 'MADGRAPH_DATA' not in os.environ and ReadWrite: 1952 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1953 1954 return f77_compiler
1955 1956 # an alias for backward compatibility 1957 set_compiler = set_fortran_compiler 1958 1959
1960 - def set_cpp_compiler(self, default_compiler, force=False):
1961 """Set compiler based on what's available on the system""" 1962 1963 # Check for compiler 1964 if default_compiler and misc.which(default_compiler): 1965 compiler = default_compiler 1966 elif misc.which('g++'): 1967 #check if clang version 1968 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1969 stderr=subprocess.PIPE) 1970 out, _ = p.communicate() 1971 out = out.decode() 1972 if 'clang' in str(out) and misc.which('clang'): 1973 compiler = 'clang' 1974 else: 1975 compiler = 'g++' 1976 elif misc.which('c++'): 1977 compiler = 'c++' 1978 elif misc.which('clang'): 1979 compiler = 'clang' 1980 elif default_compiler: 1981 logger.warning('No c++ Compiler detected! Please install one') 1982 compiler = default_compiler # maybe misc fail so try with it 1983 else: 1984 raise MadGraph5Error('No c++ Compiler detected! Please install one') 1985 logger.info('Use c++ compiler ' + compiler) 1986 self.replace_make_opt_c_compiler(compiler) 1987 # Replace also for Template but not for cluster 1988 if 'MADGRAPH_DATA' not in os.environ and ReadWrite and \ 1989 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1990 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1991 1992 return compiler
1993 1994
1995 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1996 """Set FC=compiler in Source/make_opts""" 1997 1998 assert isinstance(compilers, dict) 1999 2000 mod = False #avoid to rewrite the file if not needed 2001 if not root_dir: 2002 root_dir = self.dir_path 2003 2004 compiler= compilers['fortran'] 2005 f2py_compiler = compilers['f2py'] 2006 if not f2py_compiler: 2007 f2py_compiler = 'f2py' 2008 for_update= {'DEFAULT_F_COMPILER':compiler, 2009 'DEFAULT_F2PY_COMPILER':f2py_compiler} 2010 make_opts = pjoin(root_dir, 'Source', 'make_opts') 2011 2012 try: 2013 common_run_interface.CommonRunCmd.update_make_opts_full( 2014 make_opts, for_update) 2015 except IOError: 2016 if root_dir == self.dir_path: 2017 logger.info('Fail to set compiler. Trying to continue anyway.')
2018
2019 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
2020 """Set CXX=compiler in Source/make_opts. 2021 The version is also checked, in order to set some extra flags 2022 if the compiler is clang (on MACOS)""" 2023 2024 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 2025 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 2026 2027 2028 # list of the variable to set in the make_opts file 2029 for_update= {'DEFAULT_CPP_COMPILER':compiler, 2030 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 2031 'STDLIB': '-lc++' if is_lc else '-lstdc++', 2032 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 2033 } 2034 2035 # for MOJAVE remove the MACFLAG: 2036 if is_clang: 2037 import platform 2038 version, _, _ = platform.mac_ver() 2039 if not version:# not linux 2040 version = 14 # set version to remove MACFLAG 2041 else: 2042 majversion, version = [int(x) for x in version.split('.',3)[:2]] 2043 2044 if majversion >= 11 or (majversion ==10 and version >= 14): 2045 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 2046 2047 if not root_dir: 2048 root_dir = self.dir_path 2049 make_opts = pjoin(root_dir, 'Source', 'make_opts') 2050 2051 try: 2052 common_run_interface.CommonRunCmd.update_make_opts_full( 2053 make_opts, for_update) 2054 except IOError: 2055 if root_dir == self.dir_path: 2056 logger.info('Fail to set compiler. Trying to continue anyway.') 2057 2058 return
2059
2060 #=============================================================================== 2061 # ProcessExporterFortranSA 2062 #=============================================================================== 2063 -class ProcessExporterFortranSA(ProcessExporterFortran):
2064 """Class to take care of exporting a set of matrix elements to 2065 MadGraph v4 StandAlone format.""" 2066 2067 matrix_template = "matrix_standalone_v4.inc" 2068
2069 - def __init__(self, *args,**opts):
2070 """add the format information compare to standard init""" 2071 2072 if 'format' in opts: 2073 self.format = opts['format'] 2074 del opts['format'] 2075 else: 2076 self.format = 'standalone' 2077 2078 self.prefix_info = {} 2079 ProcessExporterFortran.__init__(self, *args, **opts)
2080
2081 - def copy_template(self, model):
2082 """Additional actions needed for setup of Template 2083 """ 2084 2085 #First copy the full template tree if dir_path doesn't exit 2086 if os.path.isdir(self.dir_path): 2087 return 2088 2089 logger.info('initialize a new standalone directory: %s' % \ 2090 os.path.basename(self.dir_path)) 2091 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 2092 2093 # Create the directory structure 2094 os.mkdir(self.dir_path) 2095 os.mkdir(pjoin(self.dir_path, 'Source')) 2096 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 2097 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 2098 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 2099 os.mkdir(pjoin(self.dir_path, 'bin')) 2100 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 2101 os.mkdir(pjoin(self.dir_path, 'lib')) 2102 os.mkdir(pjoin(self.dir_path, 'Cards')) 2103 2104 # Information at top-level 2105 #Write version info 2106 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 2107 try: 2108 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 2109 except IOError: 2110 MG5_version = misc.get_pkg_info() 2111 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 2112 "5." + MG5_version['version']) 2113 2114 2115 # Add file in SubProcesses 2116 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 2117 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 2118 2119 if self.format == 'standalone': 2120 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 2121 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 2122 2123 # Add file in Source 2124 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 2125 pjoin(self.dir_path, 'Source')) 2126 # add the makefile 2127 filename = pjoin(self.dir_path,'Source','makefile') 2128 self.write_source_makefile(writers.FileWriter(filename))
2129 2130 #=========================================================================== 2131 # export model files 2132 #===========================================================================
2133 - def export_model_files(self, model_path):
2134 """export the model dependent files for V4 model""" 2135 2136 super(ProcessExporterFortranSA,self).export_model_files(model_path) 2137 # Add the routine update_as_param in v4 model 2138 # This is a function created in the UFO 2139 text=""" 2140 subroutine update_as_param() 2141 call setpara('param_card.dat',.false.) 2142 return 2143 end 2144 """ 2145 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2146 ff.write(text) 2147 ff.close() 2148 2149 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 2150 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 2151 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 2152 fsock.write(text) 2153 fsock.close() 2154 2155 self.make_model_symbolic_link()
2156 2157 #=========================================================================== 2158 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 2159 #===========================================================================
2160 - def write_procdef_mg5(self, file_pos, modelname, process_str):
2161 """ write an equivalent of the MG4 proc_card in order that all the Madevent 2162 Perl script of MadEvent4 are still working properly for pure MG5 run. 2163 Not needed for StandAlone so just return 2164 """ 2165 2166 return
2167 2168 2169 #=========================================================================== 2170 # Make the Helas and Model directories for Standalone directory 2171 #===========================================================================
2172 - def make(self):
2173 """Run make in the DHELAS and MODEL directories, to set up 2174 everything for running standalone 2175 """ 2176 2177 source_dir = pjoin(self.dir_path, "Source") 2178 logger.info("Running make for Helas") 2179 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2180 logger.info("Running make for Model") 2181 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2182 2183 #=========================================================================== 2184 # Create proc_card_mg5.dat for Standalone directory 2185 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
           generation proc_card_mg5.dat
           generate a global makefile
        """

        # Resolve the compilers requested through the mg5 options.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
        # Empty __init__.py files so the output is importable from python.
        open(pjoin(self.dir_path, '__init__.py'), 'w')
        open(pjoin(self.dir_path, 'SubProcesses', '__init__.py'), 'w')

        # Deliberately disabled branch (kept for reference): would add the
        # module handling the NLO weight in reweight mode.
        if False:  #'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # Prefixed matrix elements: emit the f2py dispatch glue.
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                            pjoin(self.dir_path, 'SubProcesses', 'check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            for Pdir in os.listdir(pjoin(self.dir_path, 'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n' % {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text += 'all: %s\n\techo \'done\'' % deppython

            # Append to the makefile copied in by copy_template.
            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'), 'a')
            ff.write(text)
            ff.close()
2235
    def write_f2py_splitter(self):
        """write a function to call the correct matrix element

        Generates SubProcesses/all_matrix.f: a generic smatrixhel entry
        point that dispatches on the external PDG list (and optionally the
        process id) to the per-process prefixed smatrixhel routines, plus
        f2py helper routines (initialisation, parameter updates and
        bookkeeping of the available processes)."""

        # Fortran template; the %(...)s slots are filled from self.prefix_info.
        template = """
%(python_information)s
      subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE
C     ALPHAS is given at scale2 (SHOULD be different of 0 for loop induced, ignore for LO)

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in):: procid
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel, procid
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
c     if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH) !first call to setup the paramaters
      RETURN
      END


      subroutine CHANGE_PARA(name, value)
      implicit none
CF2PY intent(in) :: name
CF2PY intent(in) :: value

      character*512 name
      double precision value

%(helreset_def)s

      include '../Source/MODEL/input.inc'
      include '../Source/MODEL/coupl.inc'

%(helreset_setup)s

      SELECT CASE (name)
%(parameter_setup)s
      CASE DEFAULT
          write(*,*) 'no parameter matching', name, value
      END SELECT

      return
      end

      subroutine update_all_coup()
      implicit none
      call coup()
      return
      end


      subroutine get_pdg_order(PDG, ALLPROC)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i)
      DATA PDGS/ %(pdgs)s /
      DATA PIDS/ %(pids)s /
      PDG = PDGS
      ALLPROC = PIDS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END


      """

        # prefix_info maps (pdg-tuple, proc_id) -> [proc_prefix, tag].
        allids = list(self.prefix_info.keys())
        allprefix = [self.prefix_info[key][0] for key in allids]
        min_nexternal = min([len(ids[0]) for ids in allids])
        max_nexternal = max([len(ids[0]) for ids in allids])

        # Human/machine readable catalogue of the bundled processes.
        info = []
        for (key, pid), (prefix, tag) in self.prefix_info.items():
            info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid))

        # Build the nested if/else dispatch: outer level on the number of
        # external particles, inner level on the exact PDG assignment.
        text = []
        for n_ext in range(min_nexternal, max_nexternal+1):
            current_id = [ids[0] for ids in allids if len(ids[0])==n_ext]
            current_pid = [ids[1] for ids in allids if len(ids[0])==n_ext]
            if not current_id:
                continue
            if min_nexternal != max_nexternal:
                if n_ext == min_nexternal:
                    text.append(' if (npdg.eq.%i)then' % n_ext)
                else:
                    text.append(' else if (npdg.eq.%i)then' % n_ext)
            for ii, pdgs in enumerate(current_id):
                pid = current_pid[ii]
                condition = '.and.'.join(['%i.eq.pdgs(%i)' % (pdg, i+1)
                                          for i, pdg in enumerate(pdgs)])
                if ii == 0:
                    text.append(' if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
                else:
                    text.append(' else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
                text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs, pid)][0])
            text.append(' endif')
        #close the function
        if min_nexternal != max_nexternal:
            text.append('endif')

        # One Fortran CASE per settable external parameter.
        params = self.get_model_parameter(self.model)
        parameter_setup = []
        for key, var in params.items():
            parameter_setup.append(' CASE ("%s")\n %s = value'
                                   % (key, var))

        # part for the resetting of the helicity
        helreset_def = []
        helreset_setup = []
        for prefix in set(allprefix):
            helreset_setup.append(' %shelreset = .true. ' % prefix)
            helreset_def.append(' logical %shelreset \n common /%shelreset/ %shelreset' % (prefix, prefix, prefix))

        # Note: 'pdgs' iterates i (column) outer / process inner so the
        # flat DATA list fills the Fortran PDGS(nb_me,maxpart) array in
        # column-major order, padding short processes with 0.
        formatting = {'python_information': '\n'.join(info),
                      'smatrixhel': '\n'.join(text),
                      'maxpart': max_nexternal,
                      'nb_me': len(allids),
                      'pdgs': ','.join(str(pdg[i]) if i < len(pdg) else '0'
                                       for i in range(max_nexternal)
                                       for (pdg, pid) in allids),
                      'prefix': '\',\''.join(allprefix),
                      'pids': ','.join(str(pid) for (pdg, pid) in allids),
                      'parameter_setup': '\n'.join(parameter_setup),
                      'helreset_def': '\n'.join(helreset_def),
                      'helreset_setup': '\n'.join(helreset_setup),
                      }
        formatting['lenprefix'] = len(formatting['prefix'])
        text = template % formatting
        fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses',
                                            'all_matrix.f'), 'w')
        fsock.writelines(text)
        fsock.close()
2399
2400 - def get_model_parameter(self, model):
2401 """ returns all the model parameter 2402 """ 2403 params = {} 2404 for p in model.get('parameters')[('external',)]: 2405 name = p.name 2406 nopref = name[4:] if name.startswith('mdl_') else name 2407 params[nopref] = name 2408 2409 block = p.lhablock 2410 lha = '_'.join([str(i) for i in p.lhacode]) 2411 params['%s_%s' % (block.upper(), lha)] = name 2412 2413 return params
2414 2415 2416 2417 2418
2419 - def write_f2py_check_sa(self, matrix_element, writer):
2420 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2421 # To be implemented. It is just an example file, i.e. not crucial. 2422 return
2423
2424 - def write_f2py_makefile(self):
2425 """ """ 2426 # Add file in SubProcesses 2427 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2428 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2429
2430 - def create_MA5_cards(self,*args,**opts):
2431 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2432 pass
2433
2434 - def compiler_choice(self, compiler):
2435 """ Different daughter classes might want different compilers. 2436 So this function is meant to be overloaded if desired.""" 2437 2438 self.set_compiler(compiler)
2439 2440 #=========================================================================== 2441 # generate_subprocess_directory 2442 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.

        Returns the number of helas calls written (0 when the directory is
        skipped because a mirror-symmetric one already exists)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                   "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: scan initial-state swap and all
            # final-state permutations for an already-generated directory,
            # restoring the original leg order after each probe.
            for i, proc in enumerate(matrix_element.get('processes')):

                tag = proc.get_tag()
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                flegs = proc.get('legs')[2:]
                for perm in itertools.permutations(flegs):
                    for i, p in enumerate(perm):
                        proc.get('legs')[i+2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                               "P%s" % proc.shell_string())
                    #restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional routine prefix ('int' -> M<number>_, 'proc' -> from the
        # process shell string); registered in prefix_info for the f2py glue.
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_', 1)[1]
            else:
                raise Exception('--prefix options supports only \'int\' and \'proc\'')
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[(tuple(ids), proc.get('id'))] = [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        # Extra production-level files for MadSpin (msP) output.
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath, 'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath, 'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  s_and_t_channels)

            filename = pjoin(dirpath, 'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                                         nexternal, ninitial)

        # Helicity amplitude file for MadSpin (msF) output.
        if self.opt['export_format'] == 'standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb = matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                                      ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        # Generate diagrams (unless explicitly disabled with noeps=True)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # With a prefix the shared check_sa.f cannot be linked as-is: make a
        # local copy with all smatrix calls renamed to the prefixed routine.
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            pat = re.compile('smatrix', re.I)
            new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
            with open(pjoin(dirpath, 'check_sa.f'), 'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2582 2583 2584 #=========================================================================== 2585 # write_source_makefile 2586 #===========================================================================
2587 - def write_source_makefile(self, writer):
2588 """Write the nexternal.inc file for MG4""" 2589 2590 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2591 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2592 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2593 2594 replace_dict= {'libraries': set_of_lib, 2595 'model':model_line, 2596 'additional_dsample': '', 2597 'additional_dependencies':''} 2598 2599 text = open(path).read() % replace_dict 2600 2601 if writer: 2602 writer.write(text) 2603 2604 return replace_dict
2605 2606 #=========================================================================== 2607 # write_matrix_element_v4 2608 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format
        if write is on False, just return the replace_dict and not write anything.

        Builds the substitution dictionary for the matrix_standalone*
        Fortran templates (helas calls, helicity table, color data, JAMPs,
        split-order bookkeeping) and, when write and writer are both set,
        renders the template through *writer*.

        Returns the number of non-comment helas calls when writing, or the
        replace_dict itself when write is False (used by subclasses).
        """


        # Nothing to export for an empty matrix element.
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                    "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        # Default: no special treatment of identical-particle symmetry.
        if 'sa_symmetry' not in self.opt:
            self.opt['sa_symmetry']=False


        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp

        else:
            # Genuine split orders: build the amplitude/squared-order maps.
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp
            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
                              nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            # NOTE(review): written to the current working directory, like
            # check_sa_born_splitOrders.f above — presumably the SubProcess
            # directory at call time; confirm against callers.
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Select the Fortran template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                             " available for individual ME evaluation of given coupl. orders."+\
                             " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                  'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                 .read()%replace_dict
                writer.writelines(content)
            # Count only real helas calls, not '#' comment lines.
            return len([call for call in helas_calls if call.find('#') != 0])
        else:
            replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0])
            return replace_dict # for subclass update
2781
2782 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2783 nincoming, proc_prefix, writer):
2784 """ Write out a more advanced version of the check_sa drivers that 2785 individually returns the matrix element for each contributing squared 2786 order.""" 2787 2788 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2789 'template_files', 'check_sa_splitOrders.f')).read() 2790 printout_sq_orders=[] 2791 for i, squared_order in enumerate(squared_orders): 2792 sq_orders=[] 2793 for j, sqo in enumerate(squared_order): 2794 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2795 printout_sq_orders.append(\ 2796 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2797 %(i+1,' '.join(sq_orders),i+1)) 2798 printout_sq_orders='\n'.join(printout_sq_orders) 2799 replace_dict = {'printout_sqorders':printout_sq_orders, 2800 'nSplitOrders':len(squared_orders), 2801 'nexternal':nexternal, 2802 'nincoming':nincoming, 2803 'proc_prefix':proc_prefix} 2804 2805 if writer: 2806 writer.writelines(check_sa_content % replace_dict) 2807 else: 2808 return replace_dict
2809
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """Exporter of a set of matrix elements for the Matchbox code,
    in the case of Born-only routines."""

    default_opt = {'clean': False, 'complex_mass': False,
                   'export_format': 'matchbox', 'mp': False,
                   'sa_symmetry': True}

    # Specific template of the born.
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element.

        Emits a Fortran if/elseif chain mapping an (in1, in2) index pair to
        the corresponding color-tensor argument; falls back to 'out = 1'
        when the matrix element carries no color matrix and to 'out = - 1'
        for unmatched indices.
        """

        if not matrix_element.get('color_matrix'):
            # Single trivial line; no join needed.
            return "out = 1"

        # Start the real work.
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        # Compile once, outside the loop, rather than per color flow.
        t_match = re.compile(r"(\w+)\(([\s\d+\,]*)\)")
        for i_color in range(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str = repr(my_cs)
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg = []
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne']:
                    continue
                if ctype not in ['T', 'Tr']:
                    raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype)
                # '0' acts as a separator between successive tensors.
                tmparg += ['0']
                arg += tmparg
            for j, v in enumerate(arg):
                output[(i_color, j)] = v

            for key in output:
                if matrix_strings == []:
                    # First entry of the chain.
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    # Not first entry.
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self, *args, **opts):
        """Nothing to compile for the Matchbox output."""
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Adding leading color part of the colorflow.

        Writes the usual JAMP lines plus a second, leading-color-only set
        (formatted with *JAMP_formatLC*, default 'LN' + JAMP_format) built
        from the color amplitudes with no 1/NC suppression.
        """

        if not JAMP_formatLC:
            JAMP_formatLC = "LN%s" % JAMP_format

        # Accept either a HelasMatrixElement or an explicit list of
        # color-amplitude lists; reject anything else.
        error_msg = "Malformed '%s' argument passed to the get_JAMP_lines"
        if isinstance(col_amps, helas_objects.HelasMatrixElement):
            col_amps = col_amps.get_color_amplitudes()
        elif isinstance(col_amps, list):
            if col_amps and isinstance(col_amps[0], list):
                pass  # already in the expected list-of-lists shape
            else:
                raise MadGraph5Error(error_msg % 'col_amps')
        else:
            raise MadGraph5Error(error_msg % 'col_amps')

        text, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                             JAMP_format=JAMP_format,
                                             AMP_format=AMP_format,
                                             split=-1)

        # Filter the col_amps to keep only terms without any 1/NC factor
        # (coefficient[3] is the 1/NC power).
        LC_col_amps = []
        for coeff_list in col_amps:
            LC_col_amps.append([(coefficient, amp_number)
                                for (coefficient, amp_number) in coeff_list
                                if coefficient[3] == 0])

        text2, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                             JAMP_format=JAMP_formatLC,
                                             AMP_format=AMP_format,
                                             split=-1)
        text += text2

        return text, 0
2921
2922 2923 2924 2925 #=============================================================================== 2926 # ProcessExporterFortranMW 2927 #=============================================================================== 2928 -class ProcessExporterFortranMW(ProcessExporterFortran):
2929 """Class to take care of exporting a set of matrix elements to 2930 MadGraph v4 - MadWeight format.""" 2931 2932 matrix_file="matrix_standalone_v4.inc" 2933 jamp_optim = False 2934
2935 - def copy_template(self, model):
2936 """Additional actions needed for setup of Template 2937 """ 2938 2939 super(ProcessExporterFortranMW, self).copy_template(model) 2940 2941 # Add the MW specific file 2942 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2943 pjoin(self.dir_path, 'Source','MadWeight'), True) 2944 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2945 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2946 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2947 pjoin(self.dir_path, 'Source','setrun.f')) 2948 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2949 pjoin(self.dir_path, 'Source','run.inc')) 2950 # File created from Template (Different in some child class) 2951 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2952 self.write_run_config_file(writers.FortranWriter(filename)) 2953 2954 try: 2955 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2956 stdout = os.open(os.devnull, os.O_RDWR), 2957 stderr = os.open(os.devnull, os.O_RDWR), 2958 cwd=self.dir_path) 2959 except OSError: 2960 # Probably madweight already called 2961 pass 2962 2963 # Copy the different python file in the Template 2964 self.copy_python_file() 2965 # create the appropriate cuts.f 2966 self.get_mw_cuts_version() 2967 2968 # add the makefile in Source directory 2969 filename = os.path.join(self.dir_path,'Source','makefile') 2970 self.write_source_makefile(writers.FortranWriter(filename))
2971 2972 2973 2974 2975 #=========================================================================== 2976 # convert_model 2977 #===========================================================================
2978 - def convert_model(self, model, wanted_lorentz = [], 2979 wanted_couplings = []):
2980 2981 super(ProcessExporterFortranMW,self).convert_model(model, 2982 wanted_lorentz, wanted_couplings) 2983 2984 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2985 try: 2986 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2987 except OSError as error: 2988 pass 2989 model_path = model.get('modelpath') 2990 # This is not safe if there is a '##' or '-' in the path. 2991 shutil.copytree(model_path, 2992 pjoin(self.dir_path,'bin','internal','ufomodel'), 2993 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2994 if hasattr(model, 'restrict_card'): 2995 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2996 'restrict_default.dat') 2997 if isinstance(model.restrict_card, check_param_card.ParamCard): 2998 model.restrict_card.write(out_path) 2999 else: 3000 files.cp(model.restrict_card, out_path)
3001 3002 #=========================================================================== 3003 # generate_subprocess_directory 3004 #===========================================================================
3005 - def copy_python_file(self):
3006 """copy the python file require for the Template""" 3007 3008 # madevent interface 3009 cp(_file_path+'/interface/madweight_interface.py', 3010 self.dir_path+'/bin/internal/madweight_interface.py') 3011 cp(_file_path+'/interface/extended_cmd.py', 3012 self.dir_path+'/bin/internal/extended_cmd.py') 3013 cp(_file_path+'/interface/common_run_interface.py', 3014 self.dir_path+'/bin/internal/common_run_interface.py') 3015 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3016 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3017 cp(_file_path+'/iolibs/save_load_object.py', 3018 self.dir_path+'/bin/internal/save_load_object.py') 3019 cp(_file_path+'/madevent/gen_crossxhtml.py', 3020 self.dir_path+'/bin/internal/gen_crossxhtml.py') 3021 cp(_file_path+'/madevent/sum_html.py', 3022 self.dir_path+'/bin/internal/sum_html.py') 3023 cp(_file_path+'/various/FO_analyse_card.py', 3024 self.dir_path+'/bin/internal/FO_analyse_card.py') 3025 cp(_file_path+'/iolibs/file_writers.py', 3026 self.dir_path+'/bin/internal/file_writers.py') 3027 #model file 3028 cp(_file_path+'../models/check_param_card.py', 3029 self.dir_path+'/bin/internal/check_param_card.py') 3030 3031 #madevent file 3032 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3033 cp(_file_path+'/various/lhe_parser.py', 3034 self.dir_path+'/bin/internal/lhe_parser.py') 3035 3036 cp(_file_path+'/various/banner.py', 3037 self.dir_path+'/bin/internal/banner.py') 3038 cp(_file_path+'/various/shower_card.py', 3039 self.dir_path+'/bin/internal/shower_card.py') 3040 cp(_file_path+'/various/cluster.py', 3041 self.dir_path+'/bin/internal/cluster.py') 3042 3043 # logging configuration 3044 cp(_file_path+'/interface/.mg5_logging.conf', 3045 self.dir_path+'/bin/internal/me5_logging.conf') 3046 cp(_file_path+'/interface/coloring_logging.py', 3047 self.dir_path+'/bin/internal/coloring_logging.py')
3048 3049 3050 #=========================================================================== 3051 # Change the version of cuts.f to the one compatible with MW 3052 #===========================================================================
3053 - def get_mw_cuts_version(self, outpath=None):
3054 """create the appropriate cuts.f 3055 This is based on the one associated to ME output but: 3056 1) No clustering (=> remove initcluster/setclscales) 3057 2) Adding the definition of cut_bw at the file. 3058 """ 3059 3060 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 3061 3062 text = StringIO() 3063 #1) remove all dependencies in ickkw >1: 3064 nb_if = 0 3065 for line in template: 3066 if 'if(xqcut.gt.0d0' in line: 3067 nb_if = 1 3068 if nb_if == 0: 3069 text.write(line) 3070 continue 3071 if re.search(r'if\(.*\)\s*then', line): 3072 nb_if += 1 3073 elif 'endif' in line: 3074 nb_if -= 1 3075 3076 #2) add fake cut_bw (have to put the true one later) 3077 text.write(""" 3078 logical function cut_bw(p) 3079 include 'madweight_param.inc' 3080 double precision p(*) 3081 if (bw_cut) then 3082 cut_bw = .true. 3083 else 3084 stop 1 3085 endif 3086 return 3087 end 3088 """) 3089 3090 final = text.getvalue() 3091 #3) remove the call to initcluster: 3092 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 3093 template = template.replace('genps.inc', 'maxparticles.inc') 3094 #Now we can write it 3095 if not outpath: 3096 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 3097 elif isinstance(outpath, str): 3098 fsock = open(outpath, 'w') 3099 else: 3100 fsock = outpath 3101 fsock.write(template)
3102 3103 3104 3105 #=========================================================================== 3106 # Make the Helas and Model directories for Standalone directory 3107 #===========================================================================
3108 - def make(self):
3109 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 3110 everything for running madweight 3111 """ 3112 3113 source_dir = os.path.join(self.dir_path, "Source") 3114 logger.info("Running make for Helas") 3115 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 3116 logger.info("Running make for Model") 3117 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 3118 logger.info("Running make for PDF") 3119 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 3120 logger.info("Running make for CERNLIB") 3121 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 3122 logger.info("Running make for GENERIC") 3123 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 3124 logger.info("Running make for blocks") 3125 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 3126 logger.info("Running make for tools") 3127 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
3128 3129 #=========================================================================== 3130 # Create proc_card_mg5.dat for MadWeight directory 3131 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat

        Also writes maxparticles.inc (linked into the MadWeight blocks/tools
        directories), sets the compilers, compiles the libraries and chains
        to the generic ProcessExporterFortran.finalize.
        """

        # Compiler choices taken from the mg5 options passed in.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}



        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # MadWeight's blocks and tools libraries include the same file.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
3163 3164 3165 3166 #=========================================================================== 3167 # create the run_card for MW 3168 #===========================================================================
3169 - def create_run_card(self, matrix_elements, history):
3170 """ """ 3171 3172 run_card = banner_mod.RunCard() 3173 3174 # pass to default for MW 3175 run_card["run_tag"] = "\'not_use\'" 3176 run_card["fixed_ren_scale"] = "T" 3177 run_card["fixed_fac_scale"] = "T" 3178 run_card.remove_all_cut() 3179 3180 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 3181 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3182 python_template=True) 3183 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 3184 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3185 python_template=True)
3186 3187 #=========================================================================== 3188 # export model files 3189 #===========================================================================
3190 - def export_model_files(self, model_path):
3191 """export the model dependent files for V4 model""" 3192 3193 super(ProcessExporterFortranMW,self).export_model_files(model_path) 3194 # Add the routine update_as_param in v4 model 3195 # This is a function created in the UFO 3196 text=""" 3197 subroutine update_as_param() 3198 call setpara('param_card.dat',.false.) 3199 return 3200 end 3201 """ 3202 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3203 ff.write(text) 3204 ff.close() 3205 3206 # Modify setrun.f 3207 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 3208 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3209 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 3210 fsock.write(text) 3211 fsock.close() 3212 3213 # Modify initialization.f 3214 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 3215 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 3216 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 3217 fsock.write(text) 3218 fsock.close() 3219 3220 3221 self.make_model_symbolic_link()
3222 3223 #=========================================================================== 3224 # generate_subprocess_directory 3225 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files

        Writes all the per-subprocess Fortran sources and include files,
        optionally draws the Feynman diagrams, and symlinks the shared
        driver files.  Returns the number of helas calls written.
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath,'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                                      writers.FortranWriter(filename),
                                      matrix_element,
                                      fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # Diagram-to-configuration map; s_and_t_channels feeds props.inc.
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): links are created relative to the original working
        # directory (starting_dir=cwd), not dirpath as in the standalone
        # exporter — confirm this is intended.
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3333 3334 #=========================================================================== 3335 # write_matrix_element_v4 3336 #===========================================================================
3337 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3338 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3339 3340 if not matrix_element.get('processes') or \ 3341 not matrix_element.get('diagrams'): 3342 return 0 3343 3344 if writer: 3345 if not isinstance(writer, writers.FortranWriter): 3346 raise writers.FortranWriter.FortranWriterError(\ 3347 "writer not FortranWriter") 3348 3349 # Set lowercase/uppercase Fortran code 3350 writers.FortranWriter.downcase = False 3351 3352 replace_dict = {} 3353 3354 # Extract version number and date from VERSION file 3355 info_lines = self.get_mg5_info_lines() 3356 replace_dict['info_lines'] = info_lines 3357 3358 # Extract process info lines 3359 process_lines = self.get_process_info_lines(matrix_element) 3360 replace_dict['process_lines'] = process_lines 3361 3362 # Set proc_id 3363 replace_dict['proc_id'] = proc_id 3364 3365 # Extract number of external particles 3366 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3367 replace_dict['nexternal'] = nexternal 3368 3369 # Extract ncomb 3370 ncomb = matrix_element.get_helicity_combinations() 3371 replace_dict['ncomb'] = ncomb 3372 3373 # Extract helicity lines 3374 helicity_lines = self.get_helicity_lines(matrix_element) 3375 replace_dict['helicity_lines'] = helicity_lines 3376 3377 # Extract overall denominator 3378 # Averaging initial state color, spin, and identical FS particles 3379 den_factor_line = self.get_den_factor_line(matrix_element) 3380 replace_dict['den_factor_line'] = den_factor_line 3381 3382 # Extract ngraphs 3383 ngraphs = matrix_element.get_number_of_amplitudes() 3384 replace_dict['ngraphs'] = ngraphs 3385 3386 # Extract nwavefuncs 3387 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3388 replace_dict['nwavefuncs'] = nwavefuncs 3389 3390 # Extract ncolor 3391 ncolor = max(1, len(matrix_element.get('color_basis'))) 3392 replace_dict['ncolor'] = ncolor 3393 3394 # Extract color data lines 3395 color_data_lines = self.get_color_data_lines(matrix_element) 3396 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3397 3398 # Extract helas calls 3399 helas_calls = fortran_model.get_matrix_element_calls(\ 3400 matrix_element) 3401 3402 replace_dict['helas_calls'] = "\n".join(helas_calls) 3403 3404 # Extract JAMP lines 3405 jamp_lines, nb = self.get_JAMP_lines(matrix_element) 3406 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3407 3408 replace_dict['template_file'] = os.path.join(_file_path, \ 3409 'iolibs/template_files/%s' % self.matrix_file) 3410 replace_dict['template_file2'] = '' 3411 3412 if writer: 3413 file = open(replace_dict['template_file']).read() 3414 file = file % replace_dict 3415 # Write the file 3416 writer.writelines(file) 3417 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3418 else: 3419 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3420 3421 #=========================================================================== 3422 # write_source_makefile 3423 #===========================================================================
3424 - def write_source_makefile(self, writer):
3425 """Write the nexternal.inc file for madweight""" 3426 3427 3428 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3429 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3430 text = open(path).read() % {'libraries': set_of_lib} 3431 writer.write(text) 3432 3433 return True
3434
    def write_phasespace_file(self, writer, nb_diag):
        """Write the phase-space include file for MadWeight.

        *nb_diag* is the number of channel configurations; it becomes the
        Fortran parameter max_configs in the generated include.
        """

        # NOTE(review): the exact internal whitespace of this Fortran
        # template is significant to the generated file -- confirm against
        # the repository copy before reformatting.
        template = """ include 'maxparticles.inc'
      integer max_branches
      parameter (max_branches=max_particles-1)
      integer max_configs
      parameter (max_configs=%(nb_diag)s)

c     channel position
      integer config_pos,perm_pos
      common /to_config/config_pos,perm_pos

        """

        writer.write(template % {'nb_diag': nb_diag})
3451 3452 3453 #=========================================================================== 3454 # write_auto_dsig_file 3455 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format).

        Returns 0 when the matrix element is empty; when no writer is
        given, returns the substitution dictionary instead of writing.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        # Only decay (1 initial particle) or scattering (2) is supported.
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular output
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            file = open(os.path.join(_file_path, \
                             'iolibs/template_files/auto_dsig_mw.inc')).read()

            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3524 #=========================================================================== 3525 # write_configs_file 3526 #===========================================================================
3527 - def write_configs_file(self, writer, matrix_element):
3528 """Write the configs.inc file for MadEvent""" 3529 3530 # Extract number of external particles 3531 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3532 3533 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3534 mapconfigs = [c[0] for c in configs] 3535 model = matrix_element.get('processes')[0].get('model') 3536 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3537 [[c[1]] for c in configs], 3538 mapconfigs, 3539 nexternal, ninitial,matrix_element, model)
3540 3541 #=========================================================================== 3542 # write_run_configs_file 3543 #===========================================================================
3544 - def write_run_config_file(self, writer):
3545 """Write the run_configs.inc file for MadWeight""" 3546 3547 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3548 text = open(path).read() % {'chanperjob':'5'} 3549 writer.write(text) 3550 return True
3551 3552 #=========================================================================== 3553 # write_configs_file_from_diagrams 3554 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns s_and_t_channels, the (s, t) channel decomposition kept
        for each written config.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Maximum vertex size seen in the first contributing diagram of
        # each config; configs with larger vertices are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # PDG code guaranteed unused by the model, for fake propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial,model,new_pdg))
                else:
                    # Placeholder aliases empty_verts: resizing it below
                    # updates every placeholder entry in stchannels too.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondence between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #             (last_leg.get('number'), nconfigs, len(daughters),
                #              ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #             (last_leg.get('number'), nconfigs, nsubprocs,
                    #              ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #             (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #             (last_leg.get('number'), nconfigs,
                    #              abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #             (last_leg.get('number'), nconfigs, nsubprocs,
                    #              ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3697
#===============================================================================
# ProcessExporterFortranME
#===============================================================================
class ProcessExporterFortranME(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadEvent format."""

    # Template used when writing the matrix<i>.f files of this exporter.
    matrix_file = "matrix_madevent_v4.inc"
    # NOTE(review): presumably a one-shot guard so a t-channel related
    # warning is only logged once -- confirm at the use site.
    done_warning_tchannel = False

    # Default export options; __init__ may override entries from
    # opt['output_options'] (e.g. hel_recycling, t_strategy).
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{},
                   'hel_recycling': False
                   }
    # Use the optimised JAMP evaluation by default.
    jamp_optim = True
3718 - def __init__(self, dir_path = "", opt=None):
3719 3720 super(ProcessExporterFortranME, self).__init__(dir_path, opt) 3721 3722 # check and format the hel_recycling options as it should if provided 3723 if opt and isinstance(opt['output_options'], dict) and \ 3724 'hel_recycling' in opt['output_options']: 3725 self.opt['hel_recycling'] = banner_mod.ConfigFile.format_variable( 3726 opt['output_options']['hel_recycling'], bool, 'hel_recycling') 3727 3728 if opt and isinstance(opt['output_options'], dict) and \ 3729 't_strategy' in opt['output_options']: 3730 self.opt['t_strategy'] = banner_mod.ConfigFile.format_variable( 3731 opt['output_options']['t_strategy'], int, 't_strategy')
3732 3733 # helper function for customise helas writter 3734 @staticmethod
3735 - def custom_helas_call(call, arg):
3736 if arg['mass'] == '%(M)s,%(W)s,': 3737 arg['mass'] = '%(M)s, fk_%(W)s,' 3738 elif '%(W)s' in arg['mass']: 3739 raise Exception 3740 return call, arg
3741
3742 - def copy_template(self, model):
3743 """Additional actions needed for setup of Template 3744 """ 3745 3746 super(ProcessExporterFortranME, self).copy_template(model) 3747 3748 # File created from Template (Different in some child class) 3749 filename = pjoin(self.dir_path,'Source','run_config.inc') 3750 self.write_run_config_file(writers.FortranWriter(filename)) 3751 3752 # The next file are model dependant (due to SLAH convention) 3753 self.model_name = model.get('name') 3754 # Add the symmetry.f 3755 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3756 self.write_symmetry(writers.FortranWriter(filename)) 3757 # 3758 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3759 self.write_addmothers(writers.FortranWriter(filename)) 3760 # Copy the different python file in the Template 3761 self.copy_python_file()
3762 3763 3764 3765 3766 3767 3768 #=========================================================================== 3769 # generate_subprocess_directory 3770 #===========================================================================
3771 - def copy_python_file(self):
3772 """copy the python file require for the Template""" 3773 3774 # madevent interface 3775 cp(_file_path+'/interface/madevent_interface.py', 3776 self.dir_path+'/bin/internal/madevent_interface.py') 3777 cp(_file_path+'/interface/extended_cmd.py', 3778 self.dir_path+'/bin/internal/extended_cmd.py') 3779 cp(_file_path+'/interface/common_run_interface.py', 3780 self.dir_path+'/bin/internal/common_run_interface.py') 3781 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3782 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3783 cp(_file_path+'/iolibs/save_load_object.py', 3784 self.dir_path+'/bin/internal/save_load_object.py') 3785 cp(_file_path+'/iolibs/file_writers.py', 3786 self.dir_path+'/bin/internal/file_writers.py') 3787 #model file 3788 cp(_file_path+'../models/check_param_card.py', 3789 self.dir_path+'/bin/internal/check_param_card.py') 3790 3791 #copy all the file present in madevent directory 3792 for name in os.listdir(pjoin(_file_path, 'madevent')): 3793 if name not in ['__init__.py'] and name.endswith('.py'): 3794 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3795 3796 #madevent file 3797 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3798 cp(_file_path+'/various/lhe_parser.py', 3799 self.dir_path+'/bin/internal/lhe_parser.py') 3800 cp(_file_path+'/various/banner.py', 3801 self.dir_path+'/bin/internal/banner.py') 3802 cp(_file_path+'/various/histograms.py', 3803 self.dir_path+'/bin/internal/histograms.py') 3804 cp(_file_path+'/various/plot_djrs.py', 3805 self.dir_path+'/bin/internal/plot_djrs.py') 3806 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3807 3808 cp(_file_path+'/various/cluster.py', 3809 self.dir_path+'/bin/internal/cluster.py') 3810 cp(_file_path+'/madevent/combine_runs.py', 3811 self.dir_path+'/bin/internal/combine_runs.py') 3812 # logging configuration 3813 cp(_file_path+'/interface/.mg5_logging.conf', 3814 
self.dir_path+'/bin/internal/me5_logging.conf') 3815 cp(_file_path+'/interface/coloring_logging.py', 3816 self.dir_path+'/bin/internal/coloring_logging.py') 3817 # shower card and FO_analyse_card. 3818 # Although not needed, it is imported by banner.py 3819 cp(_file_path+'/various/shower_card.py', 3820 self.dir_path+'/bin/internal/shower_card.py') 3821 cp(_file_path+'/various/FO_analyse_card.py', 3822 self.dir_path+'/bin/internal/FO_analyse_card.py')
3823 3824
3825 - def convert_model(self, model, wanted_lorentz = [], 3826 wanted_couplings = []):
3827 3828 super(ProcessExporterFortranME,self).convert_model(model, 3829 wanted_lorentz, wanted_couplings) 3830 3831 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3832 try: 3833 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3834 except OSError as error: 3835 pass 3836 model_path = model.get('modelpath') 3837 # This is not safe if there is a '##' or '-' in the path. 3838 shutil.copytree(model_path, 3839 pjoin(self.dir_path,'bin','internal','ufomodel'), 3840 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3841 if hasattr(model, 'restrict_card'): 3842 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3843 'restrict_default.dat') 3844 if isinstance(model.restrict_card, check_param_card.ParamCard): 3845 model.restrict_card.write(out_path) 3846 else: 3847 files.cp(model.restrict_card, out_path)
3848 3849 #=========================================================================== 3850 # export model files 3851 #===========================================================================
    def export_model_files(self, model_path):
        """Export the model dependent files.

        Beyond the base-class export: append an update_as_param routine
        to couplings.f, regenerate symmetry.f for the v4 model, and patch
        setrun.f so setpara is called with the read-again flag.
        """

        super(ProcessExporterFortranME,self).export_model_files(model_path)

        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Add the symmetry.f (v4-model variant)
        filename = pjoin(self.dir_path,'SubProcesses','symmetry.f')
        self.write_symmetry(writers.FortranWriter(filename), v5=False)

        # Modify setrun.f: force setpara to re-read the param card
        text = open(pjoin(self.dir_path,'Source','setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
3881 3882 #=========================================================================== 3883 # generate_subprocess_directory 3884 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        Returns the number of helas calls written for matrix.f (0 if none).
        """

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')


        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # directory may already exist; warn and reuse it
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])


        # Create the matrix.f file, auto_dsig.f file and all inc files.
        # With helicity recycling the raw file is post-processed later,
        # so it is written under a different name.
        if self.opt['hel_recycling']:
            filename = pjoin(Ppath, 'matrix_orig.f')
        else:
            filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))


        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams (skipped when output option noeps == 'True')
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(Ppath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        if not calls:
            calls = 0
        return calls
4058 4059 link_Sub_files = ['addmothers.f', 4060 'cluster.f', 4061 'cluster.inc', 4062 'coupl.inc', 4063 'cuts.f', 4064 'cuts.inc', 4065 'genps.f', 4066 'genps.inc', 4067 'idenparts.f', 4068 'initcluster.f', 4069 'makefile', 4070 'message.inc', 4071 'myamp.f', 4072 'reweight.f', 4073 'run.inc', 4074 'maxconfigs.inc', 4075 'maxparticles.inc', 4076 'run_config.inc', 4077 'lhe_event_infos.inc', 4078 'setcuts.f', 4079 'setscales.f', 4080 'sudakov.inc', 4081 'symmetry.f', 4082 'unwgt.f', 4083 'dummy_fct.f' 4084 ] 4085 4099 4100
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize ME v4 directory by creating jpeg diagrams, html
        pages, proc_card_mg5.dat and madevent.tar.gz.

        flaglist may contain 'nojpeg' (skip diagram jpegs) and 'online'
        (record the number of generated channels for the web interface).
        """

        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        if 'online' in flaglist:
            online = True
        else:
            online = False

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        # indicate that the output type is not grouped
        if not isinstance(self, ProcessExporterFortranMEGroup):
            self.proc_characteristic['grouped_matrix'] = False

        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        # set limitation linked to the model


        # indicate the PDG of all initial particles; matrix_elements may
        # be a list of groups or a single container (hence the fallback).
        try:
            pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        except AttributeError:
            pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        self.proc_characteristic['pdg_initial1'] = pdgs1
        self.proc_characteristic['pdg_initial2'] = pdgs2


        modelname = self.opt['model']
        if modelname == 'mssm' or modelname.startswith('mssm-'):
            param_card = pjoin(self.dir_path, 'Cards','param_card.dat')
            mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
            check_param_card.convert_to_mg5card(param_card, mg5_param)
            check_param_card.check_valid_param_card(mg5_param)

        # Add the combine_events.f modify param_card path/number of @X
        filename = pjoin(self.dir_path,'Source','combine_events.f')
        try:
            nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
        except AttributeError:
            nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
        nb_proc = len(set(nb_proc))
        self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements)

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)

        # Touch "done" file
        os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses'))

        # Check for compiler
        self.set_compiler(compiler)
        self.set_cpp_compiler(compiler['cpp'])


        old_pos = os.getcwd()
        subpath = pjoin(self.dir_path, 'SubProcesses')

        P_dir_list = [proc for proc in os.listdir(subpath)
                      if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P']

        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the postscript into jpg files (if authorized)
        if makejpg:
            try:
                os.remove(pjoin(self.dir_path,'HTML','card.jpg'))
            except Exception as error:
                # best effort: stale card.jpg may simply not exist
                pass

            if misc.which('gs'):
                logger.info("Generate jpeg diagrams")
                for Pdir in P_dir_list:
                    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                              stdout = devnull, cwd=pjoin(subpath, Pdir))

        logger.info("Generate web pages")
        # Create the WebPage using perl script

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                  stdout = devnull,cwd=pjoin(self.dir_path))

        #os.chdir(os.path.pardir)

        obj = gen_infohtml.make_info_html(self.dir_path)

        if online:
            nb_channel = obj.rep_rule['nb_gen_diag']
            open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel))
        #add the information to proc_charac
        self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

        # Write command history as proc_card_mg5
        if os.path.isdir(pjoin(self.dir_path,'Cards')):
            output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull)

        # create the proc_characteristic file
        self.create_proc_charac(matrix_elements, history)

        # create the run_card
        ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')):
            if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')):
                os.remove(pjoin(self.dir_path,'madevent.tar.gz'))
            misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                      stdout = devnull, cwd=self.dir_path)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull, cwd=self.dir_path)
4230 4231 4232 4233 4234 4235 4236 #return to the initial dir 4237 #os.chdir(old_pos) 4238 4239 #=========================================================================== 4240 # write_matrix_element_v4 4241 #===========================================================================
4242 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 4243 proc_id = "", config_map = [], subproc_number = ""):
4244 """Export a matrix element to a matrix.f file in MG4 madevent format""" 4245 4246 if not matrix_element.get('processes') or \ 4247 not matrix_element.get('diagrams'): 4248 return 0 4249 4250 if writer: 4251 if not isinstance(writer, writers.FortranWriter): 4252 raise writers.FortranWriter.FortranWriterError(\ 4253 "writer not FortranWriter") 4254 # Set lowercase/uppercase Fortran code 4255 writers.FortranWriter.downcase = False 4256 4257 # check if MLM/.../ is supported for this matrix-element and update associate flag 4258 if self.model and 'MLM' in self.model["limitations"]: 4259 if 'MLM' not in self.proc_characteristic["limitations"]: 4260 used_couplings = matrix_element.get_used_couplings(output="set") 4261 for vertex in self.model.get('interactions'): 4262 particles = [p for p in vertex.get('particles')] 4263 if 21 in [p.get('pdg_code') for p in particles]: 4264 colors = [par.get('color') for par in particles] 4265 if 1 in colors: 4266 continue 4267 elif 'QCD' not in vertex.get('orders'): 4268 for bad_coup in vertex.get('couplings').values(): 4269 if bad_coup in used_couplings: 4270 self.proc_characteristic["limitations"].append('MLM') 4271 break 4272 4273 # The proc prefix is not used for MadEvent output so it can safely be set 4274 # to an empty string. 
4275 replace_dict = {'proc_prefix':''} 4276 4277 4278 # Extract helas calls 4279 helas_calls = fortran_model.get_matrix_element_calls(\ 4280 matrix_element) 4281 if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel: 4282 logger.info("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False", '$MG:BOLD') 4283 ProcessExporterFortranME.done_warning_tchannel = True 4284 4285 replace_dict['helas_calls'] = "\n".join(helas_calls) 4286 4287 4288 #adding the support for the fake width (forbidding too small width) 4289 mass_width = matrix_element.get_all_mass_widths() 4290 mass_width = sorted(list(mass_width)) 4291 width_list = set([e[1] for e in mass_width]) 4292 4293 replace_dict['fake_width_declaration'] = \ 4294 (' double precision fk_%s \n' * len(width_list)) % tuple(width_list) 4295 replace_dict['fake_width_declaration'] += \ 4296 (' save fk_%s \n' * len(width_list)) % tuple(width_list) 4297 fk_w_defs = [] 4298 one_def = ' IF(%(w)s.ne.0d0) fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)' 4299 for m, w in mass_width: 4300 if w == 'zero': 4301 if ' fk_zero = 0d0' not in fk_w_defs: 4302 fk_w_defs.append(' fk_zero = 0d0') 4303 continue 4304 fk_w_defs.append(one_def %{'m':m, 'w':w}) 4305 replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs) 4306 4307 # Extract version number and date from VERSION file 4308 info_lines = self.get_mg5_info_lines() 4309 replace_dict['info_lines'] = info_lines 4310 4311 # Extract process info lines 4312 process_lines = self.get_process_info_lines(matrix_element) 4313 replace_dict['process_lines'] = process_lines 4314 4315 # Set proc_id 4316 replace_dict['proc_id'] = proc_id 4317 4318 # Extract ncomb 4319 ncomb = matrix_element.get_helicity_combinations() 4320 replace_dict['ncomb'] = ncomb 4321 4322 # Extract helicity lines 4323 helicity_lines = self.get_helicity_lines(matrix_element) 4324 
replace_dict['helicity_lines'] = helicity_lines 4325 4326 # Extract IC line 4327 ic_line = self.get_ic_line(matrix_element) 4328 replace_dict['ic_line'] = ic_line 4329 4330 # Extract overall denominator 4331 # Averaging initial state color, spin, and identical FS particles 4332 den_factor_line = self.get_den_factor_line(matrix_element) 4333 replace_dict['den_factor_line'] = den_factor_line 4334 4335 # Extract ngraphs 4336 ngraphs = matrix_element.get_number_of_amplitudes() 4337 replace_dict['ngraphs'] = ngraphs 4338 4339 # Extract ndiags 4340 ndiags = len(matrix_element.get('diagrams')) 4341 replace_dict['ndiags'] = ndiags 4342 4343 # Set define_iconfigs_lines 4344 replace_dict['define_iconfigs_lines'] = \ 4345 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 4346 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 4347 4348 if proc_id: 4349 # Set lines for subprocess group version 4350 # Set define_iconfigs_lines 4351 replace_dict['define_iconfigs_lines'] += \ 4352 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4353 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4354 # Set set_amp2_line 4355 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 4356 proc_id 4357 else: 4358 # Standard running 4359 # Set set_amp2_line 4360 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 4361 4362 # Extract nwavefuncs 4363 nwavefuncs = matrix_element.get_number_of_wavefunctions() 4364 replace_dict['nwavefuncs'] = nwavefuncs 4365 4366 # Extract ncolor 4367 ncolor = max(1, len(matrix_element.get('color_basis'))) 4368 replace_dict['ncolor'] = ncolor 4369 4370 # Extract color data lines 4371 color_data_lines = self.get_color_data_lines(matrix_element) 4372 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4373 4374 4375 # Set the size of Wavefunction 4376 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4377 replace_dict['wavefunctionsize'] = 18 4378 else: 4379 replace_dict['wavefunctionsize'] = 6 4380 4381 # Extract amp2 lines 
4382 amp2_lines = self.get_amp2_lines(matrix_element, config_map, replace_dict) 4383 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4384 4385 # The JAMP definition depends on the splitting order 4386 split_orders=matrix_element.get('processes')[0].get('split_orders') 4387 if len(split_orders)>0: 4388 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4389 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4390 matrix_element.get('processes')[0],squared_orders) 4391 replace_dict['select_configs_if'] = ' IF (CHOSEN_SO_CONFIGS(SQSOINDEX%(proc_id)s(M,N))) THEN' % replace_dict 4392 replace_dict['select_configs_endif'] = ' endif' 4393 else: 4394 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4395 # set all amplitude order to weight 1 and only one squared order 4396 # contribution which is of course ALL_ORDERS=2. 4397 squared_orders = [(2,),] 4398 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4399 replace_dict['chosen_so_configs'] = '.TRUE.' 4400 # addtionally set the function to NOT be called 4401 replace_dict['select_configs_if'] = '' 4402 replace_dict['select_configs_endif'] = '' 4403 4404 replace_dict['nAmpSplitOrders']=len(amp_orders) 4405 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4406 replace_dict['split_order_str_list']=str(split_orders) 4407 replace_dict['nSplitOrders']=max(len(split_orders),1) 4408 amp_so = self.get_split_orders_lines( 4409 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4410 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4411 replace_dict['ampsplitorders']='\n'.join(amp_so) 4412 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4413 4414 4415 # Extract JAMP lines 4416 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4417 jamp_lines, nb_temp = self.get_JAMP_lines_split_order(\ 4418 matrix_element,amp_orders,split_order_names= 4419 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4420 replace_dict['jamp_lines'] = 
'\n'.join(jamp_lines) 4421 replace_dict['nb_temp_jamp'] = nb_temp 4422 4423 replace_dict['template_file'] = pjoin(_file_path, \ 4424 'iolibs/template_files/%s' % self.matrix_file) 4425 replace_dict['template_file2'] = pjoin(_file_path, \ 4426 'iolibs/template_files/split_orders_helping_functions.inc') 4427 4428 s1,s2 = matrix_element.get_spin_state_initial() 4429 replace_dict['nb_spin_state1'] = s1 4430 replace_dict['nb_spin_state2'] = s2 4431 4432 if writer: 4433 file = open(replace_dict['template_file']).read() 4434 file = file % replace_dict 4435 # Add the split orders helper functions. 4436 file = file + '\n' + open(replace_dict['template_file2'])\ 4437 .read()%replace_dict 4438 # Write the file 4439 writer.writelines(file) 4440 return len([call for call in helas_calls if call.find('#') != 0]), ncolor 4441 else: 4442 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor) 4443 return replace_dict
4444 4445 #=========================================================================== 4446 # write_auto_dsig_file 4447 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information.

        Side effects: updates self.proc_characteristic ('ninitial',
        'nexternal', 'max_n_matched_jets', 'colored_pdgs').

        Returns 0 for an empty matrix element; otherwise writes the file
        when writer is given, or returns (replace_dict, context) when it
        is not.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        self.proc_characteristic['ninitial'] = ninitial
        # Keep the maximum over all subprocesses written so far
        self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

        # Add information relevant for MLM matching:
        # Maximum QCD power in all the contributions
        max_qcd_order = 0
        for diag in matrix_element.get('diagrams'):
            orders = diag.calculate_orders()
            if 'QCD' in orders:
                max_qcd_order = max(max_qcd_order,orders['QCD'])
        # Count massless colored final-state particles (light partons),
        # maximized over all processes of this matrix element.
        max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
            if proc.get('model').get_particle(id).get('mass')=='ZERO' and
               proc.get('model').get_particle(id).get('color')>1])
                                    for proc in matrix_element.get('processes'))
        # Maximum number of final state light jets to be matched
        self.proc_characteristic['max_n_matched_jets'] = max(
                               self.proc_characteristic['max_n_matched_jets'],
                               min(max_qcd_order,max_n_light_final_partons))

        # List of default pdgs to be considered for the CKKWl merging cut
        self.proc_characteristic['colored_pdgs'] = \
          sorted(list(set([abs(p.get('pdg_code')) for p in
          matrix_element.get('processes')[0].get('model').get('particles') if
          p.get('color')>1])))

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            replace_dict['cutsdone'] = ""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""
            replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

        # Grouped output writes its own good_hel handling elsewhere
        if not isinstance(self, ProcessExporterFortranMEGroup):
            ncomb=matrix_element.get_helicity_combinations()
            replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
        else:
            replace_dict['read_write_good_hel'] = ""

        context = {'read_write_good_hel':True}

        if writer:
            file = open(pjoin(_file_path, \
                          'iolibs/template_files/auto_dsig_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file, context=context)
        else:
            return replace_dict, context
4550 #=========================================================================== 4551 # write_coloramps_file 4552 #===========================================================================
4553 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4554 """Write the coloramps.inc file for MadEvent""" 4555 4556 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4557 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4558 (max(len(list(matrix_element.get('color_basis').keys())), 1), 4559 len(mapconfigs))) 4560 4561 4562 # Write the file 4563 writer.writelines(lines) 4564 4565 return True
4566 4567 #=========================================================================== 4568 # write_colors_file 4569 #===========================================================================
4570 - def write_colors_file(self, writer, matrix_elements):
4571 """Write the get_color.f file for MadEvent, which returns color 4572 for all particles used in the matrix element.""" 4573 4574 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4575 matrix_elements = [matrix_elements] 4576 4577 model = matrix_elements[0].get('processes')[0].get('model') 4578 4579 # We need the both particle and antiparticle wf_ids, since the identity 4580 # depends on the direction of the wf. 4581 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4582 for wf in d.get('wavefunctions')],[]) \ 4583 for d in me.get('diagrams')], []) \ 4584 for me in matrix_elements], [])) 4585 4586 leg_ids = set(sum([sum([sum([[l.get('id'), 4587 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4588 for l in p.get_legs_with_decays()], []) \ 4589 for p in me.get('processes')], []) \ 4590 for me in matrix_elements], [])) 4591 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4592 4593 lines = """function get_color(ipdg) 4594 implicit none 4595 integer get_color, ipdg 4596 4597 if(ipdg.eq.%d)then 4598 get_color=%d 4599 return 4600 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4601 4602 for part_id in particle_ids[1:]: 4603 lines += """else if(ipdg.eq.%d)then 4604 get_color=%d 4605 return 4606 """ % (part_id, model.get_particle(part_id).get_color()) 4607 # Dummy particle for multiparticle vertices with pdg given by 4608 # first code not in the model 4609 lines += """else if(ipdg.eq.%d)then 4610 c This is dummy particle used in multiparticle vertices 4611 get_color=2 4612 return 4613 """ % model.get_first_non_pdg() 4614 lines += """else 4615 write(*,*)'Error: No color given for pdg ',ipdg 4616 get_color=0 4617 return 4618 endif 4619 end 4620 """ 4621 4622 # Write the file 4623 writer.writelines(lines) 4624 4625 return True
4626 4627 #=========================================================================== 4628 # write_config_nqcd_file 4629 #===========================================================================
4630 - def write_config_nqcd_file(self, writer, nqcd_list):
4631 """Write the config_nqcd.inc with the number of QCD couplings 4632 for each config""" 4633 4634 lines = [] 4635 for iconf, n in enumerate(nqcd_list): 4636 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4637 4638 # Write the file 4639 writer.writelines(lines) 4640 4641 return True
4642 4643 #=========================================================================== 4644 # write_maxconfigs_file 4645 #===========================================================================
4646 - def write_maxconfigs_file(self, writer, matrix_elements):
4647 """Write the maxconfigs.inc file for MadEvent""" 4648 4649 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4650 maxconfigs = max([me.get_num_configs() for me in \ 4651 matrix_elements.get('matrix_elements')]) 4652 else: 4653 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4654 4655 lines = "integer lmaxconfigs\n" 4656 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4657 4658 # Write the file 4659 writer.writelines(lines) 4660 4661 return True
4662 4663 #=========================================================================== 4664 # read_write_good_hel 4665 #===========================================================================
4666 - def read_write_good_hel(self, ncomb):
4667 """return the code to read/write the good_hel common_block""" 4668 4669 convert = {'ncomb' : ncomb} 4670 output = """ 4671 subroutine write_good_hel(stream_id) 4672 implicit none 4673 integer stream_id 4674 INTEGER NCOMB 4675 PARAMETER ( NCOMB=%(ncomb)d) 4676 LOGICAL GOODHEL(NCOMB) 4677 INTEGER NTRY 4678 common/BLOCK_GOODHEL/NTRY,GOODHEL 4679 write(stream_id,*) GOODHEL 4680 return 4681 end 4682 4683 4684 subroutine read_good_hel(stream_id) 4685 implicit none 4686 include 'genps.inc' 4687 integer stream_id 4688 INTEGER NCOMB 4689 PARAMETER ( NCOMB=%(ncomb)d) 4690 LOGICAL GOODHEL(NCOMB) 4691 INTEGER NTRY 4692 common/BLOCK_GOODHEL/NTRY,GOODHEL 4693 read(stream_id,*) GOODHEL 4694 NTRY = MAXTRIES + 1 4695 return 4696 end 4697 4698 subroutine init_good_hel() 4699 implicit none 4700 INTEGER NCOMB 4701 PARAMETER ( NCOMB=%(ncomb)d) 4702 LOGICAL GOODHEL(NCOMB) 4703 INTEGER NTRY 4704 INTEGER I 4705 4706 do i=1,NCOMB 4707 GOODHEL(I) = .false. 4708 enddo 4709 NTRY = 0 4710 end 4711 4712 integer function get_maxsproc() 4713 implicit none 4714 get_maxsproc = 1 4715 return 4716 end 4717 4718 """ % convert 4719 4720 return output
4721 4722 #=========================================================================== 4723 # write_config_subproc_map_file 4724 #===========================================================================
4725 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4726 """Write a dummy config_subproc.inc file for MadEvent""" 4727 4728 lines = [] 4729 4730 for iconfig in range(len(s_and_t_channels)): 4731 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4732 (iconfig + 1)) 4733 4734 # Write the file 4735 writer.writelines(lines) 4736 4737 return True
4738 4739 #=========================================================================== 4740 # write_configs_file 4741 #===========================================================================
4742 - def write_configs_file(self, writer, matrix_element):
4743 """Write the configs.inc file for MadEvent""" 4744 4745 # Extract number of external particles 4746 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4747 4748 model = matrix_element.get('processes')[0].get('model') 4749 configs = [(i+1, d) for (i, d) in \ 4750 enumerate(matrix_element.get('diagrams'))] 4751 mapconfigs = [c[0] for c in configs] 4752 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4753 [[c[1]] for c in configs], 4754 mapconfigs, 4755 nexternal, ninitial, 4756 model)
4757 4758 #=========================================================================== 4759 # write_run_configs_file 4760 #===========================================================================
4761 - def write_run_config_file(self, writer):
4762 """Write the run_configs.inc file for MadEvent""" 4763 4764 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4765 4766 if self.proc_characteristic['loop_induced']: 4767 job_per_chan = 1 4768 else: 4769 job_per_chan = 5 4770 4771 if writer: 4772 text = open(path).read() % {'chanperjob': job_per_chan} 4773 writer.write(text) 4774 return True 4775 else: 4776 return {'chanperjob': job_per_chan}
4777 4778 #=========================================================================== 4779 # write_configs_file_from_diagrams 4780 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list): per written config, the
        [s_channels, t_channels, t_strategy] triple, and the number of
        QCD couplings of that config's first diagram.
        """

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # Minimum vertex size over all configs that have vertex-leg-number
        # info; used below to keep only pure 3-point configs.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fake PDG used for multiparticle-vertex dummy propagators
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    # Placeholder shared by all missing subprocesses;
                    # filled in (in place) once the real length is known.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # Pass to ping-pong strategy for t-channel for 3 or more T-channels;
            # this is directly related to change in genps.f
            tstrat = self.opt.get('t_strategy', 0)
            tchannels, tchannels_strategy = ProcessExporterFortranME.reorder_tchannels(tchannels, tstrat, self.model)

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels, tchannels_strategy])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            lines.append("data tstrategy(%d)/%d/" % (nconfigs, tchannels_strategy))
            # Number of QCD couplings in this diagram (taken from the first
            # non-empty diagram; for-else skips the config if none exists)
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
            else:
                continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                if verts in schannels:
                    # s-channel: verts is a per-subprocess tuple, use the
                    # first non-None entry for the topology
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One PDG per subprocess (0 when this subprocess has no
                    # corresponding diagram)
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    # t-channel propagator (last t-channel vertex carries no
                    # propagator of its own)
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list
4921 4922 4923 4924 #=========================================================================== 4925 # reorder t-channels 4926 #=========================================================================== 4927 4928 #ordering = 0 4929 @staticmethod
    def reorder_tchannels(tchannels, tstrat, model):
        """Choose (and apply) a t-channel ordering strategy.

        tstrat selects the strategy explicitly (2: keep as is, 1: flip
        side, -2: ping-pong, -1: ping-pong starting from beam 1); for
        tstrat == 0 a heuristic based on the masses of the outermost
        (and next-to-outermost) t-channel propagators picks one.

        Returns (reordered tchannels, strategy code) where the code is the
        value to be written in the tstrategy DATA statement (matching
        genps.f conventions).
        """
        # No need to modify anything if 1 or less T-channel.
        # Note that this counts the number of vertices (one more vertex
        # compared to the number of T-channel propagators).
        #ProcessExporterFortranME.ordering +=1
        if len(tchannels) < 3 or tstrat == 2 or not model:
            return tchannels, 2
        elif tstrat == 1:
            return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
        elif tstrat == -2:
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
        elif tstrat == -1:
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels, 1), -1
        elif len(tchannels) < 4:
            # Exactly two T-channel propagators: decide from the masses of
            # the first/last propagators, tie-broken by leg number.
            first = tchannels[0]['legs'][1]['number']
            t1 = tchannels[0]['legs'][-1]['id']
            last = tchannels[-1]['legs'][1]['number']
            t2 = tchannels[-1]['legs'][0]['id']
            m1 = model.get_particle(t1).get('mass') == 'ZERO'
            m2 = model.get_particle(t2).get('mass') == 'ZERO'
            if m2 and not m1:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            elif m1 and not m2:
                return tchannels, 2
            elif first < last:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            else:
                return tchannels, 2
        else:
            # Three or more T-channel propagators: also look one step in
            # from each end (t12/t22) before deciding.
            first = tchannels[0]['legs'][1]['number']
            t1 = tchannels[0]['legs'][-1]['id']
            last = tchannels[-1]['legs'][1]['number']
            t2 = tchannels[-1]['legs'][0]['id']
            m1 = model.get_particle(t1).get('mass') == 'ZERO'
            m2 = model.get_particle(t2).get('mass') == 'ZERO'

            t12 = tchannels[1]['legs'][-1]['id']
            m12 = model.get_particle(t12).get('mass') == 'ZERO'
            t22 = tchannels[-2]['legs'][0]['id']
            m22 = model.get_particle(t22).get('mass') == 'ZERO'
            if m2 and not m1:
                if m22:
                    return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
                else:
                    return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            elif m1 and not m2:
                if m12:
                    return tchannels, 2
                else:
                    return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            elif m1 and m2 and len(tchannels) == 4 and not m12: # 3 T propa
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            # This case seems quite sensitive; we tested method 2 specifically
            # and this was not helping in general.
            elif not m1 and not m2 and len(tchannels) == 4 and m12:
                if first < last:
                    return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
                return tchannels, 2
            else:
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
4989 4990 4991 4992 4993 @staticmethod
    def reorder_tchannels_flipside(tchannels):
        """Change the t-channel ordering to start from the other beam.
        Assumes ninitial == 2. Consumes (pops) the input list and returns
        a new list of freshly copied vertices.

        We assume that we receive something like this

        1 ----- X ------- -2
                |
                | (-X)
                |
                X -------- 4
                |
                | (-X-1)
                |
                X --------- -1

                X---------- 3
                |
                | (-N+2)
                |
                X --------- L
                |
                | (-N+1)
                |
        -N ----- X ------- P

        coded as
        (1 -2 > -X) (-X 4 > -X-1) (-X-1 -1 > -X-2) ...
        ((-N+3) 3 > (-N+2)) ((-n+2) L > (-n+1)) ((-n+1) P > -N)

        we want to convert this as:
        -N ----- X ------- -2
                |
                | (-N+1)
                |
                X -------- 4
                |
                | (-N+2)
                |
                X --------- -1

                X---------- 3
                |
                | (-X-1)
                |
                X --------- L
                |
                | (-X)
                |
        2 ----- X ------- P

        coded as
        ( 2 P > -X) (-X L > -X-1) (-X-1 3 > -X-2)... (-X-L -2 > -N)
        """

        # No need to modify anything if 1 or less T-channel.
        # Note that this counts the number of vertices (one more vertex
        # compared to the number of T-channel propagators).
        if len(tchannels) < 2:
            return tchannels

        out = []
        # Maps old leg numbers to the relabelled ones
        oldid2new = {}

        # Initialisation
        # id of the first T-channel (-X)
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Setup the last vertex to reference the second beam id:
        # -N needs to be set to 2, and beam 1 takes over -N.
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = 2
        oldid2new[1] = initialid

        i = 0
        while tchannels:
            # Walk the chain from the bottom (other beam side)
            old_vert = tchannels.pop()

            # Copy the vertex/leglist to avoid side effects
            new_vert = base_objects.Vertex(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # Vertex taken from the bottom we have
            # (-N+1 X > -N); we need to flip to pass to
            # -N X > -N+1 (and then relabel -N and -N+1)
            legs = new_vert['legs'] # shortcut
            id1 = legs[0]['number']
            id2 = legs[1]['number']
            id3 = legs[2]['number']
            # To be safe we also support (X -N+1 > -N)
            if id3 == id2 -1 and id1 !=1:
                legs[0], legs[1] = legs[1], legs[0]
            # Flipping side
            legs[0], legs[2] = legs[2], legs[0]

            # The only new relabelling is the last element of the list,
            # always thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            # Pass to new convention for leg numbering
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # Prepare next iteration
            propa_id -=1
            i +=1

        return out
5105 5106 @staticmethod
    def reorder_tchannels_pingpong(tchannels, id=2):
        """change the tchannel ordering to pass to a ping-pong strategy.
        assume ninitial == 2

        We assume that we receive something like this

        1  ----- X ------- -2
                 |
                 |  (-X)
                 |
                 X -------- 4
                 |
                 | (-X-1)
                 |
                 X --------- -1

                 X---------- 3
                 |
                 | (-N+2)
                 |
                 X --------- L
                 |
                 | (-N+1)
                 |
        -N ----- X ------- P

        coded as
        (1 -2 > -X) (-X 4 > -X-1) (-X-1 -1 > -X-2) ...
        ((-N+3) 3 > (-N+2)) ((-n+2) L > (-n+1)) ((-n+1) P > -N)

        we want to convert this as:
        1  ----- X ------- -2
                 |
                 |  (-X)
                 |
                 X -------- 4
                 |
                 | (-X-2)
                 |
                 X --------- -1

                 X---------- 3
                 |
                 | (-X-3)
                 |
                 X --------- L
                 |
                 | (-X-1)
                 |
        2  ----- X ------- P

        coded as
        (1 -2 > -X) (2 P > -X-1) (-X 4 > -X-2) (-X-1 L > -X-3) ...

        NOTE(review): `tchannels` is consumed destructively (pop below);
        callers must not rely on the input list after this call.
        """

        # no need to modified anything if 1 or less T-Channel
        # Note that this counts the number of vertex (one more vertex compare to T)
        if len(tchannels) < 2:
            return tchannels

        out = []
        # map from old propagator numbers to the new (ping-pong) numbering
        oldid2new = {}

        # initialisation
        # id of the first T-channel (-X)
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Setup the last vertex to reference the second beam id
        # -N (need to set it to `id`, i.e. 2 by default)
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = id

        i = 0
        while tchannels:
            # ping pong by taking first/last element in alternance;
            # which side starts depends on which beam (`id`) anchors the chain
            if id == 2:
                if i % 2 == 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()
            else:
                if i % 2 != 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()

            # copy the vertex/leglist to avoid side effects on the caller's objects
            new_vert = base_objects.Vertex(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # if vertex taken from the bottom we have
            # (-N+1 X > -N) we need to flip to pass to
            # -N X > -N+1 (and then relabel -N and -N+1)
            # to be secure we also support (X -N+1 > -N)
            if (i % 2 == 1 and id == 2) or (i % 2 == 0 and id == 1):
                legs = new_vert['legs']  # shortcut
                id1 = legs[0]['number']
                id2 = legs[1]['number']
                if id1 > id2:
                    legs[0], legs[1] = legs[1], legs[0]
                else:
                    legs[0], legs[2] = legs[2], legs[0]

            # the only new relabelling is the last element of the list
            # always thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            # for id==1 the first vertex was flipped above, so `legs` is bound;
            # force the incoming leg to be beam 2
            if i == 0 and id == 1:
                legs[0]['number'] = 2

            # pass to new convention for leg numbering:
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # prepare next iteration
            propa_id -= 1
            i += 1

        return out
5231 5232 5233 5234 5235 5236 #=========================================================================== 5237 # write_decayBW_file 5238 #===========================================================================
5239 - def write_decayBW_file(self, writer, s_and_t_channels):
5240 """Write the decayBW.inc file for MadEvent""" 5241 5242 lines = [] 5243 5244 booldict = {None: "0", True: "1", False: "2"} 5245 5246 for iconf, config in enumerate(s_and_t_channels): 5247 schannels = config[0] 5248 for vertex in schannels: 5249 # For the resulting leg, pick out whether it comes from 5250 # decay or not, as given by the onshell flag 5251 leg = vertex.get('legs')[-1] 5252 lines.append("data gForceBW(%d,%d)/%s/" % \ 5253 (leg.get('number'), iconf + 1, 5254 booldict[leg.get('onshell')])) 5255 5256 # Write the file 5257 writer.writelines(lines) 5258 5259 return True
5260 5261 #=========================================================================== 5262 # write_dname_file 5263 #===========================================================================
5264 - def write_dname_file(self, writer, dir_name):
5265 """Write the dname.mg file for MG4""" 5266 5267 line = "DIRNAME=%s" % dir_name 5268 5269 # Write the file 5270 writer.write(line + "\n") 5271 5272 return True
5273 5274 #=========================================================================== 5275 # write_driver 5276 #===========================================================================
5277 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
5278 """Write the SubProcess/driver.f file for MG4""" 5279 5280 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 5281 5282 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5283 card = 'Source/MODEL/MG5_param.dat' 5284 else: 5285 card = 'param_card.dat' 5286 # Requiring each helicity configuration to be probed by 10 points for 5287 # matrix element before using the resulting grid for MC over helicity 5288 # sampling. 5289 # We multiply this by 2 because each grouped subprocess is called at most 5290 # twice for each IMIRROR. 5291 replace_dict = {'param_card_name':card, 5292 'ncomb':ncomb, 5293 'hel_init_points':n_grouped_proc*10*2} 5294 if not v5: 5295 replace_dict['secondparam']=',.true.' 5296 else: 5297 replace_dict['secondparam']='' 5298 5299 if writer: 5300 text = open(path).read() % replace_dict 5301 writer.write(text) 5302 return True 5303 else: 5304 return replace_dict
5305 5306 #=========================================================================== 5307 # write_addmothers 5308 #===========================================================================
5309 - def write_addmothers(self, writer):
5310 """Write the SubProcess/addmothers.f""" 5311 5312 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5313 5314 text = open(path).read() % {'iconfig': 'diag_number'} 5315 writer.write(text) 5316 5317 return True
5318 5319 5320 #=========================================================================== 5321 # write_combine_events 5322 #===========================================================================
5323 - def write_combine_events(self, writer, nb_proc=100):
5324 """Write the SubProcess/driver.f file for MG4""" 5325 5326 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 5327 5328 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5329 card = 'Source/MODEL/MG5_param.dat' 5330 else: 5331 card = 'param_card.dat' 5332 5333 #set maxpup (number of @X in the process card) 5334 5335 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 5336 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 5337 writer.write(text) 5338 5339 return True
5340 5341 5342 #=========================================================================== 5343 # write_symmetry 5344 #===========================================================================
5345 - def write_symmetry(self, writer, v5=True):
5346 """Write the SubProcess/driver.f file for ME""" 5347 5348 5349 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 5350 5351 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5352 card = 'Source/MODEL/MG5_param.dat' 5353 else: 5354 card = 'param_card.dat' 5355 5356 if v5: 5357 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 5358 else: 5359 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 5360 5361 if writer: 5362 text = open(path).read() 5363 text = text % replace_dict 5364 writer.write(text) 5365 return True 5366 else: 5367 return replace_dict
5368 5369 5370 5371 #=========================================================================== 5372 # write_iproc_file 5373 #===========================================================================
5374 - def write_iproc_file(self, writer, me_number):
5375 """Write the iproc.dat file for MG4""" 5376 line = "%d" % (me_number + 1) 5377 5378 # Write the file 5379 for line_to_write in writer.write_line(line): 5380 writer.write(line_to_write) 5381 return True
5382 5383 #=========================================================================== 5384 # write_mg_sym_file 5385 #===========================================================================
5386 - def write_mg_sym_file(self, writer, matrix_element):
5387 """Write the mg.sym file for MadEvent.""" 5388 5389 lines = [] 5390 5391 # Extract process with all decays included 5392 final_legs = [leg for leg in matrix_element.get('processes')[0].get_legs_with_decays() if leg.get('state') == True] 5393 5394 ninitial = len([leg for leg in matrix_element.get('processes')[0].get('legs') if leg.get('state') == False]) 5395 5396 identical_indices = {} 5397 5398 # Extract identical particle info 5399 for i, leg in enumerate(final_legs): 5400 if leg.get('id') in identical_indices: 5401 identical_indices[leg.get('id')].append(\ 5402 i + ninitial + 1) 5403 else: 5404 identical_indices[leg.get('id')] = [i + ninitial + 1] 5405 5406 # Remove keys which have only one particle 5407 for key in list(identical_indices.keys()): 5408 if len(identical_indices[key]) < 2: 5409 del identical_indices[key] 5410 5411 # Write mg.sym file 5412 lines.append(str(len(list(identical_indices.keys())))) 5413 for key in identical_indices.keys(): 5414 lines.append(str(len(identical_indices[key]))) 5415 for number in identical_indices[key]: 5416 lines.append(str(number)) 5417 5418 # Write the file 5419 writer.writelines(lines) 5420 5421 return True
5422 5423 #=========================================================================== 5424 # write_mg_sym_file 5425 #===========================================================================
5426 - def write_default_mg_sym_file(self, writer):
5427 """Write the mg.sym file for MadEvent.""" 5428 5429 lines = "0" 5430 5431 # Write the file 5432 writer.writelines(lines) 5433 5434 return True
5435 5436 #=========================================================================== 5437 # write_ncombs_file 5438 #===========================================================================
5439 - def write_ncombs_file(self, writer, nexternal):
5440 """Write the ncombs.inc file for MadEvent.""" 5441 5442 # ncomb (used for clustering) is 2^nexternal 5443 file = " integer n_max_cl\n" 5444 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 5445 5446 # Write the file 5447 writer.writelines(file) 5448 5449 return True
5450 5451 #=========================================================================== 5452 # write_processes_file 5453 #===========================================================================
5454 - def write_processes_file(self, writer, subproc_group):
5455 """Write the processes.dat file with info about the subprocesses 5456 in this group.""" 5457 5458 lines = [] 5459 5460 for ime, me in \ 5461 enumerate(subproc_group.get('matrix_elements')): 5462 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 5463 ",".join(p.base_string() for p in \ 5464 me.get('processes')))) 5465 if me.get('has_mirror_process'): 5466 mirror_procs = [copy.copy(p) for p in me.get('processes')] 5467 for proc in mirror_procs: 5468 legs = copy.copy(proc.get('legs_with_decays')) 5469 legs.insert(0, legs.pop(1)) 5470 proc.set("legs_with_decays", legs) 5471 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 5472 mirror_procs)) 5473 else: 5474 lines.append("mirror none") 5475 5476 # Write the file 5477 writer.write("\n".join(lines)) 5478 5479 return True
5480 5481 #=========================================================================== 5482 # write_symswap_file 5483 #===========================================================================
5484 - def write_symswap_file(self, writer, ident_perms):
5485 """Write the file symswap.inc for MG4 by comparing diagrams using 5486 the internal matrix element value functionality.""" 5487 5488 lines = [] 5489 5490 # Write out lines for symswap.inc file (used to permute the 5491 # external leg momenta 5492 for iperm, perm in enumerate(ident_perms): 5493 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 5494 (iperm+1, ",".join([str(i+1) for i in perm]))) 5495 lines.append("data nsym/%d/" % len(ident_perms)) 5496 5497 # Write the file 5498 writer.writelines(lines) 5499 5500 return True
5501 5502 #=========================================================================== 5503 # write_symfact_file 5504 #===========================================================================
5505 - def write_symfact_file(self, writer, symmetry):
5506 """Write the files symfact.dat for MG4 by comparing diagrams using 5507 the internal matrix element value functionality.""" 5508 5509 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 5510 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 5511 # Write out lines for symswap.inc file (used to permute the 5512 # external leg momenta 5513 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 5514 # Write the file 5515 writer.write('\n'.join(lines)) 5516 writer.write('\n') 5517 5518 return True
5519 5520 #=========================================================================== 5521 # write_symperms_file 5522 #===========================================================================
5523 - def write_symperms_file(self, writer, perms):
5524 """Write the symperms.inc file for subprocess group, used for 5525 symmetric configurations""" 5526 5527 lines = [] 5528 for iperm, perm in enumerate(perms): 5529 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 5530 (iperm+1, ",".join([str(i+1) for i in perm]))) 5531 5532 # Write the file 5533 writer.writelines(lines) 5534 5535 return True
5536 5537 #=========================================================================== 5538 # write_subproc 5539 #===========================================================================
5540 - def write_subproc(self, writer, subprocdir):
5541 """Append this subprocess to the subproc.mg file for MG4""" 5542 5543 # Write line to file 5544 writer.write(subprocdir + "\n") 5545 5546 return True
5547
5548 #=============================================================================== 5549 # ProcessExporterFortranMEGroup 5550 #=============================================================================== 5551 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
5552 """Class to take care of exporting a set of matrix elements to 5553 MadEvent subprocess group format.""" 5554 5555 matrix_file = "matrix_madevent_group_v4.inc" 5556 grouped_mode = 'madevent' 5557 default_opt = {'clean': False, 'complex_mass':False, 5558 'export_format':'madevent', 'mp': False, 5559 'v5_model': True, 5560 'output_options':{}, 5561 'hel_recycling': True 5562 } 5563 5564 5565 #=========================================================================== 5566 # generate_subprocess_directory 5567 #===========================================================================
5568 - def generate_subprocess_directory(self, subproc_group, 5569 fortran_model, 5570 group_number):
5571 """Generate the Pn directory for a subprocess group in MadEvent, 5572 including the necessary matrix_N.f files, configs.inc and various 5573 other helper files.""" 5574 5575 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 5576 "subproc_group object not SubProcessGroup" 5577 5578 if not self.model: 5579 self.model = subproc_group.get('matrix_elements')[0].\ 5580 get('processes')[0].get('model') 5581 5582 cwd = os.getcwd() 5583 path = pjoin(self.dir_path, 'SubProcesses') 5584 5585 os.chdir(path) 5586 pathdir = os.getcwd() 5587 5588 # Create the directory PN in the specified path 5589 subprocdir = "P%d_%s" % (subproc_group.get('number'), 5590 subproc_group.get('name')) 5591 try: 5592 os.mkdir(subprocdir) 5593 except os.error as error: 5594 logger.warning(error.strerror + " " + subprocdir) 5595 5596 try: 5597 os.chdir(subprocdir) 5598 except os.error: 5599 logger.error('Could not cd to directory %s' % subprocdir) 5600 return 0 5601 5602 logger.info('Creating files in directory %s' % subprocdir) 5603 5604 # Create the matrix.f files, auto_dsig.f files and all inc files 5605 # for all subprocesses in the group 5606 5607 maxamps = 0 5608 maxflows = 0 5609 tot_calls = 0 5610 5611 matrix_elements = subproc_group.get('matrix_elements') 5612 5613 # Add the driver.f, all grouped ME's must share the same number of 5614 # helicity configuration 5615 ncomb = matrix_elements[0].get_helicity_combinations() 5616 for me in matrix_elements[1:]: 5617 if ncomb!=me.get_helicity_combinations(): 5618 raise MadGraph5Error("All grouped processes must share the "+\ 5619 "same number of helicity configurations.") 5620 5621 filename = 'driver.f' 5622 self.write_driver(writers.FortranWriter(filename),ncomb, 5623 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 5624 5625 self.proc_characteristic['hel_recycling'] = self.opt['hel_recycling'] 5626 for ime, matrix_element in \ 5627 enumerate(matrix_elements): 5628 if self.opt['hel_recycling']: 5629 filename = 
'matrix%d_orig.f' % (ime+1) 5630 replace_dict = self.write_matrix_element_v4(None, 5631 matrix_element, 5632 fortran_model, 5633 proc_id=str(ime+1), 5634 config_map=subproc_group.get('diagram_maps')[ime], 5635 subproc_number=group_number) 5636 calls,ncolor = replace_dict['return_value'] 5637 tfile = open(replace_dict['template_file']).read() 5638 file = tfile % replace_dict 5639 # Add the split orders helper functions. 5640 file = file + '\n' + open(replace_dict['template_file2'])\ 5641 .read()%replace_dict 5642 # Write the file 5643 writer = writers.FortranWriter(filename) 5644 writer.writelines(file) 5645 5646 # 5647 # write the dedicated template for helicity recycling 5648 # 5649 tfile = open(replace_dict['template_file'].replace('.inc',"_hel.inc")).read() 5650 file = tfile % replace_dict 5651 # Add the split orders helper functions. 5652 file = file + '\n' + open(replace_dict['template_file2'])\ 5653 .read()%replace_dict 5654 # Write the file 5655 writer = writers.FortranWriter('template_matrix%d.f' % (ime+1)) 5656 writer.uniformcase = False 5657 writer.writelines(file) 5658 5659 5660 5661 5662 else: 5663 filename = 'matrix%d.f' % (ime+1) 5664 calls, ncolor = \ 5665 self.write_matrix_element_v4(writers.FortranWriter(filename), 5666 matrix_element, 5667 fortran_model, 5668 proc_id=str(ime+1), 5669 config_map=subproc_group.get('diagram_maps')[ime], 5670 subproc_number=group_number) 5671 5672 5673 5674 filename = 'auto_dsig%d.f' % (ime+1) 5675 self.write_auto_dsig_file(writers.FortranWriter(filename), 5676 matrix_element, 5677 str(ime+1)) 5678 5679 # Keep track of needed quantities 5680 tot_calls += int(calls) 5681 maxflows = max(maxflows, ncolor) 5682 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 5683 5684 # Draw diagrams 5685 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 5686 filename = "matrix%d.ps" % (ime+1) 5687 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 5688 
get('diagrams'), 5689 filename, 5690 model = \ 5691 matrix_element.get('processes')[0].\ 5692 get('model'), 5693 amplitude=True) 5694 logger.info("Generating Feynman diagrams for " + \ 5695 matrix_element.get('processes')[0].nice_string()) 5696 plot.draw() 5697 5698 # Extract number of external particles 5699 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 5700 5701 # Generate a list of diagrams corresponding to each configuration 5702 # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number 5703 # If a subprocess has no diagrams for this config, the number is 0 5704 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 5705 5706 filename = 'auto_dsig.f' 5707 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 5708 subproc_group) 5709 5710 filename = 'coloramps.inc' 5711 self.write_coloramps_file(writers.FortranWriter(filename), 5712 subproc_diagrams_for_config, 5713 maxflows, 5714 matrix_elements) 5715 5716 filename = 'get_color.f' 5717 self.write_colors_file(writers.FortranWriter(filename), 5718 matrix_elements) 5719 5720 filename = 'config_subproc_map.inc' 5721 self.write_config_subproc_map_file(writers.FortranWriter(filename), 5722 subproc_diagrams_for_config) 5723 5724 filename = 'configs.inc' 5725 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 5726 writers.FortranWriter(filename), 5727 subproc_group, 5728 subproc_diagrams_for_config) 5729 5730 filename = 'config_nqcd.inc' 5731 self.write_config_nqcd_file(writers.FortranWriter(filename), 5732 nqcd_list) 5733 5734 filename = 'decayBW.inc' 5735 self.write_decayBW_file(writers.FortranWriter(filename), 5736 s_and_t_channels) 5737 5738 filename = 'dname.mg' 5739 self.write_dname_file(writers.FortranWriter(filename), 5740 subprocdir) 5741 5742 filename = 'iproc.dat' 5743 self.write_iproc_file(writers.FortranWriter(filename), 5744 group_number) 5745 5746 filename = 'leshouche.inc' 5747 
self.write_leshouche_file(writers.FortranWriter(filename), 5748 subproc_group) 5749 5750 filename = 'maxamps.inc' 5751 self.write_maxamps_file(writers.FortranWriter(filename), 5752 maxamps, 5753 maxflows, 5754 max([len(me.get('processes')) for me in \ 5755 matrix_elements]), 5756 len(matrix_elements)) 5757 5758 # Note that mg.sym is not relevant for this case 5759 filename = 'mg.sym' 5760 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 5761 5762 filename = 'mirrorprocs.inc' 5763 self.write_mirrorprocs(writers.FortranWriter(filename), 5764 subproc_group) 5765 5766 filename = 'ncombs.inc' 5767 self.write_ncombs_file(writers.FortranWriter(filename), 5768 nexternal) 5769 5770 filename = 'nexternal.inc' 5771 self.write_nexternal_file(writers.FortranWriter(filename), 5772 nexternal, ninitial) 5773 5774 filename = 'ngraphs.inc' 5775 self.write_ngraphs_file(writers.FortranWriter(filename), 5776 nconfigs) 5777 5778 filename = 'pmass.inc' 5779 self.write_pmass_file(writers.FortranWriter(filename), 5780 matrix_element) 5781 5782 filename = 'props.inc' 5783 self.write_props_file(writers.FortranWriter(filename), 5784 matrix_element, 5785 s_and_t_channels) 5786 5787 filename = 'processes.dat' 5788 files.write_to_file(filename, 5789 self.write_processes_file, 5790 subproc_group) 5791 5792 # Find config symmetries and permutations 5793 symmetry, perms, ident_perms = \ 5794 diagram_symmetry.find_symmetry(subproc_group) 5795 5796 filename = 'symswap.inc' 5797 self.write_symswap_file(writers.FortranWriter(filename), 5798 ident_perms) 5799 5800 filename = 'symfact_orig.dat' 5801 self.write_symfact_file(open(filename, 'w'), symmetry) 5802 5803 # check consistency 5804 for i, sym_fact in enumerate(symmetry): 5805 5806 if sym_fact >= 0: 5807 continue 5808 if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]: 5809 misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)]) 5810 raise Exception("identical diagram with different QCD powwer") 5811 5812 5813 filename = 
'symperms.inc' 5814 self.write_symperms_file(writers.FortranWriter(filename), 5815 perms) 5816 5817 # Generate jpgs -> pass in make_html 5818 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 5819 5820 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 5821 5822 #import nexternal/leshouch in Source 5823 ln('nexternal.inc', '../../Source', log=False) 5824 ln('leshouche.inc', '../../Source', log=False) 5825 ln('maxamps.inc', '../../Source', log=False) 5826 5827 # Return to SubProcesses dir) 5828 os.chdir(pathdir) 5829 5830 # Add subprocess to subproc.mg 5831 filename = 'subproc.mg' 5832 files.append_to_file(filename, 5833 self.write_subproc, 5834 subprocdir) 5835 5836 # Return to original dir 5837 os.chdir(cwd) 5838 5839 if not tot_calls: 5840 tot_calls = 0 5841 return tot_calls
5842 5843 #=========================================================================== 5844 # write_super_auto_dsig_file 5845 #===========================================================================
5846 - def write_super_auto_dsig_file(self, writer, subproc_group):
5847 """Write the auto_dsig.f file selecting between the subprocesses 5848 in subprocess group mode""" 5849 5850 replace_dict = {} 5851 5852 # Extract version number and date from VERSION file 5853 info_lines = self.get_mg5_info_lines() 5854 replace_dict['info_lines'] = info_lines 5855 5856 matrix_elements = subproc_group.get('matrix_elements') 5857 5858 # Extract process info lines 5859 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5860 matrix_elements]) 5861 replace_dict['process_lines'] = process_lines 5862 5863 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5864 replace_dict['nexternal'] = nexternal 5865 5866 replace_dict['nsprocs'] = 2*len(matrix_elements) 5867 5868 # Generate dsig definition line 5869 dsig_def_line = "DOUBLE PRECISION " + \ 5870 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5871 range(len(matrix_elements))]) 5872 replace_dict["dsig_def_line"] = dsig_def_line 5873 5874 # Generate dsig process lines 5875 call_dsig_proc_lines = [] 5876 for iproc in range(len(matrix_elements)): 5877 call_dsig_proc_lines.append(\ 5878 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! 
%(proc)s" % \ 5879 {"num": iproc + 1, 5880 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5881 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5882 5883 ncomb=matrix_elements[0].get_helicity_combinations() 5884 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5885 5886 s1,s2 = matrix_elements[0].get_spin_state_initial() 5887 replace_dict['nb_spin_state1'] = s1 5888 replace_dict['nb_spin_state2'] = s2 5889 5890 printzeroamp = [] 5891 for iproc in range(len(matrix_elements)): 5892 printzeroamp.append(\ 5893 " call print_zero_amp_%i()" % ( iproc + 1)) 5894 replace_dict['print_zero_amp'] = "\n".join(printzeroamp) 5895 5896 5897 if writer: 5898 file = open(pjoin(_file_path, \ 5899 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5900 file = file % replace_dict 5901 5902 # Write the file 5903 writer.writelines(file) 5904 else: 5905 return replace_dict
5906 5907 #=========================================================================== 5908 # write_mirrorprocs 5909 #===========================================================================
5910 - def write_mirrorprocs(self, writer, subproc_group):
5911 """Write the mirrorprocs.inc file determining which processes have 5912 IS mirror process in subprocess group mode.""" 5913 5914 lines = [] 5915 bool_dict = {True: '.true.', False: '.false.'} 5916 matrix_elements = subproc_group.get('matrix_elements') 5917 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5918 (len(matrix_elements), 5919 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5920 me in matrix_elements]))) 5921 # Write the file 5922 writer.writelines(lines)
5923 5924 #=========================================================================== 5925 # write_addmothers 5926 #===========================================================================
5927 - def write_addmothers(self, writer):
5928 """Write the SubProcess/addmothers.f""" 5929 5930 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5931 5932 text = open(path).read() % {'iconfig': 'lconfig'} 5933 writer.write(text) 5934 5935 return True
5936 5937 5938 #=========================================================================== 5939 # write_coloramps_file 5940 #===========================================================================
5941 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5942 matrix_elements):
5943 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5944 5945 # Create a map from subprocess (matrix element) to a list of 5946 # the diagrams corresponding to each config 5947 5948 lines = [] 5949 5950 subproc_to_confdiag = {} 5951 for config in diagrams_for_config: 5952 for subproc, diag in enumerate(config): 5953 try: 5954 subproc_to_confdiag[subproc].append(diag) 5955 except KeyError: 5956 subproc_to_confdiag[subproc] = [diag] 5957 5958 for subproc in sorted(subproc_to_confdiag.keys()): 5959 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5960 matrix_elements[subproc], 5961 subproc + 1)) 5962 5963 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5964 (maxflows, 5965 len(diagrams_for_config), 5966 len(matrix_elements))) 5967 5968 # Write the file 5969 writer.writelines(lines) 5970 5971 return True
5972 5973 #=========================================================================== 5974 # write_config_subproc_map_file 5975 #===========================================================================
5976 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5977 """Write the config_subproc_map.inc file for subprocess groups""" 5978 5979 lines = [] 5980 # Output only configs that have some corresponding diagrams 5981 iconfig = 0 5982 for config in config_subproc_map: 5983 if set(config) == set([0]): 5984 continue 5985 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5986 (iconfig + 1, len(config), 5987 ",".join([str(i) for i in config]))) 5988 iconfig += 1 5989 # Write the file 5990 writer.writelines(lines) 5991 5992 return True
5993 5994 #=========================================================================== 5995 # read_write_good_hel 5996 #===========================================================================
5997 - def read_write_good_hel(self, ncomb):
5998 """return the code to read/write the good_hel common_block""" 5999 6000 convert = {'ncomb' : ncomb} 6001 6002 output = """ 6003 subroutine write_good_hel(stream_id) 6004 implicit none 6005 integer stream_id 6006 INTEGER NCOMB 6007 PARAMETER ( NCOMB=%(ncomb)d) 6008 LOGICAL GOODHEL(NCOMB, 2) 6009 INTEGER NTRY(2) 6010 common/BLOCK_GOODHEL/NTRY,GOODHEL 6011 write(stream_id,*) GOODHEL 6012 return 6013 end 6014 6015 6016 subroutine read_good_hel(stream_id) 6017 implicit none 6018 include 'genps.inc' 6019 integer stream_id 6020 INTEGER NCOMB 6021 PARAMETER ( NCOMB=%(ncomb)d) 6022 LOGICAL GOODHEL(NCOMB, 2) 6023 INTEGER NTRY(2) 6024 common/BLOCK_GOODHEL/NTRY,GOODHEL 6025 read(stream_id,*) GOODHEL 6026 NTRY(1) = MAXTRIES + 1 6027 NTRY(2) = MAXTRIES + 1 6028 return 6029 end 6030 6031 subroutine init_good_hel() 6032 implicit none 6033 INTEGER NCOMB 6034 PARAMETER ( NCOMB=%(ncomb)d) 6035 LOGICAL GOODHEL(NCOMB, 2) 6036 INTEGER NTRY(2) 6037 INTEGER I 6038 6039 do i=1,NCOMB 6040 GOODHEL(I,1) = .false. 6041 GOODHEL(I,2) = .false. 6042 enddo 6043 NTRY(1) = 0 6044 NTRY(2) = 0 6045 end 6046 6047 integer function get_maxsproc() 6048 implicit none 6049 include 'maxamps.inc' 6050 6051 get_maxsproc = maxsproc 6052 return 6053 end 6054 6055 """ % convert 6056 6057 return output
6058 6059 6060 6061 #=========================================================================== 6062 # write_configs_file 6063 #===========================================================================
6064 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6065 """Write the configs.inc file with topology information for a 6066 subprocess group. Use the first subprocess with a diagram for each 6067 configuration.""" 6068 6069 matrix_elements = subproc_group.get('matrix_elements') 6070 model = matrix_elements[0].get('processes')[0].get('model') 6071 6072 diagrams = [] 6073 config_numbers = [] 6074 for iconfig, config in enumerate(diagrams_for_config): 6075 # Check if any diagrams correspond to this config 6076 if set(config) == set([0]): 6077 continue 6078 subproc_diags = [] 6079 for s,d in enumerate(config): 6080 if d: 6081 subproc_diags.append(matrix_elements[s].\ 6082 get('diagrams')[d-1]) 6083 else: 6084 subproc_diags.append(None) 6085 diagrams.append(subproc_diags) 6086 config_numbers.append(iconfig + 1) 6087 6088 # Extract number of external particles 6089 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6090 6091 return len(diagrams), \ 6092 self.write_configs_file_from_diagrams(writer, diagrams, 6093 config_numbers, 6094 nexternal, ninitial, 6095 model)
6096 6097 #=========================================================================== 6098 # write_run_configs_file 6099 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent"""

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_run_config.inc')
    # Loop-induced processes are heavier per channel: one job per channel
    # instead of the default two.
    nb_job = 1 if self.proc_characteristic['loop_induced'] else 2
    writer.write(open(template_path).read() % {'chanperjob': nb_job})
    return True
6111 6112 6113 #=========================================================================== 6114 # write_leshouche_file 6115 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Write the leshouche.inc file for MG4"""

    # Gather the leshouche lines of every matrix element of the group
    # (in order) and dump them in a single write.
    all_lines = [line
                 for iproc, matrix_element in
                 enumerate(subproc_group.get('matrix_elements'))
                 for line in self.get_leshouche_lines(matrix_element, iproc)]
    writer.writelines(all_lines)
    return True
6128 6129
def finalize(self, *args, **opts):
    """Run the parent finalization, then record that this output was
    produced from grouped matrix elements."""

    super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
    # The grouping flag must reflect this exporter regardless of what the
    # parent class left in proc_characteristic.
    self.proc_characteristic['grouped_matrix'] = True
6135 6136 6137 #=============================================================================== 6138 # UFO_model_to_mg4 6139 #=============================================================================== 6140 6141 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type names used when writing the multiple-precision (quadruple
    # precision) variants of couplings/parameters.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
def __init__(self, model, output_path, opt=None):
    """Store the model, the target directory and the export options."""

    self.model = model
    self.model_name = model['name']
    self.dir_path = output_path

    # Default export options, overridden by whatever the caller supplies
    options = {'complex_mass': False, 'export_format': 'madevent',
               'mp': True, 'loop_induced': False}
    if opt:
        options.update(opt)
    self.opt = options

    # Couplings/parameters split by alphaS dependence (filled later by
    # refactorize()); each entry carries (name, expression, type).
    self.coups_dep = []
    self.coups_indep = []
    self.params_dep = []
    self.params_indep = []
    self.params_ext = []    # external parameters

    # Parsers translating UFO expressions to (multi-precision) Fortran
    self.p_to_f = parsers.UFOExpressionParserFortran(self.model)
    self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
6177 6178
6180 """modify the parameter if some of them are identical up to the case""" 6181 6182 lower_dict={} 6183 duplicate = set() 6184 keys = list(self.model['parameters'].keys()) 6185 keys.sort() 6186 for key in keys: 6187 for param in self.model['parameters'][key]: 6188 lower_name = param.name.lower() 6189 if not lower_name: 6190 continue 6191 try: 6192 lower_dict[lower_name].append(param) 6193 except KeyError as error: 6194 lower_dict[lower_name] = [param] 6195 else: 6196 duplicate.add(lower_name) 6197 logger.debug('%s is define both as lower case and upper case.' 6198 % lower_name) 6199 if not duplicate: 6200 return 6201 6202 re_expr = r'''\b(%s)\b''' 6203 to_change = [] 6204 change={} 6205 for value in duplicate: 6206 for i, var in enumerate(lower_dict[value]): 6207 to_change.append(var.name) 6208 new_name = '%s%s' % (var.name.lower(), 6209 ('__%d'%(i+1) if i>0 else '')) 6210 change[var.name] = new_name 6211 var.name = new_name 6212 6213 # Apply the modification to the map_CTcoup_CTparam of the model 6214 # if it has one (giving for each coupling the CT parameters whcih 6215 # are necessary and which should be exported to the model. 
6216 if hasattr(self.model,'map_CTcoup_CTparam'): 6217 for coup, ctparams in self.model.map_CTcoup_CTparam: 6218 for i, ctparam in enumerate(ctparams): 6219 try: 6220 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 6221 except KeyError: 6222 pass 6223 6224 replace = lambda match_pattern: change[match_pattern.groups()[0]] 6225 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 6226 6227 # change parameters 6228 for key in keys: 6229 if key == ('external',): 6230 continue 6231 for param in self.model['parameters'][key]: 6232 param.expr = rep_pattern.sub(replace, param.expr) 6233 6234 # change couplings 6235 for key in self.model['couplings'].keys(): 6236 for coup in self.model['couplings'][key]: 6237 coup.expr = rep_pattern.sub(replace, coup.expr) 6238 6239 # change mass/width 6240 for part in self.model['particles']: 6241 if str(part.get('mass')) in to_change: 6242 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 6243 if str(part.get('width')) in to_change: 6244 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
6245
def refactorize(self, wanted_couplings = []):
    """modify the couplings to fit with MG4 convention

    Splits the model parameters into external / alphaS-dependent /
    alphaS-independent lists, and the couplings into dependent /
    independent lists (optionally restricted to wanted_couplings).
    """

    # Keep only separation in alphaS
    keys = list(self.model['parameters'].keys())
    keys.sort(key=len)
    for key in keys:
        # skip unnamed entries
        to_add = [o for o in self.model['parameters'][key] if o.name]

        if key == ('external',):
            self.params_ext += to_add
        elif any([(k in key) for k in self.PS_dependent_key]):
            self.params_dep += to_add
        else:
            self.params_indep += to_add
    # same for couplings
    keys = list(self.model['couplings'].keys())
    keys.sort(key=len)
    # NOTE(review): 'keys' is re-sorted just above but the loop below
    # iterates .items() directly, so that sort has no effect — confirm
    # whether length-ordered iteration was intended here.
    for key, coup_list in self.model['couplings'].items():
        if any([(k in key) for k in self.PS_dependent_key]):
            self.coups_dep += [c for c in coup_list if
                               (not wanted_couplings or c.name in \
                                wanted_couplings)]
        else:
            self.coups_indep += [c for c in coup_list if
                                 (not wanted_couplings or c.name in \
                                  wanted_couplings)]

    # MG4 use G and not aS as it basic object for alphas related computation
    #Pass G in the independant list
    # NOTE(review): the membership test below compares ModelVariable
    # objects against the string 'G' — presumably ModelVariable.__eq__
    # matches on the name; confirm against base_objects.
    if 'G' in self.params_dep:
        index = self.params_dep.index('G')
        G = self.params_dep.pop(index)
        # G.expr = '2*cmath.sqrt(as*pi)'
        # self.params_indep.insert(0, self.params_dep.pop(index))
        # No need to add it if not defined

    if 'aS' not in self.params_ext:
        logger.critical('aS not define as external parameter adding it!')
        #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
        self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real'))
        self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
def build(self, wanted_couplings = [], full=True):
    """Adapt the couplings to the MG4 conventions and, unless 'full' is
    False, write out every model file."""

    self.pass_parameter_to_case_insensitive()
    self.refactorize(wanted_couplings)

    # Guard clause: nothing to write when only the refactoring is wanted
    if not full:
        return
    if wanted_couplings:
        # restrict the exported CT parameters to the ones actually needed
        self.extract_needed_CTparam(wanted_couplings=wanted_couplings)
    self.write_all()
6301 6302
def open(self, name, comment='c', format='default'):
    """ Open the file name in the correct directory and with a valid
    header.

    format='fortran' returns a writers.FortranWriter; anything else
    returns a plain file handle.  In both cases a comment banner crediting
    the UFO converter is written first.
    """

    file_path = pjoin(self.dir_path, name)

    if format == 'fortran':
        fsock = writers.FortranWriter(file_path, 'w')
        # Calling io.FileIO.writelines through the class bypasses normal
        # method dispatch on fsock — presumably so the banner is written
        # verbatim instead of going through FortranWriter's own line
        # formatting; assumes FortranWriter derives from io.FileIO —
        # TODO confirm.
        write_class = io.FileIO

        write_class.writelines(fsock, comment * 77 + '\n')
        write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) * ' '})
        write_class.writelines(fsock, comment * 77 + '\n\n')
    else:
        fsock = open(file_path, 'w')
        fsock.writelines(comment * 77 + '\n')
        fsock.writelines('%(comment)s written by the UFO converter\n' % \
                         {'comment': comment + (6 - len(comment)) * ' '})
        fsock.writelines(comment * 77 + '\n\n')
    return fsock
6324 6325
def write_all(self):
    """ write all the files """
    #write the part related to the external parameter
    self.create_ident_card()
    self.create_param_read()

    #write the definition of the parameter
    self.create_input()
    self.create_intparam_def(dp=True,mp=False)
    # the quadruple-precision variant is only produced on request
    if self.opt['mp']:
        self.create_intparam_def(dp=False,mp=True)

    # definition of the coupling.
    self.create_actualize_mp_ext_param_inc()
    self.create_coupl_inc()
    self.create_write_couplings()
    self.create_couplings()

    # the makefile
    self.create_makeinc()
    self.create_param_write()

    # The model functions
    self.create_model_functions_inc()
    self.create_model_functions_def()

    # The param_card.dat
    self.create_param_card()


    # All the standard files
    self.copy_standard_file()
6358 6359 ############################################################################ 6360 ## ROUTINE CREATING THE FILES ############################################ 6361 ############################################################################ 6362
def copy_standard_file(self):
    """Copy the standard files for the fortran model."""

    #copy the library files
    file_to_link = ['formats.inc','printout.f', \
                    'rw_para.f', 'testprog.f']

    for filename in file_to_link:
        cp( MG5DIR + '/models/template_files/fortran/' + filename, \
            self.dir_path)

    # NOTE(review): 'file' shadows a builtin name
    file = open(os.path.join(MG5DIR,\
                'models/template_files/fortran/rw_para.f')).read()

    # Include lines substituted into the rw_para.f template
    includes=["include \'coupl.inc\'","include \'input.inc\'",
              "include \'model_functions.inc\'"]
    if self.opt['mp']:
        includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
    # In standalone and madloop we do no use the compiled param card but
    # still parse the .dat one so we must load it.
    if self.opt['loop_induced']:
        #loop induced follow MadEvent way to handle the card.
        load_card = ''
        lha_read_filename='lha_read.f'
    elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
        load_card = 'call LHA_loadcard(param_name,npara,param,value)'
        lha_read_filename='lha_read_mp.f'
    elif self.opt['export_format'].startswith('standalone') \
         or self.opt['export_format'] in ['madweight', 'plugin']\
         or self.opt['export_format'].startswith('matchbox'):
        load_card = 'call LHA_loadcard(param_name,npara,param,value)'
        lha_read_filename='lha_read.f'
    else:
        load_card = ''
        lha_read_filename='lha_read.f'
    # whichever reader was selected is always installed as lha_read.f
    cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
        os.path.join(self.dir_path,'lha_read.f'))

    file=file%{'includes':'\n '.join(includes),
               'load_card':load_card}
    writer=open(os.path.join(self.dir_path,'rw_para.f'),'w')
    writer.writelines(file)
    writer.close()

    # Pick the makefile matching the export format
    if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
       or self.opt['loop_induced']:
        cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
            self.dir_path + '/makefile')
        if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
            path = pjoin(self.dir_path, 'makefile')
            text = open(path).read()
            text = text.replace('madevent','aMCatNLO')
            open(path, 'w').writelines(text)
    elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                                       'madloop','madloop_optimized', 'standalone_rw',
                                       'madweight','matchbox','madloop_matchbox', 'plugin']:
        cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
            self.dir_path + '/makefile')
    #elif self.opt['export_format'] in []:
    #pass
    else:
        raise MadGraph5Error('Unknown format')
6425
def create_coupl_inc(self):
    """ write coupling.inc

    Declares G, gal, MU_R, Nf, the non-zero masses/widths, all retained
    couplings and (optionally) the complex masses, each with its Fortran
    common block.  When the 'mp' option is on, matching quad-precision
    declarations are written to mp_coupl.inc (mp_-prefixed names) and
    mp_coupl_same_name.inc (original names).
    """

    fsock = self.open('coupl.inc', format='fortran')
    if self.opt['mp']:
        mp_fsock = self.open('mp_coupl.inc', format='fortran')
        mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                       format='fortran')

    # Write header
    header = """double precision G
    common/strong/ G

    double complex gal(2)
    common/weak/ gal

    double precision MU_R
    common/rscale/ MU_R

    double precision Nf
    parameter(Nf=%d)
    """ % self.model.get_nflav()

    fsock.writelines(header)

    if self.opt['mp']:
        header = """%(real_mp_format)s %(mp_prefix)sG
        common/MP_strong/ %(mp_prefix)sG

        %(complex_mp_format)s %(mp_prefix)sgal(2)
        common/MP_weak/ %(mp_prefix)sgal

        %(complex_mp_format)s %(mp_prefix)sMU_R
        common/MP_rscale/ %(mp_prefix)sMU_R

        """
        mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                    'complex_mp_format':self.mp_complex_format,
                                    'mp_prefix':self.mp_prefix})
        mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                              'complex_mp_format':self.mp_complex_format,
                                              'mp_prefix':''})

    # Write the Mass definition/ common block
    # sets, so a mass/width shared by several particles is declared once
    masses = set()
    widths = set()
    if self.opt['complex_mass']:
        complex_mass = set()

    for particle in self.model.get('particles'):
        #find masses
        one_mass = particle.get('mass')
        if one_mass.lower() != 'zero':
            masses.add(one_mass)

        # find width
        one_width = particle.get('width')
        if one_width.lower() != 'zero':
            widths.add(one_width)
            if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                complex_mass.add('CMASS_%s' % one_mass)

    if masses:
        fsock.writelines('double precision '+','.join(masses)+'\n')
        fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                          ','.join(masses)+'\n')
            mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                          ','.join(masses)+'\n\n')
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+m for m in masses])+'\n')
            mp_fsock.writelines('common/MP_masses/ '+\
                                ','.join([self.mp_prefix+m for m in masses])+'\n\n')

    if widths:
        fsock.writelines('double precision '+','.join(widths)+'\n')
        fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                          ','.join(widths)+'\n')
            mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                          ','.join(widths)+'\n\n')
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+w for w in widths])+'\n')
            mp_fsock.writelines('common/MP_widths/ '+\
                                ','.join([self.mp_prefix+w for w in widths])+'\n\n')

    # Write the Couplings
    coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
    fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
    fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
    if self.opt['mp']:
        mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                      ','.join(coupling_list)+'\n')
        mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                      ','.join(coupling_list)+'\n\n')
        mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                            self.mp_prefix+c for c in coupling_list])+'\n')
        mp_fsock.writelines('common/MP_couplings/ '+\
                            ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

    # Write complex mass for complex mass scheme (if activated)
    # short-circuit keeps this safe: complex_mass only exists when the
    # 'complex_mass' option is set
    if self.opt['complex_mass'] and complex_mass:
        fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
        fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(complex_mass)+'\n')
            mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                          ','.join(complex_mass)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+cm for cm in complex_mass])+'\n')
            mp_fsock.writelines('common/MP_complex_mass/ '+\
                                ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
6545
def create_write_couplings(self):
    """ write the file coupl_write.inc

    Emits one Fortran 'write' statement per coupling so their values can
    be printed out at run time.
    """

    fsock = self.open('coupl_write.inc', format='fortran')

    fsock.writelines("""write(*,*)  ' Couplings of %s'
                       write(*,*)  ' ---------------------------------'
                       write(*,*)  ' '""" % self.model_name)

    # Renamed from 'format': that name shadowed the builtin format()
    def _coupl_write_line(coupl):
        """Return the Fortran statement printing one coupling value."""
        return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}

    # Write the Couplings
    lines = [_coupl_write_line(coupl)
             for coupl in self.coups_dep + self.coups_indep]
    fsock.writelines('\n'.join(lines))
def create_input(self):
    """create input.inc containing the definition of the parameters

    Real and complex internal parameters are declared with their common
    blocks; masses/widths (already declared in coupl.inc) and the special
    names G/MU_R/ZERO are excluded.  A quad-precision mirror goes to
    mp_input.inc when the 'mp' option is on.
    """

    fsock = self.open('input.inc', format='fortran')
    if self.opt['mp']:
        mp_fsock = self.open('mp_input.inc', format='fortran')

    #find mass/ width since they are already define
    already_def = set()
    for particle in self.model.get('particles'):
        already_def.add(particle.get('mass').lower())
        already_def.add(particle.get('width').lower())
        if self.opt['complex_mass']:
            already_def.add('cmass_%s' % particle.get('mass').lower())

    is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                            name.lower() not in already_def

    real_parameters = [param.name for param in self.params_dep +
                       self.params_indep if param.type == 'real'
                       and is_valid(param.name)]

    real_parameters += [param.name for param in self.params_ext
                        if param.type == 'real'and
                        is_valid(param.name)]

    # check the parameter is a CT parameter or not
    # if yes, just use the needed ones
    real_parameters = [param for param in real_parameters \
                       if self.check_needed_param(param)]

    fsock.writelines('double precision '+','.join(real_parameters)+'\n')
    fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
    if self.opt['mp']:
        mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                            self.mp_prefix+p for p in real_parameters])+'\n')
        mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                            self.mp_prefix+p for p in real_parameters])+'\n\n')

    complex_parameters = [param.name for param in self.params_dep +
                          self.params_indep if param.type == 'complex' and
                          is_valid(param.name)]

    # check the parameter is a CT parameter or not
    # if yes, just use the needed ones
    complex_parameters = [param for param in complex_parameters \
                          if self.check_needed_param(param)]

    # complex declarations are skipped entirely when empty, unlike the
    # real ones above which are always written
    if complex_parameters:
        fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
        fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+p for p in complex_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                self.mp_prefix+p for p in complex_parameters])+'\n\n')
6618
6619 - def check_needed_param(self, param):
6620 """ Returns whether the parameter in argument is needed for this 6621 specific computation or not.""" 6622 6623 # If this is a leading order model or if there was no CT parameter 6624 # employed in this NLO model, one can directly return that the 6625 # parameter is needed since only CTParameters are filtered. 6626 if not hasattr(self, 'allCTparameters') or \ 6627 self.allCTparameters is None or self.usedCTparameters is None or \ 6628 len(self.allCTparameters)==0: 6629 return True 6630 6631 # We must allow the conjugate shorthand for the complex parameter as 6632 # well so we check wether either the parameter name or its name with 6633 # 'conjg__' substituted with '' is present in the list. 6634 # This is acceptable even if some parameter had an original name 6635 # including 'conjg__' in it, because at worst we export a parameter 6636 # was not needed. 6637 param = param.lower() 6638 cjg_param = param.replace('conjg__','',1) 6639 6640 # First make sure it is a CTparameter 6641 if param not in self.allCTparameters and \ 6642 cjg_param not in self.allCTparameters: 6643 return True 6644 6645 # Now check if it is in the list of CTparameters actually used 6646 return (param in self.usedCTparameters or \ 6647 cjg_param in self.usedCTparameters)
6648
6649 - def extract_needed_CTparam(self,wanted_couplings=[]):
6650 """ Extract what are the needed CT parameters given the wanted_couplings""" 6651 6652 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6653 # Setting these lists to none wil disable the filtering in 6654 # check_needed_param 6655 self.allCTparameters = None 6656 self.usedCTparameters = None 6657 return 6658 6659 # All CTparameters appearin in all CT couplings 6660 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6661 # Define in this class the list of all CT parameters 6662 self.allCTparameters=list(\ 6663 set(itertools.chain.from_iterable(allCTparameters))) 6664 6665 # All used CT couplings 6666 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6667 allUsedCTCouplings = [coupl for coupl in 6668 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6669 6670 # Now define the list of all CT parameters that are actually used 6671 self.usedCTparameters=list(\ 6672 set(itertools.chain.from_iterable([ 6673 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6674 ]))) 6675 6676 # Now at last, make these list case insensitive 6677 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6678 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6679
def create_intparam_def(self, dp=True, mp=False):
    """ create intparam_definition.inc setting the internal parameters.
    Output the double precision and/or the multiple precision parameters
    depending on the parameters dp and mp. If mp only, then the file names
    get the 'mp_' prefix.
    """

    fsock = self.open('%sintparam_definition.inc'%
                      ('mp_' if mp and not dp else ''), format='fortran')

    # alphaS-independent parameters are only evaluated when the param
    # card is (re)read
    fsock.write_comments(\
        "Parameters that should not be recomputed event by event.\n")
    fsock.writelines("if(readlha) then\n")
    if dp:
        fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
    if mp:
        fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

    for param in self.params_indep:
        if param.name == 'ZERO':
            continue
        # check whether the parameter is a CT parameter
        # if yes,just used the needed ones
        if not self.check_needed_param(param.name):
            continue
        if dp:
            fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
        if mp:
            fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                              self.mp_p_to_f.parse(param.expr)))

    fsock.writelines('endif')

    # alphaS-dependent parameters are recomputed for every event
    fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
    if dp:
        fsock.writelines("aS = G**2/4/pi\n")
    if mp:
        fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
    for param in self.params_dep:
        # check whether the parameter is a CT parameter
        # if yes,just used the needed ones
        if not self.check_needed_param(param.name):
            continue
        if dp:
            fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
        elif mp:
            fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                              self.mp_p_to_f.parse(param.expr)))

    # gal depends on which EW input is external: aEWM1, Gf (G_mu scheme)
    # or neither (dummy value 1d0)
    fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
    if ('aEWM1',) in self.model['parameters']:
        if dp:
            fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
            gal(2) = 1d0
            """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
            %(mp_prefix)sgal(2) = 1d0
            """ %{'mp_prefix':self.mp_prefix})
        pass
    # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
    elif ('Gf',) in self.model['parameters']:
        if dp:
            fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
            gal(2) = 1d0
            """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
            %(mp_prefix)sgal(2) = 1d0
            """ %{'mp_prefix':self.mp_prefix})
        pass
    else:
        if dp:
            logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
            fsock.writelines(""" gal(1) = 1d0
            gal(2) = 1d0
            """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
            %(mp_prefix)sgal(2) = 1e0_16
            """%{'mp_prefix':self.mp_prefix})
6763 6764
def create_couplings(self):
    """ create couplings.f and all couplingsX.f """

    nb_def_by_file = 25

    self.create_couplings_main(nb_def_by_file)
    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    # Independent couplings: double and multiple precision are written
    # together in the same file.  Python slicing clamps at the list end,
    # so no explicit min() is needed.
    for ifile in range(nb_coup_indep):
        chunk = self.coups_indep[ifile * nb_def_by_file:
                                 (ifile + 1) * nb_def_by_file]
        self.create_couplings_part(ifile + 1, chunk,
                                   dp=True, mp=self.opt['mp'])

    # Dependent couplings: double and multiple precision are computed in
    # separate subroutines (same file index, distinct mp_ prefix).
    for ifile in range(nb_coup_dep):
        chunk = self.coups_dep[ifile * nb_def_by_file:
                               (ifile + 1) * nb_def_by_file]
        self.create_couplings_part(ifile + 1 + nb_coup_indep, chunk,
                                   dp=True, mp=False)
        if self.opt['mp']:
            self.create_couplings_part(ifile + 1 + nb_coup_indep, chunk,
                                       dp=False, mp=True)
6791 6792
def create_couplings_main(self, nb_def_by_file=25):
    """ create couplings.f

    Writes the driver subroutines: coup() (full initialization),
    update_as_param() (recompute alphaS-dependent quantities),
    update_as_param2(mu_r2,as2) (same, with explicit scale/alphaS) and,
    when the 'mp' option is on, mp_update_as_param().
    """

    fsock = self.open('couplings.f', format='fortran')

    fsock.writelines("""subroutine coup()

    implicit none
    double precision PI, ZERO
    logical READLHA
    parameter  (PI=3.141592653589793d0)
    parameter  (ZERO=0d0)
    include \'model_functions.inc\'""")
    if self.opt['mp']:
        fsock.writelines("""%s MP__PI, MP__ZERO
        parameter (MP__PI=3.1415926535897932384626433832795e0_16)
        parameter (MP__ZERO=0e0_16)
        include \'mp_input.inc\'
        include \'mp_coupl.inc\'
        """%self.mp_real_format)
    fsock.writelines("""include \'input.inc\'
    include \'coupl.inc\'
    READLHA = .true.
    include \'intparam_definition.inc\'""")
    if self.opt['mp']:
        fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    # independent couplings first, then the PS-point dependent ones
    fsock.writelines('\n'.join(\
        ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

    fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

    fsock.writelines('\n'.join(\
        ['call coup%s()' % (nb_coup_indep + i + 1) \
         for i in range(nb_coup_dep)]))
    if self.opt['mp']:
        fsock.writelines('\n'.join(\
            ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
             for i in range(nb_coup_dep)]))
    fsock.writelines('''\n return \n end\n''')

    # update_as_param: only the alphaS-dependent couplings are refreshed
    fsock.writelines("""subroutine update_as_param()

    implicit none
    double precision PI, ZERO
    logical READLHA
    parameter  (PI=3.141592653589793d0)
    parameter  (ZERO=0d0)
    include \'model_functions.inc\'""")
    fsock.writelines("""include \'input.inc\'
    include \'coupl.inc\'
    READLHA = .false.""")
    fsock.writelines("""
    include \'intparam_definition.inc\'\n
    """)

    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

    fsock.writelines('\n'.join(\
        ['call coup%s()' % (nb_coup_indep + i + 1) \
         for i in range(nb_coup_dep)]))
    fsock.writelines('''\n return \n end\n''')

    # update_as_param2: entry point taking the new scale and alphaS value
    fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

    implicit none
    double precision PI
    parameter  (PI=3.141592653589793d0)
    double precision mu_r2, as2
    include \'model_functions.inc\'""")
    fsock.writelines("""include \'input.inc\'
    include \'coupl.inc\'""")
    fsock.writelines("""
    if (mu_r2.gt.0d0) MU_R = mu_r2
    G = SQRT(4.0d0*PI*AS2)
    AS = as2

    CALL UPDATE_AS_PARAM()
    """)
    fsock.writelines('''\n return \n end\n''')

    if self.opt['mp']:
        fsock.writelines("""subroutine mp_update_as_param()

        implicit none
        logical READLHA
        include \'model_functions.inc\'""")
        fsock.writelines("""%s MP__PI, MP__ZERO
        parameter (MP__PI=3.1415926535897932384626433832795e0_16)
        parameter (MP__ZERO=0e0_16)
        include \'mp_input.inc\'
        include \'mp_coupl.inc\'
        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
        include \'coupl.inc\'
        include \'actualize_mp_ext_params.inc\'
        READLHA = .false.
        include \'mp_intparam_definition.inc\'\n
        """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
            ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
             for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')
6908
def create_couplings_part(self, nb_file, data, dp=True, mp=False):
    """ create couplings[nb_file].f containing information coming from data.
    Outputs the computation of the double precision and/or the multiple
    precision couplings depending on the parameters dp and mp.
    If mp is True and dp is False, then the prefix 'MP_' is appended to the
    filename and subroutine name.
    """

    fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                          nb_file), format='fortran')
    fsock.writelines("""subroutine %scoup%s()

    implicit none
    include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
    if dp:
        fsock.writelines("""
        double precision PI, ZERO
        parameter  (PI=3.141592653589793d0)
        parameter  (ZERO=0d0)
        include 'input.inc'
        include 'coupl.inc'""")
    if mp:
        fsock.writelines("""%s MP__PI, MP__ZERO
        parameter (MP__PI=3.1415926535897932384626433832795e0_16)
        parameter (MP__ZERO=0e0_16)
        include \'mp_input.inc\'
        include \'mp_coupl.inc\'
        """%self.mp_real_format)

    # One Fortran assignment per coupling, translated by the matching
    # (double or multi precision) expression parser
    for coupling in data:
        if dp:
            fsock.writelines('%s = %s' % (coupling.name,
                                          self.p_to_f.parse(coupling.expr)))
        if mp:
            fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                            self.mp_p_to_f.parse(coupling.expr)))
    fsock.writelines('end')
6946
def create_model_functions_inc(self):
    """ Create model_functions.inc which contains the various declarations
    of auxiliary functions which might be used in the couplings expressions
    """

    # Functions already provided by the default fortran templates; only
    # UFO-defined functions outside this list need an extra declaration.
    default_fct = ["complexconjugate", "re", "im", "sec",
                   "csc", "asec", "acsc", "theta_function", "cond",
                   "condif", "reglogp", "reglogm", "reglog", "recms",
                   "arg", "cot", "grreglog", "regsqrt"]

    # check for functions define in the UFO model
    ufo_fct = self.model.get('functions')
    additional_fct = [fct.name for fct in (ufo_fct or [])
                      if str(fct.name) not in default_fct]

    fsock = self.open('model_functions.inc', format='fortran')
    fsock.writelines("""double complex cond
    double complex condif
    double complex reglog
    double complex reglogp
    double complex reglogm
    double complex recms
    double complex arg
    double complex grreglog
    double complex regsqrt
    %s
    """ % "\n".join([" double complex %s" % i for i in additional_fct]))

    if self.opt['mp']:
        fsock.writelines("""%(complex_mp_format)s mp_cond
        %(complex_mp_format)s mp_condif
        %(complex_mp_format)s mp_reglog
        %(complex_mp_format)s mp_reglogp
        %(complex_mp_format)s mp_reglogm
        %(complex_mp_format)s mp_recms
        %(complex_mp_format)s mp_arg
        %(complex_mp_format)s mp_grreglog
        %(complex_mp_format)s mp_regsqrt
        %(additional)s
        """ %\
        {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]),
         'complex_mp_format':self.mp_complex_format
        })
6993
    def create_model_functions_def(self):
        """Create model_functions.f with the definitions of the auxiliary
        functions which might be used in the couplings expressions.

        Writes, in order: the double-precision helper functions; their
        multiple-precision counterparts when self.opt['mp'] is set; the
        user-provided Fortran/functions.f (form-factor support) if present
        in the model directory; and finally one Fortran function per extra
        function defined in the UFO model (plus MP versions).
        """

        fsock = self.open('model_functions.f', format='fortran')
        # Double-precision helper functions, always written.
        fsock.writelines("""double complex function cond(condition,truecase,falsecase)
          implicit none
          double complex condition,truecase,falsecase
          if(condition.eq.(0.0d0,0.0d0)) then
             cond=truecase
          else
             cond=falsecase
          endif
          end

          double complex function condif(condition,truecase,falsecase)
          implicit none
          logical condition
          double complex truecase,falsecase
          if(condition) then
             condif=truecase
          else
             condif=falsecase
          endif
          end

          double complex function recms(condition,expr)
          implicit none
          logical condition
          double complex expr
          if(condition)then
             recms=expr
          else
             recms=dcmplx(dble(expr))
          endif
          end

          double complex function reglog(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0)) then
             reglog=(0.0d0,0.0d0)
          else
             reglog=log(arg)
          endif
          end

          double complex function reglogp(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogp=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
                reglogp=log(arg) + TWOPII
             else
                reglogp=log(arg)
             endif
          endif
          end

          double complex function reglogm(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogm=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
                reglogm=log(arg) - TWOPII
             else
                reglogm=log(arg)
             endif
          endif
          end

          double complex function regsqrt(arg_in)
          implicit none
          double complex arg_in
          double complex arg
          arg=arg_in
          if(dabs(dimag(arg)).eq.0.0d0)then
             arg=dcmplx(dble(arg),0.0d0)
          endif
          if(dabs(dble(arg)).eq.0.0d0)then
             arg=dcmplx(0.0d0,dimag(arg))
          endif
          regsqrt=sqrt(arg)
          end

          double complex function grreglog(logsw,expr1_in,expr2_in)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex expr1_in,expr2_in
          double complex expr1,expr2
          double precision logsw
          double precision imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(dabs(dimag(expr1)).eq.0.0d0)then
             expr1=dcmplx(dble(expr1),0.0d0)
          endif
          if(dabs(dble(expr1)).eq.0.0d0)then
             expr1=dcmplx(0.0d0,dimag(expr1))
          endif
          if(dabs(dimag(expr2)).eq.0.0d0)then
             expr2=dcmplx(dble(expr2),0.0d0)
          endif
          if(dabs(dble(expr2)).eq.0.0d0)then
             expr2=dcmplx(0.0d0,dimag(expr2))
          endif
          if(expr1.eq.(0.0d0,0.0d0))then
             grreglog=(0.0d0,0.0d0)
          else
             imagexpr=dimag(expr1)*dimag(expr2)
             firstsheet=imagexpr.ge.0.0d0
             firstsheet=firstsheet.or.dble(expr1).ge.0.0d0
             firstsheet=firstsheet.or.dble(expr2).ge.0.0d0
             if(firstsheet)then
                grreglog=log(expr1)
             else
                if(dimag(expr1).gt.0.0d0)then
                   grreglog=log(expr1) - logsw*TWOPII
                else
                   grreglog=log(expr1) + logsw*TWOPII
                endif
             endif
          endif
          end

          double complex function arg(comnum)
          implicit none
          double complex comnum
          double complex iim
          iim = (0.0d0,1.0d0)
          if(comnum.eq.(0.0d0,0.0d0)) then
             arg=(0.0d0,0.0d0)
          else
             arg=log(comnum/abs(comnum))/iim
          endif
          end""")
        if self.opt['mp']:
            # Multiple-precision counterparts of the helpers above.
            # NOTE(review): the MP TWOPII literal below (3.14169258478796...)
            # does not match pi to the stated precision -- verify against the
            # double-precision value used above.
            fsock.writelines("""

          %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
          implicit none
          %(complex_mp_format)s condition,truecase,falsecase
          if(condition.eq.(0.0e0_16,0.0e0_16)) then
             mp_cond=truecase
          else
             mp_cond=falsecase
          endif
          end

          %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
          implicit none
          logical condition
          %(complex_mp_format)s truecase,falsecase
          if(condition) then
             mp_condif=truecase
          else
             mp_condif=falsecase
          endif
          end

          %(complex_mp_format)s function mp_recms(condition,expr)
          implicit none
          logical condition
          %(complex_mp_format)s expr
          if(condition)then
             mp_recms=expr
          else
             mp_recms=cmplx(real(expr),kind=16)
          endif
          end

          %(complex_mp_format)s function mp_reglog(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16)) then
             mp_reglog=(0.0e0_16,0.0e0_16)
          else
             mp_reglog=log(arg)
          endif
          end

          %(complex_mp_format)s function mp_reglogp(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogp=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
                mp_reglogp=log(arg) + TWOPII
             else
                mp_reglogp=log(arg)
             endif
          endif
          end

          %(complex_mp_format)s function mp_reglogm(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogm=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
                mp_reglogm=log(arg) - TWOPII
             else
                mp_reglogm=log(arg)
             endif
          endif
          end

          %(complex_mp_format)s function mp_regsqrt(arg_in)
          implicit none
          %(complex_mp_format)s arg_in
          %(complex_mp_format)s arg
          arg=arg_in
          if(abs(imagpart(arg)).eq.0.0e0_16)then
             arg=cmplx(real(arg,kind=16),0.0e0_16)
          endif
          if(abs(real(arg,kind=16)).eq.0.0e0_16)then
             arg=cmplx(0.0e0_16,imagpart(arg))
          endif
          mp_regsqrt=sqrt(arg)
          end


          %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s expr1_in,expr2_in
          %(complex_mp_format)s expr1,expr2
          %(real_mp_format)s logsw
          %(real_mp_format)s imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(abs(imagpart(expr1)).eq.0.0e0_16)then
             expr1=cmplx(real(expr1,kind=16),0.0e0_16)
          endif
          if(abs(real(expr1,kind=16)).eq.0.0e0_16)then
             expr1=cmplx(0.0e0_16,imagpart(expr1))
          endif
          if(abs(imagpart(expr2)).eq.0.0e0_16)then
             expr2=cmplx(real(expr2,kind=16),0.0e0_16)
          endif
          if(abs(real(expr2,kind=16)).eq.0.0e0_16)then
             expr2=cmplx(0.0e0_16,imagpart(expr2))
          endif
          if(expr1.eq.(0.0e0_16,0.0e0_16))then
             mp_grreglog=(0.0e0_16,0.0e0_16)
          else
             imagexpr=imagpart(expr1)*imagpart(expr2)
             firstsheet=imagexpr.ge.0.0e0_16
             firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16
             firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16
             if(firstsheet)then
                mp_grreglog=log(expr1)
             else
                if(imagpart(expr1).gt.0.0e0_16)then
                   mp_grreglog=log(expr1) - logsw*TWOPII
                else
                   mp_grreglog=log(expr1) + logsw*TWOPII
                endif
             endif
          endif
          end

          %(complex_mp_format)s function mp_arg(comnum)
          implicit none
          %(complex_mp_format)s comnum
          %(complex_mp_format)s imm
          imm = (0.0e0_16,1.0e0_16)
          if(comnum.eq.(0.0e0_16,0.0e0_16)) then
             mp_arg=(0.0e0_16,0.0e0_16)
          else
             mp_arg=log(comnum/abs(comnum))/imm
          endif
          end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format})


        # Check for the file functions.f (form-factor support): if the model
        # ships a Fortran/functions.f, copy its content verbatim.
        model_path = self.model.get('modelpath')
        if os.path.exists(pjoin(model_path,'Fortran','functions.f')):
            fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
            input = pjoin(model_path,'Fortran','functions.f')
            fsock.writelines(open(input).read())
            fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

        # Check for functions defined in the UFO model and write one Fortran
        # function per (deduplicated) entry.
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
            done = []
            for fct in ufo_fct:
                # already handled by default
                if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                         "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg",
                                         "grreglog","regsqrt"] + done:
                    done.append(str(fct.name.lower()))
                    ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s

          return
          end
          """
                    str_fct = self.p_to_f.parse(fct.expr)
                    # Declare any auxiliary symbols the parser flagged
                    # (pi gets a value; everything else is a complex local).
                    if not self.p_to_f.to_define:
                        definitions = []
                    else:
                        definitions=[]
                        for d in self.p_to_f.to_define:
                            if d == 'pi':
                                definitions.append('          double precision pi')
                                definitions.append('          data pi /3.1415926535897932d0/')
                            else:
                                definitions.append('          double complex %s' % d)

                    text = ufo_fct_template % {
                        'name': fct.name,
                        'args': ", ".join(fct.arguments),
                        'fct': str_fct,
                        'definitions': '\n'.join(definitions)
                        }

                    fsock.writelines(text)
            if self.opt['mp']:
                # MP versions of the UFO-defined functions (mp_ prefix on the
                # function name, mp__ prefix on every argument).
                fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
                for fct in ufo_fct:
                    # already handled by default
                    if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif",
                                        "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg",
                                        "grreglog","regsqrt"]:
                        ufo_fct_template = """
          %(complex_mp_format)s function mp_%(name)s(mp__%(args)s)
          implicit none
          %(complex_mp_format)s mp__%(args)s
          %(definitions)s
          mp_%(name)s = %(fct)s

          return
          end
          """
                        str_fct = self.mp_p_to_f.parse(fct.expr)
                        if not self.mp_p_to_f.to_define:
                            definitions = []
                        else:
                            definitions=[]
                            for d in self.mp_p_to_f.to_define:
                                if d == 'pi':
                                    definitions.append('          %s mp__pi' % self.mp_real_format)
                                    definitions.append('          data mp__pi /3.141592653589793238462643383279502884197e+00_16/')
                                else:
                                    definitions.append('          %s mp_%s' % (self.mp_complex_format,d))
                        text = ufo_fct_template % {
                            'name': fct.name,
                            'args': ", mp__".join(fct.arguments),
                            'fct': str_fct,
                            'definitions': '\n'.join(definitions),
                            'complex_mp_format': self.mp_complex_format
                            }
                        fsock.writelines(text)


            fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
7385 - def create_makeinc(self):
7386 """create makeinc.inc containing the file to compile """ 7387 7388 fsock = self.open('makeinc.inc', comment='#') 7389 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 7390 text += ' model_functions.o ' 7391 7392 nb_coup_indep = 1 + len(self.coups_dep) // 25 7393 nb_coup_dep = 1 + len(self.coups_indep) // 25 7394 couplings_files=['couplings%s.o' % (i+1) \ 7395 for i in range(nb_coup_dep + nb_coup_indep) ] 7396 if self.opt['mp']: 7397 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 7398 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 7399 text += ' '.join(couplings_files) 7400 fsock.writelines(text)
7401
7402 - def create_param_write(self):
7403 """ create param_write """ 7404 7405 fsock = self.open('param_write.inc', format='fortran') 7406 7407 fsock.writelines("""write(*,*) ' External Params' 7408 write(*,*) ' ---------------------------------' 7409 write(*,*) ' '""") 7410 def format(name): 7411 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
7412 7413 # Write the external parameter 7414 lines = [format(param.name) for param in self.params_ext] 7415 fsock.writelines('\n'.join(lines)) 7416 7417 fsock.writelines("""write(*,*) ' Internal Params' 7418 write(*,*) ' ---------------------------------' 7419 write(*,*) ' '""") 7420 lines = [format(data.name) for data in self.params_indep 7421 if data.name != 'ZERO' and self.check_needed_param(data.name)] 7422 fsock.writelines('\n'.join(lines)) 7423 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 7424 write(*,*) ' ----------------------------------------' 7425 write(*,*) ' '""") 7426 lines = [format(data.name) for data in self.params_dep \ 7427 if self.check_needed_param(data.name)] 7428 7429 fsock.writelines('\n'.join(lines)) 7430 7431 7432
7433 - def create_ident_card(self):
7434 """ create the ident_card.dat """ 7435 7436 def format(parameter): 7437 """return the line for the ident_card corresponding to this parameter""" 7438 colum = [parameter.lhablock.lower()] + \ 7439 [str(value) for value in parameter.lhacode] + \ 7440 [parameter.name] 7441 if not parameter.name: 7442 return '' 7443 return ' '.join(colum)+'\n'
7444 7445 fsock = self.open('ident_card.dat') 7446 7447 external_param = [format(param) for param in self.params_ext] 7448 fsock.writelines('\n'.join(external_param)) 7449
7450 - def create_actualize_mp_ext_param_inc(self):
7451 """ create the actualize_mp_ext_params.inc code """ 7452 7453 # In principle one should actualize all external, but for now, it is 7454 # hardcoded that only AS and MU_R can by dynamically changed by the user 7455 # so that we only update those ones. 7456 # Of course, to be on the safe side, one could decide to update all 7457 # external parameters. 7458 update_params_list=[p for p in self.params_ext if p.name in 7459 self.PS_dependent_key] 7460 7461 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 7462 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 7463 for param in update_params_list] 7464 # When read_lha is false, it is G which is taken in input and not AS, so 7465 # this is what should be reset here too. 7466 if 'aS' in [param.name for param in update_params_list]: 7467 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 7468 7469 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 7470 fsock.writelines('\n'.join(res_strings))
7471
    def create_param_read(self):
        """Write param_read.inc, the Fortran code reading the external
        parameters from the param_card.

        For madevent/FKS5 outputs (or loop-induced processes) the file simply
        includes the generated ../param_card.inc; otherwise one
        LHA_get_real call is emitted per external parameter (plus an MP
        variant when self.opt['mp'] is set), followed by sign fixes for
        Majorana widths.
        """

        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """Return the LHA_get_real call(s) reading this parameter,
            using its default value as fallback."""
            template = \
                """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
                % {'name': parameter.name,
                   'value': self.p_to_f.parse(str(parameter.value.real))}
            if self.opt['mp']:
                # Also fill the multiple-precision copy of the parameter.
                template = template+ \
                 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+
                  "%(mp_prefix)s%(name)s,%(value)s)") \
                  % {'name': parameter.name,'mp_prefix': self.mp_prefix,
                     'value': self.mp_p_to_f.parse(str(parameter.value.real))}

            # Loop-block parameters are read without warnings when absent.
            if parameter.lhablock.lower() == 'loop':
                template = template.replace('LHA_get_real', 'LHA_get_real_silent')

            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                       for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
                   particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                 {'width': particle.get('width'), 'mass': particle.get('mass')})
                if self.opt['mp']:
                    res_strings.append(\
                      ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\
                       '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\
                       'mass': particle.get('mass'),'mp_pref':self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))


    @staticmethod
7522 - def create_param_card_static(model, output_path, rule_card_path=False, 7523 mssm_convert=True, write_special=True):
7524 """ create the param_card.dat for a givent model --static method-- """ 7525 #1. Check if a default param_card is present: 7526 done = False 7527 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 7528 restrict_name = os.path.basename(model.restrict_card)[9:-4] 7529 model_path = model.get('modelpath') 7530 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 7531 done = True 7532 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 7533 output_path) 7534 if not done: 7535 param_writer.ParamCardWriter(model, output_path, write_special=write_special) 7536 7537 if rule_card_path: 7538 if hasattr(model, 'rule_card'): 7539 model.rule_card.write_file(rule_card_path) 7540 7541 if mssm_convert: 7542 model_name = model.get('name') 7543 # IF MSSM convert the card to SLAH1 7544 if model_name == 'mssm' or model_name.startswith('mssm-'): 7545 import models.check_param_card as translator 7546 # Check the format of the param_card for Pythia and make it correct 7547 if rule_card_path: 7548 translator.make_valid_param_card(output_path, rule_card_path) 7549 translator.convert_to_slha1(output_path)
7550
7551 - def create_param_card(self, write_special=True):
7552 """ create the param_card.dat """ 7553 7554 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 7555 if not hasattr(self.model, 'rule_card'): 7556 rule_card=False 7557 write_special = True 7558 if 'exporter' in self.opt: 7559 import madgraph.loop.loop_exporters as loop_exporters 7560 import madgraph.iolibs.export_fks as export_fks 7561 write_special = False 7562 if issubclass(self.opt['exporter'], loop_exporters.LoopProcessExporterFortranSA): 7563 write_special = True 7564 if issubclass(self.opt['exporter'],(loop_exporters.LoopInducedExporterME,export_fks.ProcessExporterFortranFKS)): 7565 write_special = False 7566 7567 self.create_param_card_static(self.model, 7568 output_path=pjoin(self.dir_path, 'param_card.dat'), 7569 rule_card_path=rule_card, 7570 mssm_convert=True, 7571 write_special=write_special)
7572
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True,
                    cmd_options=None):
    """Determine which Export_v4 class is required and return an instance.

    cmd is the command interface containing all potentially useful
    information.  output_type specifies from which context the output is
    called: 'madloop' (or 'madloop_matchbox') for MadLoop5, 'amcatnlo' for
    FKS5 output and 'default' for tree-level outputs.  group_subprocesses
    selects the grouped flavour of the madevent/madweight exporters, and
    cmd_options is an optional dict of extra output options.

    Raises MadGraph5Error for an unknown output_type or a missing
    loop_material template directory.
    """
    # BUGFIX: avoid a shared mutable default argument for cmd_options.
    if cmd_options is None:
        cmd_options = {}

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether a loop-reduction tool (e.g. Ninja) must be installed.
    # It is only required if:
    #  a) loop-optimized output is selected, and
    #  b) the process gathered from the generated amplitude uses loops.
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
            len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and the reduction library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # Options shared by all MadLoop5-based exporters.
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass': cmd.options['complex_mass_scheme'],
      'export_format': 'madloop',
      'mp': True,
      'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir': cmd._iregi_dir,
      'golem_dir': cmd.options['golem'],
      'samurai_dir': cmd.options['samurai'],
      'ninja_dir': cmd.options['ninja'],
      'collier_dir': cmd.options['collier'],
      'fortran_compiler': cmd.options['fortran_compiler'],
      'f2py_compiler': cmd.options['f2py_compiler'],
      'output_dependencies': cmd.options['output_dependencies'],
      'SubProc_prefix': 'P',
      'compute_color_flows': cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path'],
      'output_options': cmd_options
      }

    # First treat the MadLoop5 standalone case.
    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception("output_type not recognize %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory' + \
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output.
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # MP support is only needed when virtual amplitudes are present.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output.
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format  # shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5.
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception('Wrong export_v4 format')
    else:
        # BUGFIX: the %s placeholder was previously left unformatted.
        raise MadGraph5Error('Output type %s not reckognized in ExportV4Factory.'
                             % output_type)
7744 7745 7746 7747 #=============================================================================== 7748 # ProcessExporterFortranMWGroup 7749 #=============================================================================== 7750 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
7751 """Class to take care of exporting a set of matrix elements to 7752 MadEvent subprocess group format.""" 7753 7754 matrix_file = "matrix_madweight_group_v4.inc" 7755 grouped_mode = 'madweight' 7756 #=========================================================================== 7757 # generate_subprocess_directory 7758 #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls written (0 when none).
        Raises PhysicsObjectError if subproc_group is not a SubProcessGroup.
        """

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup")

        # Lazily pick up the model from the first matrix element if needed.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # An already-existing directory is only worth a warning.
            logger.warning(error.strerror + " " + subprocdir)


        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             str(ime+1),
                                             subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                    get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)


        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared MadWeight sources into the P directory.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7905 7906 7907 #=========================================================================== 7908 # Helper functions 7909 #===========================================================================
7910 - def modify_grouping(self, matrix_element):
7911 """allow to modify the grouping (if grouping is in place) 7912 return two value: 7913 - True/False if the matrix_element was modified 7914 - the new(or old) matrix element""" 7915 7916 return True, matrix_element.split_lepton_grouping()
7917 7918 #=========================================================================== 7919 # write_super_auto_dsig_file 7920 #===========================================================================
7921 - def write_super_auto_dsig_file(self, writer, subproc_group):
7922 """Write the auto_dsig.f file selecting between the subprocesses 7923 in subprocess group mode""" 7924 7925 replace_dict = {} 7926 7927 # Extract version number and date from VERSION file 7928 info_lines = self.get_mg5_info_lines() 7929 replace_dict['info_lines'] = info_lines 7930 7931 matrix_elements = subproc_group.get('matrix_elements') 7932 7933 # Extract process info lines 7934 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7935 matrix_elements]) 7936 replace_dict['process_lines'] = process_lines 7937 7938 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7939 replace_dict['nexternal'] = nexternal 7940 7941 replace_dict['nsprocs'] = 2*len(matrix_elements) 7942 7943 # Generate dsig definition line 7944 dsig_def_line = "DOUBLE PRECISION " + \ 7945 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7946 range(len(matrix_elements))]) 7947 replace_dict["dsig_def_line"] = dsig_def_line 7948 7949 # Generate dsig process lines 7950 call_dsig_proc_lines = [] 7951 for iproc in range(len(matrix_elements)): 7952 call_dsig_proc_lines.append(\ 7953 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7954 {"num": iproc + 1, 7955 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7956 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7957 7958 if writer: 7959 file = open(os.path.join(_file_path, \ 7960 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7961 file = file % replace_dict 7962 # Write the file 7963 writer.writelines(file) 7964 else: 7965 return replace_dict
7966 7967 #=========================================================================== 7968 # write_mirrorprocs 7969 #===========================================================================
7970 - def write_mirrorprocs(self, writer, subproc_group):
7971 """Write the mirrorprocs.inc file determining which processes have 7972 IS mirror process in subprocess group mode.""" 7973 7974 lines = [] 7975 bool_dict = {True: '.true.', False: '.false.'} 7976 matrix_elements = subproc_group.get('matrix_elements') 7977 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7978 (len(matrix_elements), 7979 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7980 me in matrix_elements]))) 7981 # Write the file 7982 writer.writelines(lines)
7983 7984 #=========================================================================== 7985 # write_configs_file 7986 #===========================================================================
7987 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7988 """Write the configs.inc file with topology information for a 7989 subprocess group. Use the first subprocess with a diagram for each 7990 configuration.""" 7991 7992 matrix_elements = subproc_group.get('matrix_elements') 7993 model = matrix_elements[0].get('processes')[0].get('model') 7994 7995 diagrams = [] 7996 config_numbers = [] 7997 for iconfig, config in enumerate(diagrams_for_config): 7998 # Check if any diagrams correspond to this config 7999 if set(config) == set([0]): 8000 continue 8001 subproc_diags = [] 8002 for s,d in enumerate(config): 8003 if d: 8004 subproc_diags.append(matrix_elements[s].\ 8005 get('diagrams')[d-1]) 8006 else: 8007 subproc_diags.append(None) 8008 diagrams.append(subproc_diags) 8009 config_numbers.append(iconfig + 1) 8010 8011 # Extract number of external particles 8012 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 8013 8014 return len(diagrams), \ 8015 self.write_configs_file_from_diagrams(writer, diagrams, 8016 config_numbers, 8017 nexternal, ninitial, 8018 matrix_elements[0],model)
8019 8020 #=========================================================================== 8021 # write_run_configs_file 8022 #===========================================================================
8023 - def write_run_config_file(self, writer):
8024 """Write the run_configs.inc file for MadEvent""" 8025 8026 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 8027 text = open(path).read() % {'chanperjob':'2'} 8028 writer.write(text) 8029 return True
8030 8031 8032 #=========================================================================== 8033 # write_leshouche_file 8034 #===========================================================================
8035 - def write_leshouche_file(self, writer, subproc_group):
8036 """Write the leshouche.inc file for MG4""" 8037 8038 all_lines = [] 8039 8040 for iproc, matrix_element in \ 8041 enumerate(subproc_group.get('matrix_elements')): 8042 all_lines.extend(self.get_leshouche_lines(matrix_element, 8043 iproc)) 8044 8045 # Write the file 8046 writer.writelines(all_lines) 8047 8048 return True
8049