Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  """Methods and classes to export matrix elements to v4 format.""" 
  20   
  21  import copy 
  22  from six import StringIO 
  23  import itertools 
  24  import fractions 
  25  import glob 
  26  import logging 
  27  import math 
  28  import os 
  29  import io 
  30  import re 
  31  import shutil 
  32  import subprocess 
  33  import sys 
  34  import time 
  35  import traceback 
  36   
  37  import aloha 
  38   
  39  import madgraph.core.base_objects as base_objects 
  40  import madgraph.core.color_algebra as color 
  41  import madgraph.core.helas_objects as helas_objects 
  42  import madgraph.iolibs.drawing_eps as draw 
  43  import madgraph.iolibs.files as files 
  44  import madgraph.iolibs.group_subprocs as group_subprocs 
  45  import madgraph.iolibs.file_writers as writers 
  46  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  47  import madgraph.iolibs.template_files as template_files 
  48  import madgraph.iolibs.ufo_expression_parsers as parsers 
  49  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  50  import madgraph.interface.common_run_interface as common_run_interface 
  51  import madgraph.various.diagram_symmetry as diagram_symmetry 
  52  import madgraph.various.misc as misc 
  53  import madgraph.various.banner as banner_mod 
  54  import madgraph.various.process_checks as process_checks 
  55  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  56  import aloha.create_aloha as create_aloha 
  57  import models.import_ufo as import_ufo 
  58  import models.write_param_card as param_writer 
  59  import models.check_param_card as check_param_card 
  60   
  61   
  62  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  63  from madgraph.iolibs.files import cp, ln, mv 
  64   
  65  from madgraph import InvalidCmd 
  66   
  67  pjoin = os.path.join 
  68   
  69  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  70  logger = logging.getLogger('madgraph.export_v4') 
  71   
  72  default_compiler= {'fortran': 'gfortran', 
  73                         'f2py': 'f2py', 
  74                         'cpp':'g++'} 
class VirtualExporter(object):
    """Base (virtual) exporter defining the interface that madgraph
    expects from any output-format exporter."""

    # Exporter variables which modify the way madgraph interacts with this class.

    grouped_mode = 'madevent'
    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    # False to avoid grouping (only identical matrix elements are merged)
    # 'madevent' groups the massless quarks and massless leptons
    # 'madweight' groups the gluon with the massless quarks

    sa_symmetry = False
    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Setting sa_symmetry generates only one of the two matrix elements.

    check = True
    # Ask madgraph to check if the directory already exists and propose to
    # the user to remove it first if this is the case.

    output = 'Template'
    # [Template, None, dir]
    # - Template, madgraph will call copy_template
    # - dir, madgraph will just create an empty directory for initialisation
    # - None, madgraph does nothing for initialisation

    exporter = 'v4'
    # language of the output: 'v4' for Fortran output, 'cpp' for C++ output

    def __init__(self, dir_path="", opt=None):
        # cmd_options is a dictionary with all the optional arguments passed
        # at output time.
        # Activate some monkey patching for the helas call writer.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    # helper function for customising the helas writer
    @staticmethod
    def custom_helas_call(call, arg):
        """static method to customise the way aloha function calls are written
        call is the default template for the call
        arg is the dictionary used for the call
        """
        return call, arg

    # When looked up on an instance, this lambda receives the instance as its
    # first argument, so it forwards to the (possibly overridden)
    # custom_helas_call of the concrete exporter.
    helas_call_writer_custom = lambda x, y, z: x.custom_helas_call(y, z)

    def copy_template(self, model):
        """Initialise the output directory from a template (no-op here)."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # generate_subprocess_directory(self, matrix_element, helicity_model,
        # me_number) [for ungrouped]
        return 0  # return an integer stating the number of calls to helicity routines

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Write the model files for the output (no-op here)."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Finalize the output directory (no-op here)."""
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Hook allowing the exporter to regroup matrix elements.
        Returns (changed, matrix_element)."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # Fixed: removed the unreachable 'return' that followed the raise.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        # Fixed: removed the unreachable 'return' that followed the raise.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Default exporter options; individual entries may be overridden by the
    # 'opt' dictionary passed to __init__.
    default_opt = {'clean': False, 'complex_mass': False,
                   'export_format': 'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options': {}
                   }
    # Ungrouped output by default (see VirtualExporter.grouped_mode).
    grouped_mode = False
168 - def __init__(self, dir_path = "", opt=None):
169 """Initiate the ProcessExporterFortran with directory information""" 170 self.mgme_dir = MG5DIR 171 self.dir_path = dir_path 172 self.model = None 173 174 self.opt = dict(self.default_opt) 175 if opt: 176 self.opt.update(opt) 177 178 self.cmd_options = self.opt['output_options'] 179 180 #place holder to pass information to the run_interface 181 self.proc_characteristic = banner_mod.ProcCharacteristic() 182 # call mother class 183 super(ProcessExporterFortran,self).__init__(dir_path, opt)
184 185 186 #=========================================================================== 187 # process exporter fortran switch between group and not grouped 188 #===========================================================================
189 - def export_processes(self, matrix_elements, fortran_model):
190 """Make the switch between grouped and not grouped output""" 191 192 calls = 0 193 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 194 for (group_number, me_group) in enumerate(matrix_elements): 195 calls = calls + self.generate_subprocess_directory(\ 196 me_group, fortran_model, group_number) 197 else: 198 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 199 calls = calls + self.generate_subprocess_directory(\ 200 me, fortran_model, me_number) 201 202 return calls
203 204 205 #=========================================================================== 206 # create the run_card 207 #===========================================================================
208 - def create_run_card(self, matrix_elements, history):
209 """ """ 210 211 212 # bypass this for the loop-check 213 import madgraph.loop.loop_helas_objects as loop_helas_objects 214 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 215 matrix_elements = None 216 217 run_card = banner_mod.RunCard() 218 219 220 default=True 221 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 222 processes = [me.get('processes') for megroup in matrix_elements 223 for me in megroup['matrix_elements']] 224 elif matrix_elements: 225 processes = [me.get('processes') 226 for me in matrix_elements['matrix_elements']] 227 else: 228 default =False 229 230 if default: 231 run_card.create_default_for_process(self.proc_characteristic, 232 history, 233 processes) 234 235 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 236 shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 237 pjoin(self.dir_path, 'Cards', 'run_card.dat'))
238 239 240 241 #=========================================================================== 242 # copy the Template in a new directory. 243 #===========================================================================
    def copy_template(self, model):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory
        """

        # First copy the full template tree if dir_path doesn't exist.
        if not os.path.isdir(self.dir_path):
            assert self.mgme_dir, \
                  "No valid MG_ME path given for MG4 run directory creation."
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(self.dir_path))
            # Third positional argument of copytree is symlinks=True:
            # symbolic links of the template are preserved as links.
            shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                            self.dir_path, True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                           self.dir_path)
            # copy plot_card, keeping a pristine *_default.dat backup
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif os.getcwd() == os.path.realpath(self.dir_path):
            # Re-initialise the template inside the current working directory.
            logger.info('working in local directory: %s' % \
                                             os.path.realpath(self.dir_path))
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                           self.dir_path)
#            for name in misc.glob('Template/LO/*', self.mgme_dir):
#                name = os.path.basename(name)
#                filname = pjoin(self.mgme_dir, 'Template','LO',name)
#                if os.path.isfile(filename):
#                    files.cp(filename, pjoin(self.dir_path,name))
#                elif os.path.isdir(filename):
#                    shutil.copytree(filename, pjoin(self.dir_path,name), True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                           self.dir_path)
            # Copy plot_card (same *_default.dat backup scheme as above)
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")
        elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
            # Existing directory that is not a MadEvent template yet:
            # only stamp it with version information.
            assert self.mgme_dir, \
                  "No valid MG_ME path given for MG4 run directory creation."
            try:
                shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
            except IOError:
                # Fall back on the MG5 package version when no MGMEVersion file exists.
                MG5_version = misc.get_pkg_info()
                open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version'])

        # Ensure that the Template is clean
        if self.opt['clean']:
            logger.info('remove old information in %s' % \
                        os.path.basename(self.dir_path))
            if 'MADGRAPH_BASE' in os.environ:
                # Web-server installation: more aggressive cleaning.
                misc.call([pjoin('bin', 'internal', 'clean_template'),
                           '--web'], cwd=self.dir_path)
            else:
                try:
                    misc.call([pjoin('bin', 'internal', 'clean_template')], \
                              cwd=self.dir_path)
                except Exception as why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(self.dir_path), why))

        # Write version info
        MG_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
            MG_version['version'])

        # add the makefile in Source directory
        filename = pjoin(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FileWriter(filename))

        # add the DiscreteSampler information
        files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
                 pjoin(self.dir_path, 'Source'))
        files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'StringCast.f'),
                 pjoin(self.dir_path, 'Source'))

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
333 334 335 #=========================================================================== 336 # Call MadAnalysis5 to generate the default cards for this process 337 #===========================================================================
338 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 339 ma5_path, output_dir, levels = ['parton','hadron']):
340 """ Call MA5 so that it writes default cards for both parton and 341 post-shower levels, tailored for this particular process.""" 342 343 if len(levels)==0: 344 return 345 start = time.time() 346 logger.info('Generating MadAnalysis5 default cards tailored to this process') 347 try: 348 MA5_interpreter = common_run_interface.CommonRunCmd.\ 349 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 350 except (Exception, SystemExit) as e: 351 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty') 352 return 353 if MA5_interpreter is None: 354 return 355 356 MA5_main = MA5_interpreter.main 357 for lvl in ['parton','hadron']: 358 if lvl in levels: 359 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 360 try: 361 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 362 except (Exception, SystemExit) as e: 363 # keep the default card (skip only) 364 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 365 ' default analysis card for this process.') 366 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 367 error=StringIO() 368 traceback.print_exc(file=error) 369 logger.debug('MadAnalysis5 error was:') 370 logger.debug('-'*60) 371 logger.debug(error.getvalue()[:-1]) 372 logger.debug('-'*60) 373 else: 374 open(card_to_generate,'w').write(text) 375 stop = time.time() 376 if stop-start >1: 377 logger.info('Cards created in %.2fs' % (stop-start))
378 379 #=========================================================================== 380 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 381 #===========================================================================
382 - def write_procdef_mg5(self, file_pos, modelname, process_str):
383 """ write an equivalent of the MG4 proc_card in order that all the Madevent 384 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 385 386 proc_card_template = template_files.mg4_proc_card.mg4_template 387 process_template = template_files.mg4_proc_card.process_template 388 process_text = '' 389 coupling = '' 390 new_process_content = [] 391 392 393 # First find the coupling and suppress the coupling from process_str 394 #But first ensure that coupling are define whithout spaces: 395 process_str = process_str.replace(' =', '=') 396 process_str = process_str.replace('= ', '=') 397 process_str = process_str.replace(',',' , ') 398 #now loop on the element and treat all the coupling 399 for info in process_str.split(): 400 if '=' in info: 401 coupling += info + '\n' 402 else: 403 new_process_content.append(info) 404 # Recombine the process_str (which is the input process_str without coupling 405 #info) 406 process_str = ' '.join(new_process_content) 407 408 #format the SubProcess 409 replace_dict = {'process': process_str, 410 'coupling': coupling} 411 process_text += process_template.substitute(replace_dict) 412 413 replace_dict = {'process': process_text, 414 'model': modelname, 415 'multiparticle':''} 416 text = proc_card_template.substitute(replace_dict) 417 418 if file_pos: 419 ff = open(file_pos, 'w') 420 ff.write(text) 421 ff.close() 422 else: 423 return replace_dict
424 425
426 - def pass_information_from_cmd(self, cmd):
427 """Pass information for MA5""" 428 429 self.proc_defs = cmd._curr_proc_defs
430 431 #=========================================================================== 432 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 433 #===========================================================================
434 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
435 """Function to finalize v4 directory, for inheritance.""" 436 437 self.create_run_card(matrix_elements, history) 438 self.create_MA5_cards(matrix_elements, history)
439
440 - def create_MA5_cards(self,matrix_elements,history):
441 """ A wrapper around the creation of the MA5 cards so that it can be 442 bypassed by daughter classes (i.e. in standalone).""" 443 if 'madanalysis5_path' in self.opt and not \ 444 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 445 processes = None 446 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 447 processes = [me.get('processes') for megroup in matrix_elements 448 for me in megroup['matrix_elements']] 449 elif matrix_elements: 450 processes = [me.get('processes') 451 for me in matrix_elements['matrix_elements']] 452 453 self.create_default_madanalysis5_cards( 454 history, self.proc_defs, processes, 455 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 456 levels = ['hadron','parton']) 457 458 for level in ['hadron','parton']: 459 # Copying these cards turn on the use of MadAnalysis5 by default. 460 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 461 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 462 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
463 464 #=========================================================================== 465 # Create the proc_characteristic file passing information to the run_interface 466 #===========================================================================
467 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
468 469 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
470 471 #=========================================================================== 472 # write_matrix_element_v4 473 #===========================================================================
474 - def write_matrix_element_v4(self):
475 """Function to write a matrix.f file, for inheritance. 476 """ 477 pass
478 479 #=========================================================================== 480 # write_pdf_opendata 481 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure"""

        # Build the extra Fortran lookup code only when a cluster-local path
        # is configured; otherwise the template hook is left empty.
        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            # Chain of open() attempts: each ERR= label falls through to the
            # next candidate location of the PDF table on the cluster node.
            to_add = """
          tempname='%(path)s'//Tablefile
          open(IU,file=tempname,status='old',ERR=1)
          return
 1        tempname='%(path)s/Pdfdata/'//Tablefile
          open(IU,file=tempname,status='old',ERR=2)
          return
 2        tempname='%(path)s/lhapdf'//Tablefile
          open(IU,file=tempname,status='old',ERR=3)
          return
 3        tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
          open(IU,file=tempname,status='old',ERR=4)
          return
 4        tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
          open(IU,file=tempname,status='old',ERR=5)
          return
          """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        # FortranWriter reformats the substituted template into valid
        # fixed-form Fortran while writing.
        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"), "r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            # Candidate locations for the LHAPDF sets, tried in order.
            to_add = """
          LHAPath='%(path)s/PDFsets'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='%(path)s/../lhapdf/pdfsets/'
          Inquire(File=LHAPath, exist=exists)
          if(exists)return
          LHAPath='./PDFsets'
          """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"), "r").read()
        ff.writelines(template % changer)

        return
539 540 541 542 #=========================================================================== 543 # write_maxparticles_file 544 #===========================================================================
545 - def write_maxparticles_file(self, writer, matrix_elements):
546 """Write the maxparticles.inc file for MadEvent""" 547 548 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 549 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 550 matrix_elements.get('matrix_elements')]) 551 else: 552 maxparticles = max([me.get_nexternal_ninitial()[0] \ 553 for me in matrix_elements]) 554 555 lines = "integer max_particles\n" 556 lines += "parameter(max_particles=%d)" % maxparticles 557 558 # Write the file 559 writer.writelines(lines) 560 561 return True
562 563 564 #=========================================================================== 565 # export the model 566 #===========================================================================
    def export_model_files(self, model_path):
        """Configure the files/link of the process according to the model"""

        # Import the model: copy every plain file found in the model
        # directory into Source/MODEL (sub-directories are skipped).
        for file in os.listdir(model_path):
            if os.path.isfile(pjoin(model_path, file)):
                shutil.copy2(pjoin(model_path, file), \
                             pjoin(self.dir_path, 'Source', 'MODEL'))
575 576 590 598 599 600 #=========================================================================== 601 # export the helas routine 602 #===========================================================================
603 - def export_helas(self, helas_path):
604 """Configure the files/link of the process according to the model""" 605 606 # Import helas routine 607 for filename in os.listdir(helas_path): 608 filepos = pjoin(helas_path, filename) 609 if os.path.isfile(filepos): 610 if filepos.endswith('Makefile.template'): 611 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 612 elif filepos.endswith('Makefile'): 613 pass 614 else: 615 cp(filepos, self.dir_path + '/Source/DHELAS')
616 # following lines do the same but whithout symbolic link 617 # 618 #def export_helas(mgme_dir, dir_path): 619 # 620 # # Copy the HELAS directory 621 # helas_dir = pjoin(mgme_dir, 'HELAS') 622 # for filename in os.listdir(helas_dir): 623 # if os.path.isfile(pjoin(helas_dir, filename)): 624 # shutil.copy2(pjoin(helas_dir, filename), 625 # pjoin(dir_path, 'Source', 'DHELAS')) 626 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 627 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 628 # 629 630 #=========================================================================== 631 # generate_subprocess_directory 632 #===========================================================================
633 - def generate_subprocess_directory(self, matrix_element, 634 fortran_model, 635 me_number):
636 """Routine to generate a subprocess directory (for inheritance)""" 637 638 pass
639 640 #=========================================================================== 641 # get_source_libraries_list 642 #===========================================================================
643 - def get_source_libraries_list(self):
644 """ Returns the list of libraries to be compiling when compiling the 645 SOURCE directory. It is different for loop_induced processes and 646 also depends on the value of the 'output_dependencies' option""" 647 648 return ['$(LIBDIR)libdhelas.$(libext)', 649 '$(LIBDIR)libpdf.$(libext)', 650 '$(LIBDIR)libmodel.$(libext)', 651 '$(LIBDIR)libcernlib.$(libext)', 652 '$(LIBDIR)libbias.$(libext)']
653 654 #=========================================================================== 655 # write_source_makefile 656 #===========================================================================
657 - def write_source_makefile(self, writer):
658 """Write the nexternal.inc file for MG4""" 659 660 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 661 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 662 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 663 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 664 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 665 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 666 else: 667 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 668 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 669 670 replace_dict= {'libraries': set_of_lib, 671 'model':model_line, 672 'additional_dsample': '', 673 'additional_dependencies':''} 674 675 if writer: 676 text = open(path).read() % replace_dict 677 writer.write(text) 678 679 return replace_dict
680 681 #=========================================================================== 682 # write_nexternal_madspin 683 #===========================================================================
684 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
685 """Write the nexternal_prod.inc file for madspin""" 686 687 replace_dict = {} 688 689 replace_dict['nexternal'] = nexternal 690 replace_dict['ninitial'] = ninitial 691 692 file = """ \ 693 integer nexternal_prod 694 parameter (nexternal_prod=%(nexternal)d) 695 integer nincoming_prod 696 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 697 698 # Write the file 699 if writer: 700 writer.writelines(file) 701 return True 702 else: 703 return replace_dict
704 705 #=========================================================================== 706 # write_helamp_madspin 707 #===========================================================================
708 - def write_helamp_madspin(self, writer, ncomb):
709 """Write the helamp.inc file for madspin""" 710 711 replace_dict = {} 712 713 replace_dict['ncomb'] = ncomb 714 715 file = """ \ 716 integer ncomb1 717 parameter (ncomb1=%(ncomb)d) 718 double precision helamp(ncomb1) 719 common /to_helamp/helamp """ % replace_dict 720 721 # Write the file 722 if writer: 723 writer.writelines(file) 724 return True 725 else: 726 return replace_dict
727 728 729 730 #=========================================================================== 731 # write_nexternal_file 732 #===========================================================================
733 - def write_nexternal_file(self, writer, nexternal, ninitial):
734 """Write the nexternal.inc file for MG4""" 735 736 replace_dict = {} 737 738 replace_dict['nexternal'] = nexternal 739 replace_dict['ninitial'] = ninitial 740 741 file = """ \ 742 integer nexternal 743 parameter (nexternal=%(nexternal)d) 744 integer nincoming 745 parameter (nincoming=%(ninitial)d)""" % replace_dict 746 747 # Write the file 748 if writer: 749 writer.writelines(file) 750 return True 751 else: 752 return replace_dict
753 #=========================================================================== 754 # write_pmass_file 755 #===========================================================================
756 - def write_pmass_file(self, writer, matrix_element):
757 """Write the pmass.inc file for MG4""" 758 759 model = matrix_element.get('processes')[0].get('model') 760 761 lines = [] 762 for wf in matrix_element.get_external_wavefunctions(): 763 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 764 if mass.lower() != "zero": 765 mass = "abs(%s)" % mass 766 767 lines.append("pmass(%d)=%s" % \ 768 (wf.get('number_external'), mass)) 769 770 # Write the file 771 writer.writelines(lines) 772 773 return True
774 775 #=========================================================================== 776 # write_ngraphs_file 777 #===========================================================================
778 - def write_ngraphs_file(self, writer, nconfigs):
779 """Write the ngraphs.inc file for MG4. Needs input from 780 write_configs_file.""" 781 782 file = " integer n_max_cg\n" 783 file = file + "parameter (n_max_cg=%d)" % nconfigs 784 785 # Write the file 786 writer.writelines(file) 787 788 return True
789 790 #=========================================================================== 791 # write_leshouche_file 792 #===========================================================================
793 - def write_leshouche_file(self, writer, matrix_element):
794 """Write the leshouche.inc file for MG4""" 795 796 # Write the file 797 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 798 799 return True
800 801 #=========================================================================== 802 # get_leshouche_lines 803 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Write the leshouche.inc file for MG4"""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG ids of all external legs for this (sub)process.
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mother information is the same for all subprocesses,
                # so it is written only once.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1, nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    repr_dict = {}
                    for l in legs:
                        # (-1)**(1+state) flips the colour sign for one of the
                        # two leg states — presumably the initial-state legs;
                        # verify against the Leg 'state' convention.
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
853 854 855 856 857 #=========================================================================== 858 # write_maxamps_file 859 #===========================================================================
860 - def write_maxamps_file(self, writer, maxamps, maxflows, 861 maxproc,maxsproc):
862 """Write the maxamps.inc file for MG4.""" 863 864 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 865 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 866 (maxamps, maxflows) 867 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 868 (maxproc, maxsproc) 869 870 # Write the file 871 writer.writelines(file) 872 873 return True
874 875 876 #=========================================================================== 877 # Routines to output UFO models in MG4 format 878 #=========================================================================== 879
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO)

        model: the MG5/UFO model to export.
        wanted_lorentz: restrict the ALOHA subroutines to these lorentz
            structures (all of them are generated when empty).
        wanted_couplings: couplings passed to the MODEL builder.
        Writes Source/MODEL and Source/DHELAS under self.dir_path and links
        them into the process directory.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']
        self.model = model
        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        self.opt['exporter'] = self.__class__
        # NOTE(review): relies on '+' being defined for the opt /
        # proc_characteristic containers (plain dicts would raise a
        # TypeError) — presumably custom mapping types; confirm.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            try:
                with misc.MuteLogger(['madgraph.models'], [60]):
                    aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
            except ImportError:
                aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # Loop routines (tags starting with 'L') need the loop-enabled
        # aloha_functions file.
        if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
942 943 944 #=========================================================================== 945 # Helper functions 946 #===========================================================================
947 - def modify_grouping(self, matrix_element):
948 """allow to modify the grouping (if grouping is in place) 949 return two value: 950 - True/False if the matrix_element was modified 951 - the new(or old) matrix element""" 952 953 return False, matrix_element
954 955 #=========================================================================== 956 # Helper functions 957 #===========================================================================
958 - def get_mg5_info_lines(self):
959 """Return info lines for MG5, suitable to place at beginning of 960 Fortran files""" 961 962 info = misc.get_pkg_info() 963 info_lines = "" 964 if info and 'version' in info and 'date' in info: 965 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 966 (info['version'], info['date']) 967 info_lines = info_lines + \ 968 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 969 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 970 else: 971 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 972 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 973 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 974 975 return info_lines
976
977 - def get_process_info_lines(self, matrix_element):
978 """Return info lines describing the processes for this matrix element""" 979 980 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 981 for process in matrix_element.get('processes')])
982 983
984 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
985 """Return the Helicity matrix definition lines for this matrix element""" 986 987 helicity_line_list = [] 988 i = 0 989 for helicities in matrix_element.get_helicity_matrix(): 990 i = i + 1 991 int_list = [i, len(helicities)] 992 int_list.extend(helicities) 993 helicity_line_list.append(\ 994 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 995 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 996 997 return "\n".join(helicity_line_list)
998
999 - def get_ic_line(self, matrix_element):
1000 """Return the IC definition line coming after helicities, required by 1001 switchmom in madevent""" 1002 1003 nexternal = matrix_element.get_nexternal_ninitial()[0] 1004 int_list = list(range(1, nexternal + 1)) 1005 1006 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 1007 ",".join([str(i) for \ 1008 i in int_list]))
1009
1010 - def set_chosen_SO_index(self, process, squared_orders):
1011 """ From the squared order constraints set by the user, this function 1012 finds what indices of the squared_orders list the user intends to pick. 1013 It returns this as a string of comma-separated successive '.true.' or 1014 '.false.' for each index.""" 1015 1016 user_squared_orders = process.get('squared_orders') 1017 split_orders = process.get('split_orders') 1018 1019 if len(user_squared_orders)==0: 1020 return ','.join(['.true.']*len(squared_orders)) 1021 1022 res = [] 1023 for sqsos in squared_orders: 1024 is_a_match = True 1025 for user_sqso, value in user_squared_orders.items(): 1026 if (process.get_squared_order_type(user_sqso) =='==' and \ 1027 value!=sqsos[split_orders.index(user_sqso)]) or \ 1028 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1029 value<sqsos[split_orders.index(user_sqso)]) or \ 1030 (process.get_squared_order_type(user_sqso) == '>' and \ 1031 value>=sqsos[split_orders.index(user_sqso)]): 1032 is_a_match = False 1033 break 1034 res.append('.true.' if is_a_match else '.false.') 1035 1036 return ','.join(res)
1037
1038 - def get_split_orders_lines(self, orders, array_name, n=5):
1039 """ Return the split orders definition as defined in the list orders and 1040 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1041 1042 ret_list = [] 1043 for index, order in enumerate(orders): 1044 for k in range(0, len(order), n): 1045 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1046 (array_name,index + 1, k + 1, min(k + n, len(order)), 1047 ','.join(["%5r" % i for i in order[k:k + n]]))) 1048 return ret_list
1049
1050 - def format_integer_list(self, list, name, n=5):
1051 """ Return an initialization of the python list in argument following 1052 the fortran syntax using the data keyword assignment, filling an array 1053 of name 'name'. It splits rows in chunks of size n.""" 1054 1055 ret_list = [] 1056 for k in range(0, len(list), n): 1057 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1058 (name, k + 1, min(k + n, len(list)), 1059 ','.join(["%5r" % i for i in list[k:k + n]]))) 1060 return ret_list
1061
1062 - def get_color_data_lines(self, matrix_element, n=6):
1063 """Return the color matrix definition lines for this matrix element. Split 1064 rows in chunks of size n.""" 1065 1066 if not matrix_element.get('color_matrix'): 1067 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1068 else: 1069 ret_list = [] 1070 my_cs = color.ColorString() 1071 for index, denominator in \ 1072 enumerate(matrix_element.get('color_matrix').\ 1073 get_line_denominators()): 1074 # First write the common denominator for this color matrix line 1075 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1076 # Then write the numerators for the matrix elements 1077 num_list = matrix_element.get('color_matrix').\ 1078 get_line_numerators(index, denominator) 1079 1080 assert all([int(i)==i for i in num_list]) 1081 1082 for k in range(0, len(num_list), n): 1083 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1084 (index + 1, k + 1, min(k + n, len(num_list)), 1085 ','.join(["%5i" % int(i) for i in num_list[k:k + n]]))) 1086 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1087 ret_list.append("C %s" % repr(my_cs)) 1088 1089 return ret_list
1090 1091
1092 - def get_den_factor_line(self, matrix_element):
1093 """Return the denominator factor line for this matrix element""" 1094 1095 return "DATA IDEN/%2r/" % \ 1096 matrix_element.get_denominator_factor()
1097
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        mapconfigs: diagram number for each config (0 entries are skipped).
        num_matrix_element: index of this matrix element in the group.
        Only leading-color flows (Nc power == max_Nc) are flagged .true..
        """

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Create dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # ijamp is the last loop index: total number of JAMPs (color flows).
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                                (iconfig+1, num_matrix_element, colamps,
                                 ','.join(["%s" % booldict[b] for b in \
                                           bool_list])))

        return ret_list
1156
    def get_amp2_lines(self, matrix_element, config_map = []):
        """Return the amp2(i) = sum(amp for diag(i))^2 lines.

        When config_map is given, amplitudes of diagrams mapped to the same
        config are summed coherently before squaring; otherwise each
        diagram's amplitudes are squared individually (skipping diagrams
        with vertices larger than the minimal multiplicity).
        """

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        # Get minimum legs in a vertex
        vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                     matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        ret_lines = []
        if config_map:
            # In this case, we need to sum up all amplitudes that have
            # identical topologies, as given by the config_map (which
            # gives the topology/config for each of the diagrams
            diagrams = matrix_element.get('diagrams')
            # Combine the diagrams with identical topologies
            config_to_diag_dict = {}
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                if config_map[idiag] == 0:
                    continue
                try:
                    config_to_diag_dict[config_map[idiag]].append(idiag)
                except KeyError:
                    config_to_diag_dict[config_map[idiag]] = [idiag]
            # Write out the AMP2s summing squares of amplitudes belonging
            # to either the same diagram or different diagrams with
            # identical propagator properties.  Note that we need to use
            # AMP2 number corresponding to the first diagram number used
            # for that AMP2.
            for config in sorted(config_to_diag_dict.keys()):

                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                       {"num": (config_to_diag_dict[config][0] + 1)}

                amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                                sum([diagrams[idiag].get('amplitudes') for \
                                     idiag in config_to_diag_dict[config]], [])])

                # Not using \sum |M|^2 anymore since this creates troubles
                # when ckm is not diagonal due to the JIM mechanism.
                if '+' in amp:
                    line += "(%s)*dconjg(%s)" % (amp, amp)
                else:
                    line += "%s*dconjg(%s)" % (amp, amp)
                ret_lines.append(line)
        else:
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # Ignore any diagrams with 4-particle vertices.
                if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                    continue
                # Now write out the expression for AMP2, meaning the sum of
                # squared amplitudes belonging to the same diagram
                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
                line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                                  {"num": a.get('number')} for a in \
                                  diag.get('amplitudes')])
                ret_lines.append(line)

        return ret_lines
    #===========================================================================
    # Returns the data statements initializing the coefficients for the JAMP
    # decomposition. It is used when the JAMP initialization is decided to be
    # done through big arrays containing the projection coefficients.
    #===========================================================================
1222 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1223 n=50, Nc_value=3):
1224 """This functions return the lines defining the DATA statement setting 1225 the coefficients building the JAMPS out of the AMPS. Split rows in 1226 bunches of size n. 1227 One can specify the color_basis from which the color amplitudes originates 1228 so that there are commentaries telling what color structure each JAMP 1229 corresponds to.""" 1230 1231 if(not isinstance(color_amplitudes,list) or 1232 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1233 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs") 1234 1235 res_list = [] 1236 my_cs = color.ColorString() 1237 for index, coeff_list in enumerate(color_amplitudes): 1238 # Create the list of the complete numerical coefficient. 1239 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1240 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1241 coefficient in coeff_list] 1242 # Create the list of the numbers of the contributing amplitudes. 1243 # Mutliply by -1 for those which have an imaginary coefficient. 1244 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1245 for coefficient in coeff_list] 1246 # Find the common denominator. 
1247 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1248 num_list=[(coefficient*commondenom).numerator \ 1249 for coefficient in coefs_list] 1250 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1251 index+1,len(num_list))) 1252 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1253 index+1,commondenom)) 1254 if color_basis: 1255 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1256 res_list.append("C %s" % repr(my_cs)) 1257 for k in range(0, len(num_list), n): 1258 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1259 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1260 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1261 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1262 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1263 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1264 pass 1265 return res_list
1266 1267
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                                   split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code."""

        # Let the user call get_JAMP_lines_split_order directly from a
        # matrix element or from a color-amplitudes list.
        error_msg="Malformed '%s' argument passed to the "+\
                  "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error(error_msg%'col_amps')
        else:
            raise MadGraph5Error(error_msg%'col_amps')

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error(error_msg%'split_order_amps')
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error(error_msg%'split_order_amps')
        else:
            raise MadGraph5Error(error_msg%'split_order_amps')

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error(error_msg%'split_order_names')
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error(error_msg%'split_order_names')
            else:
                raise MadGraph5Error(error_msg%'split_order_names')

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        for i, amp_order in enumerate(split_order_amps):
            # Keep, for each JAMP, only the contributions whose amplitude
            # number belongs to this order group.
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]])
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                    ['%s=%i'%order for order in zip(split_order_names,
                                                    amp_order[0])]))
            # matchbox also needs the leading-color JAMP arrays.
            if self.opt['export_format'] in ['madloop_matchbox']:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                    JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                    JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
            else:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                    JAMP_format="JAMP(%s,{0})".format(str(i+1))))

        return res_list
1341 1342
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary,
        Jamp_formatLC should be define to allow to add LeadingColor computation
        (usefull for MatchBox)
        The split argument defines how the JAMP lines should be split in order
        not to be too long."""

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")
        else:
            raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")


        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # After the first chunk the JAMP accumulates onto itself:
                # JAMP(i) = JAMP(i) + <remaining terms>.
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    if common_factor:
                        # Only the sign of the fraction is kept per term; the
                        # magnitude was factored out in front.
                        res = (res + "%s" + AMP_format) % \
                              (self.coeff(coefficient[0],
                                          coefficient[1] / abs(coefficient[1]),
                                          coefficient[2],
                                          coefficient[3]),
                               str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                      coefficient[1],
                                                                      coefficient[2],
                                                                      coefficient[3]),
                                                           str(amp_number))

                if common_factor:
                    res = res + ')'

                res_list.append(res)

        return res_list
1419
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns the triple (pdf_definition_lines, pdf_data_lines, pdf_lines)
        of Fortran snippets: variable declarations, DATA initialisations and
        the code computing PD(IPROC) for each process.  For decays
        (ninitial == 1) the PDFs are trivially set to 1.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no beams, every PD entry is just 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            # Sanitize particle names into valid Fortran identifiers.
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                        ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                  for pdg in \
                                                  initial_states[i]]) + \
                                        "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                  "/%d*1D0/" % len(initial_states[i]) + \
                                  "\n"

            # Get PDF lines for all different initial states
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    # Within a subprocess group, the beam index goes through
                    # the IB mapping.
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in list(pdf_codes.keys()):
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1,
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in list(pdf_codes.keys()):
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1526 1527 #=========================================================================== 1528 # write_props_file 1529 #===========================================================================
    def write_props_file(self, writer, matrix_element, s_and_t_channels):
        """Write the props.inc file for MadEvent. Needs input from
        write_configs_file.

        For every propagator of every config it emits prmass, prwidth and
        pow lines (pow = 1 for fermions, 2 for bosons).  Returns True.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        for iconf, configs in enumerate(s_and_t_channels):
            # configs[0] are the s-channels, configs[1] the t-channels;
            # the last t-channel vertex carries no propagator.
            for vertex in configs[0] + configs[1][:-1]:
                leg = vertex.get('legs')[-1]
                if leg.get('id') not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    particle = particle_dict[leg.get('id')]
                    # Get mass
                    if particle.get('mass').lower() == 'zero':
                        mass = particle.get('mass')
                    else:
                        mass = "abs(%s)" % particle.get('mass')
                    # Get width
                    if particle.get('width').lower() == 'zero':
                        width = particle.get('width')
                    else:
                        width = "abs(%s)" % particle.get('width')

                    pow_part = 1 + int(particle.is_boson())

                lines.append("prmass(%d,%d)  = %s" % \
                             (leg.get('number'), iconf + 1, mass) \
                             if False else "prmass(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, mass))
                lines.append("prwidth(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, width))
                lines.append("pow(%d,%d) = %d" % \
                             (leg.get('number'), iconf + 1, pow_part))

        # Write the file
        writer.writelines(lines)

        return True
1573 1574 #=========================================================================== 1575 # write_configs_file 1576 #===========================================================================
1577 - def write_configs_file(self, writer, matrix_element):
1578 """Write the configs.inc file for MadEvent""" 1579 1580 # Extract number of external particles 1581 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1582 1583 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1584 mapconfigs = [c[0] for c in configs] 1585 model = matrix_element.get('processes')[0].get('model') 1586 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1587 [[c[1]] for c in configs], 1588 mapconfigs, 1589 nexternal, ninitial, 1590 model)
1591 1592 #=========================================================================== 1593 # write_configs_file_from_diagrams 1594 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the s_and_t_channels list (one [schannels, tchannels]
        entry per written config), to be reused by write_props_file."""

        lines = []

        s_and_t_channels = []

        # Minimal vertex multiplicity over all configs: configs containing
        # larger vertices are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used for the intermediate legs created by
        # get_s_and_t_channels.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel entries hold one vertex per subprocess;
                    # pick the first real one for the common leg structure.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One s-channel propagator PDG per subprocess (0 when the
                    # subprocess does not contribute to this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1713 1714 #=========================================================================== 1715 # Global helper methods 1716 #=========================================================================== 1717
1718 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1719 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1720 1721 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1722 1723 if total_coeff == 1: 1724 if is_imaginary: 1725 return '+imag1*' 1726 else: 1727 return '+' 1728 elif total_coeff == -1: 1729 if is_imaginary: 1730 return '-imag1*' 1731 else: 1732 return '-' 1733 1734 res_str = '%+iD0' % total_coeff.numerator 1735 1736 if total_coeff.denominator != 1: 1737 # Check if total_coeff is an integer 1738 res_str = res_str + '/%iD0' % total_coeff.denominator 1739 1740 if is_imaginary: 1741 res_str = res_str + '*imag1' 1742 1743 return res_str + '*'
1744 1745
    def set_fortran_compiler(self, default_compiler, force=False):
        """Set compiler based on what's available on the system.

        :param default_compiler: dict with 'fortran' and 'f2py' entries giving
            the user's preferred executables (either may be empty).
        :param force: unused here; kept for interface compatibility with
            callers/overriders.
        :return: the fortran compiler name finally selected.
        """

        # Check for compiler: honour the user's choice when it is actually in
        # PATH, then fall back through the common fortran compilers.
        if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
            f77_compiler = default_compiler['fortran']
        elif misc.which('gfortran'):
            f77_compiler = 'gfortran'
        elif misc.which('g77'):
            f77_compiler = 'g77'
        elif misc.which('f77'):
            f77_compiler = 'f77'
        elif default_compiler['fortran']:
            logger.warning('No Fortran Compiler detected! Please install one')
            f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it
        else:
            raise MadGraph5Error('No Fortran Compiler detected! Please install one')
        logger.info('Use Fortran compiler ' + f77_compiler)


        # Check for compiler. 1. set default.
        if default_compiler['f2py']:
            f2py_compiler = default_compiler['f2py']
        else:
            f2py_compiler = ''
        # Try to find the correct one.
        # NOTE(review): if the preferred f2py is set but not in PATH while a
        # plain 'f2py' is, the plain one overrides the preference.
        if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
            f2py_compiler = default_compiler['f2py']
        elif misc.which('f2py'):
            f2py_compiler = 'f2py'
        elif sys.version_info[1] == 6:
            # legacy python-2.6-suffixed f2py executables
            if misc.which('f2py-2.6'):
                f2py_compiler = 'f2py-2.6'
            elif misc.which('f2py2.6'):
                f2py_compiler = 'f2py2.6'
        elif sys.version_info[1] == 7:
            # legacy python-2.7-suffixed f2py executables
            if misc.which('f2py-2.7'):
                f2py_compiler = 'f2py-2.7'
            elif misc.which('f2py2.7'):
                f2py_compiler = 'f2py2.7'

        to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}


        # Propagate the choice into Source/make_opts of the current output.
        self.replace_make_opt_f_compiler(to_replace)
        # Replace also for Template but not for cluster
        if 'MADGRAPH_DATA' not in os.environ and ReadWrite:
            self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

        return f77_compiler

    # an alias for backward compatibility
    set_compiler = set_fortran_compiler
    def set_cpp_compiler(self, default_compiler, force=False):
        """Set compiler based on what's available on the system.

        Prefers the user's choice when found in PATH, otherwise tries
        g++/c++/clang.  Returns the compiler name finally selected.
        (`force` is unused; kept for interface compatibility.)
        """

        # Check for compiler
        if default_compiler and misc.which(default_compiler):
            compiler = default_compiler
        elif misc.which('g++'):
            #check if clang version
            # On some systems (notably macOS) 'g++' is a clang front-end:
            # inspect the version banner to detect that case.
            p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
            out, _ = p.communicate()
            out = out.decode()
            if 'clang' in str(out) and misc.which('clang'):
                compiler = 'clang'
            else:
                compiler = 'g++'
        elif misc.which('c++'):
            compiler = 'c++'
        elif misc.which('clang'):
            compiler = 'clang'
        elif default_compiler:
            logger.warning('No c++ Compiler detected! Please install one')
            compiler = default_compiler # maybe misc fail so try with it
        else:
            raise MadGraph5Error('No c++ Compiler detected! Please install one')
        logger.info('Use c++ compiler ' + compiler)
        self.replace_make_opt_c_compiler(compiler)
        # Replace also for Template but not for cluster
        # (only when python runs optimized, i.e. not __debug__, and this is
        # not a release checkout)
        if 'MADGRAPH_DATA' not in os.environ and ReadWrite and \
            not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')):
            self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO'))

        return compiler
1834 1835
1836 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1837 """Set FC=compiler in Source/make_opts""" 1838 1839 assert isinstance(compilers, dict) 1840 1841 mod = False #avoid to rewrite the file if not needed 1842 if not root_dir: 1843 root_dir = self.dir_path 1844 1845 compiler= compilers['fortran'] 1846 f2py_compiler = compilers['f2py'] 1847 if not f2py_compiler: 1848 f2py_compiler = 'f2py' 1849 for_update= {'DEFAULT_F_COMPILER':compiler, 1850 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1851 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1852 1853 try: 1854 common_run_interface.CommonRunCmd.update_make_opts_full( 1855 make_opts, for_update) 1856 except IOError: 1857 if root_dir == self.dir_path: 1858 logger.info('Fail to set compiler. Trying to continue anyway.')
1859
1860 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1861 """Set CXX=compiler in Source/make_opts. 1862 The version is also checked, in order to set some extra flags 1863 if the compiler is clang (on MACOS)""" 1864 1865 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1866 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1867 1868 1869 # list of the variable to set in the make_opts file 1870 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1871 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1872 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1873 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1874 } 1875 1876 # for MOJAVE remove the MACFLAG: 1877 if is_clang: 1878 import platform 1879 version, _, _ = platform.mac_ver() 1880 if not version:# not linux 1881 version = 14 # set version to remove MACFLAG 1882 else: 1883 version = int(version.split('.')[1]) 1884 if version >= 14: 1885 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1886 1887 if not root_dir: 1888 root_dir = self.dir_path 1889 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1890 1891 try: 1892 common_run_interface.CommonRunCmd.update_make_opts_full( 1893 make_opts, for_update) 1894 except IOError: 1895 if root_dir == self.dir_path: 1896 logger.info('Fail to set compiler. Trying to continue anyway.') 1897 1898 return
1899
1900 #=============================================================================== 1901 # ProcessExporterFortranSA 1902 #=============================================================================== 1903 -class ProcessExporterFortranSA(ProcessExporterFortran):
1904 """Class to take care of exporting a set of matrix elements to 1905 MadGraph v4 StandAlone format.""" 1906 1907 matrix_template = "matrix_standalone_v4.inc" 1908
1909 - def __init__(self, *args,**opts):
1910 """add the format information compare to standard init""" 1911 1912 if 'format' in opts: 1913 self.format = opts['format'] 1914 del opts['format'] 1915 else: 1916 self.format = 'standalone' 1917 1918 self.prefix_info = {} 1919 ProcessExporterFortran.__init__(self, *args, **opts)
1920
    def copy_template(self, model):
        """Additional actions needed for setup of Template.

        Creates the standalone directory skeleton, copies version info, the
        SubProcesses makefile (plus the check_sa driver for the plain
        'standalone' format), the make_opts file, and writes the Source
        makefile.  No-op when the target directory already exists.
        """

        #First copy the full template tree if dir_path doesn't exit
        if os.path.isdir(self.dir_path):
            return

        logger.info('initialize a new standalone directory: %s' % \
                        os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')

        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))

        # Information at top-level
        #Write version info
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # no MGMEVersion.txt shipped: synthesize one from the MG5 version
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])


        # Add file in SubProcesses
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefileP'))

        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                        pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FileWriter(filename))
1969 1970 #=========================================================================== 1971 # export model files 1972 #===========================================================================
1973 - def export_model_files(self, model_path):
1974 """export the model dependent files for V4 model""" 1975 1976 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1977 # Add the routine update_as_param in v4 model 1978 # This is a function created in the UFO 1979 text=""" 1980 subroutine update_as_param() 1981 call setpara('param_card.dat',.false.) 1982 return 1983 end 1984 """ 1985 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1986 ff.write(text) 1987 ff.close() 1988 1989 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1990 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1991 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1992 fsock.write(text) 1993 fsock.close() 1994 1995 self.make_model_symbolic_link()
1996 1997 #=========================================================================== 1998 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1999 #===========================================================================
2000 - def write_procdef_mg5(self, file_pos, modelname, process_str):
2001 """ write an equivalent of the MG4 proc_card in order that all the Madevent 2002 Perl script of MadEvent4 are still working properly for pure MG5 run. 2003 Not needed for StandAlone so just return 2004 """ 2005 2006 return
2007 2008 2009 #=========================================================================== 2010 # Make the Helas and Model directories for Standalone directory 2011 #===========================================================================
2012 - def make(self):
2013 """Run make in the DHELAS and MODEL directories, to set up 2014 everything for running standalone 2015 """ 2016 2017 source_dir = pjoin(self.dir_path, "Source") 2018 logger.info("Running make for Helas") 2019 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2020 logger.info("Running make for Model") 2021 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2022 2023 #=========================================================================== 2024 # Create proc_card_mg5.dat for Standalone directory 2025 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
           generation proc_card_mg5.dat
           generate a global makefile

        Also selects/records the compilers, compiles DHELAS/MODEL, and sets
        up either the f2py dispatcher files (when prefix info is present) or
        a global SubProcesses makefile.
        """

        compiler =  {'fortran': mg5options['fortran_compiler'],
                     'cpp': mg5options['cpp_compiler'],
                     'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                             history, mg5options, flaglist)
        # touch package markers so the output directory is importable
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        # NOTE(review): deliberately disabled reweight-mode branch (guarded
        # by 'if False'); kept for reference.
        if False:#'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                          pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # prefixed matrix elements: emit the f2py dispatcher machinery
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                                   pjoin(self.dir_path,'SubProcesses','check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text+='all: %s\n\techo \'done\'' % deppython

            # append to the makefile copied in by copy_template
            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
            ff.write(text)
            ff.close()
2075
    def write_f2py_splitter(self):
        """write a function to call the correct matrix element

        Generates SubProcesses/all_matrix.f: an f2py-exposed smatrixhel that
        dispatches on the pdg list (and optional process id) to the prefixed
        per-process smatrixhel routines, plus helpers to initialise the
        param_card, change parameters, and query the pdg ordering/prefixes.
        """
        # Fortran template; %(...)s slots are filled from `formatting` below.
        # (FortranWriter reformats the lines when the file is written.)
        template = """
      %(python_information)s
      subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE
CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in):: procid
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel, procid
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
      if (scale2.ne.0d0) stop 1

      %(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END


      subroutine CHANGE_PARA(name, value)
      implicit none
CF2PY intent(in) :: name
CF2PY intent(in) :: value

      character*512 name
      double precision value

      include '../Source/MODEL/input.inc'
      include '../Source/MODEL/coupl.inc'

      SELECT CASE (name)
      %(parameter_setup)s
      CASE DEFAULT
      write(*,*) 'no parameter matching', name, value
      END SELECT

      return
      end

      subroutine update_all_coup()
      implicit none
      call coup()
      return
      end


      subroutine get_pdg_order(PDG, ALLPROC)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i)
      DATA PDGS/ %(pdgs)s /
      DATA PIDS/ %(pids)s /
      PDG = PDGS
      ALLPROC = PIDS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END


      """

        # keys of prefix_info are (pdg tuple, process id) pairs
        allids = list(self.prefix_info.keys())
        allprefix = [self.prefix_info[key][0] for key in allids]
        min_nexternal = min([len(ids[0]) for ids in allids])
        max_nexternal = max([len(ids[0]) for ids in allids])

        # one '#PY' bookkeeping line per process: tag, pdgs, prefix, pid
        info = []
        for (key, pid), (prefix, tag) in self.prefix_info.items():
            info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid))


        # Build the dispatch code: branch on the number of external legs
        # (only when it varies), then on the exact pdg list / process id.
        text = []
        for n_ext in range(min_nexternal, max_nexternal+1):
            current_id = [ids[0] for ids in allids if len(ids[0])==n_ext]
            current_pid = [ids[1] for ids in allids if len(ids[0])==n_ext]
            if not current_id:
                continue
            if min_nexternal != max_nexternal:
                if n_ext == min_nexternal:
                    text.append(' if (npdg.eq.%i)then' % n_ext)
                else:
                    text.append(' else if (npdg.eq.%i)then' % n_ext)
            for ii,pdgs in enumerate(current_id):
                pid = current_pid[ii]
                condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)])
                if ii==0:
                    text.append( ' if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
                else:
                    text.append( ' else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition,pid,ii))
                text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs,pid)][0])
            text.append(' endif')
        #close the function
        if min_nexternal != max_nexternal:
            text.append('endif')

        # SELECT CASE body mapping user parameter names to model variables
        params = self.get_model_parameter(self.model)
        parameter_setup =[]
        for key, var in params.items():
            parameter_setup.append('      CASE ("%s")\n          %s = value'
                                   % (key, var))

        formatting = {'python_information':'\n'.join(info),
                      'smatrixhel': '\n'.join(text),
                      'maxpart': max_nexternal,
                      'nb_me': len(allids),
                      'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0'
                                       for i in range(max_nexternal) for (pdg,pid) in allids),
                      'prefix':'\',\''.join(allprefix),
                      'pids': ','.join(str(pid) for (pdg,pid) in allids),
                      'parameter_setup': '\n'.join(parameter_setup),
                      }
        # NOTE(review): 'lenprefix' is computed but not referenced by the
        # template above — presumably legacy; confirm before removing.
        formatting['lenprefix'] = len(formatting['prefix'])
        text = template % formatting
        fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w')
        fsock.writelines(text)
        fsock.close()
2224
2225 - def get_model_parameter(self, model):
2226 """ returns all the model parameter 2227 """ 2228 params = {} 2229 for p in model.get('parameters')[('external',)]: 2230 name = p.name 2231 nopref = name[4:] if name.startswith('mdl_') else name 2232 params[nopref] = name 2233 2234 block = p.lhablock 2235 lha = '_'.join([str(i) for i in p.lhacode]) 2236 params['%s_%s' % (block.upper(), lha)] = name 2237 2238 return params
2239 2240 2241 2242 2243
2244 - def write_f2py_check_sa(self, matrix_element, writer):
2245 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2246 # To be implemented. It is just an example file, i.e. not crucial. 2247 return
2248
2249 - def write_f2py_makefile(self):
2250 """ """ 2251 # Add file in SubProcesses 2252 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2253 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2254
2255 - def create_MA5_cards(self,*args,**opts):
2256 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2257 pass
2258
    def compiler_choice(self, compiler):
        """ Different daughter classes might want different compilers.
        So this function is meant to be overloaded if desired.

        :param compiler: dict with 'fortran', 'cpp' and 'f2py' entries
            (see finalize).
        """

        # delegate to set_compiler (alias of set_fortran_compiler)
        self.set_compiler(compiler)
2264 2265 #=========================================================================== 2266 # generate_subprocess_directory 2267 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.

        :param matrix_element: HelasMatrixElement to export.
        :param fortran_model: helas call writer producing the fortran calls.
        :param number: process number, used for the optional 'int' prefix.
        :return: number of generated helas calls (0 when nothing is written).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if a directory for any mirror/permuted
            # version of this process already exists, skip generation.
            for i,proc in enumerate(matrix_element.get('processes')):

                tag = proc.get_tag()
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                # put the initial-state legs in canonical order
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                flegs = proc.get('legs')[2:]
                # probe every final-state permutation for an existing Pdir
                for perm in itertools.permutations(flegs):
                    for i,p in enumerate(perm):
                        proc.get('legs')[i+2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                               "P%s" % proc.shell_string())
                    #restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional routine prefix: 'int' -> M<number>_, 'proc' -> process tag
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1]
            else:
                raise Exception('--prefix options supports only \'int\' and \'proc\'')
            # record (pdg-ids, proc-id) -> [prefix, tag] for the f2py splitter
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[(tuple(ids), proc.get('id'))] = [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        # MadSpin production output needs configs/props/nexternal variants
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                             matrix_element,
                             s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                             nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb=matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                             ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(matrix_element.get_all_amplitudes()))

        # Generate diagrams (unless suppressed via the 'noeps' output option)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # With a prefix, a per-process check_sa.f is generated (smatrix calls
        # renamed) instead of linking the shared one.
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            pat = re.compile('smatrix', re.I)
            new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
            with open(pjoin(dirpath, 'check_sa.f'),'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2407 2408 2409 #=========================================================================== 2410 # write_source_makefile 2411 #===========================================================================
2412 - def write_source_makefile(self, writer):
2413 """Write the nexternal.inc file for MG4""" 2414 2415 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2416 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2417 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2418 2419 replace_dict= {'libraries': set_of_lib, 2420 'model':model_line, 2421 'additional_dsample': '', 2422 'additional_dependencies':''} 2423 2424 text = open(path).read() % replace_dict 2425 2426 if writer: 2427 writer.write(text) 2428 2429 return replace_dict
2430 2431 #=========================================================================== 2432 # write_matrix_element_v4 2433 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format
        if write is on False, just return the replace_dict and not write anything.

        :param writer: FortranWriter target (required when write is True).
        :param matrix_element: HelasMatrixElement to export.
        :param fortran_model: helas call writer producing the fortran calls.
        :param write: when False, return the filled replacement dict instead.
        :param proc_prefix: optional prefix for the generated routine names.
        :return: number of non-comment helas calls, or the replace_dict
            when write is False.
        """


        # nothing to export for an empty matrix element
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        if 'sa_symmetry' not in self.opt:
            self.opt['sa_symmetry']=False


        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                 "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)

            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            # NOTE(review): the writer is opened relative to the current
            # working directory, not dirpath — confirm this is intended.
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
              nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # pick the matrix template matching the export format
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                 'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
                writer.writelines(content)
            return len([call for call in helas_calls if call.find('#') != 0])
        else:
            replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0])
            return replace_dict # for subclass update
2604
2605 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2606 nincoming, proc_prefix, writer):
2607 """ Write out a more advanced version of the check_sa drivers that 2608 individually returns the matrix element for each contributing squared 2609 order.""" 2610 2611 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2612 'template_files', 'check_sa_splitOrders.f')).read() 2613 printout_sq_orders=[] 2614 for i, squared_order in enumerate(squared_orders): 2615 sq_orders=[] 2616 for j, sqo in enumerate(squared_order): 2617 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2618 printout_sq_orders.append(\ 2619 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2620 %(i+1,' '.join(sq_orders),i+1)) 2621 printout_sq_orders='\n'.join(printout_sq_orders) 2622 replace_dict = {'printout_sqorders':printout_sq_orders, 2623 'nSplitOrders':len(squared_orders), 2624 'nexternal':nexternal, 2625 'nincoming':nincoming, 2626 'proc_prefix':proc_prefix} 2627 2628 if writer: 2629 writer.writelines(check_sa_content % replace_dict) 2630 else: 2631 return replace_dict
2632
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # Matchbox-specific defaults: no directory cleaning, symmetric SA output.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Emits a Fortran if/elseif chain mapping (in1, in2) index pairs to the
        color-structure arguments; returns 'out = 1' when there is no color
        matrix, and 'out = - 1' for unmatched indices.
        """

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in range(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str = repr(my_cs)
            t_match = re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg = []
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne']:
                    # Trivial color factor: contributes no indices.
                    continue
                if ctype not in ['T', 'Tr']:
                    # Only fundamental/trace structures are supported by Matchbox.
                    raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype)
                # '0' acts as a separator between successive structures.
                tmparg += ['0']
                arg += tmparg
            # Flatten the collected indices into (row, position) -> value.
            for j, v in enumerate(arg):
                output[(i_color, j)] = v

            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self, *args, **opts):
        # Matchbox output is compiled externally; nothing to build here.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Adding leading color part of the colorflow

        Writes the standard JAMP lines, then appends a second set (prefixed
        'LN' by default) built only from coefficients with no 1/NC part
        (coefficient[3] == 0), i.e. the leading-color flows.
        """

        if not JAMP_formatLC:
            JAMP_formatLC = "LN%s" % JAMP_format

        error_msg = "Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps, helas_objects.HelasMatrixElement)):
            col_amps = col_amps.get_color_amplitudes()
        elif(isinstance(col_amps, list)):
            if(col_amps and isinstance(col_amps[0], list)):
                col_amps = col_amps
            else:
                raise MadGraph5Error(error_msg % 'col_amps')
        else:
            raise MadGraph5Error(error_msg % 'col_amps')

        # NOTE(review): split=-1 is forced here regardless of the 'split'
        # argument — presumably deliberate for Matchbox; confirm.
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                                    JAMP_format=JAMP_format,
                                                    AMP_format=AMP_format,
                                                    split=-1)

        # Filter the col_ampls to generate only those without any 1/NC terms
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3] == 0:
                    to_add.append((coefficient, amp_number))
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                                    JAMP_format=JAMP_formatLC,
                                                    AMP_format=AMP_format,
                                                    split=-1)

        return text
2743
#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # Fortran template used by write_matrix_element_v4 when writing matrix.f
    matrix_file="matrix_standalone_v4.inc"
    def copy_template(self, model):
        """Additional actions needed for setup of Template

        After the generic Fortran template copy, installs the MadWeight
        tree (Template/MadWeight and the madweight python package), moves
        setrun.f/run.inc into Source, writes run_config.inc, runs the
        pass_to_madweight script, copies the python helpers, builds the
        MadWeight cuts.f and writes the Source makefile.
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        shutil.copytree(pjoin(MG5DIR, 'Template', 'MadWeight'),
                        pjoin(self.dir_path, 'Source', 'MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR, 'madgraph', 'madweight'),
                        pjoin(self.dir_path, 'bin', 'internal', 'madweight'), True)
        files.mv(pjoin(self.dir_path, 'Source', 'MadWeight', 'src', 'setrun.f'),
                 pjoin(self.dir_path, 'Source', 'setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source', 'MadWeight', 'src', 'run.inc'),
                 pjoin(self.dir_path, 'Source', 'run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path, 'Source', 'run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        try:
            # NOTE(review): the os.open file descriptors used to silence the
            # child process are never closed here — small fd leak per call.
            subprocess.call([os.path.join(self.dir_path, 'Source', 'MadWeight', 'bin', 'internal', 'pass_to_madweight')],
                            stdout=os.open(os.devnull, os.O_RDWR),
                            stderr=os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2792 2793 2794 2795 2796 #=========================================================================== 2797 # convert_model 2798 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert the UFO model for this output and install a filtered copy
        of the model directory under bin/internal/ufomodel, together with the
        restriction card when the model carries one."""

        super(ProcessExporterFortranMW, self).convert_model(model,
                                                wanted_lorentz, wanted_couplings)

        # Skip compiled/py-generated artefacts when copying the model over.
        IGNORE_PATTERNS = ('*.pyc', '*.dat', '*.py~')
        try:
            # Remove any stale copy first; ignore "does not exist" errors.
            shutil.rmtree(pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'))
        except OSError as error:
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                        pjoin(self.dir_path, 'bin', 'internal', 'ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal', 'ufomodel',
                             'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
2822 2823 #=========================================================================== 2824 # generate_subprocess_directory 2825 #===========================================================================
2826 - def copy_python_file(self):
2827 """copy the python file require for the Template""" 2828 2829 # madevent interface 2830 cp(_file_path+'/interface/madweight_interface.py', 2831 self.dir_path+'/bin/internal/madweight_interface.py') 2832 cp(_file_path+'/interface/extended_cmd.py', 2833 self.dir_path+'/bin/internal/extended_cmd.py') 2834 cp(_file_path+'/interface/common_run_interface.py', 2835 self.dir_path+'/bin/internal/common_run_interface.py') 2836 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2837 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2838 cp(_file_path+'/iolibs/save_load_object.py', 2839 self.dir_path+'/bin/internal/save_load_object.py') 2840 cp(_file_path+'/madevent/gen_crossxhtml.py', 2841 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2842 cp(_file_path+'/madevent/sum_html.py', 2843 self.dir_path+'/bin/internal/sum_html.py') 2844 cp(_file_path+'/various/FO_analyse_card.py', 2845 self.dir_path+'/bin/internal/FO_analyse_card.py') 2846 cp(_file_path+'/iolibs/file_writers.py', 2847 self.dir_path+'/bin/internal/file_writers.py') 2848 #model file 2849 cp(_file_path+'../models/check_param_card.py', 2850 self.dir_path+'/bin/internal/check_param_card.py') 2851 2852 #madevent file 2853 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2854 cp(_file_path+'/various/lhe_parser.py', 2855 self.dir_path+'/bin/internal/lhe_parser.py') 2856 2857 cp(_file_path+'/various/banner.py', 2858 self.dir_path+'/bin/internal/banner.py') 2859 cp(_file_path+'/various/shower_card.py', 2860 self.dir_path+'/bin/internal/shower_card.py') 2861 cp(_file_path+'/various/cluster.py', 2862 self.dir_path+'/bin/internal/cluster.py') 2863 2864 # logging configuration 2865 cp(_file_path+'/interface/.mg5_logging.conf', 2866 self.dir_path+'/bin/internal/me5_logging.conf') 2867 cp(_file_path+'/interface/coloring_logging.py', 2868 self.dir_path+'/bin/internal/coloring_logging.py')
2869 2870 2871 #=========================================================================== 2872 # Change the version of cuts.f to the one compatible with MW 2873 #===========================================================================
2874 - def get_mw_cuts_version(self, outpath=None):
2875 """create the appropriate cuts.f 2876 This is based on the one associated to ME output but: 2877 1) No clustering (=> remove initcluster/setclscales) 2878 2) Adding the definition of cut_bw at the file. 2879 """ 2880 2881 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2882 2883 text = StringIO() 2884 #1) remove all dependencies in ickkw >1: 2885 nb_if = 0 2886 for line in template: 2887 if 'if(xqcut.gt.0d0' in line: 2888 nb_if = 1 2889 if nb_if == 0: 2890 text.write(line) 2891 continue 2892 if re.search(r'if\(.*\)\s*then', line): 2893 nb_if += 1 2894 elif 'endif' in line: 2895 nb_if -= 1 2896 2897 #2) add fake cut_bw (have to put the true one later) 2898 text.write(""" 2899 logical function cut_bw(p) 2900 include 'madweight_param.inc' 2901 double precision p(*) 2902 if (bw_cut) then 2903 cut_bw = .true. 2904 else 2905 stop 1 2906 endif 2907 return 2908 end 2909 """) 2910 2911 final = text.getvalue() 2912 #3) remove the call to initcluster: 2913 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2914 template = template.replace('genps.inc', 'maxparticles.inc') 2915 #Now we can write it 2916 if not outpath: 2917 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2918 elif isinstance(outpath, str): 2919 fsock = open(outpath, 'w') 2920 else: 2921 fsock = outpath 2922 fsock.write(template)
2923 2924 2925 2926 #=========================================================================== 2927 # Make the Helas and Model directories for Standalone directory 2928 #===========================================================================
2929 - def make(self):
2930 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2931 everything for running madweight 2932 """ 2933 2934 source_dir = os.path.join(self.dir_path, "Source") 2935 logger.info("Running make for Helas") 2936 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2937 logger.info("Running make for Model") 2938 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2939 logger.info("Running make for PDF") 2940 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2941 logger.info("Running make for CERNLIB") 2942 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2943 logger.info("Running make for GENERIC") 2944 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2945 logger.info("Running make for blocks") 2946 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2947 logger.info("Running make for tools") 2948 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2949 2950 #=========================================================================== 2951 # Create proc_card_mg5.dat for MadWeight directory 2952 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat

        Also writes maxparticles.inc (linked into the MadWeight blocks/tools
        directories), sets the compilers, builds the libraries via make(),
        and delegates to ProcessExporterFortran.finalize.
        """

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path, 'Source', 'maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # Link it where the MadWeight Fortran sources expect it.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source', 'MadWeight', 'blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source', 'MadWeight', 'tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
2984 2985 2986 2987 #=========================================================================== 2988 # create the run_card for MW 2989 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write run_card.dat and run_card_default.dat for MadWeight,
        starting from the default RunCard with MadWeight-specific settings
        (fixed scales, no cuts) applied."""

        run_card = banner_mod.RunCard()

        # pass to default for MW
        run_card["run_tag"] = "\'not_use\'"
        run_card["fixed_ren_scale"] = "T"
        run_card["fixed_fac_scale"] = "T"
        run_card.remove_all_cut()

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                       template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'),
                       python_template=True)
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'),
                       template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'),
                       python_template=True)
3007 3008 #=========================================================================== 3009 # export model files 3010 #===========================================================================
3011 - def export_model_files(self, model_path):
3012 """export the model dependent files for V4 model""" 3013 3014 super(ProcessExporterFortranMW,self).export_model_files(model_path) 3015 # Add the routine update_as_param in v4 model 3016 # This is a function created in the UFO 3017 text=""" 3018 subroutine update_as_param() 3019 call setpara('param_card.dat',.false.) 3020 return 3021 end 3022 """ 3023 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3024 ff.write(text) 3025 ff.close() 3026 3027 # Modify setrun.f 3028 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 3029 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3030 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 3031 fsock.write(text) 3032 fsock.close() 3033 3034 # Modify initialization.f 3035 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 3036 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 3037 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 3038 fsock.write(text) 3039 fsock.close() 3040 3041 3042 self.make_model_symbolic_link()
3043 3044 #=========================================================================== 3045 # generate_subprocess_directory 3046 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of helas calls written for matrix.f (0 if none).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; warn and keep writing into it.
            logger.warning(error.strerror + " " + dirpath)

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath, 'matrix.f')
        calls, ncolor = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t channel info reused by props.inc.
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams (unless eps output is disabled)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                  get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                  get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f',
                     'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc',
                     'setscales.f', 'genps.inc']

        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)

        if not calls:
            calls = 0
        return calls
3154 3155 #=========================================================================== 3156 # write_matrix_element_v4 3157 #===========================================================================
3158 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3159 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3160 3161 if not matrix_element.get('processes') or \ 3162 not matrix_element.get('diagrams'): 3163 return 0 3164 3165 if writer: 3166 if not isinstance(writer, writers.FortranWriter): 3167 raise writers.FortranWriter.FortranWriterError(\ 3168 "writer not FortranWriter") 3169 3170 # Set lowercase/uppercase Fortran code 3171 writers.FortranWriter.downcase = False 3172 3173 replace_dict = {} 3174 3175 # Extract version number and date from VERSION file 3176 info_lines = self.get_mg5_info_lines() 3177 replace_dict['info_lines'] = info_lines 3178 3179 # Extract process info lines 3180 process_lines = self.get_process_info_lines(matrix_element) 3181 replace_dict['process_lines'] = process_lines 3182 3183 # Set proc_id 3184 replace_dict['proc_id'] = proc_id 3185 3186 # Extract number of external particles 3187 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3188 replace_dict['nexternal'] = nexternal 3189 3190 # Extract ncomb 3191 ncomb = matrix_element.get_helicity_combinations() 3192 replace_dict['ncomb'] = ncomb 3193 3194 # Extract helicity lines 3195 helicity_lines = self.get_helicity_lines(matrix_element) 3196 replace_dict['helicity_lines'] = helicity_lines 3197 3198 # Extract overall denominator 3199 # Averaging initial state color, spin, and identical FS particles 3200 den_factor_line = self.get_den_factor_line(matrix_element) 3201 replace_dict['den_factor_line'] = den_factor_line 3202 3203 # Extract ngraphs 3204 ngraphs = matrix_element.get_number_of_amplitudes() 3205 replace_dict['ngraphs'] = ngraphs 3206 3207 # Extract nwavefuncs 3208 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3209 replace_dict['nwavefuncs'] = nwavefuncs 3210 3211 # Extract ncolor 3212 ncolor = max(1, len(matrix_element.get('color_basis'))) 3213 replace_dict['ncolor'] = ncolor 3214 3215 # Extract color data lines 3216 color_data_lines = self.get_color_data_lines(matrix_element) 3217 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3218 3219 # Extract helas calls 3220 helas_calls = fortran_model.get_matrix_element_calls(\ 3221 matrix_element) 3222 3223 replace_dict['helas_calls'] = "\n".join(helas_calls) 3224 3225 # Extract JAMP lines 3226 jamp_lines = self.get_JAMP_lines(matrix_element) 3227 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3228 3229 replace_dict['template_file'] = os.path.join(_file_path, \ 3230 'iolibs/template_files/%s' % self.matrix_file) 3231 replace_dict['template_file2'] = '' 3232 3233 if writer: 3234 file = open(replace_dict['template_file']).read() 3235 file = file % replace_dict 3236 # Write the file 3237 writer.writelines(file) 3238 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3239 else: 3240 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3241 3242 #=========================================================================== 3243 # write_source_makefile 3244 #===========================================================================
3245 - def write_source_makefile(self, writer):
3246 """Write the nexternal.inc file for madweight""" 3247 3248 3249 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3250 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3251 text = open(path).read() % {'libraries': set_of_lib} 3252 writer.write(text) 3253 3254 return True
3255
3256 - def write_phasespace_file(self, writer, nb_diag):
3257 """ """ 3258 3259 template = """ include 'maxparticles.inc' 3260 integer max_branches 3261 parameter (max_branches=max_particles-1) 3262 integer max_configs 3263 parameter (max_configs=%(nb_diag)s) 3264 3265 c channel position 3266 integer config_pos,perm_pos 3267 common /to_config/config_pos,perm_pos 3268 3269 """ 3270 3271 writer.write(template % {'nb_diag': nb_diag})
3272 3273 3274 #=========================================================================== 3275 # write_auto_dsig_file 3276 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format)

        When writer is falsy, returns the substitution dictionary instead of
        writing the file. Returns 0 for an empty matrix element.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError(\
                """Need ninitial = 1 or 2 to write auto_dsig file""")

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            # NOTE(review): template handle not closed (relies on GC).
            file = open(os.path.join(_file_path, \
                         'iolibs/template_files/auto_dsig_mw.inc')).read()
            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3345 #=========================================================================== 3346 # write_configs_file 3347 #===========================================================================
3348 - def write_configs_file(self, writer, matrix_element):
3349 """Write the configs.inc file for MadEvent""" 3350 3351 # Extract number of external particles 3352 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3353 3354 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3355 mapconfigs = [c[0] for c in configs] 3356 model = matrix_element.get('processes')[0].get('model') 3357 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3358 [[c[1]] for c in configs], 3359 mapconfigs, 3360 nexternal, ninitial,matrix_element, model)
3361 3362 #=========================================================================== 3363 # write_run_configs_file 3364 #===========================================================================
3365 - def write_run_config_file(self, writer):
3366 """Write the run_configs.inc file for MadWeight""" 3367 3368 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3369 text = open(path).read() % {'chanperjob':'5'} 3370 writer.write(text) 3371 return True
3372 3373 #=========================================================================== 3374 # write_configs_file_from_diagrams 3375 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channels, t-channels) per written config.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Vertex sizes per config (first non-None diagram of each config).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fake PDG used for multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel: one vertex per subprocess, take first non-None.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # propagator number followed by its two daughters
                line = str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id') != 7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else: # fake propagator used in multiparticle vertices
                        mass = 'zero'
                        width = 'zero'

                line = line+" "+mass+" "+width+" "

                if verts in schannels:
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))

        # Write out number of configs
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3518
#===============================================================================
# ProcessExporterFortranME
#===============================================================================
class ProcessExporterFortranME(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadEvent format."""

    # Template file used when writing matrix.f (presumably read by
    # write_matrix_element_v4 — TODO confirm against the rest of the file)
    matrix_file = "matrix_madevent_v4.inc"
    # NOTE(review): looks like a one-shot flag for a t-channel warning;
    # its setter is not visible in this chunk — confirm before relying on it
    done_warning_tchannel = False

    # helper function to customise the helas call writer
    @staticmethod
3533 - def custom_helas_call(call, arg):
3534 if arg['mass'] == '%(M)s,%(W)s,': 3535 arg['mass'] = '%(M)s, fk_%(W)s,' 3536 elif '%(W)s' in arg['mass']: 3537 raise Exception 3538 return call, arg
3539
3540 - def copy_template(self, model):
3541 """Additional actions needed for setup of Template 3542 """ 3543 3544 super(ProcessExporterFortranME, self).copy_template(model) 3545 3546 # File created from Template (Different in some child class) 3547 filename = pjoin(self.dir_path,'Source','run_config.inc') 3548 self.write_run_config_file(writers.FortranWriter(filename)) 3549 3550 # The next file are model dependant (due to SLAH convention) 3551 self.model_name = model.get('name') 3552 # Add the symmetry.f 3553 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3554 self.write_symmetry(writers.FortranWriter(filename)) 3555 # 3556 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3557 self.write_addmothers(writers.FortranWriter(filename)) 3558 # Copy the different python file in the Template 3559 self.copy_python_file()
3560 3561 3562 3563 3564 3565 3566 #=========================================================================== 3567 # generate_subprocess_directory 3568 #===========================================================================
3569 - def copy_python_file(self):
3570 """copy the python file require for the Template""" 3571 3572 # madevent interface 3573 cp(_file_path+'/interface/madevent_interface.py', 3574 self.dir_path+'/bin/internal/madevent_interface.py') 3575 cp(_file_path+'/interface/extended_cmd.py', 3576 self.dir_path+'/bin/internal/extended_cmd.py') 3577 cp(_file_path+'/interface/common_run_interface.py', 3578 self.dir_path+'/bin/internal/common_run_interface.py') 3579 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3580 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3581 cp(_file_path+'/iolibs/save_load_object.py', 3582 self.dir_path+'/bin/internal/save_load_object.py') 3583 cp(_file_path+'/iolibs/file_writers.py', 3584 self.dir_path+'/bin/internal/file_writers.py') 3585 #model file 3586 cp(_file_path+'../models/check_param_card.py', 3587 self.dir_path+'/bin/internal/check_param_card.py') 3588 3589 #copy all the file present in madevent directory 3590 for name in os.listdir(pjoin(_file_path, 'madevent')): 3591 if name not in ['__init__.py'] and name.endswith('.py'): 3592 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3593 3594 #madevent file 3595 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3596 cp(_file_path+'/various/lhe_parser.py', 3597 self.dir_path+'/bin/internal/lhe_parser.py') 3598 cp(_file_path+'/various/banner.py', 3599 self.dir_path+'/bin/internal/banner.py') 3600 cp(_file_path+'/various/histograms.py', 3601 self.dir_path+'/bin/internal/histograms.py') 3602 cp(_file_path+'/various/plot_djrs.py', 3603 self.dir_path+'/bin/internal/plot_djrs.py') 3604 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3605 3606 cp(_file_path+'/various/cluster.py', 3607 self.dir_path+'/bin/internal/cluster.py') 3608 cp(_file_path+'/madevent/combine_runs.py', 3609 self.dir_path+'/bin/internal/combine_runs.py') 3610 # logging configuration 3611 cp(_file_path+'/interface/.mg5_logging.conf', 3612 
self.dir_path+'/bin/internal/me5_logging.conf') 3613 cp(_file_path+'/interface/coloring_logging.py', 3614 self.dir_path+'/bin/internal/coloring_logging.py') 3615 # shower card and FO_analyse_card. 3616 # Although not needed, it is imported by banner.py 3617 cp(_file_path+'/various/shower_card.py', 3618 self.dir_path+'/bin/internal/shower_card.py') 3619 cp(_file_path+'/various/FO_analyse_card.py', 3620 self.dir_path+'/bin/internal/FO_analyse_card.py')
3621 3622
3623 - def convert_model(self, model, wanted_lorentz = [], 3624 wanted_couplings = []):
3625 3626 super(ProcessExporterFortranME,self).convert_model(model, 3627 wanted_lorentz, wanted_couplings) 3628 3629 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3630 try: 3631 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3632 except OSError as error: 3633 pass 3634 model_path = model.get('modelpath') 3635 # This is not safe if there is a '##' or '-' in the path. 3636 shutil.copytree(model_path, 3637 pjoin(self.dir_path,'bin','internal','ufomodel'), 3638 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3639 if hasattr(model, 'restrict_card'): 3640 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3641 'restrict_default.dat') 3642 if isinstance(model.restrict_card, check_param_card.ParamCard): 3643 model.restrict_card.write(out_path) 3644 else: 3645 files.cp(model.restrict_card, out_path)
3646 3647 #=========================================================================== 3648 # export model files 3649 #===========================================================================
3650 - def export_model_files(self, model_path):
3651 """export the model dependent files""" 3652 3653 super(ProcessExporterFortranME,self).export_model_files(model_path) 3654 3655 # Add the routine update_as_param in v4 model 3656 # This is a function created in the UFO 3657 text=""" 3658 subroutine update_as_param() 3659 call setpara('param_card.dat',.false.) 3660 return 3661 end 3662 """ 3663 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3664 ff.write(text) 3665 ff.close() 3666 3667 # Add the symmetry.f 3668 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3669 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3670 3671 # Modify setrun.f 3672 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3673 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3674 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3675 fsock.write(text) 3676 fsock.close() 3677 3678 self.make_model_symbolic_link()
3679 3680 #=========================================================================== 3681 # generate_subprocess_directory 3682 #===========================================================================
3683 - def generate_subprocess_directory(self, matrix_element, 3684 fortran_model, 3685 me_number):
3686 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3687 including the necessary matrix.f and various helper files""" 3688 3689 cwd = os.getcwd() 3690 path = pjoin(self.dir_path, 'SubProcesses') 3691 3692 3693 if not self.model: 3694 self.model = matrix_element.get('processes')[0].get('model') 3695 3696 3697 3698 #os.chdir(path) 3699 # Create the directory PN_xx_xxxxx in the specified path 3700 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3701 try: 3702 os.mkdir(pjoin(path,subprocdir)) 3703 except os.error as error: 3704 logger.warning(error.strerror + " " + subprocdir) 3705 3706 #try: 3707 # os.chdir(subprocdir) 3708 #except os.error: 3709 # logger.error('Could not cd to directory %s' % subprocdir) 3710 # return 0 3711 3712 logger.info('Creating files in directory %s' % subprocdir) 3713 Ppath = pjoin(path, subprocdir) 3714 3715 # Extract number of external particles 3716 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3717 3718 # Add the driver.f 3719 ncomb = matrix_element.get_helicity_combinations() 3720 filename = pjoin(Ppath,'driver.f') 3721 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3722 v5=self.opt['v5_model']) 3723 3724 # Create the matrix.f file, auto_dsig.f file and all inc files 3725 filename = pjoin(Ppath, 'matrix.f') 3726 calls, ncolor = \ 3727 self.write_matrix_element_v4(writers.FortranWriter(filename), 3728 matrix_element, fortran_model, subproc_number = me_number) 3729 3730 filename = pjoin(Ppath, 'auto_dsig.f') 3731 self.write_auto_dsig_file(writers.FortranWriter(filename), 3732 matrix_element) 3733 3734 filename = pjoin(Ppath, 'configs.inc') 3735 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3736 writers.FortranWriter(filename), 3737 matrix_element) 3738 3739 filename = pjoin(Ppath, 'config_nqcd.inc') 3740 self.write_config_nqcd_file(writers.FortranWriter(filename), 3741 nqcd_list) 3742 3743 filename = pjoin(Ppath, 
'config_subproc_map.inc') 3744 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3745 s_and_t_channels) 3746 3747 filename = pjoin(Ppath, 'coloramps.inc') 3748 self.write_coloramps_file(writers.FortranWriter(filename), 3749 mapconfigs, 3750 matrix_element) 3751 3752 filename = pjoin(Ppath, 'get_color.f') 3753 self.write_colors_file(writers.FortranWriter(filename), 3754 matrix_element) 3755 3756 filename = pjoin(Ppath, 'decayBW.inc') 3757 self.write_decayBW_file(writers.FortranWriter(filename), 3758 s_and_t_channels) 3759 3760 filename = pjoin(Ppath, 'dname.mg') 3761 self.write_dname_file(writers.FileWriter(filename), 3762 "P"+matrix_element.get('processes')[0].shell_string()) 3763 3764 filename = pjoin(Ppath, 'iproc.dat') 3765 self.write_iproc_file(writers.FortranWriter(filename), 3766 me_number) 3767 3768 filename = pjoin(Ppath, 'leshouche.inc') 3769 self.write_leshouche_file(writers.FortranWriter(filename), 3770 matrix_element) 3771 3772 filename = pjoin(Ppath, 'maxamps.inc') 3773 self.write_maxamps_file(writers.FortranWriter(filename), 3774 len(matrix_element.get('diagrams')), 3775 ncolor, 3776 len(matrix_element.get('processes')), 3777 1) 3778 3779 filename = pjoin(Ppath, 'mg.sym') 3780 self.write_mg_sym_file(writers.FortranWriter(filename), 3781 matrix_element) 3782 3783 filename = pjoin(Ppath, 'ncombs.inc') 3784 self.write_ncombs_file(writers.FortranWriter(filename), 3785 nexternal) 3786 3787 filename = pjoin(Ppath, 'nexternal.inc') 3788 self.write_nexternal_file(writers.FortranWriter(filename), 3789 nexternal, ninitial) 3790 3791 filename = pjoin(Ppath, 'ngraphs.inc') 3792 self.write_ngraphs_file(writers.FortranWriter(filename), 3793 len(mapconfigs)) 3794 3795 3796 filename = pjoin(Ppath, 'pmass.inc') 3797 self.write_pmass_file(writers.FortranWriter(filename), 3798 matrix_element) 3799 3800 filename = pjoin(Ppath, 'props.inc') 3801 self.write_props_file(writers.FortranWriter(filename), 3802 matrix_element, 3803 s_and_t_channels) 3804 3805 # 
Find config symmetries and permutations 3806 symmetry, perms, ident_perms = \ 3807 diagram_symmetry.find_symmetry(matrix_element) 3808 3809 filename = pjoin(Ppath, 'symswap.inc') 3810 self.write_symswap_file(writers.FortranWriter(filename), 3811 ident_perms) 3812 3813 filename = pjoin(Ppath, 'symfact_orig.dat') 3814 self.write_symfact_file(open(filename, 'w'), symmetry) 3815 3816 # Generate diagrams 3817 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 3818 filename = pjoin(Ppath, "matrix.ps") 3819 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3820 get('diagrams'), 3821 filename, 3822 model=matrix_element.get('processes')[0].\ 3823 get('model'), 3824 amplitude=True) 3825 logger.info("Generating Feynman diagrams for " + \ 3826 matrix_element.get('processes')[0].nice_string()) 3827 plot.draw() 3828 3829 self.link_files_in_SubProcess(Ppath) 3830 3831 #import nexternal/leshouche in Source 3832 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3833 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3834 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3835 # Return to SubProcesses dir 3836 #os.chdir(os.path.pardir) 3837 3838 # Add subprocess to subproc.mg 3839 filename = pjoin(path, 'subproc.mg') 3840 files.append_to_file(filename, 3841 self.write_subproc, 3842 subprocdir) 3843 3844 # Return to original dir 3845 #os.chdir(cwd) 3846 3847 # Generate info page 3848 gen_infohtml.make_info_html(self.dir_path) 3849 3850 3851 if not calls: 3852 calls = 0 3853 return calls
3854 3855 link_Sub_files = ['addmothers.f', 3856 'cluster.f', 3857 'cluster.inc', 3858 'coupl.inc', 3859 'cuts.f', 3860 'cuts.inc', 3861 'genps.f', 3862 'genps.inc', 3863 'idenparts.f', 3864 'initcluster.f', 3865 'makefile', 3866 'message.inc', 3867 'myamp.f', 3868 'reweight.f', 3869 'run.inc', 3870 'maxconfigs.inc', 3871 'maxparticles.inc', 3872 'run_config.inc', 3873 'lhe_event_infos.inc', 3874 'setcuts.f', 3875 'setscales.f', 3876 'sudakov.inc', 3877 'symmetry.f', 3878 'unwgt.f', 3879 'dummy_fct.f' 3880 ] 3881 3895 3896
3897 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3898 """Finalize ME v4 directory by creating jpeg diagrams, html 3899 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3900 3901 if 'nojpeg' in flaglist: 3902 makejpg = False 3903 else: 3904 makejpg = True 3905 if 'online' in flaglist: 3906 online = True 3907 else: 3908 online = False 3909 3910 compiler = {'fortran': mg5options['fortran_compiler'], 3911 'cpp': mg5options['cpp_compiler'], 3912 'f2py': mg5options['f2py_compiler']} 3913 3914 # indicate that the output type is not grouped 3915 if not isinstance(self, ProcessExporterFortranMEGroup): 3916 self.proc_characteristic['grouped_matrix'] = False 3917 3918 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3919 3920 # set limitation linked to the model 3921 3922 3923 # indicate the PDG of all initial particle 3924 try: 3925 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3926 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3927 except AttributeError: 3928 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3929 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3930 self.proc_characteristic['pdg_initial1'] = pdgs1 3931 self.proc_characteristic['pdg_initial2'] = pdgs2 3932 3933 3934 modelname = self.opt['model'] 3935 if modelname == 'mssm' or modelname.startswith('mssm-'): 3936 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3937 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3938 check_param_card.convert_to_mg5card(param_card, mg5_param) 3939 check_param_card.check_valid_param_card(mg5_param) 3940 3941 # Add the combine_events.f modify param_card path/number of @X 3942 filename = 
pjoin(self.dir_path,'Source','combine_events.f') 3943 try: 3944 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3945 except AttributeError: 3946 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3947 nb_proc = len(set(nb_proc)) 3948 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3949 # Write maxconfigs.inc based on max of ME's/subprocess groups 3950 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3951 self.write_maxconfigs_file(writers.FortranWriter(filename), 3952 matrix_elements) 3953 3954 # Write maxparticles.inc based on max of ME's/subprocess groups 3955 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3956 self.write_maxparticles_file(writers.FortranWriter(filename), 3957 matrix_elements) 3958 3959 # Touch "done" file 3960 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3961 3962 # Check for compiler 3963 self.set_compiler(compiler) 3964 self.set_cpp_compiler(compiler['cpp']) 3965 3966 3967 old_pos = os.getcwd() 3968 subpath = pjoin(self.dir_path, 'SubProcesses') 3969 3970 P_dir_list = [proc for proc in os.listdir(subpath) 3971 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3972 3973 devnull = os.open(os.devnull, os.O_RDWR) 3974 # Convert the poscript in jpg files (if authorize) 3975 if makejpg: 3976 try: 3977 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3978 except Exception as error: 3979 pass 3980 3981 if misc.which('gs'): 3982 logger.info("Generate jpeg diagrams") 3983 for Pdir in P_dir_list: 3984 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3985 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3986 3987 logger.info("Generate web pages") 3988 # Create the WebPage using perl script 3989 3990 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3991 stdout = devnull,cwd=pjoin(self.dir_path)) 3992 3993 
#os.chdir(os.path.pardir) 3994 3995 obj = gen_infohtml.make_info_html(self.dir_path) 3996 3997 if online: 3998 nb_channel = obj.rep_rule['nb_gen_diag'] 3999 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 4000 #add the information to proc_charac 4001 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 4002 4003 # Write command history as proc_card_mg5 4004 if os.path.isdir(pjoin(self.dir_path,'Cards')): 4005 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 4006 history.write(output_file) 4007 4008 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 4009 stdout = devnull) 4010 4011 #crate the proc_characteristic file 4012 self.create_proc_charac(matrix_elements, history) 4013 4014 # create the run_card 4015 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 4016 4017 # Run "make" to generate madevent.tar.gz file 4018 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 4019 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 4020 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 4021 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 4022 stdout = devnull, cwd=self.dir_path) 4023 4024 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 4025 stdout = devnull, cwd=self.dir_path)
4026 4027 4028 4029 4030 4031 4032 #return to the initial dir 4033 #os.chdir(old_pos) 4034 4035 #=========================================================================== 4036 # write_matrix_element_v4 4037 #===========================================================================
def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                            proc_id = "", config_map = [], subproc_number = ""):
    """Export a matrix element to a matrix.f file in MG4 madevent format.

    Parameters:
        writer: a writers.FortranWriter, or a false value, in which case no
            file is written and the substitution dictionary is returned.
        matrix_element: helas matrix element providing processes, diagrams,
            color basis and helicity information.
        fortran_model: helas call writer used to generate the amplitude calls.
        proc_id: subprocess-group id (string); empty for standard running.
        config_map: configuration map forwarded to get_amp2_lines.
            NOTE(review): mutable default argument -- safe only if
            get_amp2_lines never mutates it; confirm before changing.
        subproc_number: unused in this method; kept for interface
            compatibility with other exporters.

    Returns 0 if the matrix element is empty; otherwise the tuple
    (number of non-comment helas calls, ncolor) when a writer is given, or
    the substitution dictionary (with 'return_value' set) when it is not.
    """

    # Nothing to export for an empty matrix element.
    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    if writer:
        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter")
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

    # check if MLM/.../ is supported for this matrix-element and update associate flag
    if self.model and 'MLM' in self.model["limitations"]:
        if 'MLM' not in self.proc_characteristic["limitations"]:
            used_couplings = matrix_element.get_used_couplings(output="set")
            for vertex in self.model.get('interactions'):
                particles = [p for p in vertex.get('particles')]
                # Only vertices involving a gluon (pdg 21) are relevant here.
                if 21 in [p.get('pdg_code') for p in particles]:
                    colors = [par.get('color') for par in particles]
                    if 1 in colors:
                        continue
                    elif 'QCD' not in vertex.get('orders'):
                        # A colored gluon vertex without a QCD order whose
                        # coupling is actually used breaks the MLM assumption.
                        for bad_coup in vertex.get('couplings').values():
                            if bad_coup in used_couplings:
                                self.proc_characteristic["limitations"].append('MLM')
                                break

    # The proc prefix is not used for MadEvent output so it can safely be set
    # to an empty string.
    replace_dict = {'proc_prefix':''}

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(\
                matrix_element)
    # Warn (once per run, via the class flag) when T-channel widths were zeroed.
    if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel:
        logger.info("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False", '$MG:BOLD')
        ProcessExporterFortranME.done_warning_tchannel = True

    replace_dict['helas_calls'] = "\n".join(helas_calls)


    #adding the support for the fake width (forbidding too small width)
    mass_width = matrix_element.get_all_mass_widths()
    mass_width = sorted(list(mass_width))
    width_list = set([e[1] for e in mass_width])

    # One declaration + one SAVE statement per distinct width symbol.
    replace_dict['fake_width_declaration'] = \
        (' double precision fk_%s \n' * len(width_list)) % tuple(width_list)
    replace_dict['fake_width_declaration'] += \
        (' save fk_%s \n' * len(width_list)) % tuple(width_list)
    fk_w_defs = []
    # Clamp each nonzero width to at least |mass * small_width_treatment|,
    # keeping the original sign.
    one_def = ' IF(%(w)s.ne.0d0) fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)'
    for m, w in mass_width:
        if w == 'zero':
            if ' fk_zero = 0d0' not in fk_w_defs:
                fk_w_defs.append(' fk_zero = 0d0')
            continue
        fk_w_defs.append(one_def %{'m':m, 'w':w})
    replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs)

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id

    # Extract ncomb
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    den_factor_line = self.get_den_factor_line(matrix_element)
    replace_dict['den_factor_line'] = den_factor_line

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract ndiags
    ndiags = len(matrix_element.get('diagrams'))
    replace_dict['ndiags'] = ndiags

    # Set define_iconfigs_lines
    replace_dict['define_iconfigs_lines'] = \
         """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
         COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

    if proc_id:
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] += \
             """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
             COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                        proc_id
    else:
        # Standard running
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)


    # Set the size of Wavefunction
    # Spin 4/5 particles (spin-3/2 and spin-2) need the larger representation.
    if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
        replace_dict['wavefunctionsize'] = 18
    else:
        replace_dict['wavefunctionsize'] = 6

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element, config_map)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # The JAMP definition depends on the splitting order
    split_orders=matrix_element.get('processes')[0].get('split_orders')
    if len(split_orders)>0:
        squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
        replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                          matrix_element.get('processes')[0],squared_orders)
    else:
        # Consider the output of a dummy order 'ALL_ORDERS' for which we
        # set all amplitude order to weight 1 and only one squared order
        # contribution which is of course ALL_ORDERS=2.
        squared_orders = [(2,),]
        amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
        replace_dict['chosen_so_configs'] = '.TRUE.'

    replace_dict['nAmpSplitOrders']=len(amp_orders)
    replace_dict['nSqAmpSplitOrders']=len(squared_orders)
    replace_dict['split_order_str_list']=str(split_orders)
    replace_dict['nSplitOrders']=max(len(split_orders),1)
    amp_so = self.get_split_orders_lines(
        [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
    sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
    replace_dict['ampsplitorders']='\n'.join(amp_so)
    replace_dict['sqsplitorders']='\n'.join(sqamp_so)


    # Extract JAMP lines
    # If no split_orders then artificially add one entry called 'ALL_ORDERS'
    jamp_lines = self.get_JAMP_lines_split_order(\
        matrix_element,amp_orders,split_order_names=
        split_orders if len(split_orders)>0 else ['ALL_ORDERS'])
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

    replace_dict['template_file'] = pjoin(_file_path, \
                     'iolibs/template_files/%s' % self.matrix_file)
    replace_dict['template_file2'] = pjoin(_file_path, \
                     'iolibs/template_files/split_orders_helping_functions.inc')

    s1,s2 = matrix_element.get_spin_state_initial()
    replace_dict['nb_spin_state1'] = s1
    replace_dict['nb_spin_state2'] = s2

    if writer:
        file = open(replace_dict['template_file']).read()
        file = file % replace_dict
        # Add the split orders helper functions.
        file = file + '\n' + open(replace_dict['template_file2'])\
                                       .read()%replace_dict
        # Write the file
        writer.writelines(file)
        # Count only the real calls, not the '#' comment lines.
        return len([call for call in helas_calls if call.find('#') != 0]), ncolor
    else:
        replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor)
        return replace_dict
4233 4234 #=========================================================================== 4235 # write_auto_dsig_file 4236 #===========================================================================
def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
    """Write the auto_dsig.f file for the differential cross section
    calculation, includes pdf call information.

    Parameters:
        writer: writer used for output; when false, the method instead
            returns (replace_dict, context) for the caller to use.
        matrix_element: helas matrix element being exported.
        proc_id: subprocess-group id (string); empty for standard running.

    Side effects: updates self.proc_characteristic entries 'ninitial',
    'nexternal', 'max_n_matched_jets' and 'colored_pdgs'.

    Returns 0 for an empty matrix element.
    Raises writers.FortranWriter.FortranWriterError unless ninitial is 1 or 2.
    """

    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    self.proc_characteristic['ninitial'] = ninitial
    # Keep the maximum over all matrix elements exported so far.
    self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

    # Add information relevant for MLM matching:
    # Maximum QCD power in all the contributions
    max_qcd_order = 0
    for diag in matrix_element.get('diagrams'):
        orders = diag.calculate_orders()
        if 'QCD' in orders:
            max_qcd_order = max(max_qcd_order,orders['QCD'])
    # Count massless colored final-state particles per process, keep the max.
    max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
        if proc.get('model').get_particle(id).get('mass')=='ZERO' and
           proc.get('model').get_particle(id).get('color')>1])
                                    for proc in matrix_element.get('processes'))
    # Maximum number of final state light jets to be matched
    self.proc_characteristic['max_n_matched_jets'] = max(
        self.proc_characteristic['max_n_matched_jets'],
        min(max_qcd_order,max_n_light_final_partons))

    # List of default pdgs to be considered for the CKKWl merging cut
    self.proc_characteristic['colored_pdgs'] = \
        sorted(list(set([abs(p.get('pdg_code')) for p in
          matrix_element.get('processes')[0].get('model').get('particles') if
          p.get('color')>1])))

    if ninitial < 1 or ninitial > 2:
        raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id
    replace_dict['numproc'] = 1

    # Set dsig_line
    if ninitial == 1:
        # No conversion, since result of decay should be given in GeV
        dsig_line = "pd(0)*dsiguu"
    else:
        # Convert result (in GeV) to pb
        dsig_line = "pd(0)*conv*dsiguu"

    replace_dict['dsig_line'] = dsig_line

    # Extract pdf lines
    pdf_vars, pdf_data, pdf_lines = \
             self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # Lines that differ between subprocess group and regular
    if proc_id:
        replace_dict['numproc'] = int(proc_id)
        replace_dict['passcuts_begin'] = ""
        replace_dict['passcuts_end'] = ""
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_subdiag_lines'] = \
             """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
             COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        replace_dict['cutsdone'] = ""
    else:
        replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
        replace_dict['passcuts_end'] = "ENDIF"
        replace_dict['define_subdiag_lines'] = ""
        replace_dict['cutsdone'] = "      cutsdone=.false.\n       cutspassed=.false."

    # Grouped output reads/writes good helicities elsewhere, so the helper
    # routines are only emitted for the ungrouped exporter.
    if not isinstance(self, ProcessExporterFortranMEGroup):
        ncomb=matrix_element.get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
    else:
        replace_dict['read_write_good_hel'] = ""

    context = {'read_write_good_hel':True}

    if writer:
        file = open(pjoin(_file_path, \
                     'iolibs/template_files/auto_dsig_v4.inc')).read()
        file = file % replace_dict

        # Write the file
        writer.writelines(file, context=context)
    else:
        return replace_dict, context
4339 #=========================================================================== 4340 # write_coloramps_file 4341 #===========================================================================
def write_coloramps_file(self, writer, mapconfigs, matrix_element):
    """Create the coloramps.inc file for MadEvent.

    Prepends the icolamp logical-array declaration (dimensioned by the
    number of color flows and the number of configs) to the data lines
    produced by get_icolamp_lines, then writes everything out.
    """

    n_flows = max(len(list(matrix_element.get('color_basis').keys())), 1)
    declaration = "logical icolamp(%d,%d,1)" % (n_flows, len(mapconfigs))
    body = self.get_icolamp_lines(mapconfigs, matrix_element, 1)

    # Write the file
    writer.writelines([declaration] + body)

    return True
4355 4356 #=========================================================================== 4357 # write_colors_file 4358 #===========================================================================
def write_colors_file(self, writer, matrix_elements):
    """Write the get_color.f file for MadEvent, which returns color
    for all particles used in the matrix element."""

    if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
        matrix_elements = [matrix_elements]

    model = matrix_elements[0].get('processes')[0].get('model')

    # Collect both particle and antiparticle codes, since the identity
    # depends on the direction of the wavefunction.
    wf_ids = set()
    for me in matrix_elements:
        for diagram in me.get('diagrams'):
            for wf in diagram.get('wavefunctions'):
                wf_ids.add(wf.get_pdg_code())
                wf_ids.add(wf.get_anti_pdg_code())

    leg_ids = set()
    for me in matrix_elements:
        for process in me.get('processes'):
            for leg in process.get_legs_with_decays():
                leg_ids.add(leg.get('id'))
                leg_ids.add(model.get_particle(leg.get('id')).get_anti_pdg_code())

    particle_ids = sorted(wf_ids | leg_ids)

    first_id = particle_ids[0]
    lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (first_id, model.get_particle(first_id).get_color())

    for part_id in particle_ids[1:]:
        lines += """else if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (part_id, model.get_particle(part_id).get_color())
    # Dummy particle for multiparticle vertices with pdg given by
    # first code not in the model
    lines += """else if(ipdg.eq.%d)then
c       This is dummy particle used in multiparticle vertices
        get_color=2
        return
        """ % model.get_first_non_pdg()
    lines += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

    # Write the file
    writer.writelines(lines)

    return True
4415 4416 #=========================================================================== 4417 # write_config_nqcd_file 4418 #===========================================================================
def write_config_nqcd_file(self, writer, nqcd_list):
    """Write config_nqcd.inc: one DATA statement per config giving the
    number of QCD couplings of that config."""

    data_lines = ["data nqcd(%d)/%d/" % (index, nqcd)
                  for index, nqcd in enumerate(nqcd_list, start=1)]

    # Write the file
    writer.writelines(data_lines)

    return True
4431 4432 #=========================================================================== 4433 # write_maxconfigs_file 4434 #===========================================================================
def write_maxconfigs_file(self, writer, matrix_elements):
    """Write the maxconfigs.inc file for MadEvent.

    lmaxconfigs is the maximum number of configurations over all
    matrix elements (or subprocess groups)."""

    if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
        me_list = matrix_elements.get('matrix_elements')
    else:
        me_list = matrix_elements
    maxconfigs = max(me.get_num_configs() for me in me_list)

    # Write the file
    writer.writelines("integer lmaxconfigs\n"
                      "parameter(lmaxconfigs=%d)" % maxconfigs)

    return True
4451 4452 #=========================================================================== 4453 # read_write_good_hel 4454 #===========================================================================
def read_write_good_hel(self, ncomb):
    """Return the Fortran source for the routines handling the
    good-helicity common block: write_good_hel / read_good_hel /
    init_good_hel plus the get_maxsproc helper."""

    template = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER    NCOMB
        PARAMETER (           NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER    NCOMB
        PARAMETER (           NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER    NCOMB
        PARAMETER (           NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        INTEGER I

        do i=1,NCOMB
            GOODHEL(I) = .false.
        enddo
        NTRY = 0
        end

        integer function get_maxsproc()
        implicit none
        get_maxsproc = 1
        return
        end

        """
    return template % {'ncomb': ncomb}
4510 4511 #=========================================================================== 4512 # write_config_subproc_map_file 4513 #===========================================================================
def write_config_subproc_map_file(self, writer, s_and_t_channels):
    """Write a dummy config_subproc.inc file for MadEvent: a trivial
    CONFSUB entry (always subprocess 1) for each config."""

    writer.writelines(["DATA CONFSUB(1,%d)/1/" % iconf
                       for iconf in range(1, len(s_and_t_channels) + 1)])

    return True
4527 4528 #=========================================================================== 4529 # write_configs_file 4530 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Thin wrapper around write_configs_file_from_diagrams for the
    ungrouped (single-subprocess) case: every diagram becomes one
    config, numbered from 1.
    """

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    model = matrix_element.get('processes')[0].get('model')

    diagrams = matrix_element.get('diagrams')
    mapconfigs = list(range(1, len(diagrams) + 1))
    result = self.write_configs_file_from_diagrams(
        writer, [[diagram] for diagram in diagrams], mapconfigs,
        nexternal, ninitial, model)
    return mapconfigs, result
4546 4547 #=========================================================================== 4548 # write_run_configs_file 4549 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Returns True after writing when a writer is given; otherwise returns
    the substitution dictionary.
    """

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_run_config.inc')

    # Loop-induced processes get one channel per job, others five.
    job_per_chan = 1 if self.proc_characteristic['loop_induced'] else 5
    replace_dict = {'chanperjob': job_per_chan}

    if not writer:
        return replace_dict
    writer.write(open(template).read() % replace_dict)
    return True
4566 4567 #=========================================================================== 4568 # write_configs_file_from_diagrams 4569 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns (s_and_t_channels, nqcd_list): the first s/t-channel
    decomposition retained per config, and the number of QCD couplings
    per config.
    """

    lines = []

    s_and_t_channels = []

    nqcd_list = []

    # Smallest maximum-vertex-size over all configs; configs containing a
    # larger vertex are skipped below (only 3-point vertices are allowed
    # in configs.inc).
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # PDG code used for multiparticle-vertex dummy propagators.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        # NOTE: empty_verts is shared (by reference) by every missing
        # subprocess and padded in place below, so all placeholders stay
        # the same length as the real vertex lists.
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model,
                                                       new_pdg))
            else:
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = list(zip(*[s for s,t in stchannels]))
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))
        # Number of QCD couplings in this diagram
        # (taken from the first contributing subprocess; 0 if the diagram
        # carries no 'QCD' order)
        nqcd = 0
        for h in helas_diags:
            if h:
                try:
                    nqcd = h.calculate_orders()['QCD']
                except KeyError:
                    pass
                break
            else:
                continue

        nqcd_list.append(nqcd)

        for verts in allchannels:
            if verts in schannels:
                # s-channel entry: one vertex per subprocess, pick the
                # first non-None one as representative.
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 where the
                # subprocess has no corresponding diagram).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels, nqcd_list
4703 4704 #=========================================================================== 4705 # write_decayBW_file 4706 #===========================================================================
def write_decayBW_file(self, writer, s_and_t_channels):
    """Write the decayBW.inc file for MadEvent.

    For every s-channel propagator, gForceBW encodes the onshell flag of
    the resulting leg: None -> 0 (no constraint), True -> 1 (force
    Breit-Wigner), False -> 2 (forbid Breit-Wigner).
    """

    onshell_code = {None: "0", True: "1", False: "2"}

    data_lines = []
    for iconf, config in enumerate(s_and_t_channels, start=1):
        for vertex in config[0]:
            # The last leg of each vertex is the produced propagator.
            resulting_leg = vertex.get('legs')[-1]
            data_lines.append(
                "data gForceBW(%d,%d)/%s/"
                % (resulting_leg.get('number'), iconf,
                   onshell_code[resulting_leg.get('onshell')]))

    # Write the file
    writer.writelines(data_lines)

    return True
4728 4729 #=========================================================================== 4730 # write_dname_file 4731 #===========================================================================
def write_dname_file(self, writer, dir_name):
    """Write dname.mg, the one-line MG4 file recording the subprocess
    directory name."""

    writer.write("DIRNAME=%s\n" % dir_name)

    return True
4741 4742 #=========================================================================== 4743 # write_driver 4744 #===========================================================================
def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
    """Write the SubProcess/driver.f file for MG4.

    Returns True after writing when a writer is given; otherwise returns
    the substitution dictionary.
    """

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_driver.f')

    # mssm-like models read the converted MG5 param card instead of the
    # raw param_card.dat.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # Require each helicity configuration to be probed by 10 matrix-element
    # points before the resulting grid is used for MC over helicity
    # sampling; times 2 because each grouped subprocess is called at most
    # twice for each IMIRROR.
    replace_dict = {
        'param_card_name': card,
        'ncomb': ncomb,
        'hel_init_points': n_grouped_proc * 10 * 2,
        'secondparam': '' if v5 else ',.true.',
    }

    if not writer:
        return replace_dict
    writer.write(open(template_path).read() % replace_dict)
    return True
4773 4774 #=========================================================================== 4775 # write_addmothers 4776 #===========================================================================
def write_addmothers(self, writer):
    """Write the SubProcess/addmothers.f file.

    Reads the addmothers.f template, substitutes the name of the
    configuration-index variable ('diag_number'), and writes the result
    through the given writer.

    Returns True on success.
    """

    path = pjoin(_file_path, 'iolibs', 'template_files', 'addmothers.f')

    # Use a context manager so the template file handle is closed
    # deterministically instead of relying on garbage collection.
    with open(path) as template:
        text = template.read() % {'iconfig': 'diag_number'}
    writer.write(text)

    return True
4786 4787 4788 #=========================================================================== 4789 # write_combine_events 4790 #===========================================================================
def write_combine_events(self, writer, nb_proc=100):
    """Write the Source/combine_events.f file for MG4.

    (The previous docstring wrongly described this as writing driver.f.)

    Parameters:
        writer: writer receiving the generated Fortran source.
        nb_proc: number of distinct @X process classes in the run; used to
            size maxpup.

    Returns True on success.
    """

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_combine_events.f')

    # mssm-like models read the converted MG5 param card.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # set maxpup (number of @X in the process card);
    # the +1 is just a security. This is not needed but I feel(OM) safer with it.
    # Context manager closes the template handle deterministically.
    with open(path) as template:
        text = template.read() % {'param_card_name': card,
                                  'maxpup': nb_proc + 1}
    writer.write(text)

    return True
4808 4809 4810 #=========================================================================== 4811 # write_symmetry 4812 #===========================================================================
def write_symmetry(self, writer, v5=True):
    """Write the SubProcess symmetry file for MadEvent from its template.

    Returns True after writing when a writer is given; otherwise returns
    the substitution dictionary.
    """

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_symmetry.f')

    # mssm-like models read the converted MG5 param card.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    replace_dict = {'param_card_name': card,
                    'setparasecondarg': '' if v5 else ',.true.'}

    if not writer:
        return replace_dict
    writer.write(open(path).read() % replace_dict)
    return True
4835 4836 4837 4838 #=========================================================================== 4839 # write_iproc_file 4840 #===========================================================================
def write_iproc_file(self, writer, me_number):
    """Write iproc.dat for MG4, containing the 1-based matrix-element
    number formatted through the writer's own line formatter."""

    formatted = writer.write_line("%d" % (me_number + 1))

    # Write the file
    for chunk in formatted:
        writer.write(chunk)
    return True
4849 4850 #=========================================================================== 4851 # write_mg_sym_file 4852 #===========================================================================
def write_mg_sym_file(self, writer, matrix_element):
    """Write the mg.sym file for MadEvent.

    The file lists groups of identical final-state particles: first the
    number of groups, then for each group its size followed by the
    1-based positions (counting initial-state legs) of its members.
    """

    process = matrix_element.get('processes')[0]

    # Final-state legs with all decays included.
    final_legs = [leg for leg in process.get_legs_with_decays()
                  if leg.get('state') == True]

    ninitial = len([leg for leg in process.get('legs')
                    if leg.get('state') == False])

    # Map each final-state pdg id to the event-record positions at which
    # it appears (first-appearance order is preserved).
    positions_by_id = {}
    for offset, leg in enumerate(final_legs):
        positions_by_id.setdefault(leg.get('id'), []).append(
            offset + ninitial + 1)

    # Only ids occurring more than once form a symmetry group.
    groups = [positions for positions in positions_by_id.values()
              if len(positions) > 1]

    out_lines = [str(len(groups))]
    for positions in groups:
        out_lines.append(str(len(positions)))
        out_lines.extend(str(number) for number in positions)

    # Write the file
    writer.writelines(out_lines)

    return True
4889 4890 #=========================================================================== 4891 # write_mg_sym_file 4892 #===========================================================================
def write_default_mg_sym_file(self, writer):
    """Write a trivial mg.sym file for MadEvent.

    "0" means there are no groups of identical final-state particles.
    """
    writer.writelines("0")
    return True
4902 4903 #=========================================================================== 4904 # write_ncombs_file 4905 #===========================================================================
def write_ncombs_file(self, writer, nexternal):
    """Write the ncombs.inc file for MadEvent.

    n_max_cl, used by the clustering code, is 2**nexternal for
    `nexternal` external legs.
    """
    # Fix: local was named `file`, shadowing the builtin; renamed to `text`.
    text = " integer n_max_cl\n"
    text += "parameter (n_max_cl=%d)" % (2 ** nexternal)

    writer.writelines(text)
    return True
4917 4918 #=========================================================================== 4919 # write_processes_file 4920 #===========================================================================
def write_processes_file(self, writer, subproc_group):
    """Write the processes.dat file with info about the subprocesses
    in this group, one index line plus one mirror line per ME."""
    out = []

    for idx, me in enumerate(subproc_group.get('matrix_elements')):
        label = str(idx + 1)
        # Left-justify the 1-based index in a fixed 7-character field.
        out.append("%s %s" % (label + " " * (7 - len(label)),
                              ",".join(p.base_string()
                                       for p in me.get('processes'))))
        if me.get('has_mirror_process'):
            # Mirror process: swap the two initial-state legs on copies.
            mirrored = [copy.copy(p) for p in me.get('processes')]
            for proc in mirrored:
                legs = copy.copy(proc.get('legs_with_decays'))
                legs.insert(0, legs.pop(1))
                proc.set("legs_with_decays", legs)
            out.append("mirror %s" % ",".join(p.base_string()
                                              for p in mirrored))
        else:
            out.append("mirror none")

    writer.write("\n".join(out))
    return True
4947 4948 #=========================================================================== 4949 # write_symswap_file 4950 #===========================================================================
def write_symswap_file(self, writer, ident_perms):
    """Write symswap.inc for MG4: the external-momentum permutations
    that map a configuration onto itself."""
    # Fortran arrays are 1-based, hence the +1 on both indices.
    out = ["data (isym(i,%d),i=1,nexternal)/%s/"
           % (n + 1, ",".join(str(leg + 1) for leg in perm))
           for n, perm in enumerate(ident_perms)]
    out.append("data nsym/%d/" % len(ident_perms))
    writer.writelines(out)
    return True
4968 4969 #=========================================================================== 4970 # write_symfact_file 4971 #===========================================================================
def write_symfact_file(self, writer, symmetry):
    """Write symfact.dat for MG4: one "config symmetry-factor" pair per
    surviving configuration.

    Fix: guard math.log10 against an empty `symmetry` list, which
    previously raised a math domain ValueError; behaviour for non-empty
    input is unchanged.
    """
    # Column width grows with the number of configurations.
    pos = max(2, int(math.ceil(math.log10(max(1, len(symmetry))))))
    form = "%" + str(pos) + "r %" + str(pos + 1) + "r"
    # s == 0 marks a configuration removed by symmetry; skip it entirely.
    lines = [form % (i + 1, s) for i, s in enumerate(symmetry) if s != 0]
    writer.write('\n'.join(lines))
    writer.write('\n')

    return True
4986 4987 #=========================================================================== 4988 # write_symperms_file 4989 #===========================================================================
def write_symperms_file(self, writer, perms):
    """Write symperms.inc for a subprocess group: the momentum
    permutations used for symmetric configurations."""
    out = ["data (perms(i,%d),i=1,nexternal)/%s/"
           % (n + 1, ",".join(str(p + 1) for p in perm))
           for n, perm in enumerate(perms)]
    writer.writelines(out)
    return True
5003 5004 #=========================================================================== 5005 # write_subproc 5006 #===========================================================================
def write_subproc(self, writer, subprocdir):
    """Append this subprocess directory name to the subproc.mg file."""
    writer.write("%s\n" % subprocdir)
    return True
5014
5015 #=============================================================================== 5016 # ProcessExporterFortranMEGroup 5017 #=============================================================================== 5018 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
5019 """Class to take care of exporting a set of matrix elements to 5020 MadEvent subprocess group format.""" 5021 5022 matrix_file = "matrix_madevent_group_v4.inc" 5023 grouped_mode = 'madevent' 5024 #=========================================================================== 5025 # generate_subprocess_directory 5026 #===========================================================================
def generate_subprocess_directory(self, subproc_group,
                                         fortran_model,
                                         group_number):
    """Generate the Pn directory for a subprocess group in MadEvent,
    including the necessary matrix_N.f files, configs.inc and various
    other helper files.

    Works by chdir-ing into SubProcesses/P<n>_<name> and writing every
    support file there; returns the total number of helas calls, or 0
    if the directory could not be entered.
    """

    assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                  "subproc_group object not SubProcessGroup"

    # Lazily take the model from the first process if not yet set.
    if not self.model:
        self.model = subproc_group.get('matrix_elements')[0].\
                     get('processes')[0].get('model')

    cwd = os.getcwd()
    path = pjoin(self.dir_path, 'SubProcesses')

    os.chdir(path)
    pathdir = os.getcwd()

    # Create the directory PN in the specified path
    subprocdir = "P%d_%s" % (subproc_group.get('number'),
                             subproc_group.get('name'))
    try:
        os.mkdir(subprocdir)
    except os.error as error:
        # Directory may already exist; warn and reuse it.
        logger.warning(error.strerror + " " + subprocdir)

    try:
        os.chdir(subprocdir)
    except os.error:
        logger.error('Could not cd to directory %s' % subprocdir)
        return 0

    logger.info('Creating files in directory %s' % subprocdir)

    # Create the matrix.f files, auto_dsig.f files and all inc files
    # for all subprocesses in the group

    maxamps = 0    # max number of diagrams over the MEs in the group
    maxflows = 0   # max number of color flows over the MEs
    tot_calls = 0  # total number of helas calls (return value)

    matrix_elements = subproc_group.get('matrix_elements')

    # Add the driver.f, all grouped ME's must share the same number of
    # helicity configuration
    ncomb = matrix_elements[0].get_helicity_combinations()
    for me in matrix_elements[1:]:
        if ncomb != me.get_helicity_combinations():
            raise MadGraph5Error("All grouped processes must share the "+\
                                 "same number of helicity configurations.")

    filename = 'driver.f'
    self.write_driver(writers.FortranWriter(filename), ncomb,
                      n_grouped_proc=len(matrix_elements),
                      v5=self.opt['v5_model'])

    # One matrix<i>.f / auto_dsig<i>.f pair per matrix element.
    for ime, matrix_element in \
            enumerate(matrix_elements):
        filename = 'matrix%d.f' % (ime+1)
        calls, ncolor = \
           self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)

        filename = 'auto_dsig%d.f' % (ime+1)
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  str(ime+1))

        # Keep track of needed quantities
        tot_calls += int(calls)
        maxflows = max(maxflows, ncolor)
        maxamps = max(maxamps, len(matrix_element.get('diagrams')))

        # Draw diagrams (skipped when the 'noeps' output option is 'True')
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = "matrix%d.ps" % (ime+1)
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

    # Extract number of external particles
    # NOTE(review): reuses the loop variable `matrix_element` (last ME of
    # the group); all MEs in a group share nexternal, so this is safe.
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Generate a list of diagrams corresponding to each configuration
    # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
    # If a subprocess has no diagrams for this config, the number is 0
    subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

    filename = 'auto_dsig.f'
    self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                    subproc_group)

    filename = 'coloramps.inc'
    self.write_coloramps_file(writers.FortranWriter(filename),
                              subproc_diagrams_for_config,
                              maxflows,
                              matrix_elements)

    filename = 'get_color.f'
    self.write_colors_file(writers.FortranWriter(filename),
                           matrix_elements)

    filename = 'config_subproc_map.inc'
    self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                       subproc_diagrams_for_config)

    filename = 'configs.inc'
    nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
        writers.FortranWriter(filename),
        subproc_group,
        subproc_diagrams_for_config)

    filename = 'config_nqcd.inc'
    self.write_config_nqcd_file(writers.FortranWriter(filename),
                                nqcd_list)

    filename = 'decayBW.inc'
    self.write_decayBW_file(writers.FortranWriter(filename),
                            s_and_t_channels)

    filename = 'dname.mg'
    self.write_dname_file(writers.FortranWriter(filename),
                          subprocdir)

    filename = 'iproc.dat'
    self.write_iproc_file(writers.FortranWriter(filename),
                          group_number)

    filename = 'leshouche.inc'
    self.write_leshouche_file(writers.FortranWriter(filename),
                              subproc_group)

    filename = 'maxamps.inc'
    self.write_maxamps_file(writers.FortranWriter(filename),
                            maxamps,
                            maxflows,
                            max([len(me.get('processes')) for me in \
                                 matrix_elements]),
                            len(matrix_elements))

    # Note that mg.sym is not relevant for this case
    filename = 'mg.sym'
    self.write_default_mg_sym_file(writers.FortranWriter(filename))

    filename = 'mirrorprocs.inc'
    self.write_mirrorprocs(writers.FortranWriter(filename),
                           subproc_group)

    filename = 'ncombs.inc'
    self.write_ncombs_file(writers.FortranWriter(filename),
                           nexternal)

    filename = 'nexternal.inc'
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = 'ngraphs.inc'
    self.write_ngraphs_file(writers.FortranWriter(filename),
                            nconfigs)

    filename = 'pmass.inc'
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = 'props.inc'
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    filename = 'processes.dat'
    files.write_to_file(filename,
                        self.write_processes_file,
                        subproc_group)

    # Find config symmetries and permutations
    symmetry, perms, ident_perms = \
              diagram_symmetry.find_symmetry(subproc_group)

    filename = 'symswap.inc'
    self.write_symswap_file(writers.FortranWriter(filename),
                            ident_perms)

    # Raw file here (no FortranWriter): symfact.dat is a plain data file.
    filename = 'symfact_orig.dat'
    self.write_symfact_file(open(filename, 'w'), symmetry)

    # check consistency: configs identified by symmetry (sym_fact < 0)
    # must have the same QCD power as the config they point to.
    for i, sym_fact in enumerate(symmetry):
        if sym_fact >= 0:
            continue
        if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]:
            # NOTE(review): debug print uses nqcd_list[abs(sym_fact)]
            # while the check above uses abs(sym_fact)-1 — looks like an
            # off-by-one in the diagnostic only; confirm before changing.
            misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)])
            raise Exception("identical diagram with different QCD powwer")

    filename = 'symperms.inc'
    self.write_symperms_file(writers.FortranWriter(filename),
                             perms)

    # Generate jpgs -> pass in make_html
    #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

    self.link_files_in_SubProcess(pjoin(pathdir, subprocdir))

    #import nexternal/leshouch in Source
    ln('nexternal.inc', '../../Source', log=False)
    ln('leshouche.inc', '../../Source', log=False)
    ln('maxamps.inc', '../../Source', log=False)

    # Return to SubProcesses dir
    os.chdir(pathdir)

    # Add subprocess to subproc.mg
    filename = 'subproc.mg'
    files.append_to_file(filename,
                         self.write_subproc,
                         subprocdir)

    # Return to original dir
    os.chdir(cwd)

    if not tot_calls:
        tot_calls = 0
    return tot_calls
5263 5264 #=========================================================================== 5265 # write_super_auto_dsig_file 5266 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write the auto_dsig.f file selecting between the subprocesses
    in subprocess group mode.

    With a writer, fills the template and writes it; with writer=None,
    returns the substitution dictionary instead.
    """
    mes = subproc_group.get('matrix_elements')
    nproc = len(mes)
    nexternal, _ninitial = mes[0].get_nexternal_ninitial()
    spin1, spin2 = mes[0].get_spin_state_initial()

    # One DSIGPROC dispatch line per subprocess, tagged with its process.
    dispatch = "\n".join(
        "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s"
        % {"num": iproc + 1,
           "proc": me.get('processes')[0].base_string()}
        for iproc, me in enumerate(mes))

    replace_dict = {
        'info_lines': self.get_mg5_info_lines(),
        'process_lines': '\n'.join(self.get_process_info_lines(me)
                                   for me in mes),
        'nexternal': nexternal,
        'nsprocs': 2 * nproc,
        'dsig_def_line': "DOUBLE PRECISION " +
                         ",".join("DSIG%d" % (i + 1) for i in range(nproc)),
        'call_dsig_proc_lines': dispatch,
        'read_write_good_hel':
            self.read_write_good_hel(mes[0].get_helicity_combinations()),
        'nb_spin_state1': spin1,
        'nb_spin_state2': spin2,
    }

    if not writer:
        return replace_dict

    template = open(pjoin(_file_path,
                    'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
    writer.writelines(template % replace_dict)
5320 5321 #=========================================================================== 5322 # write_mirrorprocs 5323 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write mirrorprocs.inc: for each subprocess in the group, whether
    an initial-state mirror process exists."""
    mes = subproc_group.get('matrix_elements')
    bmap = {True: '.true.', False: '.false.'}
    flags = ",".join(bmap[me.get('has_mirror_process')] for me in mes)
    writer.writelines(
        ["DATA (MIRRORPROCS(I),I=1,%d)/%s/" % (len(mes), flags)])
5337 5338 #=========================================================================== 5339 # write_addmothers 5340 #===========================================================================
def write_addmothers(self, writer):
    """Write SubProcess/addmothers.f from its template, with the config
    index variable named `lconfig` in subprocess-group mode."""
    template = pjoin(_file_path, 'iolibs', 'template_files', 'addmothers.f')
    writer.write(open(template).read() % {'iconfig': 'lconfig'})
    return True
5350 5351 5352 #=========================================================================== 5353 # write_coloramps_file 5354 #===========================================================================
def write_coloramps_file(self, writer, diagrams_for_config, maxflows,
                         matrix_elements):
    """Write the coloramps.inc file for MadEvent in subprocess-group
    mode."""
    # Transpose the config table: per subprocess, the diagram chosen
    # for each configuration.
    per_subproc = {}
    for config in diagrams_for_config:
        for isub, diag in enumerate(config):
            per_subproc.setdefault(isub, []).append(diag)

    body = []
    for isub in sorted(per_subproc):
        body += self.get_icolamp_lines(per_subproc[isub],
                                       matrix_elements[isub],
                                       isub + 1)

    header = "logical icolamp(%d,%d,%d)" % (maxflows,
                                            len(diagrams_for_config),
                                            len(matrix_elements))
    writer.writelines([header] + body)
    return True
5386 5387 #=========================================================================== 5388 # write_config_subproc_map_file 5389 #===========================================================================
def write_config_subproc_map_file(self, writer, config_subproc_map):
    """Write config_subproc_map.inc for subprocess groups, keeping only
    configurations with at least one contributing diagram."""
    out = []
    nkept = 0
    for config in config_subproc_map:
        # A row of all zeros means no subprocess contributes: drop it.
        if set(config) == set([0]):
            continue
        nkept += 1
        out.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/"
                   % (nkept, len(config),
                      ",".join(str(d) for d in config)))
    writer.writelines(out)
    return True
5407 5408 #=========================================================================== 5409 # read_write_good_hel 5410 #===========================================================================
def read_write_good_hel(self, ncomb):
    """Return Fortran source implementing the read/write/init helpers
    for the GOODHEL helicity-filter common block, sized for `ncomb`
    helicity combinations.

    The returned text is spliced verbatim into the generated process
    sources (it is a template value, not written here).
    """

    convert = {'ncomb' : ncomb}

    # NOTE(review): MAXTRIES is taken from genps.inc (included only in
    # read_good_hel); get_maxsproc reads maxsproc from maxamps.inc.
    output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
            GOODHEL(I,1) = .false.
            GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

""" % convert

    return output
5472 5473 5474 5475 #=========================================================================== 5476 # write_configs_file 5477 #===========================================================================
def write_configs_file(self, writer, subproc_group, diagrams_for_config):
    """Write the configs.inc file with topology information for a
    subprocess group. Use the first subprocess with a diagram for each
    configuration.

    Returns (number of kept configs, result of the diagram writer).
    """
    mes = subproc_group.get('matrix_elements')
    model = mes[0].get('processes')[0].get('model')

    chosen_diagrams = []
    kept_numbers = []
    for iconf, config in enumerate(diagrams_for_config):
        # All-zero rows have no diagram in any subprocess: skip them.
        if set(config) == set([0]):
            continue
        # Diagram numbers are 1-based; 0 means "none for this subprocess".
        row = [mes[isub].get('diagrams')[d - 1] if d else None
               for isub, d in enumerate(config)]
        chosen_diagrams.append(row)
        kept_numbers.append(iconf + 1)

    nexternal, ninitial = subproc_group.get_nexternal_ninitial()

    return len(chosen_diagrams), \
           self.write_configs_file_from_diagrams(writer, chosen_diagrams,
                                                 kept_numbers,
                                                 nexternal, ninitial,
                                                 model)
5510 5511 #=========================================================================== 5512 # write_run_configs_file 5513 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Loop-induced processes get one channel per job, others two.
    """
    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_run_config.inc')
    nchan = 1 if self.proc_characteristic['loop_induced'] else 2
    writer.write(open(template).read() % {'chanperjob': nchan})
    return True
5525 5526 5527 #=========================================================================== 5528 # write_leshouche_file 5529 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Write the leshouche.inc file for MG4: the Les Houches id/color
    lines of every matrix element in the group, concatenated."""
    out = []
    for iproc, me in enumerate(subproc_group.get('matrix_elements')):
        out.extend(self.get_leshouche_lines(me, iproc))
    writer.writelines(out)
    return True
5542 5543
def finalize(self,*args, **opts):
    """Finalize the export: run the parent class finalization, then
    record in the process characteristics that the matrix elements of
    this output were grouped."""
    # Explicit two-argument super() kept from the original py2-compatible
    # code base (the file still imports from six).
    super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
    # Ensure that the grouping information carries the correct value.
    self.proc_characteristic['grouped_matrix'] = True
#===============================================================================
# UFO_model_to_mg4
#===============================================================================

def python_to_fortran(x):
    """Translate a python/UFO expression string into Fortran syntax.

    Replaces the former module-level lambda assignment (PEP 8 E731:
    never assign a lambda to a name); the callable's name and behaviour
    are unchanged.
    """
    return parsers.UFOExpressionParserFortran().parse(x)
5556 5557 -class UFO_model_to_mg4(object):
5558 """ A converter of the UFO-MG5 Model to the MG4 format """ 5559 5560 # The list below shows the only variables the user is allowed to change by 5561 # himself for each PS point. If he changes any other, then calling 5562 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5563 # correctly account for the change. 5564 PS_dependent_key = ['aS','MU_R'] 5565 mp_complex_format = 'complex*32' 5566 mp_real_format = 'real*16' 5567 # Warning, it is crucial none of the couplings/parameters of the model 5568 # starts with this prefix. I should add a check for this. 5569 # You can change it as the global variable to check_param_card.ParamCard 5570 mp_prefix = check_param_card.ParamCard.mp_prefix 5571
def __init__(self, model, output_path, opt=None):
    """Store the model, the target directory and the export options."""
    self.model = model
    self.model_name = model['name']
    self.dir_path = output_path

    # Default export options, overridden by any caller-supplied dict.
    self.opt = {'complex_mass': False, 'export_format': 'madevent',
                'mp': True, 'loop_induced': False}
    if opt:
        self.opt.update(opt)

    # Couplings and parameters, split by refactorize() into
    # alphaS/MU_R-dependent and independent sets; entries are
    # (name, expression, type) objects.
    self.coups_dep = []
    self.coups_indep = []
    self.params_dep = []
    self.params_indep = []
    self.params_ext = []      # external (param_card) parameters

    # Expression translators: double and quadruple precision Fortran.
    self.p_to_f = parsers.UFOExpressionParserFortran(self.model)
    self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5591 5592
5594 """modify the parameter if some of them are identical up to the case""" 5595 5596 lower_dict={} 5597 duplicate = set() 5598 keys = list(self.model['parameters'].keys()) 5599 keys.sort() 5600 for key in keys: 5601 for param in self.model['parameters'][key]: 5602 lower_name = param.name.lower() 5603 if not lower_name: 5604 continue 5605 try: 5606 lower_dict[lower_name].append(param) 5607 except KeyError as error: 5608 lower_dict[lower_name] = [param] 5609 else: 5610 duplicate.add(lower_name) 5611 logger.debug('%s is define both as lower case and upper case.' 5612 % lower_name) 5613 if not duplicate: 5614 return 5615 5616 re_expr = r'''\b(%s)\b''' 5617 to_change = [] 5618 change={} 5619 for value in duplicate: 5620 for i, var in enumerate(lower_dict[value]): 5621 to_change.append(var.name) 5622 new_name = '%s%s' % (var.name.lower(), 5623 ('__%d'%(i+1) if i>0 else '')) 5624 change[var.name] = new_name 5625 var.name = new_name 5626 5627 # Apply the modification to the map_CTcoup_CTparam of the model 5628 # if it has one (giving for each coupling the CT parameters whcih 5629 # are necessary and which should be exported to the model. 
5630 if hasattr(self.model,'map_CTcoup_CTparam'): 5631 for coup, ctparams in self.model.map_CTcoup_CTparam: 5632 for i, ctparam in enumerate(ctparams): 5633 try: 5634 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5635 except KeyError: 5636 pass 5637 5638 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5639 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5640 5641 # change parameters 5642 for key in keys: 5643 if key == ('external',): 5644 continue 5645 for param in self.model['parameters'][key]: 5646 param.expr = rep_pattern.sub(replace, param.expr) 5647 5648 # change couplings 5649 for key in self.model['couplings'].keys(): 5650 for coup in self.model['couplings'][key]: 5651 coup.expr = rep_pattern.sub(replace, coup.expr) 5652 5653 # change mass/width 5654 for part in self.model['particles']: 5655 if str(part.get('mass')) in to_change: 5656 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5657 if str(part.get('width')) in to_change: 5658 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5659
def refactorize(self, wanted_couplings=None):
    """Split parameters and couplings into alphaS(MU_R)-dependent and
    independent sets, following the MG4 convention.

    wanted_couplings: optional list of coupling names to keep; empty or
    None keeps everything.  (Fix: the mutable default argument [] was
    replaced by None; behaviour is unchanged for all callers.)
    """
    if wanted_couplings is None:
        wanted_couplings = []

    # Keep only separation in alphaS: a parameter group is "dependent"
    # if its key mentions aS or MU_R.
    keys = list(self.model['parameters'].keys())
    keys.sort(key=len)
    for key in keys:
        to_add = [o for o in self.model['parameters'][key] if o.name]

        if key == ('external',):
            self.params_ext += to_add
        elif any([(k in key) for k in self.PS_dependent_key]):
            self.params_dep += to_add
        else:
            self.params_indep += to_add

    # Same split for couplings, optionally filtered to the wanted ones.
    keys = list(self.model['couplings'].keys())
    keys.sort(key=len)
    for key, coup_list in self.model['couplings'].items():
        if any([(k in key) for k in self.PS_dependent_key]):
            self.coups_dep += [c for c in coup_list if
                               (not wanted_couplings or c.name in
                                wanted_couplings)]
        else:
            self.coups_indep += [c for c in coup_list if
                                 (not wanted_couplings or c.name in
                                  wanted_couplings)]

    # MG4 uses G, not aS, as its basic object for alphaS-related
    # computation: remove G from the dependent parameter list.
    if 'G' in self.params_dep:
        index = self.params_dep.index('G')
        self.params_dep.pop(index)

    if 'aS' not in self.params_ext:
        logger.critical('aS not define as external parameter adding it!')
        self.params_indep.append(base_objects.ModelVariable('aS', '0.138', 'real'))
        self.params_indep.append(base_objects.ModelVariable('G', '4.1643', 'real'))
def build(self, wanted_couplings=[], full=True):
    """Normalise the model to the MG4 conventions and, unless
    full=False, write every output file."""
    # Case-insensitive name clashes must be resolved before anything else.
    self.pass_parameter_to_case_insensitive()
    self.refactorize(wanted_couplings)

    if not full:
        return
    if wanted_couplings:
        # extract the wanted ct parameters
        self.extract_needed_CTparam(wanted_couplings=wanted_couplings)
    self.write_all()
5715 5716
def open(self, name, comment='c', format='default'):
    """ Open the file name in the correct directory and with a valid
    header.

    format='fortran' returns a FortranWriter; any other value returns a
    plain file object.  In both cases a three-line comment banner naming
    the UFO converter is written first.
    """

    file_path = pjoin(self.dir_path, name)

    if format == 'fortran':
        fsock = writers.FortranWriter(file_path, 'w')
        # NOTE(review): the header is written through the *unbound*
        # io.FileIO.writelines so the raw text bypasses FortranWriter's
        # line reformatting — presumably intentional; confirm before
        # simplifying to fsock.writelines(...).
        write_class = io.FileIO

        write_class.writelines(fsock, comment * 77 + '\n')
        write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) * ' '})
        write_class.writelines(fsock, comment * 77 + '\n\n')
    else:
        fsock = open(file_path, 'w')
        fsock.writelines(comment * 77 + '\n')
        fsock.writelines('%(comment)s written by the UFO converter\n' % \
                         {'comment': comment + (6 - len(comment)) * ' '})
        fsock.writelines(comment * 77 + '\n\n')
    return fsock
5738 5739
def write_all(self):
    """Create every file of the Fortran model directory, in dependency
    order: card reading, parameter definitions, couplings, makefile,
    model functions, param_card and static support files."""
    # External-parameter handling and internal-parameter input first.
    for step in ('create_ident_card', 'create_param_read', 'create_input'):
        getattr(self, step)()

    # Internal parameter definitions: double precision always,
    # quadruple precision only when multiple precision is enabled.
    self.create_intparam_def(dp=True, mp=False)
    if self.opt['mp']:
        self.create_intparam_def(dp=False, mp=True)

    # Couplings, build files, diagnostics, model functions, the
    # param_card.dat and finally the static library files.
    for step in ('create_actualize_mp_ext_param_inc', 'create_coupl_inc',
                 'create_write_couplings', 'create_couplings',
                 'create_makeinc', 'create_param_write',
                 'create_model_functions_inc', 'create_model_functions_def',
                 'create_param_card', 'copy_standard_file'):
        getattr(self, step)()
5772 5773 ############################################################################ 5774 ## ROUTINE CREATING THE FILES ############################################ 5775 ############################################################################ 5776
def copy_standard_file(self):
    """Copy the standard files for the fortran model.

    Copies the static library files, instantiates rw_para.f from its
    template (includes + optional card loading), picks the matching
    lha_read variant and installs the appropriate makefile for the
    export format.
    """

    #copy the library files
    file_to_link = ['formats.inc','printout.f', \
                    'rw_para.f', 'testprog.f']

    for filename in file_to_link:
        cp( MG5DIR + '/models/template_files/fortran/' + filename, \
            self.dir_path)

    # rw_para.f is a template: fill in the include list and the
    # (optional) param-card loading call below.
    file = open(os.path.join(MG5DIR,\
                        'models/template_files/fortran/rw_para.f')).read()

    includes=["include \'coupl.inc\'","include \'input.inc\'",
              "include \'model_functions.inc\'"]
    if self.opt['mp']:
        # Multiple-precision variants of the coupling/input includes.
        includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
    # In standalone and madloop we do no use the compiled param card but
    # still parse the .dat one so we must load it.
    if self.opt['loop_induced']:
        #loop induced follow MadEvent way to handle the card.
        load_card = ''
        lha_read_filename='lha_read.f'
    elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
        load_card = 'call LHA_loadcard(param_name,npara,param,value)'
        lha_read_filename='lha_read_mp.f'
    elif self.opt['export_format'].startswith('standalone') \
        or self.opt['export_format'] in ['madweight', 'plugin']\
        or self.opt['export_format'].startswith('matchbox'):
        load_card = 'call LHA_loadcard(param_name,npara,param,value)'
        lha_read_filename='lha_read.f'
    else:
        load_card = ''
        lha_read_filename='lha_read.f'
    # Whichever variant was chosen is always installed as lha_read.f.
    cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
        os.path.join(self.dir_path,'lha_read.f'))

    # NOTE(review): the join separator is Fortran continuation-column
    # whitespace reconstructed from the original source — confirm it
    # matches the rw_para.f template's indentation.
    file=file%{'includes':'\n      '.join(includes),
               'load_card':load_card}
    writer=open(os.path.join(self.dir_path,'rw_para.f'),'w')
    writer.writelines(file)
    writer.close()

    # Pick the makefile matching the export format.
    if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
        or self.opt['loop_induced']:
        cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
            self.dir_path + '/makefile')
        if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
            # aMC@NLO reuses the MadEvent makefile with a renamed target.
            path = pjoin(self.dir_path, 'makefile')
            text = open(path).read()
            text = text.replace('madevent','aMCatNLO')
            open(path, 'w').writelines(text)
    elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                              'madloop','madloop_optimized', 'standalone_rw',
                              'madweight','matchbox','madloop_matchbox', 'plugin']:
        cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
            self.dir_path + '/makefile')
    else:
        raise MadGraph5Error('Unknown format')
5839
    def create_coupl_inc(self):
        """ write coupling.inc

        Emits the Fortran declarations / common blocks for the strong and
        weak couplings, the renormalization scale, masses, widths, all model
        couplings and (when the complex-mass scheme is active) the complex
        masses.  When ``self.opt['mp']`` is set, the multiple-precision
        counterparts are written both with the mp prefix (mp_coupl.inc) and
        with the original names (mp_coupl_same_name.inc).
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """
            # Same header twice: once with the MP__ prefix, once with the
            # double-precision names (used where dp/mp code is shared).
            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                        'complex_mp_format':self.mp_complex_format,
                                        'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                        'complex_mp_format':self.mp_complex_format,
                                        'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # A complex mass is only declared for massive particles
                # that also have a nonzero width.
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                                    ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                                    ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                                ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                                    ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5959
5960 - def create_write_couplings(self):
5961 """ write the file coupl_write.inc """ 5962 5963 fsock = self.open('coupl_write.inc', format='fortran') 5964 5965 fsock.writelines("""write(*,*) ' Couplings of %s' 5966 write(*,*) ' ---------------------------------' 5967 write(*,*) ' '""" % self.model_name) 5968 def format(coupl): 5969 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5970 5971 # Write the Couplings 5972 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5973 fsock.writelines('\n'.join(lines)) 5974 5975
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares the external/internal real and complex model parameters and
        their common blocks, skipping masses/widths (already declared in
        coupl.inc), the reserved names g/mu_r/zero, and any CT parameter not
        needed for this computation.  Writes mp_input.inc too when mp is on.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # A parameter is declared here only if it is not reserved and not
        # already declared as a mass/width (comparison is case-insensitive).
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                            and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real'and
                               is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                                             if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                                             if self.check_needed_param(param)]

        # The complex block is omitted entirely when empty (an empty
        # declaration would be invalid Fortran).
        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n\n')
6032
6033 - def check_needed_param(self, param):
6034 """ Returns whether the parameter in argument is needed for this 6035 specific computation or not.""" 6036 6037 # If this is a leading order model or if there was no CT parameter 6038 # employed in this NLO model, one can directly return that the 6039 # parameter is needed since only CTParameters are filtered. 6040 if not hasattr(self, 'allCTparameters') or \ 6041 self.allCTparameters is None or self.usedCTparameters is None or \ 6042 len(self.allCTparameters)==0: 6043 return True 6044 6045 # We must allow the conjugate shorthand for the complex parameter as 6046 # well so we check wether either the parameter name or its name with 6047 # 'conjg__' substituted with '' is present in the list. 6048 # This is acceptable even if some parameter had an original name 6049 # including 'conjg__' in it, because at worst we export a parameter 6050 # was not needed. 6051 param = param.lower() 6052 cjg_param = param.replace('conjg__','',1) 6053 6054 # First make sure it is a CTparameter 6055 if param not in self.allCTparameters and \ 6056 cjg_param not in self.allCTparameters: 6057 return True 6058 6059 # Now check if it is in the list of CTparameters actually used 6060 return (param in self.usedCTparameters or \ 6061 cjg_param in self.usedCTparameters)
6062
6063 - def extract_needed_CTparam(self,wanted_couplings=[]):
6064 """ Extract what are the needed CT parameters given the wanted_couplings""" 6065 6066 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6067 # Setting these lists to none wil disable the filtering in 6068 # check_needed_param 6069 self.allCTparameters = None 6070 self.usedCTparameters = None 6071 return 6072 6073 # All CTparameters appearin in all CT couplings 6074 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6075 # Define in this class the list of all CT parameters 6076 self.allCTparameters=list(\ 6077 set(itertools.chain.from_iterable(allCTparameters))) 6078 6079 # All used CT couplings 6080 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6081 allUsedCTCouplings = [coupl for coupl in 6082 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6083 6084 # Now define the list of all CT parameters that are actually used 6085 self.usedCTparameters=list(\ 6086 set(itertools.chain.from_iterable([ 6087 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6088 ]))) 6089 6090 # Now at last, make these list case insensitive 6091 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6092 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6093
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                             ('mp_' if mp and not dp else ''), format='fortran')

        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        # Scale-independent internal parameters: computed once at card load.
        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            # NOTE(review): unlike the scale-independent block above, the mp
            # output here is behind an elif, so it is skipped when dp is also
            # True — presumably the dependent params are always generated in
            # separate dp-only / mp-only invocations; confirm against callers.
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        # 3.5449077018110318 = 2*sqrt(pi), i.e. gal(1) = 2*sqrt(pi/aEWM1).
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                                     gal(2) = 1d0
                                 """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                                     %(mp_prefix)sgal(2) = 1d0
                                 """ %{'mp_prefix':self.mp_prefix})
            pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                                     gal(2) = 1d0
                                 """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                                     %(mp_prefix)sgal(2) = 1d0
                                 """ %{'mp_prefix':self.mp_prefix})
            pass
        else:
            # Neither scheme available: fall back to unit couplings so the
            # generated code still compiles, but warn the user.
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
                                     gal(2) = 1d0
                                 """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                                     %(mp_prefix)sgal(2) = 1e0_16
                                 """%{'mp_prefix':self.mp_prefix})
6177 6178
6179 - def create_couplings(self):
6180 """ create couplings.f and all couplingsX.f """ 6181 6182 nb_def_by_file = 25 6183 6184 self.create_couplings_main(nb_def_by_file) 6185 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6186 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6187 6188 for i in range(nb_coup_indep): 6189 # For the independent couplings, we compute the double and multiple 6190 # precision ones together 6191 data = self.coups_indep[nb_def_by_file * i: 6192 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6193 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6194 6195 for i in range(nb_coup_dep): 6196 # For the dependent couplings, we compute the double and multiple 6197 # precision ones in separate subroutines. 6198 data = self.coups_dep[nb_def_by_file * i: 6199 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6200 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6201 dp=True,mp=False) 6202 if self.opt['mp']: 6203 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6204 dp=False,mp=True)
6205 6206
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Writes the driver subroutines: coup() (full initialization at card
        load), update_as_param() (re-evaluate scale-dependent couplings),
        update_as_param2(mu_r2,as2) (set the scale/alpha_s then update) and,
        when mp is enabled, mp_update_as_param().  The numbered coupN()
        subroutines they call are produced by create_couplings_part, with
        nb_def_by_file couplings per file.
        """

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

                            implicit none
                            double precision PI, ZERO
                            logical READLHA
                            parameter (PI=3.141592653589793d0)
                            parameter (ZERO=0d0)
                            include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                                parameter (MP__ZERO=0e0_16)
                                include \'mp_input.inc\'
                                include \'mp_coupl.inc\'
                        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .true.
                            include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        # coup() evaluates everything: first the scale-independent files...
        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        # ...then the scale-dependent ones (numbered after the independent).
        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param(): READLHA is false, so only the event-by-event
        # (scale-dependent) part of intparam_definition.inc is executed.
        fsock.writelines("""subroutine update_as_param()

                            implicit none
                            double precision PI, ZERO
                            logical READLHA
                            parameter (PI=3.141592653589793d0)
                            parameter (ZERO=0d0)
                            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            READLHA = .false.""")
        fsock.writelines("""
                            include \'intparam_definition.inc\'\n
                         """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2(): set MU_R (if positive), recompute G from the
        # new alpha_s, then delegate to update_as_param().
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

                            implicit none
                            double precision PI
                            parameter (PI=3.141592653589793d0)
                            double precision mu_r2, as2
                            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'""")
        fsock.writelines("""
                            if (mu_r2.gt.0d0) MU_R = mu_r2
                            G = SQRT(4.0d0*PI*AS2)
                            AS = as2

                            CALL UPDATE_AS_PARAM()
                         """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            fsock.writelines("""subroutine mp_update_as_param()

                                implicit none
                                logical READLHA
                                include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                                parameter (MP__ZERO=0e0_16)
                                include \'mp_input.inc\'
                                include \'mp_coupl.inc\'
                        """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
                                include \'coupl.inc\'
                                include \'actualize_mp_ext_params.inc\'
                                READLHA = .false.
                                include \'mp_intparam_definition.inc\'\n
                             """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                        ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                          for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6322
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                              nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

                            implicit none
                            include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
                            double precision PI, ZERO
                            parameter  (PI=3.141592653589793d0)
                            parameter  (ZERO=0d0)
                            include 'input.inc'
                            include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                                parameter (MP__ZERO=0e0_16)
                                include \'mp_input.inc\'
                                include \'mp_coupl.inc\'
                            """%self.mp_real_format)

        # One assignment per coupling; the expression is translated from the
        # UFO python form to Fortran by p_to_f / mp_p_to_f.
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                              self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                                self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
6360
    def create_model_functions_inc(self):
        """ Create model_functions.inc which contains the various declarations
        of auxiliary functions which might be used in the couplings expressions
        """

        additional_fct = []
        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            for fct in ufo_fct:
                # already handle by default
                if str(fct.name) not in ["complexconjugate", "re", "im", "sec",
                       "csc", "asec", "acsc", "theta_function", "cond",
                       "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot",
                       "grreglog","regsqrt"]:
                    additional_fct.append(fct.name)

        # Declarations for the built-in helper functions (defined in
        # model_functions.f) plus any extra UFO-defined function.
        fsock = self.open('model_functions.inc', format='fortran')
        fsock.writelines("""double complex cond
                            double complex condif
                            double complex reglog
                            double complex reglogp
                            double complex reglogm
                            double complex recms
                            double complex arg
                            double complex grreglog
                            double complex regsqrt
                            %s
                        """ % "\n".join(["          double complex %s" % i for i in additional_fct]))


        if self.opt['mp']:
            fsock.writelines("""%(complex_mp_format)s mp_cond
                                %(complex_mp_format)s mp_condif
                                %(complex_mp_format)s mp_reglog
                                %(complex_mp_format)s mp_reglogp
                                %(complex_mp_format)s mp_reglogm
                                %(complex_mp_format)s mp_recms
                                %(complex_mp_format)s mp_arg
                                %(complex_mp_format)s mp_grreglog
                                %(complex_mp_format)s mp_regsqrt
                                %(additional)s
                            """ %\
                            {"additional": "\n".join(["          %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]),
                             'complex_mp_format':self.mp_complex_format
                            })
6407
6408 - def create_model_functions_def(self):
6409 """ Create model_functions.f which contains the various definitions 6410 of auxiliary functions which might be used in the couplings expressions 6411 Add the functions.f functions for formfactors support 6412 """ 6413 6414 fsock = self.open('model_functions.f', format='fortran') 6415 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6416 implicit none 6417 double complex condition,truecase,falsecase 6418 if(condition.eq.(0.0d0,0.0d0)) then 6419 cond=truecase 6420 else 6421 cond=falsecase 6422 endif 6423 end 6424 6425 double complex function condif(condition,truecase,falsecase) 6426 implicit none 6427 logical condition 6428 double complex truecase,falsecase 6429 if(condition) then 6430 condif=truecase 6431 else 6432 condif=falsecase 6433 endif 6434 end 6435 6436 double complex function recms(condition,expr) 6437 implicit none 6438 logical condition 6439 double complex expr 6440 if(condition)then 6441 recms=expr 6442 else 6443 recms=dcmplx(dble(expr)) 6444 endif 6445 end 6446 6447 double complex function reglog(arg) 6448 implicit none 6449 double complex TWOPII 6450 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6451 double complex arg 6452 if(arg.eq.(0.0d0,0.0d0)) then 6453 reglog=(0.0d0,0.0d0) 6454 else 6455 reglog=log(arg) 6456 endif 6457 end 6458 6459 double complex function reglogp(arg) 6460 implicit none 6461 double complex TWOPII 6462 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6463 double complex arg 6464 if(arg.eq.(0.0d0,0.0d0))then 6465 reglogp=(0.0d0,0.0d0) 6466 else 6467 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6468 reglogp=log(arg) + TWOPII 6469 else 6470 reglogp=log(arg) 6471 endif 6472 endif 6473 end 6474 6475 double complex function reglogm(arg) 6476 implicit none 6477 double complex TWOPII 6478 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6479 double complex arg 6480 if(arg.eq.(0.0d0,0.0d0))then 6481 reglogm=(0.0d0,0.0d0) 6482 else 6483 
if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6484 reglogm=log(arg) - TWOPII 6485 else 6486 reglogm=log(arg) 6487 endif 6488 endif 6489 end 6490 6491 double complex function regsqrt(arg_in) 6492 implicit none 6493 double complex arg_in 6494 double complex arg 6495 arg=arg_in 6496 if(dabs(dimag(arg)).eq.0.0d0)then 6497 arg=dcmplx(dble(arg),0.0d0) 6498 endif 6499 if(dabs(dble(arg)).eq.0.0d0)then 6500 arg=dcmplx(0.0d0,dimag(arg)) 6501 endif 6502 regsqrt=sqrt(arg) 6503 end 6504 6505 double complex function grreglog(logsw,expr1_in,expr2_in) 6506 implicit none 6507 double complex TWOPII 6508 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6509 double complex expr1_in,expr2_in 6510 double complex expr1,expr2 6511 double precision logsw 6512 double precision imagexpr 6513 logical firstsheet 6514 expr1=expr1_in 6515 expr2=expr2_in 6516 if(dabs(dimag(expr1)).eq.0.0d0)then 6517 expr1=dcmplx(dble(expr1),0.0d0) 6518 endif 6519 if(dabs(dble(expr1)).eq.0.0d0)then 6520 expr1=dcmplx(0.0d0,dimag(expr1)) 6521 endif 6522 if(dabs(dimag(expr2)).eq.0.0d0)then 6523 expr2=dcmplx(dble(expr2),0.0d0) 6524 endif 6525 if(dabs(dble(expr2)).eq.0.0d0)then 6526 expr2=dcmplx(0.0d0,dimag(expr2)) 6527 endif 6528 if(expr1.eq.(0.0d0,0.0d0))then 6529 grreglog=(0.0d0,0.0d0) 6530 else 6531 imagexpr=dimag(expr1)*dimag(expr2) 6532 firstsheet=imagexpr.ge.0.0d0 6533 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6534 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6535 if(firstsheet)then 6536 grreglog=log(expr1) 6537 else 6538 if(dimag(expr1).gt.0.0d0)then 6539 grreglog=log(expr1) - logsw*TWOPII 6540 else 6541 grreglog=log(expr1) + logsw*TWOPII 6542 endif 6543 endif 6544 endif 6545 end 6546 6547 double complex function arg(comnum) 6548 implicit none 6549 double complex comnum 6550 double complex iim 6551 iim = (0.0d0,1.0d0) 6552 if(comnum.eq.(0.0d0,0.0d0)) then 6553 arg=(0.0d0,0.0d0) 6554 else 6555 arg=log(comnum/abs(comnum))/iim 6556 endif 6557 end""") 6558 if self.opt['mp']: 6559 
fsock.writelines(""" 6560 6561 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6562 implicit none 6563 %(complex_mp_format)s condition,truecase,falsecase 6564 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6565 mp_cond=truecase 6566 else 6567 mp_cond=falsecase 6568 endif 6569 end 6570 6571 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6572 implicit none 6573 logical condition 6574 %(complex_mp_format)s truecase,falsecase 6575 if(condition) then 6576 mp_condif=truecase 6577 else 6578 mp_condif=falsecase 6579 endif 6580 end 6581 6582 %(complex_mp_format)s function mp_recms(condition,expr) 6583 implicit none 6584 logical condition 6585 %(complex_mp_format)s expr 6586 if(condition)then 6587 mp_recms=expr 6588 else 6589 mp_recms=cmplx(real(expr),kind=16) 6590 endif 6591 end 6592 6593 %(complex_mp_format)s function mp_reglog(arg) 6594 implicit none 6595 %(complex_mp_format)s TWOPII 6596 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6597 %(complex_mp_format)s arg 6598 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6599 mp_reglog=(0.0e0_16,0.0e0_16) 6600 else 6601 mp_reglog=log(arg) 6602 endif 6603 end 6604 6605 %(complex_mp_format)s function mp_reglogp(arg) 6606 implicit none 6607 %(complex_mp_format)s TWOPII 6608 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6609 %(complex_mp_format)s arg 6610 if(arg.eq.(0.0e0_16,0.0e0_16))then 6611 mp_reglogp=(0.0e0_16,0.0e0_16) 6612 else 6613 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6614 mp_reglogp=log(arg) + TWOPII 6615 else 6616 mp_reglogp=log(arg) 6617 endif 6618 endif 6619 end 6620 6621 %(complex_mp_format)s function mp_reglogm(arg) 6622 implicit none 6623 %(complex_mp_format)s TWOPII 6624 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6625 %(complex_mp_format)s arg 6626 if(arg.eq.(0.0e0_16,0.0e0_16))then 6627 mp_reglogm=(0.0e0_16,0.0e0_16) 
6628 else 6629 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6630 mp_reglogm=log(arg) - TWOPII 6631 else 6632 mp_reglogm=log(arg) 6633 endif 6634 endif 6635 end 6636 6637 %(complex_mp_format)s function mp_regsqrt(arg_in) 6638 implicit none 6639 %(complex_mp_format)s arg_in 6640 %(complex_mp_format)s arg 6641 arg=arg_in 6642 if(abs(imagpart(arg)).eq.0.0e0_16)then 6643 arg=cmplx(real(arg,kind=16),0.0e0_16) 6644 endif 6645 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6646 arg=cmplx(0.0e0_16,imagpart(arg)) 6647 endif 6648 mp_regsqrt=sqrt(arg) 6649 end 6650 6651 6652 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6653 implicit none 6654 %(complex_mp_format)s TWOPII 6655 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6656 %(complex_mp_format)s expr1_in,expr2_in 6657 %(complex_mp_format)s expr1,expr2 6658 %(real_mp_format)s logsw 6659 %(real_mp_format)s imagexpr 6660 logical firstsheet 6661 expr1=expr1_in 6662 expr2=expr2_in 6663 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6664 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6665 endif 6666 if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6667 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6668 endif 6669 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6670 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6671 endif 6672 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6673 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6674 endif 6675 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6676 mp_grreglog=(0.0e0_16,0.0e0_16) 6677 else 6678 imagexpr=imagpart(expr1)*imagpart(expr2) 6679 firstsheet=imagexpr.ge.0.0e0_16 6680 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6681 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6682 if(firstsheet)then 6683 mp_grreglog=log(expr1) 6684 else 6685 if(imagpart(expr1).gt.0.0e0_16)then 6686 mp_grreglog=log(expr1) - logsw*TWOPII 6687 else 6688 mp_grreglog=log(expr1) + logsw*TWOPII 6689 endif 6690 endif 6691 endif 6692 end 6693 6694 
%(complex_mp_format)s function mp_arg(comnum) 6695 implicit none 6696 %(complex_mp_format)s comnum 6697 %(complex_mp_format)s imm 6698 imm = (0.0e0_16,1.0e0_16) 6699 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6700 mp_arg=(0.0e0_16,0.0e0_16) 6701 else 6702 mp_arg=log(comnum/abs(comnum))/imm 6703 endif 6704 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6705 6706 6707 #check for the file functions.f 6708 model_path = self.model.get('modelpath') 6709 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6710 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6711 input = pjoin(model_path,'Fortran','functions.f') 6712 fsock.writelines(open(input).read()) 6713 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6714 6715 # check for functions define in the UFO model 6716 ufo_fct = self.model.get('functions') 6717 if ufo_fct: 6718 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6719 done = [] 6720 for fct in ufo_fct: 6721 # already handle by default 6722 if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6723 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6724 "grreglog","regsqrt"] + done: 6725 done.append(str(fct.name.lower())) 6726 ufo_fct_template = """ 6727 double complex function %(name)s(%(args)s) 6728 implicit none 6729 double complex %(args)s 6730 %(definitions)s 6731 %(name)s = %(fct)s 6732 6733 return 6734 end 6735 """ 6736 str_fct = self.p_to_f.parse(fct.expr) 6737 if not self.p_to_f.to_define: 6738 definitions = [] 6739 else: 6740 definitions=[] 6741 for d in self.p_to_f.to_define: 6742 if d == 'pi': 6743 definitions.append(' double precision pi') 6744 definitions.append(' data pi /3.1415926535897932d0/') 6745 else: 6746 definitions.append(' double complex %s' % d) 6747 6748 text = ufo_fct_template % { 6749 'name': fct.name, 6750 'args': ", ".join(fct.arguments), 6751 'fct': str_fct, 6752 'definitions': 
'\n'.join(definitions) 6753 } 6754 6755 fsock.writelines(text) 6756 if self.opt['mp']: 6757 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6758 for fct in ufo_fct: 6759 # already handle by default 6760 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6761 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6762 "grreglog","regsqrt"]: 6763 ufo_fct_template = """ 6764 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6765 implicit none 6766 %(complex_mp_format)s mp__%(args)s 6767 %(definitions)s 6768 mp_%(name)s = %(fct)s 6769 6770 return 6771 end 6772 """ 6773 str_fct = self.mp_p_to_f.parse(fct.expr) 6774 if not self.mp_p_to_f.to_define: 6775 definitions = [] 6776 else: 6777 definitions=[] 6778 for d in self.mp_p_to_f.to_define: 6779 if d == 'pi': 6780 definitions.append(' %s mp__pi' % self.mp_real_format) 6781 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6782 else: 6783 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6784 text = ufo_fct_template % { 6785 'name': fct.name, 6786 'args': ", mp__".join(fct.arguments), 6787 'fct': str_fct, 6788 'definitions': '\n'.join(definitions), 6789 'complex_mp_format': self.mp_complex_format 6790 } 6791 fsock.writelines(text) 6792 6793 6794 6795 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6796 6797 6798
6799 - def create_makeinc(self):
6800 """create makeinc.inc containing the file to compile """ 6801 6802 fsock = self.open('makeinc.inc', comment='#') 6803 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6804 text += ' model_functions.o ' 6805 6806 nb_coup_indep = 1 + len(self.coups_dep) // 25 6807 nb_coup_dep = 1 + len(self.coups_indep) // 25 6808 couplings_files=['couplings%s.o' % (i+1) \ 6809 for i in range(nb_coup_dep + nb_coup_indep) ] 6810 if self.opt['mp']: 6811 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6812 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6813 text += ' '.join(couplings_files) 6814 fsock.writelines(text)
6815
    def create_param_write(self):
        """Create param_write.inc: Fortran write statements dumping every
        model parameter (external, internal, and point-by-point internal)."""

        fsock = self.open('param_write.inc', format='fortran')

        # Section banner for the external (card-level) parameters.
        fsock.writelines("""write(*,*) ' External Params'
                            write(*,*) ' ---------------------------------'
                            write(*,*) ' '""")

        # NOTE(review): this local helper shadows the builtin `format`;
        # kept as-is to preserve the original code bytes.
        def format(name):
            # One Fortran line printing "<name> = <value>".
            return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}

        # Write the external parameter
        lines = [format(param.name) for param in self.params_ext]
        fsock.writelines('\n'.join(lines))

        # Section banner for the scale-independent internal parameters.
        fsock.writelines("""write(*,*) ' Internal Params'
                            write(*,*) ' ---------------------------------'
                            write(*,*) ' '""")
        # ZERO is a dummy parameter; unused parameters are filtered out too.
        lines = [format(data.name) for data in self.params_indep
                 if data.name != 'ZERO' and self.check_needed_param(data.name)]
        fsock.writelines('\n'.join(lines))

        # Section banner for the parameters re-evaluated at each PS point.
        fsock.writelines("""write(*,*) ' Internal Params evaluated point by point'
                            write(*,*) ' ----------------------------------------'
                            write(*,*) ' '""")
        lines = [format(data.name) for data in self.params_dep \
                 if self.check_needed_param(data.name)]

        fsock.writelines('\n'.join(lines))
6847 - def create_ident_card(self):
6848 """ create the ident_card.dat """ 6849 6850 def format(parameter): 6851 """return the line for the ident_card corresponding to this parameter""" 6852 colum = [parameter.lhablock.lower()] + \ 6853 [str(value) for value in parameter.lhacode] + \ 6854 [parameter.name] 6855 if not parameter.name: 6856 return '' 6857 return ' '.join(colum)+'\n'
6858 6859 fsock = self.open('ident_card.dat') 6860 6861 external_param = [format(param) for param in self.params_ext] 6862 fsock.writelines('\n'.join(external_param)) 6863
6864 - def create_actualize_mp_ext_param_inc(self):
6865 """ create the actualize_mp_ext_params.inc code """ 6866 6867 # In principle one should actualize all external, but for now, it is 6868 # hardcoded that only AS and MU_R can by dynamically changed by the user 6869 # so that we only update those ones. 6870 # Of course, to be on the safe side, one could decide to update all 6871 # external parameters. 6872 update_params_list=[p for p in self.params_ext if p.name in 6873 self.PS_dependent_key] 6874 6875 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6876 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6877 for param in update_params_list] 6878 # When read_lha is false, it is G which is taken in input and not AS, so 6879 # this is what should be reset here too. 6880 if 'aS' in [param.name for param in update_params_list]: 6881 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6882 6883 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6884 fsock.writelines('\n'.join(res_strings))
6885
    def create_param_read(self):
        """Create param_read.inc, the Fortran code reading the param_card.

        For madevent/FKS5 outputs (and loop-induced ones) the include file
        simply defers to the pre-generated ../param_card.inc; otherwise one
        LHA_get_real call per external parameter is emitted.
        """

        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """return the line for the ident_card corresponding to this
            parameter"""
            # Default value comes from the model, translated to Fortran syntax.
            template = \
            """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
                % {'name': parameter.name,
                       'value': self.p_to_f.parse(str(parameter.value.real))}
            if self.opt['mp']:
                # Mirror the read into the multiple-precision copy of the param.
                template = template+ \
                ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+
                 "%(mp_prefix)s%(name)s,%(value)s)") \
                 % {'name': parameter.name,'mp_prefix': self.mp_prefix,
                        'value': self.mp_p_to_f.parse(str(parameter.value.real))}

            # Loop-block parameters are read silently (no warning if absent).
            if parameter.lhablock.lower() == 'loop':
                template = template.replace('LHA_get_real', 'LHA_get_real_silent')

            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                          for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
                   particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                 {'width': particle.get('width'), 'mass': particle.get('mass')})
                if self.opt['mp']:
                    res_strings.append(\
                      ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\
                       '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\
                       'mass': particle.get('mass'),'mp_pref':self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))


    # decorator for create_param_card_static, defined next in this file
    @staticmethod
6936 - def create_param_card_static(model, output_path, rule_card_path=False, 6937 mssm_convert=True, write_special=True):
6938 """ create the param_card.dat for a givent model --static method-- """ 6939 #1. Check if a default param_card is present: 6940 done = False 6941 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6942 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6943 model_path = model.get('modelpath') 6944 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6945 done = True 6946 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6947 output_path) 6948 if not done: 6949 param_writer.ParamCardWriter(model, output_path, write_special=write_special) 6950 6951 if rule_card_path: 6952 if hasattr(model, 'rule_card'): 6953 model.rule_card.write_file(rule_card_path) 6954 6955 if mssm_convert: 6956 model_name = model.get('name') 6957 # IF MSSM convert the card to SLAH1 6958 if model_name == 'mssm' or model_name.startswith('mssm-'): 6959 import models.check_param_card as translator 6960 # Check the format of the param_card for Pythia and make it correct 6961 if rule_card_path: 6962 translator.make_valid_param_card(output_path, rule_card_path) 6963 translator.convert_to_slha1(output_path)
6964
6965 - def create_param_card(self, write_special=True):
6966 """ create the param_card.dat """ 6967 6968 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6969 if not hasattr(self.model, 'rule_card'): 6970 rule_card=False 6971 write_special = True 6972 if 'exporter' in self.opt: 6973 import madgraph.loop.loop_exporters as loop_exporters 6974 import madgraph.iolibs.export_fks as export_fks 6975 write_special = False 6976 if issubclass(self.opt['exporter'], loop_exporters.LoopProcessExporterFortranSA): 6977 write_special = True 6978 if issubclass(self.opt['exporter'],(loop_exporters.LoopInducedExporterME,export_fks.ProcessExporterFortranFKS)): 6979 write_special = False 6980 6981 self.create_param_card_static(self.model, 6982 output_path=pjoin(self.dir_path, 'param_card.dat'), 6983 rule_card_path=rule_card, 6984 mssm_convert=True, 6985 write_special=write_special)
6986
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True,
                    cmd_options=None):
    """Return the appropriate ProcessExporter instance for this output.

    :param cmd: command interface containing all potentially useful
        information (options, current amplitudes, export paths, ...).
    :param noclean: if True, do not clean the export directory first.
    :param output_type: context of the output: 'madloop'/'madloop_matchbox'
        for MadLoop5, 'amcatnlo' for FKS5 output and 'default' for
        tree-level outputs.
    :param group_subprocesses: whether tree-level subprocesses are grouped.
    :param cmd_options: extra output options forwarded to the exporter
        (a fresh empty dict when omitted).
    :raises MadGraph5Error: when the output type or export format is not
        recognized, or the loop_material template directory is missing.
    """
    # Fix: avoid the shared mutable-default-argument pitfall ({} default).
    if cmd_options is None:
        cmd_options = {}

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether a loop reduction tool (e.g. Ninja) must be installed.
    # It would only be required if:
    # a) Loop optimized output is selected
    # b) the process gathered from the amplitude generated use loops
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
            len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
        'complex_mass': cmd.options['complex_mass_scheme'],
        'export_format': 'madloop',
        'mp': True,
        'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
        'cuttools_dir': cmd._cuttools_dir,
        'iregi_dir': cmd._iregi_dir,
        'golem_dir': cmd.options['golem'],
        'samurai_dir': cmd.options['samurai'],
        'ninja_dir': cmd.options['ninja'],
        'collier_dir': cmd.options['collier'],
        'fortran_compiler': cmd.options['fortran_compiler'],
        'f2py_compiler': cmd.options['f2py_compiler'],
        'output_dependencies': cmd.options['output_dependencies'],
        'SubProc_prefix': 'P',
        'compute_color_flows': cmd.options['loop_color_flows'],
        'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
        'cluster_local_path': cmd.options['cluster_local_path'],
        'output_options': cmd_options
        }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    # Fix: message grammar (was "not recognize %s").
                    raise Exception("output_type not recognized %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory' + \
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # MP routines are only needed when virtual amplitudes are present.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        # Renamed from `format` to avoid shadowing the builtin.
        export_format = cmd._export_format  # shortcut

        if export_format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif export_format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if export_format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if export_format == 'matrix' or export_format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt,
                                            format=export_format)

        elif export_format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif export_format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif export_format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif export_format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception('Wrong export_v4 format')
    else:
        # Fix: the %s placeholder was never filled in (and "reckognized" typo).
        raise MadGraph5Error('Output type %s not recognized in ExportV4Factory.'
                             % output_type)
7157
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format (MadWeight variant)."""

    # Fortran template used to write the per-subprocess matrix_N.f files.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Grouping mode identifier -- presumably consumed by the export driver
    # to select madweight-style grouping; confirm against callers.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                         fortran_model,
                                         group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls over all matrix elements
        of the group.  (NOTE(review): group_number is currently unused.)
        """

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup")

        # Lazily pick up the model from the first process of the group.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # An already-existing directory is only worth a warning.
            logger.warning(error.strerror + " " + subprocdir)


        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             str(ime+1),
                                             subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                     get('diagrams'),
                                                  filename,
                                                  model = \
                                                    matrix_element.get('processes')[0].\
                                                                       get('model'),
                                                  amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles.
        # NOTE(review): matrix_element here is the last element of the loop
        # above; presumably all members of a group share the same externals.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)


        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared driver/helper sources into the Pn directory.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7319 7320 7321 #=========================================================================== 7322 # Helper functions 7323 #===========================================================================
7324 - def modify_grouping(self, matrix_element):
7325 """allow to modify the grouping (if grouping is in place) 7326 return two value: 7327 - True/False if the matrix_element was modified 7328 - the new(or old) matrix element""" 7329 7330 return True, matrix_element.split_lepton_grouping()
7331 7332 #=========================================================================== 7333 # write_super_auto_dsig_file 7334 #===========================================================================
7335 - def write_super_auto_dsig_file(self, writer, subproc_group):
7336 """Write the auto_dsig.f file selecting between the subprocesses 7337 in subprocess group mode""" 7338 7339 replace_dict = {} 7340 7341 # Extract version number and date from VERSION file 7342 info_lines = self.get_mg5_info_lines() 7343 replace_dict['info_lines'] = info_lines 7344 7345 matrix_elements = subproc_group.get('matrix_elements') 7346 7347 # Extract process info lines 7348 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7349 matrix_elements]) 7350 replace_dict['process_lines'] = process_lines 7351 7352 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7353 replace_dict['nexternal'] = nexternal 7354 7355 replace_dict['nsprocs'] = 2*len(matrix_elements) 7356 7357 # Generate dsig definition line 7358 dsig_def_line = "DOUBLE PRECISION " + \ 7359 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7360 range(len(matrix_elements))]) 7361 replace_dict["dsig_def_line"] = dsig_def_line 7362 7363 # Generate dsig process lines 7364 call_dsig_proc_lines = [] 7365 for iproc in range(len(matrix_elements)): 7366 call_dsig_proc_lines.append(\ 7367 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7368 {"num": iproc + 1, 7369 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7370 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7371 7372 if writer: 7373 file = open(os.path.join(_file_path, \ 7374 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7375 file = file % replace_dict 7376 # Write the file 7377 writer.writelines(file) 7378 else: 7379 return replace_dict
7380 7381 #=========================================================================== 7382 # write_mirrorprocs 7383 #===========================================================================
7384 - def write_mirrorprocs(self, writer, subproc_group):
7385 """Write the mirrorprocs.inc file determining which processes have 7386 IS mirror process in subprocess group mode.""" 7387 7388 lines = [] 7389 bool_dict = {True: '.true.', False: '.false.'} 7390 matrix_elements = subproc_group.get('matrix_elements') 7391 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7392 (len(matrix_elements), 7393 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7394 me in matrix_elements]))) 7395 # Write the file 7396 writer.writelines(lines)
7397 7398 #=========================================================================== 7399 # write_configs_file 7400 #===========================================================================
7401 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7402 """Write the configs.inc file with topology information for a 7403 subprocess group. Use the first subprocess with a diagram for each 7404 configuration.""" 7405 7406 matrix_elements = subproc_group.get('matrix_elements') 7407 model = matrix_elements[0].get('processes')[0].get('model') 7408 7409 diagrams = [] 7410 config_numbers = [] 7411 for iconfig, config in enumerate(diagrams_for_config): 7412 # Check if any diagrams correspond to this config 7413 if set(config) == set([0]): 7414 continue 7415 subproc_diags = [] 7416 for s,d in enumerate(config): 7417 if d: 7418 subproc_diags.append(matrix_elements[s].\ 7419 get('diagrams')[d-1]) 7420 else: 7421 subproc_diags.append(None) 7422 diagrams.append(subproc_diags) 7423 config_numbers.append(iconfig + 1) 7424 7425 # Extract number of external particles 7426 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7427 7428 return len(diagrams), \ 7429 self.write_configs_file_from_diagrams(writer, diagrams, 7430 config_numbers, 7431 nexternal, ninitial, 7432 matrix_elements[0],model)
7433 7434 #=========================================================================== 7435 # write_run_configs_file 7436 #===========================================================================
7437 - def write_run_config_file(self, writer):
7438 """Write the run_configs.inc file for MadEvent""" 7439 7440 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7441 text = open(path).read() % {'chanperjob':'2'} 7442 writer.write(text) 7443 return True
7444 7445 7446 #=========================================================================== 7447 # write_leshouche_file 7448 #===========================================================================
7449 - def write_leshouche_file(self, writer, subproc_group):
7450 """Write the leshouche.inc file for MG4""" 7451 7452 all_lines = [] 7453 7454 for iproc, matrix_element in \ 7455 enumerate(subproc_group.get('matrix_elements')): 7456 all_lines.extend(self.get_leshouche_lines(matrix_element, 7457 iproc)) 7458 7459 # Write the file 7460 writer.writelines(all_lines) 7461 7462 return True
7463