Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  """Methods and classes to export matrix elements to v4 format.""" 
  20   
  21  import copy 
  22  from six import StringIO 
  23  import itertools 
  24  import fractions 
  25  import glob 
  26  import logging 
  27  import math 
  28  import os 
  29  import io 
  30  import re 
  31  import shutil 
  32  import subprocess 
  33  import sys 
  34  import time 
  35  import traceback 
  36   
  37  import aloha 
  38   
  39  import madgraph.core.base_objects as base_objects 
  40  import madgraph.core.color_algebra as color 
  41  import madgraph.core.helas_objects as helas_objects 
  42  import madgraph.iolibs.drawing_eps as draw 
  43  import madgraph.iolibs.files as files 
  44  import madgraph.iolibs.group_subprocs as group_subprocs 
  45  import madgraph.iolibs.file_writers as writers 
  46  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  47  import madgraph.iolibs.template_files as template_files 
  48  import madgraph.iolibs.ufo_expression_parsers as parsers 
  49  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  50  import madgraph.interface.common_run_interface as common_run_interface 
  51  import madgraph.various.diagram_symmetry as diagram_symmetry 
  52  import madgraph.various.misc as misc 
  53  import madgraph.various.banner as banner_mod 
  54  import madgraph.various.process_checks as process_checks 
  55  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  56  import aloha.create_aloha as create_aloha 
  57  import models.import_ufo as import_ufo 
  58  import models.write_param_card as param_writer 
  59  import models.check_param_card as check_param_card 
  60   
  61   
  62  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  63  from madgraph.iolibs.files import cp, ln, mv 
  64   
  65  from madgraph import InvalidCmd 
  66   
  67  pjoin = os.path.join 
  68   
  69  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  70  logger = logging.getLogger('madgraph.export_v4') 
  71   
  72  default_compiler= {'fortran': 'gfortran', 
  73                         'f2py': 'f2py', 
  74                         'cpp':'g++'} 
class VirtualExporter(object):
    """Abstract interface describing how madgraph interacts with an exporter.

    Class-level flags below modify the way madgraph drives the exporter.
    """

    # Changes the type of object passed to 'generate_subprocess_directory':
    #   False       -> no grouping (only identical matrix elements are merged)
    #   'madevent'  -> group the massless quarks and massless leptons
    #   'madweight' -> group the gluon with the massless quarks
    grouped_mode = 'madevent'

    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Putting sa_symmetry generates only one of the two matrix elements.
    sa_symmetry = False

    # Ask madgraph to check if the directory already exists and propose to
    # the user to remove it first if this is the case.
    check = True

    # One of [Template, None, dir]:
    #   - Template: madgraph will call copy_template
    #   - dir:      madgraph will just create an empty directory
    #   - None:     madgraph does nothing for initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        # cmd_options is a dictionary with all the optional arguments passed
        # at output time.
        # Activate some monkey patching for the helas call writer.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    @staticmethod
    def custom_helas_call(call, arg):
        """static method to customise the way aloha function calls are written
        call is the default template for the call
        arg is the dictionary used for the call
        """
        return call, arg

    # helper hook used to customise the helas writer (bound method wrapper
    # around custom_helas_call)
    helas_call_writer_custom = lambda x, y, z: x.custom_helas_call(y, z)

    def copy_template(self, model):
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # For ungrouped mode the signature is
        # (matrix_element, helicity_model, me_number).
        return 0  # number of calls to helicity routines

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Options used when the caller does not supply any at construction time.
    default_opt = {'clean': False, 'complex_mass': False,
                   'export_format': 'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options': {}
                   }
    grouped_mode = False
def __init__(self, dir_path="", opt=None):
    """Initiate the ProcessExporterFortran with directory information"""
    self.mgme_dir = MG5DIR
    self.dir_path = dir_path
    self.model = None

    # Start from the class defaults and overlay whatever the caller passed.
    self.opt = dict(self.default_opt)
    if opt:
        self.opt.update(opt)

    self.cmd_options = self.opt['output_options']

    # Placeholder used to pass information to the run_interface.
    self.proc_characteristic = banner_mod.ProcCharacteristic()
    # call mother class
    super(ProcessExporterFortran, self).__init__(dir_path, opt)
#===========================================================================
# process exporter fortran switch between group and not grouped
#===========================================================================
def export_processes(self, matrix_elements, fortran_model):
    """Make the switch between grouped and not grouped output"""

    calls = 0
    if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
        # Grouped output: one directory per group of matrix elements.
        for group_number, me_group in enumerate(matrix_elements):
            calls += self.generate_subprocess_directory(
                me_group, fortran_model, group_number)
    else:
        # Ungrouped output: one directory per matrix element.
        for me_number, me in enumerate(matrix_elements.get_matrix_elements()):
            calls += self.generate_subprocess_directory(
                me, fortran_model, me_number)

    return calls
#===========================================================================
# create the run_card
#===========================================================================
def create_run_card(self, matrix_elements, history):
    """Write run_card_default.dat in Cards and copy it to run_card.dat."""

    # bypass this for the loop-check
    import madgraph.loop.loop_helas_objects as loop_helas_objects
    if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
        matrix_elements = None

    run_card = banner_mod.RunCard()

    # Collect the processes so the run card defaults can be tailored to them.
    processes = None
    if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
        processes = [me.get('processes') for megroup in matrix_elements
                     for me in megroup['matrix_elements']]
    elif matrix_elements:
        processes = [me.get('processes')
                     for me in matrix_elements['matrix_elements']]

    if processes is not None:
        run_card.create_default_for_process(self.proc_characteristic,
                                            history,
                                            processes)

    run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
    shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                    pjoin(self.dir_path, 'Cards', 'run_card.dat'))
#===========================================================================
# copy the Template in a new directory.
#===========================================================================
def copy_template(self, model):
    """create the directory run_name as a copy of the MadEvent
    Template, and clean the directory
    """

    def backup_default_cards():
        # Keep a pristine '<card>_default.dat' copy of each card shipped with
        # the Template so the user can always restore it. (Was duplicated
        # verbatim in two branches below.)
        for card in ['plot_card']:
            if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                try:
                    shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                except IOError:
                    logger.warning("Failed to copy " + card + ".dat to default")

    # First copy the full template tree if dir_path doesn't exist
    if not os.path.isdir(self.dir_path):
        assert self.mgme_dir, \
            "No valid MG_ME path given for MG4 run directory creation."
        logger.info('initialize a new directory: %s' %
                    os.path.basename(self.dir_path))
        shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                        self.dir_path, True)
        # misc.copytree since dir_path already exists
        misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                      self.dir_path)
        backup_default_cards()
    elif os.getcwd() == os.path.realpath(self.dir_path):
        logger.info('working in local directory: %s' %
                    os.path.realpath(self.dir_path))
        # misc.copytree since dir_path already exists
        misc.copytree(pjoin(self.mgme_dir, 'Template/LO'),
                      self.dir_path)
        # misc.copytree since dir_path already exists
        misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
                      self.dir_path)
        backup_default_cards()
    elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')):
        assert self.mgme_dir, \
            "No valid MG_ME path given for MG4 run directory creation."
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            MG5_version = misc.get_pkg_info()
            # BUGFIX: close the file deterministically (was a bare
            # open(...).write(...) leaking the handle).
            with open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w') as fsock:
                fsock.write(MG5_version['version'])

    # Ensure that the Template is clean
    if self.opt['clean']:
        logger.info('remove old information in %s' %
                    os.path.basename(self.dir_path))
        if 'MADGRAPH_BASE' in os.environ:
            misc.call([pjoin('bin', 'internal', 'clean_template'),
                       '--web'], cwd=self.dir_path)
        else:
            try:
                misc.call([pjoin('bin', 'internal', 'clean_template')],
                          cwd=self.dir_path)
            except Exception as why:
                raise MadGraph5Error('Failed to clean correctly %s: \n %s'
                                     % (os.path.basename(self.dir_path), why))

    # Write version info
    MG_version = misc.get_pkg_info()
    # BUGFIX: context manager instead of an unclosed open(...).write(...).
    with open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w') as fsock:
        fsock.write(MG_version['version'])

    # add the makefile in Source directory
    filename = pjoin(self.dir_path, 'Source', 'makefile')
    self.write_source_makefile(writers.FileWriter(filename))

    # add the DiscreteSampler information
    files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'DiscreteSampler.f'),
             pjoin(self.dir_path, 'Source'))
    files.cp(pjoin(MG5DIR, 'vendor', 'DiscreteSampler', 'StringCast.f'),
             pjoin(self.dir_path, 'Source'))

    # We need to create the correct open_data for the pdf
    self.write_pdf_opendata()
#===========================================================================
# Call MadAnalysis5 to generate the default cards for this process
#===========================================================================
def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                      ma5_path, output_dir, levels=['parton', 'hadron']):
    """ Call MA5 so that it writes default cards for both parton and
    post-shower levels, tailored for this particular process.

    NOTE: the mutable default 'levels' is only ever read, never mutated, so
    the shared-default-list pitfall does not apply here.
    """

    if len(levels) == 0:
        return

    start = time.time()
    logger.info('Generating MadAnalysis5 default cards tailored to this process')
    try:
        MA5_interpreter = common_run_interface.CommonRunCmd.\
            get_MadAnalysis5_interpreter(MG5DIR, ma5_path, loglevel=100)
    except (Exception, SystemExit):
        logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty')
        return
    if MA5_interpreter is None:
        return

    MA5_main = MA5_interpreter.main
    for lvl in ['parton', 'hadron']:
        if lvl not in levels:
            continue
        card_to_generate = pjoin(output_dir, 'madanalysis5_%s_card_default.dat' % lvl)
        try:
            text = MA5_main.madgraph.generate_card(history, proc_defs, processes, lvl)
        except (Exception, SystemExit):
            # keep the default card (skip only)
            logger.warning('MadAnalysis5 failed to write a %s-level' % lvl +
                           ' default analysis card for this process.')
            logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.' % lvl)
            error = StringIO()
            traceback.print_exc(file=error)
            logger.debug('MadAnalysis5 error was:')
            logger.debug('-' * 60)
            logger.debug(error.getvalue()[:-1])
            logger.debug('-' * 60)
        else:
            # BUGFIX: use a context manager so the card file is flushed and
            # closed deterministically (was a bare open(...).write(...)).
            with open(card_to_generate, 'w') as card:
                card.write(text)
    stop = time.time()
    if stop - start > 1:
        logger.info('Cards created in %.2fs' % (stop - start))
#===========================================================================
# write a procdef_mg5 (an equivalent of the MG4 proc_card.dat)
#===========================================================================
def write_procdef_mg5(self, file_pos, modelname, process_str):
    """ write an equivalent of the MG4 proc_card in order that all the Madevent
    Perl script of MadEvent4 are still working properly for pure MG5 run.

    Returns the replacement dictionary when file_pos is falsy, otherwise
    writes the card to file_pos and returns None.
    """

    proc_card_template = template_files.mg4_proc_card.mg4_template
    process_template = template_files.mg4_proc_card.process_template

    # First find the coupling and suppress the coupling from process_str.
    # But first ensure that couplings are defined without spaces:
    process_str = process_str.replace(' =', '=')
    process_str = process_str.replace('= ', '=')
    process_str = process_str.replace(',', ' , ')

    # Now loop on the elements and collect all the couplings.
    coupling = ''
    new_process_content = []
    for info in process_str.split():
        if '=' in info:
            coupling += info + '\n'
        else:
            new_process_content.append(info)
    # Recombine the process_str (the input process_str without coupling info).
    process_str = ' '.join(new_process_content)

    # format the SubProcess
    replace_dict = {'process': process_str,
                    'coupling': coupling}
    process_text = process_template.substitute(replace_dict)

    replace_dict = {'process': process_text,
                    'model': modelname,
                    'multiparticle': ''}
    text = proc_card_template.substitute(replace_dict)

    if file_pos:
        # BUGFIX: context manager instead of manual open/write/close.
        with open(file_pos, 'w') as ff:
            ff.write(text)
    else:
        return replace_dict
def pass_information_from_cmd(self, cmd):
    """Pass information for MA5"""
    # Remember the current process definitions for later MA5 card creation.
    self.proc_defs = cmd._curr_proc_defs
#===========================================================================
# Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz
#===========================================================================
def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
    """Function to finalize v4 directory, for inheritance."""
    # Daughter classes extend this; the base class only produces the run
    # card and the MadAnalysis5 cards.
    self.create_run_card(matrix_elements, history)
    self.create_MA5_cards(matrix_elements, history)
def create_MA5_cards(self, matrix_elements, history):
    """ A wrapper around the creation of the MA5 cards so that it can be
    bypassed by daughter classes (i.e. in standalone)."""
    if 'madanalysis5_path' in self.opt and not \
            self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
        # Gather the processes the default cards should be tailored to.
        processes = None
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]

        self.create_default_madanalysis5_cards(
            history, self.proc_defs, processes,
            self.opt['madanalysis5_path'], pjoin(self.dir_path, 'Cards'),
            levels=['hadron', 'parton'])

        for level in ['hadron', 'parton']:
            # Copying these cards turn on the use of MadAnalysis5 by default.
            default_card = pjoin(self.dir_path, 'Cards',
                                 'madanalysis5_%s_card_default.dat' % level)
            if os.path.isfile(default_card):
                shutil.copy(default_card,
                            pjoin(self.dir_path, 'Cards',
                                  'madanalysis5_%s_card.dat' % level))
463 464 #=========================================================================== 465 # Create the proc_characteristic file passing information to the run_interface 466 #===========================================================================
467 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
468 469 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
#===========================================================================
# write_matrix_element_v4
#===========================================================================
def write_matrix_element_v4(self):
    """Function to write a matrix.f file, for inheritance.
    """
    # Intentionally a no-op in the base class.
    pass
#===========================================================================
# write_pdf_opendata
#===========================================================================
def write_pdf_opendata(self):
    """ modify the pdf opendata file, to allow direct access to cluster node
    repository if configure"""

    def render(template_name, target_name, changer):
        # Render one Fortran template with the substitutions in 'changer'
        # and write it through the FortranWriter (which enforces the
        # Fortran fixed-format layout).
        ff = writers.FortranWriter(
            pjoin(self.dir_path, "Source", "PDF", target_name))
        # BUGFIX: read the template via a context manager so the handle is
        # closed deterministically (was a bare open(...).read()).
        with open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files",
                        template_name), "r") as ftemp:
            template = ftemp.read()
        ff.writelines(template % changer)

    if not self.opt["cluster_local_path"]:
        changer = {"pdf_systemwide": ""}
    else:
        to_add = """
tempname='%(path)s'//Tablefile
open(IU,file=tempname,status='old',ERR=1)
return
1 tempname='%(path)s/Pdfdata/'//Tablefile
open(IU,file=tempname,status='old',ERR=2)
return
2 tempname='%(path)s/lhapdf'//Tablefile
open(IU,file=tempname,status='old',ERR=3)
return
3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
open(IU,file=tempname,status='old',ERR=4)
return
4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
open(IU,file=tempname,status='old',ERR=5)
return
""" % {"path": self.opt["cluster_local_path"]}
        changer = {"pdf_systemwide": to_add}
    render("pdf_opendata.f", "opendata.f", changer)

    # Do the same for the lhapdf set
    if not self.opt["cluster_local_path"]:
        changer = {"cluster_specific_path": ""}
    else:
        to_add = """
LHAPath='%(path)s/PDFsets'
Inquire(File=LHAPath, exist=exists)
if(exists)return
LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
Inquire(File=LHAPath, exist=exists)
if(exists)return
LHAPath='%(path)s/../lhapdf/pdfsets/'
Inquire(File=LHAPath, exist=exists)
if(exists)return
LHAPath='./PDFsets'
""" % {"path": self.opt["cluster_local_path"]}
        changer = {"cluster_specific_path": to_add}
    render("pdf_wrap_lhapdf.f", "pdfwrap_lhapdf.f", changer)

    return
#===========================================================================
# write_maxparticles_file
#===========================================================================
def write_maxparticles_file(self, writer, matrix_elements):
    """Write the maxparticles.inc file for MadEvent"""

    # Accept either a HelasMultiProcess or a plain list of matrix elements.
    if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
        me_list = matrix_elements.get('matrix_elements')
    else:
        me_list = matrix_elements
    maxparticles = max(me.get_nexternal_ninitial()[0] for me in me_list)

    lines = "integer max_particles\n"
    lines += "parameter(max_particles=%d)" % maxparticles

    # Write the file
    writer.writelines(lines)

    return True
#===========================================================================
# export the model
#===========================================================================
def export_model_files(self, model_path):
    """Configure the files/link of the process according to the model"""

    # Copy every regular file of the model into Source/MODEL
    # (sub-directories are skipped).
    target = os.path.join(self.dir_path, 'Source', 'MODEL')
    for entry in os.listdir(model_path):
        candidate = os.path.join(model_path, entry)
        if os.path.isfile(candidate):
            shutil.copy2(candidate, target)
575 576 590 598 599 600 #=========================================================================== 601 # export the helas routine 602 #===========================================================================
def export_helas(self, helas_path):
    """Configure the files/link of the process according to the model"""

    # Import helas routines: 'Makefile.template' becomes the DHELAS
    # Makefile, a plain 'Makefile' is skipped, everything else is copied
    # verbatim into Source/DHELAS.
    for filename in os.listdir(helas_path):
        filepos = pjoin(helas_path, filename)
        if not os.path.isfile(filepos):
            continue
        if filepos.endswith('Makefile.template'):
            cp(filepos, self.dir_path + '/Source/DHELAS/Makefile')
        elif not filepos.endswith('Makefile'):
            cp(filepos, self.dir_path + '/Source/DHELAS')
#===========================================================================
# generate_subprocess_directory
#===========================================================================
def generate_subprocess_directory(self, matrix_element, fortran_model, me_number):
    """Routine to generate a subprocess directory (for inheritance)"""
    # Intentionally a no-op in the base class.
    pass
#===========================================================================
# get_source_libraries_list
#===========================================================================
def get_source_libraries_list(self):
    """ Returns the list of libraries to be compiling when compiling the
    SOURCE directory. It is different for loop_induced processes and
    also depends on the value of the 'output_dependencies' option"""
    names = ['dhelas', 'pdf', 'model', 'cernlib', 'bias']
    return ['$(LIBDIR)lib%s.$(libext)' % name for name in names]
#===========================================================================
# write_source_makefile
#===========================================================================
def write_source_makefile(self, writer):
    """Write the Source directory makefile for MG4 from its template.

    (BUGFIX: the previous docstring wrongly said this writes nexternal.inc.)
    Returns the replacement dictionary used to render the template.
    """

    path = pjoin(_file_path, 'iolibs', 'template_files', 'madevent_makefile_source')
    set_of_lib = ' '.join(['$(LIBRARIES)'] + self.get_source_libraries_list())
    # The mssm model family needs an extra translation step of the
    # param_card before the model library can be built.
    if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
        model_line = '''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
    else:
        model_line = '''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''

    replace_dict = {'libraries': set_of_lib,
                    'model': model_line,
                    'additional_dsample': '',
                    'additional_dependencies': ''}

    if writer:
        # BUGFIX: read the template via a context manager so the file
        # handle is closed deterministically (was a bare open(path).read()).
        with open(path) as template:
            text = template.read() % replace_dict
        writer.write(text)

    return replace_dict
#===========================================================================
# write_nexternal_madspin
#===========================================================================
def write_nexternal_madspin(self, writer, nexternal, ninitial):
    """Write the nexternal_prod.inc file for madspin"""

    replace_dict = {'nexternal': nexternal,
                    'ninitial': ninitial}

    text = """ \
  integer nexternal_prod
  parameter (nexternal_prod=%(nexternal)d)
  integer nincoming_prod
  parameter (nincoming_prod=%(ninitial)d)""" % replace_dict

    # Write the file when a writer is given, otherwise hand back the
    # substitutions so the caller can render its own template.
    if writer:
        writer.writelines(text)
        return True
    else:
        return replace_dict
#===========================================================================
# write_helamp_madspin
#===========================================================================
def write_helamp_madspin(self, writer, ncomb):
    """Write the helamp.inc file for madspin"""

    replace_dict = {'ncomb': ncomb}

    text = """ \
  integer ncomb1
  parameter (ncomb1=%(ncomb)d)
  double precision helamp(ncomb1)
  common /to_helamp/helamp """ % replace_dict

    # Write the file when a writer is given, otherwise hand back the
    # substitutions so the caller can render its own template.
    if writer:
        writer.writelines(text)
        return True
    else:
        return replace_dict
#===========================================================================
# write_nexternal_file
#===========================================================================
def write_nexternal_file(self, writer, nexternal, ninitial):
    """Write the nexternal.inc file for MG4"""

    replace_dict = {'nexternal': nexternal,
                    'ninitial': ninitial}

    text = """ \
  integer nexternal
  parameter (nexternal=%(nexternal)d)
  integer nincoming
  parameter (nincoming=%(ninitial)d)""" % replace_dict

    # Write the file when a writer is given, otherwise hand back the
    # substitutions so the caller can render its own template.
    if writer:
        writer.writelines(text)
        return True
    else:
        return replace_dict
#===========================================================================
# write_pmass_file
#===========================================================================
def write_pmass_file(self, writer, matrix_element):
    """Write the pmass.inc file for MG4"""

    model = matrix_element.get('processes')[0].get('model')

    lines = []
    for wavefunction in matrix_element.get_external_wavefunctions():
        particle = model.get('particle_dict')[wavefunction.get('pdg_code')]
        mass = particle.get('mass')
        # Massive particles are wrapped in abs() so that negative-mass
        # conventions still yield a positive kinematic mass.
        if mass.lower() != "zero":
            mass = "abs(%s)" % mass
        lines.append("pmass(%d)=%s" %
                     (wavefunction.get('number_external'), mass))

    # Write the file
    writer.writelines(lines)

    return True
#===========================================================================
# write_ngraphs_file
#===========================================================================
def write_ngraphs_file(self, writer, nconfigs):
    """Write the ngraphs.inc file for MG4. Needs input from
    write_configs_file."""

    text = " integer n_max_cg\n"
    text += "parameter (n_max_cg=%d)" % nconfigs

    # Write the file
    writer.writelines(text)

    return True
#===========================================================================
# write_leshouche_file
#===========================================================================
def write_leshouche_file(self, writer, matrix_element):
    """Write the leshouche.inc file for MG4"""

    # Delegate the formatting to get_leshouche_lines (subprocess index 0).
    lines = self.get_leshouche_lines(matrix_element, 0)
    writer.writelines(lines)

    return True
#===========================================================================
# get_leshouche_lines
#===========================================================================
def get_leshouche_lines(self, matrix_element, numproc):
    """Write the leshouche.inc file for MG4"""

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    lines = []
    for iproc, proc in enumerate(matrix_element.get('processes')):
        legs = proc.get_legs_with_decays()
        # PDG codes of all external legs for this (sub)process.
        lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" %
                     (iproc + 1, numproc + 1, nexternal,
                      ",".join([str(l.get('id')) for l in legs])))
        if iproc == 0 and numproc == 0:
            # Mothers: initial-state legs have none, final-state legs all
            # point back to the initial state.
            for i in [1, 2]:
                lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" %
                             (i, nexternal,
                              ",".join(["%3r" % 0] * ninitial +
                                       ["%3r" % i] * (nexternal - ninitial))))

        # Here goes the color connections corresponding to the JAMPs
        # Only one output, for the first subproc!
        if iproc == 0:
            if not matrix_element.get('color_basis'):
                # If no color basis, just output trivial color flow
                for i in [1, 2]:
                    lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" %
                                 (i, numproc + 1, nexternal,
                                  ",".join(["%3r" % 0] * nexternal)))
            else:
                # First build a color representation dictionnary
                repr_dict = {}
                for l in legs:
                    repr_dict[l.get('number')] = \
                        proc.get('model').get_particle(l.get('id')).get_color() \
                        * (-1) ** (1 + l.get('state'))
                # Get the list of color flows
                color_flow_list = \
                    matrix_element.get('color_basis').color_flow_decomposition(
                        repr_dict, ninitial)
                # And output them properly
                for cf_i, color_flow_dict in enumerate(color_flow_list):
                    for i in [0, 1]:
                        lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" %
                                     (i + 1, cf_i + 1, numproc + 1, nexternal,
                                      ",".join(["%3r" % color_flow_dict[l.get('number')][i]
                                                for l in legs])))

    return lines
860 - def write_maxamps_file(self, writer, maxamps, maxflows, 861 maxproc,maxsproc):
862 """Write the maxamps.inc file for MG4.""" 863 864 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 865 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 866 (maxamps, maxflows) 867 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 868 (maxproc, maxsproc) 869 870 # Write the file 871 writer.writelines(file) 872 873 return True
874 875 876 #=========================================================================== 877 # Routines to output UFO models in MG4 format 878 #=========================================================================== 879
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO)

        Builds Source/MODEL (parameters and couplings via UFO_model_to_mg4)
        and Source/DHELAS (ALOHA helicity routines) under self.dir_path.
        wanted_lorentz / wanted_couplings restrict output to the structures
        actually needed; empty lists mean "compute everything".
        NOTE: the mutable list defaults are never mutated here, so they are
        harmless in practice.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']
        self.model = model
        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        # NOTE(review): `self.opt + self.proc_characteristic` assumes these
        # option objects support concatenation (they are not plain dicts) --
        # confirm against their class definitions.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            try:
                with misc.MuteLogger(['madgraph.models'], [60]):
                    aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
            except ImportError:
                # Older aloha versions take the full model path directly.
                aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # Lorentz structures with a tag starting with 'L' are loop structures
        # and require the loop-aware aloha_functions implementation.
        if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
941 942 943 #=========================================================================== 944 # Helper functions 945 #===========================================================================
946 - def modify_grouping(self, matrix_element):
947 """allow to modify the grouping (if grouping is in place) 948 return two value: 949 - True/False if the matrix_element was modified 950 - the new(or old) matrix element""" 951 952 return False, matrix_element
953 954 #=========================================================================== 955 # Helper functions 956 #===========================================================================
957 - def get_mg5_info_lines(self):
958 """Return info lines for MG5, suitable to place at beginning of 959 Fortran files""" 960 961 info = misc.get_pkg_info() 962 info_lines = "" 963 if info and 'version' in info and 'date' in info: 964 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 965 (info['version'], info['date']) 966 info_lines = info_lines + \ 967 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 968 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 969 else: 970 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 971 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 972 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 973 974 return info_lines
975
976 - def get_process_info_lines(self, matrix_element):
977 """Return info lines describing the processes for this matrix element""" 978 979 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 980 for process in matrix_element.get('processes')])
981 982
983 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
984 """Return the Helicity matrix definition lines for this matrix element""" 985 986 helicity_line_list = [] 987 i = 0 988 for helicities in matrix_element.get_helicity_matrix(): 989 i = i + 1 990 int_list = [i, len(helicities)] 991 int_list.extend(helicities) 992 helicity_line_list.append(\ 993 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 994 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 995 996 return "\n".join(helicity_line_list)
997
998 - def get_ic_line(self, matrix_element):
999 """Return the IC definition line coming after helicities, required by 1000 switchmom in madevent""" 1001 1002 nexternal = matrix_element.get_nexternal_ninitial()[0] 1003 int_list = list(range(1, nexternal + 1)) 1004 1005 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 1006 ",".join([str(i) for \ 1007 i in int_list]))
1008
1009 - def set_chosen_SO_index(self, process, squared_orders):
1010 """ From the squared order constraints set by the user, this function 1011 finds what indices of the squared_orders list the user intends to pick. 1012 It returns this as a string of comma-separated successive '.true.' or 1013 '.false.' for each index.""" 1014 1015 user_squared_orders = process.get('squared_orders') 1016 split_orders = process.get('split_orders') 1017 1018 if len(user_squared_orders)==0: 1019 return ','.join(['.true.']*len(squared_orders)) 1020 1021 res = [] 1022 for sqsos in squared_orders: 1023 is_a_match = True 1024 for user_sqso, value in user_squared_orders.items(): 1025 if (process.get_squared_order_type(user_sqso) =='==' and \ 1026 value!=sqsos[split_orders.index(user_sqso)]) or \ 1027 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1028 value<sqsos[split_orders.index(user_sqso)]) or \ 1029 (process.get_squared_order_type(user_sqso) == '>' and \ 1030 value>=sqsos[split_orders.index(user_sqso)]): 1031 is_a_match = False 1032 break 1033 res.append('.true.' if is_a_match else '.false.') 1034 1035 return ','.join(res)
1036
1037 - def get_split_orders_lines(self, orders, array_name, n=5):
1038 """ Return the split orders definition as defined in the list orders and 1039 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1040 1041 ret_list = [] 1042 for index, order in enumerate(orders): 1043 for k in range(0, len(order), n): 1044 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1045 (array_name,index + 1, k + 1, min(k + n, len(order)), 1046 ','.join(["%5r" % i for i in order[k:k + n]]))) 1047 return ret_list
1048
1049 - def format_integer_list(self, list, name, n=5):
1050 """ Return an initialization of the python list in argument following 1051 the fortran syntax using the data keyword assignment, filling an array 1052 of name 'name'. It splits rows in chunks of size n.""" 1053 1054 ret_list = [] 1055 for k in range(0, len(list), n): 1056 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1057 (name, k + 1, min(k + n, len(list)), 1058 ','.join(["%5r" % i for i in list[k:k + n]]))) 1059 return ret_list
1060
    def get_color_data_lines(self, matrix_element, n=6):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Each row of the color matrix yields one Denom(i) DATA line (common
        denominator), the CF(i,j) numerator lines in chunks of n, and a
        Fortran comment showing the corresponding color-basis string."""

        if not matrix_element.get('color_matrix'):
            # Colorless process: trivial 1x1 color matrix.
            return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
        else:
            ret_list = []
            my_cs = color.ColorString()
            for index, denominator in \
                enumerate(matrix_element.get('color_matrix').\
                                                 get_line_denominators()):
                # First write the common denominator for this color matrix line
                ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                            get_line_numerators(index, denominator)

                # Numerators must be exact integers once the common
                # denominator is factored out.
                assert all([int(i)==i for i in num_list])

                for k in range(0, len(num_list), n):
                    ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                    (index + 1, k + 1, min(k + n, len(num_list)),
                                     ','.join(["%5i" % int(i) for i in num_list[k:k + n]])))
                # Comment line documenting which color structure this row is.
                my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
                ret_list.append("C %s" % repr(my_cs))

            return ret_list
1089 1090
1091 - def get_den_factor_line(self, matrix_element):
1092 """Return the denominator factor line for this matrix element""" 1093 1094 return "DATA IDEN/%2r/" % \ 1095 matrix_element.get_denominator_factor()
1096
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        Only leading-color flows (Nc power equal to max_Nc, loop trace
        excluded) are marked .true. for a config."""

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Crate dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    # diag_tuple[0] is the 0-based diagram index.
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # Total number of JAMPs = last loop index + 1 (the color basis is
        # guaranteed non-empty here, so ijamp is defined).
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            # NOTE(review): assumes every mapped diagram has at least one
            # leading-color flow (otherwise diag_jamp[num_diag] raises
            # KeyError) -- confirm this invariant holds upstream.
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                                (iconfig+1, num_matrix_element, colamps,
                                 ','.join(["%s" % booldict[b] for b in \
                                           bool_list])))

        return ret_list
1155
    def get_amp2_lines(self, matrix_element, config_map = []):
        """Return the amp2(i) = sum(amp for diag(i))^2 lines

        With a config_map, amplitudes of diagrams sharing a topology are
        summed coherently before squaring; without one, each diagram's
        amplitudes are squared individually (skipping multi-particle-vertex
        diagrams)."""

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        # Get minimum legs in a vertex
        vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                     matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        ret_lines = []
        if config_map:
            # In this case, we need to sum up all amplitudes that have
            # identical topologies, as given by the config_map (which
            # gives the topology/config for each of the diagrams
            diagrams = matrix_element.get('diagrams')
            # Combine the diagrams with identical topologies
            config_to_diag_dict = {}
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # config 0 means "not mapped to any topology" -- skip.
                if config_map[idiag] == 0:
                    continue
                try:
                    config_to_diag_dict[config_map[idiag]].append(idiag)
                except KeyError:
                    config_to_diag_dict[config_map[idiag]] = [idiag]
            # Write out the AMP2s summing squares of amplitudes belonging
            # to eiher the same diagram or different diagrams with
            # identical propagator properties.  Note that we need to use
            # AMP2 number corresponding to the first diagram number used
            # for that AMP2.
            for config in sorted(config_to_diag_dict.keys()):

                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                       {"num": (config_to_diag_dict[config][0] + 1)}

                amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                                sum([diagrams[idiag].get('amplitudes') for \
                                     idiag in config_to_diag_dict[config]], [])])

                # Not using \sum |M|^2 anymore since this creates troubles
                # when ckm is not diagonal due to the JIM mechanism.
                if '+' in amp:
                    line += "(%s)*dconjg(%s)" % (amp, amp)
                else:
                    line += "%s*dconjg(%s)" % (amp, amp)
                ret_lines.append(line)
        else:
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # Ignore any diagrams with 4-particle vertices.
                if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                    continue
                # Now write out the expression for AMP2, meaning the sum of
                # squared amplitudes belonging to the same diagram
                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
                line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                                  {"num": a.get('number')} for a in \
                                  diag.get('amplitudes')])
                ret_lines.append(line)

        return ret_lines
1215 1216 #=========================================================================== 1217 # Returns the data statements initializing the coeffictients for the JAMP 1218 # decomposition. It is used when the JAMP initialization is decided to be 1219 # done through big arrays containing the projection coefficients. 1220 #===========================================================================
1221 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1222 n=50, Nc_value=3):
1223 """This functions return the lines defining the DATA statement setting 1224 the coefficients building the JAMPS out of the AMPS. Split rows in 1225 bunches of size n. 1226 One can specify the color_basis from which the color amplitudes originates 1227 so that there are commentaries telling what color structure each JAMP 1228 corresponds to.""" 1229 1230 if(not isinstance(color_amplitudes,list) or 1231 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1232 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs") 1233 1234 res_list = [] 1235 my_cs = color.ColorString() 1236 for index, coeff_list in enumerate(color_amplitudes): 1237 # Create the list of the complete numerical coefficient. 1238 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1239 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1240 coefficient in coeff_list] 1241 # Create the list of the numbers of the contributing amplitudes. 1242 # Mutliply by -1 for those which have an imaginary coefficient. 1243 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1244 for coefficient in coeff_list] 1245 # Find the common denominator. 
1246 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1247 num_list=[(coefficient*commondenom).numerator \ 1248 for coefficient in coefs_list] 1249 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1250 index+1,len(num_list))) 1251 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1252 index+1,commondenom)) 1253 if color_basis: 1254 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1255 res_list.append("C %s" % repr(my_cs)) 1256 for k in range(0, len(num_list), n): 1257 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1258 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1259 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1260 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1261 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1262 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1263 pass 1264 return res_list
1265 1266
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                                   split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code.

        NOTE(review): AMP_format is accepted but not forwarded to
        get_JAMP_lines below -- confirm whether that is intentional."""

        # Let the user call get_JAMP_lines_split_order directly from a
        # NOTE: the % here binds only to the second string literal, so after
        # this line error_msg still contains exactly one '%s' placeholder,
        # filled with the offending argument name below.  Works, but fragile.
        error_msg="Malformed '%s' argument passed to the "+\
                  "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error(error_msg%'col_amps')
        else:
            raise MadGraph5Error(error_msg%'col_amps')

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error(error_msg%'split_order_amps')
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error(error_msg%'split_order_amps')
        else:
            raise MadGraph5Error(error_msg%'split_order_amps')

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error(error_msg%'split_order_names')
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error(error_msg%'split_order_names')
            else:
                raise MadGraph5Error(error_msg%'split_order_names')

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        for i, amp_order in enumerate(split_order_amps):
            # Keep, per JAMP, only the contributions whose amplitude number
            # belongs to this order group (amp_order[1]).
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]])
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                    ['%s=%i'%order for order in zip(split_order_names,
                                                    amp_order[0])]))
            # matchbox output additionally needs the leading-color JAMPs.
            if self.opt['export_format'] in ['madloop_matchbox']:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                   JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                   JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
            else:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                   JAMP_format="JAMP(%s,{0})".format(str(i+1))))

        return res_list
1340 1341
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary,
        Jamp_formatLC should be define to allow to add LeadingColor computation
        (usefull for MatchBox)
        The split argument defines how the JAMP lines should be split in order
        not to be too long.

        Each color-amplitude entry is a (coefficient, amp_number) pair where
        coefficient = (ff_number, fraction, is_imaginary, Nc_power), the same
        convention expected by self.coeff."""

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")
        else:
            raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")


        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.
            first=True
            # split<=0 means "never split": one chunk covering everything.
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation chunks accumulate onto the JAMP defined by the
                # first chunk ("JAMP(i)=JAMP(i)...").
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    if common_factor:
                        # The magnitude was factored out above; only the sign
                        # of the fraction remains per term.
                        res = (res + "%s" + AMP_format) % \
                              (self.coeff(coefficient[0],
                                          coefficient[1] / abs(coefficient[1]),
                                          coefficient[2],
                                          coefficient[3]),
                               str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                      coefficient[1],
                                                                      coefficient[2],
                                                                      coefficient[3]),
                                                           str(amp_number))

                if common_factor:
                    res = res + ')'

                res_list.append(res)

        return res_list
1418
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file

        Returns a 3-tuple of Fortran snippets (each without trailing
        newline): the DOUBLE PRECISION declarations of the per-beam PDF
        variables, their DATA initialization, and the code evaluating the
        PDFs and summing them into PD(0..IPROC).  For decays (ninitial==1)
        the PDFs are trivially 1."""

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDF convolution, every subprocess weight is 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names: particle name with ~/+/- mapped to
            # the Fortran-safe characters x/p/m.
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                        ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                  for pdg in \
                                                  initial_states[i]]) + \
                                        "\n"

            # Get PDF data lines for all initial states (default all to 1D0).
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                  "/%d*1D0/" % len(initial_states[i]) + \
                                  "\n"

            # Get PDF lines for all different initial states; the PDG2PDF
            # call is only made when the beam is a hadron (ABS(LPP)>=1).
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    # Subprocess groups address beams through the IB() map.
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in list(pdf_codes.keys()):
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1,
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in list(pdf_codes.keys()):
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1525 1526 #=========================================================================== 1527 # write_props_file 1528 #===========================================================================
    def write_props_file(self, writer, matrix_element, s_and_t_channels):
        """Write the props.inc file for MadEvent. Needs input from
        write_configs_file.

        For every propagator of every config, emits prmass/prwidth/pow DATA
        lines (mass, width and propagator power, the latter 1 for fermions
        and 2 for bosons)."""

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        for iconf, configs in enumerate(s_and_t_channels):
            # configs[0] are the s-channel vertices, configs[1] the t-channel
            # ones; the last t-channel vertex carries no propagator.
            for vertex in configs[0] + configs[1][:-1]:
                leg = vertex.get('legs')[-1]
                if leg.get('id') not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    particle = particle_dict[leg.get('id')]
                    # Get mass
                    if particle.get('mass').lower() == 'zero':
                        mass = particle.get('mass')
                    else:
                        # abs() guards against negative mass parameters.
                        mass = "abs(%s)" % particle.get('mass')
                    # Get width
                    if particle.get('width').lower() == 'zero':
                        width = particle.get('width')
                    else:
                        width = "abs(%s)" % particle.get('width')

                    # Propagator power: 1 for fermions, 2 for bosons.
                    pow_part = 1 + int(particle.is_boson())

                lines.append("prmass(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, mass))
                lines.append("prwidth(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, width))
                lines.append("pow(%d,%d) = %d" % \
                             (leg.get('number'), iconf + 1, pow_part))

        # Write the file
        writer.writelines(lines)

        return True
1572 1573 #=========================================================================== 1574 # write_configs_file 1575 #===========================================================================
1576 - def write_configs_file(self, writer, matrix_element):
1577 """Write the configs.inc file for MadEvent""" 1578 1579 # Extract number of external particles 1580 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1581 1582 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1583 mapconfigs = [c[0] for c in configs] 1584 model = matrix_element.get('processes')[0].get('model') 1585 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1586 [[c[1]] for c in configs], 1587 mapconfigs, 1588 nexternal, ninitial, 1589 model)
1590 1591 #=========================================================================== 1592 # write_configs_file_from_diagrams 1593 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channel, t-channel) vertex lists per written
        config (consumed later by write_props_file)."""

        lines = []

        s_and_t_channels = []

        # Smallest maximum-vertex-size over all configs; configs containing
        # any larger (multi-particle) vertex are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Dummy PDG code used to label the propagators of shrunk loops /
        # fake vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    # Shared placeholder, resized once the real length is known.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are per-subprocess tuples; t-channel
                # entries are single vertices.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess (0 when the
                    # subprocess has no diagram for this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1712 1713 #=========================================================================== 1714 # Global helper methods 1715 #=========================================================================== 1716
1717 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1718 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1719 1720 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1721 1722 if total_coeff == 1: 1723 if is_imaginary: 1724 return '+imag1*' 1725 else: 1726 return '+' 1727 elif total_coeff == -1: 1728 if is_imaginary: 1729 return '-imag1*' 1730 else: 1731 return '-' 1732 1733 res_str = '%+iD0' % total_coeff.numerator 1734 1735 if total_coeff.denominator != 1: 1736 # Check if total_coeff is an integer 1737 res_str = res_str + '/%iD0' % total_coeff.denominator 1738 1739 if is_imaginary: 1740 res_str = res_str + '*imag1' 1741 1742 return res_str + '*'
1743 1744
    def set_fortran_compiler(self, default_compiler, force=False):
        """Pick a Fortran (and f2py) compiler available on this system and
        record the choice in the Source/make_opts files.

        default_compiler -- dict with keys 'fortran' and 'f2py' holding the
            user-requested compiler names (each possibly empty).
        force -- accepted for interface compatibility; not read here.

        Returns the selected Fortran compiler name.
        Raises MadGraph5Error when no Fortran compiler can be found and no
        user default was given.
        """

        # Check for compiler: prefer the requested one, then common defaults.
        if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
            f77_compiler = default_compiler['fortran']
        elif misc.which('gfortran'):
            f77_compiler = 'gfortran'
        elif misc.which('g77'):
            f77_compiler = 'g77'
        elif misc.which('f77'):
            f77_compiler = 'f77'
        elif default_compiler['fortran']:
            # Requested compiler not found on PATH, but misc.which can itself
            # fail: warn and fall back to the requested name anyway.
            logger.warning('No Fortran Compiler detected! Please install one')
            f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it
        else:
            raise MadGraph5Error('No Fortran Compiler detected! Please install one')
        logger.info('Use Fortran compiler ' + f77_compiler)


        # f2py selection. 1. start from the user default (may be empty).
        if default_compiler['f2py']:
            f2py_compiler = default_compiler['f2py']
        else:
            f2py_compiler = ''
        # 2. try to find a working executable on PATH.
        if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
            f2py_compiler = default_compiler['f2py']
        elif misc.which('f2py'):
            f2py_compiler = 'f2py'
        elif sys.version_info[1] == 6:
            # NOTE(review): only the *minor* version is tested here and the
            # 'f2py-2.6'/'f2py2.6' names date from python2 — confirm this
            # branch is still the intended behaviour under python3.
            if misc.which('f2py-2.6'):
                f2py_compiler = 'f2py-2.6'
            elif misc.which('f2py2.6'):
                f2py_compiler = 'f2py2.6'
        elif sys.version_info[1] == 7:
            if misc.which('f2py-2.7'):
                f2py_compiler = 'f2py-2.7'
            elif misc.which('f2py2.7'):
                f2py_compiler = 'f2py2.7'

        to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}

        # Propagate the choice into the current output directory.
        self.replace_make_opt_f_compiler(to_replace)
        # Replace also for Template but not for cluster runs (MADGRAPH_DATA set)
        if 'MADGRAPH_DATA' not in os.environ and ReadWrite:
            self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

        return f77_compiler
1795 1796 # an alias for backward compatibility 1797 set_compiler = set_fortran_compiler 1798 1799
    def set_cpp_compiler(self, default_compiler, force=False):
        """Pick a C++ compiler available on this system and record it in the
        make_opts files.

        default_compiler -- user-requested compiler name (possibly empty).
        force -- accepted for interface compatibility; not read here.

        Returns the selected compiler name.
        Raises MadGraph5Error when none can be found and no default was given.
        """

        # Check for compiler: requested one first, then common defaults.
        if default_compiler and misc.which(default_compiler):
            compiler = default_compiler
        elif misc.which('g++'):
            # 'g++' may actually be clang in disguise (macOS): inspect the
            # version banner and prefer a real 'clang' binary in that case.
            p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
            out, _ = p.communicate()
            out = out.decode()
            if 'clang' in str(out) and misc.which('clang'):
                compiler = 'clang'
            else:
                compiler = 'g++'
        elif misc.which('c++'):
            compiler = 'c++'
        elif misc.which('clang'):
            compiler = 'clang'
        elif default_compiler:
            # Requested compiler not found on PATH, but misc.which can itself
            # fail: warn and try the requested name anyway.
            logger.warning('No c++ Compiler detected! Please install one')
            compiler = default_compiler # maybe misc fail so try with it
        else:
            raise MadGraph5Error('No c++ Compiler detected! Please install one')
        logger.info('Use c++ compiler ' + compiler)
        self.replace_make_opt_c_compiler(compiler)
        # Replace also for Template but not for cluster runs; skip in debug
        # mode and on release-building checkouts.
        if 'MADGRAPH_DATA' not in os.environ and ReadWrite and \
           not __debug__ and not os.path.exists(pjoin(MG5DIR, 'bin', 'create_release.py')):
            self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO'))

        return compiler
1833 1834
1835 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1836 """Set FC=compiler in Source/make_opts""" 1837 1838 assert isinstance(compilers, dict) 1839 1840 mod = False #avoid to rewrite the file if not needed 1841 if not root_dir: 1842 root_dir = self.dir_path 1843 1844 compiler= compilers['fortran'] 1845 f2py_compiler = compilers['f2py'] 1846 if not f2py_compiler: 1847 f2py_compiler = 'f2py' 1848 for_update= {'DEFAULT_F_COMPILER':compiler, 1849 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1850 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1851 1852 try: 1853 common_run_interface.CommonRunCmd.update_make_opts_full( 1854 make_opts, for_update) 1855 except IOError: 1856 if root_dir == self.dir_path: 1857 logger.info('Fail to set compiler. Trying to continue anyway.')
1858
1859 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1860 """Set CXX=compiler in Source/make_opts. 1861 The version is also checked, in order to set some extra flags 1862 if the compiler is clang (on MACOS)""" 1863 1864 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1865 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1866 1867 1868 # list of the variable to set in the make_opts file 1869 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1870 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1871 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1872 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1873 } 1874 1875 # for MOJAVE remove the MACFLAG: 1876 if is_clang: 1877 import platform 1878 version, _, _ = platform.mac_ver() 1879 if not version:# not linux 1880 version = 14 # set version to remove MACFLAG 1881 else: 1882 version = int(version.split('.')[1]) 1883 if version >= 14: 1884 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1885 1886 if not root_dir: 1887 root_dir = self.dir_path 1888 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1889 1890 try: 1891 common_run_interface.CommonRunCmd.update_make_opts_full( 1892 make_opts, for_update) 1893 except IOError: 1894 if root_dir == self.dir_path: 1895 logger.info('Fail to set compiler. Trying to continue anyway.') 1896 1897 return
1898
#===============================================================================
# ProcessExporterFortranSA
#===============================================================================
class ProcessExporterFortranSA(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 StandAlone format."""

    # Fortran template used by write_matrix_element_v4 for matrix.f; daughter
    # classes and special export formats override/replace it.
    matrix_template = "matrix_standalone_v4.inc"
1908 - def __init__(self, *args,**opts):
1909 """add the format information compare to standard init""" 1910 1911 if 'format' in opts: 1912 self.format = opts['format'] 1913 del opts['format'] 1914 else: 1915 self.format = 'standalone' 1916 1917 self.prefix_info = {} 1918 ProcessExporterFortran.__init__(self, *args, **opts)
1919
    def copy_template(self, model):
        """Create and populate the standalone output directory tree from
        Template/LO (no-op when the directory already exists).

        model -- accepted for interface compatibility; not read here.
        """

        # First copy the full template tree if dir_path doesn't exist
        if os.path.isdir(self.dir_path):
            return

        logger.info('initialize a new standalone directory: %s' % \
                        os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')

        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))

        # Information at top-level
        # Write version info
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # Not running from an MG4-style installation: synthesize the
            # version file from the MG5 package info.
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])


        # Add file in SubProcesses (per-P-directory makefile template)
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefileP'))

        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                        pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FileWriter(filename))
1968 1969 #=========================================================================== 1970 # export model files 1971 #===========================================================================
    def export_model_files(self, model_path):
        """Export the model dependent files for a V4 model, then append the
        update_as_param() hook to couplings.f and adapt check_sa.f so the
        param_card is fully (re)loaded on the first call."""

        super(ProcessExporterFortranSA, self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text = """
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a')
        ff.write(text)
        ff.close()

        # Make check_sa.f call setpara with full (re-)initialisation.
        text = open(pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
1995 1996 #=========================================================================== 1997 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1998 #===========================================================================
1999 - def write_procdef_mg5(self, file_pos, modelname, process_str):
2000 """ write an equivalent of the MG4 proc_card in order that all the Madevent 2001 Perl script of MadEvent4 are still working properly for pure MG5 run. 2002 Not needed for StandAlone so just return 2003 """ 2004 2005 return
2006 2007 2008 #=========================================================================== 2009 # Make the Helas and Model directories for Standalone directory 2010 #===========================================================================
2011 - def make(self):
2012 """Run make in the DHELAS and MODEL directories, to set up 2013 everything for running standalone 2014 """ 2015 2016 source_dir = pjoin(self.dir_path, "Source") 2017 logger.info("Running make for Helas") 2018 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2019 logger.info("Running make for Model") 2020 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2021 2022 #=========================================================================== 2023 # Create proc_card_mg5.dat for Standalone directory 2024 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
        generating proc_card_mg5.dat, compiling the libraries and writing a
        global makefile (or the f2py glue when prefixed output is used).
        """

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
        # Touch empty __init__.py files so the output is importable as a
        # python package (handles are left to the GC to close).
        open(pjoin(self.dir_path, '__init__.py'), 'w')
        open(pjoin(self.dir_path, 'SubProcesses', '__init__.py'), 'w')

        # Dead branch kept deliberately disabled (reweight-mode plumbing).
        if False:  # 'mode' in self.opt and self.opt['mode'] == "reweight":
            # add the module to handle the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # Prefixed matrix elements: emit the f2py dispatcher instead of
            # the plain per-directory makefile.
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                                     pjoin(self.dir_path, 'SubProcesses', 'check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            for Pdir in os.listdir(pjoin(self.dir_path, 'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n' % {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text += 'all: %s\n\techo \'done\'' % deppython

            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'), 'a')
            ff.write(text)
            ff.close()
2074
    def write_f2py_splitter(self):
        """Write SubProcesses/all_matrix.f: an f2py-friendly dispatcher that
        routes (pdgs, procid) to the correct prefixed smatrixhel routine and
        exposes parameter/PDG-order helper subroutines.

        NOTE(review): the Fortran layout below is reflowed by FortranWriter
        on output, so only the token content of the template is significant.
        """

        # %-template for the generated Fortran file; filled from `formatting`.
        template = """
      %(python_information)s
      subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in):: procid
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel, procid
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
      if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END

      subroutine CHANGE_PARA(name, value)
      implicit none
CF2PY intent(in) :: name
CF2PY intent(in) :: value

      character*512 name
      double precision value

      include '../Source/MODEL/input.inc'
      include '../Source/MODEL/coupl.inc'

      SELECT CASE (name)
%(parameter_setup)s
      CASE DEFAULT
      write(*,*) 'no parameter matching', name, value
      END SELECT

      return
      end

      subroutine update_all_coup()
      implicit none
      call coup()
      return
      end

      subroutine get_pdg_order(PDG, ALLPROC)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i)
      DATA PDGS/
%(pdgs)s /
      DATA PIDS/ %(pids)s /
      PDG = PDGS
      ALLPROC = PIDS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END
      """

        # prefix_info: {(pdg-id tuple, process id): [prefix, tag]}
        allids = list(self.prefix_info.keys())
        allprefix = [self.prefix_info[key][0] for key in allids]
        min_nexternal = min([len(ids[0]) for ids in allids])
        max_nexternal = max([len(ids[0]) for ids in allids])

        # Human-readable mapping written as comments at the top of the file.
        info = []
        for (key, pid), (prefix, tag) in self.prefix_info.items():
            info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid))

        # Build the dispatch: one branch per multiplicity, then one branch
        # per (pdg-list, process-id) combination.
        text = []
        for n_ext in range(min_nexternal, max_nexternal + 1):
            current_id = [ids[0] for ids in allids if len(ids[0]) == n_ext]
            current_pid = [ids[1] for ids in allids if len(ids[0]) == n_ext]
            if not current_id:
                continue
            if min_nexternal != max_nexternal:
                if n_ext == min_nexternal:
                    text.append(' if (npdg.eq.%i)then' % n_ext)
                else:
                    text.append(' else if (npdg.eq.%i)then' % n_ext)
            for ii, pdgs in enumerate(current_id):
                pid = current_pid[ii]
                condition = '.and.'.join(['%i.eq.pdgs(%i)' % (pdg, i + 1) for i, pdg in enumerate(pdgs)])
                if ii == 0:
                    text.append(' if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
                else:
                    text.append(' else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
                text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs, pid)][0])
            text.append(' endif')
        # close the function
        if min_nexternal != max_nexternal:
            text.append('endif')

        # One Fortran CASE per settable model parameter.
        params = self.get_model_parameter(self.model)
        parameter_setup = []
        for key, var in params.items():
            parameter_setup.append(' CASE ("%s")\n %s = value'
                                   % (key, var))

        formatting = {'python_information': '\n'.join(info),
                      'smatrixhel': '\n'.join(text),
                      'maxpart': max_nexternal,
                      'nb_me': len(allids),
                      # Fortran DATA fills column-major: loop particles inside
                      # the position index, padding short processes with 0.
                      'pdgs': ','.join(str(pdg[i]) if i < len(pdg) else '0'
                                       for i in range(max_nexternal) for (pdg, pid) in allids),
                      'prefix': '\',\''.join(allprefix),
                      'pids': ','.join(str(pid) for (pdg, pid) in allids),
                      'parameter_setup': '\n'.join(parameter_setup),
                      }
        formatting['lenprefix'] = len(formatting['prefix'])
        text = template % formatting
        fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'), 'w')
        fsock.writelines(text)
        fsock.close()
2223
2224 - def get_model_parameter(self, model):
2225 """ returns all the model parameter 2226 """ 2227 params = {} 2228 for p in model.get('parameters')[('external',)]: 2229 name = p.name 2230 nopref = name[4:] if name.startswith('mdl_') else name 2231 params[nopref] = name 2232 2233 block = p.lhablock 2234 lha = '_'.join([str(i) for i in p.lhacode]) 2235 params['%s_%s' % (block.upper(), lha)] = name 2236 2237 return params
2238 2239 2240 2241 2242
2243 - def write_f2py_check_sa(self, matrix_element, writer):
2244 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2245 # To be implemented. It is just an example file, i.e. not crucial. 2246 return
2247
2248 - def write_f2py_makefile(self):
2249 """ """ 2250 # Add file in SubProcesses 2251 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2252 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2253
2254 - def create_MA5_cards(self,*args,**opts):
2255 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2256 pass
2257
    def compiler_choice(self, compiler):
        """ Different daughter classes might want different compilers.
        So this function is meant to be overloaded if desired.

        compiler -- dict with 'fortran'/'cpp'/'f2py' entries as built by
            finalize(); forwarded to the backward-compatible set_compiler
            alias (i.e. set_fortran_compiler)."""

        self.set_compiler(compiler)
2263 2264 #=========================================================================== 2265 # generate_subprocess_directory 2266 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.

        Returns the number of generated helas calls (0 when the directory is
        skipped because a mirror-symmetric one already exists).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                        "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if a directory for any initial-state
            # swapped / final-state permuted version of one of the processes
            # already exists, skip this one entirely.
            for i, proc in enumerate(matrix_element.get('processes')):

                tag = proc.get_tag()
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                if not leg1.get('state'):
                    # both legs initial-state: probe with them swapped
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                flegs = proc.get('legs')[2:]
                for perm in itertools.permutations(flegs):
                    for i, p in enumerate(perm):
                        proc.get('legs')[i + 2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                                     "P%s" % proc.shell_string())
                    # restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional routine prefix (--prefix=int|proc); also records the
        # process ids for the f2py dispatcher written at finalize time.
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_', 1)[1]
            else:
                raise Exception('--prefix options supports only \'int\' and \'proc\'')
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[(tuple(ids), proc.get('id'))] = [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        if self.opt['export_format'] == 'standalone_msP':
            # MadSpin production output needs the config/props/nexternal info
            filename = pjoin(dirpath, 'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath, 'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  s_and_t_channels)

            filename = pjoin(dirpath, 'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                                         nexternal, ninitial)

        if self.opt['export_format'] == 'standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb = matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                                      ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        # Generate diagrams (unless suppressed via output option noeps=True)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # With a routine prefix the shared check_sa.f cannot be linked as-is:
        # write a local copy with the smatrix calls renamed.
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            pat = re.compile('smatrix', re.I)
            new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
            with open(pjoin(dirpath, 'check_sa.f'), 'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2406 2407 2408 #=========================================================================== 2409 # write_source_makefile 2410 #===========================================================================
2411 - def write_source_makefile(self, writer):
2412 """Write the nexternal.inc file for MG4""" 2413 2414 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2415 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2416 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2417 2418 replace_dict= {'libraries': set_of_lib, 2419 'model':model_line, 2420 'additional_dsample': '', 2421 'additional_dependencies':''} 2422 2423 text = open(path).read() % replace_dict 2424 2425 if writer: 2426 writer.write(text) 2427 2428 return replace_dict
2429 2430 #=========================================================================== 2431 # write_matrix_element_v4 2432 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format.
        If write is False, just return the replace_dict and do not write
        anything (used by subclasses that post-process the dict).

        Returns the number of non-comment helas calls when writing, the
        replace_dict otherwise, and 0 for an empty matrix element.
        """


        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                    "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        if 'sa_symmetry' not in self.opt:
            self.opt['sa_symmetry'] = False


        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable': '', 'amp2_lines': '',
                        'proc_prefix': proc_prefix, 'proc_id': ''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
            matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1, even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
            matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format'] == 'standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [])
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders = matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders) == 0:
            replace_dict['nSplitOrders'] = ''
            # Extract JAMP lines
            jamp_lines = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,), ]
            amp_orders = [((1,), tuple(range(1, ngraphs + 1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders'] = 1
            replace_dict['split_order_str_list'] = ''
        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders'] = len(amp_orders)
            replace_dict['nSqAmpSplitOrders'] = len(squared_orders)
            replace_dict['nSplitOrders'] = len(split_orders)
            replace_dict['split_order_str_list'] = str(split_orders)
            amp_so = self.get_split_orders_lines(
                [amp_order[0] for amp_order in amp_orders], 'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders, 'SQSPLITORDERS')
            replace_dict['ampsplitorders'] = '\n'.join(amp_so)
            replace_dict['sqsplitorders'] = '\n'.join(sqamp_so)
            jamp_lines = self.get_JAMP_lines_split_order(\
                matrix_element, amp_orders, split_order_names=split_orders)

            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs'] = self.set_chosen_SO_index(
                matrix_element.get('processes')[0], squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            check_sa_writer = writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders, split_orders,
                                            nexternal, ninitial, proc_prefix, check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                PARAMETER (NSQSO_BORN=%d)""" % replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Select the matrix.f template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format'] == 'standalone_msP':
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format'] == 'standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format'] == 'matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders) > 0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not " + \
                             " available for individual ME evaluation of given coupl. orders." + \
                             " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                               'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders) > 0:
                content = '\n' + open(replace_dict['template_file2'])\
                    .read() % replace_dict
                writer.writelines(content)
            return len([call for call in helas_calls if call.find('#') != 0])
        else:
            replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0])
            return replace_dict # for subclass update
2603
2604 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2605 nincoming, proc_prefix, writer):
2606 """ Write out a more advanced version of the check_sa drivers that 2607 individually returns the matrix element for each contributing squared 2608 order.""" 2609 2610 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2611 'template_files', 'check_sa_splitOrders.f')).read() 2612 printout_sq_orders=[] 2613 for i, squared_order in enumerate(squared_orders): 2614 sq_orders=[] 2615 for j, sqo in enumerate(squared_order): 2616 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2617 printout_sq_orders.append(\ 2618 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2619 %(i+1,' '.join(sq_orders),i+1)) 2620 printout_sq_orders='\n'.join(printout_sq_orders) 2621 replace_dict = {'printout_sqorders':printout_sq_orders, 2622 'nSplitOrders':len(squared_orders), 2623 'nexternal':nexternal, 2624 'nincoming':nincoming, 2625 'proc_prefix':proc_prefix} 2626 2627 if writer: 2628 writer.writelines(check_sa_content % replace_dict) 2629 else: 2630 return replace_dict
2631
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # Exporter defaults: keep the directory (no clean), real masses,
    # 'matchbox' output format, no multiple precision, symmetric SA output.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born


    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Emits a Fortran if/elseif chain mapping the index pair (in1, in2)
        to the corresponding color-structure argument; any pair not listed
        falls through to out = -1.
        """

        # Trivial color structure: single unit entry.
        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in range(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    continue
                # Matchbox only understands fundamental (T) and trace (Tr)
                # structures; anything else is a hard error.
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype)
                # A trailing '0' separates consecutive structures in the
                # flattened argument list.
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        # Close the chain; -1 signals an index pair with no color entry.
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self,*args,**opts):
        # Compilation is driven externally by Matchbox, so the standalone
        # 'make' step is intentionally a no-op here.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                            JAMP_formatLC=None):

        """Adding leading color part of the colorflow"""

        # Leading-color JAMPs get their own name prefix (default 'LNJAMP(%s)').
        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error(error_msg % 'col_amps')
        else:
            raise MadGraph5Error(error_msg % 'col_amps')

        # Full-color JAMP definitions from the parent exporter.
        # NOTE(review): the 'split' parameter is not forwarded — super() is
        # always called with split=-1; confirm this is intended.
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                                      JAMP_format=JAMP_format,
                                                      AMP_format=AMP_format,
                                                      split=-1)


        # Filter the col_ampls to generate only those without any 1/NC terms

        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                # coefficient[3] is the 1/Nc power; keep only leading color.
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                                      JAMP_format=JAMP_formatLC,
                                                      AMP_format=AMP_format,
                                                      split=-1)

        return text
2743 2744 2745 2746 #=============================================================================== 2747 # ProcessExporterFortranMW 2748 #=============================================================================== 2749 -class ProcessExporterFortranMW(ProcessExporterFortran):
2750 """Class to take care of exporting a set of matrix elements to 2751 MadGraph v4 - MadWeight format.""" 2752 2753 matrix_file="matrix_standalone_v4.inc" 2754
    def copy_template(self, model):
        """Additional actions needed for setup of Template:
        copy the MadWeight-specific directories and scripts on top of the
        generic template created by the parent class.
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'),
                        pjoin(self.dir_path, 'Source','MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'),
                        pjoin(self.dir_path, 'bin','internal','madweight'), True)
        # setrun.f and run.inc live in Source/ for MadWeight builds.
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'),
                 pjoin(self.dir_path, 'Source','setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'),
                 pjoin(self.dir_path, 'Source','run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        # Run the MadWeight setup script silently (stdout/stderr discarded).
        try:
            subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')],
                            stdout = os.open(os.devnull, os.O_RDWR),
                            stderr = os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2791 2792 2793 2794 2795 #=========================================================================== 2796 # convert_model 2797 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert the model as the parent class does, then freeze a copy of
        the UFO model (and its restriction card, when present) under
        bin/internal/ufomodel so the output directory is self-contained."""

        super(ProcessExporterFortranMW,self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
        try:
            shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
        except OSError as error:
            # Directory absent on a fresh export; nothing to clean up.
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                        pjoin(self.dir_path,'bin','internal','ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
                             'restrict_default.dat')
            # The restriction card may be an in-memory ParamCard or a path.
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
2821 2822 #=========================================================================== 2823 # generate_subprocess_directory 2824 #===========================================================================
2825 - def copy_python_file(self):
2826 """copy the python file require for the Template""" 2827 2828 # madevent interface 2829 cp(_file_path+'/interface/madweight_interface.py', 2830 self.dir_path+'/bin/internal/madweight_interface.py') 2831 cp(_file_path+'/interface/extended_cmd.py', 2832 self.dir_path+'/bin/internal/extended_cmd.py') 2833 cp(_file_path+'/interface/common_run_interface.py', 2834 self.dir_path+'/bin/internal/common_run_interface.py') 2835 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2836 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2837 cp(_file_path+'/iolibs/save_load_object.py', 2838 self.dir_path+'/bin/internal/save_load_object.py') 2839 cp(_file_path+'/madevent/gen_crossxhtml.py', 2840 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2841 cp(_file_path+'/madevent/sum_html.py', 2842 self.dir_path+'/bin/internal/sum_html.py') 2843 cp(_file_path+'/various/FO_analyse_card.py', 2844 self.dir_path+'/bin/internal/FO_analyse_card.py') 2845 cp(_file_path+'/iolibs/file_writers.py', 2846 self.dir_path+'/bin/internal/file_writers.py') 2847 #model file 2848 cp(_file_path+'../models/check_param_card.py', 2849 self.dir_path+'/bin/internal/check_param_card.py') 2850 2851 #madevent file 2852 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2853 cp(_file_path+'/various/lhe_parser.py', 2854 self.dir_path+'/bin/internal/lhe_parser.py') 2855 2856 cp(_file_path+'/various/banner.py', 2857 self.dir_path+'/bin/internal/banner.py') 2858 cp(_file_path+'/various/shower_card.py', 2859 self.dir_path+'/bin/internal/shower_card.py') 2860 cp(_file_path+'/various/cluster.py', 2861 self.dir_path+'/bin/internal/cluster.py') 2862 2863 # logging configuration 2864 cp(_file_path+'/interface/.mg5_logging.conf', 2865 self.dir_path+'/bin/internal/me5_logging.conf') 2866 cp(_file_path+'/interface/coloring_logging.py', 2867 self.dir_path+'/bin/internal/coloring_logging.py')
2868 2869 2870 #=========================================================================== 2871 # Change the version of cuts.f to the one compatible with MW 2872 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None (write to SubProcesses/cuts.f), a path string, or an
        already-open file-like object.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # Copy lines until the 'if(xqcut.gt.0d0' block; from there a crude
        # if/endif counter skips the nested region.
        # NOTE(review): the trigger line itself also matches the generic
        # 'if(...)then' pattern, so nb_if is bumped twice for it and only
        # returns to zero at a later enclosing endif — confirm this removes
        # exactly the intended region of the template.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
        cut_bw = .true.
      else
        stop 1
      endif
      return
      end
""")

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        # MadWeight ships maxparticles.inc instead of genps.inc.
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            # Assume an already-open, writable file object.
            fsock = outpath
        fsock.write(template)
2922 2923 2924 2925 #=========================================================================== 2926 # Make the Helas and Model directories for Standalone directory 2927 #===========================================================================
2928 - def make(self):
2929 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2930 everything for running madweight 2931 """ 2932 2933 source_dir = os.path.join(self.dir_path, "Source") 2934 logger.info("Running make for Helas") 2935 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2936 logger.info("Running make for Model") 2937 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2938 logger.info("Running make for PDF") 2939 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2940 logger.info("Running make for CERNLIB") 2941 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2942 logger.info("Running make for GENERIC") 2943 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2944 logger.info("Running make for blocks") 2945 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2946 logger.info("Running make for tools") 2947 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2948 2949 #=========================================================================== 2950 # Create proc_card_mg5.dat for MadWeight directory 2951 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat"""

        # Compiler choices forwarded from the MG5 options dictionary.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}



        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # MadWeight's blocks/ and tools/ sub-libraries include the same file,
        # so link it into both.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        # Configure compilers, then build all libraries (see self.make).
        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
2983 2984 2985 2986 #=========================================================================== 2987 # create the run_card for MW 2988 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write run_card.dat and run_card_default.dat for MadWeight from the
        standard run-card template, with MadWeight-specific defaults (fixed
        renormalization/factorization scales, no run tag, all cuts removed)."""

        run_card = banner_mod.RunCard()

        # pass to default for MW
        run_card["run_tag"] = "\'not_use\'"
        run_card["fixed_ren_scale"] = "T"
        run_card["fixed_fac_scale"] = "T"
        run_card.remove_all_cut()

        # Same content written twice: once as the default card, once as the
        # active card.
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                       template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'),
                       python_template=True)
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'),
                       template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'),
                       python_template=True)
3006 3007 #=========================================================================== 3008 # export model files 3009 #===========================================================================
3010 - def export_model_files(self, model_path):
3011 """export the model dependent files for V4 model""" 3012 3013 super(ProcessExporterFortranMW,self).export_model_files(model_path) 3014 # Add the routine update_as_param in v4 model 3015 # This is a function created in the UFO 3016 text=""" 3017 subroutine update_as_param() 3018 call setpara('param_card.dat',.false.) 3019 return 3020 end 3021 """ 3022 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3023 ff.write(text) 3024 ff.close() 3025 3026 # Modify setrun.f 3027 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 3028 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3029 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 3030 fsock.write(text) 3031 fsock.close() 3032 3033 # Modify initialization.f 3034 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 3035 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 3036 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 3037 fsock.write(text) 3038 fsock.close() 3039 3040 3041 self.make_model_symbolic_link()
3042 3043 #=========================================================================== 3044 # generate_subprocess_directory 3045 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of non-comment helas calls written to matrix.f
        (0 when nothing could be generated)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                  "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath,'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                            writers.FortranWriter(filename),
                            matrix_element,
                            fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t channels reused by props.inc below.
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): links are created relative to the original working
        # directory (starting_dir=cwd), not dirpath — confirm callers rely
        # on being invoked from the SubProcesses directory.
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3153 3154 #=========================================================================== 3155 # write_matrix_element_v4 3156 #===========================================================================
3157 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3158 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3159 3160 if not matrix_element.get('processes') or \ 3161 not matrix_element.get('diagrams'): 3162 return 0 3163 3164 if writer: 3165 if not isinstance(writer, writers.FortranWriter): 3166 raise writers.FortranWriter.FortranWriterError(\ 3167 "writer not FortranWriter") 3168 3169 # Set lowercase/uppercase Fortran code 3170 writers.FortranWriter.downcase = False 3171 3172 replace_dict = {} 3173 3174 # Extract version number and date from VERSION file 3175 info_lines = self.get_mg5_info_lines() 3176 replace_dict['info_lines'] = info_lines 3177 3178 # Extract process info lines 3179 process_lines = self.get_process_info_lines(matrix_element) 3180 replace_dict['process_lines'] = process_lines 3181 3182 # Set proc_id 3183 replace_dict['proc_id'] = proc_id 3184 3185 # Extract number of external particles 3186 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3187 replace_dict['nexternal'] = nexternal 3188 3189 # Extract ncomb 3190 ncomb = matrix_element.get_helicity_combinations() 3191 replace_dict['ncomb'] = ncomb 3192 3193 # Extract helicity lines 3194 helicity_lines = self.get_helicity_lines(matrix_element) 3195 replace_dict['helicity_lines'] = helicity_lines 3196 3197 # Extract overall denominator 3198 # Averaging initial state color, spin, and identical FS particles 3199 den_factor_line = self.get_den_factor_line(matrix_element) 3200 replace_dict['den_factor_line'] = den_factor_line 3201 3202 # Extract ngraphs 3203 ngraphs = matrix_element.get_number_of_amplitudes() 3204 replace_dict['ngraphs'] = ngraphs 3205 3206 # Extract nwavefuncs 3207 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3208 replace_dict['nwavefuncs'] = nwavefuncs 3209 3210 # Extract ncolor 3211 ncolor = max(1, len(matrix_element.get('color_basis'))) 3212 replace_dict['ncolor'] = ncolor 3213 3214 # Extract color data lines 3215 color_data_lines = self.get_color_data_lines(matrix_element) 3216 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3217 3218 # Extract helas calls 3219 helas_calls = fortran_model.get_matrix_element_calls(\ 3220 matrix_element) 3221 3222 replace_dict['helas_calls'] = "\n".join(helas_calls) 3223 3224 # Extract JAMP lines 3225 jamp_lines = self.get_JAMP_lines(matrix_element) 3226 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3227 3228 replace_dict['template_file'] = os.path.join(_file_path, \ 3229 'iolibs/template_files/%s' % self.matrix_file) 3230 replace_dict['template_file2'] = '' 3231 3232 if writer: 3233 file = open(replace_dict['template_file']).read() 3234 file = file % replace_dict 3235 # Write the file 3236 writer.writelines(file) 3237 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3238 else: 3239 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3240 3241 #=========================================================================== 3242 # write_source_makefile 3243 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source directory makefile for MadWeight, filling in the
        list of libraries to link against.  (Corrected docstring: this writes
        Source/makefile, not nexternal.inc.)"""


        path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source')
        # Libraries the MadWeight Source build links against.
        set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)'
        text = open(path).read() % {'libraries': set_of_lib}
        writer.write(text)

        return True
3254
3255 - def write_phasespace_file(self, writer, nb_diag):
3256 """ """ 3257 3258 template = """ include 'maxparticles.inc' 3259 integer max_branches 3260 parameter (max_branches=max_particles-1) 3261 integer max_configs 3262 parameter (max_configs=%(nb_diag)s) 3263 3264 c channel position 3265 integer config_pos,perm_pos 3266 common /to_config/config_pos,perm_pos 3267 3268 """ 3269 3270 writer.write(template % {'nb_diag': nb_diag})
3271 3272 3273 #=========================================================================== 3274 # write_auto_dsig_file 3275 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format)

        Returns 0 for an empty matrix element, the substitution dictionary
        when no writer is given, otherwise writes the file (returns None)."""

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        # Only decay (1 -> N) and scattering (2 -> N) processes make sense.
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            # Cuts are skipped in the grouped version.
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            file = open(os.path.join(_file_path, \
                        'iolibs/template_files/auto_dsig_mw.inc')).read()

            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3344 #=========================================================================== 3345 # write_configs_file 3346 #===========================================================================
3347 - def write_configs_file(self, writer, matrix_element):
3348 """Write the configs.inc file for MadEvent""" 3349 3350 # Extract number of external particles 3351 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3352 3353 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3354 mapconfigs = [c[0] for c in configs] 3355 model = matrix_element.get('processes')[0].get('model') 3356 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3357 [[c[1]] for c in configs], 3358 mapconfigs, 3359 nexternal, ninitial,matrix_element, model)
3360 3361 #=========================================================================== 3362 # write_run_configs_file 3363 #===========================================================================
3364 - def write_run_config_file(self, writer):
3365 """Write the run_configs.inc file for MadWeight""" 3366 3367 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3368 text = open(path).read() % {'chanperjob':'5'} 3369 writer.write(text) 3370 return True
3371 3372 #=========================================================================== 3373 # write_configs_file_from_diagrams 3374 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of [s_channels, t_channels] pairs, one per
        written config."""

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Minimal vertex arity present in the configs; any config containing
        # a vertex with more legs than this is skipped below, since only
        # 3-vertices are allowed in configs.inc.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Placeholder PDG code for fake propagators in multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial,model,new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # One output line per propagator:
                # <prop number> <daughter1> <daughter2> <mass> <width> <S|T> <pdg>
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
#                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
#                             (last_leg.get('number'), nconfigs, len(daughters),
#                              ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
#                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
#                                 (last_leg.get('number'), nconfigs, nsubprocs,
#                                  ",".join([str(d) for d in pdgs])))
#                    lines.append("data tprid(%d,%d)/0/" % \
#                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
#                    lines.append("data tprid(%d,%d)/%d/" % \
#                                 (last_leg.get('number'), nconfigs,
#                                  abs(last_leg.get('id'))))
#                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
#                                 (last_leg.get('number'), nconfigs, nsubprocs,
#                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
#        lines.append("# Number of configs")
#        lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3517
3518 3519 3520 #=============================================================================== 3521 # ProcessExporterFortranME 3522 #=============================================================================== 3523 -class ProcessExporterFortranME(ProcessExporterFortran):
3524 """Class to take care of exporting a set of matrix elements to 3525 MadEvent format.""" 3526 3527 matrix_file = "matrix_madevent_v4.inc" 3528 done_warning_tchannel = False 3529 3530 # helper function for customise helas writter 3531 @staticmethod
3532 - def custom_helas_call(call, arg):
3533 if arg['mass'] == '%(M)s,%(W)s,': 3534 arg['mass'] = '%(M)s, fk_%(W)s,' 3535 elif '%(W)s' in arg['mass']: 3536 raise Exception 3537 return call, arg
3538
3539 - def copy_template(self, model):
3540 """Additional actions needed for setup of Template 3541 """ 3542 3543 super(ProcessExporterFortranME, self).copy_template(model) 3544 3545 # File created from Template (Different in some child class) 3546 filename = pjoin(self.dir_path,'Source','run_config.inc') 3547 self.write_run_config_file(writers.FortranWriter(filename)) 3548 3549 # The next file are model dependant (due to SLAH convention) 3550 self.model_name = model.get('name') 3551 # Add the symmetry.f 3552 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3553 self.write_symmetry(writers.FortranWriter(filename)) 3554 # 3555 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3556 self.write_addmothers(writers.FortranWriter(filename)) 3557 # Copy the different python file in the Template 3558 self.copy_python_file()
3559 3560 3561 3562 3563 3564 3565 #=========================================================================== 3566 # generate_subprocess_directory 3567 #===========================================================================
3568 - def copy_python_file(self):
3569 """copy the python file require for the Template""" 3570 3571 # madevent interface 3572 cp(_file_path+'/interface/madevent_interface.py', 3573 self.dir_path+'/bin/internal/madevent_interface.py') 3574 cp(_file_path+'/interface/extended_cmd.py', 3575 self.dir_path+'/bin/internal/extended_cmd.py') 3576 cp(_file_path+'/interface/common_run_interface.py', 3577 self.dir_path+'/bin/internal/common_run_interface.py') 3578 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3579 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3580 cp(_file_path+'/iolibs/save_load_object.py', 3581 self.dir_path+'/bin/internal/save_load_object.py') 3582 cp(_file_path+'/iolibs/file_writers.py', 3583 self.dir_path+'/bin/internal/file_writers.py') 3584 #model file 3585 cp(_file_path+'../models/check_param_card.py', 3586 self.dir_path+'/bin/internal/check_param_card.py') 3587 3588 #copy all the file present in madevent directory 3589 for name in os.listdir(pjoin(_file_path, 'madevent')): 3590 if name not in ['__init__.py'] and name.endswith('.py'): 3591 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3592 3593 #madevent file 3594 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3595 cp(_file_path+'/various/lhe_parser.py', 3596 self.dir_path+'/bin/internal/lhe_parser.py') 3597 cp(_file_path+'/various/banner.py', 3598 self.dir_path+'/bin/internal/banner.py') 3599 cp(_file_path+'/various/histograms.py', 3600 self.dir_path+'/bin/internal/histograms.py') 3601 cp(_file_path+'/various/plot_djrs.py', 3602 self.dir_path+'/bin/internal/plot_djrs.py') 3603 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3604 3605 cp(_file_path+'/various/cluster.py', 3606 self.dir_path+'/bin/internal/cluster.py') 3607 cp(_file_path+'/madevent/combine_runs.py', 3608 self.dir_path+'/bin/internal/combine_runs.py') 3609 # logging configuration 3610 cp(_file_path+'/interface/.mg5_logging.conf', 3611 
self.dir_path+'/bin/internal/me5_logging.conf') 3612 cp(_file_path+'/interface/coloring_logging.py', 3613 self.dir_path+'/bin/internal/coloring_logging.py') 3614 # shower card and FO_analyse_card. 3615 # Although not needed, it is imported by banner.py 3616 cp(_file_path+'/various/shower_card.py', 3617 self.dir_path+'/bin/internal/shower_card.py') 3618 cp(_file_path+'/various/FO_analyse_card.py', 3619 self.dir_path+'/bin/internal/FO_analyse_card.py')
3620 3621
3622 - def convert_model(self, model, wanted_lorentz = [], 3623 wanted_couplings = []):
3624 3625 super(ProcessExporterFortranME,self).convert_model(model, 3626 wanted_lorentz, wanted_couplings) 3627 3628 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3629 try: 3630 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3631 except OSError as error: 3632 pass 3633 model_path = model.get('modelpath') 3634 # This is not safe if there is a '##' or '-' in the path. 3635 shutil.copytree(model_path, 3636 pjoin(self.dir_path,'bin','internal','ufomodel'), 3637 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3638 if hasattr(model, 'restrict_card'): 3639 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3640 'restrict_default.dat') 3641 if isinstance(model.restrict_card, check_param_card.ParamCard): 3642 model.restrict_card.write(out_path) 3643 else: 3644 files.cp(model.restrict_card, out_path)
3645 3646 #=========================================================================== 3647 # export model files 3648 #===========================================================================
3649 - def export_model_files(self, model_path):
3650 """export the model dependent files""" 3651 3652 super(ProcessExporterFortranME,self).export_model_files(model_path) 3653 3654 # Add the routine update_as_param in v4 model 3655 # This is a function created in the UFO 3656 text=""" 3657 subroutine update_as_param() 3658 call setpara('param_card.dat',.false.) 3659 return 3660 end 3661 """ 3662 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3663 ff.write(text) 3664 ff.close() 3665 3666 # Add the symmetry.f 3667 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3668 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3669 3670 # Modify setrun.f 3671 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3672 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3673 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3674 fsock.write(text) 3675 fsock.close() 3676 3677 self.make_model_symbolic_link()
3678 3679 #=========================================================================== 3680 # generate_subprocess_directory 3681 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        Returns the number of helas calls written for matrix.f (0 if
        none)."""

        # NOTE(review): cwd is captured but the chdir logic below is
        # commented out, so it is currently unused.
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')


        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')



        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # Directory may already exist; warn and keep going
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t-channel info reused by several
        # of the files written below
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))


        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        # NOTE(review): perms is not used below; only symmetry and
        # ident_perms are written out.
        symmetry, perms, ident_perms = \
            diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams (skipped only when output option noeps == 'True')
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(Ppath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        if not calls:
            calls = 0
        return calls
    # Files in SubProcesses that every P* subprocess directory links to
    # (consumed by link_files_in_SubProcess).
    link_Sub_files = ['addmothers.f',
                      'cluster.f',
                      'cluster.inc',
                      'coupl.inc',
                      'cuts.f',
                      'cuts.inc',
                      'genps.f',
                      'genps.inc',
                      'idenparts.f',
                      'initcluster.f',
                      'makefile',
                      'message.inc',
                      'myamp.f',
                      'reweight.f',
                      'run.inc',
                      'maxconfigs.inc',
                      'maxparticles.inc',
                      'run_config.inc',
                      'lhe_event_infos.inc',
                      'setcuts.f',
                      'setscales.f',
                      'sudakov.inc',
                      'symmetry.f',
                      'unwgt.f',
                      'dummy_fct.f'
                      ]
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize ME v4 directory by creating jpeg diagrams, html
        pages, proc_card_mg5.dat and madevent.tar.gz.

        flaglist may contain 'nojpeg' (skip jpeg generation) and
        'online' (record the number of generated channels)."""

        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        if 'online' in flaglist:
            online = True
        else:
            online = False

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        # indicate that the output type is not grouped
        if not isinstance(self, ProcessExporterFortranMEGroup):
            self.proc_characteristic['grouped_matrix'] = False

        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        # set limitation linked to the model


        # indicate the PDG of all initial particle
        # (matrix_elements may be a list of groups or a single container;
        # the AttributeError fallback handles the non-grouped case)
        try:
            pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        except AttributeError:
            pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        self.proc_characteristic['pdg_initial1'] = pdgs1
        self.proc_characteristic['pdg_initial2'] = pdgs2


        # mssm models need their param_card converted to MG5 conventions
        modelname = self.opt['model']
        if modelname == 'mssm' or modelname.startswith('mssm-'):
            param_card = pjoin(self.dir_path, 'Cards','param_card.dat')
            mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
            check_param_card.convert_to_mg5card(param_card, mg5_param)
            check_param_card.check_valid_param_card(mg5_param)

        # Add the combine_events.f modify param_card path/number of @X
        filename = pjoin(self.dir_path,'Source','combine_events.f')
        try:
            nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
        except AttributeError:
            nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
        nb_proc = len(set(nb_proc))
        self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements)

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)

        # Touch "done" file
        os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses'))

        # Check for compiler
        self.set_compiler(compiler)
        self.set_cpp_compiler(compiler['cpp'])


        old_pos = os.getcwd()
        subpath = pjoin(self.dir_path, 'SubProcesses')

        P_dir_list = [proc for proc in os.listdir(subpath)
                      if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P']

        # NOTE(review): this file descriptor is never closed in this method
        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the postscript into jpg files (if authorized)
        if makejpg:
            try:
                os.remove(pjoin(self.dir_path,'HTML','card.jpg'))
            except Exception as error:
                pass

            # jpeg conversion requires ghostscript to be available
            if misc.which('gs'):
                logger.info("Generate jpeg diagrams")
                for Pdir in P_dir_list:
                    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                              stdout = devnull, cwd=pjoin(subpath, Pdir))

        logger.info("Generate web pages")
        # Create the WebPage using perl script

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                  stdout = devnull,cwd=pjoin(self.dir_path))

        #os.chdir(os.path.pardir)

        obj = gen_infohtml.make_info_html(self.dir_path)

        if online:
            nb_channel = obj.rep_rule['nb_gen_diag']
            open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel))
        # add the information to proc_charac
        self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

        # Write command history as proc_card_mg5
        if os.path.isdir(pjoin(self.dir_path,'Cards')):
            output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull)

        # create the proc_characteristic file
        self.create_proc_charac(matrix_elements, history)

        # create the run_card
        ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')):
            if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')):
                os.remove(pjoin(self.dir_path,'madevent.tar.gz'))
            misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                      stdout = devnull, cwd=self.dir_path)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull, cwd=self.dir_path)
4025 4026 4027 4028 4029 4030 4031 #return to the initial dir 4032 #os.chdir(old_pos) 4033 4034 #=========================================================================== 4035 # write_matrix_element_v4 4036 #===========================================================================
def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                            proc_id = "", config_map = [], subproc_number = ""):
    """Export a matrix element to a matrix.f file in MG4 madevent format.

    Builds a substitution dictionary (helas calls, helicity/color data,
    split-order bookkeeping, fake-width guards) and renders the Fortran
    matrix.f template with it.

    Parameters:
        writer: a writers.FortranWriter, or a falsy value to get the raw
            replace_dict back instead of writing the file.
        matrix_element: the helas matrix element to export.
        fortran_model: helas call writer providing get_matrix_element_calls.
        proc_id: subprocess-group id as a string; empty for standalone mode.
        config_map: forwarded to get_amp2_lines.
            NOTE(review): mutable default argument — assumed read-only in
            get_amp2_lines; confirm no callee mutates it.
        subproc_number: unused in this body as visible here.

    Returns:
        0 if the matrix element has no processes or diagrams;
        (number of non-comment helas calls, ncolor) when writer is given;
        otherwise replace_dict with that tuple stored under 'return_value'.
    """

    # Nothing to export for an empty matrix element.
    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    if writer:
        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter")
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

    # check if MLM/.../ is supported for this matrix-element and update associate flag
    if self.model and 'MLM' in self.model["limitations"]:
        if 'MLM' not in self.proc_characteristic["limitations"]:
            used_couplings = matrix_element.get_used_couplings(output="set")
            for vertex in self.model.get('interactions'):
                particles = [p for p in vertex.get('particles')]
                # Only gluon-attached vertices are relevant for the MLM check.
                if 21 in [p.get('pdg_code') for p in particles]:
                    colors = [par.get('color') for par in particles]
                    if 1 in colors:
                        continue
                    elif 'QCD' not in vertex.get('orders'):
                        # A colored gluon vertex without a QCD order whose
                        # coupling is actually used breaks MLM matching.
                        for bad_coup in vertex.get('couplings').values():
                            if bad_coup in used_couplings:
                                self.proc_characteristic["limitations"].append('MLM')
                                break

    # The proc prefix is not used for MadEvent output so it can safely be set
    # to an empty string.
    replace_dict = {'proc_prefix':''}

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(\
                matrix_element)
    # Warn only once per run (class-level flag) about zeroed t-channel widths.
    if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel:
        logger.warning("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False")
        ProcessExporterFortranME.done_warning_tchannel = True

    replace_dict['helas_calls'] = "\n".join(helas_calls)

    #adding the support for the fake width (forbidding too small width)
    mass_width = matrix_element.get_all_mass_widths()
    mass_width = sorted(list(mass_width))
    width_list = set([e[1] for e in mass_width])

    # Declare and save one fk_<width> Fortran variable per distinct width.
    replace_dict['fake_width_declaration'] = \
        (' double precision fk_%s \n' * len(width_list)) % tuple(width_list)
    replace_dict['fake_width_declaration'] += \
        (' save fk_%s \n' * len(width_list)) % tuple(width_list)
    fk_w_defs = []
    one_def = ' fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)'

    for m, w in mass_width:
        if w == 'zero':
            # fk_zero is defined only once even if several particles use it.
            if ' fk_zero = 0d0' not in fk_w_defs:
                fk_w_defs.append(' fk_zero = 0d0')
            continue
        fk_w_defs.append(one_def %{'m':m, 'w':w})
    replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs)

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id

    # Extract ncomb
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    den_factor_line = self.get_den_factor_line(matrix_element)
    replace_dict['den_factor_line'] = den_factor_line

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract ndiags
    ndiags = len(matrix_element.get('diagrams'))
    replace_dict['ndiags'] = ndiags

    # Set define_iconfigs_lines
    replace_dict['define_iconfigs_lines'] = \
         """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
         COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

    if proc_id:
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] += \
             """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
             COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                        proc_id
    else:
        # Standard running
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Set the size of Wavefunction
    # 18 components needed as soon as the model contains spin-3/2 (4) or
    # spin-2 (5) particles (or when no model is attached, to stay safe).
    if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
        replace_dict['wavefunctionsize'] = 18
    else:
        replace_dict['wavefunctionsize'] = 6

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element, config_map)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # The JAMP definition depends on the splitting order
    split_orders=matrix_element.get('processes')[0].get('split_orders')
    if len(split_orders)>0:
        squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
        replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                          matrix_element.get('processes')[0],squared_orders)
    else:
        # Consider the output of a dummy order 'ALL_ORDERS' for which we
        # set all amplitude order to weight 1 and only one squared order
        # contribution which is of course ALL_ORDERS=2.
        squared_orders = [(2,),]
        amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
        replace_dict['chosen_so_configs'] = '.TRUE.'

    replace_dict['nAmpSplitOrders']=len(amp_orders)
    replace_dict['nSqAmpSplitOrders']=len(squared_orders)
    replace_dict['split_order_str_list']=str(split_orders)
    replace_dict['nSplitOrders']=max(len(split_orders),1)
    amp_so = self.get_split_orders_lines(
        [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
    sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
    replace_dict['ampsplitorders']='\n'.join(amp_so)
    replace_dict['sqsplitorders']='\n'.join(sqamp_so)

    # Extract JAMP lines
    # If no split_orders then artificiall add one entry called 'ALL_ORDERS'
    jamp_lines = self.get_JAMP_lines_split_order(\
                   matrix_element,amp_orders,split_order_names=
                   split_orders if len(split_orders)>0 else ['ALL_ORDERS'])
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

    replace_dict['template_file'] = pjoin(_file_path, \
                         'iolibs/template_files/%s' % self.matrix_file)
    replace_dict['template_file2'] = pjoin(_file_path, \
                         'iolibs/template_files/split_orders_helping_functions.inc')

    s1,s2 = matrix_element.get_spin_state_initial()
    replace_dict['nb_spin_state1'] = s1
    replace_dict['nb_spin_state2'] = s2

    if writer:
        file = open(replace_dict['template_file']).read()
        file = file % replace_dict
        # Add the split orders helper functions.
        file = file + '\n' + open(replace_dict['template_file2'])\
                               .read()%replace_dict
        # Write the file
        writer.writelines(file)
        # Count only real helas calls (lines that are not '#' comments).
        return len([call for call in helas_calls if call.find('#') != 0]), ncolor
    else:
        replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor)
        return replace_dict
4233 4234 #=========================================================================== 4235 # write_auto_dsig_file 4236 #===========================================================================
def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
    """Write the auto_dsig.f file for the differential cross section
    calculation, includes pdf call information.

    Also updates self.proc_characteristic (ninitial, nexternal,
    max_n_matched_jets, colored_pdgs) as a side effect.

    Parameters:
        writer: FortranWriter, or a falsy value to return the raw
            (replace_dict, context) pair instead of writing.
        matrix_element: helas matrix element to export.
        proc_id: subprocess-group id string; empty for standalone mode.

    Returns:
        0 for an empty matrix element; (replace_dict, context) when no
        writer is given; None after writing otherwise.

    Raises:
        writers.FortranWriter.FortranWriterError if ninitial is not 1 or 2.
    """

    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    self.proc_characteristic['ninitial'] = ninitial
    # Keep the running maximum over all exported matrix elements.
    self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

    # Add information relevant for MLM matching:
    # Maximum QCD power in all the contributions
    max_qcd_order = 0
    for diag in matrix_element.get('diagrams'):
        orders = diag.calculate_orders()
        if 'QCD' in orders:
            max_qcd_order = max(max_qcd_order,orders['QCD'])
    # Count massless colored final-state particles per process; a light
    # parton is massless ('ZERO') and carries color (color > 1).
    max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
        if proc.get('model').get_particle(id).get('mass')=='ZERO' and
           proc.get('model').get_particle(id).get('color')>1])
                                    for proc in matrix_element.get('processes'))
    # Maximum number of final state light jets to be matched
    self.proc_characteristic['max_n_matched_jets'] = max(
        self.proc_characteristic['max_n_matched_jets'],
        min(max_qcd_order,max_n_light_final_partons))

    # List of default pdgs to be considered for the CKKWl merging cut
    self.proc_characteristic['colored_pdgs'] = \
        sorted(list(set([abs(p.get('pdg_code')) for p in
          matrix_element.get('processes')[0].get('model').get('particles') if
          p.get('color')>1])))

    if ninitial < 1 or ninitial > 2:
        raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id
    replace_dict['numproc'] = 1

    # Set dsig_line
    if ninitial == 1:
        # No conversion, since result of decay should be given in GeV
        dsig_line = "pd(0)*dsiguu"
    else:
        # Convert result (in GeV) to pb
        dsig_line = "pd(0)*conv*dsiguu"

    replace_dict['dsig_line'] = dsig_line

    # Extract pdf lines
    pdf_vars, pdf_data, pdf_lines = \
             self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # Lines that differ between subprocess group and regular
    if proc_id:
        replace_dict['numproc'] = int(proc_id)
        replace_dict['passcuts_begin'] = ""
        replace_dict['passcuts_end'] = ""
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_subdiag_lines'] = \
             """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
             COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        replace_dict['cutsdone'] = ""
    else:
        replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
        replace_dict['passcuts_end'] = "ENDIF"
        replace_dict['define_subdiag_lines'] = ""
        replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

    # Grouped output provides its own read/write of the GOODHEL block.
    if not isinstance(self, ProcessExporterFortranMEGroup):
        ncomb=matrix_element.get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
    else:
        replace_dict['read_write_good_hel'] = ""

    context = {'read_write_good_hel':True}

    if writer:
        file = open(pjoin(_file_path, \
                      'iolibs/template_files/auto_dsig_v4.inc')).read()
        file = file % replace_dict

        # Write the file
        writer.writelines(file, context=context)
    else:
        return replace_dict, context
4339 #=========================================================================== 4340 # write_coloramps_file 4341 #===========================================================================
def write_coloramps_file(self, writer, mapconfigs, matrix_element):
    """Write the coloramps.inc file for MadEvent.

    Emits the declaration of the icolamp logical array followed by the
    data statements produced by get_icolamp_lines.
    """

    # At least one color flow even for a trivial color basis.
    n_color_flows = max(len(matrix_element.get('color_basis')), 1)
    header = "logical icolamp(%d,%d,1)" % (n_color_flows, len(mapconfigs))

    body = self.get_icolamp_lines(mapconfigs, matrix_element, 1)

    # Declaration first, then the data statements.
    writer.writelines([header] + body)

    return True
4355 4356 #=========================================================================== 4357 # write_colors_file 4358 #===========================================================================
def write_colors_file(self, writer, matrix_elements):
    """Write the get_color.f file for MadEvent, which returns color
    for all particles used in the matrix element.

    Generates a Fortran function get_color(ipdg) mapping every PDG code
    appearing in the matrix element(s) to its color representation, plus
    a dummy entry for fictitious multiparticle-vertex particles and an
    error fallback.
    """

    # Accept either a single matrix element or a list of them.
    if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
        matrix_elements = [matrix_elements]

    model = matrix_elements[0].get('processes')[0].get('model')

    # We need the both particle and antiparticle wf_ids, since the identity
    # depends on the direction of the wf.
    wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                for wf in d.get('wavefunctions')],[]) \
                           for d in me.get('diagrams')], []) \
                      for me in matrix_elements], []))

    # External-leg pdgs (decays included) and their antiparticles.
    leg_ids = set(sum([sum([sum([[l.get('id'),
                                  model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                 for l in p.get_legs_with_decays()], []) \
                            for p in me.get('processes')], []) \
                       for me in matrix_elements], []))
    particle_ids = sorted(list(wf_ids.union(leg_ids)))

    # First particle opens the if/else-if chain.
    lines = """function get_color(ipdg)
    implicit none
    integer get_color, ipdg

    if(ipdg.eq.%d)then
    get_color=%d
    return
    """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

    for part_id in particle_ids[1:]:
        lines += """else if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (part_id, model.get_particle(part_id).get_color())
    # Dummy particle for multiparticle vertices with pdg given by
    # first code not in the model
    lines += """else if(ipdg.eq.%d)then
c       This is dummy particle used in multiparticle vertices
    get_color=2
    return
    """ % model.get_first_non_pdg()
    # Fallback branch: unknown pdg prints an error and returns 0.
    lines += """else
    write(*,*)'Error: No color given for pdg ',ipdg
    get_color=0
    return
    endif
    end
    """

    # Write the file
    writer.writelines(lines)

    return True
4415 4416 #=========================================================================== 4417 # write_config_nqcd_file 4418 #===========================================================================
def write_config_nqcd_file(self, writer, nqcd_list):
    """Write the config_nqcd.inc file holding the number of QCD
    couplings for each configuration (1-based Fortran indexing)."""

    data_lines = ["data nqcd(%d)/%d/" % (position, n_qcd)
                  for position, n_qcd in enumerate(nqcd_list, start=1)]

    writer.writelines(data_lines)

    return True
4431 4432 #=========================================================================== 4433 # write_maxconfigs_file 4434 #===========================================================================
def write_maxconfigs_file(self, writer, matrix_elements):
    """Write the maxconfigs.inc file for MadEvent.

    Declares lmaxconfigs as the largest number of configurations over
    all matrix elements (a HelasMultiProcess or a plain list of them).
    """

    # A multiprocess wraps its matrix elements; a list is used directly.
    if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
        element_list = matrix_elements.get('matrix_elements')
    else:
        element_list = matrix_elements

    maxconfigs = max(me.get_num_configs() for me in element_list)

    content = "integer lmaxconfigs\n"
    content += "parameter(lmaxconfigs=%d)" % maxconfigs

    writer.writelines(content)

    return True
4451 4452 #=========================================================================== 4453 # read_write_good_hel 4454 #===========================================================================
def read_write_good_hel(self, ncomb):
    """Return Fortran source for reading/writing/initialising the
    GOODHEL common block (plus a trivial get_maxsproc helper).

    ncomb is the number of helicity combinations used to size the
    GOODHEL array in every generated routine.
    """

    template = """
    subroutine write_good_hel(stream_id)
    implicit none
    integer stream_id
    INTEGER NCOMB
    PARAMETER ( NCOMB=%(ncomb)d)
    LOGICAL GOODHEL(NCOMB)
    INTEGER NTRY
    common/BLOCK_GOODHEL/NTRY,GOODHEL
    write(stream_id,*) GOODHEL
    return
    end


    subroutine read_good_hel(stream_id)
    implicit none
    include 'genps.inc'
    integer stream_id
    INTEGER NCOMB
    PARAMETER ( NCOMB=%(ncomb)d)
    LOGICAL GOODHEL(NCOMB)
    INTEGER NTRY
    common/BLOCK_GOODHEL/NTRY,GOODHEL
    read(stream_id,*) GOODHEL
    NTRY = MAXTRIES + 1
    return
    end

    subroutine init_good_hel()
    implicit none
    INTEGER NCOMB
    PARAMETER ( NCOMB=%(ncomb)d)
    LOGICAL GOODHEL(NCOMB)
    INTEGER NTRY
    INTEGER I

    do i=1,NCOMB
    GOODHEL(I) = .false.
    enddo
    NTRY = 0
    end

    integer function get_maxsproc()
    implicit none
    get_maxsproc = 1
    return
    end

    """
    # Only the helicity-combination count is substituted.
    return template % {'ncomb': ncomb}
4510 4511 #=========================================================================== 4512 # write_config_subproc_map_file 4513 #===========================================================================
def write_config_subproc_map_file(self, writer, s_and_t_channels):
    """Write a dummy config_subproc.inc file for MadEvent.

    Every configuration maps to the single subprocess 1.
    """

    data_lines = ["DATA CONFSUB(1,%d)/1/" % config_number
                  for config_number in range(1, len(s_and_t_channels) + 1)]

    writer.writelines(data_lines)

    return True
4527 4528 #=========================================================================== 4529 # write_configs_file 4530 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Each diagram becomes one configuration (numbered from 1); the actual
    file content is delegated to write_configs_file_from_diagrams.
    Returns (mapconfigs, result of the delegate call).
    """

    # Number of external particles and incoming legs
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    model = matrix_element.get('processes')[0].get('model')

    diagrams = matrix_element.get('diagrams')
    # Configuration numbers are simply 1..ndiagrams
    mapconfigs = list(range(1, len(diagrams) + 1))
    # One single-diagram "subprocess list" per configuration
    per_config_diagrams = [[diagram] for diagram in diagrams]

    return mapconfigs, self.write_configs_file_from_diagrams(
        writer, per_config_diagrams, mapconfigs,
        nexternal, ninitial, model)
4546 4547 #=========================================================================== 4548 # write_run_configs_file 4549 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Returns the substitution dictionary when no writer is given,
    True after writing the rendered template otherwise.
    """

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_run_config.inc')

    # Loop-induced processes get one channel per job, others five.
    chan_per_job = 1 if self.proc_characteristic['loop_induced'] else 5

    if not writer:
        return {'chanperjob': chan_per_job}

    writer.write(open(template_path).read() % {'chanperjob': chan_per_job})
    return True
4566 4567 #=========================================================================== 4568 # write_configs_file_from_diagrams 4569 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns (s_and_t_channels, nqcd_list): the s/t channel decomposition
    kept for each written config, and the QCD coupling count per config.
    """

    lines = []

    s_and_t_channels = []

    nqcd_list = []

    # Smallest "largest vertex size" over all configs; configs containing
    # any bigger vertex are skipped below (only 3-point allowed).
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # Fictitious PDG code used for multiparticle intermediate states.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model,
                                                       new_pdg))
            else:
                # NOTE: every missing subprocess shares the SAME empty_verts
                # list object; it is resized in place below once the real
                # s-channel length is known, so all entries stay in sync.
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = list(zip(*[s for s,t in stchannels]))
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))
        # Number of QCD couplings in this diagram, taken from the first
        # contributing subprocess (0 if no QCD order present).
        nqcd = 0
        for h in helas_diags:
            if h:
                try:
                    nqcd = h.calculate_orders()['QCD']
                except KeyError:
                    pass
                break
            else:
                continue

        nqcd_list.append(nqcd)

        for verts in allchannels:
            # s-channel entries are tuples over subprocesses; t-channel
            # entries are single vertices.
            if verts in schannels:
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 when it does not
                # contribute to this config).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                # All t-channel vertices but the last get a propagator id
                # and a zeroed sprop entry.
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels, nqcd_list
4703 4704 #=========================================================================== 4705 # write_decayBW_file 4706 #===========================================================================
def write_decayBW_file(self, writer, s_and_t_channels):
    """Write the decayBW.inc file for MadEvent.

    For every s-channel vertex of every configuration, emit whether the
    produced leg should be forced on-shell (gForceBW flag).
    """

    # onshell flag -> gForceBW code: undecided 0, forced 1, vetoed 2
    onshell_code = {None: "0", True: "1", False: "2"}

    lines = []
    for config_number, config in enumerate(s_and_t_channels, start=1):
        for vertex in config[0]:
            # The resulting (last) leg carries the onshell information
            resulting_leg = vertex.get('legs')[-1]
            lines.append("data gForceBW(%d,%d)/%s/" %
                         (resulting_leg.get('number'), config_number,
                          onshell_code[resulting_leg.get('onshell')]))

    writer.writelines(lines)

    return True
4728 4729 #=========================================================================== 4730 # write_dname_file 4731 #===========================================================================
def write_dname_file(self, writer, dir_name):
    """Write the dname.mg file for MG4.

    The file holds a single DIRNAME=<dir> line.
    """

    writer.write("DIRNAME=%s\n" % dir_name)

    return True
4741 4742 #=========================================================================== 4743 # write_driver 4744 #===========================================================================
def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
    """Write the SubProcess/driver.f file for MG4.

    Returns the substitution dictionary when no writer is given,
    True after writing the rendered template otherwise.
    """

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_driver.f')

    # mssm models read the converted MG5 param card instead.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # Requiring each helicity configuration to be probed by 10 points for
    # matrix element before using the resulting grid for MC over helicity
    # sampling. We multiply this by 2 because each grouped subprocess is
    # called at most twice for each IMIRROR.
    replace_dict = {
        'param_card_name': card,
        'ncomb': ncomb,
        'hel_init_points': n_grouped_proc * 10 * 2,
        'secondparam': '' if v5 else ',.true.',
    }

    if not writer:
        return replace_dict

    writer.write(open(template_path).read() % replace_dict)
    return True
4773 4774 #=========================================================================== 4775 # write_addmothers 4776 #===========================================================================
def write_addmothers(self, writer):
    """Write the SubProcess/addmothers.f file from its template."""

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'addmothers.f')

    # The template only needs the name of the config-number variable.
    writer.write(open(template_path).read() % {'iconfig': 'diag_number'})

    return True
4786 4787 4788 #=========================================================================== 4789 # write_combine_events 4790 #===========================================================================
def write_combine_events(self, writer, nb_proc=100):
    """Write the Source/combine_events.f file for MG4 from its template."""

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_combine_events.f')

    # mssm models read the converted MG5 param card instead.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # maxpup is the number of @X process classes in the process card.
    # The +1 is just a security. This is not needed but I feel(OM) safer with it.
    writer.write(open(template_path).read() % {'param_card_name': card,
                                               'maxpup': nb_proc + 1})

    return True
4808 4809 4810 #=========================================================================== 4811 # write_symmetry 4812 #===========================================================================
def write_symmetry(self, writer, v5=True):
    """Write the SubProcess symmetry.f file for ME.

    Returns the substitution dictionary when no writer is given,
    True after writing the rendered template otherwise.
    """

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_symmetry.f')

    # mssm models read the converted MG5 param card instead.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    replace_dict = {'param_card_name': card,
                    'setparasecondarg': '' if v5 else ',.true.'}

    if not writer:
        return replace_dict

    writer.write(open(template_path).read() % replace_dict)
    return True
4835 4836 4837 4838 #=========================================================================== 4839 # write_iproc_file 4840 #===========================================================================
def write_iproc_file(self, writer, me_number):
    """Write the iproc.dat file for MG4.

    The file holds the 1-based number of this matrix element; the
    writer's own line formatting is applied before writing.
    """

    content = "%d" % (me_number + 1)

    # Let the writer split/format the line, then emit each piece.
    for formatted_piece in writer.write_line(content):
        writer.write(formatted_piece)

    return True
4849 4850 #=========================================================================== 4851 # write_mg_sym_file 4852 #===========================================================================
def write_mg_sym_file(self, writer, matrix_element):
    """Write the mg.sym file for MadEvent.

    Lists, for every final-state particle id occurring more than once,
    the (1-based, decays included) positions of the identical particles.
    """

    process = matrix_element.get('processes')[0]

    # Final-state legs with all decays included
    final_legs = [leg for leg in process.get_legs_with_decays()
                  if leg.get('state') == True]

    # Number of incoming legs (positions are offset past them)
    ninitial = len([leg for leg in process.get('legs')
                    if leg.get('state') == False])

    # Group 1-based positions of final-state particles by pdg id
    identical_indices = {}
    for offset, leg in enumerate(final_legs):
        identical_indices.setdefault(leg.get('id'), []).append(
            offset + ninitial + 1)

    # Only ids occurring at least twice are "identical particles"
    identical_indices = dict(
        (pdg, positions) for pdg, positions in identical_indices.items()
        if len(positions) > 1)

    # File format: group count, then per group its size and positions
    lines = [str(len(identical_indices))]
    for positions in identical_indices.values():
        lines.append(str(len(positions)))
        lines.extend(str(number) for number in positions)

    writer.writelines(lines)

    return True
4889 4890 #=========================================================================== 4891 # write_mg_sym_file 4892 #===========================================================================
4893 - def write_default_mg_sym_file(self, writer):
4894 """Write the mg.sym file for MadEvent.""" 4895 4896 lines = "0" 4897 4898 # Write the file 4899 writer.writelines(lines) 4900 4901 return True
4902 4903 #=========================================================================== 4904 # write_ncombs_file 4905 #===========================================================================
4906 - def write_ncombs_file(self, writer, nexternal):
4907 """Write the ncombs.inc file for MadEvent.""" 4908 4909 # ncomb (used for clustering) is 2^nexternal 4910 file = " integer n_max_cl\n" 4911 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4912 4913 # Write the file 4914 writer.writelines(file) 4915 4916 return True
4917 4918 #=========================================================================== 4919 # write_processes_file 4920 #===========================================================================
4921 - def write_processes_file(self, writer, subproc_group):
4922 """Write the processes.dat file with info about the subprocesses 4923 in this group.""" 4924 4925 lines = [] 4926 4927 for ime, me in \ 4928 enumerate(subproc_group.get('matrix_elements')): 4929 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4930 ",".join(p.base_string() for p in \ 4931 me.get('processes')))) 4932 if me.get('has_mirror_process'): 4933 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4934 for proc in mirror_procs: 4935 legs = copy.copy(proc.get('legs_with_decays')) 4936 legs.insert(0, legs.pop(1)) 4937 proc.set("legs_with_decays", legs) 4938 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4939 mirror_procs)) 4940 else: 4941 lines.append("mirror none") 4942 4943 # Write the file 4944 writer.write("\n".join(lines)) 4945 4946 return True
4947 4948 #=========================================================================== 4949 # write_symswap_file 4950 #===========================================================================
4951 - def write_symswap_file(self, writer, ident_perms):
4952 """Write the file symswap.inc for MG4 by comparing diagrams using 4953 the internal matrix element value functionality.""" 4954 4955 lines = [] 4956 4957 # Write out lines for symswap.inc file (used to permute the 4958 # external leg momenta 4959 for iperm, perm in enumerate(ident_perms): 4960 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4961 (iperm+1, ",".join([str(i+1) for i in perm]))) 4962 lines.append("data nsym/%d/" % len(ident_perms)) 4963 4964 # Write the file 4965 writer.writelines(lines) 4966 4967 return True
4968 4969 #=========================================================================== 4970 # write_symfact_file 4971 #===========================================================================
4972 - def write_symfact_file(self, writer, symmetry):
4973 """Write the files symfact.dat for MG4 by comparing diagrams using 4974 the internal matrix element value functionality.""" 4975 4976 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4977 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4978 # Write out lines for symswap.inc file (used to permute the 4979 # external leg momenta 4980 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4981 # Write the file 4982 writer.write('\n'.join(lines)) 4983 writer.write('\n') 4984 4985 return True
4986 4987 #=========================================================================== 4988 # write_symperms_file 4989 #===========================================================================
4990 - def write_symperms_file(self, writer, perms):
4991 """Write the symperms.inc file for subprocess group, used for 4992 symmetric configurations""" 4993 4994 lines = [] 4995 for iperm, perm in enumerate(perms): 4996 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4997 (iperm+1, ",".join([str(i+1) for i in perm]))) 4998 4999 # Write the file 5000 writer.writelines(lines) 5001 5002 return True
5003 5004 #=========================================================================== 5005 # write_subproc 5006 #===========================================================================
5007 - def write_subproc(self, writer, subprocdir):
5008 """Append this subprocess to the subproc.mg file for MG4""" 5009 5010 # Write line to file 5011 writer.write(subprocdir + "\n") 5012 5013 return True
5014
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used for the grouped matrix-element Fortran code
    matrix_file = "matrix_madevent_group_v4.inc"
    # Output-mode tag consulted by the exporter machinery
    grouped_mode = 'madevent'

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls written (0 on failure to
        enter the subprocess directory).
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                  "subproc_group object not SubProcessGroup"

        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        # The whole routine runs inside SubProcesses/P<n>_<name>; the
        # original working directory is restored at the end.
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory may already exist; warn and continue.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f; all grouped ME's must share the same number of
        # helicity configurations
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb != me.get_helicity_combinations():
                raise MadGraph5Error("All grouped processes must share the " + \
                                     "same number of helicity configurations.")

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename), ncomb,
                          n_grouped_proc=len(matrix_elements),
                          v5=self.opt['v5_model'])

        for ime, matrix_element in enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime + 1)
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime + 1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime + 1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime + 1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams (skipped when the 'noeps' output option is 'True')
            if not 'noeps' in self.opt['output_options'] or \
                   self.opt['output_options']['noeps'] != 'True':
                filename = "matrix%d.ps" % (ime + 1)
                plot = draw.MultiEpsDiagramDrawer(
                                matrix_element.get('base_amplitude').get('diagrams'),
                                filename,
                                model=matrix_element.get('processes')[0].get('model'),
                                amplitude=True)
                logger.info("Generating Feynman diagrams for " + \
                            matrix_element.get('processes')[0].nice_string())
                plot.draw()

        # Extract number of external particles
        # NOTE(review): relies on 'matrix_element' leaking from the loop
        # above (last matrix element of the group).
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # check consistency: configs mapped onto another config (negative
        # symmetry factor) must carry the same QCD power
        for i, sym_fact in enumerate(symmetry):
            if sym_fact >= 0:
                continue
            if nqcd_list[i] != nqcd_list[abs(sym_fact) - 1]:
                # NOTE(review): the debug print indexes nqcd_list[abs(sym_fact)]
                # (not -1) and the message reads "powwer" — verify intended.
                misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)])
                raise Exception("identical diagram with different QCD powwer")

        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir, subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5263 5264 #=========================================================================== 5265 # write_super_auto_dsig_file 5266 #===========================================================================
5267 - def write_super_auto_dsig_file(self, writer, subproc_group):
5268 """Write the auto_dsig.f file selecting between the subprocesses 5269 in subprocess group mode""" 5270 5271 replace_dict = {} 5272 5273 # Extract version number and date from VERSION file 5274 info_lines = self.get_mg5_info_lines() 5275 replace_dict['info_lines'] = info_lines 5276 5277 matrix_elements = subproc_group.get('matrix_elements') 5278 5279 # Extract process info lines 5280 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5281 matrix_elements]) 5282 replace_dict['process_lines'] = process_lines 5283 5284 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5285 replace_dict['nexternal'] = nexternal 5286 5287 replace_dict['nsprocs'] = 2*len(matrix_elements) 5288 5289 # Generate dsig definition line 5290 dsig_def_line = "DOUBLE PRECISION " + \ 5291 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5292 range(len(matrix_elements))]) 5293 replace_dict["dsig_def_line"] = dsig_def_line 5294 5295 # Generate dsig process lines 5296 call_dsig_proc_lines = [] 5297 for iproc in range(len(matrix_elements)): 5298 call_dsig_proc_lines.append(\ 5299 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 5300 {"num": iproc + 1, 5301 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5302 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5303 5304 ncomb=matrix_elements[0].get_helicity_combinations() 5305 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5306 5307 s1,s2 = matrix_elements[0].get_spin_state_initial() 5308 replace_dict['nb_spin_state1'] = s1 5309 replace_dict['nb_spin_state2'] = s2 5310 5311 if writer: 5312 file = open(pjoin(_file_path, \ 5313 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5314 file = file % replace_dict 5315 5316 # Write the file 5317 writer.writelines(file) 5318 else: 5319 return replace_dict
5320 5321 #=========================================================================== 5322 # write_mirrorprocs 5323 #===========================================================================
5324 - def write_mirrorprocs(self, writer, subproc_group):
5325 """Write the mirrorprocs.inc file determining which processes have 5326 IS mirror process in subprocess group mode.""" 5327 5328 lines = [] 5329 bool_dict = {True: '.true.', False: '.false.'} 5330 matrix_elements = subproc_group.get('matrix_elements') 5331 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5332 (len(matrix_elements), 5333 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5334 me in matrix_elements]))) 5335 # Write the file 5336 writer.writelines(lines)
5337 5338 #=========================================================================== 5339 # write_addmothers 5340 #===========================================================================
5341 - def write_addmothers(self, writer):
5342 """Write the SubProcess/addmothers.f""" 5343 5344 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5345 5346 text = open(path).read() % {'iconfig': 'lconfig'} 5347 writer.write(text) 5348 5349 return True
5350 5351 5352 #=========================================================================== 5353 # write_coloramps_file 5354 #===========================================================================
5355 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5356 matrix_elements):
5357 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5358 5359 # Create a map from subprocess (matrix element) to a list of 5360 # the diagrams corresponding to each config 5361 5362 lines = [] 5363 5364 subproc_to_confdiag = {} 5365 for config in diagrams_for_config: 5366 for subproc, diag in enumerate(config): 5367 try: 5368 subproc_to_confdiag[subproc].append(diag) 5369 except KeyError: 5370 subproc_to_confdiag[subproc] = [diag] 5371 5372 for subproc in sorted(subproc_to_confdiag.keys()): 5373 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5374 matrix_elements[subproc], 5375 subproc + 1)) 5376 5377 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5378 (maxflows, 5379 len(diagrams_for_config), 5380 len(matrix_elements))) 5381 5382 # Write the file 5383 writer.writelines(lines) 5384 5385 return True
5386 5387 #=========================================================================== 5388 # write_config_subproc_map_file 5389 #===========================================================================
5390 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5391 """Write the config_subproc_map.inc file for subprocess groups""" 5392 5393 lines = [] 5394 # Output only configs that have some corresponding diagrams 5395 iconfig = 0 5396 for config in config_subproc_map: 5397 if set(config) == set([0]): 5398 continue 5399 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5400 (iconfig + 1, len(config), 5401 ",".join([str(i) for i in config]))) 5402 iconfig += 1 5403 # Write the file 5404 writer.writelines(lines) 5405 5406 return True
5407 5408 #=========================================================================== 5409 # read_write_good_hel 5410 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """Return Fortran source for reading, writing and initialising the
        GOODHEL common block (bookkeeping for helicity filtering).

        ncomb: total number of helicity combinations, substituted into the
        generated PARAMETER statements.  The returned text is later passed
        through FortranWriter, which normalises the layout.
        """

        convert = {'ncomb' : ncomb}

        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I,1) = .false.
        GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
5472 5473 5474 5475 #=========================================================================== 5476 # write_configs_file 5477 #===========================================================================
5478 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5479 """Write the configs.inc file with topology information for a 5480 subprocess group. Use the first subprocess with a diagram for each 5481 configuration.""" 5482 5483 matrix_elements = subproc_group.get('matrix_elements') 5484 model = matrix_elements[0].get('processes')[0].get('model') 5485 5486 diagrams = [] 5487 config_numbers = [] 5488 for iconfig, config in enumerate(diagrams_for_config): 5489 # Check if any diagrams correspond to this config 5490 if set(config) == set([0]): 5491 continue 5492 subproc_diags = [] 5493 for s,d in enumerate(config): 5494 if d: 5495 subproc_diags.append(matrix_elements[s].\ 5496 get('diagrams')[d-1]) 5497 else: 5498 subproc_diags.append(None) 5499 diagrams.append(subproc_diags) 5500 config_numbers.append(iconfig + 1) 5501 5502 # Extract number of external particles 5503 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5504 5505 return len(diagrams), \ 5506 self.write_configs_file_from_diagrams(writer, diagrams, 5507 config_numbers, 5508 nexternal, ninitial, 5509 model)
5510 5511 #=========================================================================== 5512 # write_run_configs_file 5513 #===========================================================================
5514 - def write_run_config_file(self, writer):
5515 """Write the run_configs.inc file for MadEvent""" 5516 5517 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5518 if self.proc_characteristic['loop_induced']: 5519 job_per_chan = 1 5520 else: 5521 job_per_chan = 2 5522 text = open(path).read() % {'chanperjob':job_per_chan} 5523 writer.write(text) 5524 return True
5525 5526 5527 #=========================================================================== 5528 # write_leshouche_file 5529 #===========================================================================
5530 - def write_leshouche_file(self, writer, subproc_group):
5531 """Write the leshouche.inc file for MG4""" 5532 5533 all_lines = [] 5534 5535 for iproc, matrix_element in \ 5536 enumerate(subproc_group.get('matrix_elements')): 5537 all_lines.extend(self.get_leshouche_lines(matrix_element, 5538 iproc)) 5539 # Write the file 5540 writer.writelines(all_lines) 5541 return True
5542 5543
    def finalize(self, *args, **opts):
        """Run the parent finalization, then record that this output uses
        grouped matrix elements (read back by the run interface)."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        # Ensure that the grouping information is on the correct value
        self.proc_characteristic['grouped_matrix'] = True
5549 5550 5551 #=============================================================================== 5552 # UFO_model_to_mg4 5553 #=============================================================================== 5554 5555 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS', 'MU_R']
    # Fortran type names used for the multiple-precision (quadruple) mode
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
5572 - def __init__(self, model, output_path, opt=None):
5573 """ initialization of the objects """ 5574 5575 self.model = model 5576 self.model_name = model['name'] 5577 self.dir_path = output_path 5578 5579 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5580 'loop_induced': False} 5581 if opt: 5582 self.opt.update(opt) 5583 5584 self.coups_dep = [] # (name, expression, type) 5585 self.coups_indep = [] # (name, expression, type) 5586 self.params_dep = [] # (name, expression, type) 5587 self.params_indep = [] # (name, expression, type) 5588 self.params_ext = [] # external parameter 5589 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5590 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5591
5593 """modify the parameter if some of them are identical up to the case""" 5594 5595 lower_dict={} 5596 duplicate = set() 5597 keys = list(self.model['parameters'].keys()) 5598 keys.sort() 5599 for key in keys: 5600 for param in self.model['parameters'][key]: 5601 lower_name = param.name.lower() 5602 if not lower_name: 5603 continue 5604 try: 5605 lower_dict[lower_name].append(param) 5606 except KeyError as error: 5607 lower_dict[lower_name] = [param] 5608 else: 5609 duplicate.add(lower_name) 5610 logger.debug('%s is define both as lower case and upper case.' 5611 % lower_name) 5612 if not duplicate: 5613 return 5614 5615 re_expr = r'''\b(%s)\b''' 5616 to_change = [] 5617 change={} 5618 for value in duplicate: 5619 for i, var in enumerate(lower_dict[value]): 5620 to_change.append(var.name) 5621 new_name = '%s%s' % (var.name.lower(), 5622 ('__%d'%(i+1) if i>0 else '')) 5623 change[var.name] = new_name 5624 var.name = new_name 5625 5626 # Apply the modification to the map_CTcoup_CTparam of the model 5627 # if it has one (giving for each coupling the CT parameters whcih 5628 # are necessary and which should be exported to the model. 
5629 if hasattr(self.model,'map_CTcoup_CTparam'): 5630 for coup, ctparams in self.model.map_CTcoup_CTparam: 5631 for i, ctparam in enumerate(ctparams): 5632 try: 5633 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5634 except KeyError: 5635 pass 5636 5637 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5638 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5639 5640 # change parameters 5641 for key in keys: 5642 if key == ('external',): 5643 continue 5644 for param in self.model['parameters'][key]: 5645 param.expr = rep_pattern.sub(replace, param.expr) 5646 5647 # change couplings 5648 for key in self.model['couplings'].keys(): 5649 for coup in self.model['couplings'][key]: 5650 coup.expr = rep_pattern.sub(replace, coup.expr) 5651 5652 # change mass/width 5653 for part in self.model['particles']: 5654 if str(part.get('mass')) in to_change: 5655 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5656 if str(part.get('width')) in to_change: 5657 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5658
    def refactorize(self, wanted_couplings = []):
        """modify the couplings to fit with MG4 convention

        Splits the model parameters into external / alphaS-dependent /
        independent lists, and the couplings into alphaS-dependent and
        independent lists (optionally restricted to wanted_couplings).
        """

        # Keep only separation in alphaS
        keys = list(self.model['parameters'].keys())
        keys.sort(key=len)
        for key in keys:
            # Skip unnamed entries
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        # NOTE(review): this sorted 'keys' list is not used by the loop
        # below, which iterates .items() directly — verify intended.
        keys = list(self.model['couplings'].keys())
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related computation
        #Pass G in the independant list
        # NOTE(review): the membership/index tests below compare model
        # variables against the plain string 'G'/'aS' — presumably their
        # __eq__ accepts strings; confirm against base_objects.
        if 'G' in self.params_dep:
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
        #    G.expr = '2*cmath.sqrt(as*pi)'
        #    self.params_indep.insert(0, self.params_dep.pop(index))
        # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append(base_objects.ModelVariable('aS', '0.138', 'real'))
            self.params_indep.append(base_objects.ModelVariable('G', '4.1643', 'real'))
5701 - def build(self, wanted_couplings = [], full=True):
5702 """modify the couplings to fit with MG4 convention and creates all the 5703 different files""" 5704 5705 self.pass_parameter_to_case_insensitive() 5706 self.refactorize(wanted_couplings) 5707 5708 # write the files 5709 if full: 5710 if wanted_couplings: 5711 # extract the wanted ct parameters 5712 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5713 self.write_all()
5714 5715
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        name:    file name, relative to self.dir_path.
        comment: comment character used for the header banner.
        format:  'fortran' returns a FortranWriter, anything else a plain
                 file object; either way a three-line banner is written.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
            # Call writelines through io.FileIO so the header lines bypass
            # FortranWriter's own Fortran reformatting.
            # NOTE(review): this relies on how the writers.FileWriter
            # hierarchy handles str payloads — verify against file_writers.
            write_class = io.FileIO

            write_class.writelines(fsock, comment * 77 + '\n')
            write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) * ' '})
            write_class.writelines(fsock, comment * 77 + '\n\n')
        else:
            fsock = open(file_path, 'w')
            fsock.writelines(comment * 77 + '\n')
            fsock.writelines('%(comment)s written by the UFO converter\n' % \
                             {'comment': comment + (6 - len(comment)) * ' '})
            fsock.writelines(comment * 77 + '\n\n')
        return fsock
5737 5738
    def write_all(self):
        """ write all the files

        Creation order matters: parameter cards and definitions first,
        then couplings, then build files and the standard library copies.
        """

        # write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        # write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True, mp=False)
        # quadruple-precision variants only when multiple precision is on
        if self.opt['mp']:
            self.create_intparam_def(dp=False, mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()

        # All the standard files
        self.copy_standard_file()
5771 5772 ############################################################################ 5773 ## ROUTINE CREATING THE FILES ############################################ 5774 ############################################################################ 5775
    def copy_standard_file(self):
        """Copy the standard files for the fortran model.

        Links the shared Fortran sources, instantiates rw_para.f from its
        template (includes + optional param_card loading call), picks the
        right lha_read variant, and installs the matching makefile.
        """

        #copy the library files
        file_to_link = ['formats.inc', 'printout.f', \
                        'rw_para.f', 'testprog.f']

        for filename in file_to_link:
            cp(MG5DIR + '/models/template_files/fortran/' + filename, \
               self.dir_path)

        # NOTE(review): local 'file' shadows the Python builtin (kept as-is)
        file = open(os.path.join(MG5DIR, \
                        'models/template_files/fortran/rw_para.f')).read()

        includes = ["include \'coupl.inc\'", "include \'input.inc\'",
                    "include \'model_functions.inc\'"]
        if self.opt['mp']:
            includes.extend(["include \'mp_coupl.inc\'", "include \'mp_input.inc\'"])
        # In standalone and madloop we do no use the compiled param card but
        # still parse the .dat one so we must load it.
        if self.opt['loop_induced']:
            #loop induced follow MadEvent way to handle the card.
            load_card = ''
            lha_read_filename = 'lha_read.f'
        elif self.opt['export_format'] in ['madloop', 'madloop_optimized', 'madloop_matchbox']:
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename = 'lha_read_mp.f'
        elif self.opt['export_format'].startswith('standalone') \
            or self.opt['export_format'] in ['madweight', 'plugin'] \
            or self.opt['export_format'].startswith('matchbox'):
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename = 'lha_read.f'
        else:
            load_card = ''
            lha_read_filename = 'lha_read.f'
        cp(MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
           os.path.join(self.dir_path, 'lha_read.f'))

        # Fill the rw_para.f template and write it out
        file = file % {'includes': '\n '.join(includes),
                       'load_card': load_card}
        writer = open(os.path.join(self.dir_path, 'rw_para.f'), 'w')
        writer.writelines(file)
        writer.close()

        # Pick the makefile matching the export format
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            cp(MG5DIR + '/models/template_files/fortran/makefile_madevent',
               self.dir_path + '/makefile')
            if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
                # aMC@NLO reuses the MadEvent makefile with renamed targets
                path = pjoin(self.dir_path, 'makefile')
                text = open(path).read()
                text = text.replace('madevent', 'aMCatNLO')
                open(path, 'w').writelines(text)
        elif self.opt['export_format'] in ['standalone', 'standalone_msP', 'standalone_msF',
                                           'madloop', 'madloop_optimized', 'standalone_rw',
                                           'madweight', 'matchbox', 'madloop_matchbox', 'plugin']:
            cp(MG5DIR + '/models/template_files/fortran/makefile_standalone',
               self.dir_path + '/makefile')
        #elif self.opt['export_format'] in []:
        #pass
        else:
            raise MadGraph5Error('Unknown format')
5838
    def create_coupl_inc(self):
        """Write coupl.inc (and, when self.opt['mp'] is set, the
        multiple-precision companions mp_coupl.inc and
        mp_coupl_same_name.inc): the fortran declarations and common blocks
        for G, gal, MU_R, Nf, the particle masses/widths and all couplings.
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            # mp_coupl.inc declares the mp-prefixed names, while
            # mp_coupl_same_name.inc reuses the double-precision names
            # (used by code that includes only one of the two).
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':self.mp_prefix})
            # same declarations without the mp prefix
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # complex-mass-scheme: a CMASS_* variable exists only for
                # particles having both a mass and a width
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                                      ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                                    ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                      self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                            ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                                      ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                                    ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                      self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                            ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                                 ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                               ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                 self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                       ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                                  ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                                ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                      ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5959 - def create_write_couplings(self):
5960 """ write the file coupl_write.inc """ 5961 5962 fsock = self.open('coupl_write.inc', format='fortran') 5963 5964 fsock.writelines("""write(*,*) ' Couplings of %s' 5965 write(*,*) ' ---------------------------------' 5966 write(*,*) ' '""" % self.model_name) 5967 def format(coupl): 5968 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5969 5970 # Write the Couplings 5971 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5972 fsock.writelines('\n'.join(lines)) 5973 5974
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares the real and complex internal/external parameters in fortran
        common blocks, skipping masses/widths (already declared in coupl.inc),
        the reserved names g/mu_r/zero, and unused CT parameters. Also writes
        mp_input.inc when multiple precision is requested.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # a parameter must be declared here only if not reserved and not
        # already declared among the masses/widths
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                            self.params_indep if param.type == 'real'
                            and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real'and
                               is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                                          if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                              self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                            self.mp_prefix+p for p in real_parameters])+'\n\n')

        # note: external complex parameters are not expected, only internal
        # dependent/independent ones are collected here
        complex_parameters = [param.name for param in self.params_dep +
                            self.params_indep if param.type == 'complex' and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                                          if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                              self.mp_prefix+p for p in complex_parameters])+'\n\n')
6032 - def check_needed_param(self, param):
6033 """ Returns whether the parameter in argument is needed for this 6034 specific computation or not.""" 6035 6036 # If this is a leading order model or if there was no CT parameter 6037 # employed in this NLO model, one can directly return that the 6038 # parameter is needed since only CTParameters are filtered. 6039 if not hasattr(self, 'allCTparameters') or \ 6040 self.allCTparameters is None or self.usedCTparameters is None or \ 6041 len(self.allCTparameters)==0: 6042 return True 6043 6044 # We must allow the conjugate shorthand for the complex parameter as 6045 # well so we check wether either the parameter name or its name with 6046 # 'conjg__' substituted with '' is present in the list. 6047 # This is acceptable even if some parameter had an original name 6048 # including 'conjg__' in it, because at worst we export a parameter 6049 # was not needed. 6050 param = param.lower() 6051 cjg_param = param.replace('conjg__','',1) 6052 6053 # First make sure it is a CTparameter 6054 if param not in self.allCTparameters and \ 6055 cjg_param not in self.allCTparameters: 6056 return True 6057 6058 # Now check if it is in the list of CTparameters actually used 6059 return (param in self.usedCTparameters or \ 6060 cjg_param in self.usedCTparameters)
6061
6062 - def extract_needed_CTparam(self,wanted_couplings=[]):
6063 """ Extract what are the needed CT parameters given the wanted_couplings""" 6064 6065 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6066 # Setting these lists to none wil disable the filtering in 6067 # check_needed_param 6068 self.allCTparameters = None 6069 self.usedCTparameters = None 6070 return 6071 6072 # All CTparameters appearin in all CT couplings 6073 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6074 # Define in this class the list of all CT parameters 6075 self.allCTparameters=list(\ 6076 set(itertools.chain.from_iterable(allCTparameters))) 6077 6078 # All used CT couplings 6079 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6080 allUsedCTCouplings = [coupl for coupl in 6081 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6082 6083 # Now define the list of all CT parameters that are actually used 6084 self.usedCTparameters=list(\ 6085 set(itertools.chain.from_iterable([ 6086 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6087 ]))) 6088 6089 # Now at last, make these list case insensitive 6090 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6091 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6092
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                             ('mp_' if mp and not dp else ''), format='fortran')

        # Scale-independent parameters: computed only once, when the
        # param_card is (re)read (READLHA guard).
        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        # Scale-dependent parameters: re-evaluated for every event.
        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            # NOTE(review): this loop uses `elif mp` while the params_indep
            # loop above uses a separate `if mp`; harmless as long as the
            # method is never called with dp and mp both True -- confirm.
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        # gal(1) drives the aqed value written in the LHE file; its
        # definition depends on which EW input is external in the model.
        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                                     gal(2) = 1d0
                             """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                                     %(mp_prefix)sgal(2) = 1d0
                             """ %{'mp_prefix':self.mp_prefix})
                pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                                     gal(2) = 1d0
                             """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                                     %(mp_prefix)sgal(2) = 1d0
                             """ %{'mp_prefix':self.mp_prefix})
                pass
        else:
            # neither scheme available: fall back to 1 and warn (dp only)
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
                                     gal(2) = 1d0
                             """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                                     %(mp_prefix)sgal(2) = 1e0_16
                             """%{'mp_prefix':self.mp_prefix})
6178 - def create_couplings(self):
6179 """ create couplings.f and all couplingsX.f """ 6180 6181 nb_def_by_file = 25 6182 6183 self.create_couplings_main(nb_def_by_file) 6184 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6185 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6186 6187 for i in range(nb_coup_indep): 6188 # For the independent couplings, we compute the double and multiple 6189 # precision ones together 6190 data = self.coups_indep[nb_def_by_file * i: 6191 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6192 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6193 6194 for i in range(nb_coup_dep): 6195 # For the dependent couplings, we compute the double and multiple 6196 # precision ones in separate subroutines. 6197 data = self.coups_dep[nb_def_by_file * i: 6198 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6199 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6200 dp=True,mp=False) 6201 if self.opt['mp']: 6202 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6203 dp=False,mp=True)
6204 6205
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Writes the driver subroutines: coup() (full initialisation at card
        reading), update_as_param() / update_as_param2() (re-evaluation of
        the scale-dependent couplings) and, when self.opt['mp'] is set,
        mp_update_as_param(). The actual coupX() bodies are produced by
        create_couplings_part, nb_def_by_file couplings per file.
        """

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

            implicit none
            double precision PI, ZERO
            logical READLHA
            parameter (PI=3.141592653589793d0)
            parameter (ZERO=0d0)
            include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
                """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
                include \'coupl.inc\'
                READLHA = .true.
                include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        # scale-independent couplings: coup1 ... coup{nb_coup_indep}
        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        # scale-dependent couplings: the following nb_coup_dep subroutines
        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param(): recompute only the scale-dependent couplings
        # (READLHA = .false.) after a change of aS.
        fsock.writelines("""subroutine update_as_param()

            implicit none
            double precision PI, ZERO
            logical READLHA
            parameter (PI=3.141592653589793d0)
            parameter (ZERO=0d0)
            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                include \'coupl.inc\'
                READLHA = .false.""")
        fsock.writelines("""
                include \'intparam_definition.inc\'\n
                """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2(mu_r2, as2): set the scale and aS explicitly,
        # then delegate to update_as_param().
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

            implicit none
            double precision PI
            parameter (PI=3.141592653589793d0)
            double precision mu_r2, as2
            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
                include \'coupl.inc\'""")
        fsock.writelines("""
                if (mu_r2.gt.0d0) MU_R = mu_r2
                G = SQRT(4.0d0*PI*AS2)
                AS = as2

                CALL UPDATE_AS_PARAM()
                """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            # multiple-precision analogue of update_as_param()
            fsock.writelines("""subroutine mp_update_as_param()

                implicit none
                logical READLHA
                include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
                """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
                include \'coupl.inc\'
                include \'actualize_mp_ext_params.inc\'
                READLHA = .false.
                include \'mp_intparam_definition.inc\'\n
                """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                                 nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

            implicit none
            include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
            double precision PI, ZERO
            parameter (PI=3.141592653589793d0)
            parameter (ZERO=0d0)
            include 'input.inc'
            include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
                """%self.mp_real_format)

        # one assignment per coupling, translated from the UFO expression by
        # the (mp_)p_to_f parser
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                          self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                          self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
    def create_model_functions_inc(self):
        """ Create model_functions.inc which contains the various declarations
        of auxiliary functions which might be used in the couplings expressions
        """

        additional_fct = []
        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            for fct in ufo_fct:
                # already handle by default
                if str(fct.name) not in ["complexconjugate", "re", "im", "sec",
                       "csc", "asec", "acsc", "theta_function", "cond",
                       "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot",
                       "grreglog","regsqrt"]:
                    additional_fct.append(fct.name)

        fsock = self.open('model_functions.inc', format='fortran')
        # declarations for the built-in auxiliary functions, followed by one
        # declaration per user/UFO-defined function
        fsock.writelines("""double complex cond
          double complex condif
          double complex reglog
          double complex reglogp
          double complex reglogm
          double complex recms
          double complex arg
          double complex grreglog
          double complex regsqrt
          %s
          """ % "\n".join(["          double complex %s" % i for i in additional_fct]))


        if self.opt['mp']:
            # multiple-precision counterparts (mp_ prefix)
            fsock.writelines("""%(complex_mp_format)s mp_cond
          %(complex_mp_format)s mp_condif
          %(complex_mp_format)s mp_reglog
          %(complex_mp_format)s mp_reglogp
          %(complex_mp_format)s mp_reglogm
          %(complex_mp_format)s mp_recms
          %(complex_mp_format)s mp_arg
          %(complex_mp_format)s mp_grreglog
          %(complex_mp_format)s mp_regsqrt
          %(additional)s
          """ %\
          {"additional": "\n".join(["          %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]),
            'complex_mp_format':self.mp_complex_format
          })
6407 - def create_model_functions_def(self):
6408 """ Create model_functions.f which contains the various definitions 6409 of auxiliary functions which might be used in the couplings expressions 6410 Add the functions.f functions for formfactors support 6411 """ 6412 6413 fsock = self.open('model_functions.f', format='fortran') 6414 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6415 implicit none 6416 double complex condition,truecase,falsecase 6417 if(condition.eq.(0.0d0,0.0d0)) then 6418 cond=truecase 6419 else 6420 cond=falsecase 6421 endif 6422 end 6423 6424 double complex function condif(condition,truecase,falsecase) 6425 implicit none 6426 logical condition 6427 double complex truecase,falsecase 6428 if(condition) then 6429 condif=truecase 6430 else 6431 condif=falsecase 6432 endif 6433 end 6434 6435 double complex function recms(condition,expr) 6436 implicit none 6437 logical condition 6438 double complex expr 6439 if(condition)then 6440 recms=expr 6441 else 6442 recms=dcmplx(dble(expr)) 6443 endif 6444 end 6445 6446 double complex function reglog(arg) 6447 implicit none 6448 double complex TWOPII 6449 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6450 double complex arg 6451 if(arg.eq.(0.0d0,0.0d0)) then 6452 reglog=(0.0d0,0.0d0) 6453 else 6454 reglog=log(arg) 6455 endif 6456 end 6457 6458 double complex function reglogp(arg) 6459 implicit none 6460 double complex TWOPII 6461 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6462 double complex arg 6463 if(arg.eq.(0.0d0,0.0d0))then 6464 reglogp=(0.0d0,0.0d0) 6465 else 6466 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6467 reglogp=log(arg) + TWOPII 6468 else 6469 reglogp=log(arg) 6470 endif 6471 endif 6472 end 6473 6474 double complex function reglogm(arg) 6475 implicit none 6476 double complex TWOPII 6477 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6478 double complex arg 6479 if(arg.eq.(0.0d0,0.0d0))then 6480 reglogm=(0.0d0,0.0d0) 6481 else 6482 
if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6483 reglogm=log(arg) - TWOPII 6484 else 6485 reglogm=log(arg) 6486 endif 6487 endif 6488 end 6489 6490 double complex function regsqrt(arg_in) 6491 implicit none 6492 double complex arg_in 6493 double complex arg 6494 arg=arg_in 6495 if(dabs(dimag(arg)).eq.0.0d0)then 6496 arg=dcmplx(dble(arg),0.0d0) 6497 endif 6498 if(dabs(dble(arg)).eq.0.0d0)then 6499 arg=dcmplx(0.0d0,dimag(arg)) 6500 endif 6501 regsqrt=sqrt(arg) 6502 end 6503 6504 double complex function grreglog(logsw,expr1_in,expr2_in) 6505 implicit none 6506 double complex TWOPII 6507 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6508 double complex expr1_in,expr2_in 6509 double complex expr1,expr2 6510 double precision logsw 6511 double precision imagexpr 6512 logical firstsheet 6513 expr1=expr1_in 6514 expr2=expr2_in 6515 if(dabs(dimag(expr1)).eq.0.0d0)then 6516 expr1=dcmplx(dble(expr1),0.0d0) 6517 endif 6518 if(dabs(dble(expr1)).eq.0.0d0)then 6519 expr1=dcmplx(0.0d0,dimag(expr1)) 6520 endif 6521 if(dabs(dimag(expr2)).eq.0.0d0)then 6522 expr2=dcmplx(dble(expr2),0.0d0) 6523 endif 6524 if(dabs(dble(expr2)).eq.0.0d0)then 6525 expr2=dcmplx(0.0d0,dimag(expr2)) 6526 endif 6527 if(expr1.eq.(0.0d0,0.0d0))then 6528 grreglog=(0.0d0,0.0d0) 6529 else 6530 imagexpr=dimag(expr1)*dimag(expr2) 6531 firstsheet=imagexpr.ge.0.0d0 6532 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6533 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6534 if(firstsheet)then 6535 grreglog=log(expr1) 6536 else 6537 if(dimag(expr1).gt.0.0d0)then 6538 grreglog=log(expr1) - logsw*TWOPII 6539 else 6540 grreglog=log(expr1) + logsw*TWOPII 6541 endif 6542 endif 6543 endif 6544 end 6545 6546 double complex function arg(comnum) 6547 implicit none 6548 double complex comnum 6549 double complex iim 6550 iim = (0.0d0,1.0d0) 6551 if(comnum.eq.(0.0d0,0.0d0)) then 6552 arg=(0.0d0,0.0d0) 6553 else 6554 arg=log(comnum/abs(comnum))/iim 6555 endif 6556 end""") 6557 if self.opt['mp']: 6558 
fsock.writelines(""" 6559 6560 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6561 implicit none 6562 %(complex_mp_format)s condition,truecase,falsecase 6563 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6564 mp_cond=truecase 6565 else 6566 mp_cond=falsecase 6567 endif 6568 end 6569 6570 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6571 implicit none 6572 logical condition 6573 %(complex_mp_format)s truecase,falsecase 6574 if(condition) then 6575 mp_condif=truecase 6576 else 6577 mp_condif=falsecase 6578 endif 6579 end 6580 6581 %(complex_mp_format)s function mp_recms(condition,expr) 6582 implicit none 6583 logical condition 6584 %(complex_mp_format)s expr 6585 if(condition)then 6586 mp_recms=expr 6587 else 6588 mp_recms=cmplx(real(expr),kind=16) 6589 endif 6590 end 6591 6592 %(complex_mp_format)s function mp_reglog(arg) 6593 implicit none 6594 %(complex_mp_format)s TWOPII 6595 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6596 %(complex_mp_format)s arg 6597 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6598 mp_reglog=(0.0e0_16,0.0e0_16) 6599 else 6600 mp_reglog=log(arg) 6601 endif 6602 end 6603 6604 %(complex_mp_format)s function mp_reglogp(arg) 6605 implicit none 6606 %(complex_mp_format)s TWOPII 6607 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6608 %(complex_mp_format)s arg 6609 if(arg.eq.(0.0e0_16,0.0e0_16))then 6610 mp_reglogp=(0.0e0_16,0.0e0_16) 6611 else 6612 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6613 mp_reglogp=log(arg) + TWOPII 6614 else 6615 mp_reglogp=log(arg) 6616 endif 6617 endif 6618 end 6619 6620 %(complex_mp_format)s function mp_reglogm(arg) 6621 implicit none 6622 %(complex_mp_format)s TWOPII 6623 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6624 %(complex_mp_format)s arg 6625 if(arg.eq.(0.0e0_16,0.0e0_16))then 6626 mp_reglogm=(0.0e0_16,0.0e0_16) 
6627 else 6628 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6629 mp_reglogm=log(arg) - TWOPII 6630 else 6631 mp_reglogm=log(arg) 6632 endif 6633 endif 6634 end 6635 6636 %(complex_mp_format)s function mp_regsqrt(arg_in) 6637 implicit none 6638 %(complex_mp_format)s arg_in 6639 %(complex_mp_format)s arg 6640 arg=arg_in 6641 if(abs(imagpart(arg)).eq.0.0e0_16)then 6642 arg=cmplx(real(arg,kind=16),0.0e0_16) 6643 endif 6644 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6645 arg=cmplx(0.0e0_16,imagpart(arg)) 6646 endif 6647 mp_regsqrt=sqrt(arg) 6648 end 6649 6650 6651 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6652 implicit none 6653 %(complex_mp_format)s TWOPII 6654 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6655 %(complex_mp_format)s expr1_in,expr2_in 6656 %(complex_mp_format)s expr1,expr2 6657 %(real_mp_format)s logsw 6658 %(real_mp_format)s imagexpr 6659 logical firstsheet 6660 expr1=expr1_in 6661 expr2=expr2_in 6662 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6663 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6664 endif 6665 if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6666 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6667 endif 6668 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6669 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6670 endif 6671 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6672 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6673 endif 6674 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6675 mp_grreglog=(0.0e0_16,0.0e0_16) 6676 else 6677 imagexpr=imagpart(expr1)*imagpart(expr2) 6678 firstsheet=imagexpr.ge.0.0e0_16 6679 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6680 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6681 if(firstsheet)then 6682 mp_grreglog=log(expr1) 6683 else 6684 if(imagpart(expr1).gt.0.0e0_16)then 6685 mp_grreglog=log(expr1) - logsw*TWOPII 6686 else 6687 mp_grreglog=log(expr1) + logsw*TWOPII 6688 endif 6689 endif 6690 endif 6691 end 6692 6693 
%(complex_mp_format)s function mp_arg(comnum) 6694 implicit none 6695 %(complex_mp_format)s comnum 6696 %(complex_mp_format)s imm 6697 imm = (0.0e0_16,1.0e0_16) 6698 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6699 mp_arg=(0.0e0_16,0.0e0_16) 6700 else 6701 mp_arg=log(comnum/abs(comnum))/imm 6702 endif 6703 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6704 6705 6706 #check for the file functions.f 6707 model_path = self.model.get('modelpath') 6708 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6709 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6710 input = pjoin(model_path,'Fortran','functions.f') 6711 file.writelines(fsock, open(input).read()) 6712 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6713 6714 # check for functions define in the UFO model 6715 ufo_fct = self.model.get('functions') 6716 if ufo_fct: 6717 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6718 done = [] 6719 for fct in ufo_fct: 6720 # already handle by default 6721 if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6722 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6723 "grreglog","regsqrt"] + done: 6724 done.append(str(fct.name.lower())) 6725 ufo_fct_template = """ 6726 double complex function %(name)s(%(args)s) 6727 implicit none 6728 double complex %(args)s 6729 %(definitions)s 6730 %(name)s = %(fct)s 6731 6732 return 6733 end 6734 """ 6735 str_fct = self.p_to_f.parse(fct.expr) 6736 if not self.p_to_f.to_define: 6737 definitions = [] 6738 else: 6739 definitions=[] 6740 for d in self.p_to_f.to_define: 6741 if d == 'pi': 6742 definitions.append(' double precision pi') 6743 definitions.append(' data pi /3.1415926535897932d0/') 6744 else: 6745 definitions.append(' double complex %s' % d) 6746 6747 text = ufo_fct_template % { 6748 'name': fct.name, 6749 'args': ", ".join(fct.arguments), 6750 'fct': str_fct, 6751 'definitions': 
'\n'.join(definitions) 6752 } 6753 6754 fsock.writelines(text) 6755 if self.opt['mp']: 6756 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6757 for fct in ufo_fct: 6758 # already handle by default 6759 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6760 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6761 "grreglog","regsqrt"]: 6762 ufo_fct_template = """ 6763 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6764 implicit none 6765 %(complex_mp_format)s mp__%(args)s 6766 %(definitions)s 6767 mp_%(name)s = %(fct)s 6768 6769 return 6770 end 6771 """ 6772 str_fct = self.mp_p_to_f.parse(fct.expr) 6773 if not self.mp_p_to_f.to_define: 6774 definitions = [] 6775 else: 6776 definitions=[] 6777 for d in self.mp_p_to_f.to_define: 6778 if d == 'pi': 6779 definitions.append(' %s mp__pi' % self.mp_real_format) 6780 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6781 else: 6782 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6783 text = ufo_fct_template % { 6784 'name': fct.name, 6785 'args': ", mp__".join(fct.arguments), 6786 'fct': str_fct, 6787 'definitions': '\n'.join(definitions), 6788 'complex_mp_format': self.mp_complex_format 6789 } 6790 fsock.writelines(text) 6791 6792 6793 6794 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6795 6796 6797
6798 - def create_makeinc(self):
6799 """create makeinc.inc containing the file to compile """ 6800 6801 fsock = self.open('makeinc.inc', comment='#') 6802 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6803 text += ' model_functions.o ' 6804 6805 nb_coup_indep = 1 + len(self.coups_dep) // 25 6806 nb_coup_dep = 1 + len(self.coups_indep) // 25 6807 couplings_files=['couplings%s.o' % (i+1) \ 6808 for i in range(nb_coup_dep + nb_coup_indep) ] 6809 if self.opt['mp']: 6810 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6811 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6812 text += ' '.join(couplings_files) 6813 fsock.writelines(text)
6814
6815 - def create_param_write(self):
6816 """ create param_write """ 6817 6818 fsock = self.open('param_write.inc', format='fortran') 6819 6820 fsock.writelines("""write(*,*) ' External Params' 6821 write(*,*) ' ---------------------------------' 6822 write(*,*) ' '""") 6823 def format(name): 6824 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6825 6826 # Write the external parameter 6827 lines = [format(param.name) for param in self.params_ext] 6828 fsock.writelines('\n'.join(lines)) 6829 6830 fsock.writelines("""write(*,*) ' Internal Params' 6831 write(*,*) ' ---------------------------------' 6832 write(*,*) ' '""") 6833 lines = [format(data.name) for data in self.params_indep 6834 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6835 fsock.writelines('\n'.join(lines)) 6836 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6837 write(*,*) ' ----------------------------------------' 6838 write(*,*) ' '""") 6839 lines = [format(data.name) for data in self.params_dep \ 6840 if self.check_needed_param(data.name)] 6841 6842 fsock.writelines('\n'.join(lines)) 6843 6844 6845
6846 - def create_ident_card(self):
6847 """ create the ident_card.dat """ 6848 6849 def format(parameter): 6850 """return the line for the ident_card corresponding to this parameter""" 6851 colum = [parameter.lhablock.lower()] + \ 6852 [str(value) for value in parameter.lhacode] + \ 6853 [parameter.name] 6854 if not parameter.name: 6855 return '' 6856 return ' '.join(colum)+'\n'
6857 6858 fsock = self.open('ident_card.dat') 6859 6860 external_param = [format(param) for param in self.params_ext] 6861 fsock.writelines('\n'.join(external_param)) 6862
6863 - def create_actualize_mp_ext_param_inc(self):
6864 """ create the actualize_mp_ext_params.inc code """ 6865 6866 # In principle one should actualize all external, but for now, it is 6867 # hardcoded that only AS and MU_R can by dynamically changed by the user 6868 # so that we only update those ones. 6869 # Of course, to be on the safe side, one could decide to update all 6870 # external parameters. 6871 update_params_list=[p for p in self.params_ext if p.name in 6872 self.PS_dependent_key] 6873 6874 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6875 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6876 for param in update_params_list] 6877 # When read_lha is false, it is G which is taken in input and not AS, so 6878 # this is what should be reset here too. 6879 if 'aS' in [param.name for param in update_params_list]: 6880 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6881 6882 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6883 fsock.writelines('\n'.join(res_strings))
6884
    def create_param_read(self):
        """Create param_read.inc, the Fortran include reading every external
        parameter from the param_card (or a simple include of the
        pre-generated param_card.inc for madevent/FKS/loop-induced outputs)."""

        # madevent-style and loop-induced outputs ship a pre-generated
        # param_card.inc: an include statement is all that is needed.
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
           or self.opt['loop_induced']:
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """Return the LHA_get_real call(s) reading this parameter; a
            multi-precision variant is appended when 'mp' is active."""
            template = \
                """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
                % {'name': parameter.name,
                   'value': self.p_to_f.parse(str(parameter.value.real))}
            if self.opt['mp']:
                template = template + \
                    ("\n call MP_LHA_get_real(npara,param,value,'%(name)s'," +
                     "%(mp_prefix)s%(name)s,%(value)s)") \
                    % {'name': parameter.name, 'mp_prefix': self.mp_prefix,
                       'value': self.mp_p_to_f.parse(str(parameter.value.real))}
            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                       for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
               particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                    {'width': particle.get('width'), 'mass': particle.get('mass')})
                if self.opt['mp']:
                    # same sign correction for the multi-precision copies
                    res_strings.append(\
                        ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,' + \
                         '%(mp_pref)s%(mass)s)') % {'width': particle.get('width'), \
                         'mass': particle.get('mass'), 'mp_pref': self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))

    @staticmethod
6931 - def create_param_card_static(model, output_path, rule_card_path=False, 6932 mssm_convert=True):
6933 """ create the param_card.dat for a givent model --static method-- """ 6934 #1. Check if a default param_card is present: 6935 done = False 6936 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6937 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6938 model_path = model.get('modelpath') 6939 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6940 done = True 6941 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6942 output_path) 6943 if not done: 6944 param_writer.ParamCardWriter(model, output_path) 6945 6946 if rule_card_path: 6947 if hasattr(model, 'rule_card'): 6948 model.rule_card.write_file(rule_card_path) 6949 6950 if mssm_convert: 6951 model_name = model.get('name') 6952 # IF MSSM convert the card to SLAH1 6953 if model_name == 'mssm' or model_name.startswith('mssm-'): 6954 import models.check_param_card as translator 6955 # Check the format of the param_card for Pythia and make it correct 6956 if rule_card_path: 6957 translator.make_valid_param_card(output_path, rule_card_path) 6958 translator.convert_to_slha1(output_path)
6959
6960 - def create_param_card(self):
6961 """ create the param_card.dat """ 6962 6963 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6964 if not hasattr(self.model, 'rule_card'): 6965 rule_card=False 6966 self.create_param_card_static(self.model, 6967 output_path=pjoin(self.dir_path, 'param_card.dat'), 6968 rule_card_path=rule_card, 6969 mssm_convert=True)
6970
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True,
                    cmd_options=None):
    """Determine which Export_v4 class is required.

    cmd is the command interface containing all potentially useful
    information. The output_type argument specifies from which context the
    output is called: 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output and
    'default' for tree-level outputs.

    Returns an instantiated exporter object.
    Raises MadGraph5Error for an unknown output_type and Exception for an
    unknown export format.
    """

    # Avoid the shared-mutable-default pitfall: the options dict is stored in
    # (and potentially mutated through) the exporter options downstream.
    if cmd_options is None:
        cmd_options = {}

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
            len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
        (curr_proc is not None) and \
        (curr_proc.get('perturbation_couplings') != [] and
         curr_proc.get('NLO_mode') not in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
        'complex_mass': cmd.options['complex_mass_scheme'],
        'export_format': 'madloop',
        'mp': True,
        'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
        'cuttools_dir': cmd._cuttools_dir,
        'iregi_dir': cmd._iregi_dir,
        'golem_dir': cmd.options['golem'],
        'samurai_dir': cmd.options['samurai'],
        'ninja_dir': cmd.options['ninja'],
        'collier_dir': cmd.options['collier'],
        'fortran_compiler': cmd.options['fortran_compiler'],
        'f2py_compiler': cmd.options['f2py_compiler'],
        'output_dependencies': cmd.options['output_dependencies'],
        'SubProc_prefix': 'P',
        'compute_color_flows': cmd.options['loop_color_flows'],
        'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
        'cluster_local_path': cmd.options['cluster_local_path'],
        'output_options': cmd_options
        }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception("output_type not recognize %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory' + \
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # multi-precision only needed when virtual amplitudes are present
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        # shortcut; renamed from 'format' to avoid shadowing the builtin
        export_format = cmd._export_format

        if export_format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif export_format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if export_format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if export_format == 'matrix' or export_format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt,
                                            format=export_format)

        elif export_format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif export_format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif export_format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif export_format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)
        else:
            raise Exception('Wrong export_v4 format')
    else:
        # BUGFIX: the message previously contained a literal, unformatted
        # '%s' (no '% output_type') and a typo ('reckognized').
        raise MadGraph5Error('Output type %s not recognized in ExportV4Factory.'
                             % output_type)
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Fortran template used for the grouped matrix-element files
    matrix_file = "matrix_madweight_group_v4.inc"
    grouped_mode = 'madweight'

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls written for the group."""

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup")

        # Lazily pick up the model from the first process of the group
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # e.g. directory already exists: warn but keep going
            logger.warning(error.strerror + " " + subprocdir)


        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                  self.write_matrix_element_v4(writers.FortranWriter(filename),
                                               matrix_element,
                                               fortran_model,
                                               str(ime+1),
                                               subproc_group.get('diagram_maps')[\
                                                                            ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                     get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE(review): uses the loop variable after the loop, i.e. the last
        # matrix element; presumably all elements of a group share the same
        # (nexternal, ninitial) -- confirm against SubProcessGroup grouping.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)


        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Link the shared per-process files from the parent directory
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f',
                     'makefile', 'coupl.inc', 'madweight_param.inc',
                     'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls

    #===========================================================================
    # Helper functions
    #===========================================================================
    def modify_grouping(self, matrix_element):
        """Allow to modify the grouping (if grouping is in place).

        Returns two values:
        - True/False whether the matrix_element was modified
        - the new (or old) matrix element"""

        return True, matrix_element.split_lepton_grouping()

    #===========================================================================
    # write_super_auto_dsig_file
    #===========================================================================
    def write_super_auto_dsig_file(self, writer, subproc_group):
        """Write the auto_dsig.f file selecting between the subprocesses
        in subprocess group mode.

        When writer is None, return the replacement dictionary instead of
        writing the file."""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        matrix_elements = subproc_group.get('matrix_elements')

        # Extract process info lines
        process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                                   matrix_elements])
        replace_dict['process_lines'] = process_lines

        nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal

        # two entries per matrix element (process and its mirror)
        replace_dict['nsprocs'] = 2*len(matrix_elements)

        # Generate dsig definition line
        dsig_def_line = "DOUBLE PRECISION " + \
                        ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                                  range(len(matrix_elements))])
        replace_dict["dsig_def_line"] = dsig_def_line

        # Generate dsig process lines
        call_dsig_proc_lines = []
        for iproc in range(len(matrix_elements)):
            call_dsig_proc_lines.append(\
                "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
                {"num": iproc + 1,
                 "proc": matrix_elements[iproc].get('processes')[0].base_string()})
        replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

        if writer:
            file = open(os.path.join(_file_path, \
                         'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read()
            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict

    #===========================================================================
    # write_mirrorprocs
    #===========================================================================
    def write_mirrorprocs(self, writer, subproc_group):
        """Write the mirrorprocs.inc file determining which processes have
        IS mirror process in subprocess group mode."""

        lines = []
        # Fortran boolean literals
        bool_dict = {True: '.true.', False: '.false.'}
        matrix_elements = subproc_group.get('matrix_elements')
        lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \
                     (len(matrix_elements),
                      ",".join([bool_dict[me.get('has_mirror_process')] for \
                                me in matrix_elements])))
        # Write the file
        writer.writelines(lines)

    #===========================================================================
    # write_configs_file
    #===========================================================================
    def write_configs_file(self, writer, subproc_group, diagrams_for_config):
        """Write the configs.inc file with topology information for a
        subprocess group. Use the first subprocess with a diagram for each
        configuration.

        Returns (number of configurations, s_and_t_channels)."""

        matrix_elements = subproc_group.get('matrix_elements')
        model = matrix_elements[0].get('processes')[0].get('model')

        diagrams = []
        config_numbers = []
        for iconfig, config in enumerate(diagrams_for_config):
            # Check if any diagrams correspond to this config
            if set(config) == set([0]):
                continue
            subproc_diags = []
            for s, d in enumerate(config):
                if d:
                    # diagram numbers are 1-based; 0 means no diagram
                    subproc_diags.append(matrix_elements[s].\
                                         get('diagrams')[d-1])
                else:
                    subproc_diags.append(None)
            diagrams.append(subproc_diags)
            config_numbers.append(iconfig + 1)

        # Extract number of external particles
        (nexternal, ninitial) = subproc_group.get_nexternal_ninitial()

        return len(diagrams), \
               self.write_configs_file_from_diagrams(writer, diagrams,
                                                     config_numbers,
                                                     nexternal, ninitial,
                                                     matrix_elements[0], model)

    #===========================================================================
    # write_run_configs_file
    #===========================================================================
    def write_run_config_file(self, writer):
        """Write the run_configs.inc file for MadEvent"""

        path = os.path.join(_file_path, 'iolibs', 'template_files',
                            'madweight_run_config.inc')
        # template only parametrized by the number of channels per job
        text = open(path).read() % {'chanperjob': '2'}
        writer.write(text)
        return True


    #===========================================================================
    # write_leshouche_file
    #===========================================================================
    def write_leshouche_file(self, writer, subproc_group):
        """Write the leshouche.inc file for MG4"""

        all_lines = []

        # Collect the Les Houches lines of every matrix element in the group
        for iproc, matrix_element in \
                enumerate(subproc_group.get('matrix_elements')):
            all_lines.extend(self.get_leshouche_lines(matrix_element,
                                                      iproc))

        # Write the file
        writer.writelines(all_lines)

        return True