
Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  """Methods and classes to export matrix elements to v4 format.""" 
  20   
  21  import copy 
  22  from six import StringIO 
  23  import itertools 
  24  import fractions 
  25  import glob 
  26  import logging 
  27  import math 
  28  import os 
  29  import io 
  30  import re 
  31  import shutil 
  32  import subprocess 
  33  import sys 
  34  import time 
  35  import traceback 
  36   
  37  import aloha 
  38   
  39  import madgraph.core.base_objects as base_objects 
  40  import madgraph.core.color_algebra as color 
  41  import madgraph.core.helas_objects as helas_objects 
  42  import madgraph.iolibs.drawing_eps as draw 
  43  import madgraph.iolibs.files as files 
  44  import madgraph.iolibs.group_subprocs as group_subprocs 
  45  import madgraph.iolibs.file_writers as writers 
  46  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  47  import madgraph.iolibs.template_files as template_files 
  48  import madgraph.iolibs.ufo_expression_parsers as parsers 
  49  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  50  import madgraph.interface.common_run_interface as common_run_interface 
  51  import madgraph.various.diagram_symmetry as diagram_symmetry 
  52  import madgraph.various.misc as misc 
  53  import madgraph.various.banner as banner_mod 
  54  import madgraph.various.process_checks as process_checks 
  55  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  56  import aloha.create_aloha as create_aloha 
  57  import models.import_ufo as import_ufo 
  58  import models.write_param_card as param_writer 
  59  import models.check_param_card as check_param_card 
  60   
  61   
  62  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  63  from madgraph.iolibs.files import cp, ln, mv 
  64   
  65  from madgraph import InvalidCmd 
  66   
  67  pjoin = os.path.join 
  68   
  69  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  70  logger = logging.getLogger('madgraph.export_v4') 
  71   
  72  default_compiler= {'fortran': 'gfortran', 
  73                         'f2py': 'f2py', 
  74                         'cpp':'g++'} 
   75   
   76   
   77 -class VirtualExporter(object): 
   78   
   79      # exporter variables that modify the way madgraph interacts with this class 
   80   
   81      grouped_mode = 'madevent' 
   82      # This variable changes the type of object called within 'generate_subprocess_directory' 
   83      # functions. 
   84      # False to avoid grouping (only identical matrix elements are merged) 
   85      # 'madevent' groups the massless quarks and massless leptons 
   86      # 'madweight' groups the gluon with the massless quarks 
   87      sa_symmetry = False 
   88      # If grouped_mode=False, uu~ and u~u will be called independently. 
   89      # Setting sa_symmetry generates only one of the two matrix elements. 
   90      check = True 
   91      # Ask madgraph to check if the directory already exists and propose to the user to 
   92      # remove it first if this is the case 
   93      output = 'Template' 
   94      # [Template, None, dir] 
   95      # - Template, madgraph will call copy_template 
   96      # - dir, madgraph will just create an empty directory for initialisation 
   97      # - None, madgraph does nothing for initialisation 
   98      exporter = 'v4' 
   99      # language of the output: 'v4' for Fortran output 
  100      # 'cpp' for C++ output 
  101   
  102   
103 - def __init__(self, dir_path = "", opt=None):
  104          # cmd_options is a dictionary with all the optional arguments passed at output time 
  105   
  106          # Activate some monkey patching for the helas call writer. 
  107          helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \ 
  108              self.helas_call_writer_custom 
  109   
  110   
  111      # helper function to customise the helas writer 
  112      @staticmethod 
113 - def custom_helas_call(call, arg):
114 """static method to customise the way aloha function call are written 115 call is the default template for the call 116 arg are the dictionary used for the call 117 """ 118 return call, arg
119 120 helas_call_writer_custom = lambda x,y,z: x.custom_helas_call(y,z) 121 122
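    # Illustrative sketch, not part of the original module: a daughter exporter can
    # override the custom_helas_call hook above to post-process how ALOHA routine
    # calls are written.  'call' is the call template string and 'arg' its
    # substitution dictionary; the class name and the tweak shown here are purely
    # hypothetical.
    #
    #     class MyFortranExporter(VirtualExporter):
    #         @staticmethod
    #         def custom_helas_call(call, arg):
    #             # e.g. tag every generated call with a trailing Fortran comment
    #             return call.rstrip() + '  ! customised call', arg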
123 - def copy_template(self, model):
124 return
125
126 - def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
  127          # generate_subprocess_directory(self, matrix_element, helicity_model, me_number) [for ungrouped] 
  128          return 0 # return an integer stating the number of calls to helicity routines 
129
130 - def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
131 return
132
133 - def finalize(self,matrix_element, cmdhistory, MG5options, outputflag):
134 return
135 136
137 - def pass_information_from_cmd(self, cmd):
138 """pass information from the command interface to the exporter. 139 Please do not modify any object of the interface from the exporter. 140 """ 141 return
142
143 - def modify_grouping(self, matrix_element):
144 return False, matrix_element
145
146 - def export_model_files(self, model_v4_path):
147 raise Exception("V4 model not supported by this type of exporter. Please use UFO model") 148 return
149
150 - def export_helas(self, HELAS_PATH):
151 raise Exception("V4 model not supported by this type of exporter. Please use UFO model") 152 return
153
154 #=============================================================================== 155 # ProcessExporterFortran 156 #=============================================================================== 157 -class ProcessExporterFortran(VirtualExporter):
158 """Class to take care of exporting a set of matrix elements to 159 Fortran (v4) format.""" 160 161 default_opt = {'clean': False, 'complex_mass':False, 162 'export_format':'madevent', 'mp': False, 163 'v5_model': True, 164 'output_options':{} 165 } 166 grouped_mode = False 167
168 - def __init__(self, dir_path = "", opt=None):
169 """Initiate the ProcessExporterFortran with directory information""" 170 self.mgme_dir = MG5DIR 171 self.dir_path = dir_path 172 self.model = None 173 174 self.opt = dict(self.default_opt) 175 if opt: 176 self.opt.update(opt) 177 178 self.cmd_options = self.opt['output_options'] 179 180 #place holder to pass information to the run_interface 181 self.proc_characteristic = banner_mod.ProcCharacteristic() 182 # call mother class 183 super(ProcessExporterFortran,self).__init__(dir_path, opt)
184 185 186 #=========================================================================== 187 # process exporter fortran switch between group and not grouped 188 #===========================================================================
189 - def export_processes(self, matrix_elements, fortran_model):
190 """Make the switch between grouped and not grouped output""" 191 192 calls = 0 193 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 194 for (group_number, me_group) in enumerate(matrix_elements): 195 calls = calls + self.generate_subprocess_directory(\ 196 me_group, fortran_model, group_number) 197 else: 198 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 199 calls = calls + self.generate_subprocess_directory(\ 200 me, fortran_model, me_number) 201 202 return calls
203 204 205 #=========================================================================== 206 # create the run_card 207 #===========================================================================
208 - def create_run_card(self, matrix_elements, history):
209 """ """ 210 211 212 # bypass this for the loop-check 213 import madgraph.loop.loop_helas_objects as loop_helas_objects 214 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 215 matrix_elements = None 216 217 run_card = banner_mod.RunCard() 218 219 220 default=True 221 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 222 processes = [me.get('processes') for megroup in matrix_elements 223 for me in megroup['matrix_elements']] 224 elif matrix_elements: 225 processes = [me.get('processes') 226 for me in matrix_elements['matrix_elements']] 227 else: 228 default =False 229 230 if default: 231 run_card.create_default_for_process(self.proc_characteristic, 232 history, 233 processes) 234 235 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 236 shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 237 pjoin(self.dir_path, 'Cards', 'run_card.dat'))
238 239 240 241 #=========================================================================== 242 # copy the Template in a new directory. 243 #===========================================================================
244 - def copy_template(self, model):
245 """create the directory run_name as a copy of the MadEvent 246 Template, and clean the directory 247 """ 248 249 #First copy the full template tree if dir_path doesn't exit 250 if not os.path.isdir(self.dir_path): 251 assert self.mgme_dir, \ 252 "No valid MG_ME path given for MG4 run directory creation." 253 logger.info('initialize a new directory: %s' % \ 254 os.path.basename(self.dir_path)) 255 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 256 self.dir_path, True) 257 # misc.copytree since dir_path already exists 258 misc.copytree(pjoin(self.mgme_dir, 'Template/Common'), 259 self.dir_path) 260 # copy plot_card 261 for card in ['plot_card']: 262 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 263 try: 264 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 265 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 266 except IOError: 267 logger.warning("Failed to copy " + card + ".dat to default") 268 elif os.getcwd() == os.path.realpath(self.dir_path): 269 logger.info('working in local directory: %s' % \ 270 os.path.realpath(self.dir_path)) 271 # misc.copytree since dir_path already exists 272 misc.copytree(pjoin(self.mgme_dir, 'Template/LO'), 273 self.dir_path) 274 # for name in misc.glob('Template/LO/*', self.mgme_dir): 275 # name = os.path.basename(name) 276 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 277 # if os.path.isfile(filename): 278 # files.cp(filename, pjoin(self.dir_path,name)) 279 # elif os.path.isdir(filename): 280 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 281 # misc.copytree since dir_path already exists 282 misc.copytree(pjoin(self.mgme_dir, 'Template/Common'), 283 self.dir_path) 284 # Copy plot_card 285 for card in ['plot_card']: 286 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 287 try: 288 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 289 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 290 except IOError: 291 logger.warning("Failed to copy " + card + ".dat to default") 292 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 293 assert self.mgme_dir, \ 294 "No valid MG_ME path given for MG4 run directory creation." 
295 try: 296 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 297 except IOError: 298 MG5_version = misc.get_pkg_info() 299 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 300 301 #Ensure that the Template is clean 302 if self.opt['clean']: 303 logger.info('remove old information in %s' % \ 304 os.path.basename(self.dir_path)) 305 if 'MADGRAPH_BASE' in os.environ: 306 misc.call([pjoin('bin', 'internal', 'clean_template'), 307 '--web'], cwd=self.dir_path) 308 else: 309 try: 310 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 311 cwd=self.dir_path) 312 except Exception as why: 313 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 314 % (os.path.basename(self.dir_path),why)) 315 316 #Write version info 317 MG_version = misc.get_pkg_info() 318 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 319 MG_version['version']) 320 321 # add the makefile in Source directory 322 filename = pjoin(self.dir_path,'Source','makefile') 323 self.write_source_makefile(writers.FileWriter(filename)) 324 325 # add the DiscreteSampler information 326 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 327 pjoin(self.dir_path, 'Source')) 328 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 329 pjoin(self.dir_path, 'Source')) 330 331 # We need to create the correct open_data for the pdf 332 self.write_pdf_opendata()
333 334 335 #=========================================================================== 336 # Call MadAnalysis5 to generate the default cards for this process 337 #===========================================================================
338 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 339 ma5_path, output_dir, levels = ['parton','hadron']):
340 """ Call MA5 so that it writes default cards for both parton and 341 post-shower levels, tailored for this particular process.""" 342 343 if len(levels)==0: 344 return 345 start = time.time() 346 logger.info('Generating MadAnalysis5 default cards tailored to this process') 347 try: 348 MA5_interpreter = common_run_interface.CommonRunCmd.\ 349 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 350 except (Exception, SystemExit) as e: 351 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty') 352 return 353 if MA5_interpreter is None: 354 return 355 356 MA5_main = MA5_interpreter.main 357 for lvl in ['parton','hadron']: 358 if lvl in levels: 359 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 360 try: 361 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 362 except (Exception, SystemExit) as e: 363 # keep the default card (skip only) 364 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 365 ' default analysis card for this process.') 366 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 367 error=StringIO() 368 traceback.print_exc(file=error) 369 logger.debug('MadAnalysis5 error was:') 370 logger.debug('-'*60) 371 logger.debug(error.getvalue()[:-1]) 372 logger.debug('-'*60) 373 else: 374 open(card_to_generate,'w').write(text) 375 stop = time.time() 376 if stop-start >1: 377 logger.info('Cards created in %.2fs' % (stop-start))
378 379 #=========================================================================== 380 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 381 #===========================================================================
382 - def write_procdef_mg5(self, file_pos, modelname, process_str):
383 """ write an equivalent of the MG4 proc_card in order that all the Madevent 384 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 385 386 proc_card_template = template_files.mg4_proc_card.mg4_template 387 process_template = template_files.mg4_proc_card.process_template 388 process_text = '' 389 coupling = '' 390 new_process_content = [] 391 392 393 # First find the coupling and suppress the coupling from process_str 394 #But first ensure that coupling are define whithout spaces: 395 process_str = process_str.replace(' =', '=') 396 process_str = process_str.replace('= ', '=') 397 process_str = process_str.replace(',',' , ') 398 #now loop on the element and treat all the coupling 399 for info in process_str.split(): 400 if '=' in info: 401 coupling += info + '\n' 402 else: 403 new_process_content.append(info) 404 # Recombine the process_str (which is the input process_str without coupling 405 #info) 406 process_str = ' '.join(new_process_content) 407 408 #format the SubProcess 409 replace_dict = {'process': process_str, 410 'coupling': coupling} 411 process_text += process_template.substitute(replace_dict) 412 413 replace_dict = {'process': process_text, 414 'model': modelname, 415 'multiparticle':''} 416 text = proc_card_template.substitute(replace_dict) 417 418 if file_pos: 419 ff = open(file_pos, 'w') 420 ff.write(text) 421 ff.close() 422 else: 423 return replace_dict
424 425
426 - def pass_information_from_cmd(self, cmd):
427 """Pass information for MA5""" 428 429 self.proc_defs = cmd._curr_proc_defs
430 431 #=========================================================================== 432 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 433 #===========================================================================
434 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
435 """Function to finalize v4 directory, for inheritance.""" 436 437 self.create_run_card(matrix_elements, history) 438 self.create_MA5_cards(matrix_elements, history)
439
440 - def create_MA5_cards(self,matrix_elements,history):
441 """ A wrapper around the creation of the MA5 cards so that it can be 442 bypassed by daughter classes (i.e. in standalone).""" 443 if 'madanalysis5_path' in self.opt and not \ 444 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 445 processes = None 446 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 447 processes = [me.get('processes') for megroup in matrix_elements 448 for me in megroup['matrix_elements']] 449 elif matrix_elements: 450 processes = [me.get('processes') 451 for me in matrix_elements['matrix_elements']] 452 453 self.create_default_madanalysis5_cards( 454 history, self.proc_defs, processes, 455 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 456 levels = ['hadron','parton']) 457 458 for level in ['hadron','parton']: 459 # Copying these cards turn on the use of MadAnalysis5 by default. 460 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 461 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 462 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
463 464 #=========================================================================== 465 # Create the proc_characteristic file passing information to the run_interface 466 #===========================================================================
467 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
468 469 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
470 471 #=========================================================================== 472 # write_matrix_element_v4 473 #===========================================================================
474 - def write_matrix_element_v4(self):
475 """Function to write a matrix.f file, for inheritance. 476 """ 477 pass
478 479 #=========================================================================== 480 # write_pdf_opendata 481 #===========================================================================
482 - def write_pdf_opendata(self):
483 """ modify the pdf opendata file, to allow direct access to cluster node 484 repository if configure""" 485 486 if not self.opt["cluster_local_path"]: 487 changer = {"pdf_systemwide": ""} 488 else: 489 to_add = """ 490 tempname='%(path)s'//Tablefile 491 open(IU,file=tempname,status='old',ERR=1) 492 return 493 1 tempname='%(path)s/Pdfdata/'//Tablefile 494 open(IU,file=tempname,status='old',ERR=2) 495 return 496 2 tempname='%(path)s/lhapdf'//Tablefile 497 open(IU,file=tempname,status='old',ERR=3) 498 return 499 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 500 open(IU,file=tempname,status='old',ERR=4) 501 return 502 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 503 open(IU,file=tempname,status='old',ERR=5) 504 return 505 """ % {"path" : self.opt["cluster_local_path"]} 506 507 changer = {"pdf_systemwide": to_add} 508 509 510 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f")) 511 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 512 ff.writelines(template % changer) 513 514 # Do the same for lhapdf set 515 if not self.opt["cluster_local_path"]: 516 changer = {"cluster_specific_path": ""} 517 else: 518 to_add=""" 519 LHAPath='%(path)s/PDFsets' 520 Inquire(File=LHAPath, exist=exists) 521 if(exists)return 522 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 523 Inquire(File=LHAPath, exist=exists) 524 if(exists)return 525 LHAPath='%(path)s/../lhapdf/pdfsets/' 526 Inquire(File=LHAPath, exist=exists) 527 if(exists)return 528 LHAPath='./PDFsets' 529 """ % {"path" : self.opt["cluster_local_path"]} 530 changer = {"cluster_specific_path": to_add} 531 532 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f")) 533 #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 534 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 535 ff.writelines(template % changer) 536 537 538 return
539 540 541 542 #=========================================================================== 543 # write_maxparticles_file 544 #===========================================================================
545 - def write_maxparticles_file(self, writer, matrix_elements):
546 """Write the maxparticles.inc file for MadEvent""" 547 548 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 549 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 550 matrix_elements.get('matrix_elements')]) 551 else: 552 maxparticles = max([me.get_nexternal_ninitial()[0] \ 553 for me in matrix_elements]) 554 555 lines = "integer max_particles\n" 556 lines += "parameter(max_particles=%d)" % maxparticles 557 558 # Write the file 559 writer.writelines(lines) 560 561 return True
562 563 564 #=========================================================================== 565 # export the model 566 #===========================================================================
567 - def export_model_files(self, model_path):
568 """Configure the files/link of the process according to the model""" 569 570 # Import the model 571 for file in os.listdir(model_path): 572 if os.path.isfile(pjoin(model_path, file)): 573 shutil.copy2(pjoin(model_path, file), \ 574 pjoin(self.dir_path, 'Source', 'MODEL'))
575 576 590 598 599 600 #=========================================================================== 601 # export the helas routine 602 #===========================================================================
603 - def export_helas(self, helas_path):
604 """Configure the files/link of the process according to the model""" 605 606 # Import helas routine 607 for filename in os.listdir(helas_path): 608 filepos = pjoin(helas_path, filename) 609 if os.path.isfile(filepos): 610 if filepos.endswith('Makefile.template'): 611 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 612 elif filepos.endswith('Makefile'): 613 pass 614 else: 615 cp(filepos, self.dir_path + '/Source/DHELAS')
  616      # following lines do the same but without symbolic link 
  617      # 
  618      #def export_helas(mgme_dir, dir_path): 
  619      # 
  620      #    # Copy the HELAS directory 
  621      #    helas_dir = pjoin(mgme_dir, 'HELAS') 
  622      #    for filename in os.listdir(helas_dir): 
  623      #        if os.path.isfile(pjoin(helas_dir, filename)): 
  624      #            shutil.copy2(pjoin(helas_dir, filename), 
  625      #                         pjoin(dir_path, 'Source', 'DHELAS')) 
  626      #    shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 
  627      #                pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 
  628      # 
  629   
  630      #=========================================================================== 
  631      # generate_subprocess_directory 
  632      #=========================================================================== 
633 - def generate_subprocess_directory(self, matrix_element, 634 fortran_model, 635 me_number):
636 """Routine to generate a subprocess directory (for inheritance)""" 637 638 pass
639 640 #=========================================================================== 641 # get_source_libraries_list 642 #===========================================================================
643 - def get_source_libraries_list(self):
644 """ Returns the list of libraries to be compiling when compiling the 645 SOURCE directory. It is different for loop_induced processes and 646 also depends on the value of the 'output_dependencies' option""" 647 648 return ['$(LIBDIR)libdhelas.$(libext)', 649 '$(LIBDIR)libpdf.$(libext)', 650 '$(LIBDIR)libmodel.$(libext)', 651 '$(LIBDIR)libcernlib.$(libext)', 652 '$(LIBDIR)libbias.$(libext)']
653 654 #=========================================================================== 655 # write_source_makefile 656 #===========================================================================
657 - def write_source_makefile(self, writer):
658 """Write the nexternal.inc file for MG4""" 659 660 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 661 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 662 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 663 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 664 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 665 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 666 else: 667 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 668 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 669 670 replace_dict= {'libraries': set_of_lib, 671 'model':model_line, 672 'additional_dsample': '', 673 'additional_dependencies':''} 674 675 if writer: 676 text = open(path).read() % replace_dict 677 writer.write(text) 678 679 return replace_dict
680 681 #=========================================================================== 682 # write_nexternal_madspin 683 #===========================================================================
684 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
685 """Write the nexternal_prod.inc file for madspin""" 686 687 replace_dict = {} 688 689 replace_dict['nexternal'] = nexternal 690 replace_dict['ninitial'] = ninitial 691 692 file = """ \ 693 integer nexternal_prod 694 parameter (nexternal_prod=%(nexternal)d) 695 integer nincoming_prod 696 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 697 698 # Write the file 699 if writer: 700 writer.writelines(file) 701 return True 702 else: 703 return replace_dict
704 705 #=========================================================================== 706 # write_helamp_madspin 707 #===========================================================================
708 - def write_helamp_madspin(self, writer, ncomb):
709 """Write the helamp.inc file for madspin""" 710 711 replace_dict = {} 712 713 replace_dict['ncomb'] = ncomb 714 715 file = """ \ 716 integer ncomb1 717 parameter (ncomb1=%(ncomb)d) 718 double precision helamp(ncomb1) 719 common /to_helamp/helamp """ % replace_dict 720 721 # Write the file 722 if writer: 723 writer.writelines(file) 724 return True 725 else: 726 return replace_dict
727 728 729 730 #=========================================================================== 731 # write_nexternal_file 732 #===========================================================================
733 - def write_nexternal_file(self, writer, nexternal, ninitial):
734 """Write the nexternal.inc file for MG4""" 735 736 replace_dict = {} 737 738 replace_dict['nexternal'] = nexternal 739 replace_dict['ninitial'] = ninitial 740 741 file = """ \ 742 integer nexternal 743 parameter (nexternal=%(nexternal)d) 744 integer nincoming 745 parameter (nincoming=%(ninitial)d)""" % replace_dict 746 747 # Write the file 748 if writer: 749 writer.writelines(file) 750 return True 751 else: 752 return replace_dict
753 #=========================================================================== 754 # write_pmass_file 755 #===========================================================================
756 - def write_pmass_file(self, writer, matrix_element):
757 """Write the pmass.inc file for MG4""" 758 759 model = matrix_element.get('processes')[0].get('model') 760 761 lines = [] 762 for wf in matrix_element.get_external_wavefunctions(): 763 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 764 if mass.lower() != "zero": 765 mass = "abs(%s)" % mass 766 767 lines.append("pmass(%d)=%s" % \ 768 (wf.get('number_external'), mass)) 769 770 # Write the file 771 writer.writelines(lines) 772 773 return True
774 775 #=========================================================================== 776 # write_ngraphs_file 777 #===========================================================================
778 - def write_ngraphs_file(self, writer, nconfigs):
779 """Write the ngraphs.inc file for MG4. Needs input from 780 write_configs_file.""" 781 782 file = " integer n_max_cg\n" 783 file = file + "parameter (n_max_cg=%d)" % nconfigs 784 785 # Write the file 786 writer.writelines(file) 787 788 return True
789 790 #=========================================================================== 791 # write_leshouche_file 792 #===========================================================================
793 - def write_leshouche_file(self, writer, matrix_element):
794 """Write the leshouche.inc file for MG4""" 795 796 # Write the file 797 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 798 799 return True
800 801 #=========================================================================== 802 # get_leshouche_lines 803 #===========================================================================
804 - def get_leshouche_lines(self, matrix_element, numproc):
805 """Write the leshouche.inc file for MG4""" 806 807 # Extract number of external particles 808 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 809 810 lines = [] 811 for iproc, proc in enumerate(matrix_element.get('processes')): 812 legs = proc.get_legs_with_decays() 813 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 814 (iproc + 1, numproc+1, nexternal, 815 ",".join([str(l.get('id')) for l in legs]))) 816 if iproc == 0 and numproc == 0: 817 for i in [1, 2]: 818 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 819 (i, nexternal, 820 ",".join([ "%3r" % 0 ] * ninitial + \ 821 [ "%3r" % i ] * (nexternal - ninitial)))) 822 823 # Here goes the color connections corresponding to the JAMPs 824 # Only one output, for the first subproc! 825 if iproc == 0: 826 # If no color basis, just output trivial color flow 827 if not matrix_element.get('color_basis'): 828 for i in [1, 2]: 829 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 830 (i, numproc+1,nexternal, 831 ",".join([ "%3r" % 0 ] * nexternal))) 832 833 else: 834 # First build a color representation dictionnary 835 repr_dict = {} 836 for l in legs: 837 repr_dict[l.get('number')] = \ 838 proc.get('model').get_particle(l.get('id')).get_color()\ 839 * (-1)**(1+l.get('state')) 840 # Get the list of color flows 841 color_flow_list = \ 842 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 843 ninitial) 844 # And output them properly 845 for cf_i, color_flow_dict in enumerate(color_flow_list): 846 for i in [0, 1]: 847 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 848 (i + 1, cf_i + 1, numproc+1, nexternal, 849 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 850 for l in legs]))) 851 852 return lines
853 854 855 856 857 #=========================================================================== 858 # write_maxamps_file 859 #===========================================================================
860 - def write_maxamps_file(self, writer, maxamps, maxflows, 861 maxproc,maxsproc):
862 """Write the maxamps.inc file for MG4.""" 863 864 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 865 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 866 (maxamps, maxflows) 867 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 868 (maxproc, maxsproc) 869 870 # Write the file 871 writer.writelines(file) 872 873 return True
874 875 876 #=========================================================================== 877 # Routines to output UFO models in MG4 format 878 #=========================================================================== 879
880 - def convert_model(self, model, wanted_lorentz = [], 881 wanted_couplings = []):
882 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 883 884 # Make sure aloha is in quadruple precision if needed 885 old_aloha_mp=aloha.mp_precision 886 aloha.mp_precision=self.opt['mp'] 887 self.model = model 888 # create the MODEL 889 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 890 self.opt['exporter'] = self.__class__ 891 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 892 model_builder.build(wanted_couplings) 893 894 # Backup the loop mode, because it can be changed in what follows. 895 old_loop_mode = aloha.loop_mode 896 897 # Create the aloha model or use the existing one (for loop exporters 898 # this is useful as the aloha model will be used again in the 899 # LoopHelasMatrixElements generated). We do not save the model generated 900 # here if it didn't exist already because it would be a waste of 901 # memory for tree level applications since aloha is only needed at the 902 # time of creating the aloha fortran subroutines. 903 if hasattr(self, 'aloha_model'): 904 aloha_model = self.aloha_model 905 else: 906 try: 907 with misc.MuteLogger(['madgraph.models'], [60]): 908 aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath'))) 909 except ImportError: 910 aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath')) 911 aloha_model.add_Lorentz_object(model.get('lorentz')) 912 913 # Compute the subroutines 914 if wanted_lorentz: 915 aloha_model.compute_subset(wanted_lorentz) 916 else: 917 aloha_model.compute_all(save=False) 918 919 # Write them out 920 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 921 aloha_model.write(write_dir, 'Fortran') 922 923 # Revert the original aloha loop mode 924 aloha.loop_mode = old_loop_mode 925 926 #copy Helas Template 927 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 928 if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]): 929 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', 930 write_dir+'/aloha_functions.f') 931 aloha_model.loop_mode = False 932 else: 933 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', 934 write_dir+'/aloha_functions.f') 935 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 936 937 # Make final link in the Process 938 self.make_model_symbolic_link() 939 940 # Re-establish original aloha mode 941 aloha.mp_precision=old_aloha_mp
942 943 944 #=========================================================================== 945 # Helper functions 946 #===========================================================================
947 - def modify_grouping(self, matrix_element):
948 """allow to modify the grouping (if grouping is in place) 949 return two value: 950 - True/False if the matrix_element was modified 951 - the new(or old) matrix element""" 952 953 return False, matrix_element
954 955 #=========================================================================== 956 # Helper functions 957 #===========================================================================
958 - def get_mg5_info_lines(self):
959 """Return info lines for MG5, suitable to place at beginning of 960 Fortran files""" 961 962 info = misc.get_pkg_info() 963 info_lines = "" 964 if info and 'version' in info and 'date' in info: 965 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 966 (info['version'], info['date']) 967 info_lines = info_lines + \ 968 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 969 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 970 else: 971 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 972 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 973 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 974 975 return info_lines
976
977 - def get_process_info_lines(self, matrix_element):
978 """Return info lines describing the processes for this matrix element""" 979 980 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 981 for process in matrix_element.get('processes')])
982 983
984 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
985 """Return the Helicity matrix definition lines for this matrix element""" 986 987 helicity_line_list = [] 988 i = 0 989 for helicities in matrix_element.get_helicity_matrix(): 990 i = i + 1 991 int_list = [i, len(helicities)] 992 int_list.extend(helicities) 993 helicity_line_list.append(\ 994 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 995 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 996 997 return "\n".join(helicity_line_list)
998
999 - def get_ic_line(self, matrix_element):
1000 """Return the IC definition line coming after helicities, required by 1001 switchmom in madevent""" 1002 1003 nexternal = matrix_element.get_nexternal_ninitial()[0] 1004 int_list = list(range(1, nexternal + 1)) 1005 1006 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 1007 ",".join([str(i) for \ 1008 i in int_list]))
1009
1010 - def set_chosen_SO_index(self, process, squared_orders):
1011 """ From the squared order constraints set by the user, this function 1012 finds what indices of the squared_orders list the user intends to pick. 1013 It returns this as a string of comma-separated successive '.true.' or 1014 '.false.' for each index.""" 1015 1016 user_squared_orders = process.get('squared_orders') 1017 split_orders = process.get('split_orders') 1018 1019 if len(user_squared_orders)==0: 1020 return ','.join(['.true.']*len(squared_orders)) 1021 1022 res = [] 1023 for sqsos in squared_orders: 1024 is_a_match = True 1025 for user_sqso, value in user_squared_orders.items(): 1026 if (process.get_squared_order_type(user_sqso) =='==' and \ 1027 value!=sqsos[split_orders.index(user_sqso)]) or \ 1028 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1029 value<sqsos[split_orders.index(user_sqso)]) or \ 1030 (process.get_squared_order_type(user_sqso) == '>' and \ 1031 value>=sqsos[split_orders.index(user_sqso)]): 1032 is_a_match = False 1033 break 1034 res.append('.true.' if is_a_match else '.false.') 1035 1036 return ','.join(res)
1037
1038 - def get_split_orders_lines(self, orders, array_name, n=5):
1039 """ Return the split orders definition as defined in the list orders and 1040 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1041 1042 ret_list = [] 1043 for index, order in enumerate(orders): 1044 for k in range(0, len(order), n): 1045 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1046 (array_name,index + 1, k + 1, min(k + n, len(order)), 1047 ','.join(["%5r" % i for i in order[k:k + n]]))) 1048 return ret_list
1049
1050 - def format_integer_list(self, list, name, n=5):
1051 """ Return an initialization of the python list in argument following 1052 the fortran syntax using the data keyword assignment, filling an array 1053 of name 'name'. It splits rows in chunks of size n.""" 1054 1055 ret_list = [] 1056 for k in range(0, len(list), n): 1057 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1058 (name, k + 1, min(k + n, len(list)), 1059 ','.join(["%5r" % i for i in list[k:k + n]]))) 1060 return ret_list
1061
1062 - def get_color_data_lines(self, matrix_element, n=6):
1063 """Return the color matrix definition lines for this matrix element. Split 1064 rows in chunks of size n.""" 1065 1066 if not matrix_element.get('color_matrix'): 1067 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1068 else: 1069 ret_list = [] 1070 my_cs = color.ColorString() 1071 for index, denominator in \ 1072 enumerate(matrix_element.get('color_matrix').\ 1073 get_line_denominators()): 1074 # First write the common denominator for this color matrix line 1075 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1076 # Then write the numerators for the matrix elements 1077 num_list = matrix_element.get('color_matrix').\ 1078 get_line_numerators(index, denominator) 1079 1080 assert all([int(i)==i for i in num_list]) 1081 1082 for k in range(0, len(num_list), n): 1083 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1084 (index + 1, k + 1, min(k + n, len(num_list)), 1085 ','.join(["%5i" % int(i) for i in num_list[k:k + n]]))) 1086 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1087 ret_list.append("C %s" % repr(my_cs)) 1088 1089 return ret_list
1090 1091
1092 - def get_den_factor_line(self, matrix_element):
1093 """Return the denominator factor line for this matrix element""" 1094 1095 return "DATA IDEN/%2r/" % \ 1096 matrix_element.get_denominator_factor()
1097
1098 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
1099 """Return the ICOLAMP matrix, showing which JAMPs contribute to 1100 which configs (diagrams).""" 1101 1102 ret_list = [] 1103 1104 booldict = {False: ".false.", True: ".true."} 1105 1106 if not matrix_element.get('color_basis'): 1107 # No color, so only one color factor. Simply write a ".true." 1108 # for each config (i.e., each diagram with only 3 particle 1109 # vertices 1110 configs = len(mapconfigs) 1111 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 1112 (num_matrix_element, configs, 1113 ','.join([".true." for i in range(configs)]))) 1114 return ret_list 1115 1116 # There is a color basis - create a list showing which JAMPs have 1117 # contributions to which configs 1118 1119 # Only want to include leading color flows, so find max_Nc 1120 color_basis = matrix_element.get('color_basis') 1121 1122 # We don't want to include the power of Nc's which come from the potential 1123 # loop color trace (i.e. in the case of a closed fermion loop for example) 1124 # so we subtract it here when computing max_Nc 1125 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 1126 color_basis.values()],[])) 1127 1128 # Crate dictionary between diagram number and JAMP number 1129 diag_jamp = {} 1130 for ijamp, col_basis_elem in \ 1131 enumerate(sorted(matrix_element.get('color_basis').keys())): 1132 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 1133 # Only use color flows with Nc == max_Nc. However, notice that 1134 # we don't want to include the Nc power coming from the loop 1135 # in this counting. 1136 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 1137 diag_num = diag_tuple[0] + 1 1138 # Add this JAMP number to this diag_num 1139 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 1140 [ijamp+1] 1141 1142 colamps = ijamp + 1 1143 for iconfig, num_diag in enumerate(mapconfigs): 1144 if num_diag == 0: 1145 continue 1146 1147 # List of True or False 1148 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 1149 # Add line 1150 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 1151 (iconfig+1, num_matrix_element, colamps, 1152 ','.join(["%s" % booldict[b] for b in \ 1153 bool_list]))) 1154 1155 return ret_list
1156
1157 - def get_amp2_lines(self, matrix_element, config_map = []):
1158 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1159 1160 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1161 # Get minimum legs in a vertex 1162 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1163 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1164 minvert = min(vert_list) if vert_list!=[] else 0 1165 1166 ret_lines = [] 1167 if config_map: 1168 # In this case, we need to sum up all amplitudes that have 1169 # identical topologies, as given by the config_map (which 1170 # gives the topology/config for each of the diagrams 1171 diagrams = matrix_element.get('diagrams') 1172 # Combine the diagrams with identical topologies 1173 config_to_diag_dict = {} 1174 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1175 if config_map[idiag] == 0: 1176 continue 1177 try: 1178 config_to_diag_dict[config_map[idiag]].append(idiag) 1179 except KeyError: 1180 config_to_diag_dict[config_map[idiag]] = [idiag] 1181 # Write out the AMP2s summing squares of amplitudes belonging 1182 # to eiher the same diagram or different diagrams with 1183 # identical propagator properties. Note that we need to use 1184 # AMP2 number corresponding to the first diagram number used 1185 # for that AMP2. 1186 for config in sorted(config_to_diag_dict.keys()): 1187 1188 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1189 {"num": (config_to_diag_dict[config][0] + 1)} 1190 1191 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1192 sum([diagrams[idiag].get('amplitudes') for \ 1193 idiag in config_to_diag_dict[config]], [])]) 1194 1195 # Not using \sum |M|^2 anymore since this creates troubles 1196 # when ckm is not diagonal due to the JIM mechanism. 1197 if '+' in amp: 1198 line += "(%s)*dconjg(%s)" % (amp, amp) 1199 else: 1200 line += "%s*dconjg(%s)" % (amp, amp) 1201 ret_lines.append(line) 1202 else: 1203 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1204 # Ignore any diagrams with 4-particle vertices. 1205 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1206 continue 1207 # Now write out the expression for AMP2, meaning the sum of 1208 # squared amplitudes belonging to the same diagram 1209 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1210 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1211 {"num": a.get('number')} for a in \ 1212 diag.get('amplitudes')]) 1213 ret_lines.append(line) 1214 1215 return ret_lines
 1216   
 1217      #=========================================================================== 
 1218      # Returns the data statements initializing the coefficients for the JAMP 
 1219      # decomposition. It is used when the JAMP initialization is decided to be 
 1220      # done through big arrays containing the projection coefficients. 
 1221      #=========================================================================== 
1222 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1223 n=50, Nc_value=3):
1224 """This functions return the lines defining the DATA statement setting 1225 the coefficients building the JAMPS out of the AMPS. Split rows in 1226 bunches of size n. 1227 One can specify the color_basis from which the color amplitudes originates 1228 so that there are commentaries telling what color structure each JAMP 1229 corresponds to.""" 1230 1231 if(not isinstance(color_amplitudes,list) or 1232 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1233 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs") 1234 1235 res_list = [] 1236 my_cs = color.ColorString() 1237 for index, coeff_list in enumerate(color_amplitudes): 1238 # Create the list of the complete numerical coefficient. 1239 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1240 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1241 coefficient in coeff_list] 1242 # Create the list of the numbers of the contributing amplitudes. 1243 # Mutliply by -1 for those which have an imaginary coefficient. 1244 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1245 for coefficient in coeff_list] 1246 # Find the common denominator. 1247 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1248 num_list=[(coefficient*commondenom).numerator \ 1249 for coefficient in coefs_list] 1250 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1251 index+1,len(num_list))) 1252 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1253 index+1,commondenom)) 1254 if color_basis: 1255 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1256 res_list.append("C %s" % repr(my_cs)) 1257 for k in range(0, len(num_list), n): 1258 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1259 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1260 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1261 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1262 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1263 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1264 pass 1265 return res_list
1266 1267
1268 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1269 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1270 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1271 defined as a matrix element or directly as a color_amplitudes dictionary. 1272 The split_order_amps specifies the group of amplitudes sharing the same 1273 amplitude orders which should be put in together in a given set of JAMPS. 1274 The split_order_amps is supposed to have the format of the second output 1275 of the function get_split_orders_mapping function in helas_objects.py. 1276 The split_order_names is optional (it should correspond to the process 1277 'split_orders' attribute) and only present to provide comments in the 1278 JAMP definitions in the code.""" 1279 1280 # Let the user call get_JAMP_lines_split_order directly from a 1281 error_msg="Malformed '%s' argument passed to the "+\ 1282 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1283 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1284 color_amplitudes=col_amps.get_color_amplitudes() 1285 elif(isinstance(col_amps,list)): 1286 if(col_amps and isinstance(col_amps[0],list)): 1287 color_amplitudes=col_amps 1288 else: 1289 raise MadGraph5Error(error_msg%'col_amps') 1290 else: 1291 raise MadGraph5Error(error_msg%'col_amps') 1292 1293 # Verify the sanity of the split_order_amps and split_order_names args 1294 if isinstance(split_order_amps,list): 1295 for elem in split_order_amps: 1296 if len(elem)!=2: 1297 raise MadGraph5Error(error_msg%'split_order_amps') 1298 # Check the first element of the two lists to make sure they are 1299 # integers, although in principle they should all be integers. 1300 if not isinstance(elem[0],tuple) or \ 1301 not isinstance(elem[1],tuple) or \ 1302 not isinstance(elem[0][0],int) or \ 1303 not isinstance(elem[1][0],int): 1304 raise MadGraph5Error(error_msg%'split_order_amps') 1305 else: 1306 raise MadGraph5Error(error_msg%'split_order_amps') 1307 1308 if not split_order_names is None: 1309 if isinstance(split_order_names,list): 1310 # Should specify the same number of names as there are elements 1311 # in the key of the split_order_amps. 1312 if len(split_order_names)!=len(split_order_amps[0][0]): 1313 raise MadGraph5Error(error_msg%'split_order_names') 1314 # Check the first element of the list to be a string 1315 if not isinstance(split_order_names[0],str): 1316 raise MadGraph5Error(error_msg%'split_order_names') 1317 else: 1318 raise MadGraph5Error(error_msg%'split_order_names') 1319 1320 # Now scan all contributing orders to be individually computed and 1321 # construct the list of color_amplitudes for JAMP to be constructed 1322 # accordingly. 1323 res_list=[] 1324 for i, amp_order in enumerate(split_order_amps): 1325 col_amps_order = [] 1326 for jamp in color_amplitudes: 1327 col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]]) 1328 if split_order_names: 1329 res_list.append('C JAMPs contributing to orders '+' '.join( 1330 ['%s=%i'%order for order in zip(split_order_names, 1331 amp_order[0])])) 1332 if self.opt['export_format'] in ['madloop_matchbox']: 1333 res_list.extend(self.get_JAMP_lines(col_amps_order, 1334 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1335 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1336 else: 1337 res_list.extend(self.get_JAMP_lines(col_amps_order, 1338 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1339 1340 return res_list
1341 1342
1343 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1344 split=-1):
1345 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1346 defined as a matrix element or directly as a color_amplitudes dictionary, 1347 Jamp_formatLC should be define to allow to add LeadingColor computation 1348 (usefull for MatchBox) 1349 The split argument defines how the JAMP lines should be split in order 1350 not to be too long.""" 1351 1352 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1353 # the color amplitudes lists. 1354 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1355 color_amplitudes=col_amps.get_color_amplitudes() 1356 elif(isinstance(col_amps,list)): 1357 if(col_amps and isinstance(col_amps[0],list)): 1358 color_amplitudes=col_amps 1359 else: 1360 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines") 1361 else: 1362 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines") 1363 1364 1365 res_list = [] 1366 for i, coeff_list in enumerate(color_amplitudes): 1367 # It might happen that coeff_list is empty if this function was 1368 # called from get_JAMP_lines_split_order (i.e. if some color flow 1369 # does not contribute at all for a given order). 1370 # In this case we simply set it to 0. 1371 if coeff_list==[]: 1372 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1373 continue 1374 # Break the JAMP definition into 'n=split' pieces to avoid having 1375 # arbitrarly long lines. 1376 first=True 1377 n = (len(coeff_list)+1 if split<=0 else split) 1378 while coeff_list!=[]: 1379 coefs=coeff_list[:n] 1380 coeff_list=coeff_list[n:] 1381 res = ((JAMP_format+"=") % str(i + 1)) + \ 1382 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1383 1384 first=False 1385 # Optimization: if all contributions to that color basis element have 1386 # the same coefficient (up to a sign), put it in front 1387 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1388 common_factor = False 1389 diff_fracs = list(set(list_fracs)) 1390 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1391 common_factor = True 1392 global_factor = diff_fracs[0] 1393 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1394 1395 # loop for JAMP 1396 for (coefficient, amp_number) in coefs: 1397 if not coefficient: 1398 continue 1399 if common_factor: 1400 res = (res + "%s" + AMP_format) % \ 1401 (self.coeff(coefficient[0], 1402 coefficient[1] / abs(coefficient[1]), 1403 coefficient[2], 1404 coefficient[3]), 1405 str(amp_number)) 1406 else: 1407 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1408 coefficient[1], 1409 coefficient[2], 1410 coefficient[3]), 1411 str(amp_number)) 1412 1413 if common_factor: 1414 res = res + ')' 1415 1416 res_list.append(res) 1417 1418 return res_list
1419
1420 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1421 """Generate the PDF lines for the auto_dsig.f file""" 1422 1423 processes = matrix_element.get('processes') 1424 model = processes[0].get('model') 1425 1426 pdf_definition_lines = "" 1427 pdf_data_lines = "" 1428 pdf_lines = "" 1429 1430 if ninitial == 1: 1431 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1432 for i, proc in enumerate(processes): 1433 process_line = proc.base_string() 1434 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1435 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1436 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1437 else: 1438 # Pick out all initial state particles for the two beams 1439 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1440 p in processes]))), 1441 sorted(list(set([p.get_initial_pdg(2) for \ 1442 p in processes])))] 1443 1444 # Prepare all variable names 1445 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1446 sum(initial_states,[])]) 1447 for key,val in pdf_codes.items(): 1448 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1449 1450 # Set conversion from PDG code to number used in PDF calls 1451 pdgtopdf = {21: 0, 22: 7} 1452 1453 # Fill in missing entries of pdgtopdf 1454 for pdg in sum(initial_states,[]): 1455 if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()): 1456 pdgtopdf[pdg] = pdg 1457 elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()): 1458 # If any particle has pdg code 7, we need to use something else 1459 pdgtopdf[pdg] = 6000000 + pdg 1460 1461 # Get PDF variable declarations for all initial states 1462 for i in [0,1]: 1463 pdf_definition_lines += "DOUBLE PRECISION " + \ 1464 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1465 for pdg in \ 1466 initial_states[i]]) + \ 1467 "\n" 1468 1469 # Get PDF data lines for all initial states 1470 for i in [0,1]: 1471 pdf_data_lines += "DATA " + \ 1472 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1473 for pdg in initial_states[i]]) + \ 1474 "/%d*1D0/" % len(initial_states[i]) + \ 1475 "\n" 1476 1477 # Get PDF lines for all different initial states 1478 for i, init_states in enumerate(initial_states): 1479 if subproc_group: 1480 pdf_lines = pdf_lines + \ 1481 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1482 % (i + 1, i + 1) 1483 else: 1484 pdf_lines = pdf_lines + \ 1485 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1486 % (i + 1, i + 1) 1487 1488 for nbi,initial_state in enumerate(init_states): 1489 if initial_state in list(pdf_codes.keys()): 1490 if subproc_group: 1491 pdf_lines = pdf_lines + \ 1492 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \ 1493 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1494 (pdf_codes[initial_state], 1495 i + 1, i + 1, pdgtopdf[initial_state], 1496 i + 1, i + 1) 1497 else: 1498 pdf_lines = pdf_lines + \ 1499 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \ 1500 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1501 (pdf_codes[initial_state], 1502 i + 1, i + 1, pdgtopdf[initial_state], 1503 i + 1, 1504 i + 1, i + 1) 1505 pdf_lines = pdf_lines + "ENDIF\n" 1506 1507 # Add up PDFs for the different initial state particles 1508 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1509 for proc in processes: 1510 process_line = proc.base_string() 1511 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1512 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1513 for ibeam in [1, 2]: 1514 initial_state = proc.get_initial_pdg(ibeam) 1515 if initial_state in list(pdf_codes.keys()): 1516 pdf_lines = pdf_lines + "%s%d*" % \ 1517 (pdf_codes[initial_state], ibeam) 1518 else: 1519 pdf_lines = pdf_lines + "1d0*" 1520 # Remove last "*" from pdf_lines 1521 pdf_lines = pdf_lines[:-1] + "\n" 1522 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1523 1524 # Remove last line break from the return variables 1525 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1526 1527 #=========================================================================== 1528 # write_props_file 1529 #===========================================================================
1530 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1531 """Write the props.inc file for MadEvent. Needs input from 1532 write_configs_file.""" 1533 1534 lines = [] 1535 1536 particle_dict = matrix_element.get('processes')[0].get('model').\ 1537 get('particle_dict') 1538 1539 for iconf, configs in enumerate(s_and_t_channels): 1540 for vertex in configs[0] + configs[1][:-1]: 1541 leg = vertex.get('legs')[-1] 1542 if leg.get('id') not in particle_dict: 1543 # Fake propagator used in multiparticle vertices 1544 mass = 'zero' 1545 width = 'zero' 1546 pow_part = 0 1547 else: 1548 particle = particle_dict[leg.get('id')] 1549 # Get mass 1550 if particle.get('mass').lower() == 'zero': 1551 mass = particle.get('mass') 1552 else: 1553 mass = "abs(%s)" % particle.get('mass') 1554 # Get width 1555 if particle.get('width').lower() == 'zero': 1556 width = particle.get('width') 1557 else: 1558 width = "abs(%s)" % particle.get('width') 1559 1560 pow_part = 1 + int(particle.is_boson()) 1561 1562 lines.append("prmass(%d,%d) = %s" % \ 1563 (leg.get('number'), iconf + 1, mass)) 1564 lines.append("prwidth(%d,%d) = %s" % \ 1565 (leg.get('number'), iconf + 1, width)) 1566 lines.append("pow(%d,%d) = %d" % \ 1567 (leg.get('number'), iconf + 1, pow_part)) 1568 1569 # Write the file 1570 writer.writelines(lines) 1571 1572 return True
1573 1574 #=========================================================================== 1575 # write_configs_file 1576 #===========================================================================
1577 - def write_configs_file(self, writer, matrix_element):
1578 """Write the configs.inc file for MadEvent""" 1579 1580 # Extract number of external particles 1581 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1582 1583 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1584 mapconfigs = [c[0] for c in configs] 1585 model = matrix_element.get('processes')[0].get('model') 1586 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1587 [[c[1]] for c in configs], 1588 mapconfigs, 1589 nexternal, ninitial, 1590 model)
1591 1592 #=========================================================================== 1593 # write_configs_file_from_diagrams 1594 #===========================================================================
1595 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1596 nexternal, ninitial, model):
1597 """Write the actual configs.inc file. 1598 1599 configs is the diagrams corresponding to configs (each 1600 diagrams is a list of corresponding diagrams for all 1601 subprocesses, with None if there is no corresponding diagrams 1602 for a given process). 1603 mapconfigs gives the diagram number for each config. 1604 1605 For s-channels, we need to output one PDG for each subprocess in 1606 the subprocess group, in order to be able to pick the right 1607 one for multiprocesses.""" 1608 1609 lines = [] 1610 1611 s_and_t_channels = [] 1612 1613 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1614 for config in configs if [d for d in config if d][0].\ 1615 get_vertex_leg_numbers()!=[]] 1616 minvert = min(vert_list) if vert_list!=[] else 0 1617 1618 # Number of subprocesses 1619 nsubprocs = len(configs[0]) 1620 1621 nconfigs = 0 1622 1623 new_pdg = model.get_first_non_pdg() 1624 1625 for iconfig, helas_diags in enumerate(configs): 1626 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1627 [0].get_vertex_leg_numbers()) : 1628 # Only 3-vertices allowed in configs.inc except for vertices 1629 # which originate from a shrunk loop. 1630 continue 1631 nconfigs += 1 1632 1633 # Need s- and t-channels for all subprocesses, including 1634 # those that don't contribute to this config 1635 empty_verts = [] 1636 stchannels = [] 1637 for h in helas_diags: 1638 if h: 1639 # get_s_and_t_channels gives vertices starting from 1640 # final state external particles and working inwards 1641 stchannels.append(h.get('amplitudes')[0].\ 1642 get_s_and_t_channels(ninitial, model, new_pdg)) 1643 else: 1644 stchannels.append((empty_verts, None)) 1645 1646 # For t-channels, just need the first non-empty one 1647 tchannels = [t for s,t in stchannels if t != None][0] 1648 1649 # For s_and_t_channels (to be used later) use only first config 1650 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1651 tchannels]) 1652 1653 # Make sure empty_verts is same length as real vertices 1654 if any([s for s,t in stchannels]): 1655 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1656 1657 # Reorganize s-channel vertices to get a list of all 1658 # subprocesses for each vertex 1659 schannels = list(zip(*[s for s,t in stchannels])) 1660 else: 1661 schannels = [] 1662 1663 allchannels = schannels 1664 if len(tchannels) > 1: 1665 # Write out tchannels only if there are any non-trivial ones 1666 allchannels = schannels + tchannels 1667 1668 # Write out propagators for s-channel and t-channel vertices 1669 1670 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1671 # Correspondance between the config and the diagram = amp2 1672 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1673 mapconfigs[iconfig])) 1674 1675 for verts in allchannels: 1676 if verts in schannels: 1677 vert = [v for v in verts if v][0] 1678 else: 1679 vert = verts 1680 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1681 last_leg = vert.get('legs')[-1] 1682 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1683 (last_leg.get('number'), nconfigs, len(daughters), 1684 ",".join([str(d) for d in daughters]))) 1685 if verts in schannels: 1686 pdgs = [] 1687 for v in verts: 1688 if v: 1689 pdgs.append(v.get('legs')[-1].get('id')) 1690 else: 1691 pdgs.append(0) 1692 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1693 (last_leg.get('number'), nconfigs, nsubprocs, 1694 ",".join([str(d) for d in pdgs]))) 1695 lines.append("data tprid(%d,%d)/0/" % \ 1696 (last_leg.get('number'), 
nconfigs)) 1697 elif verts in tchannels[:-1]: 1698 lines.append("data tprid(%d,%d)/%d/" % \ 1699 (last_leg.get('number'), nconfigs, 1700 abs(last_leg.get('id')))) 1701 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1702 (last_leg.get('number'), nconfigs, nsubprocs, 1703 ",".join(['0'] * nsubprocs))) 1704 1705 # Write out number of configs 1706 lines.append("# Number of configs") 1707 lines.append("data mapconfig(0)/%d/" % nconfigs) 1708 1709 # Write the file 1710 writer.writelines(lines) 1711 1712 return s_and_t_channels
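Each retained configuration therefore becomes a handful of Fortran DATA statements: mapconfig links the config to its diagram, iforest stores the daughter legs, sprop the per-subprocess s-channel PDG codes, and tprid the t-channel propagator id. A hedged sketch of a single s-channel entry, with made-up (negative) internal leg numbers:

    # Toy example of one s-channel block, using the same format strings as above.
    last_leg_number, nconfigs, nsubprocs = -1, 1, 2
    daughters, pdgs = [3, 4], [23, 0]     # second subprocess has no matching diagram
    lines = [
        "data (iforest(i,%d,%d),i=1,%d)/%s/" % (last_leg_number, nconfigs,
                                                len(daughters),
                                                ",".join(map(str, daughters))),
        "data (sprop(i,%d,%d),i=1,%d)/%s/" % (last_leg_number, nconfigs, nsubprocs,
                                              ",".join(map(str, pdgs))),
        "data tprid(%d,%d)/0/" % (last_leg_number, nconfigs),
    ]
    # -> data (iforest(i,-1,1),i=1,2)/3,4/
    #    data (sprop(i,-1,1),i=1,2)/23,0/
    #    data tprid(-1,1)/0/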
1713 1714 #=========================================================================== 1715 # Global helper methods 1716 #=========================================================================== 1717
1718 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1719 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1720 1721 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1722 1723 if total_coeff == 1: 1724 if is_imaginary: 1725 return '+imag1*' 1726 else: 1727 return '+' 1728 elif total_coeff == -1: 1729 if is_imaginary: 1730 return '-imag1*' 1731 else: 1732 return '-' 1733 1734 res_str = '%+iD0' % total_coeff.numerator 1735 1736 if total_coeff.denominator != 1: 1737 # Check if total_coeff is an integer 1738 res_str = res_str + '/%iD0' % total_coeff.denominator 1739 1740 if is_imaginary: 1741 res_str = res_str + '*imag1' 1742 1743 return res_str + '*'
1744 1745
1746 - def set_fortran_compiler(self, default_compiler, force=False):
1747 """Set compiler based on what's available on the system""" 1748 1749 # Check for compiler 1750 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1751 f77_compiler = default_compiler['fortran'] 1752 elif misc.which('gfortran'): 1753 f77_compiler = 'gfortran' 1754 elif misc.which('g77'): 1755 f77_compiler = 'g77' 1756 elif misc.which('f77'): 1757 f77_compiler = 'f77' 1758 elif default_compiler['fortran']: 1759 logger.warning('No Fortran Compiler detected! Please install one') 1760 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1761 else: 1762 raise MadGraph5Error('No Fortran Compiler detected! Please install one') 1763 logger.info('Use Fortran compiler ' + f77_compiler) 1764 1765 1766 # Check for compiler. 1. set default. 1767 if default_compiler['f2py']: 1768 f2py_compiler = default_compiler['f2py'] 1769 else: 1770 f2py_compiler = '' 1771 # Try to find the correct one. 1772 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1773 f2py_compiler = default_compiler['f2py'] 1774 elif misc.which('f2py'): 1775 f2py_compiler = 'f2py' 1776 elif sys.version_info[1] == 6: 1777 if misc.which('f2py-2.6'): 1778 f2py_compiler = 'f2py-2.6' 1779 elif misc.which('f2py2.6'): 1780 f2py_compiler = 'f2py2.6' 1781 elif sys.version_info[1] == 7: 1782 if misc.which('f2py-2.7'): 1783 f2py_compiler = 'f2py-2.7' 1784 elif misc.which('f2py2.7'): 1785 f2py_compiler = 'f2py2.7' 1786 1787 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1788 1789 1790 self.replace_make_opt_f_compiler(to_replace) 1791 # Replace also for Template but not for cluster 1792 if 'MADGRAPH_DATA' not in os.environ and ReadWrite: 1793 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1794 1795 return f77_compiler
1796 1797 # an alias for backward compatibility 1798 set_compiler = set_fortran_compiler 1799 1800
1801 - def set_cpp_compiler(self, default_compiler, force=False):
1802 """Set compiler based on what's available on the system""" 1803 1804 # Check for compiler 1805 if default_compiler and misc.which(default_compiler): 1806 compiler = default_compiler 1807 elif misc.which('g++'): 1808 #check if clang version 1809 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1810 stderr=subprocess.PIPE) 1811 out, _ = p.communicate() 1812 out = out.decode() 1813 if 'clang' in str(out) and misc.which('clang'): 1814 compiler = 'clang' 1815 else: 1816 compiler = 'g++' 1817 elif misc.which('c++'): 1818 compiler = 'c++' 1819 elif misc.which('clang'): 1820 compiler = 'clang' 1821 elif default_compiler: 1822 logger.warning('No c++ Compiler detected! Please install one') 1823 compiler = default_compiler # maybe misc fail so try with it 1824 else: 1825 raise MadGraph5Error('No c++ Compiler detected! Please install one') 1826 logger.info('Use c++ compiler ' + compiler) 1827 self.replace_make_opt_c_compiler(compiler) 1828 # Replace also for Template but not for cluster 1829 if 'MADGRAPH_DATA' not in os.environ and ReadWrite and \ 1830 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1831 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1832 1833 return compiler
1834 1835
1836 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1837 """Set FC=compiler in Source/make_opts""" 1838 1839 assert isinstance(compilers, dict) 1840 1841 mod = False #avoid to rewrite the file if not needed 1842 if not root_dir: 1843 root_dir = self.dir_path 1844 1845 compiler= compilers['fortran'] 1846 f2py_compiler = compilers['f2py'] 1847 if not f2py_compiler: 1848 f2py_compiler = 'f2py' 1849 for_update= {'DEFAULT_F_COMPILER':compiler, 1850 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1851 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1852 1853 try: 1854 common_run_interface.CommonRunCmd.update_make_opts_full( 1855 make_opts, for_update) 1856 except IOError: 1857 if root_dir == self.dir_path: 1858 logger.info('Fail to set compiler. Trying to continue anyway.')
1859
1860 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1861 """Set CXX=compiler in Source/make_opts. 1862 The version is also checked, in order to set some extra flags 1863 if the compiler is clang (on MACOS)""" 1864 1865 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1866 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1867 1868 1869 # list of the variable to set in the make_opts file 1870 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1871 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1872 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1873 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1874 } 1875 1876 # for MOJAVE remove the MACFLAG: 1877 if is_clang: 1878 import platform 1879 version, _, _ = platform.mac_ver() 1880 if not version:# not linux 1881 version = 14 # set version to remove MACFLAG 1882 else: 1883 version = int(version.split('.')[1]) 1884 if version >= 14: 1885 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1886 1887 if not root_dir: 1888 root_dir = self.dir_path 1889 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1890 1891 try: 1892 common_run_interface.CommonRunCmd.update_make_opts_full( 1893 make_opts, for_update) 1894 except IOError: 1895 if root_dir == self.dir_path: 1896 logger.info('Fail to set compiler. Trying to continue anyway.') 1897 1898 return
1899
1900 #=============================================================================== 1901 # ProcessExporterFortranSA 1902 #=============================================================================== 1903 -class ProcessExporterFortranSA(ProcessExporterFortran):
1904 """Class to take care of exporting a set of matrix elements to 1905 MadGraph v4 StandAlone format.""" 1906 1907 matrix_template = "matrix_standalone_v4.inc" 1908
1909 - def __init__(self, *args,**opts):
1910          """Add the format information compared to the standard init.""" 
1911   
1912          if 'format' in opts: 
1913              self.format = opts['format'] 
1914              del opts['format'] 
1915          else: 
1916              self.format = 'standalone' 
1917   
1918          self.prefix_info = {} 
1919          ProcessExporterFortran.__init__(self, *args, **opts) 

1920
1921 - def copy_template(self, model):
1922 """Additional actions needed for setup of Template 1923 """ 1924 1925 #First copy the full template tree if dir_path doesn't exit 1926 if os.path.isdir(self.dir_path): 1927 return 1928 1929 logger.info('initialize a new standalone directory: %s' % \ 1930 os.path.basename(self.dir_path)) 1931 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1932 1933 # Create the directory structure 1934 os.mkdir(self.dir_path) 1935 os.mkdir(pjoin(self.dir_path, 'Source')) 1936 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1937 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1938 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1939 os.mkdir(pjoin(self.dir_path, 'bin')) 1940 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1941 os.mkdir(pjoin(self.dir_path, 'lib')) 1942 os.mkdir(pjoin(self.dir_path, 'Cards')) 1943 1944 # Information at top-level 1945 #Write version info 1946 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1947 try: 1948 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1949 except IOError: 1950 MG5_version = misc.get_pkg_info() 1951 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1952 "5." + MG5_version['version']) 1953 1954 1955 # Add file in SubProcesses 1956 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1957 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 1958 1959 if self.format == 'standalone': 1960 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1961 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1962 1963 # Add file in Source 1964 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1965 pjoin(self.dir_path, 'Source')) 1966 # add the makefile 1967 filename = pjoin(self.dir_path,'Source','makefile') 1968 self.write_source_makefile(writers.FileWriter(filename))
1969 1970 #=========================================================================== 1971 # export model files 1972 #===========================================================================
1973 - def export_model_files(self, model_path):
1974 """export the model dependent files for V4 model""" 1975 1976 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1977 # Add the routine update_as_param in v4 model 1978 # This is a function created in the UFO 1979 text=""" 1980 subroutine update_as_param() 1981 call setpara('param_card.dat',.false.) 1982 return 1983 end 1984 """ 1985 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1986 ff.write(text) 1987 ff.close() 1988 1989 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1990 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1991 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1992 fsock.write(text) 1993 fsock.close() 1994 1995 self.make_model_symbolic_link()
1996 1997 #=========================================================================== 1998 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1999 #===========================================================================
2000 - def write_procdef_mg5(self, file_pos, modelname, process_str):
2001          """Write an equivalent of the MG4 proc_card so that all the MadEvent4 
2002          Perl scripts keep working properly for a pure MG5 run. 
2003          Not needed for StandAlone, so just return. 
2004          """ 
2005   
2006          return 
2007 2008 2009 #=========================================================================== 2010 # Make the Helas and Model directories for Standalone directory 2011 #===========================================================================
2012 - def make(self):
2013 """Run make in the DHELAS and MODEL directories, to set up 2014 everything for running standalone 2015 """ 2016 2017 source_dir = pjoin(self.dir_path, "Source") 2018 logger.info("Running make for Helas") 2019 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2020 logger.info("Running make for Model") 2021 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2022 2023 #=========================================================================== 2024 # Create proc_card_mg5.dat for Standalone directory 2025 #===========================================================================
2026 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2027 """Finalize Standalone MG4 directory by 2028 generation proc_card_mg5.dat 2029 generate a global makefile 2030 """ 2031 2032 compiler = {'fortran': mg5options['fortran_compiler'], 2033 'cpp': mg5options['cpp_compiler'], 2034 'f2py': mg5options['f2py_compiler']} 2035 2036 self.compiler_choice(compiler) 2037 self.make() 2038 2039 # Write command history as proc_card_mg5 2040 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 2041 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2042 history.write(output_file) 2043 2044 ProcessExporterFortran.finalize(self, matrix_elements, 2045 history, mg5options, flaglist) 2046 open(pjoin(self.dir_path,'__init__.py'),'w') 2047 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w') 2048 2049 if False:#'mode' in self.opt and self.opt['mode'] == "reweight": 2050 #add the module to hande the NLO weight 2051 files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'), 2052 pjoin(self.dir_path, 'Source')) 2053 files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'), 2054 pjoin(self.dir_path, 'Source', 'PDF')) 2055 self.write_pdf_opendata() 2056 2057 if self.prefix_info: 2058 self.write_f2py_splitter() 2059 self.write_f2py_makefile() 2060 self.write_f2py_check_sa(matrix_elements, 2061 pjoin(self.dir_path,'SubProcesses','check_sa.py')) 2062 else: 2063 # create a single makefile to compile all the subprocesses 2064 text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n''' 2065 deppython = '' 2066 for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')): 2067 if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)): 2068 text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir} 2069 deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir} 2070 text+='all: %s\n\techo \'done\'' % deppython 2071 2072 ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a') 2073 ff.write(text) 2074 ff.close()
2075
2076 - def write_f2py_splitter(self):
2077 """write a function to call the correct matrix element""" 2078 2079 template = """ 2080 %(python_information)s 2081 subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS) 2082 IMPLICIT NONE 2083 C ALPHAS is given at scale2 (SHOULD be different of 0 for loop induced, ignore for LO) 2084 2085 CF2PY double precision, intent(in), dimension(0:3,npdg) :: p 2086 CF2PY integer, intent(in), dimension(npdg) :: pdgs 2087 CF2PY integer, intent(in):: procid 2088 CF2PY integer, intent(in) :: npdg 2089 CF2PY double precision, intent(out) :: ANS 2090 CF2PY double precision, intent(in) :: ALPHAS 2091 CF2PY double precision, intent(in) :: SCALE2 2092 integer pdgs(*) 2093 integer npdg, nhel, procid 2094 double precision p(*) 2095 double precision ANS, ALPHAS, PI,SCALE2 2096 include 'coupl.inc' 2097 2098 PI = 3.141592653589793D0 2099 G = 2* DSQRT(ALPHAS*PI) 2100 CALL UPDATE_AS_PARAM() 2101 c if (scale2.ne.0d0) stop 1 2102 2103 %(smatrixhel)s 2104 2105 return 2106 end 2107 2108 SUBROUTINE INITIALISE(PATH) 2109 C ROUTINE FOR F2PY to read the benchmark point. 2110 IMPLICIT NONE 2111 CHARACTER*512 PATH 2112 CF2PY INTENT(IN) :: PATH 2113 CALL SETPARA(PATH) !first call to setup the paramaters 2114 RETURN 2115 END 2116 2117 2118 subroutine CHANGE_PARA(name, value) 2119 implicit none 2120 CF2PY intent(in) :: name 2121 CF2PY intent(in) :: value 2122 2123 character*512 name 2124 double precision value 2125 2126 %(helreset_def)s 2127 2128 include '../Source/MODEL/input.inc' 2129 include '../Source/MODEL/coupl.inc' 2130 2131 %(helreset_setup)s 2132 2133 SELECT CASE (name) 2134 %(parameter_setup)s 2135 CASE DEFAULT 2136 write(*,*) 'no parameter matching', name, value 2137 END SELECT 2138 2139 return 2140 end 2141 2142 subroutine update_all_coup() 2143 implicit none 2144 call coup() 2145 return 2146 end 2147 2148 2149 subroutine get_pdg_order(PDG, ALLPROC) 2150 IMPLICIT NONE 2151 CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i) 2152 CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i) 2153 INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i) 2154 INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i) 2155 DATA PDGS/ %(pdgs)s / 2156 DATA PIDS/ %(pids)s / 2157 PDG = PDGS 2158 ALLPROC = PIDS 2159 RETURN 2160 END 2161 2162 subroutine get_prefix(PREFIX) 2163 IMPLICIT NONE 2164 CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i) 2165 character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i) 2166 DATA PREF / '%(prefix)s'/ 2167 PREFIX = PREF 2168 RETURN 2169 END 2170 2171 2172 """ 2173 2174 allids = list(self.prefix_info.keys()) 2175 allprefix = [self.prefix_info[key][0] for key in allids] 2176 min_nexternal = min([len(ids[0]) for ids in allids]) 2177 max_nexternal = max([len(ids[0]) for ids in allids]) 2178 2179 info = [] 2180 for (key, pid), (prefix, tag) in self.prefix_info.items(): 2181 info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid)) 2182 2183 2184 text = [] 2185 for n_ext in range(min_nexternal, max_nexternal+1): 2186 current_id = [ids[0] for ids in allids if len(ids[0])==n_ext] 2187 current_pid = [ids[1] for ids in allids if len(ids[0])==n_ext] 2188 if not current_id: 2189 continue 2190 if min_nexternal != max_nexternal: 2191 if n_ext == min_nexternal: 2192 text.append(' if (npdg.eq.%i)then' % n_ext) 2193 else: 2194 text.append(' else if (npdg.eq.%i)then' % n_ext) 2195 for ii,pdgs in enumerate(current_id): 2196 pid = current_pid[ii] 2197 condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)]) 2198 if ii==0: 2199 text.append( ' if(%s.and.(procid.le.0.or.procid.eq.%d)) 
then ! %i' % (condition, pid, ii)) 2200 else: 2201 text.append( ' else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition,pid,ii)) 2202 text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs,pid)][0]) 2203 text.append(' endif') 2204 #close the function 2205 if min_nexternal != max_nexternal: 2206 text.append('endif') 2207 2208 params = self.get_model_parameter(self.model) 2209 parameter_setup =[] 2210 for key, var in params.items(): 2211 parameter_setup.append(' CASE ("%s")\n %s = value' 2212 % (key, var)) 2213 2214 # part for the resetting of the helicity 2215 helreset_def = [] 2216 helreset_setup = [] 2217 for prefix in set(allprefix): 2218 helreset_setup.append(' %shelreset = .true. ' % prefix) 2219 helreset_def.append(' logical %shelreset \n common /%shelreset/ %shelreset' % (prefix, prefix, prefix)) 2220 2221 2222 formatting = {'python_information':'\n'.join(info), 2223 'smatrixhel': '\n'.join(text), 2224 'maxpart': max_nexternal, 2225 'nb_me': len(allids), 2226 'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0' 2227 for i in range(max_nexternal) for (pdg,pid) in allids), 2228 'prefix':'\',\''.join(allprefix), 2229 'pids': ','.join(str(pid) for (pdg,pid) in allids), 2230 'parameter_setup': '\n'.join(parameter_setup), 2231 'helreset_def' : '\n'.join(helreset_def), 2232 'helreset_setup' : '\n'.join(helreset_setup), 2233 } 2234 formatting['lenprefix'] = len(formatting['prefix']) 2235 text = template % formatting 2236 fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w') 2237 fsock.writelines(text) 2238 fsock.close()
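The dispatch written into smatrixhel compares the requested PDG list position by position before calling the prefixed smatrixhel of the matching process. The condition string is assembled exactly as sketched below (toy PDG list, process id 1):

    # Sketch of how one Fortran dispatch condition above is assembled.
    pdgs, pid = (21, 21, 6, -6), 1
    condition = '.and.'.join('%i.eq.pdgs(%i)' % (pdg, i + 1)
                             for i, pdg in enumerate(pdgs))
    line = ' if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, 0)
    # -> ' if(21.eq.pdgs(1).and.21.eq.pdgs(2).and.6.eq.pdgs(3).and.-6.eq.pdgs(4)
    #         .and.(procid.le.0.or.procid.eq.1)) then ! 0'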
2239
2240 - def get_model_parameter(self, model):
2241          """Return all the external model parameters, keyed both by their name 
2242          (without the 'mdl_' prefix) and by their LHA block and code.""" 
2243          params = {} 
2244          for p in model.get('parameters')[('external',)]: 
2245              name = p.name 
2246              nopref = name[4:] if name.startswith('mdl_') else name 
2247              params[nopref] = name 
2248   
2249              block = p.lhablock 
2250              lha = '_'.join([str(i) for i in p.lhacode]) 
2251              params['%s_%s' % (block.upper(), lha)] = name 
2252   
2253          return params 
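The returned dictionary therefore contains two keys per external parameter. For a top-quark mass defined as 'mdl_MT' in block MASS with lhacode [6] (standard UFO conventions, used here only as an example), it would contain:

    # Illustrative shape of the mapping returned by get_model_parameter:
    params = {
        'MT': 'mdl_MT',        # prefix-stripped name  -> full UFO name
        'MASS_6': 'mdl_MT',    # '<BLOCK>_<lhacode>'   -> full UFO name
    }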
2254 2255 2256 2257 2258
2259 - def write_f2py_check_sa(self, matrix_element, writer):
2260 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2261 # To be implemented. It is just an example file, i.e. not crucial. 2262 return
2263
2264 - def write_f2py_makefile(self):
2265 """ """ 2266 # Add file in SubProcesses 2267 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2268 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2269
2270 - def create_MA5_cards(self,*args,**opts):
2271          """Overload the mother-class function so as to bypass this step in StandAlone.""" 
2272          pass 
2273
2274 - def compiler_choice(self, compiler):
2275 """ Different daughter classes might want different compilers. 2276 So this function is meant to be overloaded if desired.""" 2277 2278 self.set_compiler(compiler)
2279 2280 #=========================================================================== 2281 # generate_subprocess_directory 2282 #===========================================================================
2283 - def generate_subprocess_directory(self, matrix_element, 2284 fortran_model, number):
2285 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 2286 including the necessary matrix.f and nexternal.inc files""" 2287 2288 cwd = os.getcwd() 2289 # Create the directory PN_xx_xxxxx in the specified path 2290 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 2291 "P%s" % matrix_element.get('processes')[0].shell_string()) 2292 2293 if self.opt['sa_symmetry']: 2294 # avoid symmetric output 2295 for i,proc in enumerate(matrix_element.get('processes')): 2296 2297 tag = proc.get_tag() 2298 legs = proc.get('legs')[:] 2299 leg0 = proc.get('legs')[0] 2300 leg1 = proc.get('legs')[1] 2301 if not leg1.get('state'): 2302 proc.get('legs')[0] = leg1 2303 proc.get('legs')[1] = leg0 2304 flegs = proc.get('legs')[2:] 2305 for perm in itertools.permutations(flegs): 2306 for i,p in enumerate(perm): 2307 proc.get('legs')[i+2] = p 2308 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 2309 "P%s" % proc.shell_string()) 2310 #restore original order 2311 proc.get('legs')[2:] = legs[2:] 2312 if os.path.exists(dirpath2): 2313 proc.get('legs')[:] = legs 2314 return 0 2315 proc.get('legs')[:] = legs 2316 2317 try: 2318 os.mkdir(dirpath) 2319 except os.error as error: 2320 logger.warning(error.strerror + " " + dirpath) 2321 2322 #try: 2323 # os.chdir(dirpath) 2324 #except os.error: 2325 # logger.error('Could not cd to directory %s' % dirpath) 2326 # return 0 2327 2328 logger.info('Creating files in directory %s' % dirpath) 2329 2330 # Extract number of external particles 2331 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2332 2333 # Create the matrix.f file and the nexternal.inc file 2334 if self.opt['export_format']=='standalone_msP': 2335 filename = pjoin(dirpath, 'matrix_prod.f') 2336 else: 2337 filename = pjoin(dirpath, 'matrix.f') 2338 2339 proc_prefix = '' 2340 if 'prefix' in self.cmd_options: 2341 if self.cmd_options['prefix'] == 'int': 2342 proc_prefix = 'M%s_' % number 2343 elif self.cmd_options['prefix'] == 'proc': 2344 proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1] 2345 else: 2346 raise Exception('--prefix options supports only \'int\' and \'proc\'') 2347 for proc in matrix_element.get('processes'): 2348 ids = [l.get('id') for l in proc.get('legs_with_decays')] 2349 self.prefix_info[(tuple(ids), proc.get('id'))] = [proc_prefix, proc.get_tag()] 2350 2351 calls = self.write_matrix_element_v4( 2352 writers.FortranWriter(filename), 2353 matrix_element, 2354 fortran_model, 2355 proc_prefix=proc_prefix) 2356 2357 if self.opt['export_format'] == 'standalone_msP': 2358 filename = pjoin(dirpath,'configs_production.inc') 2359 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2360 writers.FortranWriter(filename), 2361 matrix_element) 2362 2363 filename = pjoin(dirpath,'props_production.inc') 2364 self.write_props_file(writers.FortranWriter(filename), 2365 matrix_element, 2366 s_and_t_channels) 2367 2368 filename = pjoin(dirpath,'nexternal_prod.inc') 2369 self.write_nexternal_madspin(writers.FortranWriter(filename), 2370 nexternal, ninitial) 2371 2372 if self.opt['export_format']=='standalone_msF': 2373 filename = pjoin(dirpath, 'helamp.inc') 2374 ncomb=matrix_element.get_helicity_combinations() 2375 self.write_helamp_madspin(writers.FortranWriter(filename), 2376 ncomb) 2377 2378 filename = pjoin(dirpath, 'nexternal.inc') 2379 self.write_nexternal_file(writers.FortranWriter(filename), 2380 nexternal, ninitial) 2381 2382 filename = pjoin(dirpath, 'pmass.inc') 2383 self.write_pmass_file(writers.FortranWriter(filename), 2384 matrix_element) 
2385 2386 filename = pjoin(dirpath, 'ngraphs.inc') 2387 self.write_ngraphs_file(writers.FortranWriter(filename), 2388 len(matrix_element.get_all_amplitudes())) 2389 2390 # Generate diagrams 2391 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 2392 filename = pjoin(dirpath, "matrix.ps") 2393 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2394 get('diagrams'), 2395 filename, 2396 model=matrix_element.get('processes')[0].\ 2397 get('model'), 2398 amplitude=True) 2399 logger.info("Generating Feynman diagrams for " + \ 2400 matrix_element.get('processes')[0].nice_string()) 2401 plot.draw() 2402 2403 linkfiles = ['check_sa.f', 'coupl.inc'] 2404 2405 if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')): 2406 text = open(pjoin(dirpath, '..', 'check_sa.f')).read() 2407 pat = re.compile('smatrix', re.I) 2408 new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text) 2409 with open(pjoin(dirpath, 'check_sa.f'),'w') as f: 2410 f.write(new_text) 2411 linkfiles.pop(0) 2412 2413 for file in linkfiles: 2414 ln('../%s' % file, cwd=dirpath) 2415 ln('../makefileP', name='makefile', cwd=dirpath) 2416 # Return to original PWD 2417 #os.chdir(cwd) 2418 2419 if not calls: 2420 calls = 0 2421 return calls
2422 2423 2424 #=========================================================================== 2425 # write_source_makefile 2426 #===========================================================================
2427 - def write_source_makefile(self, writer):
2428          """Write the Source directory makefile for the standalone output""" 
2429   
2430          path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 
2431          set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 
2432          model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 
2433   
2434          replace_dict= {'libraries': set_of_lib, 
2435                         'model':model_line, 
2436                         'additional_dsample': '', 
2437                         'additional_dependencies':''} 
2438   
2439          text = open(path).read() % replace_dict 
2440   
2441          if writer: 
2442              writer.write(text) 
2443   
2444          return replace_dict 
2445 2446 #=========================================================================== 2447 # write_matrix_element_v4 2448 #===========================================================================
2449 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2450 write=True, proc_prefix=''):
2451 """Export a matrix element to a matrix.f file in MG4 standalone format 2452 if write is on False, just return the replace_dict and not write anything.""" 2453 2454 2455 if not matrix_element.get('processes') or \ 2456 not matrix_element.get('diagrams'): 2457 return 0 2458 2459 if writer: 2460 if not isinstance(writer, writers.FortranWriter): 2461 raise writers.FortranWriter.FortranWriterError(\ 2462 "writer not FortranWriter but %s" % type(writer)) 2463 # Set lowercase/uppercase Fortran code 2464 writers.FortranWriter.downcase = False 2465 2466 2467 if 'sa_symmetry' not in self.opt: 2468 self.opt['sa_symmetry']=False 2469 2470 2471 # The proc_id is for MadEvent grouping which is never used in SA. 2472 replace_dict = {'global_variable':'', 'amp2_lines':'', 2473 'proc_prefix':proc_prefix, 'proc_id':''} 2474 2475 # Extract helas calls 2476 helas_calls = fortran_model.get_matrix_element_calls(\ 2477 matrix_element) 2478 2479 replace_dict['helas_calls'] = "\n".join(helas_calls) 2480 2481 # Extract version number and date from VERSION file 2482 info_lines = self.get_mg5_info_lines() 2483 replace_dict['info_lines'] = info_lines 2484 2485 # Extract process info lines 2486 process_lines = self.get_process_info_lines(matrix_element) 2487 replace_dict['process_lines'] = process_lines 2488 2489 # Extract number of external particles 2490 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2491 replace_dict['nexternal'] = nexternal 2492 replace_dict['nincoming'] = ninitial 2493 2494 # Extract ncomb 2495 ncomb = matrix_element.get_helicity_combinations() 2496 replace_dict['ncomb'] = ncomb 2497 2498 # Extract helicity lines 2499 helicity_lines = self.get_helicity_lines(matrix_element) 2500 replace_dict['helicity_lines'] = helicity_lines 2501 2502 # Extract overall denominator 2503 # Averaging initial state color, spin, and identical FS particles 2504 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2505 2506 # Extract ngraphs 2507 ngraphs = matrix_element.get_number_of_amplitudes() 2508 replace_dict['ngraphs'] = ngraphs 2509 2510 # Extract nwavefuncs 2511 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2512 replace_dict['nwavefuncs'] = nwavefuncs 2513 2514 # Extract ncolor 2515 ncolor = max(1, len(matrix_element.get('color_basis'))) 2516 replace_dict['ncolor'] = ncolor 2517 2518 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2519 replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\ 2520 matrix_element.get_beams_hel_avg_factor() 2521 2522 # Extract color data lines 2523 color_data_lines = self.get_color_data_lines(matrix_element) 2524 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2525 2526 if self.opt['export_format']=='standalone_msP': 2527 # For MadSpin need to return the AMP2 2528 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2529 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2530 replace_dict['global_variable'] = \ 2531 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2532 2533 # JAMP definition, depends on the number of independent split orders 2534 split_orders=matrix_element.get('processes')[0].get('split_orders') 2535 2536 if len(split_orders)==0: 2537 replace_dict['nSplitOrders']='' 2538 # Extract JAMP lines 2539 jamp_lines = self.get_JAMP_lines(matrix_element) 2540 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2541 # set all amplitude order to weight 1 and only one squared order 2542 # contribution which is of course ALL_ORDERS=2. 
2543 squared_orders = [(2,),] 2544 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2545 replace_dict['chosen_so_configs'] = '.TRUE.' 2546 replace_dict['nSqAmpSplitOrders']=1 2547 replace_dict['split_order_str_list']='' 2548 else: 2549 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2550 replace_dict['nAmpSplitOrders']=len(amp_orders) 2551 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2552 replace_dict['nSplitOrders']=len(split_orders) 2553 replace_dict['split_order_str_list']=str(split_orders) 2554 amp_so = self.get_split_orders_lines( 2555 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2556 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2557 replace_dict['ampsplitorders']='\n'.join(amp_so) 2558 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2559 jamp_lines = self.get_JAMP_lines_split_order(\ 2560 matrix_element,amp_orders,split_order_names=split_orders) 2561 2562 # Now setup the array specifying what squared split order is chosen 2563 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2564 matrix_element.get('processes')[0],squared_orders) 2565 2566 # For convenience we also write the driver check_sa_splitOrders.f 2567 # that explicitely writes out the contribution from each squared order. 2568 # The original driver still works and is compiled with 'make' while 2569 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2570 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2571 self.write_check_sa_splitOrders(squared_orders,split_orders, 2572 nexternal,ninitial,proc_prefix,check_sa_writer) 2573 2574 if write: 2575 writers.FortranWriter('nsqso_born.inc').writelines( 2576 """INTEGER NSQSO_BORN 2577 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2578 2579 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2580 2581 matrix_template = self.matrix_template 2582 if self.opt['export_format']=='standalone_msP' : 2583 matrix_template = 'matrix_standalone_msP_v4.inc' 2584 elif self.opt['export_format']=='standalone_msF': 2585 matrix_template = 'matrix_standalone_msF_v4.inc' 2586 elif self.opt['export_format']=='matchbox': 2587 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2588 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2589 2590 if len(split_orders)>0: 2591 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2592 logger.debug("Warning: The export format %s is not "+\ 2593 " available for individual ME evaluation of given coupl. orders."+\ 2594 " Only the total ME will be computed.", self.opt['export_format']) 2595 elif self.opt['export_format'] in ['madloop_matchbox']: 2596 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2597 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2598 else: 2599 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2600 2601 replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2602 replace_dict['template_file2'] = pjoin(_file_path, \ 2603 'iolibs/template_files/split_orders_helping_functions.inc') 2604 if write and writer: 2605 path = replace_dict['template_file'] 2606 content = open(path).read() 2607 content = content % replace_dict 2608 # Write the file 2609 writer.writelines(content) 2610 # Add the helper functions. 
2611 if len(split_orders)>0: 2612 content = '\n' + open(replace_dict['template_file2'])\ 2613 .read()%replace_dict 2614 writer.writelines(content) 2615 return len([call for call in helas_calls if call.find('#') != 0]) 2616 else: 2617 replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0]) 2618 return replace_dict # for subclass update
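The final step is a plain '%' substitution of replace_dict into the chosen matrix_*.inc template. A minimal sketch of that mechanism with a made-up three-line template (not the real template file; the HELAS call is only illustrative):

    # Minimal sketch of the template substitution done at the end of
    # write_matrix_element_v4: the .inc template is plain text with
    # %(key)s placeholders filled from replace_dict.
    template = (
        "C %(info_lines)s\n"
        "      INTEGER NGRAPHS\n"
        "      PARAMETER (NGRAPHS=%(ngraphs)d)\n"
        "%(helas_calls)s\n"
    )
    replace_dict = {'info_lines': 'generated by MadGraph5_aMC@NLO',
                    'ngraphs': 3,
                    'helas_calls': '      CALL VXXXXX(P(0,1),ZERO,NHEL(1),-1*IC(1),W(1,1))'}
    print(template % replace_dict)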
2619
2620 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2621 nincoming, proc_prefix, writer):
2622 """ Write out a more advanced version of the check_sa drivers that 2623 individually returns the matrix element for each contributing squared 2624 order.""" 2625 2626 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2627 'template_files', 'check_sa_splitOrders.f')).read() 2628 printout_sq_orders=[] 2629 for i, squared_order in enumerate(squared_orders): 2630 sq_orders=[] 2631 for j, sqo in enumerate(squared_order): 2632 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2633 printout_sq_orders.append(\ 2634 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2635 %(i+1,' '.join(sq_orders),i+1)) 2636 printout_sq_orders='\n'.join(printout_sq_orders) 2637 replace_dict = {'printout_sqorders':printout_sq_orders, 2638 'nSplitOrders':len(squared_orders), 2639 'nexternal':nexternal, 2640 'nincoming':nincoming, 2641 'proc_prefix':proc_prefix} 2642 2643 if writer: 2644 writer.writelines(check_sa_content % replace_dict) 2645 else: 2646 return replace_dict
2647
2648 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2649      """Class to take care of exporting a set of matrix elements for the MatchBox 
2650      code in the case of Born-only routines.""" 
2651   
2652      default_opt = {'clean': False, 'complex_mass':False, 
2653                     'export_format':'matchbox', 'mp': False, 
2654                     'sa_symmetry': True} 
2655   
2656      # specific template for the Born matrix element 
2657   
2658   
2659      matrix_template = "matrix_standalone_matchbox.inc" 
2660   
2661      @staticmethod 
2662 - def get_color_string_lines(matrix_element):
2663 """Return the color matrix definition lines for this matrix element. Split 2664 rows in chunks of size n.""" 2665 2666 if not matrix_element.get('color_matrix'): 2667 return "\n".join(["out = 1"]) 2668 2669 #start the real work 2670 color_denominators = matrix_element.get('color_matrix').\ 2671 get_line_denominators() 2672 matrix_strings = [] 2673 my_cs = color.ColorString() 2674 for i_color in range(len(color_denominators)): 2675 # Then write the numerators for the matrix elements 2676 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2677 t_str=repr(my_cs) 2678 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2679 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2680 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2681 all_matches = t_match.findall(t_str) 2682 output = {} 2683 arg=[] 2684 for match in all_matches: 2685 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2686 if ctype in ['ColorOne' ]: 2687 continue 2688 if ctype not in ['T', 'Tr' ]: 2689 raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype) 2690 tmparg += ['0'] 2691 arg +=tmparg 2692 for j, v in enumerate(arg): 2693 output[(i_color,j)] = v 2694 2695 for key in output: 2696 if matrix_strings == []: 2697 #first entry 2698 matrix_strings.append(""" 2699 if (in1.eq.%s.and.in2.eq.%s)then 2700 out = %s 2701 """ % (key[0], key[1], output[key])) 2702 else: 2703 #not first entry 2704 matrix_strings.append(""" 2705 elseif (in1.eq.%s.and.in2.eq.%s)then 2706 out = %s 2707 """ % (key[0], key[1], output[key])) 2708 if len(matrix_strings): 2709 matrix_strings.append(" else \n out = - 1 \n endif") 2710 else: 2711 return "\n out = - 1 \n " 2712 return "\n".join(matrix_strings)
2713
2714 - def make(self,*args,**opts):
2715 pass
2716
2717 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2718 JAMP_formatLC=None):
2719 2720 """Adding leading color part of the colorflow""" 2721 2722 if not JAMP_formatLC: 2723 JAMP_formatLC= "LN%s" % JAMP_format 2724 2725 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2726 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2727 col_amps=col_amps.get_color_amplitudes() 2728 elif(isinstance(col_amps,list)): 2729 if(col_amps and isinstance(col_amps[0],list)): 2730 col_amps=col_amps 2731 else: 2732 raise MadGraph5Error(error_msg % 'col_amps') 2733 else: 2734 raise MadGraph5Error(error_msg % 'col_amps') 2735 2736 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2737 JAMP_format=JAMP_format, 2738 AMP_format=AMP_format, 2739 split=-1) 2740 2741 2742 # Filter the col_ampls to generate only those without any 1/NC terms 2743 2744 LC_col_amps = [] 2745 for coeff_list in col_amps: 2746 to_add = [] 2747 for (coefficient, amp_number) in coeff_list: 2748 if coefficient[3]==0: 2749 to_add.append( (coefficient, amp_number) ) 2750 LC_col_amps.append(to_add) 2751 2752 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2753 JAMP_format=JAMP_formatLC, 2754 AMP_format=AMP_format, 2755 split=-1) 2756 2757 return text
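The leading-colour pass keeps only the contributions whose coefficient carries no 1/Nc suppression, i.e. whose stored Nc power (coefficient[3]) is zero. A toy example of that filter, with made-up coefficient tuples:

    # Toy color-amplitude list: (ff_number, frac, is_imaginary, Nc_power), amp_number
    col_amps = [[((1, 1, False, 0), 1), ((1, 1, False, -2), 2)]]
    LC_col_amps = [[(coeff, amp) for coeff, amp in flow if coeff[3] == 0]
                   for flow in col_amps]
    # -> [[((1, 1, False, 0), 1)]] : the 1/Nc**2 term feeding AMP(2) is dropped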
2758
2759 2760 2761 2762 #=============================================================================== 2763 # ProcessExporterFortranMW 2764 #=============================================================================== 2765 -class ProcessExporterFortranMW(ProcessExporterFortran):
2766 """Class to take care of exporting a set of matrix elements to 2767 MadGraph v4 - MadWeight format.""" 2768 2769 matrix_file="matrix_standalone_v4.inc" 2770
2771 - def copy_template(self, model):
2772 """Additional actions needed for setup of Template 2773 """ 2774 2775 super(ProcessExporterFortranMW, self).copy_template(model) 2776 2777 # Add the MW specific file 2778 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2779 pjoin(self.dir_path, 'Source','MadWeight'), True) 2780 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2781 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2782 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2783 pjoin(self.dir_path, 'Source','setrun.f')) 2784 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2785 pjoin(self.dir_path, 'Source','run.inc')) 2786 # File created from Template (Different in some child class) 2787 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2788 self.write_run_config_file(writers.FortranWriter(filename)) 2789 2790 try: 2791 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2792 stdout = os.open(os.devnull, os.O_RDWR), 2793 stderr = os.open(os.devnull, os.O_RDWR), 2794 cwd=self.dir_path) 2795 except OSError: 2796 # Probably madweight already called 2797 pass 2798 2799 # Copy the different python file in the Template 2800 self.copy_python_file() 2801 # create the appropriate cuts.f 2802 self.get_mw_cuts_version() 2803 2804 # add the makefile in Source directory 2805 filename = os.path.join(self.dir_path,'Source','makefile') 2806 self.write_source_makefile(writers.FortranWriter(filename))
2807 2808 2809 2810 2811 #=========================================================================== 2812 # convert_model 2813 #===========================================================================
2814 - def convert_model(self, model, wanted_lorentz = [], 2815 wanted_couplings = []):
2816 2817 super(ProcessExporterFortranMW,self).convert_model(model, 2818 wanted_lorentz, wanted_couplings) 2819 2820 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2821 try: 2822 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2823 except OSError as error: 2824 pass 2825 model_path = model.get('modelpath') 2826 # This is not safe if there is a '##' or '-' in the path. 2827 shutil.copytree(model_path, 2828 pjoin(self.dir_path,'bin','internal','ufomodel'), 2829 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2830 if hasattr(model, 'restrict_card'): 2831 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2832 'restrict_default.dat') 2833 if isinstance(model.restrict_card, check_param_card.ParamCard): 2834 model.restrict_card.write(out_path) 2835 else: 2836 files.cp(model.restrict_card, out_path)
2837 2838 #=========================================================================== 2839 # generate_subprocess_directory 2840 #===========================================================================
2841 - def copy_python_file(self):
2842 """copy the python file require for the Template""" 2843 2844 # madevent interface 2845 cp(_file_path+'/interface/madweight_interface.py', 2846 self.dir_path+'/bin/internal/madweight_interface.py') 2847 cp(_file_path+'/interface/extended_cmd.py', 2848 self.dir_path+'/bin/internal/extended_cmd.py') 2849 cp(_file_path+'/interface/common_run_interface.py', 2850 self.dir_path+'/bin/internal/common_run_interface.py') 2851 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2852 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2853 cp(_file_path+'/iolibs/save_load_object.py', 2854 self.dir_path+'/bin/internal/save_load_object.py') 2855 cp(_file_path+'/madevent/gen_crossxhtml.py', 2856 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2857 cp(_file_path+'/madevent/sum_html.py', 2858 self.dir_path+'/bin/internal/sum_html.py') 2859 cp(_file_path+'/various/FO_analyse_card.py', 2860 self.dir_path+'/bin/internal/FO_analyse_card.py') 2861 cp(_file_path+'/iolibs/file_writers.py', 2862 self.dir_path+'/bin/internal/file_writers.py') 2863 #model file 2864 cp(_file_path+'../models/check_param_card.py', 2865 self.dir_path+'/bin/internal/check_param_card.py') 2866 2867 #madevent file 2868 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2869 cp(_file_path+'/various/lhe_parser.py', 2870 self.dir_path+'/bin/internal/lhe_parser.py') 2871 2872 cp(_file_path+'/various/banner.py', 2873 self.dir_path+'/bin/internal/banner.py') 2874 cp(_file_path+'/various/shower_card.py', 2875 self.dir_path+'/bin/internal/shower_card.py') 2876 cp(_file_path+'/various/cluster.py', 2877 self.dir_path+'/bin/internal/cluster.py') 2878 2879 # logging configuration 2880 cp(_file_path+'/interface/.mg5_logging.conf', 2881 self.dir_path+'/bin/internal/me5_logging.conf') 2882 cp(_file_path+'/interface/coloring_logging.py', 2883 self.dir_path+'/bin/internal/coloring_logging.py')
2884 2885 2886 #=========================================================================== 2887 # Change the version of cuts.f to the one compatible with MW 2888 #===========================================================================
2889 - def get_mw_cuts_version(self, outpath=None):
2890 """create the appropriate cuts.f 2891 This is based on the one associated to ME output but: 2892 1) No clustering (=> remove initcluster/setclscales) 2893 2) Adding the definition of cut_bw at the file. 2894 """ 2895 2896 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2897 2898 text = StringIO() 2899 #1) remove all dependencies in ickkw >1: 2900 nb_if = 0 2901 for line in template: 2902 if 'if(xqcut.gt.0d0' in line: 2903 nb_if = 1 2904 if nb_if == 0: 2905 text.write(line) 2906 continue 2907 if re.search(r'if\(.*\)\s*then', line): 2908 nb_if += 1 2909 elif 'endif' in line: 2910 nb_if -= 1 2911 2912 #2) add fake cut_bw (have to put the true one later) 2913 text.write(""" 2914 logical function cut_bw(p) 2915 include 'madweight_param.inc' 2916 double precision p(*) 2917 if (bw_cut) then 2918 cut_bw = .true. 2919 else 2920 stop 1 2921 endif 2922 return 2923 end 2924 """) 2925 2926 final = text.getvalue() 2927 #3) remove the call to initcluster: 2928 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2929 template = template.replace('genps.inc', 'maxparticles.inc') 2930 #Now we can write it 2931 if not outpath: 2932 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2933 elif isinstance(outpath, str): 2934 fsock = open(outpath, 'w') 2935 else: 2936 fsock = outpath 2937 fsock.write(template)
2938 2939 2940 2941 #=========================================================================== 2942 # Make the Helas and Model directories for Standalone directory 2943 #===========================================================================
2944 - def make(self):
2945 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2946 everything for running madweight 2947 """ 2948 2949 source_dir = os.path.join(self.dir_path, "Source") 2950 logger.info("Running make for Helas") 2951 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2952 logger.info("Running make for Model") 2953 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2954 logger.info("Running make for PDF") 2955 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2956 logger.info("Running make for CERNLIB") 2957 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2958 logger.info("Running make for GENERIC") 2959 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2960 logger.info("Running make for blocks") 2961 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2962 logger.info("Running make for tools") 2963 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2964 2965 #=========================================================================== 2966 # Create proc_card_mg5.dat for MadWeight directory 2967 #===========================================================================
2968 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2969 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2970 2971 compiler = {'fortran': mg5options['fortran_compiler'], 2972 'cpp': mg5options['cpp_compiler'], 2973 'f2py': mg5options['f2py_compiler']} 2974 2975 2976 2977 #proc_charac 2978 self.create_proc_charac() 2979 2980 # Write maxparticles.inc based on max of ME's/subprocess groups 2981 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2982 self.write_maxparticles_file(writers.FortranWriter(filename), 2983 matrix_elements) 2984 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2985 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2986 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2987 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2988 2989 self.set_compiler(compiler) 2990 self.make() 2991 2992 # Write command history as proc_card_mg5 2993 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2994 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2995 history.write(output_file) 2996 2997 ProcessExporterFortran.finalize(self, matrix_elements, 2998 history, mg5options, flaglist)
2999 3000 3001 3002 #=========================================================================== 3003 # create the run_card for MW 3004 #===========================================================================
3005 - def create_run_card(self, matrix_elements, history):
3006 """ """ 3007 3008 run_card = banner_mod.RunCard() 3009 3010 # pass to default for MW 3011 run_card["run_tag"] = "\'not_use\'" 3012 run_card["fixed_ren_scale"] = "T" 3013 run_card["fixed_fac_scale"] = "T" 3014 run_card.remove_all_cut() 3015 3016 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 3017 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3018 python_template=True) 3019 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 3020 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3021 python_template=True)
3022 3023 #=========================================================================== 3024 # export model files 3025 #===========================================================================
3026 - def export_model_files(self, model_path):
3027 """export the model dependent files for V4 model""" 3028 3029 super(ProcessExporterFortranMW,self).export_model_files(model_path) 3030 # Add the routine update_as_param in v4 model 3031 # This is a function created in the UFO 3032 text=""" 3033 subroutine update_as_param() 3034 call setpara('param_card.dat',.false.) 3035 return 3036 end 3037 """ 3038 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3039 ff.write(text) 3040 ff.close() 3041 3042 # Modify setrun.f 3043 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 3044 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3045 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 3046 fsock.write(text) 3047 fsock.close() 3048 3049 # Modify initialization.f 3050 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 3051 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 3052 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 3053 fsock.write(text) 3054 fsock.close() 3055 3056 3057 self.make_model_symbolic_link()
3058 3059 #=========================================================================== 3060 # generate_subprocess_directory 3061 #===========================================================================
3062 - def generate_subprocess_directory(self, matrix_element, 3063 fortran_model,number):
3064 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 3065 including the necessary matrix.f and nexternal.inc files""" 3066 3067 cwd = os.getcwd() 3068 # Create the directory PN_xx_xxxxx in the specified path 3069 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 3070 "P%s" % matrix_element.get('processes')[0].shell_string()) 3071 3072 try: 3073 os.mkdir(dirpath) 3074 except os.error as error: 3075 logger.warning(error.strerror + " " + dirpath) 3076 3077 #try: 3078 # os.chdir(dirpath) 3079 #except os.error: 3080 # logger.error('Could not cd to directory %s' % dirpath) 3081 # return 0 3082 3083 logger.info('Creating files in directory %s' % dirpath) 3084 3085 # Extract number of external particles 3086 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3087 3088 # Create the matrix.f file and the nexternal.inc file 3089 filename = pjoin(dirpath,'matrix.f') 3090 calls,ncolor = self.write_matrix_element_v4( 3091 writers.FortranWriter(filename), 3092 matrix_element, 3093 fortran_model) 3094 3095 filename = pjoin(dirpath, 'auto_dsig.f') 3096 self.write_auto_dsig_file(writers.FortranWriter(filename), 3097 matrix_element) 3098 3099 filename = pjoin(dirpath, 'configs.inc') 3100 mapconfigs, s_and_t_channels = self.write_configs_file(\ 3101 writers.FortranWriter(filename), 3102 matrix_element) 3103 3104 filename = pjoin(dirpath, 'nexternal.inc') 3105 self.write_nexternal_file(writers.FortranWriter(filename), 3106 nexternal, ninitial) 3107 3108 filename = pjoin(dirpath, 'leshouche.inc') 3109 self.write_leshouche_file(writers.FortranWriter(filename), 3110 matrix_element) 3111 3112 filename = pjoin(dirpath, 'props.inc') 3113 self.write_props_file(writers.FortranWriter(filename), 3114 matrix_element, 3115 s_and_t_channels) 3116 3117 filename = pjoin(dirpath, 'pmass.inc') 3118 self.write_pmass_file(writers.FortranWriter(filename), 3119 matrix_element) 3120 3121 filename = pjoin(dirpath, 'ngraphs.inc') 3122 self.write_ngraphs_file(writers.FortranWriter(filename), 3123 len(matrix_element.get_all_amplitudes())) 3124 3125 filename = pjoin(dirpath, 'maxamps.inc') 3126 self.write_maxamps_file(writers.FortranWriter(filename), 3127 len(matrix_element.get('diagrams')), 3128 ncolor, 3129 len(matrix_element.get('processes')), 3130 1) 3131 3132 filename = pjoin(dirpath, 'phasespace.inc') 3133 self.write_phasespace_file(writers.FortranWriter(filename), 3134 len(matrix_element.get('diagrams')), 3135 ) 3136 3137 # Generate diagrams 3138 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 3139 filename = pjoin(dirpath, "matrix.ps") 3140 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3141 get('diagrams'), 3142 filename, 3143 model=matrix_element.get('processes')[0].\ 3144 get('model'), 3145 amplitude='') 3146 logger.info("Generating Feynman diagrams for " + \ 3147 matrix_element.get('processes')[0].nice_string()) 3148 plot.draw() 3149 3150 #import genps.inc and maxconfigs.inc into Subprocesses 3151 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 3152 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 3153 3154 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 3155 3156 for file in linkfiles: 3157 ln('../%s' % file, starting_dir=cwd) 3158 3159 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 3160 ln('leshouche.inc', 
'../../Source', log=False, cwd=dirpath) 3161 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 3162 ln('phasespace.inc', '../', log=True, cwd=dirpath) 3163 # Return to original PWD 3164 #os.chdir(cwd) 3165 3166 if not calls: 3167 calls = 0 3168 return calls
3169 3170 #=========================================================================== 3171 # write_matrix_element_v4 3172 #===========================================================================
3173 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3174 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3175 3176 if not matrix_element.get('processes') or \ 3177 not matrix_element.get('diagrams'): 3178 return 0 3179 3180 if writer: 3181 if not isinstance(writer, writers.FortranWriter): 3182 raise writers.FortranWriter.FortranWriterError(\ 3183 "writer not FortranWriter") 3184 3185 # Set lowercase/uppercase Fortran code 3186 writers.FortranWriter.downcase = False 3187 3188 replace_dict = {} 3189 3190 # Extract version number and date from VERSION file 3191 info_lines = self.get_mg5_info_lines() 3192 replace_dict['info_lines'] = info_lines 3193 3194 # Extract process info lines 3195 process_lines = self.get_process_info_lines(matrix_element) 3196 replace_dict['process_lines'] = process_lines 3197 3198 # Set proc_id 3199 replace_dict['proc_id'] = proc_id 3200 3201 # Extract number of external particles 3202 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3203 replace_dict['nexternal'] = nexternal 3204 3205 # Extract ncomb 3206 ncomb = matrix_element.get_helicity_combinations() 3207 replace_dict['ncomb'] = ncomb 3208 3209 # Extract helicity lines 3210 helicity_lines = self.get_helicity_lines(matrix_element) 3211 replace_dict['helicity_lines'] = helicity_lines 3212 3213 # Extract overall denominator 3214 # Averaging initial state color, spin, and identical FS particles 3215 den_factor_line = self.get_den_factor_line(matrix_element) 3216 replace_dict['den_factor_line'] = den_factor_line 3217 3218 # Extract ngraphs 3219 ngraphs = matrix_element.get_number_of_amplitudes() 3220 replace_dict['ngraphs'] = ngraphs 3221 3222 # Extract nwavefuncs 3223 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3224 replace_dict['nwavefuncs'] = nwavefuncs 3225 3226 # Extract ncolor 3227 ncolor = max(1, len(matrix_element.get('color_basis'))) 3228 replace_dict['ncolor'] = ncolor 3229 3230 # Extract color data lines 3231 color_data_lines = self.get_color_data_lines(matrix_element) 3232 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3233 3234 # Extract helas calls 3235 helas_calls = fortran_model.get_matrix_element_calls(\ 3236 matrix_element) 3237 3238 replace_dict['helas_calls'] = "\n".join(helas_calls) 3239 3240 # Extract JAMP lines 3241 jamp_lines = self.get_JAMP_lines(matrix_element) 3242 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3243 3244 replace_dict['template_file'] = os.path.join(_file_path, \ 3245 'iolibs/template_files/%s' % self.matrix_file) 3246 replace_dict['template_file2'] = '' 3247 3248 if writer: 3249 file = open(replace_dict['template_file']).read() 3250 file = file % replace_dict 3251 # Write the file 3252 writer.writelines(file) 3253 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3254 else: 3255 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3256 3257 #=========================================================================== 3258 # write_source_makefile 3259 #===========================================================================
3260 - def write_source_makefile(self, writer):
3261 """Write the nexternal.inc file for madweight""" 3262 3263 3264 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3265 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3266 text = open(path).read() % {'libraries': set_of_lib} 3267 writer.write(text) 3268 3269 return True
3270
3271 - def write_phasespace_file(self, writer, nb_diag):
3272 """ """ 3273 3274 template = """ include 'maxparticles.inc' 3275 integer max_branches 3276 parameter (max_branches=max_particles-1) 3277 integer max_configs 3278 parameter (max_configs=%(nb_diag)s) 3279 3280 c channel position 3281 integer config_pos,perm_pos 3282 common /to_config/config_pos,perm_pos 3283 3284 """ 3285 3286 writer.write(template % {'nb_diag': nb_diag})
3287 3288 3289 #=========================================================================== 3290 # write_auto_dsig_file 3291 #===========================================================================
3292 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3293 """Write the auto_dsig.f file for the differential cross section 3294 calculation, includes pdf call information (MadWeight format)""" 3295 3296 if not matrix_element.get('processes') or \ 3297 not matrix_element.get('diagrams'): 3298 return 0 3299 3300 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3301 3302 if ninitial < 1 or ninitial > 2: 3303 raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""") 3304 3305 replace_dict = {} 3306 3307 # Extract version number and date from VERSION file 3308 info_lines = self.get_mg5_info_lines() 3309 replace_dict['info_lines'] = info_lines 3310 3311 # Extract process info lines 3312 process_lines = self.get_process_info_lines(matrix_element) 3313 replace_dict['process_lines'] = process_lines 3314 3315 # Set proc_id 3316 replace_dict['proc_id'] = proc_id 3317 replace_dict['numproc'] = 1 3318 3319 # Set dsig_line 3320 if ninitial == 1: 3321 # No conversion, since result of decay should be given in GeV 3322 dsig_line = "pd(0)*dsiguu" 3323 else: 3324 # Convert result (in GeV) to pb 3325 dsig_line = "pd(0)*conv*dsiguu" 3326 3327 replace_dict['dsig_line'] = dsig_line 3328 3329 # Extract pdf lines 3330 pdf_vars, pdf_data, pdf_lines = \ 3331 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3332 replace_dict['pdf_vars'] = pdf_vars 3333 replace_dict['pdf_data'] = pdf_data 3334 replace_dict['pdf_lines'] = pdf_lines 3335 3336 # Lines that differ between subprocess group and regular 3337 if proc_id: 3338 replace_dict['numproc'] = int(proc_id) 3339 replace_dict['passcuts_begin'] = "" 3340 replace_dict['passcuts_end'] = "" 3341 # Set lines for subprocess group version 3342 # Set define_iconfigs_lines 3343 replace_dict['define_subdiag_lines'] = \ 3344 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3345 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3346 else: 3347 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3348 replace_dict['passcuts_end'] = "ENDIF" 3349 replace_dict['define_subdiag_lines'] = "" 3350 3351 if writer: 3352 file = open(os.path.join(_file_path, \ 3353 'iolibs/template_files/auto_dsig_mw.inc')).read() 3354 3355 file = file % replace_dict 3356 # Write the file 3357 writer.writelines(file) 3358 else: 3359 return replace_dict
3360 #=========================================================================== 3361 # write_configs_file 3362 #===========================================================================
3363 - def write_configs_file(self, writer, matrix_element):
3364 """Write the configs.inc file for MadEvent""" 3365 3366 # Extract number of external particles 3367 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3368 3369 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3370 mapconfigs = [c[0] for c in configs] 3371 model = matrix_element.get('processes')[0].get('model') 3372 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3373 [[c[1]] for c in configs], 3374 mapconfigs, 3375 nexternal, ninitial,matrix_element, model)
3376 3377 #=========================================================================== 3378 # write_run_configs_file 3379 #===========================================================================
3380 - def write_run_config_file(self, writer):
3381 """Write the run_configs.inc file for MadWeight""" 3382 3383 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3384 text = open(path).read() % {'chanperjob':'5'} 3385 writer.write(text) 3386 return True
3387 3388 #=========================================================================== 3389 # write_configs_file_from_diagrams 3390 #===========================================================================
3391 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 3392 nexternal, ninitial, matrix_element, model):
3393 """Write the actual configs.inc file. 3394 3395 configs is the diagrams corresponding to configs (each 3396 diagrams is a list of corresponding diagrams for all 3397 subprocesses, with None if there is no corresponding diagrams 3398 for a given process). 3399 mapconfigs gives the diagram number for each config. 3400 3401 For s-channels, we need to output one PDG for each subprocess in 3402 the subprocess group, in order to be able to pick the right 3403 one for multiprocesses.""" 3404 3405 lines = [] 3406 3407 particle_dict = matrix_element.get('processes')[0].get('model').\ 3408 get('particle_dict') 3409 3410 s_and_t_channels = [] 3411 3412 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 3413 for config in configs if [d for d in config if d][0].\ 3414 get_vertex_leg_numbers()!=[]] 3415 3416 minvert = min(vert_list) if vert_list!=[] else 0 3417 # Number of subprocesses 3418 nsubprocs = len(configs[0]) 3419 3420 nconfigs = 0 3421 3422 new_pdg = model.get_first_non_pdg() 3423 3424 for iconfig, helas_diags in enumerate(configs): 3425 if any([vert > minvert for vert in 3426 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 3427 # Only 3-vertices allowed in configs.inc 3428 continue 3429 nconfigs += 1 3430 3431 # Need s- and t-channels for all subprocesses, including 3432 # those that don't contribute to this config 3433 empty_verts = [] 3434 stchannels = [] 3435 for h in helas_diags: 3436 if h: 3437 # get_s_and_t_channels gives vertices starting from 3438 # final state external particles and working inwards 3439 stchannels.append(h.get('amplitudes')[0].\ 3440 get_s_and_t_channels(ninitial,model,new_pdg)) 3441 else: 3442 stchannels.append((empty_verts, None)) 3443 3444 # For t-channels, just need the first non-empty one 3445 tchannels = [t for s,t in stchannels if t != None][0] 3446 3447 # For s_and_t_channels (to be used later) use only first config 3448 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 3449 tchannels]) 3450 3451 # Make sure empty_verts is same length as real vertices 3452 if any([s for s,t in stchannels]): 3453 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3454 3455 # Reorganize s-channel vertices to get a list of all 3456 # subprocesses for each vertex 3457 schannels = list(zip(*[s for s,t in stchannels])) 3458 else: 3459 schannels = [] 3460 3461 allchannels = schannels 3462 if len(tchannels) > 1: 3463 # Write out tchannels only if there are any non-trivial ones 3464 allchannels = schannels + tchannels 3465 3466 # Write out propagators for s-channel and t-channel vertices 3467 3468 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3469 # Correspondance between the config and the diagram = amp2 3470 lines.append("* %d %d " % (nconfigs, 3471 mapconfigs[iconfig])) 3472 3473 for verts in allchannels: 3474 if verts in schannels: 3475 vert = [v for v in verts if v][0] 3476 else: 3477 vert = verts 3478 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3479 last_leg = vert.get('legs')[-1] 3480 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3481 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3482 # (last_leg.get('number'), nconfigs, len(daughters), 3483 # ",".join([str(d) for d in daughters]))) 3484 3485 if last_leg.get('id') == 21 and 21 not in particle_dict: 3486 # Fake propagator used in multiparticle vertices 3487 mass = 'zero' 3488 width = 'zero' 3489 pow_part = 0 3490 else: 3491 if (last_leg.get('id')!=7): 3492 particle = 
particle_dict[last_leg.get('id')] 3493 # Get mass 3494 mass = particle.get('mass') 3495 # Get width 3496 width = particle.get('width') 3497 else : # fake propagator used in multiparticle vertices 3498 mass= 'zero' 3499 width= 'zero' 3500 3501 line=line+" "+mass+" "+width+" " 3502 3503 if verts in schannels: 3504 pdgs = [] 3505 for v in verts: 3506 if v: 3507 pdgs.append(v.get('legs')[-1].get('id')) 3508 else: 3509 pdgs.append(0) 3510 lines.append(line+" S "+str(last_leg.get('id'))) 3511 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3512 # (last_leg.get('number'), nconfigs, nsubprocs, 3513 # ",".join([str(d) for d in pdgs]))) 3514 # lines.append("data tprid(%d,%d)/0/" % \ 3515 # (last_leg.get('number'), nconfigs)) 3516 elif verts in tchannels[:-1]: 3517 lines.append(line+" T "+str(last_leg.get('id'))) 3518 # lines.append("data tprid(%d,%d)/%d/" % \ 3519 # (last_leg.get('number'), nconfigs, 3520 # abs(last_leg.get('id')))) 3521 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3522 # (last_leg.get('number'), nconfigs, nsubprocs, 3523 # ",".join(['0'] * nsubprocs))) 3524 3525 # Write out number of configs 3526 # lines.append("# Number of configs") 3527 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3528 lines.append(" * ") # a line with just a star indicates this is the end of file 3529 # Write the file 3530 writer.writelines(lines) 3531 3532 return s_and_t_channels
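    # Schematic of the configs.inc content produced above (illustrative values
    # only; a hypothetical s-channel Z propagator merging legs 3 and 4 into
    # internal leg -1 is assumed).  Each config starts with "* <config> <diagram>",
    # each propagator line carries "<leg> <daughter1> <daughter2> <mass> <width> <S|T> <pdg>",
    # and a lone "*" closes the file:
    #
    #     * 1 1
    #     -1 3 4 MZ WZ  S 23
    #      *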
3533
3534 3535 3536 #=============================================================================== 3537 # ProcessExporterFortranME 3538 #=============================================================================== 3539 -class ProcessExporterFortranME(ProcessExporterFortran):
3540 """Class to take care of exporting a set of matrix elements to 3541 MadEvent format.""" 3542 3543 matrix_file = "matrix_madevent_v4.inc" 3544 done_warning_tchannel = False 3545 3546 # helper function for customise helas writter 3547 @staticmethod
3548 - def custom_helas_call(call, arg):
3549 if arg['mass'] == '%(M)s,%(W)s,': 3550 arg['mass'] = '%(M)s, fk_%(W)s,' 3551 elif '%(W)s' in arg['mass']: 3552 raise Exception 3553 return call, arg
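    # A small sketch of the substitution performed above (the 'call' string and any
    # extra keys of 'arg' are hypothetical):
    #
    #     call, arg = ProcessExporterFortranME.custom_helas_call(
    #         'CALL FFV1_0(...)', {'mass': '%(M)s,%(W)s,'})
    #     # arg['mass'] is now '%(M)s, fk_%(W)s,' so the template picks up the
    #     # regulated width fk_W instead of the raw width W; any other occurrence
    #     # of '%(W)s' in arg['mass'] raises an Exception.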
3554
3555 - def copy_template(self, model):
3556 """Additional actions needed for setup of Template 3557 """ 3558 3559 super(ProcessExporterFortranME, self).copy_template(model) 3560 3561 # File created from Template (Different in some child class) 3562 filename = pjoin(self.dir_path,'Source','run_config.inc') 3563 self.write_run_config_file(writers.FortranWriter(filename)) 3564 3565 # The next file are model dependant (due to SLAH convention) 3566 self.model_name = model.get('name') 3567 # Add the symmetry.f 3568 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3569 self.write_symmetry(writers.FortranWriter(filename)) 3570 # 3571 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3572 self.write_addmothers(writers.FortranWriter(filename)) 3573 # Copy the different python file in the Template 3574 self.copy_python_file()
3575 3576 3577 3578 3579 3580 3581 #=========================================================================== 3582 # copy_python_file 3583 #===========================================================================
3584 - def copy_python_file(self):
3585 """copy the python file require for the Template""" 3586 3587 # madevent interface 3588 cp(_file_path+'/interface/madevent_interface.py', 3589 self.dir_path+'/bin/internal/madevent_interface.py') 3590 cp(_file_path+'/interface/extended_cmd.py', 3591 self.dir_path+'/bin/internal/extended_cmd.py') 3592 cp(_file_path+'/interface/common_run_interface.py', 3593 self.dir_path+'/bin/internal/common_run_interface.py') 3594 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3595 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3596 cp(_file_path+'/iolibs/save_load_object.py', 3597 self.dir_path+'/bin/internal/save_load_object.py') 3598 cp(_file_path+'/iolibs/file_writers.py', 3599 self.dir_path+'/bin/internal/file_writers.py') 3600 #model file 3601 cp(_file_path+'../models/check_param_card.py', 3602 self.dir_path+'/bin/internal/check_param_card.py') 3603 3604 #copy all the file present in madevent directory 3605 for name in os.listdir(pjoin(_file_path, 'madevent')): 3606 if name not in ['__init__.py'] and name.endswith('.py'): 3607 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3608 3609 #madevent file 3610 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3611 cp(_file_path+'/various/lhe_parser.py', 3612 self.dir_path+'/bin/internal/lhe_parser.py') 3613 cp(_file_path+'/various/banner.py', 3614 self.dir_path+'/bin/internal/banner.py') 3615 cp(_file_path+'/various/histograms.py', 3616 self.dir_path+'/bin/internal/histograms.py') 3617 cp(_file_path+'/various/plot_djrs.py', 3618 self.dir_path+'/bin/internal/plot_djrs.py') 3619 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3620 3621 cp(_file_path+'/various/cluster.py', 3622 self.dir_path+'/bin/internal/cluster.py') 3623 cp(_file_path+'/madevent/combine_runs.py', 3624 self.dir_path+'/bin/internal/combine_runs.py') 3625 # logging configuration 3626 cp(_file_path+'/interface/.mg5_logging.conf', 3627 self.dir_path+'/bin/internal/me5_logging.conf') 3628 cp(_file_path+'/interface/coloring_logging.py', 3629 self.dir_path+'/bin/internal/coloring_logging.py') 3630 # shower card and FO_analyse_card. 3631 # Although not needed, it is imported by banner.py 3632 cp(_file_path+'/various/shower_card.py', 3633 self.dir_path+'/bin/internal/shower_card.py') 3634 cp(_file_path+'/various/FO_analyse_card.py', 3635 self.dir_path+'/bin/internal/FO_analyse_card.py')
3636 3637
3638 - def convert_model(self, model, wanted_lorentz = [], 3639 wanted_couplings = []):
3640 3641 super(ProcessExporterFortranME,self).convert_model(model, 3642 wanted_lorentz, wanted_couplings) 3643 3644 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3645 try: 3646 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3647 except OSError as error: 3648 pass 3649 model_path = model.get('modelpath') 3650 # This is not safe if there is a '##' or '-' in the path. 3651 shutil.copytree(model_path, 3652 pjoin(self.dir_path,'bin','internal','ufomodel'), 3653 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3654 if hasattr(model, 'restrict_card'): 3655 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3656 'restrict_default.dat') 3657 if isinstance(model.restrict_card, check_param_card.ParamCard): 3658 model.restrict_card.write(out_path) 3659 else: 3660 files.cp(model.restrict_card, out_path)
3661 3662 #=========================================================================== 3663 # export model files 3664 #===========================================================================
3665 - def export_model_files(self, model_path):
3666 """export the model dependent files""" 3667 3668 super(ProcessExporterFortranME,self).export_model_files(model_path) 3669 3670 # Add the routine update_as_param in v4 model 3671 # This is a function created in the UFO 3672 text=""" 3673 subroutine update_as_param() 3674 call setpara('param_card.dat',.false.) 3675 return 3676 end 3677 """ 3678 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3679 ff.write(text) 3680 ff.close() 3681 3682 # Add the symmetry.f 3683 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3684 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3685 3686 # Modify setrun.f 3687 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3688 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3689 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3690 fsock.write(text) 3691 fsock.close() 3692 3693 self.make_model_symbolic_link()
3694 3695 #=========================================================================== 3696 # generate_subprocess_directory 3697 #===========================================================================
3698 - def generate_subprocess_directory(self, matrix_element, 3699 fortran_model, 3700 me_number):
3701 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3702 including the necessary matrix.f and various helper files""" 3703 3704 cwd = os.getcwd() 3705 path = pjoin(self.dir_path, 'SubProcesses') 3706 3707 3708 if not self.model: 3709 self.model = matrix_element.get('processes')[0].get('model') 3710 3711 3712 3713 #os.chdir(path) 3714 # Create the directory PN_xx_xxxxx in the specified path 3715 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3716 try: 3717 os.mkdir(pjoin(path,subprocdir)) 3718 except os.error as error: 3719 logger.warning(error.strerror + " " + subprocdir) 3720 3721 #try: 3722 # os.chdir(subprocdir) 3723 #except os.error: 3724 # logger.error('Could not cd to directory %s' % subprocdir) 3725 # return 0 3726 3727 logger.info('Creating files in directory %s' % subprocdir) 3728 Ppath = pjoin(path, subprocdir) 3729 3730 # Extract number of external particles 3731 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3732 3733 # Add the driver.f 3734 ncomb = matrix_element.get_helicity_combinations() 3735 filename = pjoin(Ppath,'driver.f') 3736 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3737 v5=self.opt['v5_model']) 3738 3739 # Create the matrix.f file, auto_dsig.f file and all inc files 3740 filename = pjoin(Ppath, 'matrix.f') 3741 calls, ncolor = \ 3742 self.write_matrix_element_v4(writers.FortranWriter(filename), 3743 matrix_element, fortran_model, subproc_number = me_number) 3744 3745 filename = pjoin(Ppath, 'auto_dsig.f') 3746 self.write_auto_dsig_file(writers.FortranWriter(filename), 3747 matrix_element) 3748 3749 filename = pjoin(Ppath, 'configs.inc') 3750 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3751 writers.FortranWriter(filename), 3752 matrix_element) 3753 3754 filename = pjoin(Ppath, 'config_nqcd.inc') 3755 self.write_config_nqcd_file(writers.FortranWriter(filename), 3756 nqcd_list) 3757 3758 filename = pjoin(Ppath, 'config_subproc_map.inc') 3759 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3760 s_and_t_channels) 3761 3762 filename = pjoin(Ppath, 'coloramps.inc') 3763 self.write_coloramps_file(writers.FortranWriter(filename), 3764 mapconfigs, 3765 matrix_element) 3766 3767 filename = pjoin(Ppath, 'get_color.f') 3768 self.write_colors_file(writers.FortranWriter(filename), 3769 matrix_element) 3770 3771 filename = pjoin(Ppath, 'decayBW.inc') 3772 self.write_decayBW_file(writers.FortranWriter(filename), 3773 s_and_t_channels) 3774 3775 filename = pjoin(Ppath, 'dname.mg') 3776 self.write_dname_file(writers.FileWriter(filename), 3777 "P"+matrix_element.get('processes')[0].shell_string()) 3778 3779 filename = pjoin(Ppath, 'iproc.dat') 3780 self.write_iproc_file(writers.FortranWriter(filename), 3781 me_number) 3782 3783 filename = pjoin(Ppath, 'leshouche.inc') 3784 self.write_leshouche_file(writers.FortranWriter(filename), 3785 matrix_element) 3786 3787 filename = pjoin(Ppath, 'maxamps.inc') 3788 self.write_maxamps_file(writers.FortranWriter(filename), 3789 len(matrix_element.get('diagrams')), 3790 ncolor, 3791 len(matrix_element.get('processes')), 3792 1) 3793 3794 filename = pjoin(Ppath, 'mg.sym') 3795 self.write_mg_sym_file(writers.FortranWriter(filename), 3796 matrix_element) 3797 3798 filename = pjoin(Ppath, 'ncombs.inc') 3799 self.write_ncombs_file(writers.FortranWriter(filename), 3800 nexternal) 3801 3802 filename = pjoin(Ppath, 'nexternal.inc') 3803 self.write_nexternal_file(writers.FortranWriter(filename), 3804 nexternal, ninitial) 
3805 3806 filename = pjoin(Ppath, 'ngraphs.inc') 3807 self.write_ngraphs_file(writers.FortranWriter(filename), 3808 len(mapconfigs)) 3809 3810 3811 filename = pjoin(Ppath, 'pmass.inc') 3812 self.write_pmass_file(writers.FortranWriter(filename), 3813 matrix_element) 3814 3815 filename = pjoin(Ppath, 'props.inc') 3816 self.write_props_file(writers.FortranWriter(filename), 3817 matrix_element, 3818 s_and_t_channels) 3819 3820 # Find config symmetries and permutations 3821 symmetry, perms, ident_perms = \ 3822 diagram_symmetry.find_symmetry(matrix_element) 3823 3824 filename = pjoin(Ppath, 'symswap.inc') 3825 self.write_symswap_file(writers.FortranWriter(filename), 3826 ident_perms) 3827 3828 filename = pjoin(Ppath, 'symfact_orig.dat') 3829 self.write_symfact_file(open(filename, 'w'), symmetry) 3830 3831 # Generate diagrams 3832 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 3833 filename = pjoin(Ppath, "matrix.ps") 3834 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3835 get('diagrams'), 3836 filename, 3837 model=matrix_element.get('processes')[0].\ 3838 get('model'), 3839 amplitude=True) 3840 logger.info("Generating Feynman diagrams for " + \ 3841 matrix_element.get('processes')[0].nice_string()) 3842 plot.draw() 3843 3844 self.link_files_in_SubProcess(Ppath) 3845 3846 #import nexternal/leshouche in Source 3847 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3848 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3849 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3850 # Return to SubProcesses dir 3851 #os.chdir(os.path.pardir) 3852 3853 # Add subprocess to subproc.mg 3854 filename = pjoin(path, 'subproc.mg') 3855 files.append_to_file(filename, 3856 self.write_subproc, 3857 subprocdir) 3858 3859 # Return to original dir 3860 #os.chdir(cwd) 3861 3862 # Generate info page 3863 gen_infohtml.make_info_html(self.dir_path) 3864 3865 3866 if not calls: 3867 calls = 0 3868 return calls
3869 3870 link_Sub_files = ['addmothers.f', 3871 'cluster.f', 3872 'cluster.inc', 3873 'coupl.inc', 3874 'cuts.f', 3875 'cuts.inc', 3876 'genps.f', 3877 'genps.inc', 3878 'idenparts.f', 3879 'initcluster.f', 3880 'makefile', 3881 'message.inc', 3882 'myamp.f', 3883 'reweight.f', 3884 'run.inc', 3885 'maxconfigs.inc', 3886 'maxparticles.inc', 3887 'run_config.inc', 3888 'lhe_event_infos.inc', 3889 'setcuts.f', 3890 'setscales.f', 3891 'sudakov.inc', 3892 'symmetry.f', 3893 'unwgt.f', 3894 'dummy_fct.f' 3895 ] 3896 3910 3911
3912 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3913 """Finalize ME v4 directory by creating jpeg diagrams, html 3914 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3915 3916 if 'nojpeg' in flaglist: 3917 makejpg = False 3918 else: 3919 makejpg = True 3920 if 'online' in flaglist: 3921 online = True 3922 else: 3923 online = False 3924 3925 compiler = {'fortran': mg5options['fortran_compiler'], 3926 'cpp': mg5options['cpp_compiler'], 3927 'f2py': mg5options['f2py_compiler']} 3928 3929 # indicate that the output type is not grouped 3930 if not isinstance(self, ProcessExporterFortranMEGroup): 3931 self.proc_characteristic['grouped_matrix'] = False 3932 3933 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3934 3935 # set limitation linked to the model 3936 3937 3938 # indicate the PDG of all initial particle 3939 try: 3940 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3941 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3942 except AttributeError: 3943 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3944 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3945 self.proc_characteristic['pdg_initial1'] = pdgs1 3946 self.proc_characteristic['pdg_initial2'] = pdgs2 3947 3948 3949 modelname = self.opt['model'] 3950 if modelname == 'mssm' or modelname.startswith('mssm-'): 3951 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3952 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3953 check_param_card.convert_to_mg5card(param_card, mg5_param) 3954 check_param_card.check_valid_param_card(mg5_param) 3955 3956 # Add the combine_events.f modify param_card path/number of @X 3957 filename = pjoin(self.dir_path,'Source','combine_events.f') 3958 try: 3959 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3960 except AttributeError: 3961 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3962 nb_proc = len(set(nb_proc)) 3963 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3964 # Write maxconfigs.inc based on max of ME's/subprocess groups 3965 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3966 self.write_maxconfigs_file(writers.FortranWriter(filename), 3967 matrix_elements) 3968 3969 # Write maxparticles.inc based on max of ME's/subprocess groups 3970 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3971 self.write_maxparticles_file(writers.FortranWriter(filename), 3972 matrix_elements) 3973 3974 # Touch "done" file 3975 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3976 3977 # Check for compiler 3978 self.set_compiler(compiler) 3979 self.set_cpp_compiler(compiler['cpp']) 3980 3981 3982 old_pos = os.getcwd() 3983 subpath = pjoin(self.dir_path, 'SubProcesses') 3984 3985 P_dir_list = [proc for proc in os.listdir(subpath) 3986 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3987 3988 devnull = os.open(os.devnull, os.O_RDWR) 3989 # Convert the poscript in jpg files (if authorize) 3990 if makejpg: 3991 try: 3992 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3993 except Exception as error: 3994 pass 3995 3996 if misc.which('gs'): 3997 
logger.info("Generate jpeg diagrams") 3998 for Pdir in P_dir_list: 3999 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 4000 stdout = devnull, cwd=pjoin(subpath, Pdir)) 4001 4002 logger.info("Generate web pages") 4003 # Create the WebPage using perl script 4004 4005 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 4006 stdout = devnull,cwd=pjoin(self.dir_path)) 4007 4008 #os.chdir(os.path.pardir) 4009 4010 obj = gen_infohtml.make_info_html(self.dir_path) 4011 4012 if online: 4013 nb_channel = obj.rep_rule['nb_gen_diag'] 4014 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 4015 #add the information to proc_charac 4016 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 4017 4018 # Write command history as proc_card_mg5 4019 if os.path.isdir(pjoin(self.dir_path,'Cards')): 4020 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 4021 history.write(output_file) 4022 4023 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 4024 stdout = devnull) 4025 4026 #crate the proc_characteristic file 4027 self.create_proc_charac(matrix_elements, history) 4028 4029 # create the run_card 4030 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 4031 4032 # Run "make" to generate madevent.tar.gz file 4033 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 4034 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 4035 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 4036 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 4037 stdout = devnull, cwd=self.dir_path) 4038 4039 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 4040 stdout = devnull, cwd=self.dir_path)
4041 4042 4043 4044 4045 4046 4047 #return to the initial dir 4048 #os.chdir(old_pos) 4049 4050 #=========================================================================== 4051 # write_matrix_element_v4 4052 #===========================================================================
4053 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 4054 proc_id = "", config_map = [], subproc_number = ""):
4055 """Export a matrix element to a matrix.f file in MG4 madevent format""" 4056 4057 if not matrix_element.get('processes') or \ 4058 not matrix_element.get('diagrams'): 4059 return 0 4060 4061 if writer: 4062 if not isinstance(writer, writers.FortranWriter): 4063 raise writers.FortranWriter.FortranWriterError(\ 4064 "writer not FortranWriter") 4065 # Set lowercase/uppercase Fortran code 4066 writers.FortranWriter.downcase = False 4067 4068 # check if MLM/.../ is supported for this matrix-element and update associate flag 4069 if self.model and 'MLM' in self.model["limitations"]: 4070 if 'MLM' not in self.proc_characteristic["limitations"]: 4071 used_couplings = matrix_element.get_used_couplings(output="set") 4072 for vertex in self.model.get('interactions'): 4073 particles = [p for p in vertex.get('particles')] 4074 if 21 in [p.get('pdg_code') for p in particles]: 4075 colors = [par.get('color') for par in particles] 4076 if 1 in colors: 4077 continue 4078 elif 'QCD' not in vertex.get('orders'): 4079 for bad_coup in vertex.get('couplings').values(): 4080 if bad_coup in used_couplings: 4081 self.proc_characteristic["limitations"].append('MLM') 4082 break 4083 4084 # The proc prefix is not used for MadEvent output so it can safely be set 4085 # to an empty string. 4086 replace_dict = {'proc_prefix':''} 4087 4088 # Extract helas calls 4089 helas_calls = fortran_model.get_matrix_element_calls(\ 4090 matrix_element) 4091 if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel: 4092 logger.info("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False", '$MG:BOLD') 4093 ProcessExporterFortranME.done_warning_tchannel = True 4094 4095 replace_dict['helas_calls'] = "\n".join(helas_calls) 4096 4097 4098 #adding the support for the fake width (forbidding too small width) 4099 mass_width = matrix_element.get_all_mass_widths() 4100 mass_width = sorted(list(mass_width)) 4101 width_list = set([e[1] for e in mass_width]) 4102 4103 replace_dict['fake_width_declaration'] = \ 4104 (' double precision fk_%s \n' * len(width_list)) % tuple(width_list) 4105 replace_dict['fake_width_declaration'] += \ 4106 (' save fk_%s \n' * len(width_list)) % tuple(width_list) 4107 fk_w_defs = [] 4108 one_def = ' IF(%(w)s.ne.0d0) fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)' 4109 for m, w in mass_width: 4110 if w == 'zero': 4111 if ' fk_zero = 0d0' not in fk_w_defs: 4112 fk_w_defs.append(' fk_zero = 0d0') 4113 continue 4114 fk_w_defs.append(one_def %{'m':m, 'w':w}) 4115 replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs) 4116 4117 # Extract version number and date from VERSION file 4118 info_lines = self.get_mg5_info_lines() 4119 replace_dict['info_lines'] = info_lines 4120 4121 # Extract process info lines 4122 process_lines = self.get_process_info_lines(matrix_element) 4123 replace_dict['process_lines'] = process_lines 4124 4125 # Set proc_id 4126 replace_dict['proc_id'] = proc_id 4127 4128 # Extract ncomb 4129 ncomb = matrix_element.get_helicity_combinations() 4130 replace_dict['ncomb'] = ncomb 4131 4132 # Extract helicity lines 4133 helicity_lines = self.get_helicity_lines(matrix_element) 4134 replace_dict['helicity_lines'] = helicity_lines 4135 4136 # Extract IC line 4137 ic_line = self.get_ic_line(matrix_element) 4138 replace_dict['ic_line'] = ic_line 4139 4140 # Extract overall denominator 4141 # Averaging initial state color, spin, and identical FS particles 
4142 den_factor_line = self.get_den_factor_line(matrix_element) 4143 replace_dict['den_factor_line'] = den_factor_line 4144 4145 # Extract ngraphs 4146 ngraphs = matrix_element.get_number_of_amplitudes() 4147 replace_dict['ngraphs'] = ngraphs 4148 4149 # Extract ndiags 4150 ndiags = len(matrix_element.get('diagrams')) 4151 replace_dict['ndiags'] = ndiags 4152 4153 # Set define_iconfigs_lines 4154 replace_dict['define_iconfigs_lines'] = \ 4155 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 4156 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 4157 4158 if proc_id: 4159 # Set lines for subprocess group version 4160 # Set define_iconfigs_lines 4161 replace_dict['define_iconfigs_lines'] += \ 4162 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4163 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4164 # Set set_amp2_line 4165 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 4166 proc_id 4167 else: 4168 # Standard running 4169 # Set set_amp2_line 4170 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 4171 4172 # Extract nwavefuncs 4173 nwavefuncs = matrix_element.get_number_of_wavefunctions() 4174 replace_dict['nwavefuncs'] = nwavefuncs 4175 4176 # Extract ncolor 4177 ncolor = max(1, len(matrix_element.get('color_basis'))) 4178 replace_dict['ncolor'] = ncolor 4179 4180 # Extract color data lines 4181 color_data_lines = self.get_color_data_lines(matrix_element) 4182 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4183 4184 4185 # Set the size of Wavefunction 4186 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4187 replace_dict['wavefunctionsize'] = 18 4188 else: 4189 replace_dict['wavefunctionsize'] = 6 4190 4191 # Extract amp2 lines 4192 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 4193 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4194 4195 # The JAMP definition depends on the splitting order 4196 split_orders=matrix_element.get('processes')[0].get('split_orders') 4197 if len(split_orders)>0: 4198 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4199 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4200 matrix_element.get('processes')[0],squared_orders) 4201 else: 4202 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4203 # set all amplitude order to weight 1 and only one squared order 4204 # contribution which is of course ALL_ORDERS=2. 4205 squared_orders = [(2,),] 4206 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4207 replace_dict['chosen_so_configs'] = '.TRUE.' 
4208 4209 replace_dict['nAmpSplitOrders']=len(amp_orders) 4210 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4211 replace_dict['split_order_str_list']=str(split_orders) 4212 replace_dict['nSplitOrders']=max(len(split_orders),1) 4213 amp_so = self.get_split_orders_lines( 4214 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4215 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4216 replace_dict['ampsplitorders']='\n'.join(amp_so) 4217 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4218 4219 4220 # Extract JAMP lines 4221 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4222 jamp_lines = self.get_JAMP_lines_split_order(\ 4223 matrix_element,amp_orders,split_order_names= 4224 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4225 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4226 4227 replace_dict['template_file'] = pjoin(_file_path, \ 4228 'iolibs/template_files/%s' % self.matrix_file) 4229 replace_dict['template_file2'] = pjoin(_file_path, \ 4230 'iolibs/template_files/split_orders_helping_functions.inc') 4231 4232 s1,s2 = matrix_element.get_spin_state_initial() 4233 replace_dict['nb_spin_state1'] = s1 4234 replace_dict['nb_spin_state2'] = s2 4235 4236 if writer: 4237 file = open(replace_dict['template_file']).read() 4238 file = file % replace_dict 4239 # Add the split orders helper functions. 4240 file = file + '\n' + open(replace_dict['template_file2'])\ 4241 .read()%replace_dict 4242 # Write the file 4243 writer.writelines(file) 4244 return len([call for call in helas_calls if call.find('#') != 0]), ncolor 4245 else: 4246 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor) 4247 return replace_dict
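    # Worked example of the fake-width protection defined earlier in this method
    # (the pair ('MT', 'WT') is hypothetical).  For mass_width containing
    # ('MT', 'WT') the generated Fortran reads
    #
    #     double precision fk_WT
    #     save fk_WT
    #     IF(WT.ne.0d0) fk_WT = SIGN(MAX(ABS(WT), ABS(MT*small_width_treatment)), WT)
    #
    # i.e. the width used in the propagators never falls below
    # |MT|*small_width_treatment in magnitude while keeping the sign of WT;
    # a 'zero' width simply gives fk_zero = 0d0.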
4248 4249 #=========================================================================== 4250 # write_auto_dsig_file 4251 #===========================================================================
4252 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4253 """Write the auto_dsig.f file for the differential cross section 4254 calculation, includes pdf call information""" 4255 4256 if not matrix_element.get('processes') or \ 4257 not matrix_element.get('diagrams'): 4258 return 0 4259 4260 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4261 self.proc_characteristic['ninitial'] = ninitial 4262 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4263 4264 # Add information relevant for MLM matching: 4265 # Maximum QCD power in all the contributions 4266 max_qcd_order = 0 4267 for diag in matrix_element.get('diagrams'): 4268 orders = diag.calculate_orders() 4269 if 'QCD' in orders: 4270 max_qcd_order = max(max_qcd_order,orders['QCD']) 4271 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4272 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4273 proc.get('model').get_particle(id).get('color')>1]) 4274 for proc in matrix_element.get('processes')) 4275 # Maximum number of final state light jets to be matched 4276 self.proc_characteristic['max_n_matched_jets'] = max( 4277 self.proc_characteristic['max_n_matched_jets'], 4278 min(max_qcd_order,max_n_light_final_partons)) 4279 4280 # List of default pdgs to be considered for the CKKWl merging cut 4281 self.proc_characteristic['colored_pdgs'] = \ 4282 sorted(list(set([abs(p.get('pdg_code')) for p in 4283 matrix_element.get('processes')[0].get('model').get('particles') if 4284 p.get('color')>1]))) 4285 4286 if ninitial < 1 or ninitial > 2: 4287 raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""") 4288 4289 replace_dict = {} 4290 4291 # Extract version number and date from VERSION file 4292 info_lines = self.get_mg5_info_lines() 4293 replace_dict['info_lines'] = info_lines 4294 4295 # Extract process info lines 4296 process_lines = self.get_process_info_lines(matrix_element) 4297 replace_dict['process_lines'] = process_lines 4298 4299 # Set proc_id 4300 replace_dict['proc_id'] = proc_id 4301 replace_dict['numproc'] = 1 4302 4303 # Set dsig_line 4304 if ninitial == 1: 4305 # No conversion, since result of decay should be given in GeV 4306 dsig_line = "pd(0)*dsiguu" 4307 else: 4308 # Convert result (in GeV) to pb 4309 dsig_line = "pd(0)*conv*dsiguu" 4310 4311 replace_dict['dsig_line'] = dsig_line 4312 4313 # Extract pdf lines 4314 pdf_vars, pdf_data, pdf_lines = \ 4315 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4316 replace_dict['pdf_vars'] = pdf_vars 4317 replace_dict['pdf_data'] = pdf_data 4318 replace_dict['pdf_lines'] = pdf_lines 4319 4320 # Lines that differ between subprocess group and regular 4321 if proc_id: 4322 replace_dict['numproc'] = int(proc_id) 4323 replace_dict['passcuts_begin'] = "" 4324 replace_dict['passcuts_end'] = "" 4325 # Set lines for subprocess group version 4326 # Set define_iconfigs_lines 4327 replace_dict['define_subdiag_lines'] = \ 4328 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4329 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4330 replace_dict['cutsdone'] = "" 4331 else: 4332 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4333 replace_dict['passcuts_end'] = "ENDIF" 4334 replace_dict['define_subdiag_lines'] = "" 4335 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 
4336 4337 if not isinstance(self, ProcessExporterFortranMEGroup): 4338 ncomb=matrix_element.get_helicity_combinations() 4339 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4340 else: 4341 replace_dict['read_write_good_hel'] = "" 4342 4343 context = {'read_write_good_hel':True} 4344 4345 if writer: 4346 file = open(pjoin(_file_path, \ 4347 'iolibs/template_files/auto_dsig_v4.inc')).read() 4348 file = file % replace_dict 4349 4350 # Write the file 4351 writer.writelines(file, context=context) 4352 else: 4353 return replace_dict, context
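    # Worked example of the matching bound computed above (the process content is
    # assumed): for a matrix element whose diagrams reach QCD order 3 and whose
    # final state holds 2 massless coloured partons, max_n_matched_jets is raised
    # to at most min(3, 2) = 2, and is never lowered below its current value.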
4354 #=========================================================================== 4355 # write_coloramps_file 4356 #===========================================================================
4357 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4358 """Write the coloramps.inc file for MadEvent""" 4359 4360 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4361 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4362 (max(len(list(matrix_element.get('color_basis').keys())), 1), 4363 len(mapconfigs))) 4364 4365 4366 # Write the file 4367 writer.writelines(lines) 4368 4369 return True
4370 4371 #=========================================================================== 4372 # write_colors_file 4373 #===========================================================================
4374 - def write_colors_file(self, writer, matrix_elements):
4375 """Write the get_color.f file for MadEvent, which returns color 4376 for all particles used in the matrix element.""" 4377 4378 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4379 matrix_elements = [matrix_elements] 4380 4381 model = matrix_elements[0].get('processes')[0].get('model') 4382 4383 # We need the both particle and antiparticle wf_ids, since the identity 4384 # depends on the direction of the wf. 4385 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4386 for wf in d.get('wavefunctions')],[]) \ 4387 for d in me.get('diagrams')], []) \ 4388 for me in matrix_elements], [])) 4389 4390 leg_ids = set(sum([sum([sum([[l.get('id'), 4391 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4392 for l in p.get_legs_with_decays()], []) \ 4393 for p in me.get('processes')], []) \ 4394 for me in matrix_elements], [])) 4395 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4396 4397 lines = """function get_color(ipdg) 4398 implicit none 4399 integer get_color, ipdg 4400 4401 if(ipdg.eq.%d)then 4402 get_color=%d 4403 return 4404 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4405 4406 for part_id in particle_ids[1:]: 4407 lines += """else if(ipdg.eq.%d)then 4408 get_color=%d 4409 return 4410 """ % (part_id, model.get_particle(part_id).get_color()) 4411 # Dummy particle for multiparticle vertices with pdg given by 4412 # first code not in the model 4413 lines += """else if(ipdg.eq.%d)then 4414 c This is dummy particle used in multiparticle vertices 4415 get_color=2 4416 return 4417 """ % model.get_first_non_pdg() 4418 lines += """else 4419 write(*,*)'Error: No color given for pdg ',ipdg 4420 get_color=0 4421 return 4422 endif 4423 end 4424 """ 4425 4426 # Write the file 4427 writer.writelines(lines) 4428 4429 return True
4430 4431 #=========================================================================== 4432 # write_config_nqcd_file 4433 #===========================================================================
4434 - def write_config_nqcd_file(self, writer, nqcd_list):
4435 """Write the config_nqcd.inc with the number of QCD couplings 4436 for each config""" 4437 4438 lines = [] 4439 for iconf, n in enumerate(nqcd_list): 4440 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4441 4442 # Write the file 4443 writer.writelines(lines) 4444 4445 return True
4446 4447 #=========================================================================== 4448 # write_maxconfigs_file 4449 #===========================================================================
4450 - def write_maxconfigs_file(self, writer, matrix_elements):
4451 """Write the maxconfigs.inc file for MadEvent""" 4452 4453 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4454 maxconfigs = max([me.get_num_configs() for me in \ 4455 matrix_elements.get('matrix_elements')]) 4456 else: 4457 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4458 4459 lines = "integer lmaxconfigs\n" 4460 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4461 4462 # Write the file 4463 writer.writelines(lines) 4464 4465 return True
4466 4467 #=========================================================================== 4468 # read_write_good_hel 4469 #===========================================================================
4470 - def read_write_good_hel(self, ncomb):
4471 """return the code to read/write the good_hel common_block""" 4472 4473 convert = {'ncomb' : ncomb} 4474 output = """ 4475 subroutine write_good_hel(stream_id) 4476 implicit none 4477 integer stream_id 4478 INTEGER NCOMB 4479 PARAMETER ( NCOMB=%(ncomb)d) 4480 LOGICAL GOODHEL(NCOMB) 4481 INTEGER NTRY 4482 common/BLOCK_GOODHEL/NTRY,GOODHEL 4483 write(stream_id,*) GOODHEL 4484 return 4485 end 4486 4487 4488 subroutine read_good_hel(stream_id) 4489 implicit none 4490 include 'genps.inc' 4491 integer stream_id 4492 INTEGER NCOMB 4493 PARAMETER ( NCOMB=%(ncomb)d) 4494 LOGICAL GOODHEL(NCOMB) 4495 INTEGER NTRY 4496 common/BLOCK_GOODHEL/NTRY,GOODHEL 4497 read(stream_id,*) GOODHEL 4498 NTRY = MAXTRIES + 1 4499 return 4500 end 4501 4502 subroutine init_good_hel() 4503 implicit none 4504 INTEGER NCOMB 4505 PARAMETER ( NCOMB=%(ncomb)d) 4506 LOGICAL GOODHEL(NCOMB) 4507 INTEGER NTRY 4508 INTEGER I 4509 4510 do i=1,NCOMB 4511 GOODHEL(I) = .false. 4512 enddo 4513 NTRY = 0 4514 end 4515 4516 integer function get_maxsproc() 4517 implicit none 4518 get_maxsproc = 1 4519 return 4520 end 4521 4522 """ % convert 4523 4524 return output
4525 4526 #=========================================================================== 4527 # write_config_subproc_map_file 4528 #===========================================================================
4529 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4530 """Write a dummy config_subproc.inc file for MadEvent""" 4531 4532 lines = [] 4533 4534 for iconfig in range(len(s_and_t_channels)): 4535 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4536 (iconfig + 1)) 4537 4538 # Write the file 4539 writer.writelines(lines) 4540 4541 return True
4542 4543 #=========================================================================== 4544 # write_configs_file 4545 #===========================================================================
4546 - def write_configs_file(self, writer, matrix_element):
4547 """Write the configs.inc file for MadEvent""" 4548 4549 # Extract number of external particles 4550 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4551 4552 model = matrix_element.get('processes')[0].get('model') 4553 configs = [(i+1, d) for (i, d) in \ 4554 enumerate(matrix_element.get('diagrams'))] 4555 mapconfigs = [c[0] for c in configs] 4556 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4557 [[c[1]] for c in configs], 4558 mapconfigs, 4559 nexternal, ninitial, 4560 model)
4561 4562 #=========================================================================== 4563 # write_run_configs_file 4564 #===========================================================================
4565 - def write_run_config_file(self, writer):
4566 """Write the run_configs.inc file for MadEvent""" 4567 4568 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4569 4570 if self.proc_characteristic['loop_induced']: 4571 job_per_chan = 1 4572 else: 4573 job_per_chan = 5 4574 4575 if writer: 4576 text = open(path).read() % {'chanperjob': job_per_chan} 4577 writer.write(text) 4578 return True 4579 else: 4580 return {'chanperjob': job_per_chan}
4581 4582 #=========================================================================== 4583 # write_configs_file_from_diagrams 4584 #===========================================================================
4585 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4586 nexternal, ninitial, model):
4587 """Write the actual configs.inc file. 4588 4589 configs is the diagrams corresponding to configs (each 4590 diagrams is a list of corresponding diagrams for all 4591 subprocesses, with None if there is no corresponding diagrams 4592 for a given process). 4593 mapconfigs gives the diagram number for each config. 4594 4595 For s-channels, we need to output one PDG for each subprocess in 4596 the subprocess group, in order to be able to pick the right 4597 one for multiprocesses.""" 4598 4599 lines = [] 4600 4601 s_and_t_channels = [] 4602 4603 nqcd_list = [] 4604 4605 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4606 for config in configs if [d for d in config if d][0].\ 4607 get_vertex_leg_numbers()!=[]] 4608 minvert = min(vert_list) if vert_list!=[] else 0 4609 4610 # Number of subprocesses 4611 nsubprocs = len(configs[0]) 4612 4613 nconfigs = 0 4614 4615 new_pdg = model.get_first_non_pdg() 4616 4617 for iconfig, helas_diags in enumerate(configs): 4618 if any([vert > minvert for vert in 4619 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4620 # Only 3-vertices allowed in configs.inc 4621 continue 4622 nconfigs += 1 4623 4624 # Need s- and t-channels for all subprocesses, including 4625 # those that don't contribute to this config 4626 empty_verts = [] 4627 stchannels = [] 4628 for h in helas_diags: 4629 if h: 4630 # get_s_and_t_channels gives vertices starting from 4631 # final state external particles and working inwards 4632 stchannels.append(h.get('amplitudes')[0].\ 4633 get_s_and_t_channels(ninitial, model, 4634 new_pdg)) 4635 else: 4636 stchannels.append((empty_verts, None)) 4637 4638 # For t-channels, just need the first non-empty one 4639 tchannels = [t for s,t in stchannels if t != None][0] 4640 4641 # For s_and_t_channels (to be used later) use only first config 4642 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4643 tchannels]) 4644 4645 # Make sure empty_verts is same length as real vertices 4646 if any([s for s,t in stchannels]): 4647 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4648 4649 # Reorganize s-channel vertices to get a list of all 4650 # subprocesses for each vertex 4651 schannels = list(zip(*[s for s,t in stchannels])) 4652 else: 4653 schannels = [] 4654 4655 allchannels = schannels 4656 if len(tchannels) > 1: 4657 # Write out tchannels only if there are any non-trivial ones 4658 allchannels = schannels + tchannels 4659 4660 # Write out propagators for s-channel and t-channel vertices 4661 4662 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4663 # Correspondance between the config and the diagram = amp2 4664 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4665 mapconfigs[iconfig])) 4666 # Number of QCD couplings in this diagram 4667 nqcd = 0 4668 for h in helas_diags: 4669 if h: 4670 try: 4671 nqcd = h.calculate_orders()['QCD'] 4672 except KeyError: 4673 pass 4674 break 4675 else: 4676 continue 4677 4678 nqcd_list.append(nqcd) 4679 4680 for verts in allchannels: 4681 if verts in schannels: 4682 vert = [v for v in verts if v][0] 4683 else: 4684 vert = verts 4685 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4686 last_leg = vert.get('legs')[-1] 4687 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4688 (last_leg.get('number'), nconfigs, len(daughters), 4689 ",".join([str(d) for d in daughters]))) 4690 if verts in schannels: 4691 pdgs = [] 4692 for v in verts: 4693 if v: 4694 pdgs.append(v.get('legs')[-1].get('id')) 4695 else: 4696 pdgs.append(0) 4697 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4698 (last_leg.get('number'), nconfigs, nsubprocs, 4699 ",".join([str(d) for d in pdgs]))) 4700 lines.append("data tprid(%d,%d)/0/" % \ 4701 (last_leg.get('number'), nconfigs)) 4702 elif verts in tchannels[:-1]: 4703 lines.append("data tprid(%d,%d)/%d/" % \ 4704 (last_leg.get('number'), nconfigs, 4705 abs(last_leg.get('id')))) 4706 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4707 (last_leg.get('number'), nconfigs, nsubprocs, 4708 ",".join(['0'] * nsubprocs))) 4709 4710 # Write out number of configs 4711 lines.append("# Number of configs") 4712 lines.append("data mapconfig(0)/%d/" % nconfigs) 4713 4714 # Write the file 4715 writer.writelines(lines) 4716 4717 return s_and_t_channels, nqcd_list
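# Illustrative sketch, not part of export_v4.py: how the DATA statements above
# are formatted for a single s-channel vertex.  The leg number, daughters, PDG
# codes and number of subprocesses below are hypothetical stand-ins for what
# the HelasDiagram objects would provide.
def sketch_config_lines(last_leg_number=-1, nconfigs=1, daughters=(3, 4),
                        pdgs=(23, 0), nsubprocs=2):
    """Return the configs.inc lines written for one s-channel vertex."""
    lines = []
    lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" %
                 (last_leg_number, nconfigs, len(daughters),
                  ",".join(str(d) for d in daughters)))
    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" %
                 (last_leg_number, nconfigs, nsubprocs,
                  ",".join(str(p) for p in pdgs)))
    lines.append("data tprid(%d,%d)/0/" % (last_leg_number, nconfigs))
    return lines
# sketch_config_lines() -> ['data (iforest(i,-1,1),i=1,2)/3,4/',
#                           'data (sprop(i,-1,1),i=1,2)/23,0/',
#                           'data tprid(-1,1)/0/']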
4718 4719 #=========================================================================== 4720 # write_decayBW_file 4721 #===========================================================================
4722 - def write_decayBW_file(self, writer, s_and_t_channels):
4723 """Write the decayBW.inc file for MadEvent""" 4724 4725 lines = [] 4726 4727 booldict = {None: "0", True: "1", False: "2"} 4728 4729 for iconf, config in enumerate(s_and_t_channels): 4730 schannels = config[0] 4731 for vertex in schannels: 4732 # For the resulting leg, pick out whether it comes from 4733 # decay or not, as given by the onshell flag 4734 leg = vertex.get('legs')[-1] 4735 lines.append("data gForceBW(%d,%d)/%s/" % \ 4736 (leg.get('number'), iconf + 1, 4737 booldict[leg.get('onshell')])) 4738 4739 # Write the file 4740 writer.writelines(lines) 4741 4742 return True
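# Illustrative sketch, not part of export_v4.py: the onshell flag of the last
# leg of each s-channel vertex is mapped to a decayBW.inc flag through
# booldict = {None: "0", True: "1", False: "2"}.  The leg number and flag
# below are hypothetical.
def sketch_decaybw_line(leg_number=-1, onshell=True, iconf=0):
    booldict = {None: "0", True: "1", False: "2"}
    return "data gForceBW(%d,%d)/%s/" % (leg_number, iconf + 1,
                                         booldict[onshell])
# sketch_decaybw_line() -> 'data gForceBW(-1,1)/1/'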
4743 4744 #=========================================================================== 4745 # write_dname_file 4746 #===========================================================================
4747 - def write_dname_file(self, writer, dir_name):
4748 """Write the dname.mg file for MG4""" 4749 4750 line = "DIRNAME=%s" % dir_name 4751 4752 # Write the file 4753 writer.write(line + "\n") 4754 4755 return True
4756 4757 #=========================================================================== 4758 # write_driver 4759 #===========================================================================
4760 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4761 """Write the SubProcess/driver.f file for MG4""" 4762 4763 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4764 4765 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4766 card = 'Source/MODEL/MG5_param.dat' 4767 else: 4768 card = 'param_card.dat' 4769 # Requiring each helicity configuration to be probed by 10 points for 4770 # matrix element before using the resulting grid for MC over helicity 4771 # sampling. 4772 # We multiply this by 2 because each grouped subprocess is called at most 4773 # twice for each IMIRROR. 4774 replace_dict = {'param_card_name':card, 4775 'ncomb':ncomb, 4776 'hel_init_points':n_grouped_proc*10*2} 4777 if not v5: 4778 replace_dict['secondparam']=',.true.' 4779 else: 4780 replace_dict['secondparam']='' 4781 4782 if writer: 4783 text = open(path).read() % replace_dict 4784 writer.write(text) 4785 return True 4786 else: 4787 return replace_dict
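# Illustrative sketch, not part of export_v4.py: the driver template is filled
# with old-style %(...)s substitution.  The one-line template below is a
# hypothetical stand-in for madevent_driver.f; only the replace_dict keys
# mirror the real ones.
def sketch_fill_driver(ncomb=16, n_grouped_proc=2):
    template = "      PARAMETER (NCOMB=%(ncomb)d, HEL_INIT=%(hel_init_points)d)"
    replace_dict = {'param_card_name': 'param_card.dat',
                    'ncomb': ncomb,
                    'hel_init_points': n_grouped_proc * 10 * 2}
    return template % replace_dict
# sketch_fill_driver() -> '      PARAMETER (NCOMB=16, HEL_INIT=40)'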
4788 4789 #=========================================================================== 4790 # write_addmothers 4791 #===========================================================================
4792 - def write_addmothers(self, writer):
4793 """Write the SubProcess/addmothers.f""" 4794 4795 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4796 4797 text = open(path).read() % {'iconfig': 'diag_number'} 4798 writer.write(text) 4799 4800 return True
4801 4802 4803 #=========================================================================== 4804 # write_combine_events 4805 #===========================================================================
4806 - def write_combine_events(self, writer, nb_proc=100):
4807 """Write the SubProcess/combine_events.f file for MG4""" 4808 4809 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4810 4811 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4812 card = 'Source/MODEL/MG5_param.dat' 4813 else: 4814 card = 'param_card.dat' 4815 4816 # set maxpup (number of @X in the process card) 4817 4818 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4819 # the +1 is just a safety margin; it is not strictly needed, but it is kept to be safe (OM) 4820 writer.write(text) 4821 4822 return True
4823 4824 4825 #=========================================================================== 4826 # write_symmetry 4827 #===========================================================================
4828 - def write_symmetry(self, writer, v5=True):
4829 """Write the SubProcess/symmetry.f file for MadEvent""" 4830 4831 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4832 4833 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4834 card = 'Source/MODEL/MG5_param.dat' 4835 else: 4836 card = 'param_card.dat' 4837 4838 if v5: 4839 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4840 else: 4841 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4842 4843 if writer: 4844 text = open(path).read() 4845 text = text % replace_dict 4846 writer.write(text) 4847 return True 4848 else: 4849 return replace_dict
4850 4851 4852 4853 #=========================================================================== 4854 # write_iproc_file 4855 #===========================================================================
4856 - def write_iproc_file(self, writer, me_number):
4857 """Write the iproc.dat file for MG4""" 4858 line = "%d" % (me_number + 1) 4859 4860 # Write the file 4861 for line_to_write in writer.write_line(line): 4862 writer.write(line_to_write) 4863 return True
4864 4865 #=========================================================================== 4866 # write_mg_sym_file 4867 #===========================================================================
4868 - def write_mg_sym_file(self, writer, matrix_element):
4869 """Write the mg.sym file for MadEvent.""" 4870 4871 lines = [] 4872 4873 # Extract process with all decays included 4874 final_legs = [leg for leg in matrix_element.get('processes')[0].get_legs_with_decays() if leg.get('state') == True] 4875 4876 ninitial = len([leg for leg in matrix_element.get('processes')[0].get('legs') if leg.get('state') == False]) 4877 4878 identical_indices = {} 4879 4880 # Extract identical particle info 4881 for i, leg in enumerate(final_legs): 4882 if leg.get('id') in identical_indices: 4883 identical_indices[leg.get('id')].append(\ 4884 i + ninitial + 1) 4885 else: 4886 identical_indices[leg.get('id')] = [i + ninitial + 1] 4887 4888 # Remove keys which have only one particle 4889 for key in list(identical_indices.keys()): 4890 if len(identical_indices[key]) < 2: 4891 del identical_indices[key] 4892 4893 # Write mg.sym file 4894 lines.append(str(len(list(identical_indices.keys())))) 4895 for key in identical_indices.keys(): 4896 lines.append(str(len(identical_indices[key]))) 4897 for number in identical_indices[key]: 4898 lines.append(str(number)) 4899 4900 # Write the file 4901 writer.writelines(lines) 4902 4903 return True
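# Illustrative sketch, not part of export_v4.py: how final-state legs with the
# same PDG id are grouped into the mg.sym lines.  The id list below is a
# hypothetical 2 -> 3 example (ninitial=2) with two identical gluons.
def sketch_mg_sym_lines(final_ids=(21, 1, 21), ninitial=2):
    identical = {}
    for i, pdg in enumerate(final_ids):
        identical.setdefault(pdg, []).append(i + ninitial + 1)
    identical = dict((k, v) for k, v in identical.items() if len(v) > 1)
    lines = [str(len(identical))]
    for positions in identical.values():
        lines.append(str(len(positions)))
        lines.extend(str(n) for n in positions)
    return lines
# sketch_mg_sym_lines() -> ['1', '2', '3', '5']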
4904 4905 #=========================================================================== 4906 # write_mg_sym_file 4907 #===========================================================================
4908 - def write_default_mg_sym_file(self, writer):
4909 """Write the mg.sym file for MadEvent.""" 4910 4911 lines = "0" 4912 4913 # Write the file 4914 writer.writelines(lines) 4915 4916 return True
4917 4918 #=========================================================================== 4919 # write_ncombs_file 4920 #===========================================================================
4921 - def write_ncombs_file(self, writer, nexternal):
4922 """Write the ncombs.inc file for MadEvent.""" 4923 4924 # ncomb (used for clustering) is 2^nexternal 4925 file = " integer n_max_cl\n" 4926 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4927 4928 # Write the file 4929 writer.writelines(file) 4930 4931 return True
4932 4933 #=========================================================================== 4934 # write_processes_file 4935 #===========================================================================
4936 - def write_processes_file(self, writer, subproc_group):
4937 """Write the processes.dat file with info about the subprocesses 4938 in this group.""" 4939 4940 lines = [] 4941 4942 for ime, me in \ 4943 enumerate(subproc_group.get('matrix_elements')): 4944 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4945 ",".join(p.base_string() for p in \ 4946 me.get('processes')))) 4947 if me.get('has_mirror_process'): 4948 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4949 for proc in mirror_procs: 4950 legs = copy.copy(proc.get('legs_with_decays')) 4951 legs.insert(0, legs.pop(1)) 4952 proc.set("legs_with_decays", legs) 4953 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4954 mirror_procs)) 4955 else: 4956 lines.append("mirror none") 4957 4958 # Write the file 4959 writer.write("\n".join(lines)) 4960 4961 return True
4962 4963 #=========================================================================== 4964 # write_symswap_file 4965 #===========================================================================
4966 - def write_symswap_file(self, writer, ident_perms):
4967 """Write the file symswap.inc for MG4 by comparing diagrams using 4968 the internal matrix element value functionality.""" 4969 4970 lines = [] 4971 4972 # Write out lines for symswap.inc file (used to permute the 4973 # external leg momenta) 4974 for iperm, perm in enumerate(ident_perms): 4975 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4976 (iperm+1, ",".join([str(i+1) for i in perm]))) 4977 lines.append("data nsym/%d/" % len(ident_perms)) 4978 4979 # Write the file 4980 writer.writelines(lines) 4981 4982 return True
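# Illustrative sketch, not part of export_v4.py: formatting of the isym DATA
# statements written above.  The permutations below are hypothetical 0-based
# relabellings of a 4-particle process.
def sketch_symswap_lines(ident_perms=((0, 1, 2, 3), (0, 1, 3, 2))):
    lines = ["data (isym(i,%d),i=1,nexternal)/%s/" %
             (iperm + 1, ",".join(str(i + 1) for i in perm))
             for iperm, perm in enumerate(ident_perms)]
    lines.append("data nsym/%d/" % len(ident_perms))
    return lines
# -> ['data (isym(i,1),i=1,nexternal)/1,2,3,4/',
#     'data (isym(i,2),i=1,nexternal)/1,2,4,3/', 'data nsym/2/']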
4983 4984 #=========================================================================== 4985 # write_symfact_file 4986 #===========================================================================
4987 - def write_symfact_file(self, writer, symmetry):
4988 """Write the file symfact.dat for MG4 by comparing diagrams using 4989 the internal matrix element value functionality.""" 4990 4991 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4992 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4993 # Write out lines for the symfact.dat file (config number and 4994 # symmetry factor for each kept configuration) 4995 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4996 # Write the file 4997 writer.write('\n'.join(lines)) 4998 writer.write('\n') 4999 5000 return True
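# Illustrative sketch, not part of export_v4.py: the symfact.dat lines above
# right-justify the config number and its symmetry factor with a width derived
# from the number of configs.  The symmetry list below is hypothetical
# (a negative entry points to the equivalent config, 0 entries are dropped).
import math
def sketch_symfact_lines(symmetry=(1, 2, 0, -2)):
    pos = max(2, int(math.ceil(math.log10(len(symmetry)))))
    form = "%" + str(pos) + "r %" + str(pos + 1) + "r"
    return [form % (i + 1, s) for i, s in enumerate(symmetry) if s != 0]
# sketch_symfact_lines() -> [' 1   1', ' 2   2', ' 4  -2']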
5001 5002 #=========================================================================== 5003 # write_symperms_file 5004 #===========================================================================
5005 - def write_symperms_file(self, writer, perms):
5006 """Write the symperms.inc file for subprocess group, used for 5007 symmetric configurations""" 5008 5009 lines = [] 5010 for iperm, perm in enumerate(perms): 5011 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 5012 (iperm+1, ",".join([str(i+1) for i in perm]))) 5013 5014 # Write the file 5015 writer.writelines(lines) 5016 5017 return True
5018 5019 #=========================================================================== 5020 # write_subproc 5021 #===========================================================================
5022 - def write_subproc(self, writer, subprocdir):
5023 """Append this subprocess to the subproc.mg file for MG4""" 5024 5025 # Write line to file 5026 writer.write(subprocdir + "\n") 5027 5028 return True
5029
5030 #=============================================================================== 5031 # ProcessExporterFortranMEGroup 5032 #=============================================================================== 5033 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
5034 """Class to take care of exporting a set of matrix elements to 5035 MadEvent subprocess group format.""" 5036 5037 matrix_file = "matrix_madevent_group_v4.inc" 5038 grouped_mode = 'madevent' 5039 #=========================================================================== 5040 # generate_subprocess_directory 5041 #===========================================================================
5042 - def generate_subprocess_directory(self, subproc_group, 5043 fortran_model, 5044 group_number):
5045 """Generate the Pn directory for a subprocess group in MadEvent, 5046 including the necessary matrix_N.f files, configs.inc and various 5047 other helper files.""" 5048 5049 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 5050 "subproc_group object not SubProcessGroup" 5051 5052 if not self.model: 5053 self.model = subproc_group.get('matrix_elements')[0].\ 5054 get('processes')[0].get('model') 5055 5056 cwd = os.getcwd() 5057 path = pjoin(self.dir_path, 'SubProcesses') 5058 5059 os.chdir(path) 5060 pathdir = os.getcwd() 5061 5062 # Create the directory PN in the specified path 5063 subprocdir = "P%d_%s" % (subproc_group.get('number'), 5064 subproc_group.get('name')) 5065 try: 5066 os.mkdir(subprocdir) 5067 except os.error as error: 5068 logger.warning(error.strerror + " " + subprocdir) 5069 5070 try: 5071 os.chdir(subprocdir) 5072 except os.error: 5073 logger.error('Could not cd to directory %s' % subprocdir) 5074 return 0 5075 5076 logger.info('Creating files in directory %s' % subprocdir) 5077 5078 # Create the matrix.f files, auto_dsig.f files and all inc files 5079 # for all subprocesses in the group 5080 5081 maxamps = 0 5082 maxflows = 0 5083 tot_calls = 0 5084 5085 matrix_elements = subproc_group.get('matrix_elements') 5086 5087 # Add the driver.f, all grouped ME's must share the same number of 5088 # helicity configuration 5089 ncomb = matrix_elements[0].get_helicity_combinations() 5090 for me in matrix_elements[1:]: 5091 if ncomb!=me.get_helicity_combinations(): 5092 raise MadGraph5Error("All grouped processes must share the "+\ 5093 "same number of helicity configurations.") 5094 5095 filename = 'driver.f' 5096 self.write_driver(writers.FortranWriter(filename),ncomb, 5097 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 5098 5099 for ime, matrix_element in \ 5100 enumerate(matrix_elements): 5101 filename = 'matrix%d.f' % (ime+1) 5102 calls, ncolor = \ 5103 self.write_matrix_element_v4(writers.FortranWriter(filename), 5104 matrix_element, 5105 fortran_model, 5106 proc_id=str(ime+1), 5107 config_map=subproc_group.get('diagram_maps')[ime], 5108 subproc_number=group_number) 5109 5110 filename = 'auto_dsig%d.f' % (ime+1) 5111 self.write_auto_dsig_file(writers.FortranWriter(filename), 5112 matrix_element, 5113 str(ime+1)) 5114 5115 # Keep track of needed quantities 5116 tot_calls += int(calls) 5117 maxflows = max(maxflows, ncolor) 5118 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 5119 5120 # Draw diagrams 5121 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 5122 filename = "matrix%d.ps" % (ime+1) 5123 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 5124 get('diagrams'), 5125 filename, 5126 model = \ 5127 matrix_element.get('processes')[0].\ 5128 get('model'), 5129 amplitude=True) 5130 logger.info("Generating Feynman diagrams for " + \ 5131 matrix_element.get('processes')[0].nice_string()) 5132 plot.draw() 5133 5134 # Extract number of external particles 5135 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 5136 5137 # Generate a list of diagrams corresponding to each configuration 5138 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 5139 # If a subprocess has no diagrams for this config, the number is 0 5140 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 5141 5142 filename = 'auto_dsig.f' 5143 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 5144 subproc_group) 5145 5146 filename = 'coloramps.inc' 5147 self.write_coloramps_file(writers.FortranWriter(filename), 5148 subproc_diagrams_for_config, 5149 maxflows, 5150 matrix_elements) 5151 5152 filename = 'get_color.f' 5153 self.write_colors_file(writers.FortranWriter(filename), 5154 matrix_elements) 5155 5156 filename = 'config_subproc_map.inc' 5157 self.write_config_subproc_map_file(writers.FortranWriter(filename), 5158 subproc_diagrams_for_config) 5159 5160 filename = 'configs.inc' 5161 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 5162 writers.FortranWriter(filename), 5163 subproc_group, 5164 subproc_diagrams_for_config) 5165 5166 filename = 'config_nqcd.inc' 5167 self.write_config_nqcd_file(writers.FortranWriter(filename), 5168 nqcd_list) 5169 5170 filename = 'decayBW.inc' 5171 self.write_decayBW_file(writers.FortranWriter(filename), 5172 s_and_t_channels) 5173 5174 filename = 'dname.mg' 5175 self.write_dname_file(writers.FortranWriter(filename), 5176 subprocdir) 5177 5178 filename = 'iproc.dat' 5179 self.write_iproc_file(writers.FortranWriter(filename), 5180 group_number) 5181 5182 filename = 'leshouche.inc' 5183 self.write_leshouche_file(writers.FortranWriter(filename), 5184 subproc_group) 5185 5186 filename = 'maxamps.inc' 5187 self.write_maxamps_file(writers.FortranWriter(filename), 5188 maxamps, 5189 maxflows, 5190 max([len(me.get('processes')) for me in \ 5191 matrix_elements]), 5192 len(matrix_elements)) 5193 5194 # Note that mg.sym is not relevant for this case 5195 filename = 'mg.sym' 5196 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 5197 5198 filename = 'mirrorprocs.inc' 5199 self.write_mirrorprocs(writers.FortranWriter(filename), 5200 subproc_group) 5201 5202 filename = 'ncombs.inc' 5203 self.write_ncombs_file(writers.FortranWriter(filename), 5204 nexternal) 5205 5206 filename = 'nexternal.inc' 5207 self.write_nexternal_file(writers.FortranWriter(filename), 5208 nexternal, ninitial) 5209 5210 filename = 'ngraphs.inc' 5211 self.write_ngraphs_file(writers.FortranWriter(filename), 5212 nconfigs) 5213 5214 filename = 'pmass.inc' 5215 self.write_pmass_file(writers.FortranWriter(filename), 5216 matrix_element) 5217 5218 filename = 'props.inc' 5219 self.write_props_file(writers.FortranWriter(filename), 5220 matrix_element, 5221 s_and_t_channels) 5222 5223 filename = 'processes.dat' 5224 files.write_to_file(filename, 5225 self.write_processes_file, 5226 subproc_group) 5227 5228 # Find config symmetries and permutations 5229 symmetry, perms, ident_perms = \ 5230 diagram_symmetry.find_symmetry(subproc_group) 5231 5232 filename = 'symswap.inc' 5233 self.write_symswap_file(writers.FortranWriter(filename), 5234 ident_perms) 5235 5236 filename = 'symfact_orig.dat' 5237 self.write_symfact_file(open(filename, 'w'), symmetry) 5238 5239 # check consistency 5240 for i, sym_fact in enumerate(symmetry): 5241 5242 if sym_fact >= 0: 5243 continue 5244 if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]: 5245 misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)]) 5246 raise Exception("identical diagram with different QCD powwer") 5247 5248 5249 filename = 'symperms.inc' 5250 self.write_symperms_file(writers.FortranWriter(filename), 5251 perms) 5252 5253 # 
Generate jpgs -> pass in make_html 5254 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 5255 5256 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 5257 5258 #import nexternal/leshouch in Source 5259 ln('nexternal.inc', '../../Source', log=False) 5260 ln('leshouche.inc', '../../Source', log=False) 5261 ln('maxamps.inc', '../../Source', log=False) 5262 5263 # Return to SubProcesses dir) 5264 os.chdir(pathdir) 5265 5266 # Add subprocess to subproc.mg 5267 filename = 'subproc.mg' 5268 files.append_to_file(filename, 5269 self.write_subproc, 5270 subprocdir) 5271 5272 # Return to original dir 5273 os.chdir(cwd) 5274 5275 if not tot_calls: 5276 tot_calls = 0 5277 return tot_calls
5278 5279 #=========================================================================== 5280 # write_super_auto_dsig_file 5281 #===========================================================================
5282 - def write_super_auto_dsig_file(self, writer, subproc_group):
5283 """Write the auto_dsig.f file selecting between the subprocesses 5284 in subprocess group mode""" 5285 5286 replace_dict = {} 5287 5288 # Extract version number and date from VERSION file 5289 info_lines = self.get_mg5_info_lines() 5290 replace_dict['info_lines'] = info_lines 5291 5292 matrix_elements = subproc_group.get('matrix_elements') 5293 5294 # Extract process info lines 5295 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5296 matrix_elements]) 5297 replace_dict['process_lines'] = process_lines 5298 5299 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5300 replace_dict['nexternal'] = nexternal 5301 5302 replace_dict['nsprocs'] = 2*len(matrix_elements) 5303 5304 # Generate dsig definition line 5305 dsig_def_line = "DOUBLE PRECISION " + \ 5306 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5307 range(len(matrix_elements))]) 5308 replace_dict["dsig_def_line"] = dsig_def_line 5309 5310 # Generate dsig process lines 5311 call_dsig_proc_lines = [] 5312 for iproc in range(len(matrix_elements)): 5313 call_dsig_proc_lines.append(\ 5314 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 5315 {"num": iproc + 1, 5316 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5317 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5318 5319 ncomb=matrix_elements[0].get_helicity_combinations() 5320 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5321 5322 s1,s2 = matrix_elements[0].get_spin_state_initial() 5323 replace_dict['nb_spin_state1'] = s1 5324 replace_dict['nb_spin_state2'] = s2 5325 5326 if writer: 5327 file = open(pjoin(_file_path, \ 5328 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5329 file = file % replace_dict 5330 5331 # Write the file 5332 writer.writelines(file) 5333 else: 5334 return replace_dict
5335 5336 #=========================================================================== 5337 # write_mirrorprocs 5338 #===========================================================================
5339 - def write_mirrorprocs(self, writer, subproc_group):
5340 """Write the mirrorprocs.inc file determining which processes have 5341 IS mirror process in subprocess group mode.""" 5342 5343 lines = [] 5344 bool_dict = {True: '.true.', False: '.false.'} 5345 matrix_elements = subproc_group.get('matrix_elements') 5346 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5347 (len(matrix_elements), 5348 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5349 me in matrix_elements]))) 5350 # Write the file 5351 writer.writelines(lines)
5352 5353 #=========================================================================== 5354 # write_addmothers 5355 #===========================================================================
5356 - def write_addmothers(self, writer):
5357 """Write the SubProcess/addmothers.f""" 5358 5359 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5360 5361 text = open(path).read() % {'iconfig': 'lconfig'} 5362 writer.write(text) 5363 5364 return True
5365 5366 5367 #=========================================================================== 5368 # write_coloramps_file 5369 #===========================================================================
5370 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5371 matrix_elements):
5372 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5373 5374 # Create a map from subprocess (matrix element) to a list of 5375 # the diagrams corresponding to each config 5376 5377 lines = [] 5378 5379 subproc_to_confdiag = {} 5380 for config in diagrams_for_config: 5381 for subproc, diag in enumerate(config): 5382 try: 5383 subproc_to_confdiag[subproc].append(diag) 5384 except KeyError: 5385 subproc_to_confdiag[subproc] = [diag] 5386 5387 for subproc in sorted(subproc_to_confdiag.keys()): 5388 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5389 matrix_elements[subproc], 5390 subproc + 1)) 5391 5392 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5393 (maxflows, 5394 len(diagrams_for_config), 5395 len(matrix_elements))) 5396 5397 # Write the file 5398 writer.writelines(lines) 5399 5400 return True
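# Illustrative sketch, not part of export_v4.py: the loop above transposes the
# per-config diagram numbers into one list per subprocess before calling
# get_icolamp_lines.  The input below is hypothetical: 3 configs, 2
# subprocesses, 0 meaning "no diagram for this subprocess".
def sketch_subproc_to_confdiag(diagrams_for_config=((1, 1), (2, 0), (3, 2))):
    subproc_to_confdiag = {}
    for config in diagrams_for_config:
        for subproc, diag in enumerate(config):
            subproc_to_confdiag.setdefault(subproc, []).append(diag)
    return subproc_to_confdiag
# sketch_subproc_to_confdiag() -> {0: [1, 2, 3], 1: [1, 0, 2]}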
5401 5402 #=========================================================================== 5403 # write_config_subproc_map_file 5404 #===========================================================================
5405 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5406 """Write the config_subproc_map.inc file for subprocess groups""" 5407 5408 lines = [] 5409 # Output only configs that have some corresponding diagrams 5410 iconfig = 0 5411 for config in config_subproc_map: 5412 if set(config) == set([0]): 5413 continue 5414 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5415 (iconfig + 1, len(config), 5416 ",".join([str(i) for i in config]))) 5417 iconfig += 1 5418 # Write the file 5419 writer.writelines(lines) 5420 5421 return True
5422 5423 #=========================================================================== 5424 # read_write_good_hel 5425 #===========================================================================
5426 - def read_write_good_hel(self, ncomb):
5427 """return the code to read/write the good_hel common_block""" 5428 5429 convert = {'ncomb' : ncomb} 5430 5431 output = """ 5432 subroutine write_good_hel(stream_id) 5433 implicit none 5434 integer stream_id 5435 INTEGER NCOMB 5436 PARAMETER ( NCOMB=%(ncomb)d) 5437 LOGICAL GOODHEL(NCOMB, 2) 5438 INTEGER NTRY(2) 5439 common/BLOCK_GOODHEL/NTRY,GOODHEL 5440 write(stream_id,*) GOODHEL 5441 return 5442 end 5443 5444 5445 subroutine read_good_hel(stream_id) 5446 implicit none 5447 include 'genps.inc' 5448 integer stream_id 5449 INTEGER NCOMB 5450 PARAMETER ( NCOMB=%(ncomb)d) 5451 LOGICAL GOODHEL(NCOMB, 2) 5452 INTEGER NTRY(2) 5453 common/BLOCK_GOODHEL/NTRY,GOODHEL 5454 read(stream_id,*) GOODHEL 5455 NTRY(1) = MAXTRIES + 1 5456 NTRY(2) = MAXTRIES + 1 5457 return 5458 end 5459 5460 subroutine init_good_hel() 5461 implicit none 5462 INTEGER NCOMB 5463 PARAMETER ( NCOMB=%(ncomb)d) 5464 LOGICAL GOODHEL(NCOMB, 2) 5465 INTEGER NTRY(2) 5466 INTEGER I 5467 5468 do i=1,NCOMB 5469 GOODHEL(I,1) = .false. 5470 GOODHEL(I,2) = .false. 5471 enddo 5472 NTRY(1) = 0 5473 NTRY(2) = 0 5474 end 5475 5476 integer function get_maxsproc() 5477 implicit none 5478 include 'maxamps.inc' 5479 5480 get_maxsproc = maxsproc 5481 return 5482 end 5483 5484 """ % convert 5485 5486 return output
5487 5488 5489 5490 #=========================================================================== 5491 # write_configs_file 5492 #===========================================================================
5493 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5494 """Write the configs.inc file with topology information for a 5495 subprocess group. Use the first subprocess with a diagram for each 5496 configuration.""" 5497 5498 matrix_elements = subproc_group.get('matrix_elements') 5499 model = matrix_elements[0].get('processes')[0].get('model') 5500 5501 diagrams = [] 5502 config_numbers = [] 5503 for iconfig, config in enumerate(diagrams_for_config): 5504 # Check if any diagrams correspond to this config 5505 if set(config) == set([0]): 5506 continue 5507 subproc_diags = [] 5508 for s,d in enumerate(config): 5509 if d: 5510 subproc_diags.append(matrix_elements[s].\ 5511 get('diagrams')[d-1]) 5512 else: 5513 subproc_diags.append(None) 5514 diagrams.append(subproc_diags) 5515 config_numbers.append(iconfig + 1) 5516 5517 # Extract number of external particles 5518 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5519 5520 return len(diagrams), \ 5521 self.write_configs_file_from_diagrams(writer, diagrams, 5522 config_numbers, 5523 nexternal, ninitial, 5524 model)
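# Illustrative sketch, not part of export_v4.py: configs without any
# contributing diagram are skipped while the surviving config numbers are
# recorded.  diagrams_for_config below is hypothetical (one row per config,
# one diagram number per subprocess, 0 = no diagram).
def sketch_kept_configs(diagrams_for_config=((1, 0), (0, 0), (2, 3))):
    return [iconfig + 1
            for iconfig, config in enumerate(diagrams_for_config)
            if set(config) != set([0])]
# sketch_kept_configs() -> [1, 3]   (the all-zero config 2 is dropped)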
5525 5526 #=========================================================================== 5527 # write_run_configs_file 5528 #===========================================================================
5529 - def write_run_config_file(self, writer):
5530 """Write the run_configs.inc file for MadEvent""" 5531 5532 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5533 if self.proc_characteristic['loop_induced']: 5534 job_per_chan = 1 5535 else: 5536 job_per_chan = 2 5537 text = open(path).read() % {'chanperjob':job_per_chan} 5538 writer.write(text) 5539 return True
5540 5541 5542 #=========================================================================== 5543 # write_leshouche_file 5544 #===========================================================================
5545 - def write_leshouche_file(self, writer, subproc_group):
5546 """Write the leshouche.inc file for MG4""" 5547 5548 all_lines = [] 5549 5550 for iproc, matrix_element in \ 5551 enumerate(subproc_group.get('matrix_elements')): 5552 all_lines.extend(self.get_leshouche_lines(matrix_element, 5553 iproc)) 5554 # Write the file 5555 writer.writelines(all_lines) 5556 return True
5557 5558
5559 - def finalize(self,*args, **opts):
5560 5561 super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts) 5562 # ensure that the grouping information is set to the correct value 5563 self.proc_characteristic['grouped_matrix'] = True
5564 5565 5566 #=============================================================================== 5567 # UFO_model_to_mg4 5568 #=============================================================================== 5569 5570 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
5571 5572 -class UFO_model_to_mg4(object):
5573 """ A converter of the UFO-MG5 Model to the MG4 format """ 5574 5575 # The list below shows the only variables the user is allowed to change 5576 # for each PS point. If any other is changed, then calling 5577 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5578 # correctly account for the change. 5579 PS_dependent_key = ['aS','MU_R'] 5580 mp_complex_format = 'complex*32' 5581 mp_real_format = 'real*16' 5582 # Warning, it is crucial that none of the couplings/parameters of the model 5583 # starts with this prefix. A check for this should be added. 5584 # It can be changed via the mp_prefix attribute of check_param_card.ParamCard 5585 mp_prefix = check_param_card.ParamCard.mp_prefix 5586
5587 - def __init__(self, model, output_path, opt=None):
5588 """ initialization of the objects """ 5589 5590 self.model = model 5591 self.model_name = model['name'] 5592 self.dir_path = output_path 5593 5594 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5595 'loop_induced': False} 5596 if opt: 5597 self.opt.update(opt) 5598 5599 self.coups_dep = [] # (name, expression, type) 5600 self.coups_indep = [] # (name, expression, type) 5601 self.params_dep = [] # (name, expression, type) 5602 self.params_indep = [] # (name, expression, type) 5603 self.params_ext = [] # external parameter 5604 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5605 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5606 5607
5608 - def pass_parameter_to_case_insensitive(self):
5609 """modify the parameter if some of them are identical up to the case""" 5610 5611 lower_dict={} 5612 duplicate = set() 5613 keys = list(self.model['parameters'].keys()) 5614 keys.sort() 5615 for key in keys: 5616 for param in self.model['parameters'][key]: 5617 lower_name = param.name.lower() 5618 if not lower_name: 5619 continue 5620 try: 5621 lower_dict[lower_name].append(param) 5622 except KeyError as error: 5623 lower_dict[lower_name] = [param] 5624 else: 5625 duplicate.add(lower_name) 5626 logger.debug('%s is define both as lower case and upper case.' 5627 % lower_name) 5628 if not duplicate: 5629 return 5630 5631 re_expr = r'''\b(%s)\b''' 5632 to_change = [] 5633 change={} 5634 for value in duplicate: 5635 for i, var in enumerate(lower_dict[value]): 5636 to_change.append(var.name) 5637 new_name = '%s%s' % (var.name.lower(), 5638 ('__%d'%(i+1) if i>0 else '')) 5639 change[var.name] = new_name 5640 var.name = new_name 5641 5642 # Apply the modification to the map_CTcoup_CTparam of the model 5643 # if it has one (giving for each coupling the CT parameters whcih 5644 # are necessary and which should be exported to the model. 5645 if hasattr(self.model,'map_CTcoup_CTparam'): 5646 for coup, ctparams in self.model.map_CTcoup_CTparam: 5647 for i, ctparam in enumerate(ctparams): 5648 try: 5649 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5650 except KeyError: 5651 pass 5652 5653 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5654 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5655 5656 # change parameters 5657 for key in keys: 5658 if key == ('external',): 5659 continue 5660 for param in self.model['parameters'][key]: 5661 param.expr = rep_pattern.sub(replace, param.expr) 5662 5663 # change couplings 5664 for key in self.model['couplings'].keys(): 5665 for coup in self.model['couplings'][key]: 5666 coup.expr = rep_pattern.sub(replace, coup.expr) 5667 5668 # change mass/width 5669 for part in self.model['particles']: 5670 if str(part.get('mass')) in to_change: 5671 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5672 if str(part.get('width')) in to_change: 5673 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
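# Illustrative sketch, not part of export_v4.py: how names that collide up to
# case are renamed and how the word-boundary pattern rewrites expressions.
# The names and expression below are hypothetical.
import re
def sketch_case_insensitive_rename(names=("Mass", "MASS"),
                                   expr="2*Mass + MASS"):
    change = {}
    for i, name in enumerate(names):
        change[name] = "%s%s" % (name.lower(),
                                 "__%d" % (i + 1) if i > 0 else "")
    rep_pattern = re.compile(r"\b(%s)\b" % "|".join(change))
    replace = lambda match: change[match.groups()[0]]
    return change, rep_pattern.sub(replace, expr)
# -> ({'Mass': 'mass', 'MASS': 'mass__2'}, '2*mass + mass__2')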
5674
5675 - def refactorize(self, wanted_couplings = []):
5676 """modify the couplings to fit the MG4 convention """ 5677 5678 # Keep only separation in alphaS 5679 keys = list(self.model['parameters'].keys()) 5680 keys.sort(key=len) 5681 for key in keys: 5682 to_add = [o for o in self.model['parameters'][key] if o.name] 5683 5684 if key == ('external',): 5685 self.params_ext += to_add 5686 elif any([(k in key) for k in self.PS_dependent_key]): 5687 self.params_dep += to_add 5688 else: 5689 self.params_indep += to_add 5690 # same for couplings 5691 keys = list(self.model['couplings'].keys()) 5692 keys.sort(key=len) 5693 for key, coup_list in self.model['couplings'].items(): 5694 if any([(k in key) for k in self.PS_dependent_key]): 5695 self.coups_dep += [c for c in coup_list if 5696 (not wanted_couplings or c.name in \ 5697 wanted_couplings)] 5698 else: 5699 self.coups_indep += [c for c in coup_list if 5700 (not wanted_couplings or c.name in \ 5701 wanted_couplings)] 5702 5703 # MG4 uses G and not aS as its basic object for alphaS-related computations 5704 # Pass G to the independent list 5705 if 'G' in self.params_dep: 5706 index = self.params_dep.index('G') 5707 G = self.params_dep.pop(index) 5708 # G.expr = '2*cmath.sqrt(as*pi)' 5709 # self.params_indep.insert(0, self.params_dep.pop(index)) 5710 # No need to add it if not defined 5711 5712 if 'aS' not in self.params_ext: 5713 logger.critical('aS not defined as an external parameter, adding it!') 5714 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5715 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5716 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
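# Illustrative sketch, not part of export_v4.py: the dependent/independent
# split above keys on whether 'aS' or 'MU_R' appears in the dependency tuple
# of a parameter block.  The dictionary below is a hypothetical stand-in for
# self.model['parameters'].
def sketch_refactorize(parameters={('external',): ['Gf', 'aS'],
                                   ('aS',): ['G'],
                                   (): ['MDL_MW']}):
    PS_dependent_key = ['aS', 'MU_R']
    params_ext, params_dep, params_indep = [], [], []
    for key, values in parameters.items():
        if key == ('external',):
            params_ext += values
        elif any(k in key for k in PS_dependent_key):
            params_dep += values
        else:
            params_indep += values
    return params_ext, params_dep, params_indep
# sketch_refactorize() -> (['Gf', 'aS'], ['G'], ['MDL_MW'])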
5717 - def build(self, wanted_couplings = [], full=True):
5718 """Modify the couplings to fit the MG4 convention and create all the 5719 different files""" 5720 5721 self.pass_parameter_to_case_insensitive() 5722 self.refactorize(wanted_couplings) 5723 5724 # write the files 5725 if full: 5726 if wanted_couplings: 5727 # extract the wanted CT parameters 5728 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5729 self.write_all()
5730 5731
5732 - def open(self, name, comment='c', format='default'):
5733 """ Open the file name in the correct directory and with a valid 5734 header.""" 5735 5736 file_path = pjoin(self.dir_path, name) 5737 5738 if format == 'fortran': 5739 fsock = writers.FortranWriter(file_path, 'w') 5740 write_class = io.FileIO 5741 5742 write_class.writelines(fsock, comment * 77 + '\n') 5743 write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \ 5744 {'comment': comment + (6 - len(comment)) * ' '}) 5745 write_class.writelines(fsock, comment * 77 + '\n\n') 5746 else: 5747 fsock = open(file_path, 'w') 5748 fsock.writelines(comment * 77 + '\n') 5749 fsock.writelines('%(comment)s written by the UFO converter\n' % \ 5750 {'comment': comment + (6 - len(comment)) * ' '}) 5751 fsock.writelines(comment * 77 + '\n\n') 5752 return fsock
5753 5754
5755 - def write_all(self):
5756 """ write all the files """ 5757 #write the part related to the external parameter 5758 self.create_ident_card() 5759 self.create_param_read() 5760 5761 #write the definition of the parameter 5762 self.create_input() 5763 self.create_intparam_def(dp=True,mp=False) 5764 if self.opt['mp']: 5765 self.create_intparam_def(dp=False,mp=True) 5766 5767 # definition of the coupling. 5768 self.create_actualize_mp_ext_param_inc() 5769 self.create_coupl_inc() 5770 self.create_write_couplings() 5771 self.create_couplings() 5772 5773 # the makefile 5774 self.create_makeinc() 5775 self.create_param_write() 5776 5777 # The model functions 5778 self.create_model_functions_inc() 5779 self.create_model_functions_def() 5780 5781 # The param_card.dat 5782 self.create_param_card() 5783 5784 5785 # All the standard files 5786 self.copy_standard_file()
5787 5788 ############################################################################ 5789 ## ROUTINE CREATING THE FILES ############################################ 5790 ############################################################################ 5791
5792 - def copy_standard_file(self):
5793 """Copy the standard files for the fortran model.""" 5794 5795 #copy the library files 5796 file_to_link = ['formats.inc','printout.f', \ 5797 'rw_para.f', 'testprog.f'] 5798 5799 for filename in file_to_link: 5800 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5801 self.dir_path) 5802 5803 file = open(os.path.join(MG5DIR,\ 5804 'models/template_files/fortran/rw_para.f')).read() 5805 5806 includes=["include \'coupl.inc\'","include \'input.inc\'", 5807 "include \'model_functions.inc\'"] 5808 if self.opt['mp']: 5809 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5810 # In standalone and madloop we do no use the compiled param card but 5811 # still parse the .dat one so we must load it. 5812 if self.opt['loop_induced']: 5813 #loop induced follow MadEvent way to handle the card. 5814 load_card = '' 5815 lha_read_filename='lha_read.f' 5816 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5817 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5818 lha_read_filename='lha_read_mp.f' 5819 elif self.opt['export_format'].startswith('standalone') \ 5820 or self.opt['export_format'] in ['madweight', 'plugin']\ 5821 or self.opt['export_format'].startswith('matchbox'): 5822 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5823 lha_read_filename='lha_read.f' 5824 else: 5825 load_card = '' 5826 lha_read_filename='lha_read.f' 5827 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5828 os.path.join(self.dir_path,'lha_read.f')) 5829 5830 file=file%{'includes':'\n '.join(includes), 5831 'load_card':load_card} 5832 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5833 writer.writelines(file) 5834 writer.close() 5835 5836 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5837 or self.opt['loop_induced']: 5838 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5839 self.dir_path + '/makefile') 5840 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5841 path = pjoin(self.dir_path, 'makefile') 5842 text = open(path).read() 5843 text = text.replace('madevent','aMCatNLO') 5844 open(path, 'w').writelines(text) 5845 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5846 'madloop','madloop_optimized', 'standalone_rw', 5847 'madweight','matchbox','madloop_matchbox', 'plugin']: 5848 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5849 self.dir_path + '/makefile') 5850 #elif self.opt['export_format'] in []: 5851 #pass 5852 else: 5853 raise MadGraph5Error('Unknown format')
5854
5855 - def create_coupl_inc(self):
5856 """ write coupling.inc """ 5857 5858 fsock = self.open('coupl.inc', format='fortran') 5859 if self.opt['mp']: 5860 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5861 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5862 format='fortran') 5863 5864 # Write header 5865 header = """double precision G 5866 common/strong/ G 5867 5868 double complex gal(2) 5869 common/weak/ gal 5870 5871 double precision MU_R 5872 common/rscale/ MU_R 5873 5874 double precision Nf 5875 parameter(Nf=%d) 5876 """ % self.model.get_nflav() 5877 5878 fsock.writelines(header) 5879 5880 if self.opt['mp']: 5881 header = """%(real_mp_format)s %(mp_prefix)sG 5882 common/MP_strong/ %(mp_prefix)sG 5883 5884 %(complex_mp_format)s %(mp_prefix)sgal(2) 5885 common/MP_weak/ %(mp_prefix)sgal 5886 5887 %(complex_mp_format)s %(mp_prefix)sMU_R 5888 common/MP_rscale/ %(mp_prefix)sMU_R 5889 5890 """ 5891 5892 5893 5894 5895 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5896 'complex_mp_format':self.mp_complex_format, 5897 'mp_prefix':self.mp_prefix}) 5898 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5899 'complex_mp_format':self.mp_complex_format, 5900 'mp_prefix':''}) 5901 5902 # Write the Mass definition/ common block 5903 masses = set() 5904 widths = set() 5905 if self.opt['complex_mass']: 5906 complex_mass = set() 5907 5908 for particle in self.model.get('particles'): 5909 #find masses 5910 one_mass = particle.get('mass') 5911 if one_mass.lower() != 'zero': 5912 masses.add(one_mass) 5913 5914 # find width 5915 one_width = particle.get('width') 5916 if one_width.lower() != 'zero': 5917 widths.add(one_width) 5918 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5919 complex_mass.add('CMASS_%s' % one_mass) 5920 5921 if masses: 5922 fsock.writelines('double precision '+','.join(masses)+'\n') 5923 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5924 if self.opt['mp']: 5925 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5926 ','.join(masses)+'\n') 5927 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5928 ','.join(masses)+'\n\n') 5929 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5930 self.mp_prefix+m for m in masses])+'\n') 5931 mp_fsock.writelines('common/MP_masses/ '+\ 5932 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5933 5934 if widths: 5935 fsock.writelines('double precision '+','.join(widths)+'\n') 5936 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5937 if self.opt['mp']: 5938 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5939 ','.join(widths)+'\n') 5940 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5941 ','.join(widths)+'\n\n') 5942 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5943 self.mp_prefix+w for w in widths])+'\n') 5944 mp_fsock.writelines('common/MP_widths/ '+\ 5945 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5946 5947 # Write the Couplings 5948 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5949 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5950 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5951 if self.opt['mp']: 5952 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5953 ','.join(coupling_list)+'\n') 5954 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5955 ','.join(coupling_list)+'\n\n') 5956 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5957 self.mp_prefix+c for c in coupling_list])+'\n') 5958 mp_fsock.writelines('common/MP_couplings/ '+\ 
5959 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5960 5961 # Write complex mass for complex mass scheme (if activated) 5962 if self.opt['complex_mass'] and complex_mass: 5963 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5964 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5965 if self.opt['mp']: 5966 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5967 ','.join(complex_mass)+'\n') 5968 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5969 ','.join(complex_mass)+'\n\n') 5970 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5971 self.mp_prefix+cm for cm in complex_mass])+'\n') 5972 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5973 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5974
5975 - def create_write_couplings(self):
5976 """ write the file coupl_write.inc """ 5977 5978 fsock = self.open('coupl_write.inc', format='fortran') 5979 5980 fsock.writelines("""write(*,*) ' Couplings of %s' 5981 write(*,*) ' ---------------------------------' 5982 write(*,*) ' '""" % self.model_name) 5983 def format(coupl): 5984 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5985 5986 # Write the Couplings 5987 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5988 fsock.writelines('\n'.join(lines)) 5989 5990
5991 - def create_input(self):
5992 """create input.inc containing the definition of the parameters""" 5993 5994 fsock = self.open('input.inc', format='fortran') 5995 if self.opt['mp']: 5996 mp_fsock = self.open('mp_input.inc', format='fortran') 5997 5998 #find mass/ width since they are already define 5999 already_def = set() 6000 for particle in self.model.get('particles'): 6001 already_def.add(particle.get('mass').lower()) 6002 already_def.add(particle.get('width').lower()) 6003 if self.opt['complex_mass']: 6004 already_def.add('cmass_%s' % particle.get('mass').lower()) 6005 6006 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 6007 name.lower() not in already_def 6008 6009 real_parameters = [param.name for param in self.params_dep + 6010 self.params_indep if param.type == 'real' 6011 and is_valid(param.name)] 6012 6013 real_parameters += [param.name for param in self.params_ext 6014 if param.type == 'real'and 6015 is_valid(param.name)] 6016 6017 # check the parameter is a CT parameter or not 6018 # if yes, just use the needed ones 6019 real_parameters = [param for param in real_parameters \ 6020 if self.check_needed_param(param)] 6021 6022 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 6023 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 6024 if self.opt['mp']: 6025 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 6026 self.mp_prefix+p for p in real_parameters])+'\n') 6027 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 6028 self.mp_prefix+p for p in real_parameters])+'\n\n') 6029 6030 complex_parameters = [param.name for param in self.params_dep + 6031 self.params_indep if param.type == 'complex' and 6032 is_valid(param.name)] 6033 6034 # check the parameter is a CT parameter or not 6035 # if yes, just use the needed ones 6036 complex_parameters = [param for param in complex_parameters \ 6037 if self.check_needed_param(param)] 6038 6039 if complex_parameters: 6040 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 6041 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 6042 if self.opt['mp']: 6043 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 6044 self.mp_prefix+p for p in complex_parameters])+'\n') 6045 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 6046 self.mp_prefix+p for p in complex_parameters])+'\n\n')
6047
6048 - def check_needed_param(self, param):
6049 """ Returns whether the parameter in argument is needed for this 6050 specific computation or not.""" 6051 6052 # If this is a leading order model or if there was no CT parameter 6053 # employed in this NLO model, one can directly return that the 6054 # parameter is needed since only CTParameters are filtered. 6055 if not hasattr(self, 'allCTparameters') or \ 6056 self.allCTparameters is None or self.usedCTparameters is None or \ 6057 len(self.allCTparameters)==0: 6058 return True 6059 6060 # We must allow the conjugate shorthand for the complex parameter as 6061 # well, so we check whether either the parameter name or its name with 6062 # 'conjg__' substituted with '' is present in the list. 6063 # This is acceptable even if some parameter had an original name 6064 # including 'conjg__' in it, because at worst we export a parameter 6065 # that was not needed. 6066 param = param.lower() 6067 cjg_param = param.replace('conjg__','',1) 6068 6069 # First make sure it is a CTparameter 6070 if param not in self.allCTparameters and \ 6071 cjg_param not in self.allCTparameters: 6072 return True 6073 6074 # Now check if it is in the list of CTparameters actually used 6075 return (param in self.usedCTparameters or \ 6076 cjg_param in self.usedCTparameters)
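# Illustrative sketch, not part of export_v4.py: the conjugate-shorthand test
# used above.  The CT-parameter lists below are hypothetical, lower-case.
def sketch_check_needed(param, all_ct=('uvwfct_g_1',), used_ct=('uvwfct_g_1',)):
    param = param.lower()
    cjg_param = param.replace('conjg__', '', 1)
    if param not in all_ct and cjg_param not in all_ct:
        return True                    # not a CT parameter: always exported
    return param in used_ct or cjg_param in used_ct
# sketch_check_needed('MDL_MT')            -> True  (ordinary parameter)
# sketch_check_needed('conjg__UVWfct_G_1') -> True  (conjugate of a used one)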
6077
6078 - def extract_needed_CTparam(self,wanted_couplings=[]):
6079 """ Extract which CT parameters are needed given the wanted_couplings""" 6080 6081 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6082 # Setting these lists to None will disable the filtering in 6083 # check_needed_param 6084 self.allCTparameters = None 6085 self.usedCTparameters = None 6086 return 6087 6088 # All CTparameters appearing in all CT couplings 6089 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6090 # Define in this class the list of all CT parameters 6091 self.allCTparameters=list(\ 6092 set(itertools.chain.from_iterable(allCTparameters))) 6093 6094 # All used CT couplings 6095 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6096 allUsedCTCouplings = [coupl for coupl in 6097 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6098 6099 # Now define the list of all CT parameters that are actually used 6100 self.usedCTparameters=list(\ 6101 set(itertools.chain.from_iterable([ 6102 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6103 ]))) 6104 6105 # Finally, make these lists case-insensitive 6106 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6107 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6108
6109 - def create_intparam_def(self, dp=True, mp=False):
6110 """ create intparam_definition.inc setting the internal parameters. 6111 Output the double precision and/or the multiple precision parameters 6112 depending on the parameters dp and mp. If mp only, then the file names 6113 get the 'mp_' prefix. 6114 """ 6115 6116 fsock = self.open('%sintparam_definition.inc'% 6117 ('mp_' if mp and not dp else ''), format='fortran') 6118 6119 fsock.write_comments(\ 6120 "Parameters that should not be recomputed event by event.\n") 6121 fsock.writelines("if(readlha) then\n") 6122 if dp: 6123 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 6124 if mp: 6125 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 6126 6127 for param in self.params_indep: 6128 if param.name == 'ZERO': 6129 continue 6130 # check whether the parameter is a CT parameter 6131 # if yes,just used the needed ones 6132 if not self.check_needed_param(param.name): 6133 continue 6134 if dp: 6135 fsock.writelines("%s = %s\n" % (param.name, 6136 self.p_to_f.parse(param.expr))) 6137 if mp: 6138 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 6139 self.mp_p_to_f.parse(param.expr))) 6140 6141 fsock.writelines('endif') 6142 6143 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 6144 if dp: 6145 fsock.writelines("aS = G**2/4/pi\n") 6146 if mp: 6147 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 6148 for param in self.params_dep: 6149 # check whether the parameter is a CT parameter 6150 # if yes,just used the needed ones 6151 if not self.check_needed_param(param.name): 6152 continue 6153 if dp: 6154 fsock.writelines("%s = %s\n" % (param.name, 6155 self.p_to_f.parse(param.expr))) 6156 elif mp: 6157 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 6158 self.mp_p_to_f.parse(param.expr))) 6159 6160 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 6161 if ('aEWM1',) in self.model['parameters']: 6162 if dp: 6163 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 6164 gal(2) = 1d0 6165 """) 6166 elif mp: 6167 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 6168 %(mp_prefix)sgal(2) = 1d0 6169 """ %{'mp_prefix':self.mp_prefix}) 6170 pass 6171 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 6172 elif ('Gf',) in self.model['parameters']: 6173 if dp: 6174 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf) 6175 gal(2) = 1d0 6176 """) 6177 elif mp: 6178 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 6179 %(mp_prefix)sgal(2) = 1d0 6180 """ %{'mp_prefix':self.mp_prefix}) 6181 pass 6182 else: 6183 if dp: 6184 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 6185 fsock.writelines(""" gal(1) = 1d0 6186 gal(2) = 1d0 6187 """) 6188 elif mp: 6189 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 6190 %(mp_prefix)sgal(2) = 1e0_16 6191 """%{'mp_prefix':self.mp_prefix})
6192 6193
6194 - def create_couplings(self):
6195 """ create couplings.f and all couplingsX.f """ 6196 6197 nb_def_by_file = 25 6198 6199 self.create_couplings_main(nb_def_by_file) 6200 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6201 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6202 6203 for i in range(nb_coup_indep): 6204 # For the independent couplings, we compute the double and multiple 6205 # precision ones together 6206 data = self.coups_indep[nb_def_by_file * i: 6207 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6208 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6209 6210 for i in range(nb_coup_dep): 6211 # For the dependent couplings, we compute the double and multiple 6212 # precision ones in separate subroutines. 6213 data = self.coups_dep[nb_def_by_file * i: 6214 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6215 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6216 dp=True,mp=False) 6217 if self.opt['mp']: 6218 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6219 dp=False,mp=True)
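# Illustrative sketch, not part of export_v4.py: the chunking rule above puts
# at most nb_def_by_file couplings into each generated couplingsX.f file.
# The coupling names below are hypothetical.
def sketch_coupling_chunks(coups=['GC_%d' % i for i in range(1, 31)],
                           nb_def_by_file=25):
    nb_files = 1 + len(coups) // nb_def_by_file
    return [coups[nb_def_by_file * i:
                  min(len(coups), nb_def_by_file * (i + 1))]
            for i in range(nb_files)]
# -> two chunks for 30 couplings: the first with 25 names, the second with 5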
6220 6221
6222 - def create_couplings_main(self, nb_def_by_file=25):
6223 """ create couplings.f """ 6224 6225 fsock = self.open('couplings.f', format='fortran') 6226 6227 fsock.writelines("""subroutine coup() 6228 6229 implicit none 6230 double precision PI, ZERO 6231 logical READLHA 6232 parameter (PI=3.141592653589793d0) 6233 parameter (ZERO=0d0) 6234 include \'model_functions.inc\'""") 6235 if self.opt['mp']: 6236 fsock.writelines("""%s MP__PI, MP__ZERO 6237 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6238 parameter (MP__ZERO=0e0_16) 6239 include \'mp_input.inc\' 6240 include \'mp_coupl.inc\' 6241 """%self.mp_real_format) 6242 fsock.writelines("""include \'input.inc\' 6243 include \'coupl.inc\' 6244 READLHA = .true. 6245 include \'intparam_definition.inc\'""") 6246 if self.opt['mp']: 6247 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 6248 6249 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6250 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6251 6252 fsock.writelines('\n'.join(\ 6253 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 6254 6255 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6256 6257 fsock.writelines('\n'.join(\ 6258 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6259 for i in range(nb_coup_dep)])) 6260 if self.opt['mp']: 6261 fsock.writelines('\n'.join(\ 6262 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6263 for i in range(nb_coup_dep)])) 6264 fsock.writelines('''\n return \n end\n''') 6265 6266 fsock.writelines("""subroutine update_as_param() 6267 6268 implicit none 6269 double precision PI, ZERO 6270 logical READLHA 6271 parameter (PI=3.141592653589793d0) 6272 parameter (ZERO=0d0) 6273 include \'model_functions.inc\'""") 6274 fsock.writelines("""include \'input.inc\' 6275 include \'coupl.inc\' 6276 READLHA = .false.""") 6277 fsock.writelines(""" 6278 include \'intparam_definition.inc\'\n 6279 """) 6280 6281 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6282 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6283 6284 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6285 6286 fsock.writelines('\n'.join(\ 6287 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6288 for i in range(nb_coup_dep)])) 6289 fsock.writelines('''\n return \n end\n''') 6290 6291 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 6292 6293 implicit none 6294 double precision PI 6295 parameter (PI=3.141592653589793d0) 6296 double precision mu_r2, as2 6297 include \'model_functions.inc\'""") 6298 fsock.writelines("""include \'input.inc\' 6299 include \'coupl.inc\'""") 6300 fsock.writelines(""" 6301 if (mu_r2.gt.0d0) MU_R = mu_r2 6302 G = SQRT(4.0d0*PI*AS2) 6303 AS = as2 6304 6305 CALL UPDATE_AS_PARAM() 6306 """) 6307 fsock.writelines('''\n return \n end\n''') 6308 6309 if self.opt['mp']: 6310 fsock.writelines("""subroutine mp_update_as_param() 6311 6312 implicit none 6313 logical READLHA 6314 include \'model_functions.inc\'""") 6315 fsock.writelines("""%s MP__PI, MP__ZERO 6316 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6317 parameter (MP__ZERO=0e0_16) 6318 include \'mp_input.inc\' 6319 include \'mp_coupl.inc\' 6320 """%self.mp_real_format) 6321 fsock.writelines("""include \'input.inc\' 6322 include \'coupl.inc\' 6323 include \'actualize_mp_ext_params.inc\' 6324 READLHA = .false. 
6325 include \'mp_intparam_definition.inc\'\n 6326 """) 6327 6328 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6329 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6330 6331 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6332 6333 fsock.writelines('\n'.join(\ 6334 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6335 for i in range(nb_coup_dep)])) 6336 fsock.writelines('''\n return \n end\n''')
6337
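The routine above therefore emits up to four entry points into couplings.f: coup() (full evaluation after reading the LHA card), update_as_param() and update_as_param2(mu_r2,as2) (re-evaluation of the scale-dependent couplings only), and mp_update_as_param() when multiple precision is active. A small sketch of the call lines it writes, assuming a hypothetical split of two files of scale-independent couplings and one of scale-dependent ones:

    nb_coup_indep, nb_coup_dep = 2, 1                      # hypothetical split
    indep_calls = ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]
    dep_calls = ['call coup%s()' % (nb_coup_indep + i + 1)
                 for i in range(nb_coup_dep)]
    print('\n'.join(indep_calls + dep_calls))
    # call coup1()
    # call coup2()
    # call coup3()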
6338 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
6339 """ create couplings[nb_file].f containing information coming from data. 6340 Outputs the computation of the double precision and/or the multiple 6341 precision couplings depending on the parameters dp and mp. 6342 If mp is True and dp is False, then the prefix 'MP_' is appended to the 6343 filename and subroutine name. 6344 """ 6345 6346 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 6347 nb_file), format='fortran') 6348 fsock.writelines("""subroutine %scoup%s() 6349 6350 implicit none 6351 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 6352 if dp: 6353 fsock.writelines(""" 6354 double precision PI, ZERO 6355 parameter (PI=3.141592653589793d0) 6356 parameter (ZERO=0d0) 6357 include 'input.inc' 6358 include 'coupl.inc'""") 6359 if mp: 6360 fsock.writelines("""%s MP__PI, MP__ZERO 6361 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6362 parameter (MP__ZERO=0e0_16) 6363 include \'mp_input.inc\' 6364 include \'mp_coupl.inc\' 6365 """%self.mp_real_format) 6366 6367 for coupling in data: 6368 if dp: 6369 fsock.writelines('%s = %s' % (coupling.name, 6370 self.p_to_f.parse(coupling.expr))) 6371 if mp: 6372 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 6373 self.mp_p_to_f.parse(coupling.expr))) 6374 fsock.writelines('end')
6375
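Each generated couplingsN.f (or mp_couplingsN.f) is thus a flat list of assignments, one per coupling, with the UFO expression translated to Fortran by self.p_to_f.parse (or self.mp_p_to_f.parse). A sketch of the two line formats, using a made-up coupling name, an already-translated expression, and an assumed multiple-precision prefix of 'MP__':

    name = 'GC_1'                                          # hypothetical coupling
    fortran_expr = '-(ee*(0d0,1d0))/3d0'                   # stands in for p_to_f.parse output
    mp_prefix = 'MP__'                                     # assumed value of self.mp_prefix
    dp_line = '%s = %s' % (name, fortran_expr)
    mp_line = '%s%s = %s' % (mp_prefix, name, fortran_expr)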
6376 - def create_model_functions_inc(self):
6377 """ Create model_functions.inc which contains the various declarations 6378 of auxiliary functions which might be used in the couplings expressions 6379 """ 6380 6381 additional_fct = [] 6382 # check for functions define in the UFO model 6383 ufo_fct = self.model.get('functions') 6384 if ufo_fct: 6385 for fct in ufo_fct: 6386 # already handle by default 6387 if str(fct.name) not in ["complexconjugate", "re", "im", "sec", 6388 "csc", "asec", "acsc", "theta_function", "cond", 6389 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot", 6390 "grreglog","regsqrt"]: 6391 additional_fct.append(fct.name) 6392 6393 fsock = self.open('model_functions.inc', format='fortran') 6394 fsock.writelines("""double complex cond 6395 double complex condif 6396 double complex reglog 6397 double complex reglogp 6398 double complex reglogm 6399 double complex recms 6400 double complex arg 6401 double complex grreglog 6402 double complex regsqrt 6403 %s 6404 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6405 6406 6407 if self.opt['mp']: 6408 fsock.writelines("""%(complex_mp_format)s mp_cond 6409 %(complex_mp_format)s mp_condif 6410 %(complex_mp_format)s mp_reglog 6411 %(complex_mp_format)s mp_reglogp 6412 %(complex_mp_format)s mp_reglogm 6413 %(complex_mp_format)s mp_recms 6414 %(complex_mp_format)s mp_arg 6415 %(complex_mp_format)s mp_grreglog 6416 %(complex_mp_format)s mp_regsqrt 6417 %(additional)s 6418 """ %\ 6419 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6420 'complex_mp_format':self.mp_complex_format 6421 })
6422
6423 - def create_model_functions_def(self):
6424 """ Create model_functions.f which contains the various definitions 6425 of auxiliary functions which might be used in the couplings expressions 6426 Add the functions.f functions for formfactors support 6427 """ 6428 6429 fsock = self.open('model_functions.f', format='fortran') 6430 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6431 implicit none 6432 double complex condition,truecase,falsecase 6433 if(condition.eq.(0.0d0,0.0d0)) then 6434 cond=truecase 6435 else 6436 cond=falsecase 6437 endif 6438 end 6439 6440 double complex function condif(condition,truecase,falsecase) 6441 implicit none 6442 logical condition 6443 double complex truecase,falsecase 6444 if(condition) then 6445 condif=truecase 6446 else 6447 condif=falsecase 6448 endif 6449 end 6450 6451 double complex function recms(condition,expr) 6452 implicit none 6453 logical condition 6454 double complex expr 6455 if(condition)then 6456 recms=expr 6457 else 6458 recms=dcmplx(dble(expr)) 6459 endif 6460 end 6461 6462 double complex function reglog(arg) 6463 implicit none 6464 double complex TWOPII 6465 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6466 double complex arg 6467 if(arg.eq.(0.0d0,0.0d0)) then 6468 reglog=(0.0d0,0.0d0) 6469 else 6470 reglog=log(arg) 6471 endif 6472 end 6473 6474 double complex function reglogp(arg) 6475 implicit none 6476 double complex TWOPII 6477 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6478 double complex arg 6479 if(arg.eq.(0.0d0,0.0d0))then 6480 reglogp=(0.0d0,0.0d0) 6481 else 6482 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6483 reglogp=log(arg) + TWOPII 6484 else 6485 reglogp=log(arg) 6486 endif 6487 endif 6488 end 6489 6490 double complex function reglogm(arg) 6491 implicit none 6492 double complex TWOPII 6493 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6494 double complex arg 6495 if(arg.eq.(0.0d0,0.0d0))then 6496 reglogm=(0.0d0,0.0d0) 6497 else 6498 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6499 reglogm=log(arg) - TWOPII 6500 else 6501 reglogm=log(arg) 6502 endif 6503 endif 6504 end 6505 6506 double complex function regsqrt(arg_in) 6507 implicit none 6508 double complex arg_in 6509 double complex arg 6510 arg=arg_in 6511 if(dabs(dimag(arg)).eq.0.0d0)then 6512 arg=dcmplx(dble(arg),0.0d0) 6513 endif 6514 if(dabs(dble(arg)).eq.0.0d0)then 6515 arg=dcmplx(0.0d0,dimag(arg)) 6516 endif 6517 regsqrt=sqrt(arg) 6518 end 6519 6520 double complex function grreglog(logsw,expr1_in,expr2_in) 6521 implicit none 6522 double complex TWOPII 6523 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6524 double complex expr1_in,expr2_in 6525 double complex expr1,expr2 6526 double precision logsw 6527 double precision imagexpr 6528 logical firstsheet 6529 expr1=expr1_in 6530 expr2=expr2_in 6531 if(dabs(dimag(expr1)).eq.0.0d0)then 6532 expr1=dcmplx(dble(expr1),0.0d0) 6533 endif 6534 if(dabs(dble(expr1)).eq.0.0d0)then 6535 expr1=dcmplx(0.0d0,dimag(expr1)) 6536 endif 6537 if(dabs(dimag(expr2)).eq.0.0d0)then 6538 expr2=dcmplx(dble(expr2),0.0d0) 6539 endif 6540 if(dabs(dble(expr2)).eq.0.0d0)then 6541 expr2=dcmplx(0.0d0,dimag(expr2)) 6542 endif 6543 if(expr1.eq.(0.0d0,0.0d0))then 6544 grreglog=(0.0d0,0.0d0) 6545 else 6546 imagexpr=dimag(expr1)*dimag(expr2) 6547 firstsheet=imagexpr.ge.0.0d0 6548 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6549 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6550 if(firstsheet)then 6551 grreglog=log(expr1) 6552 else 6553 if(dimag(expr1).gt.0.0d0)then 6554 grreglog=log(expr1) - 
logsw*TWOPII 6555 else 6556 grreglog=log(expr1) + logsw*TWOPII 6557 endif 6558 endif 6559 endif 6560 end 6561 6562 double complex function arg(comnum) 6563 implicit none 6564 double complex comnum 6565 double complex iim 6566 iim = (0.0d0,1.0d0) 6567 if(comnum.eq.(0.0d0,0.0d0)) then 6568 arg=(0.0d0,0.0d0) 6569 else 6570 arg=log(comnum/abs(comnum))/iim 6571 endif 6572 end""") 6573 if self.opt['mp']: 6574 fsock.writelines(""" 6575 6576 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6577 implicit none 6578 %(complex_mp_format)s condition,truecase,falsecase 6579 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6580 mp_cond=truecase 6581 else 6582 mp_cond=falsecase 6583 endif 6584 end 6585 6586 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6587 implicit none 6588 logical condition 6589 %(complex_mp_format)s truecase,falsecase 6590 if(condition) then 6591 mp_condif=truecase 6592 else 6593 mp_condif=falsecase 6594 endif 6595 end 6596 6597 %(complex_mp_format)s function mp_recms(condition,expr) 6598 implicit none 6599 logical condition 6600 %(complex_mp_format)s expr 6601 if(condition)then 6602 mp_recms=expr 6603 else 6604 mp_recms=cmplx(real(expr),kind=16) 6605 endif 6606 end 6607 6608 %(complex_mp_format)s function mp_reglog(arg) 6609 implicit none 6610 %(complex_mp_format)s TWOPII 6611 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6612 %(complex_mp_format)s arg 6613 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6614 mp_reglog=(0.0e0_16,0.0e0_16) 6615 else 6616 mp_reglog=log(arg) 6617 endif 6618 end 6619 6620 %(complex_mp_format)s function mp_reglogp(arg) 6621 implicit none 6622 %(complex_mp_format)s TWOPII 6623 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6624 %(complex_mp_format)s arg 6625 if(arg.eq.(0.0e0_16,0.0e0_16))then 6626 mp_reglogp=(0.0e0_16,0.0e0_16) 6627 else 6628 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6629 mp_reglogp=log(arg) + TWOPII 6630 else 6631 mp_reglogp=log(arg) 6632 endif 6633 endif 6634 end 6635 6636 %(complex_mp_format)s function mp_reglogm(arg) 6637 implicit none 6638 %(complex_mp_format)s TWOPII 6639 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6640 %(complex_mp_format)s arg 6641 if(arg.eq.(0.0e0_16,0.0e0_16))then 6642 mp_reglogm=(0.0e0_16,0.0e0_16) 6643 else 6644 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6645 mp_reglogm=log(arg) - TWOPII 6646 else 6647 mp_reglogm=log(arg) 6648 endif 6649 endif 6650 end 6651 6652 %(complex_mp_format)s function mp_regsqrt(arg_in) 6653 implicit none 6654 %(complex_mp_format)s arg_in 6655 %(complex_mp_format)s arg 6656 arg=arg_in 6657 if(abs(imagpart(arg)).eq.0.0e0_16)then 6658 arg=cmplx(real(arg,kind=16),0.0e0_16) 6659 endif 6660 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6661 arg=cmplx(0.0e0_16,imagpart(arg)) 6662 endif 6663 mp_regsqrt=sqrt(arg) 6664 end 6665 6666 6667 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6668 implicit none 6669 %(complex_mp_format)s TWOPII 6670 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6671 %(complex_mp_format)s expr1_in,expr2_in 6672 %(complex_mp_format)s expr1,expr2 6673 %(real_mp_format)s logsw 6674 %(real_mp_format)s imagexpr 6675 logical firstsheet 6676 expr1=expr1_in 6677 expr2=expr2_in 6678 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6679 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6680 endif 6681 
if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6682 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6683 endif 6684 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6685 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6686 endif 6687 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6688 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6689 endif 6690 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6691 mp_grreglog=(0.0e0_16,0.0e0_16) 6692 else 6693 imagexpr=imagpart(expr1)*imagpart(expr2) 6694 firstsheet=imagexpr.ge.0.0e0_16 6695 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6696 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6697 if(firstsheet)then 6698 mp_grreglog=log(expr1) 6699 else 6700 if(imagpart(expr1).gt.0.0e0_16)then 6701 mp_grreglog=log(expr1) - logsw*TWOPII 6702 else 6703 mp_grreglog=log(expr1) + logsw*TWOPII 6704 endif 6705 endif 6706 endif 6707 end 6708 6709 %(complex_mp_format)s function mp_arg(comnum) 6710 implicit none 6711 %(complex_mp_format)s comnum 6712 %(complex_mp_format)s imm 6713 imm = (0.0e0_16,1.0e0_16) 6714 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6715 mp_arg=(0.0e0_16,0.0e0_16) 6716 else 6717 mp_arg=log(comnum/abs(comnum))/imm 6718 endif 6719 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6720 6721 6722 #check for the file functions.f 6723 model_path = self.model.get('modelpath') 6724 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6725 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6726 input = pjoin(model_path,'Fortran','functions.f') 6727 fsock.writelines(open(input).read()) 6728 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6729 6730 # check for functions define in the UFO model 6731 ufo_fct = self.model.get('functions') 6732 if ufo_fct: 6733 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6734 done = [] 6735 for fct in ufo_fct: 6736 # already handle by default 6737 if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6738 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6739 "grreglog","regsqrt"] + done: 6740 done.append(str(fct.name.lower())) 6741 ufo_fct_template = """ 6742 double complex function %(name)s(%(args)s) 6743 implicit none 6744 double complex %(args)s 6745 %(definitions)s 6746 %(name)s = %(fct)s 6747 6748 return 6749 end 6750 """ 6751 str_fct = self.p_to_f.parse(fct.expr) 6752 if not self.p_to_f.to_define: 6753 definitions = [] 6754 else: 6755 definitions=[] 6756 for d in self.p_to_f.to_define: 6757 if d == 'pi': 6758 definitions.append(' double precision pi') 6759 definitions.append(' data pi /3.1415926535897932d0/') 6760 else: 6761 definitions.append(' double complex %s' % d) 6762 6763 text = ufo_fct_template % { 6764 'name': fct.name, 6765 'args': ", ".join(fct.arguments), 6766 'fct': str_fct, 6767 'definitions': '\n'.join(definitions) 6768 } 6769 6770 fsock.writelines(text) 6771 if self.opt['mp']: 6772 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6773 for fct in ufo_fct: 6774 # already handle by default 6775 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6776 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6777 "grreglog","regsqrt"]: 6778 ufo_fct_template = """ 6779 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6780 implicit none 6781 %(complex_mp_format)s mp__%(args)s 6782 %(definitions)s 6783 mp_%(name)s = %(fct)s 6784 6785 return 6786 end 6787 """ 6788 str_fct = self.mp_p_to_f.parse(fct.expr) 6789 if not 
self.mp_p_to_f.to_define: 6790 definitions = [] 6791 else: 6792 definitions=[] 6793 for d in self.mp_p_to_f.to_define: 6794 if d == 'pi': 6795 definitions.append(' %s mp__pi' % self.mp_real_format) 6796 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6797 else: 6798 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6799 text = ufo_fct_template % { 6800 'name': fct.name, 6801 'args': ", mp__".join(fct.arguments), 6802 'fct': str_fct, 6803 'definitions': '\n'.join(definitions), 6804 'complex_mp_format': self.mp_complex_format 6805 } 6806 fsock.writelines(text) 6807 6808 6809 6810 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6811 6812 6813
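For UFO models that define extra functions, the template above is filled once per function; the Fortran body comes from self.p_to_f.parse and any symbols that parser flags in to_define are declared first. A sketch of the substitution for a hypothetical one-argument function myfct(x) = x**2:

    ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s
          return
          end
    """
    text = ufo_fct_template % {'name': 'myfct', 'args': 'x',
                               'fct': 'x**2', 'definitions': ''}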
6814 - def create_makeinc(self):
6815 """create makeinc.inc containing the files to compile """ 6816 6817 fsock = self.open('makeinc.inc', comment='#') 6818 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6819 text += ' model_functions.o ' 6820 6821 nb_coup_indep = 1 + len(self.coups_dep) // 25 6822 nb_coup_dep = 1 + len(self.coups_indep) // 25 6823 couplings_files=['couplings%s.o' % (i+1) \ 6824 for i in range(nb_coup_dep + nb_coup_indep) ] 6825 if self.opt['mp']: 6826 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6827 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6828 text += ' '.join(couplings_files) 6829 fsock.writelines(text)
6830
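The resulting makeinc.inc is a single MODEL line listing the object files the Source makefile must build. Note that nb_coup_indep and nb_coup_dep are computed here from coups_dep and coups_indep respectively, i.e. with the names apparently swapped relative to create_couplings; this seems harmless because only their sum (the total number of couplingsN.f files) and the offset at which the mp_ files start enter the list. A sketch for a hypothetical split of two scale-independent files and one scale-dependent file, with multiple precision on:

    n_indep_files, n_dep_files = 2, 1                      # hypothetical split
    objs = ['couplings%s.o' % (i + 1) for i in range(n_indep_files + n_dep_files)]
    objs += ['mp_couplings%s.o' % (i + 1)
             for i in range(n_indep_files, n_indep_files + n_dep_files)]
    model_line = ('MODEL = couplings.o lha_read.o printout.o rw_para.o'
                  ' model_functions.o ' + ' '.join(objs))
    # -> ... couplings1.o couplings2.o couplings3.o mp_couplings3.o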
6831 - def create_param_write(self):
6832 """ create param_write """ 6833 6834 fsock = self.open('param_write.inc', format='fortran') 6835 6836 fsock.writelines("""write(*,*) ' External Params' 6837 write(*,*) ' ---------------------------------' 6838 write(*,*) ' '""") 6839 def format(name): 6840 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6841 6842 # Write the external parameter 6843 lines = [format(param.name) for param in self.params_ext] 6844 fsock.writelines('\n'.join(lines)) 6845 6846 fsock.writelines("""write(*,*) ' Internal Params' 6847 write(*,*) ' ---------------------------------' 6848 write(*,*) ' '""") 6849 lines = [format(data.name) for data in self.params_indep 6850 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6851 fsock.writelines('\n'.join(lines)) 6852 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6853 write(*,*) ' ----------------------------------------' 6854 write(*,*) ' '""") 6855 lines = [format(data.name) for data in self.params_dep \ 6856 if self.check_needed_param(data.name)] 6857 6858 fsock.writelines('\n'.join(lines)) 6859 6860 6861
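The local format helper simply echoes each parameter name and value in the generated Fortran; for example, for a hypothetical external parameter MT:

    def fmt(name):
        return "write(*,*) '%(name)s = ', %(name)s" % {'name': name}

    assert fmt('MT') == "write(*,*) 'MT = ', MT"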
6862 - def create_ident_card(self):
6863 """ create the ident_card.dat """ 6864 6865 def format(parameter): 6866 """return the line for the ident_card corresponding to this parameter""" 6867 colum = [parameter.lhablock.lower()] + \ 6868 [str(value) for value in parameter.lhacode] + \ 6869 [parameter.name] 6870 if not parameter.name: 6871 return '' 6872 return ' '.join(colum)+'\n'
6873 6874 fsock = self.open('ident_card.dat') 6875 6876 external_param = [format(param) for param in self.params_ext] 6877 fsock.writelines('\n'.join(external_param)) 6878
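Each ident_card.dat line is the LHA block name, the LHA code(s) and the parameter name, separated by spaces. For a hypothetical top-mass entry (block 'mass', code 6, name 'MT') the helper above produces:

    colum = ['mass'] + [str(value) for value in [6]] + ['MT']
    line = ' '.join(colum) + '\n'                          # -> 'mass 6 MT\n'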
6879 - def create_actualize_mp_ext_param_inc(self):
6880 """ create the actualize_mp_ext_params.inc code """ 6881 6882 # In principle one should actualize all external parameters, but for now it is 6883 # hardcoded that only AS and MU_R can be dynamically changed by the user, 6884 # so we only update those ones. 6885 # Of course, to be on the safe side, one could decide to update all 6886 # external parameters. 6887 update_params_list=[p for p in self.params_ext if p.name in 6888 self.PS_dependent_key] 6889 6890 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6891 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6892 for param in update_params_list] 6893 # When read_lha is false, it is G which is taken as input and not AS, so 6894 # this is what should be reset here too. 6895 if 'aS' in [param.name for param in update_params_list]: 6896 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6897 6898 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6899 fsock.writelines('\n'.join(res_strings))
6900
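The generated include just copies the double-precision value of every phase-space dependent external parameter into its multiple-precision counterpart, plus G when aS is among them. A sketch of the emitted lines, assuming the prefix is 'MP__' and the parameters are aS and MU_R:

    mp_prefix = 'MP__'                                     # assumed self.mp_prefix
    update_params = ['aS', 'MU_R']                         # hypothetical PS-dependent set
    res_strings = ['%s%s=%s' % (mp_prefix, name, name) for name in update_params]
    res_strings.append('%sG=G' % mp_prefix)                # added because 'aS' is present
    # MP__aS=aS / MP__MU_R=MU_R / MP__G=G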
6901 - def create_param_read(self):
6902 """create param_read""" 6903 6904 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6905 or self.opt['loop_induced']: 6906 fsock = self.open('param_read.inc', format='fortran') 6907 fsock.writelines(' include \'../param_card.inc\'') 6908 return 6909 6910 def format_line(parameter): 6911 """return the line for the ident_card corresponding to this 6912 parameter""" 6913 template = \ 6914 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6915 % {'name': parameter.name, 6916 'value': self.p_to_f.parse(str(parameter.value.real))} 6917 if self.opt['mp']: 6918 template = template+ \ 6919 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6920 "%(mp_prefix)s%(name)s,%(value)s)") \ 6921 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6922 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6923 6924 if parameter.lhablock.lower() == 'loop': 6925 template = template.replace('LHA_get_real', 'LHA_get_real_silent') 6926 6927 return template 6928 6929 fsock = self.open('param_read.inc', format='fortran') 6930 res_strings = [format_line(param) \ 6931 for param in self.params_ext] 6932 6933 # Correct width sign for Majorana particles (where the width 6934 # and mass need to have the same sign) 6935 for particle in self.model.get('particles'): 6936 if particle.is_fermion() and particle.get('self_antipart') and \ 6937 particle.get('width').lower() != 'zero': 6938 6939 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6940 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6941 if self.opt['mp']: 6942 res_strings.append(\ 6943 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6944 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6945 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6946 6947 fsock.writelines('\n'.join(res_strings)) 6948 6949 6950 @staticmethod
6951 - def create_param_card_static(model, output_path, rule_card_path=False, 6952 mssm_convert=True, write_special=True):
6953 """ create the param_card.dat for a given model --static method-- """ 6954 #1. Check if a default param_card is present: 6955 done = False 6956 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6957 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6958 model_path = model.get('modelpath') 6959 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6960 done = True 6961 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6962 output_path) 6963 if not done: 6964 param_writer.ParamCardWriter(model, output_path, write_special=write_special) 6965 6966 if rule_card_path: 6967 if hasattr(model, 'rule_card'): 6968 model.rule_card.write_file(rule_card_path) 6969 6970 if mssm_convert: 6971 model_name = model.get('name') 6972 # If MSSM, convert the card to SLHA1 6973 if model_name == 'mssm' or model_name.startswith('mssm-'): 6974 import models.check_param_card as translator 6975 # Check the format of the param_card for Pythia and make it correct 6976 if rule_card_path: 6977 translator.make_valid_param_card(output_path, rule_card_path) 6978 translator.convert_to_slha1(output_path)
6979
6980 - def create_param_card(self, write_special=True):
6981 """ create the param_card.dat """ 6982 6983 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6984 if not hasattr(self.model, 'rule_card'): 6985 rule_card=False 6986 write_special = True 6987 if 'exporter' in self.opt: 6988 import madgraph.loop.loop_exporters as loop_exporters 6989 import madgraph.iolibs.export_fks as export_fks 6990 write_special = False 6991 if issubclass(self.opt['exporter'], loop_exporters.LoopProcessExporterFortranSA): 6992 write_special = True 6993 if issubclass(self.opt['exporter'],(loop_exporters.LoopInducedExporterME,export_fks.ProcessExporterFortranFKS)): 6994 write_special = False 6995 6996 self.create_param_card_static(self.model, 6997 output_path=pjoin(self.dir_path, 'param_card.dat'), 6998 rule_card_path=rule_card, 6999 mssm_convert=True, 7000 write_special=write_special)
7001
7002 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
7003 """ Determine which Export_v4 class is required. cmd is the command 7004 interface containing all potential usefull information. 7005 The output_type argument specifies from which context the output 7006 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 7007 and 'default' for tree-level outputs.""" 7008 7009 opt = dict(cmd.options) 7010 opt['output_options'] = cmd_options 7011 7012 # ========================================================================== 7013 # First check whether Ninja must be installed. 7014 # Ninja would only be required if: 7015 # a) Loop optimized output is selected 7016 # b) the process gathered from the amplitude generated use loops 7017 7018 if len(cmd._curr_amps)>0: 7019 try: 7020 curr_proc = cmd._curr_amps[0].get('process') 7021 except base_objects.PhysicsObject.PhysicsObjectError: 7022 curr_proc = None 7023 elif hasattr(cmd,'_fks_multi_proc') and \ 7024 len(cmd._fks_multi_proc.get('process_definitions'))>0: 7025 curr_proc = cmd._fks_multi_proc.get('process_definitions')[0] 7026 else: 7027 curr_proc = None 7028 7029 requires_reduction_tool = opt['loop_optimized_output'] and \ 7030 (not curr_proc is None) and \ 7031 (curr_proc.get('perturbation_couplings') != [] and \ 7032 not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly']) 7033 7034 # An installation is required then, but only if the specified path is the 7035 # default local one and that the Ninja library appears missing. 7036 if requires_reduction_tool: 7037 cmd.install_reduction_library() 7038 7039 # ========================================================================== 7040 # First treat the MadLoop5 standalone case 7041 MadLoop_SA_options = {'clean': not noclean, 7042 'complex_mass':cmd.options['complex_mass_scheme'], 7043 'export_format':'madloop', 7044 'mp':True, 7045 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 7046 'cuttools_dir': cmd._cuttools_dir, 7047 'iregi_dir':cmd._iregi_dir, 7048 'golem_dir':cmd.options['golem'], 7049 'samurai_dir':cmd.options['samurai'], 7050 'ninja_dir':cmd.options['ninja'], 7051 'collier_dir':cmd.options['collier'], 7052 'fortran_compiler':cmd.options['fortran_compiler'], 7053 'f2py_compiler':cmd.options['f2py_compiler'], 7054 'output_dependencies':cmd.options['output_dependencies'], 7055 'SubProc_prefix':'P', 7056 'compute_color_flows':cmd.options['loop_color_flows'], 7057 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '', 7058 'cluster_local_path': cmd.options['cluster_local_path'], 7059 'output_options': cmd_options 7060 } 7061 7062 if output_type.startswith('madloop'): 7063 import madgraph.loop.loop_exporters as loop_exporters 7064 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 7065 ExporterClass=None 7066 if not cmd.options['loop_optimized_output']: 7067 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 7068 else: 7069 if output_type == "madloop": 7070 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 7071 MadLoop_SA_options['export_format'] = 'madloop_optimized' 7072 elif output_type == "madloop_matchbox": 7073 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 7074 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 7075 else: 7076 raise Exception("output_type not recognize %s" % output_type) 7077 return ExporterClass(cmd._export_dir, MadLoop_SA_options) 7078 else: 7079 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 7080 ' in %s'%str(cmd._mgme_dir)) 7081 7082 # Then treat the aMC@NLO 
output 7083 elif output_type=='amcatnlo': 7084 import madgraph.iolibs.export_fks as export_fks 7085 ExporterClass=None 7086 amcatnlo_options = dict(opt) 7087 amcatnlo_options.update(MadLoop_SA_options) 7088 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 7089 if not cmd.options['loop_optimized_output']: 7090 logger.info("Writing out the aMC@NLO code") 7091 ExporterClass = export_fks.ProcessExporterFortranFKS 7092 amcatnlo_options['export_format']='FKS5_default' 7093 else: 7094 logger.info("Writing out the aMC@NLO code, using optimized Loops") 7095 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 7096 amcatnlo_options['export_format']='FKS5_optimized' 7097 return ExporterClass(cmd._export_dir, amcatnlo_options) 7098 7099 7100 # Then the default tree-level output 7101 elif output_type=='default': 7102 assert group_subprocesses in [True, False] 7103 7104 opt = dict(opt) 7105 opt.update({'clean': not noclean, 7106 'complex_mass': cmd.options['complex_mass_scheme'], 7107 'export_format':cmd._export_format, 7108 'mp': False, 7109 'sa_symmetry':False, 7110 'model': cmd._curr_model.get('name'), 7111 'v5_model': False if cmd._model_v4_path else True }) 7112 7113 format = cmd._export_format #shortcut 7114 7115 if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']: 7116 opt['sa_symmetry'] = True 7117 elif format == 'plugin': 7118 opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry 7119 7120 loop_induced_opt = dict(opt) 7121 loop_induced_opt.update(MadLoop_SA_options) 7122 loop_induced_opt['export_format'] = 'madloop_optimized' 7123 loop_induced_opt['SubProc_prefix'] = 'PV' 7124 # For loop_induced output with MadEvent, we must have access to the 7125 # color flows. 7126 loop_induced_opt['compute_color_flows'] = True 7127 for key in opt: 7128 if key not in loop_induced_opt: 7129 loop_induced_opt[key] = opt[key] 7130 7131 # Madevent output supports MadAnalysis5 7132 if format in ['madevent']: 7133 opt['madanalysis5'] = cmd.options['madanalysis5_path'] 7134 7135 if format == 'matrix' or format.startswith('standalone'): 7136 return ProcessExporterFortranSA(cmd._export_dir, opt, format=format) 7137 7138 elif format in ['madevent'] and group_subprocesses: 7139 if isinstance(cmd._curr_amps[0], 7140 loop_diagram_generation.LoopAmplitude): 7141 import madgraph.loop.loop_exporters as loop_exporters 7142 return loop_exporters.LoopInducedExporterMEGroup( 7143 cmd._export_dir,loop_induced_opt) 7144 else: 7145 return ProcessExporterFortranMEGroup(cmd._export_dir,opt) 7146 elif format in ['madevent']: 7147 if isinstance(cmd._curr_amps[0], 7148 loop_diagram_generation.LoopAmplitude): 7149 import madgraph.loop.loop_exporters as loop_exporters 7150 return loop_exporters.LoopInducedExporterMENoGroup( 7151 cmd._export_dir,loop_induced_opt) 7152 else: 7153 return ProcessExporterFortranME(cmd._export_dir,opt) 7154 elif format in ['matchbox']: 7155 return ProcessExporterFortranMatchBox(cmd._export_dir,opt) 7156 elif cmd._export_format in ['madweight'] and group_subprocesses: 7157 7158 return ProcessExporterFortranMWGroup(cmd._export_dir, opt) 7159 elif cmd._export_format in ['madweight']: 7160 return ProcessExporterFortranMW(cmd._export_dir, opt) 7161 elif format == 'plugin': 7162 if isinstance(cmd._curr_amps[0], 7163 loop_diagram_generation.LoopAmplitude): 7164 return cmd._export_plugin(cmd._export_dir, loop_induced_opt) 7165 else: 7166 return cmd._export_plugin(cmd._export_dir, opt) 7167 7168 else: 7169 raise Exception('Wrong export_v4 format') 7170 else: 7171 raise 
MadGraph5Error('Output type %s not recognized in ExportV4Factory.' % output_type)
7172
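In practice the factory is called with the current command interface; the class it returns is what then drives the template copy and the per-subprocess directory generation. A hedged usage sketch (cmd stands for an existing madgraph command object, not constructed here):

    exporter = ExportV4Factory(cmd, noclean=False, output_type='default',
                               group_subprocesses=True)
    # For cmd._export_format == 'madevent' with grouping enabled this would be a
    # ProcessExporterFortranMEGroup instance (or a loop-induced variant).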
7173 7174 7175 7176 #=============================================================================== 7177 # ProcessExporterFortranMWGroup 7178 #=============================================================================== 7179 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
7180 """Class to take care of exporting a set of matrix elements to 7181 MadEvent subprocess group format.""" 7182 7183 matrix_file = "matrix_madweight_group_v4.inc" 7184 grouped_mode = 'madweight' 7185 #=========================================================================== 7186 # generate_subprocess_directory 7187 #===========================================================================
7188 - def generate_subprocess_directory(self, subproc_group, 7189 fortran_model, 7190 group_number):
7191 """Generate the Pn directory for a subprocess group in MadEvent, 7192 including the necessary matrix_N.f files, configs.inc and various 7193 other helper files.""" 7194 7195 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 7196 raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup") 7197 7198 if not self.model: 7199 self.model = subproc_group.get('matrix_elements')[0].\ 7200 get('processes')[0].get('model') 7201 7202 pathdir = os.path.join(self.dir_path, 'SubProcesses') 7203 7204 # Create the directory PN in the specified path 7205 subprocdir = "P%d_%s" % (subproc_group.get('number'), 7206 subproc_group.get('name')) 7207 try: 7208 os.mkdir(pjoin(pathdir, subprocdir)) 7209 except os.error as error: 7210 logger.warning(error.strerror + " " + subprocdir) 7211 7212 7213 logger.info('Creating files in directory %s' % subprocdir) 7214 Ppath = pjoin(pathdir, subprocdir) 7215 7216 # Create the matrix.f files, auto_dsig.f files and all inc files 7217 # for all subprocesses in the group 7218 7219 maxamps = 0 7220 maxflows = 0 7221 tot_calls = 0 7222 7223 matrix_elements = subproc_group.get('matrix_elements') 7224 7225 for ime, matrix_element in \ 7226 enumerate(matrix_elements): 7227 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 7228 calls, ncolor = \ 7229 self.write_matrix_element_v4(writers.FortranWriter(filename), 7230 matrix_element, 7231 fortran_model, 7232 str(ime+1), 7233 subproc_group.get('diagram_maps')[\ 7234 ime]) 7235 7236 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 7237 self.write_auto_dsig_file(writers.FortranWriter(filename), 7238 matrix_element, 7239 str(ime+1)) 7240 7241 # Keep track of needed quantities 7242 tot_calls += int(calls) 7243 maxflows = max(maxflows, ncolor) 7244 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 7245 7246 # Draw diagrams 7247 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 7248 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 7249 get('diagrams'), 7250 filename, 7251 model = \ 7252 matrix_element.get('processes')[0].\ 7253 get('model'), 7254 amplitude=True) 7255 logger.info("Generating Feynman diagrams for " + \ 7256 matrix_element.get('processes')[0].nice_string()) 7257 plot.draw() 7258 7259 # Extract number of external particles 7260 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 7261 7262 # Generate a list of diagrams corresponding to each configuration 7263 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 7264 # If a subprocess has no diagrams for this config, the number is 0 7265 7266 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 7267 7268 filename = pjoin(Ppath, 'auto_dsig.f') 7269 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 7270 subproc_group) 7271 7272 filename = pjoin(Ppath,'configs.inc') 7273 nconfigs, s_and_t_channels = self.write_configs_file(\ 7274 writers.FortranWriter(filename), 7275 subproc_group, 7276 subproc_diagrams_for_config) 7277 7278 filename = pjoin(Ppath, 'leshouche.inc') 7279 self.write_leshouche_file(writers.FortranWriter(filename), 7280 subproc_group) 7281 7282 filename = pjoin(Ppath, 'phasespace.inc') 7283 self.write_phasespace_file(writers.FortranWriter(filename), 7284 nconfigs) 7285 7286 7287 filename = pjoin(Ppath, 'maxamps.inc') 7288 self.write_maxamps_file(writers.FortranWriter(filename), 7289 maxamps, 7290 maxflows, 7291 max([len(me.get('processes')) for me in \ 7292 matrix_elements]), 7293 len(matrix_elements)) 7294 7295 filename = pjoin(Ppath, 'mirrorprocs.inc') 7296 self.write_mirrorprocs(writers.FortranWriter(filename), 7297 subproc_group) 7298 7299 filename = pjoin(Ppath, 'nexternal.inc') 7300 self.write_nexternal_file(writers.FortranWriter(filename), 7301 nexternal, ninitial) 7302 7303 filename = pjoin(Ppath, 'pmass.inc') 7304 self.write_pmass_file(writers.FortranWriter(filename), 7305 matrix_element) 7306 7307 filename = pjoin(Ppath, 'props.inc') 7308 self.write_props_file(writers.FortranWriter(filename), 7309 matrix_element, 7310 s_and_t_channels) 7311 7312 # filename = pjoin(Ppath, 'processes.dat') 7313 # files.write_to_file(filename, 7314 # self.write_processes_file, 7315 # subproc_group) 7316 7317 # Generate jpgs -> pass in make_html 7318 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 7319 7320 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 7321 7322 for file in linkfiles: 7323 ln('../%s' % file, cwd=Ppath) 7324 7325 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 7326 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 7327 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 7328 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 7329 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 7330 ln('phasespace.inc', '../', log=True, cwd=Ppath) 7331 if not tot_calls: 7332 tot_calls = 0 7333 return tot_calls
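For reference, the files written into each P<number>_<name> directory by the routine above, for a hypothetical group of two matrix elements (the entries in linkfiles are only symlinked, not written):

    n_me = 2                                               # hypothetical group size
    written = (['matrix%d.f' % (i + 1) for i in range(n_me)]
               + ['auto_dsig%d.f' % (i + 1) for i in range(n_me)]
               + ['matrix%d.ps' % (i + 1) for i in range(n_me)]
               + ['auto_dsig.f', 'configs.inc', 'leshouche.inc', 'phasespace.inc',
                  'maxamps.inc', 'mirrorprocs.inc', 'nexternal.inc',
                  'pmass.inc', 'props.inc'])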
7334 7335 7336 #=========================================================================== 7337 # Helper functions 7338 #===========================================================================
7339 - def modify_grouping(self, matrix_element):
7340 """Allow the grouping to be modified (if grouping is in place). 7341 Return two values: 7342 - True/False whether the matrix_element was modified 7343 - the new (or old) matrix element""" 7344 7345 return True, matrix_element.split_lepton_grouping()
7346 7347 #=========================================================================== 7348 # write_super_auto_dsig_file 7349 #===========================================================================
7350 - def write_super_auto_dsig_file(self, writer, subproc_group):
7351 """Write the auto_dsig.f file selecting between the subprocesses 7352 in subprocess group mode""" 7353 7354 replace_dict = {} 7355 7356 # Extract version number and date from VERSION file 7357 info_lines = self.get_mg5_info_lines() 7358 replace_dict['info_lines'] = info_lines 7359 7360 matrix_elements = subproc_group.get('matrix_elements') 7361 7362 # Extract process info lines 7363 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7364 matrix_elements]) 7365 replace_dict['process_lines'] = process_lines 7366 7367 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7368 replace_dict['nexternal'] = nexternal 7369 7370 replace_dict['nsprocs'] = 2*len(matrix_elements) 7371 7372 # Generate dsig definition line 7373 dsig_def_line = "DOUBLE PRECISION " + \ 7374 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7375 range(len(matrix_elements))]) 7376 replace_dict["dsig_def_line"] = dsig_def_line 7377 7378 # Generate dsig process lines 7379 call_dsig_proc_lines = [] 7380 for iproc in range(len(matrix_elements)): 7381 call_dsig_proc_lines.append(\ 7382 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7383 {"num": iproc + 1, 7384 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7385 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7386 7387 if writer: 7388 file = open(os.path.join(_file_path, \ 7389 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7390 file = file % replace_dict 7391 # Write the file 7392 writer.writelines(file) 7393 else: 7394 return replace_dict
7395 7396 #=========================================================================== 7397 # write_mirrorprocs 7398 #===========================================================================
7399 - def write_mirrorprocs(self, writer, subproc_group):
7400 """Write the mirrorprocs.inc file determining which processes have 7401 an initial-state (IS) mirror process in subprocess group mode.""" 7402 7403 lines = [] 7404 bool_dict = {True: '.true.', False: '.false.'} 7405 matrix_elements = subproc_group.get('matrix_elements') 7406 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7407 (len(matrix_elements), 7408 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7409 me in matrix_elements]))) 7410 # Write the file 7411 writer.writelines(lines)
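The single DATA statement written here packs one logical per matrix element. For a hypothetical group of three processes where only the second has a mirror:

    bool_dict = {True: '.true.', False: '.false.'}
    has_mirror = [False, True, False]                      # hypothetical flags
    line = "DATA (MIRRORPROCS(I),I=1,%d)/%s/" % (
        len(has_mirror), ",".join(bool_dict[f] for f in has_mirror))
    # -> DATA (MIRRORPROCS(I),I=1,3)/.false.,.true.,.false./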
7412 7413 #=========================================================================== 7414 # write_configs_file 7415 #===========================================================================
7416 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7417 """Write the configs.inc file with topology information for a 7418 subprocess group. Use the first subprocess with a diagram for each 7419 configuration.""" 7420 7421 matrix_elements = subproc_group.get('matrix_elements') 7422 model = matrix_elements[0].get('processes')[0].get('model') 7423 7424 diagrams = [] 7425 config_numbers = [] 7426 for iconfig, config in enumerate(diagrams_for_config): 7427 # Check if any diagrams correspond to this config 7428 if set(config) == set([0]): 7429 continue 7430 subproc_diags = [] 7431 for s,d in enumerate(config): 7432 if d: 7433 subproc_diags.append(matrix_elements[s].\ 7434 get('diagrams')[d-1]) 7435 else: 7436 subproc_diags.append(None) 7437 diagrams.append(subproc_diags) 7438 config_numbers.append(iconfig + 1) 7439 7440 # Extract number of external particles 7441 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7442 7443 return len(diagrams), \ 7444 self.write_configs_file_from_diagrams(writer, diagrams, 7445 config_numbers, 7446 nexternal, ninitial, 7447 matrix_elements[0],model)
7448 7449 #=========================================================================== 7450 # write_run_configs_file 7451 #===========================================================================
7452 - def write_run_config_file(self, writer):
7453 """Write the run_configs.inc file for MadEvent""" 7454 7455 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7456 text = open(path).read() % {'chanperjob':'2'} 7457 writer.write(text) 7458 return True
7459 7460 7461 #=========================================================================== 7462 # write_leshouche_file 7463 #===========================================================================
7464 - def write_leshouche_file(self, writer, subproc_group):
7465 """Write the leshouche.inc file for MG4""" 7466 7467 all_lines = [] 7468 7469 for iproc, matrix_element in \ 7470 enumerate(subproc_group.get('matrix_elements')): 7471 all_lines.extend(self.get_leshouche_lines(matrix_element, 7472 iproc)) 7473 7474 # Write the file 7475 writer.writelines(all_lines) 7476 7477 return True
7478