Package madgraph :: Package interface :: Module amcatnlo_run_interface
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.interface.amcatnlo_run_interface

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
  18  from __future__ import division 
  19   
  20  import atexit 
  21  import glob 
  22  import logging 
  23  import math 
  24  import optparse 
  25  import os 
  26  import pydoc 
  27  import random 
  28  import re 
  29  import shutil 
  30  import subprocess 
  31  import sys 
  32  import traceback 
  33  import time 
  34  import signal 
  35  import tarfile 
  36  import copy 
  37  import datetime 
  38  import tarfile 
  39   
  40  try: 
  41      import readline 
  42      GNU_SPLITTING = ('GNU' in readline.__doc__) 
  43  except: 
  44      GNU_SPLITTING = True 
  45   
  46  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  47  root_path = os.path.split(root_path)[0] 
  48  sys.path.insert(0, os.path.join(root_path,'bin')) 
  49   
  50  # usefull shortcut 
  51  pjoin = os.path.join 
  52  # Special logger for the Cmd Interface 
  53  logger = logging.getLogger('madgraph.stdout') # -> stdout 
  54  logger_stderr = logging.getLogger('madgraph.stderr') # ->stderr 
  55    
  56  try: 
  57      # import from madgraph directory 
  58      import madgraph.interface.extended_cmd as cmd 
  59      import madgraph.interface.common_run_interface as common_run 
  60      import madgraph.iolibs.files as files 
  61      import madgraph.iolibs.save_load_object as save_load_object 
  62      import madgraph.various.banner as banner_mod 
  63      import madgraph.various.cluster as cluster 
  64      import madgraph.various.misc as misc 
  65      import madgraph.various.gen_crossxhtml as gen_crossxhtml 
  66      import madgraph.various.sum_html as sum_html 
  67      import madgraph.various.shower_card as shower_card 
  68      import madgraph.various.FO_analyse_card as analyse_card 
  69   
  70      from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error 
  71      aMCatNLO = False 
  72  except ImportError, error: 
  73      logger.debug(error) 
  74      # import from madevent directory 
  75      import internal.extended_cmd as cmd 
  76      import internal.common_run_interface as common_run 
  77      import internal.banner as banner_mod 
  78      import internal.misc as misc     
  79      from internal import InvalidCmd, MadGraph5Error 
  80      import internal.files as files 
  81      import internal.cluster as cluster 
  82      import internal.save_load_object as save_load_object 
  83      import internal.gen_crossxhtml as gen_crossxhtml 
  84      import internal.sum_html as sum_html 
  85      import internal.shower_card as shower_card 
  86      import internal.FO_analyse_card as analyse_card 
  87      aMCatNLO = True 
  88   
class aMCatNLOError(Exception):
    """Generic error raised while driving an aMC@NLO run.

    NOTE(review): this definition intentionally (re)binds the name even when
    the madgraph-package import above already provided aMCatNLOError, so the
    'internal' import branch also has it available.
    """
    pass
91 92
def compile_dir(arguments):
    """Compile the subprocess directory p_dir.

    ``arguments`` is the tuple (me_dir, p_dir, mode, options, tests, exe,
    run_mode).  This needs to be a module-level function (not a class
    method) so it can be dispatched to a multicore pool.

    Returns 0 on success, or the MadGraph5Error message on failure.
    """
    (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
    logger.info(' Compiling %s...' % p_dir)

    this_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        # compile and run the requested test executables
        for test in tests:
            misc.compile([test], cwd=this_dir, job_specs=False)
            # renamed from 'input' to avoid shadowing the builtin
            input_path = pjoin(me_dir, '%s_input.txt' % test)
            # close the redirection handles instead of leaking them
            test_stdin = open(input_path)
            test_stdout = open(pjoin(this_dir, '%s.log' % test), 'w')
            try:
                # this can be improved/better written to handle the output
                misc.call(['./%s' % (test)], cwd=this_dir,
                          stdin=test_stdin, stdout=test_stdout)
            finally:
                test_stdin.close()
                test_stdout.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=this_dir, job_specs=False)
            # write the run mode for gensym, closing the handle promptly so
            # the content is flushed before gensym reads it back
            gensym_input = pjoin(this_dir, 'gensym_input.txt')
            fsock = open(gensym_input, 'w')
            try:
                fsock.write('%s\n' % run_mode)
            finally:
                fsock.close()
            gensym_stdin = open(gensym_input)
            gensym_stdout = open(pjoin(this_dir, 'gensym.log'), 'w')
            try:
                misc.call(['./gensym'], cwd=this_dir,
                          stdin=gensym_stdin, stdout=gensym_stdout)
            finally:
                gensym_stdin.close()
                gensym_stdout.close()
            # compile madevent_mintMC/mintFO
            misc.compile([exe], cwd=this_dir, job_specs=False)
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs=False)

        logger.info('    %s done.' % p_dir)
        return 0
    except MadGraph5Error as msg:
        # 'as' syntax is valid on py2.6+ and py3; return the message so the
        # multicore driver can report which directory failed
        return msg
129 130
def check_compiler(options, block=False):
    """Check that the current fortran compiler is gfortran 4.6 or later.

    If ``block`` is True, raise aMCatNLOError (stopping the execution);
    otherwise just print a warning.
    """
    msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected\n'+\
          'Note that You can still run all MadEvent run without any problem!'
    # first check that gfortran is installed
    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        # BUG FIX: compare the version numerically.  The old string test
        # (''.join(version.split('.')) >= '46') wrongly rejects gfortran 10+
        # ('102' < '46' lexicographically) and mis-orders e.g. 4.10.
        try:
            version_tuple = tuple(int(i) for i in curr_version.split('.')[:2])
        except ValueError:
            # unparsable version string: be conservative and complain
            version_tuple = ()
        if version_tuple < (4, 6):
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
            else:
                logger.warning(msg % (compiler + ' ' + curr_version))
158 159 160 161 #=============================================================================== 162 # CmdExtended 163 #===============================================================================
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    #suggested list of command
    next_possibility = {
        'start': [],
    }

    # file name used for the crash report
    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/madgraph5\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/madgraph5\n'

    keyboard_stop_msg = """stopping all operation
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Define the Error classes used by the cmd framework
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError

    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # Tag allowing/forbiding question
        self.force = False

        # If possible, build an info line with current version number
        # and date, from the VERSION text file
        info = misc.get_pkg_info()
        info_line = ""
        if info and info.has_key('version') and info.has_key('date'):
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#*         VERSION %s %s %s         *\n" % \
                            (info['version'],
                            (30 - len_version - len_date) * ' ',
                            info['date'])
        else:
            # fall back on the top-level MGMEVersion.txt file
            version = open(pjoin(root_path, 'MGMEVersion.txt')).readline().strip()
            info_line = "#*         VERSION %s %s                *\n" % \
                            (version, (24 - len(version)) * ' ')

        # Create a header for the history file.
        # Remember to fill in time at writeout time!
        self.history_header = \
        '#************************************************************\n' + \
        '#*                    MadGraph5_aMC@NLO                     *\n' + \
        '#*                                                          *\n' + \
        "#*                *                       *                 *\n" + \
        "#*                  *        * *        *                   *\n" + \
        "#*                    * * * * 5 * * * *                     *\n" + \
        "#*                  *        * *        *                   *\n" + \
        "#*                *                       *                 *\n" + \
        "#*                                                          *\n" + \
        "#*                                                          *\n" + \
        info_line + \
        "#*                                                          *\n" + \
        "#*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \
        "#*    https://server06.fynu.ucl.ac.be/projects/madgraph     *\n" + \
        "#*                            and                           *\n" + \
        "#*                 http://amcatnlo.cern.ch                  *\n" + \
        '#*                                                          *\n' + \
        '#************************************************************\n' + \
        '#*                                                          *\n' + \
        '#*               Command File for aMCatNLO                  *\n' + \
        '#*                                                          *\n' + \
        '#*     run as ./bin/aMCatNLO.py filename                    *\n' + \
        '#*                                                          *\n' + \
        '#************************************************************\n'

        # drop the leading '#' so the line fits the welcome banner below
        if info_line:
            info_line = info_line[1:]

        logger.info(\
        "************************************************************\n" + \
        "*                                                          *\n" + \
        "*           W E L C O M E  to  M A D G R A P H 5           *\n" + \
        "*                       a M C @ N L O                      *\n" + \
        "*                                                          *\n" + \
        "*                 *                       *                *\n" + \
        "*                   *        * *        *                  *\n" + \
        "*                     * * * * 5 * * * *                    *\n" + \
        "*                   *        * *        *                  *\n" + \
        "*                 *                       *                *\n" + \
        "*                                                          *\n" + \
        info_line + \
        "*                                                          *\n" + \
        "*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \
        "*                 http://amcatnlo.cern.ch                  *\n" + \
        "*                                                          *\n" + \
        "*               Type 'help' for in-line help.              *\n" + \
        "*                                                          *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
262 263
    def get_history_header(self):
        """return the history header"""
        # the template built in __init__ contains %-placeholders that are
        # filled with the current time information at writeout time
        return self.history_header % misc.get_time_info()
267
    def stop_on_keyboard_stop(self):
        """action to perform to close nicely on a keyboard interupt"""
        try:
            # remove any job still queued on the cluster
            if hasattr(self, 'cluster'):
                logger.info('rm jobs on queue')
                self.cluster.remove()
            # flag the run as stopped in the HTML results page
            if hasattr(self, 'results'):
                self.update_status('Stop by the user', level=None, makehtml=True, error=True)
                self.add_error_log_in_html(KeyboardInterrupt)
        except:
            # deliberate best-effort cleanup: swallow any error (including a
            # second Ctrl-C) so the interrupt still terminates cleanly
            pass
279
    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # relaxing the tag forbidding question
        self.force = False

        # nothing to report when commands come from a script/file
        if not self.use_rawinput:
            return stop

        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit','quit','EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            # status update is cosmetic only: log and continue
            misc.sprint('self.update_status fails', log=logger)
            pass
302
    def nice_user_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""
        # record the error in the HTML page before the standard handling
        self.add_error_log_in_html()
        cmd.Cmd.nice_user_error(self, error, line)
308
    def nice_config_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""
        # record the error in the HTML page before the standard handling
        self.add_error_log_in_html()
        cmd.Cmd.nice_config_error(self, error, line)
314
    def nice_error_handling(self, error, line):
        """If a ME run is currently running add a link in the html output"""
        # record the error in the HTML page before the standard handling
        self.add_error_log_in_html()
        cmd.Cmd.nice_error_handling(self, error, line)
320 321 322 323 #=============================================================================== 324 # HelpToCmd 325 #===============================================================================
class HelpToCmd(object):
    """ The Series of help routine for the aMCatNLOCmd"""

    def help_launch(self):
        """help for launch command"""
        _launch_parser.print_help()

    def help_banner_run(self):
        """help for the banner_run command"""
        logger.info("syntax: banner_run Path|RUN [--run_options]")
        logger.info("-- Reproduce a run following a given banner")
        logger.info("   One of the following argument is require:")
        logger.info("   Path should be the path of a valid banner.")
        logger.info("   RUN should be the name of a run of the current directory")
        self.run_options_help([('-f','answer all question by default'),
                               ('--name=X', 'Define the name associated with the new run')])

    def help_compile(self):
        """help for compile command"""
        _compile_parser.print_help()

    def help_generate_events(self):
        """help for generate_events command:
        just call help_launch"""
        _generate_events_parser.print_help()

    def help_calculate_xsect(self):
        """help for generate_events command"""
        _calculate_xsect_parser.print_help()

    def help_shower(self):
        """help for shower command"""
        _shower_parser.print_help()

    def help_open(self):
        """help for the open command"""
        logger.info("syntax: open FILE  ")
        logger.info("-- open a file with the appropriate editor.")
        logger.info('   If FILE belongs to index.html, param_card.dat, run_card.dat')
        logger.info('   the path to the last created/used directory is used')

    def run_options_help(self, data):
        """print the local options given in *data* plus the session options"""
        if data:
            logger.info('-- local options:')
            for name, info in data:
                logger.info('      %s : %s' % (name, info))

        logger.info("-- session options:")
        logger.info("      Note that those options will be kept for the current session")
        logger.info("      --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
        logger.info("      --multicore : Run in multi-core configuration")
        logger.info("      --nb_core=X : limit the number of core to use to X.")
379 380 381 382 383 #=============================================================================== 384 # CheckValidForCmd 385 #===============================================================================
class CheckValidForCmd(object):
    """ The Series of check routine for the aMCatNLOCmd"""

    def check_shower(self, args, options):
        """Check the validity of the line. args[0] is the run_directory"""

        # honour -f: skip interactive questions for the rest of the command
        if options['force']:
            self.force = True

        if len(args) == 0:
            self.help_shower()
            raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
        if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
            raise self.InvalidCmd, 'Directory %s does not exists' % \
                            pjoin(os.getcwd(), 'Events', args[0])

        self.set_run_name(args[0], level= 'shower')
        # normalise args[0] to the absolute path of the run directory
        args[0] = pjoin(self.me_dir, 'Events', args[0])
404
    def check_plot(self, args):
        """Check the argument for the plot command
        plot run_name modes"""

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        # both MadAnalysis and td are needed; re-read the configuration
        # once before giving up
        if not madir or not td:
            logger.info('Retry to read configuration file to find madanalysis/td')
            self.set_configuration()

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir:
            error_msg = 'No Madanalysis path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)
        if not td:
            error_msg = 'No path to td directory correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # no argument: default to plotting everything for the current run
        if len(args) == 0:
            if not hasattr(self, 'run_name') or not self.run_name:
                self.help_plot()
                raise self.InvalidCmd('No run name currently define. Please add this information.')
            args.append('all')
            return

        # a leading non-mode argument is interpreted as the run name
        if args[0] not in self._plot_mode:
            self.set_run_name(args[0], level='plot')
            del args[0]
            if len(args) == 0:
                args.append('all')
        elif not self.run_name:
            self.help_plot()
            raise self.InvalidCmd('No run name currently define. Please add this information.')

        # all remaining arguments must be valid plot modes
        for arg in args:
            if arg not in self._plot_mode and arg != self.run_name:
                self.help_plot()
                raise self.InvalidCmd('unknown options %s' % arg)
452
    def check_pgs(self, arg):
        """Check the argument for the pgs command
        syntax: pgs [NAME]
        Note that other option are already remove at this point

        Returns a 'lock' handle for the background gunzip of the event
        file, or None when no decompression was started.
        """

        # If not pythia-pgs path
        if not self.options['pythia-pgs_path']:
            logger.info('Retry to read configuration file to find pythia-pgs path')
            self.set_configuration()

        if not self.options['pythia-pgs_path'] or not \
            os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
            error_msg = 'No pythia-pgs path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # extract an optional --tag=XXX argument
        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        # no run given: fall back on the last run if any
        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'pgs')
            filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
                                        'events_*.hep.gz'))
            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
            else:
                # decompress the event file asynchronously while the cards
                # are being edited
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
                                                argument=['-c', input_file])
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'pgs')

        return lock
510 511
512 - def check_delphes(self, arg):
513 """Check the argument for pythia command 514 syntax: delphes [NAME] 515 Note that other option are already remove at this point 516 """ 517 518 # If not pythia-pgs path 519 if not self.options['delphes_path']: 520 logger.info('Retry to read configuration file to find delphes path') 521 self.set_configuration() 522 523 if not self.options['delphes_path']: 524 error_msg = 'No delphes path correctly set.' 525 error_msg += 'Please use the set command to define the path and retry.' 526 error_msg += 'You can also define it in the configuration file.' 527 raise self.InvalidCmd(error_msg) 528 529 tag = [a for a in arg if a.startswith('--tag=')] 530 if tag: 531 arg.remove(tag[0]) 532 tag = tag[0][6:] 533 534 535 if len(arg) == 0 and not self.run_name: 536 if self.results.lastrun: 537 arg.insert(0, self.results.lastrun) 538 else: 539 raise self.InvalidCmd('No run name currently define. Please add this information.') 540 541 if len(arg) == 1 and self.run_name == arg[0]: 542 arg.pop(0) 543 544 if not len(arg) and \ 545 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')): 546 self.help_pgs() 547 raise self.InvalidCmd('''No file file pythia_events.hep currently available 548 Please specify a valid run_name''') 549 550 if len(arg) == 1: 551 prev_tag = self.set_run_name(arg[0], tag, 'delphes') 552 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 553 'events_*.hep.gz')) 554 if not filenames: 555 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\ 556 % (self.run_name, prev_tag, 557 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag))) 558 else: 559 input_file = filenames[0] 560 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep') 561 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'), 562 argument=['-c', input_file]) 563 else: 564 if tag: 565 self.run_card['run_tag'] = tag 566 self.set_run_name(self.run_name, tag, 'delphes')
567
568 - def check_calculate_xsect(self, args, options):
569 """check the validity of the line. args is ORDER, 570 ORDER being LO or NLO. If no mode is passed, NLO is used""" 571 # modify args in order to be DIR 572 # mode being either standalone or madevent 573 574 if options['force']: 575 self.force = True 576 577 if not args: 578 args.append('NLO') 579 return 580 581 if len(args) > 1: 582 self.help_calculate_xsect() 583 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 584 585 elif len(args) == 1: 586 if not args[0] in ['NLO', 'LO']: 587 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1] 588 mode = args[0] 589 590 # check for incompatible options/modes 591 if options['multicore'] and options['cluster']: 592 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 593 ' are not compatible. Please choose one.'
594 595
596 - def check_generate_events(self, args, options):
597 """check the validity of the line. args is ORDER, 598 ORDER being LO or NLO. If no mode is passed, NLO is used""" 599 # modify args in order to be DIR 600 # mode being either standalone or madevent 601 602 if not args: 603 args.append('NLO') 604 return 605 606 if len(args) > 1: 607 self.help_generate_events() 608 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 609 610 elif len(args) == 1: 611 if not args[0] in ['NLO', 'LO']: 612 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1] 613 mode = args[0] 614 615 # check for incompatible options/modes 616 if options['multicore'] and options['cluster']: 617 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 618 ' are not compatible. Please choose one.'
619
620 - def check_banner_run(self, args):
621 """check the validity of line""" 622 623 if len(args) == 0: 624 self.help_banner_run() 625 raise self.InvalidCmd('banner_run requires at least one argument.') 626 627 tag = [a[6:] for a in args if a.startswith('--tag=')] 628 629 630 if os.path.exists(args[0]): 631 type ='banner' 632 format = self.detect_card_type(args[0]) 633 if format != 'banner': 634 raise self.InvalidCmd('The file is not a valid banner.') 635 elif tag: 636 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \ 637 (args[0], tag)) 638 if not os.path.exists(args[0]): 639 raise self.InvalidCmd('No banner associates to this name and tag.') 640 else: 641 name = args[0] 642 type = 'run' 643 banners = glob.glob(pjoin(self.me_dir,'Events', args[0], '*_banner.txt')) 644 if not banners: 645 raise self.InvalidCmd('No banner associates to this name.') 646 elif len(banners) == 1: 647 args[0] = banners[0] 648 else: 649 #list the tag and propose those to the user 650 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners] 651 tag = self.ask('which tag do you want to use?', tags[0], tags) 652 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \ 653 (args[0], tag)) 654 655 run_name = [arg[7:] for arg in args if arg.startswith('--name=')] 656 if run_name: 657 try: 658 self.exec_cmd('remove %s all banner -f' % run_name) 659 except Exception: 660 pass 661 self.set_run_name(args[0], tag=None, level='parton', reload_card=True) 662 elif type == 'banner': 663 self.set_run_name(self.find_available_run_name(self.me_dir)) 664 elif type == 'run': 665 if not self.results[name].is_empty(): 666 run_name = self.find_available_run_name(self.me_dir) 667 logger.info('Run %s is not empty so will use run_name: %s' % \ 668 (name, run_name)) 669 self.set_run_name(run_name) 670 else: 671 try: 672 self.exec_cmd('remove %s all banner -f' % run_name) 673 except Exception: 674 pass 675 self.set_run_name(name)
676 677 678
679 - def check_launch(self, args, options):
680 """check the validity of the line. args is MODE 681 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used""" 682 # modify args in order to be DIR 683 # mode being either standalone or madevent 684 685 if options['force']: 686 self.force = True 687 688 689 if not args: 690 args.append('auto') 691 return 692 693 if len(args) > 1: 694 self.help_launch() 695 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 696 697 elif len(args) == 1: 698 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']: 699 raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0] 700 mode = args[0] 701 702 # check for incompatible options/modes 703 if options['multicore'] and options['cluster']: 704 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 705 ' are not compatible. Please choose one.' 706 if mode == 'NLO' and options['reweightonly']: 707 raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'
708 709
    def check_compile(self, args, options):
        """check the validity of the line. args is MODE
        MODE being FO or MC. If no mode is passed, MC is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        # honour -f: skip interactive questions for the rest of the command
        if options['force']:
            self.force = True

        # default mode when nothing is specified
        if not args:
            args.append('MC')
            return

        if len(args) > 1:
            self.help_compile()
            raise self.InvalidCmd, 'Invalid Syntax: Too many argument'

        elif len(args) == 1:
            if not args[0] in ['MC', 'FO']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
            mode = args[0]
731 732 # check for incompatible options/modes 733 734 735 #=============================================================================== 736 # CompleteForCmd 737 #===============================================================================
class CompleteForCmd(CheckValidForCmd):
    """ The Series of help routine for the MadGraphCmd"""

    def complete_launch(self, text, line, begidx, endidx):
        """auto-completion for launch command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
        elif len(args) == 2 and line[begidx-1] == '@':
            # the user typed e.g. 'aMC@' -> complete the order after the '@'
            return self.list_completion(text,['LO','NLO'],line)
        else:
            # otherwise propose the options accepted by the launch parser
            opts = []
            for opt in _launch_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)
755
    def complete_banner_run(self, text, line, begidx, endidx):
        "Complete the banner run command"
        # any exception raised during tab-completion would be swallowed by
        # readline, hence the broad try/except with an explicit print
        try:

            args = self.split_arg(line[0:begidx], error=False)

            # complete inside a partially typed path
            if args[-1].endswith(os.path.sep):
                return self.path_completion(text,
                                            os.path.join('.',*[a for a in args \
                                                    if a.endswith(os.path.sep)]))

            if len(args) > 1:
                # only options are possible
                tags = glob.glob(pjoin(self.me_dir, 'Events' , args[1],'%s_*_banner.txt' % args[1]))
                tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]

                if args[-1] != '--tag=':
                    tags = ['--tag=%s' % t for t in tags]
                else:
                    return self.list_completion(text, tags)
                return self.list_completion(text, tags +['--name=','-f'], line)

            # First argument
            possibilites = {}

            comp = self.path_completion(text, os.path.join('.',*[a for a in args \
                                                    if a.endswith(os.path.sep)]))
            if os.path.sep in line:
                return comp
            else:
                possibilites['Path from ./'] = comp

            run_list = glob.glob(pjoin(self.me_dir, 'Events', '*','*_banner.txt'))
            run_list = [n.rsplit('/',2)[1] for n in run_list]
            possibilites['RUN Name'] = self.list_completion(text, run_list)

            return self.deal_multiple_categories(possibilites)


        except Exception, error:
            print error
799 800
    def complete_compile(self, text, line, begidx, endidx):
        """auto-completion for compile command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['FO','MC'],line)
        else:
            # propose the options accepted by the compile parser
            opts = []
            for opt in _compile_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)
813
    def complete_calculate_xsect(self, text, line, begidx, endidx):
        """auto-completion for calculate_xsect command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['LO','NLO'],line)
        else:
            # propose the options accepted by the calculate_xsect parser
            opts = []
            for opt in _calculate_xsect_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)
826
827 - def complete_generate_events(self, text, line, begidx, endidx):
828 """auto-completion for generate_events command 829 call the compeltion for launch""" 830 self.complete_launch(text, line, begidx, endidx)
831 832
    def complete_shower(self, text, line, begidx, endidx):
        """auto-completion for the shower command: propose run names that
        already contain a (gzipped) parton-level event file"""
        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return valid run_name
            data = glob.glob(pjoin(self.me_dir, 'Events', '*','events.lhe.gz'))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1
        # NOTE(review): every other case falls through and returns None
        # (no completion) — confirm this is intended; compare complete_plot.
842
    def complete_plot(self, text, line, begidx, endidx):
        """ Complete the plot command """

        args = self.split_arg(line[0:begidx], error=False)

        if len(args) == 1:
            #return valid run_name
            data = glob.glob(pjoin(self.me_dir, 'Events', '*','events.lhe*'))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

        # after the run name, propose the available plot modes
        if len(args) > 1:
            return self.list_completion(text, self._plot_mode)
858
    def complete_pgs(self,text, line, begidx, endidx):
        "Complete the pgs command"
        args = self.split_arg(line[0:begidx], error=False)
        if len(args) == 1:
            #return valid run_name
            data = glob.glob(pjoin(self.me_dir, 'Events', '*', 'events_*.hep.gz'))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1
            else:
                tmp2 = self.list_completion(text, self._run_options + ['-f',
                                                '--tag=' ,'--no_default'], line)
                return tmp1 + tmp2
        else:
            return self.list_completion(text, self._run_options + ['-f',
                                                '--tag=','--no_default'], line)

    # delphes accepts exactly the same arguments as pgs
    complete_delphes = complete_pgs
878
class aMCatNLOAlreadyRunning(InvalidCmd):
    """Raised when a second aMC@NLO run is attempted in a directory where
    one is already in progress."""
    pass
881 882 #=============================================================================== 883 # aMCatNLOCmd 884 #===============================================================================
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # Truth values
    true = ['T','.true.',True,'true']
    # Options and formats available
    _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
    _plot_mode = ['all', 'parton','shower','pgs','delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']
    # survey options, dict from name to type, default value, and help text
    # Variables to store object information
    web = False
    cluster_mode = 0
    queue  = 'madgraph'
    nb_core = None

    # suggested follow-up commands shown after each command completes
    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower' : ['generate_events [OPTIONS]']
    }
    def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
        """ add information to the cmd

        me_dir: path of the process directory this interface controls
        options: configuration dictionary forwarded to CmdExtended
        NOTE(review): the mutable default `options = {}` is shared across
        calls; safe only as long as no caller mutates it — TODO confirm.
        """

        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
        #common_run.CommonRunCmd.__init__(self, me_dir, options)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        # prompt shows the process-directory name
        self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))

        # load the current status of the directory
        if os.path.exists(pjoin(self.me_dir,'HTML','results.pkl')):
            # resume from the pickled results of a previous session
            self.results = save_load_object.load_from_file(pjoin(self.me_dir,'HTML','results.pkl'))
            self.results.resetall(self.me_dir)
            self.last_mode = self.results[self.results.lastrun][-1]['run_mode']
        else:
            # first time in this directory: build a fresh results container
            model = self.find_model_name()
            process = self.process # define in find_model_name
            self.results = gen_crossxhtml.AllResultsNLO(model, process, self.me_dir)
            self.last_mode = ''
        self.results.def_web_mode(self.web)
        # check that compiler is gfortran 4.6 or later if virtuals have been exported
        # (the handle is not closed explicitly; relies on GC)
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)
942 943 944 ############################################################################
945 - def do_shower(self, line):
946 """ run the shower on a given parton level file """ 947 argss = self.split_arg(line) 948 (options, argss) = _launch_parser.parse_args(argss) 949 # check argument validity and normalise argument 950 options = options.__dict__ 951 options['reweightonly'] = False 952 self.check_shower(argss, options) 953 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe') 954 self.ask_run_configuration('onlyshower', options) 955 self.run_mcatnlo(evt_file) 956 957 self.update_status('', level='all', update_results=True)
958 959 ################################################################################
    def do_plot(self, line):
        """Create the plot for a given run"""

        # Since in principle, all plot are already done automaticaly
        args = self.split_arg(line)
        # Check argument's validity
        self.check_plot(args)
        logger.info('plot for run %s' % self.run_name)

        # let the user edit the relevant cards unless running with --force
        if not self.force:
            self.ask_edit_cards([], args, plot=True)

        # parton-level plots from the LHE file
        if any([arg in ['parton'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
            if os.path.exists(filename+'.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                logger.info('Found events.lhe file for run %s' % self.run_name)
                # create_plot expects the file at Events/unweighted_events.lhe,
                # so move it there and back afterwards
                shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
                self.create_plot('parton')
                shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
                misc.gzip(filename)

        # fixed-order plots from the MADatNLO.top file (needs MadAnalysis/td)
        if any([arg in ['all','parton'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
            if os.path.exists(filename):
                logger.info('Found MADatNLO.top file for run %s' % \
                             self.run_name)
                output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html')
                plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')

                if not os.path.isdir(plot_dir):
                    os.makedirs(plot_dir)
                top_file = pjoin(plot_dir, 'plots.top')
                files.cp(filename, top_file)
                madir = self.options['madanalysis_path']
                tag = self.run_card['run_tag']
                td = self.options['td_path']
                # run the external plotting scripts, appending to plot.log
                misc.call(['%s/plot' % self.dirbin, madir, td],
                            stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
                            stderr = subprocess.STDOUT,
                            cwd=plot_dir)

                misc.call(['%s/plot_page-pl' % self.dirbin,
                                    os.path.basename(plot_dir),
                                    'parton'],
                            stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
                            stderr = subprocess.STDOUT,
                            cwd=pjoin(self.me_dir, 'HTML', self.run_name))
                shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'),
                                                                         output)

                os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))

        # shower-level plots: prefer a single .lhe.gz, else fall back to .hep.gz
        if any([arg in ['all','shower'] for arg in args]):
            filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
                                        'events_*.lhe.gz'))
            if len(filenames) != 1:
                filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
                                            'events_*.hep.gz'))
                if len(filenames) != 1:
                    logger.info('No shower level file found for run %s' % \
                                self.run_name)
                    return
                filename = filenames[0]
                misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))

                if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
                    # NOTE(review): raising a plain string is invalid in
                    # Python >= 2.6 (raises TypeError instead); should be an
                    # exception class such as aMCatNLOError — TODO confirm/fix
                    if aMCatNLO and not self.options['mg5_path']:
                        raise "plotting NLO HEP file needs MG5 utilities"

                    files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'),
                             pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
                # convert the HEP file to LHE for plotting
                self.run_hep2lhe()
            else:
                filename = filenames[0]
                misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))

            self.create_plot('shower')
            lhe_file_name = filename.replace('.hep.gz', '.lhe')
            shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'),
                        lhe_file_name)
            misc.gzip(lhe_file_name)

        # PGS detector-level plots from the .lhco file
        if any([arg in ['all','pgs'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name,
                             '%s_pgs_events.lhco' % self.run_tag)
            if os.path.exists(filename+'.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                self.create_plot('PGS')
                misc.gzip(filename)
            else:
                logger.info('No valid files for pgs plot')

        # Delphes detector-level plots from the .lhco file
        if any([arg in ['all','delphes'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name,
                             '%s_delphes_events.lhco' % self.run_tag)
            if os.path.exists(filename+'.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                #shutil.move(filename, pjoin(self.me_dir, 'Events','delphes_events.lhco'))
                self.create_plot('Delphes')
                #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename)
                misc.gzip(filename)
            else:
                logger.info('No valid files for delphes plot')
1067 1068 1069 ############################################################################
1070 - def do_calculate_xsect(self, line):
1071 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO 1072 this function wraps the do_launch one""" 1073 1074 self.start_time = time.time() 1075 argss = self.split_arg(line) 1076 # check argument validity and normalise argument 1077 (options, argss) = _calculate_xsect_parser.parse_args(argss) 1078 options = options.__dict__ 1079 options['reweightonly'] = False 1080 options['parton'] = True 1081 self.check_calculate_xsect(argss, options) 1082 self.do_launch(line, options, argss)
1083 1084 ############################################################################
1085 - def do_banner_run(self, line):
1086 """Make a run from the banner file""" 1087 1088 args = self.split_arg(line) 1089 #check the validity of the arguments 1090 self.check_banner_run(args) 1091 1092 # Remove previous cards 1093 for name in ['shower_card.dat', 'madspin_card.dat']: 1094 try: 1095 os.remove(pjoin(self.me_dir, 'Cards', name)) 1096 except Exception: 1097 pass 1098 1099 banner_mod.split_banner(args[0], self.me_dir, proc_card=False) 1100 1101 # Check if we want to modify the run 1102 if not self.force: 1103 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n']) 1104 if ans == 'n': 1105 self.force = True 1106 1107 # Compute run mode: 1108 if self.force: 1109 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True} 1110 banner = banner_mod.Banner(args[0]) 1111 for line in banner['run_settings']: 1112 if '=' in line: 1113 mode, value = [t.strip() for t in line.split('=')] 1114 mode_status[mode] = value 1115 else: 1116 mode_status = {} 1117 1118 # Call Generate events 1119 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''), 1120 switch=mode_status)
1121 1122 ############################################################################
    def do_generate_events(self, line):
        """Main commands: generate events
        this function just wraps the do_launch one"""
        # thin alias: all the work happens in do_launch
        self.do_launch(line)
1127 1128 1129 ############################################################################
    def do_treatcards(self, line, amcatnlo=True):
        """Advanced commands: this is for creating the correct run_card.inc from the nlo format"""
        #check if no 'Auto' are present in the file
        self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
        # delegate the actual card treatment to the common implementation
        return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1135 1136 ############################################################################
    def set_configuration(self, amcatnlo=True, **opt):
        """assign all configuration variable from file;
        loop over the different config files if config_file is not defined"""
        # forward to the common implementation, forcing amcatnlo mode
        return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1141 1142 ############################################################################
1143 - def do_launch(self, line, options={}, argss=[], switch={}):
1144 """Main commands: launch the full chain 1145 options and args are relevant if the function is called from other 1146 functions, such as generate_events or calculate_xsect 1147 mode gives the list of switch needed for the computation (usefull for banner_run) 1148 """ 1149 1150 if not argss and not options: 1151 self.start_time = time.time() 1152 argss = self.split_arg(line) 1153 # check argument validity and normalise argument 1154 (options, argss) = _launch_parser.parse_args(argss) 1155 options = options.__dict__ 1156 self.check_launch(argss, options) 1157 1158 if 'run_name' in options.keys() and options['run_name']: 1159 self.run_name = options['run_name'] 1160 # if a dir with the given run_name already exists 1161 # remove it and warn the user 1162 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)): 1163 logger.warning('Removing old run information in \n'+ 1164 pjoin(self.me_dir, 'Events', self.run_name)) 1165 files.rm(pjoin(self.me_dir, 'Events', self.run_name)) 1166 self.results.delete_run(self.run_name) 1167 1168 if options['multicore']: 1169 self.cluster_mode = 2 1170 elif options['cluster']: 1171 self.cluster_mode = 1 1172 1173 if not switch: 1174 mode = argss[0] 1175 if mode in ['LO', 'NLO']: 1176 options['parton'] = True 1177 mode = self.ask_run_configuration(mode, options) 1178 else: 1179 mode = self.ask_run_configuration('auto', options, switch) 1180 1181 self.results.add_detail('run_mode', mode) 1182 1183 self.update_status('Starting run', level=None, update_results=True) 1184 1185 if self.options['automatic_html_opening']: 1186 misc.open_file(os.path.join(self.me_dir, 'crossx.html')) 1187 self.options['automatic_html_opening'] = False 1188 1189 if '+' in mode: 1190 mode = mode.split('+')[0] 1191 self.compile(mode, options) 1192 evt_file = self.run(mode, options) 1193 1194 if int(self.run_card['nevents']) == 0 and not mode in ['LO', 'NLO']: 1195 logger.info('No event file generated: grids have been set-up with a '\ 1196 'relative 
precision of %s' % self.run_card['req_acc']) 1197 return 1198 1199 if not mode in ['LO', 'NLO']: 1200 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')) 1201 self.exec_cmd('decay_events -from_cards', postcmd=False) 1202 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe') 1203 1204 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \ 1205 and not options['parton']: 1206 self.run_mcatnlo(evt_file) 1207 elif mode == 'noshower': 1208 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical. 1209 Please, shower the Les Houches events before using them for physics analyses.""") 1210 1211 1212 self.update_status('', level='all', update_results=True) 1213 if int(self.run_card['ickkw']) == 3 and mode in ['noshower', 'aMC@NLO']: 1214 logger.warning("""You are running with FxFx merging enabled. 1215 To be able to merge samples of various multiplicities without double counting, 1216 you have to remove some events after showering 'by hand'. 1217 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
1218 1219 1220 1221 ############################################################################
1222 - def do_compile(self, line):
1223 """Advanced commands: just compile the executables """ 1224 argss = self.split_arg(line) 1225 # check argument validity and normalise argument 1226 (options, argss) = _compile_parser.parse_args(argss) 1227 options = options.__dict__ 1228 options['reweightonly'] = False 1229 options['nocompile'] = False 1230 self.check_compile(argss, options) 1231 1232 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]] 1233 self.ask_run_configuration(mode, options) 1234 self.compile(mode, options) 1235 1236 1237 self.update_status('', level='all', update_results=True)
1238
1239 - def print_results_in_shell(self, data):
1240 """Have a nice results prints in the shell, 1241 data should be of type: gen_crossxhtml.OneTagResults""" 1242 if not data: 1243 return 1244 logger.info(" === Results Summary for run: %s tag: %s ===\n" % (data['run_name'],data['tag'])) 1245 if self.ninitial == 1: 1246 logger.info(" Width : %.4g +- %.4g GeV" % (data['cross'], data['error'])) 1247 else: 1248 logger.info(" Cross-section : %.4g +- %.4g pb" % (data['cross'], data['error'])) 1249 logger.info(" Nb of events : %s" % data['nb_event'] ) 1250 #if data['cross_pythia'] and data['nb_event_pythia']: 1251 # if self.ninitial == 1: 1252 # logger.info(" Matched Width : %.4g +- %.4g GeV" % (data['cross_pythia'], data['error_pythia'])) 1253 # else: 1254 # logger.info(" Matched Cross-section : %.4g +- %.4g pb" % (data['cross_pythia'], data['error_pythia'])) 1255 # logger.info(" Nb of events after Matching : %s" % data['nb_event_pythia']) 1256 # if self.run_card['use_syst'] in self.true: 1257 # logger.info(" Be carefull that matched information are here NOT for the central value. Refer to SysCalc output for it") 1258 logger.info(" " )
1259
1260 - def print_results_in_file(self, data, path, mode='w'):
1261 """Have a nice results prints in the shell, 1262 data should be of type: gen_crossxhtml.OneTagResults""" 1263 if not data: 1264 return 1265 1266 fsock = open(path, mode) 1267 1268 fsock.write(" === Results Summary for run: %s tag: %s process: %s ===\n" % \ 1269 (data['run_name'],data['tag'], os.path.basename(self.me_dir))) 1270 1271 if self.ninitial == 1: 1272 fsock.write(" Width : %.4g +- %.4g GeV\n" % (data['cross'], data['error'])) 1273 else: 1274 fsock.write(" Cross-section : %.4g +- %.4g pb\n" % (data['cross'], data['error'])) 1275 fsock.write(" Nb of events : %s\n" % data['nb_event'] ) 1276 #if data['cross_pythia'] and data['nb_event_pythia']: 1277 # if self.ninitial == 1: 1278 # fsock.write(" Matched Width : %.4g +- %.4g GeV\n" % (data['cross_pythia'], data['error_pythia'])) 1279 # else: 1280 # fsock.write(" Matched Cross-section : %.4g +- %.4g pb\n" % (data['cross_pythia'], data['error_pythia'])) 1281 # fsock.write(" Nb of events after Matching : %s\n" % data['nb_event_pythia']) 1282 fsock.write(" \n" )
1283 1284 1285 1286 1287
1288 - def update_random_seed(self):
1289 """Update random number seed with the value from the run_card. 1290 If this is 0, update the number according to a fresh one""" 1291 iseed = int(self.run_card['iseed']) 1292 if iseed == 0: 1293 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit')) 1294 iseed = int(randinit.read()[2:]) + 1 1295 randinit.close() 1296 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w') 1297 randinit.write('r=%d' % iseed) 1298 randinit.close()
1299 1300
1301 - def get_characteristics(self, file):
1302 """reads the proc_characteristics file and initialises the correspondant 1303 dictionary""" 1304 lines = [l for l in open(file).read().split('\n') if l and not l.startswith('#')] 1305 self.proc_characteristics = {} 1306 for l in lines: 1307 key, value = l.split('=') 1308 self.proc_characteristics[key.strip()] = value.strip()
1309 1310
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created

        mode: one of 'LO', 'NLO' (fixed order) or 'aMC@NLO', 'aMC@LO',
              'noshower', 'noshowerLO' (event generation)
        options: dictionary of run options (mutated in place below)
        """
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # iappl==2 means filling pre-existing APPLgrids: reuse the old grids
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == '2' and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))

        # set up the execution backend (cluster or multicore)
        if self.cluster_mode == 1:
            cluster_name = self.options['cluster_type']
            self.cluster = cluster.from_name[cluster_name](**self.options)
        if self.cluster_mode == 2:
            try:
                import multiprocessing
                if not self.nb_core:
                    try:
                        self.nb_core = int(self.options['nb_core'])
                    except TypeError:
                        # nb_core option unset: use all available cores
                        self.nb_core = multiprocessing.cpu_count()
                logger.info('Using %d cores' % self.nb_core)
            except ImportError:
                self.nb_core = 1
                logger.warning('Impossible to detect the number of cores => Using One.\n'+
                        'Use set nb_core X in order to set this number and be able to'+
                        'run in multicore.')

            self.cluster = cluster.MultiCore(**self.options)
        self.update_random_seed()
        #find and keep track of all the jobs
        # folder glob patterns per run mode (trailing '*' stripped below)
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                    'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        job_dict = {}
        p_dirs = [d for d in \
                open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        #find jobs and clean previous results
        if not options['only_generation'] and not options['reweightonly']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            job_dict[dir] = [file for file in \
                                 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                                 if file.startswith('ajob')]
            #find old folders to be removed
            for obj in folder_names[mode]:
                to_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                #always clean dirs for the splitted event generation
                # do not include the born_G/ grid_G which should be kept when
                # doing a f.o. run keeping old grids
                to_always_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and
                             '_' in file and not '_G' in file and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                if not options['only_generation'] and not options['reweightonly']:
                    to_always_rm.extend(to_rm)
                    if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
                        to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])

        mcatnlo_status = ['Setting up grid', 'Computing upper envelope', 'Generating events']

        if self.run_card['iappl'] == '2':
            self.applgrid_distribute(options,mode,p_dirs)

        # reweight-only: skip integration and go straight to event collection
        if options['reweightonly']:
            event_norm=self.run_card['event_norm']
            nevents=int(self.run_card['nevents'])
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        devnull = os.open(os.devnull, os.O_RDWR)
        if mode in ['LO', 'NLO']:
            # this is for fixed order runs
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']
            # grid setup: either accuracy-driven or with fixed points/iterations
            if not options['only_generation'] and req_acc != '-1':
                self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], 0, '-1', '6','0.10')
                self.update_status('Setting up grids', level=None)
                self.run_all(job_dict, [['0', mode_dict[mode], '0']], 'Setting up grids')
            elif not options['only_generation']:
                npoints = self.run_card['npoints_FO_grid']
                niters = self.run_card['niters_FO_grid']
                self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], 0, npoints, niters)
                self.update_status('Setting up grids', level=None)
                self.run_all(job_dict, [['0', mode_dict[mode], '0']], 'Setting up grids')

            npoints = self.run_card['npoints_FO']
            niters = self.run_card['niters_FO']
            self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], -1, npoints, niters)
            # collect the results and logs
            self.collect_log_files(folder_names[mode], 0)
            p = misc.Popen(['./combine_results_FO.sh', req_acc, '%s_G*' % mode_dict[mode]], \
                           stdout=subprocess.PIPE, \
                           cwd=pjoin(self.me_dir, 'SubProcesses'))
            output = p.communicate()

            self.cross_sect_dict = self.read_results(output, mode)
            self.print_summary(options, 0, mode)
            cross, error = sum_html.make_all_html_results(self, ['%s*' % mode_dict[mode]])
            self.results.add_detail('cross', cross)
            self.results.add_detail('error', error)

            self.update_status('Computing cross-section', level=None)
            self.run_all(job_dict, [['0', mode_dict[mode], '0', mode_dict[mode]]], 'Computing cross-section')

            # collect the results and logs
            self.collect_log_files(folder_names[mode], 1)
            p = misc.Popen(['./combine_results_FO.sh', '-1'] + folder_names[mode], \
                           stdout=subprocess.PIPE,
                           cwd=pjoin(self.me_dir, 'SubProcesses'))
            output = p.communicate()
            self.cross_sect_dict = self.read_results(output, mode)

            # collect the scale and PDF uncertainties
            scale_pdf_info={}
            if self.run_card['reweight_scale'] == '.true.' or self.run_card['reweight_PDF'] == '.true.':
                data_files=[]
                for dir in p_dirs:
                    for obj in folder_names[mode]:
                        for file in os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)):
                            if file.startswith(obj[:-1]) and \
                                (os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file,'scale_pdf_dependence.dat'))):
                                data_files.append(pjoin(dir,file,'scale_pdf_dependence.dat'))
                scale_pdf_info = self.pdf_scale_from_reweighting(data_files)
            # print the results:
            self.print_summary(options, 1, mode, scale_pdf_info)

            files.cp(pjoin(self.me_dir, 'SubProcesses', 'res.txt'),
                     pjoin(self.me_dir, 'Events', self.run_name))

            # combine and store the analysis output in the requested format
            if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
                misc.call(['./combine_plots_FO.sh'] + folder_names[mode], \
                            stdout=devnull,
                            cwd=pjoin(self.me_dir, 'SubProcesses'))
                files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
                         pjoin(self.me_dir, 'Events', self.run_name))
                logger.info('The results of this run and the TopDrawer file with the plots' + \
                            ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
            elif self.analyse_card['fo_analysis_format'].lower() == 'root':
                misc.call(['./combine_root.sh'] + folder_names[mode], \
                            stdout=devnull,
                            cwd=pjoin(self.me_dir, 'SubProcesses'))
                files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
                         pjoin(self.me_dir, 'Events', self.run_name))
                logger.info('The results of this run and the ROOT file with the plots' + \
                            ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
            else:
                logger.info('The results of this run' + \
                            ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))

            cross, error = sum_html.make_all_html_results(self, folder_names[mode])
            self.results.add_detail('cross', cross)
            self.results.add_detail('error', error)
            if self.run_card['iappl'] != '0':
                self.applgrid_combine(cross,error)
            self.update_status('Run complete', level='parton', update_results=True)

            # fixed-order runs create no event file
            return

        elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
            shower = self.run_card['parton_shower'].upper()
            nevents = int(self.run_card['nevents'])
            req_acc = self.run_card['req_acc']
            # sanity checks on the requested accuracy / number of events
            if nevents == 0 and float(req_acc) < 0 :
                raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                        'of events, because 0 events requested. Please set '\
                                        'the "req_acc" parameter in the run_card to a value between 0 and 1')
            elif float(req_acc) >1 or float(req_acc) == 0 :
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                        'be between larger than 0 and smaller than 1, '\
                                        'or set to -1 for automatic determination. Current value is %s' % req_acc)
            # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card)
            elif float(req_acc) < 0 and nevents > 1000000 :
                req_acc='0.001'

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. Please use one of the following: %s' \
                                    % (shower, ', '.join(shower_list)))

            # check that PYTHIA6PT is not used for processes with FSR
            if shower == 'PYTHIA6PT' and \
                self.proc_characteristics['has_fsr'] == 'true':
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower','noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')


            # the three MINT steps: grid setup, upper envelope, event generation
            for i, status in enumerate(mcatnlo_status):
                #check if need to split jobs
                # at least one channel must have enough events
                try:
                    nevents_unweighted = open(pjoin(self.me_dir,
                                                    'SubProcesses',
                                                    'nevents_unweighted')).read().split('\n')
                except IOError:
                    nevents_unweighted = []

                # only the event-generation step (i==2) may be split in sub-jobs
                split = i == 2 and \
                        int(self.run_card['nevt_job']) > 0

                if i == 2 or not options['only_generation']:
                    # if the number of events requested is zero,
                    # skip mint step 2
                    if i==2 and nevents==0:
                        self.print_summary(options, 2,mode)
                        return

                    if split:
                        # split the event generation
                        misc.call([pjoin(self.me_dir, 'bin', 'internal', 'split_jobs.py')] + \
                                   [self.run_card['nevt_job']],
                                   stdout = devnull,
                                   cwd = pjoin(self.me_dir, 'SubProcesses'))
                        assert os.path.exists(pjoin(self.me_dir, 'SubProcesses',
                                                    'nevents_unweighted_splitted'))

                    self.update_status(status, level='parton')
                    if mode in ['aMC@NLO', 'noshower']:
                        self.write_madinMMC_file(pjoin(self.me_dir, 'SubProcesses'), 'all', i)
                        self.run_all(job_dict, [['2', 'F', '%d' % i]], status, split_jobs = split)

                    elif mode in ['aMC@LO', 'noshowerLO']:
                        self.write_madinMMC_file(
                                    pjoin(self.me_dir, 'SubProcesses'), 'born', i)
                        self.run_all(job_dict,
                                     [['2', 'B', '%d' % i]],
                                     '%s at LO' % status, split_jobs = split)

                if (i < 2 and not options['only_generation']) or i == 1 :
                    # collect the results and logs
                    self.collect_log_files(folder_names[mode], i)
                    p = misc.Popen(['./combine_results.sh'] + \
                                   ['%d' % i,'%d' % nevents, '%s' % req_acc ] + \
                                   folder_names[mode],
                                   stdout=subprocess.PIPE,
                                   cwd = pjoin(self.me_dir, 'SubProcesses'))
                    output = p.communicate()
                    files.cp(pjoin(self.me_dir, 'SubProcesses', 'res_%d.txt' % i), \
                             pjoin(self.me_dir, 'Events', self.run_name))

                    self.cross_sect_dict = self.read_results(output, mode)
                    self.print_summary(options, i, mode)

                    cross, error = sum_html.make_all_html_results(self, folder_names[mode])
                    self.results.add_detail('cross', cross)
                    self.results.add_detail('error', error)

                #check that split jobs are all correctly terminated
                if split:
                    self.check_event_files()

            if self.cluster_mode == 1:
                #if cluster run, wait 15 sec so that event files are transferred back
                self.update_status(
                        'Waiting while files are transferred back from the cluster nodes',
                        level='parton')
                time.sleep(10)
            if split:
                files.cp(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted'), \
                         pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted'))


            event_norm=self.run_card['event_norm']
            self.collect_log_files(folder_names[mode], 2)
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1593 1594
    def applgrid_combine(self,cross,error):
        """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories

        cross, error: total cross-section and its uncertainty, used to weight
        the combined grids when iappl == 2."""
        logger.debug('Combining APPLgrids \n')
        # path of the applgrid-combine executable, next to applgrid-config
        applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),'applgrid-combine')
        with open(pjoin(self.me_dir,'SubProcesses','dirs.txt')) as dirf:
            all_jobs=dirf.readlines()
        ngrids=len(all_jobs)
        # number of observables = number of *_out.root grids in the first job dir
        nobs =len([name for name in os.listdir(pjoin(self.me_dir,'SubProcesses',all_jobs[0].rstrip())) \
                   if name.endswith("_out.root")])
        for obs in range(0,nobs):
            gdir = [pjoin(self.me_dir,'SubProcesses',job.rstrip(),"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
            # combine APPLgrids from different channels for observable 'obs'
            if self.run_card["iappl"] == "1":
                misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,"aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
            elif self.run_card["iappl"] == "2":
                unc2_inv=pow(cross/error,2)
                # NOTE(review): unc2_inv_ngrids is computed but never used
                unc2_inv_ngrids=pow(cross/error,2)*ngrids
                misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
                for job in all_jobs:
                    os.remove(pjoin(self.me_dir,'SubProcesses',job.rstrip(),"grid_obs_"+str(obs)+"_in.root"))
            else:
                raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
            # after combining, delete the original grids
            for ggdir in gdir:
                os.remove(ggdir)
1620 1621
    def applgrid_distribute(self,options,mode,p_dirs):
        """Distributes the APPLgrids ready to be filled by a second run of the code

        options: run options dict ('appl_start_grid' may be set or guessed here)
        mode: 'NLO' or 'LO', selects which integration directories receive grids
        p_dirs: list of SubProcesses/P* directory names"""
        # if no appl_start_grid argument given, guess it from the time stamps of the starting grid files
        if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
            gfiles=glob.glob(pjoin(self.me_dir, 'Events','*','aMCfast_obs_0_starting_grid.root'))
            time_stamps={}
            for root_file in gfiles:
                time_stamps[root_file]=os.path.getmtime(root_file)
            # pick the run directory of the most recent starting grid
            options['appl_start_grid']= \
                max(time_stamps.iterkeys(), key=(lambda key: time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. Guessing that start grid from run "%s" should be used.' \
                        % options['appl_start_grid'])

        if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # check that this dir exists and at least one grid file is there
            if not os.path.exists(pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' % \
                                      pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
            else:
                all_grids=[pjoin(start_grid_dir,name) for name in os.listdir(start_grid_dir) \
                           if name.endswith("_starting_grid.root")]
                nobs =len(all_grids)
                gstring=" ".join(all_grids)
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined. Please provide this information.')
        # choose the integration-channel directory prefix matching the run mode
        if mode == 'NLO':
            gdir='all_G'
        elif mode == 'LO':
            gdir='born_G'
        #copy the grid to all relevant directories
        for pdir in p_dirs:
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,"SubProcesses",pdir)) \
                      if file.startswith(gdir) and os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    # observable index is encoded in the grid file name
                    obs=grid.split('_')[-3]
                    files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,'grid_obs_'+obs+'_in.root'))
1661 1662
1663 - def collect_log_files(self, folders, istep):
1664 """collect the log files and put them in a single, html-friendly file inside the run_... 1665 directory""" 1666 step_list = ['Grid setting', 'Cross-section computation', 'Event generation'] 1667 log_file = pjoin(self.me_dir, 'Events', self.run_name, 1668 'alllogs_%d.html' % istep) 1669 # this keeps track of which step has been computed for which channel 1670 channel_dict = {} 1671 log_files = [] 1672 for folder in folders: 1673 log_files += glob.glob(pjoin(self.me_dir, 'SubProcesses', 'P*', folder, 'log.txt')) 1674 1675 content = '' 1676 1677 content += '<HTML><BODY>\n<font face="courier" size=2>' 1678 for log in log_files: 1679 channel_dict[os.path.dirname(log)] = [istep] 1680 # put an anchor 1681 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(pjoin(self.me_dir,'SubProcesses'),'')) 1682 # and put some nice header 1683 content += '<font color="red">\n' 1684 content += '<br>LOG file for integration channel %s, %s <br>' % \ 1685 (os.path.dirname(log).replace(pjoin(self.me_dir,'SubProcesses'), ''), 1686 step_list[istep]) 1687 content += '</font>\n' 1688 #then just flush the content of the small log inside the big log 1689 #the PRE tag prints everything verbatim 1690 content += '<PRE>\n' + open(log).read() + '\n</PRE>' 1691 content +='<br>\n' 1692 1693 content += '</font>\n</BODY></HTML>\n' 1694 open(log_file, 'w').write(content)
1695 1696
1697 - def read_results(self, output, mode):
1698 """extract results (cross-section, absolute cross-section and errors) 1699 from output, which should be formatted as 1700 Found 4 correctly terminated jobs 1701 random seed found in 'randinit' is 33 1702 Integrated abs(cross-section) 1703 7.94473937e+03 +- 2.9953e+01 (3.7702e-01%) 1704 Integrated cross-section 1705 6.63392298e+03 +- 3.7669e+01 (5.6782e-01%) 1706 for aMC@NLO/aMC@LO, and as 1707 1708 for NLO/LO 1709 The cross_sect_dict is returned""" 1710 res = {} 1711 if mode in ['aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO']: 1712 pat = re.compile(\ 1713 '''Found (\d+) correctly terminated jobs 1714 random seed found in 'randinit' is (\d+) 1715 Integrated abs\(cross-section\) 1716 \s*(\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\d+\.\d+e[+-]\d+)\%\) 1717 Integrated cross-section 1718 \s*(\-?\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\-?\d+\.\d+e[+-]\d+)\%\)''') 1719 else: 1720 pat = re.compile(\ 1721 '''Found (\d+) correctly terminated jobs 1722 \s*(\-?\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\-?\d+\.\d+e[+-]\d+)\%\)''') 1723 pass 1724 1725 match = re.search(pat, output[0]) 1726 if not match or output[1]: 1727 logger.info('Return code of the event collection: '+str(output[1])) 1728 logger.info('Output of the event collection:\n'+output[0]) 1729 raise aMCatNLOError('An error occurred during the collection of results.\n' + 1730 'Please check the .log files inside the directories which failed.') 1731 # if int(match.groups()[0]) != self.njobs: 1732 # raise aMCatNLOError('Not all jobs terminated successfully') 1733 if mode in ['aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO']: 1734 return {'randinit' : int(match.groups()[1]), 1735 'xseca' : float(match.groups()[2]), 1736 'erra' : float(match.groups()[3]), 1737 'xsect' : float(match.groups()[5]), 1738 'errt' : float(match.groups()[6])} 1739 else: 1740 return {'xsect' : float(match.groups()[1]), 1741 'errt' : float(match.groups()[2])}
1742
1743 - def print_summary(self, options, step, mode, scale_pdf_info={}):
1744 """print a summary of the results contained in self.cross_sect_dict. 1745 step corresponds to the mintMC step, if =2 (i.e. after event generation) 1746 some additional infos are printed""" 1747 # find process name 1748 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n') 1749 process = '' 1750 for line in proc_card_lines: 1751 if line.startswith('generate') or line.startswith('add process'): 1752 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; ' 1753 lpp = {'0':'l', '1':'p', '-1':'pbar'} 1754 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \ 1755 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']], 1756 self.run_card['ebeam1'], self.run_card['ebeam2']) 1757 1758 # Gather some basic statistics for the run and extracted from the log files. 1759 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 1760 log_GV_files = glob.glob(pjoin(self.me_dir, \ 1761 'SubProcesses', 'P*','G*','log_MINT*.txt')) 1762 all_log_files = glob.glob(pjoin(self.me_dir, \ 1763 'SubProcesses', 'P*','G*','log*.txt')) 1764 elif mode == 'NLO': 1765 log_GV_files = glob.glob(pjoin(self.me_dir, \ 1766 'SubProcesses', 'P*','all_G*','log*.txt')) 1767 all_log_files = sum([glob.glob(pjoin(self.me_dir,'SubProcesses', 'P*', 1768 '%sG*'%foldName,'log*.txt')) for foldName in ['all_']],[]) 1769 elif mode == 'LO': 1770 log_GV_files = '' 1771 all_log_files = sum([glob.glob(pjoin(self.me_dir,'SubProcesses', 'P*', 1772 '%sG*'%foldName,'log*.txt')) for foldName in ['born_']],[]) 1773 else: 1774 raise aMCatNLOError, 'Running mode %s not supported.'%mode 1775 1776 1777 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 1778 status = ['Determining the number of unweighted events per channel', 1779 'Updating the number of unweighted events per channel', 1780 'Summary:'] 1781 if step != 2: 1782 message = status[step] + '\n\n Intermediate results:' + \ 1783 ('\n Random seed: %(randinit)d' + 
\ 1784 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' + \ 1785 '\n Total abs(cross-section): %(xseca)8.3e +- %(erra)6.1e pb \n') \ 1786 % self.cross_sect_dict 1787 else: 1788 1789 message = '\n ' + status[step] + proc_info + \ 1790 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \ 1791 self.cross_sect_dict 1792 1793 if int(self.run_card['nevents'])>=10000 and self.run_card['reweight_scale']=='.true.': 1794 message = message + \ 1795 ('\n Ren. and fac. scale uncertainty: +%0.1f%% -%0.1f%%') % \ 1796 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low']) 1797 if int(self.run_card['nevents'])>=10000 and self.run_card['reweight_PDF']=='.true.': 1798 message = message + \ 1799 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \ 1800 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low']) 1801 1802 neg_frac = (self.cross_sect_dict['xseca'] - self.cross_sect_dict['xsect'])/\ 1803 (2. * self.cross_sect_dict['xseca']) 1804 message = message + \ 1805 ('\n Number of events generated: %s' + \ 1806 '\n Parton shower to be used: %s' + \ 1807 '\n Fraction of negative weights: %4.2f' + \ 1808 '\n Total running time : %s') % \ 1809 (self.run_card['nevents'], 1810 self.run_card['parton_shower'], 1811 neg_frac, 1812 misc.format_timer(time.time()-self.start_time)) 1813 1814 elif mode in ['NLO', 'LO']: 1815 status = ['Results after grid setup (cross-section is non-physical):', 1816 'Final results and run summary:'] 1817 if step == 0: 1818 message = '\n ' + status[step] + \ 1819 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \ 1820 self.cross_sect_dict 1821 elif step == 1: 1822 message = '\n ' + status[step] + proc_info + \ 1823 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \ 1824 self.cross_sect_dict 1825 if self.run_card['reweight_scale']=='.true.': 1826 message = message + \ 1827 ('\n Ren. and fac. 
scale uncertainty: +%0.1f%% -%0.1f%%') % \ 1828 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low']) 1829 if self.run_card['reweight_PDF']=='.true.': 1830 message = message + \ 1831 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \ 1832 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low']) 1833 1834 if (mode in ['NLO', 'LO'] and step!=1) or \ 1835 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2): 1836 logger.info(message+'\n') 1837 return 1838 1839 # Some advanced general statistics are shown in the debug message at the 1840 # end of the run 1841 # Make sure it never stops a run 1842 try: 1843 message, debug_msg = \ 1844 self.compile_advanced_stats(log_GV_files, all_log_files, message) 1845 except Exception as e: 1846 debug_msg = 'Advanced statistics collection failed with error "%s"'%str(e) 1847 1848 logger.debug(debug_msg+'\n') 1849 logger.info(message+'\n') 1850 1851 # Now copy relevant information in the Events/Run_<xxx> directory 1852 evt_path = pjoin(self.me_dir, 'Events', self.run_name) 1853 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n') 1854 open(pjoin(evt_path, '.full_summary.txt'), 1855 'w').write(message+'\n\n'+debug_msg+'\n') 1856 1857 self.archive_files(evt_path,mode)
1858
1859 - def archive_files(self, evt_path, mode):
1860 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 1861 the run.""" 1862 1863 files_to_arxiv = [pjoin('Cards','param_card.dat'), 1864 pjoin('Cards','MadLoopParams.dat'), 1865 pjoin('Cards','FKS_params.dat'), 1866 pjoin('Cards','run_card.dat'), 1867 pjoin('Subprocesses','setscales.f'), 1868 pjoin('Subprocesses','cuts.f')] 1869 1870 if mode in ['NLO', 'LO']: 1871 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 1872 1873 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 1874 os.mkdir(pjoin(evt_path,'RunMaterial')) 1875 1876 for path in files_to_arxiv: 1877 if os.path.isfile(pjoin(self.me_dir,path)): 1878 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 1879 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 1880 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
1881
1882 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
1883 """ This functions goes through the log files given in arguments and 1884 compiles statistics about MadLoop stability, virtual integration 1885 optimization and detection of potential error messages into a nice 1886 debug message to printed at the end of the run """ 1887 1888 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 1889 # > Errors is a list of tuples with this format (log_file,nErrors) 1890 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 1891 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 1892 1893 # ================================== 1894 # == MadLoop stability statistics == 1895 # ================================== 1896 1897 # Recuperate the fraction of unstable PS points found in the runs for 1898 # the virtuals 1899 UPS_stat_finder = re.compile( 1900 r"Satistics from MadLoop:.*"+\ 1901 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 1902 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 1903 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 1904 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 1905 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 1906 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 1907 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 1908 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 1909 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 1910 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 1911 1912 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 1913 1 : 'CutTools (double precision)', 1914 2 : 'PJFry++', 1915 3 : 'IREGI', 1916 4 : 'Golem95', 1917 9 : 'CutTools (quadruple precision)'} 1918 RetUnit_finder =re.compile( 1919 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 1920 #Unit 1921 1922 for gv_log in log_GV_files: 1923 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 1924 log=open(gv_log,'r').read() 1925 UPS_stats = re.search(UPS_stat_finder,log) 1926 for retunit_stats in re.finditer(RetUnit_finder, log): 1927 if 
channel_name not in stats['UPS'].keys(): 1928 stats['UPS'][channel_name] = [0]*10+[[0]*10] 1929 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 1930 += int(retunit_stats.group('n_occurences')) 1931 if not UPS_stats is None: 1932 try: 1933 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 1934 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 1935 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 1936 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 1937 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 1938 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 1939 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 1940 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 1941 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 1942 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 1943 except KeyError: 1944 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 1945 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 1946 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 1947 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 1948 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 1949 int(UPS_stats.group('n10')),[0]*10] 1950 debug_msg = "" 1951 if len(stats['UPS'].keys())>0: 1952 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 1953 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 1954 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 1955 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 1956 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 1957 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0) 1958 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 1959 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 1960 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 1961 nTot10 = sum([chan[9] for chan in 
stats['UPS'].values()],0) 1962 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 1963 for i in range(10)] 1964 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 1965 float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 1966 maxUPS = max(UPSfracs, key = lambda w: w[1]) 1967 1968 tmpStr = "" 1969 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 1970 tmpStr += '\n Stability unknown: %d'%nTotsun 1971 tmpStr += '\n Stable PS point: %d'%nTotsps 1972 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 1973 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 1974 tmpStr += '\n Only double precision used: %d'%nTotddp 1975 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 1976 tmpStr += '\n Initialization phase-space points: %d'%nTotini 1977 tmpStr += '\n Reduction methods used:' 1978 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 1979 unit_code_meaning.keys() if nTot1[i]>0] 1980 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 1981 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 1982 if nTot100 != 0: 1983 debug_msg += '\n Unknown return code (100): %d'%nTot100 1984 if nTot10 != 0: 1985 debug_msg += '\n Unknown return code (10): %d'%nTot10 1986 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 1987 not in unit_code_meaning.keys()) 1988 if nUnknownUnit != 0: 1989 debug_msg += '\n Unknown return code (1): %d'\ 1990 %nUnknownUnit 1991 1992 if maxUPS[1]>0.001: 1993 message += tmpStr 1994 message += '\n Total number of unstable PS point detected:'+\ 1995 ' %d (%4.2f%%)'%(nToteps,float(100*nToteps)/nTotPS) 1996 message += '\n Maximum fraction of UPS points in '+\ 1997 'channel %s (%4.2f%%)'%maxUPS 1998 message += '\n Please report this to the authors while '+\ 1999 'providing the file' 2000 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 2001 maxUPS[0],'UPS.log')) 2002 else: 2003 debug_msg += tmpStr 2004 2005 2006 # 
==================================================== 2007 # == aMC@NLO virtual integration optimization stats == 2008 # ==================================================== 2009 2010 virt_tricks_finder = re.compile( 2011 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 2012 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 2013 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 2014 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 2015 2016 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 2017 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 2018 2019 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 2020 2021 channel_contr_list = {} 2022 for gv_log in log_GV_files: 2023 logfile=open(gv_log,'r') 2024 log = logfile.read() 2025 logfile.close() 2026 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2027 vf_stats = None 2028 for vf_stats in re.finditer(virt_frac_finder, log): 2029 pass 2030 if not vf_stats is None: 2031 v_frac = float(vf_stats.group('v_frac')) 2032 v_average = float(vf_stats.group('v_average')) 2033 try: 2034 if v_frac < stats['virt_stats']['v_frac_min'][0]: 2035 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 2036 if v_frac > stats['virt_stats']['v_frac_max'][0]: 2037 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 2038 stats['virt_stats']['v_frac_avg'][0] += v_frac 2039 stats['virt_stats']['v_frac_avg'][1] += 1 2040 except KeyError: 2041 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 2042 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 2043 stats['virt_stats']['v_frac_avg']=[v_frac,1] 2044 2045 2046 ccontr_stats = None 2047 for ccontr_stats in re.finditer(channel_contr_finder, log): 2048 pass 2049 if not ccontr_stats is None: 2050 contrib = float(ccontr_stats.group('v_contr')) 2051 try: 2052 if 
contrib>channel_contr_list[channel_name]: 2053 channel_contr_list[channel_name]=contrib 2054 except KeyError: 2055 channel_contr_list[channel_name]=contrib 2056 2057 2058 # Now build the list of relevant virt log files to look for the maxima 2059 # of virt fractions and such. 2060 average_contrib = 0.0 2061 for value in channel_contr_list.values(): 2062 average_contrib += value 2063 if len(channel_contr_list.values()) !=0: 2064 average_contrib = average_contrib / len(channel_contr_list.values()) 2065 2066 relevant_log_GV_files = [] 2067 excluded_channels = set([]) 2068 all_channels = set([]) 2069 for log_file in log_GV_files: 2070 channel_name = '/'.join(log_file.split('/')[-3:-1]) 2071 all_channels.add(channel_name) 2072 try: 2073 if channel_contr_list[channel_name] > (0.1*average_contrib): 2074 relevant_log_GV_files.append(log_file) 2075 else: 2076 excluded_channels.add(channel_name) 2077 except KeyError: 2078 relevant_log_GV_files.append(log_file) 2079 2080 # Now we want to use the latest occurence of accumulated result in the log file 2081 for gv_log in relevant_log_GV_files: 2082 logfile=open(gv_log,'r') 2083 log = logfile.read() 2084 logfile.close() 2085 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2086 2087 vt_stats = None 2088 for vt_stats in re.finditer(virt_tricks_finder, log): 2089 pass 2090 if not vt_stats is None: 2091 vt_stats_group = vt_stats.groupdict() 2092 v_ratio = float(vt_stats.group('v_ratio')) 2093 v_ratio_err = float(vt_stats.group('v_ratio_err')) 2094 v_contr = float(vt_stats.group('v_abs_contr')) 2095 v_contr_err = float(vt_stats.group('v_abs_contr_err')) 2096 try: 2097 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 2098 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 2099 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 2100 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 2101 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 2102 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 2103 
if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 2104 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 2105 if v_contr < stats['virt_stats']['v_contr_min'][0]: 2106 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 2107 if v_contr > stats['virt_stats']['v_contr_max'][0]: 2108 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 2109 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 2110 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 2111 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 2112 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 2113 except KeyError: 2114 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 2115 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 2116 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 2117 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 2118 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 2119 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 2120 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 2121 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 2122 2123 vf_stats = None 2124 for vf_stats in re.finditer(virt_frac_finder, log): 2125 pass 2126 if not vf_stats is None: 2127 v_frac = float(vf_stats.group('v_frac')) 2128 v_average = float(vf_stats.group('v_average')) 2129 try: 2130 if v_average < stats['virt_stats']['v_average_min'][0]: 2131 stats['virt_stats']['v_average_min']=(v_average,channel_name) 2132 if v_average > stats['virt_stats']['v_average_max'][0]: 2133 stats['virt_stats']['v_average_max']=(v_average,channel_name) 2134 stats['virt_stats']['v_average_avg'][0] += v_average 2135 stats['virt_stats']['v_average_avg'][1] += 1 2136 except KeyError: 2137 stats['virt_stats']['v_average_min']=[v_average,channel_name] 2138 stats['virt_stats']['v_average_max']=[v_average,channel_name] 2139 
stats['virt_stats']['v_average_avg']=[v_average,1] 2140 2141 try: 2142 debug_msg += '\n\n Statistics on virtual integration optimization : ' 2143 2144 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 2145 %tuple(stats['virt_stats']['v_frac_max']) 2146 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 2147 %tuple(stats['virt_stats']['v_frac_min']) 2148 debug_msg += '\n Average virt fraction computed %.3f'\ 2149 %float(stats['virt_stats']['v_frac_avg'][0]/float(stats['virt_stats']['v_frac_avg'][1])) 2150 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 2151 (len(excluded_channels),len(all_channels)) 2152 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 2153 %tuple(stats['virt_stats']['v_average_max']) 2154 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 2155 %tuple(stats['virt_stats']['v_ratio_max']) 2156 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 2157 %tuple(stats['virt_stats']['v_ratio_err_max']) 2158 debug_msg += tmpStr 2159 # After all it was decided that it is better not to alarm the user unecessarily 2160 # with such printout of the statistics. 2161 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 2162 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 2163 # message += "\n Suspiciously large MC error in :" 2164 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 2165 # message += tmpStr 2166 2167 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 2168 %tuple(stats['virt_stats']['v_contr_err_max']) 2169 debug_msg += tmpStr 2170 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 2171 # message += tmpStr 2172 2173 2174 except KeyError: 2175 debug_msg += '\n Could not find statistics on the integration optimization. 
' 2176 2177 # ======================================= 2178 # == aMC@NLO timing profile statistics == 2179 # ======================================= 2180 2181 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 2182 "(?P<time>[\d\+-Eed\.]*)\s*") 2183 2184 for logf in log_GV_files: 2185 logfile=open(logf,'r') 2186 log = logfile.read() 2187 logfile.close() 2188 channel_name = '/'.join(logf.split('/')[-3:-1]) 2189 mint = re.search(mint_search,logf) 2190 if not mint is None: 2191 channel_name = channel_name+' [step %s]'%mint.group('ID') 2192 2193 for time_stats in re.finditer(timing_stat_finder, log): 2194 try: 2195 stats['timings'][time_stats.group('name')][channel_name]+=\ 2196 float(time_stats.group('time')) 2197 except KeyError: 2198 if time_stats.group('name') not in stats['timings'].keys(): 2199 stats['timings'][time_stats.group('name')] = {} 2200 stats['timings'][time_stats.group('name')][channel_name]=\ 2201 float(time_stats.group('time')) 2202 2203 # useful inline function 2204 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 2205 try: 2206 totTimeList = [(time, chan) for chan, time in \ 2207 stats['timings']['Total'].items()] 2208 except KeyError: 2209 totTimeList = [] 2210 2211 totTimeList.sort() 2212 if len(totTimeList)>0: 2213 debug_msg += '\n\n Inclusive timing profile :' 2214 debug_msg += '\n Overall slowest channel %s (%s)'%\ 2215 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 2216 debug_msg += '\n Average channel running time %s'%\ 2217 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 2218 debug_msg += '\n Aggregated total running time %s'%\ 2219 Tstr(sum([el[0] for el in totTimeList])) 2220 else: 2221 debug_msg += '\n\n Inclusive timing profile non available.' 
2222 2223 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 2224 sum(stats['timings'][stat].values()), reverse=True) 2225 for name in sorted_keys: 2226 if name=='Total': 2227 continue 2228 if sum(stats['timings'][name].values())<=0.0: 2229 debug_msg += '\n Zero time record for %s.'%name 2230 continue 2231 try: 2232 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 2233 chan) for chan, time in stats['timings'][name].items()] 2234 except KeyError, ZeroDivisionError: 2235 debug_msg += '\n\n Timing profile for %s unavailable.'%name 2236 continue 2237 TimeList.sort() 2238 debug_msg += '\n Timing profile for <%s> :'%name 2239 try: 2240 debug_msg += '\n Overall fraction of time %.3f %%'%\ 2241 float((100.0*(sum(stats['timings'][name].values())/ 2242 sum(stats['timings']['Total'].values())))) 2243 except KeyError, ZeroDivisionError: 2244 debug_msg += '\n Overall fraction of time unavailable.' 2245 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 2246 (TimeList[-1][0],TimeList[-1][1]) 2247 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 2248 (TimeList[0][0],TimeList[0][1]) 2249 2250 # ============================= 2251 # == log file eror detection == 2252 # ============================= 2253 2254 # Find the number of potential errors found in all log files 2255 # This re is a simple match on a case-insensitve 'error' but there is 2256 # also some veto added for excluding the sentence 2257 # "See Section 6 of paper for error calculation." 2258 # which appear in the header of lhapdf in the logs. 
2259 err_finder = re.compile(\ 2260 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 2261 for log in all_log_files: 2262 logfile=open(log,'r') 2263 nErrors = len(re.findall(err_finder, logfile.read())) 2264 logfile.close() 2265 if nErrors != 0: 2266 stats['Errors'].append((str(log),nErrors)) 2267 2268 nErrors = sum([err[1] for err in stats['Errors']],0) 2269 if nErrors != 0: 2270 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 2271 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 2272 'found in the following log file%s:'%('s' if \ 2273 len(stats['Errors'])>1 else '') 2274 for error in stats['Errors'][:3]: 2275 log_name = '/'.join(error[0].split('/')[-5:]) 2276 debug_msg += '\n > %d error%s in %s'%\ 2277 (error[1],'s' if error[1]>1 else '',log_name) 2278 if len(stats['Errors'])>3: 2279 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 2280 nRemainingLogs = len(stats['Errors'])-3 2281 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 2282 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 2283 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 2284 2285 return message, debug_msg
2286 2287
    def reweight_and_collect_events(self, options, mode, nevents, event_norm):
        """this function calls the reweighting routines and creates the event file in the
        Event dir. Return the name of the event file created
        """
        # scale/PDF reweighting is only run when requested in the run_card;
        # the resulting uncertainty info is later passed on to print_summary
        scale_pdf_info={}
        if (self.run_card['reweight_scale'] == '.true.' or self.run_card['reweight_PDF'] == '.true.') :
            scale_pdf_info = self.run_reweight(options['reweightonly'])

        self.update_status('Collecting events', level='parton', update_results=True)
        misc.compile(['collect_events'],
                    cwd=pjoin(self.me_dir, 'SubProcesses'))
        # the digit written to collect_events' stdin selects the event
        # normalization: 1 for 'sum', 3 for 'unity', 2 otherwise (average)
        p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
                stdin=subprocess.PIPE,
                stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
        if event_norm.lower() == 'sum':
            p.communicate(input = '1\n')
        elif event_norm.lower() == 'unity':
            p.communicate(input = '3\n')
        else:
            p.communicate(input = '2\n')

        #get filename from collect events
        # (the name of the produced file is taken as the last whitespace-split
        # token of the collect_events log)
        filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]

        if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
            raise aMCatNLOError('An error occurred during event generation. ' + \
                    'The event file has not been created. Check collect_events.log')
        evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
        if not options['reweightonly']:
            self.print_summary(options, 2, mode, scale_pdf_info)
        logger.info('The %s file has been generated.\n' % (evt_file))
        self.results.add_detail('nb_event', nevents)
        self.update_status('Events generated', level='parton', update_results=True)
        # strip the trailing '.gz': the returned name is that of the
        # uncompressed event file
        return evt_file[:-3]
2323 2324
2325 - def run_mcatnlo(self, evt_file):
2326 """runs mcatnlo on the generated event file, to produce showered-events 2327 """ 2328 logger.info('Preparing MCatNLO run') 2329 try: 2330 misc.gunzip(evt_file) 2331 except Exception: 2332 pass 2333 2334 self.banner = banner_mod.Banner(evt_file) 2335 shower = self.banner.get_detail('run_card', 'parton_shower').upper() 2336 2337 #check that the number of split event files divides the number of 2338 # events, otherwise set it to 1 2339 if int(int(self.banner.get_detail('run_card', 'nevents')) / \ 2340 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \ 2341 != int(self.banner.get_detail('run_card', 'nevents')): 2342 logger.warning(\ 2343 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \ 2344 'Setting it to 1.') 2345 self.shower_card['nsplit_jobs'] = 1 2346 2347 # don't split jobs if the user asks to shower only a part of the events 2348 if self.shower_card['nevents'] > 0 and \ 2349 self.shower_card['nevents'] < int(self.banner.get_detail('run_card', 'nevents')) and \ 2350 self.shower_card['nsplit_jobs'] != 1: 2351 logger.warning(\ 2352 'Only a part of the events will be showered.\n' + \ 2353 'Setting nsplit_jobs in the shower_card to 1.') 2354 self.shower_card['nsplit_jobs'] = 1 2355 2356 self.banner_to_mcatnlo(evt_file) 2357 2358 # if fastjet has to be linked (in extralibs) then 2359 # add lib /include dirs for fastjet if fastjet-config is present on the 2360 # system, otherwise add fjcore to the files to combine 2361 if 'fastjet' in self.shower_card['extralibs']: 2362 #first, check that stdc++ is also linked 2363 if not 'stdc++' in self.shower_card['extralibs']: 2364 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS') 2365 self.shower_card['extralibs'] += ' stdc++' 2366 # then check if options[fastjet] corresponds to a valid fj installation 2367 try: 2368 #this is for a complete fj installation 2369 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \ 2370 stdout=subprocess.PIPE, 
stderr=subprocess.PIPE) 2371 output, error = p.communicate() 2372 #remove the line break from output (last character) 2373 output = output[:-1] 2374 # add lib/include paths 2375 if not pjoin(output, 'lib') in self.shower_card['extrapaths']: 2376 logger.warning('Linking FastJet: updating EXTRAPATHS') 2377 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib') 2378 if not pjoin(output, 'include') in self.shower_card['includepaths']: 2379 logger.warning('Linking FastJet: updating INCLUDEPATHS') 2380 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include') 2381 # to be changed in the fortran wrapper 2382 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ' 2383 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ' 2384 except Exception: 2385 logger.warning('Linking FastJet: using fjcore') 2386 # this is for FJcore, so no FJ library has to be linked 2387 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '') 2388 if not 'fjcore.o' in self.shower_card['analyse']: 2389 self.shower_card['analyse'] += ' fjcore.o' 2390 # to be changed in the fortran wrapper 2391 include_line = '#include "fjcore.hh"//INCLUDE_FJ' 2392 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ' 2393 # change the fortran wrapper with the correct namespaces/include 2394 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n') 2395 for line in fjwrapper_lines: 2396 if '//INCLUDE_FJ' in line: 2397 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line 2398 if '//NAMESPACE_FJ' in line: 2399 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line 2400 open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w').write(\ 2401 '\n'.join(fjwrapper_lines) + '\n') 2402 2403 extrapaths = self.shower_card['extrapaths'].split() 2404 2405 # check that the path needed by HW++ and PY8 are set if one uses these shower 2406 if shower in ['HERWIGPP', 'PYTHIA8']: 2407 path_dict = 
{'HERWIGPP': ['hepmc_path', 2408 'thepeg_path', 2409 'hwpp_path'], 2410 'PYTHIA8': ['pythia8_path']} 2411 2412 if not all([self.options[ppath] for ppath in path_dict[shower]]): 2413 raise aMCatNLOError('Some paths are missing in the configuration file.\n' + \ 2414 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower]))) 2415 2416 if shower == 'HERWIGPP': 2417 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib')) 2418 2419 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 2420 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib')) 2421 2422 if 'LD_LIBRARY_PATH' in os.environ.keys(): 2423 ldlibrarypath = os.environ['LD_LIBRARY_PATH'] 2424 else: 2425 ldlibrarypath = '' 2426 ldlibrarypath += ':' + ':'.join(extrapaths) 2427 os.putenv('LD_LIBRARY_PATH', ldlibrarypath) 2428 2429 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat') 2430 self.shower_card.write_card(shower, shower_card_path) 2431 2432 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log') 2433 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower') 2434 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'), 2435 stderr=open(mcatnlo_log, 'w'), 2436 cwd=pjoin(self.me_dir, 'MCatNLO')) 2437 2438 exe = 'MCATNLO_%s_EXE' % shower 2439 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \ 2440 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')): 2441 print open(mcatnlo_log).read() 2442 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log) 2443 logger.info(' ... 
done') 2444 2445 # create an empty dir where to run 2446 count = 1 2447 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 2448 (shower, count))): 2449 count += 1 2450 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 2451 (shower, count)) 2452 os.mkdir(rundir) 2453 files.cp(shower_card_path, rundir) 2454 2455 #look for the event files (don't resplit if one asks for the 2456 # same number of event files as in the previous run) 2457 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 2458 'events_*.lhe')) 2459 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']: 2460 logger.info('Cleaning old files and splitting the event file...') 2461 #clean the old files 2462 files.rm([f for f in event_files if 'events.lhe' not in f]) 2463 if self.shower_card['nsplit_jobs'] > 1: 2464 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities')) 2465 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')], 2466 stdin=subprocess.PIPE, 2467 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'), 2468 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 2469 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs']) 2470 logger.info('Splitting done.') 2471 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 2472 'events_*.lhe')) 2473 2474 event_files.sort() 2475 2476 self.update_status('Showering events...', level='shower') 2477 logger.info('(Running in %s)' % rundir) 2478 if shower != 'PYTHIA8': 2479 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir) 2480 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir) 2481 else: 2482 # special treatment for pythia8 2483 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir) 2484 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir) 2485 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 2486 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir) 
2487 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir) 2488 else: 2489 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir) 2490 #link the hwpp exe in the rundir 2491 if shower == 'HERWIGPP': 2492 try: 2493 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir) 2494 except Exception: 2495 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.') 2496 2497 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')): 2498 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir) 2499 2500 files.ln(evt_file, rundir, 'events.lhe') 2501 for i, f in enumerate(event_files): 2502 files.ln(f, rundir,'events_%d.lhe' % (i + 1)) 2503 2504 if not self.shower_card['analyse']: 2505 # an hep/hepmc file as output 2506 out_id = 'HEP' 2507 else: 2508 # one or more .top file(s) as output 2509 out_id = 'TOP' 2510 2511 # write the executable 2512 open(pjoin(rundir, 'shower.sh'), 'w').write(\ 2513 open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \ 2514 % {'extralibs': ':'.join(extrapaths)}) 2515 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')]) 2516 2517 if event_files: 2518 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \ 2519 for i in range(len(event_files))] 2520 else: 2521 arg_list = [[shower, out_id, self.run_name]] 2522 2523 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower') 2524 self.njobs = 1 2525 self.wait_for_complete('shower') 2526 2527 # now collect the results 2528 message = '' 2529 warning = '' 2530 to_gzip = [evt_file] 2531 if out_id == 'HEP': 2532 #copy the showered stdhep/hepmc file back in events 2533 if shower in ['PYTHIA8', 'HERWIGPP']: 2534 hep_format = 'HEPMC' 2535 ext = 'hepmc' 2536 else: 2537 hep_format = 'StdHEP' 2538 ext = 'hep' 2539 2540 hep_file = '%s_%s_0.%s.gz' % \ 2541 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext) 2542 count = 0 2543 2544 # find the first available name for 
the output: 2545 # check existing results with or without event splitting 2546 while os.path.exists(hep_file) or \ 2547 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) : 2548 count +=1 2549 hep_file = '%s_%s_%d.%s.gz' % \ 2550 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext) 2551 2552 try: 2553 if self.shower_card['nsplit_jobs'] == 1: 2554 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file) 2555 message = ('The file %s has been generated. \nIt contains showered' + \ 2556 ' and hadronized events in the %s format obtained' + \ 2557 ' showering the parton-level event file %s.gz with %s') % \ 2558 (hep_file, hep_format, evt_file, shower) 2559 else: 2560 hep_list = [] 2561 for i in range(self.shower_card['nsplit_jobs']): 2562 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext))) 2563 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1]) 2564 message = ('The following files have been generated:\n %s\nThey contain showered' + \ 2565 ' and hadronized events in the %s format obtained' + \ 2566 ' showering the (split) parton-level event file %s.gz with %s') % \ 2567 ('\n '.join(hep_list), hep_format, evt_file, shower) 2568 2569 except OSError, IOError: 2570 raise aMCatNLOError('No file has been generated, an error occurred.'+\ 2571 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log')) 2572 2573 # run the plot creation in a secure way 2574 if hep_format == 'StdHEP': 2575 try: 2576 self.do_plot('%s -f' % self.run_name) 2577 except Exception, error: 2578 logger.info("Fail to make the plot. 
Continue...") 2579 pass 2580 2581 elif out_id == 'TOP': 2582 #copy the topdrawer file(s) back in events 2583 topfiles = [] 2584 top_tars = [tarfile.TarFile(f) for f in glob.glob(pjoin(rundir, 'topfile*.tar'))] 2585 for top_tar in top_tars: 2586 topfiles.extend(top_tar.getnames()) 2587 2588 # safety check 2589 if len(top_tars) != self.shower_card['nsplit_jobs']: 2590 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \ 2591 (self.shower_card['nsplit_jobs'], len(top_tars))) 2592 2593 # find the first available name for the output: 2594 # check existing results with or without event splitting 2595 filename = 'plot_%s_%d_' % (shower, 1) 2596 count = 1 2597 while os.path.exists(pjoin(self.me_dir, 'Events', 2598 self.run_name, '%s0.top' % filename)) or \ 2599 os.path.exists(pjoin(self.me_dir, 'Events', 2600 self.run_name, '%s0__1.top' % filename)): 2601 count += 1 2602 filename = 'plot_%s_%d_' % (shower, count) 2603 2604 if not topfiles: 2605 # if no topfiles are found just warn the user 2606 waarning = 'No .top file has been generated. 
For the results of your ' +\ 2607 'run, please check inside %s' % rundir 2608 2609 elif self.shower_card['nsplit_jobs'] == 1: 2610 # only one job for the shower 2611 top_tars[0].extractall(path = rundir) 2612 plotfiles = [] 2613 for i, file in enumerate(topfiles): 2614 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 2615 '%s%d.top' % (filename, i)) 2616 files.mv(pjoin(rundir, file), plotfile) 2617 plotfiles.append(plotfile) 2618 2619 ffiles = 'files' 2620 have = 'have' 2621 if len(plotfiles) == 1: 2622 ffiles = 'file' 2623 have = 'has' 2624 2625 message = ('The %s %s %s been generated, with histograms in the' + \ 2626 ' TopDrawer format, obtained by showering the parton-level' + \ 2627 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \ 2628 evt_file, shower) 2629 else: 2630 # many jobs for the shower have been run 2631 topfiles_set = set(topfiles) 2632 plotfiles = [] 2633 for j, top_tar in enumerate(top_tars): 2634 top_tar.extractall(path = rundir) 2635 for i, file in enumerate(topfiles_set): 2636 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 2637 '%s%d__%d.top' % (filename, i, j + 1)) 2638 files.mv(pjoin(rundir, file), plotfile) 2639 plotfiles.append(plotfile) 2640 2641 # check if the user asked to combine the .top into a single file 2642 if self.shower_card['combine_td']: 2643 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities')) 2644 2645 if self.banner.get('run_card', 'event_norm').lower() == 'sum': 2646 norm = 1. 
2647 elif self.banner.get('run_card', 'event_norm').lower() == 'average': 2648 norm = 1./float(self.shower_card['nsplit_jobs']) 2649 2650 plotfiles = [] 2651 for i, file in enumerate(topfiles_set): 2652 filelist = ['%s%d__%d.top' % (filename, i, j + 1) \ 2653 for j in range(self.shower_card['nsplit_jobs'])] 2654 infile="%d\n%s\n%s\n" % \ 2655 (self.shower_card['nsplit_jobs'], 2656 '\n'.join(filelist), 2657 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs'])) 2658 2659 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')], 2660 stdin=subprocess.PIPE, 2661 stdout=os.open(os.devnull, os.O_RDWR), 2662 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 2663 p.communicate(input = infile) 2664 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'), 2665 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 2666 plotfiles.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 2667 tar = tarfile.open( 2668 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz') 2669 for f in filelist: 2670 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f) 2671 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist]) 2672 2673 tar.close() 2674 2675 ffiles = 'files' 2676 have = 'have' 2677 if len(plotfiles) == 1: 2678 ffiles = 'file' 2679 have = 'has' 2680 2681 message = ('The %s %s %s been generated, with histograms in the' + \ 2682 ' TopDrawer format, obtained by showering the parton-level' + \ 2683 ' file %s.gz with %s.\n' + \ 2684 'The files from the different shower ' + \ 2685 'jobs (before combining them) can be found inside %s.') % \ 2686 (ffiles, ', '.join(plotfiles), have, \ 2687 evt_file, shower, 2688 ', '.join([f.replace('top', 'tar.gz') for f in plotfiles])) 2689 2690 else: 2691 message = ('The following files have been generated:\n %s\n' + \ 2692 'They contain histograms in the' + \ 2693 ' TopDrawer format, obtained by showering the parton-level' + \ 
2694 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \ 2695 evt_file, shower) 2696 2697 # Now arxiv the shower card used if RunMaterial is present 2698 run_dir_path = pjoin(rundir, self.run_name) 2699 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')): 2700 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path) 2701 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'), 2702 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\ 2703 %(shower, count))) 2704 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], 2705 cwd=run_dir_path) 2706 shutil.rmtree(pjoin(run_dir_path,'RunMaterial')) 2707 # end of the run, gzip files and print out the message/warning 2708 for f in to_gzip: 2709 misc.gzip(f) 2710 if message: 2711 logger.info(message) 2712 if warning: 2713 logger.warning(warning) 2714 2715 self.update_status('Run complete', level='shower', update_results=True)
2716 2717 2718 ############################################################################
    def set_run_name(self, name, tag=None, level='parton', reload_card=False):
        """Define the run name, the run_tag, the banner and the results.

        name: run name to switch to.
        tag: explicit run tag to use; when None a suitable one is chosen.
        level: stage of the run ('parton', 'pythia', 'shower', 'pgs',
            'delphes' or 'plot'); it determines which already-existing tags
            force a switch to a fresh tag.
        reload_card: when True and the run name is unchanged, re-read
            run_card.dat from disk.

        Returns None at parton level; for the other levels, the tag of a
        previous run providing the data required by this level (or None if
        no such tag exists).
        """

        # which levels of the LAST tag force a new tag when re-running at a
        # given level (new_run: previous run requiring changes)
        upgrade_tag = {'parton': ['parton','pythia','pgs','delphes','shower'],
                       'pythia': ['pythia','pgs','delphes'],
                       'shower': ['shower'],
                       'pgs': ['pgs'],
                       'delphes':['delphes'],
                       'plot':[]}

        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            # check if we need to change the tag
            if tag:
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                # if the last tag already holds data for this level, pick a
                # fresh tag so that the previous results are not overwritten
                for tag in upgrade_tag[level]:
                    if getattr(self.results[self.run_name][-1], tag):
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return # Nothing to do anymore

        # save/clean previous run
        if self.run_name:
            self.store_result()
        # store new name
        self.run_name = name

        # read the run_card
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # first call for this run -> set the banner
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if tag:
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level =='parton':
            pass # No results yet, so current tag is fine
        elif not self.run_name in self.results:
            # this is only for the case when you want to trick the interface
            logger.warning('Trying to run data on unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            for tag in upgrade_tag[level]:
                if getattr(self.results[self.run_name][-1], tag):
                    # LEVEL is already defined in the last tag -> need to switch tag
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # we can add the results to the current run
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag # ensure that run_tag is correct

        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # Return the tag of the previous run having the required data for this
        # tag/run to work well.
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            # walk the tags backwards and return the most recent one holding
            # pythia-level data
            for i in range(-1,-len(self.results[self.run_name])-1,-1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']
2810 - def store_result(self):
2811 """ tar the pythia results. This is done when we are quite sure that 2812 the pythia output will not be use anymore """ 2813 2814 if not self.run_name: 2815 return 2816 2817 self.results.save() 2818 2819 if not self.to_store: 2820 return 2821 2822 tag = self.run_card['run_tag'] 2823 2824 self.to_store = []
2825 2826
2827 - def get_init_dict(self, evt_file):
2828 """reads the info in the init block and returns them in a dictionary""" 2829 ev_file = open(evt_file) 2830 init = "" 2831 found = False 2832 while True: 2833 line = ev_file.readline() 2834 if "<init>" in line: 2835 found = True 2836 elif found and not line.startswith('#'): 2837 init += line 2838 if "</init>" in line or "<event>" in line: 2839 break 2840 ev_file.close() 2841 2842 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 2843 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 2844 # these are not included (so far) in the init_dict 2845 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 2846 2847 init_dict = {} 2848 init_dict['idbmup1'] = int(init.split()[0]) 2849 init_dict['idbmup2'] = int(init.split()[1]) 2850 init_dict['ebmup1'] = float(init.split()[2]) 2851 init_dict['ebmup2'] = float(init.split()[3]) 2852 init_dict['pdfgup1'] = int(init.split()[4]) 2853 init_dict['pdfgup2'] = int(init.split()[5]) 2854 init_dict['pdfsup1'] = int(init.split()[6]) 2855 init_dict['pdfsup2'] = int(init.split()[7]) 2856 init_dict['idwtup'] = int(init.split()[8]) 2857 init_dict['nprup'] = int(init.split()[9]) 2858 2859 return init_dict
2860 2861
2862 - def banner_to_mcatnlo(self, evt_file):
2863 """creates the mcatnlo input script using the values set in the header of the event_file. 2864 It also checks if the lhapdf library is used""" 2865 shower = self.banner.get('run_card', 'parton_shower').upper() 2866 pdlabel = self.banner.get('run_card', 'pdlabel') 2867 itry = 0 2868 nevents = self.shower_card['nevents'] 2869 init_dict = self.get_init_dict(evt_file) 2870 2871 if nevents < 0 or \ 2872 nevents > int(self.banner.get_detail('run_card', 'nevents')): 2873 nevents = int(self.banner.get_detail('run_card', 'nevents')) 2874 2875 nevents = nevents / self.shower_card['nsplit_jobs'] 2876 2877 mcmass_dict = {} 2878 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]: 2879 pdg = int(line.split()[0]) 2880 mass = float(line.split()[1]) 2881 mcmass_dict[pdg] = mass 2882 2883 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1]) 2884 content += 'NEVENTS=%d\n' % nevents 2885 content += 'NEVENTS_TOT=%d\n' % (int(self.banner.get_detail('run_card', 'nevents')) /\ 2886 self.shower_card['nsplit_jobs']) 2887 content += 'MCMODE=%s\n' % shower 2888 content += 'PDLABEL=%s\n' % pdlabel 2889 content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value 2890 #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid') 2891 #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 2892 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value 2893 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value 2894 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value 2895 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value 2896 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value 2897 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value 2898 try: 2899 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value 2900 
content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value 2901 except KeyError: 2902 content += 'HGGMASS=120.\n' 2903 content += 'HGGWIDTH=0.00575308848\n' 2904 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1') 2905 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2') 2906 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1') 2907 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2') 2908 content += 'DMASS=%s\n' % mcmass_dict[1] 2909 content += 'UMASS=%s\n' % mcmass_dict[2] 2910 content += 'SMASS=%s\n' % mcmass_dict[3] 2911 content += 'CMASS=%s\n' % mcmass_dict[4] 2912 content += 'BMASS=%s\n' % mcmass_dict[5] 2913 try: 2914 content += 'EMASS=%s\n' % mcmass_dict[11] 2915 content += 'MUMASS=%s\n' % mcmass_dict[13] 2916 content += 'TAUMASS=%s\n' % mcmass_dict[15] 2917 except KeyError: 2918 # this is for backward compatibility 2919 mcmass_lines = [l for l in \ 2920 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper()) 2921 ).read().split('\n') if l] 2922 new_mcmass_dict = {} 2923 for l in mcmass_lines: 2924 key, val = l.split('=') 2925 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip() 2926 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)'] 2927 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)'] 2928 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)'] 2929 2930 content += 'GMASS=%s\n' % mcmass_dict[21] 2931 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower() 2932 # check if need to link lhapdf 2933 if int(self.shower_card['pdfcode']) > 1 or \ 2934 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1): 2935 # Use LHAPDF (should be correctly installed, because 2936 # either events were already generated with them, or the 2937 # user explicitly gives an LHAPDF number in the 2938 # shower_card). 
2939 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 2940 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 2941 stdout = subprocess.PIPE).stdout.read().strip() 2942 content += 'LHAPDFPATH=%s\n' % lhapdfpath 2943 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 2944 if self.shower_card['pdfcode']==1: 2945 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 2946 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 2947 else: 2948 lhaid_list = [abs(int(self.shower_card['pdfcode']))] 2949 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode'] 2950 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 2951 elif int(self.shower_card['pdfcode'])==1: 2952 # Try to use LHAPDF because user wants to use the same PDF 2953 # as was used for the event generation. However, for the 2954 # event generation, LHAPDF was not used, so non-trivial to 2955 # see if if LHAPDF is available with the corresponding PDF 2956 # set. If not found, give a warning and use build-in PDF 2957 # set instead. 2958 try: 2959 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 2960 stdout = subprocess.PIPE).stdout.read().strip() 2961 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 2962 content += 'LHAPDFPATH=%s\n' % lhapdfpath 2963 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 2964 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 2965 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 2966 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 2967 except Exception: 2968 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\ 2969 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\ 2970 ' needed PDF set. Will use default internal PDF for the shower instead. 
To use the'+\ 2971 ' same set as was used in the event generation install LHAPDF and set the path using'+\ 2972 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell') 2973 content += 'LHAPDFPATH=\n' 2974 content += 'PDFCODE=0\n' 2975 else: 2976 content += 'LHAPDFPATH=\n' 2977 content += 'PDFCODE=0\n' 2978 2979 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw') 2980 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj') 2981 # add the pythia8/hwpp path(s) 2982 if self.options['pythia8_path']: 2983 content+='PY8PATH=%s\n' % self.options['pythia8_path'] 2984 if self.options['hwpp_path']: 2985 content+='HWPPPATH=%s\n' % self.options['hwpp_path'] 2986 if self.options['thepeg_path']: 2987 content+='THEPEGPATH=%s\n' % self.options['thepeg_path'] 2988 if self.options['hepmc_path']: 2989 content+='HEPMCPATH=%s\n' % self.options['hepmc_path'] 2990 2991 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w') 2992 output.write(content) 2993 output.close() 2994 return shower
2995 2996
2997 - def run_reweight(self, only):
2998 """runs the reweight_xsec_events eecutables on each sub-event file generated 2999 to compute on the fly scale and/or PDF uncertainities""" 3000 logger.info(' Doing reweight') 3001 3002 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted') 3003 # if only doing reweight, copy back the nevents_unweighted file 3004 if only: 3005 if os.path.exists(nev_unw + '.orig'): 3006 files.cp(nev_unw + '.orig', nev_unw) 3007 else: 3008 raise aMCatNLOError('Cannot find event file information') 3009 3010 #read the nevents_unweighted file to get the list of event files 3011 file = open(nev_unw) 3012 lines = file.read().split('\n') 3013 file.close() 3014 # make copy of the original nevent_unweighted file 3015 files.cp(nev_unw, nev_unw + '.orig') 3016 # loop over lines (all but the last one whith is empty) and check that the 3017 # number of events is not 0 3018 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0'] 3019 #prepare the job_dict 3020 job_dict = {} 3021 exe = 'reweight_xsec_events.local' 3022 for i, evt_file in enumerate(evt_files): 3023 path, evt = os.path.split(evt_file) 3024 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \ 3025 pjoin(self.me_dir, 'SubProcesses', path)) 3026 job_dict[path] = [exe] 3027 3028 self.run_all(job_dict, [[evt, '1']], 'Running reweight') 3029 3030 #check that the new event files are complete 3031 for evt_file in evt_files: 3032 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \ 3033 pjoin(self.me_dir, 'SubProcesses', evt_file)], \ 3034 stdout = subprocess.PIPE).stdout.read().strip() 3035 if last_line != "</LesHouchesEvents>": 3036 raise aMCatNLOError('An error occurred during reweight. 
Check the' + \ 3037 '\'reweight_xsec_events.output\' files inside the ' + \ 3038 '\'SubProcesses/P*/G*/ directories for details') 3039 3040 #update file name in nevents_unweighted 3041 newfile = open(nev_unw, 'w') 3042 for line in lines: 3043 if line: 3044 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n') 3045 newfile.close() 3046 3047 return self.pdf_scale_from_reweighting(evt_files)
3048
3049 - def pdf_scale_from_reweighting(self, evt_files):
3050 """This function takes the files with the scale and pdf values 3051 written by the reweight_xsec_events.f code 3052 (P*/G*/pdf_scale_dependence.dat) and computes the overall 3053 scale and PDF uncertainty (the latter is computed using the 3054 Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000)) 3055 and returns it in percents. The expected format of the file 3056 is: n_scales xsec_scale_central xsec_scale1 ... n_pdf 3057 xsec_pdf0 xsec_pdf1 ....""" 3058 scale_pdf_info={} 3059 scales=[] 3060 pdfs=[] 3061 numofpdf = 0 3062 numofscales = 0 3063 for evt_file in evt_files: 3064 path, evt=os.path.split(evt_file) 3065 data_file=open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat')).read() 3066 lines = data_file.replace("D", "E").split("\n") 3067 if not numofscales: 3068 numofscales = int(lines[0]) 3069 if not numofpdf: 3070 numofpdf = int(lines[2]) 3071 scales_this = [float(val) for val in lines[1].split()] 3072 pdfs_this = [float(val) for val in lines[3].split()] 3073 3074 if numofscales != len(scales_this) or numofpdf !=len(pdfs_this): 3075 # the +1 takes the 0th (central) set into account 3076 logger.info(data_file) 3077 logger.info((' Expected # of scales: %d\n'+ 3078 ' Found # of scales: %d\n'+ 3079 ' Expected # of pdfs: %d\n'+ 3080 ' Found # of pdfs: %d\n') % 3081 (numofscales, len(scales_this), numofpdf, len(pdfs_this))) 3082 raise aMCatNLOError('inconsistent scale_pdf_dependence.dat') 3083 if not scales: 3084 scales = [0.] * numofscales 3085 if not pdfs: 3086 pdfs = [0.] 
* numofpdf 3087 3088 scales = [a + b for a, b in zip(scales, scales_this)] 3089 pdfs = [a + b for a, b in zip(pdfs, pdfs_this)] 3090 3091 # get the central value 3092 if numofscales>0 and numofpdf==0: 3093 cntrl_val=scales[0] 3094 elif numofpdf>0 and numofscales==0: 3095 cntrl_val=pdfs[0] 3096 elif numofpdf>0 and numofscales>0: 3097 if abs(1-scales[0]/pdfs[0])>0.0001: 3098 raise aMCatNLOError('Central values for scale and PDF variation not identical') 3099 else: 3100 cntrl_val=scales[0] 3101 3102 # get the scale uncertainty in percent 3103 scale_upp=0.0 3104 scale_low=0.0 3105 if numofscales>0: 3106 if cntrl_val != 0.0: 3107 scale_pdf_info['scale_upp'] = (max(scales)/cntrl_val-1)*100 3108 scale_pdf_info['scale_low'] = (1-min(scales)/cntrl_val)*100 3109 else: 3110 scale_pdf_info['scale_upp'] = 0.0 3111 scale_pdf_info['scale_low'] = 0.0 3112 3113 3114 # get the pdf uncertainty in percent (according to the Hessian method) 3115 lhaid=int(self.run_card['lhaid']) 3116 pdf_upp=0.0 3117 pdf_low=0.0 3118 if lhaid <= 90000: 3119 # use Hessian method (CTEQ & MSTW) 3120 if numofpdf>1: 3121 for i in range(int(numofpdf/2)): 3122 pdf_upp=pdf_upp+math.pow(max(0.0,pdfs[2*i+1]-cntrl_val,pdfs[2*i+2]-cntrl_val),2) 3123 pdf_low=pdf_low+math.pow(max(0.0,cntrl_val-pdfs[2*i+1],cntrl_val-pdfs[2*i+2]),2) 3124 if cntrl_val != 0.0: 3125 scale_pdf_info['pdf_upp'] = math.sqrt(pdf_upp)/cntrl_val*100 3126 scale_pdf_info['pdf_low'] = math.sqrt(pdf_low)/cntrl_val*100 3127 else: 3128 scale_pdf_info['pdf_upp'] = 0.0 3129 scale_pdf_info['pdf_low'] = 0.0 3130 3131 else: 3132 # use Gaussian method (NNPDF) 3133 pdf_stdev=0.0 3134 for i in range(int(numofpdf-1)): 3135 pdf_stdev = pdf_stdev + pow(pdfs[i+1] - cntrl_val,2) 3136 pdf_stdev = math.sqrt(pdf_stdev/int(numofpdf-2)) 3137 if cntrl_val != 0.0: 3138 scale_pdf_info['pdf_upp'] = pdf_stdev/cntrl_val*100 3139 else: 3140 scale_pdf_info['pdf_upp'] = 0.0 3141 scale_pdf_info['pdf_low'] = scale_pdf_info['pdf_upp'] 3142 return scale_pdf_info
3143 3144
3145 - def wait_for_complete(self, run_type):
3146 """this function waits for jobs on cluster to complete their run.""" 3147 3148 starttime = time.time() 3149 #logger.info(' Waiting for submitted jobs to complete') 3150 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 3151 starttime=starttime, level='parton', update_results=True) 3152 try: 3153 self.cluster.wait(self.me_dir, update_status) 3154 except: 3155 self.cluster.remove() 3156 raise
3157
3158 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
3159 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 3160 njob_split = 0 3161 self.ijob = 0 3162 3163 # this is to keep track, if splitting evt generation, of the various 3164 # folders/args in order to resubmit the jobs if some of them fail 3165 self.split_folders = {} 3166 3167 if run_type != 'shower': 3168 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 3169 for args in arg_list: 3170 for Pdir, jobs in job_dict.items(): 3171 for job in jobs: 3172 if not split_jobs: 3173 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 3174 else: 3175 for n in self.find_jobs_to_split(Pdir, job, args[1]): 3176 self.run_exe(job, args + [n], run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 3177 njob_split += 1 3178 # print some statistics if running serially 3179 if self.cluster_mode == 2: 3180 time.sleep(1) # security to allow all jobs to be launched 3181 if njob_split > 0: 3182 self.njobs = njob_split 3183 else: 3184 self.njobs = len(arg_list) 3185 for args in arg_list: 3186 [(cwd, exe)] = job_dict.items() 3187 self.run_exe(exe, args, run_type, cwd) 3188 3189 self.wait_for_complete(run_type)
3190 3191 3192
3193 - def check_event_files(self):
3194 """check the integrity of the event files after splitting, and resubmit 3195 those which are not nicely terminated""" 3196 to_resubmit = [] 3197 for dir in self.split_folders.keys(): 3198 last_line = '' 3199 try: 3200 last_line = subprocess.Popen( 3201 ['tail', '-n1', pjoin(dir, 'events.lhe')], \ 3202 stdout = subprocess.PIPE).stdout.read().strip() 3203 except IOError: 3204 pass 3205 3206 if last_line != "</LesHouchesEvents>": 3207 to_resubmit.append(dir) 3208 3209 self.njobs = 0 3210 if to_resubmit: 3211 run_type = 'Resubmitting broken jobs' 3212 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 3213 logger.debug('Resubmitting\n' + '\n'.join(to_resubmit) + '\n') 3214 for dir in to_resubmit: 3215 files.rm([dir]) 3216 job = self.split_folders[dir][0] 3217 args = self.split_folders[dir][1:] 3218 run_type = 'monitor' 3219 cwd = os.path.split(dir)[0] 3220 self.run_exe(job, args, run_type, cwd=cwd ) 3221 self.njobs +=1 3222 3223 self.wait_for_complete(run_type)
3224 3225
3226 - def find_jobs_to_split(self, pdir, job, arg):
3227 """looks into the nevents_unweighed_splitted file to check how many 3228 split jobs are needed for this (pdir, job). arg is F, B or V""" 3229 # find the number of the integration channel 3230 splittings = [] 3231 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 3232 pattern = re.compile('for i in (\d+) ; do') 3233 match = re.search(pattern, ajob) 3234 channel = match.groups()[0] 3235 # then open the nevents_unweighted_splitted file and look for the 3236 # number of splittings to be done 3237 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 3238 # This skips the channels with zero events, because they are 3239 # not of the form GFXX_YY, but simply GFXX 3240 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 3241 pjoin(pdir, 'G%s%s' % (arg,channel))) 3242 matches = re.findall(pattern, nevents_file) 3243 for m in matches: 3244 splittings.append(m) 3245 return splittings
3246 3247
3248 - def run_exe(self, exe, args, run_type, cwd=None):
3249 """this basic function launch locally/on cluster exe with args as argument. 3250 """ 3251 3252 # first test that exe exists: 3253 execpath = None 3254 if cwd and os.path.exists(pjoin(cwd, exe)): 3255 execpath = pjoin(cwd, exe) 3256 elif not cwd and os.path.exists(exe): 3257 execpath = exe 3258 else: 3259 raise aMCatNLOError('Cannot find executable %s in %s' \ 3260 % (exe, os.getcwd())) 3261 # check that the executable has exec permissions 3262 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK): 3263 subprocess.call(['chmod', '+x', exe], cwd=cwd) 3264 # finally run it 3265 if self.cluster_mode == 0: 3266 #this is for the serial run 3267 misc.call(['./'+exe] + args, cwd=cwd) 3268 self.ijob += 1 3269 self.update_status((max([self.njobs - self.ijob - 1, 0]), 3270 min([1, self.njobs - self.ijob]), 3271 self.ijob, run_type), level='parton') 3272 3273 #this is for the cluster/multicore run 3274 elif 'reweight' in exe: 3275 # a reweight run 3276 # Find the correct PDF input file 3277 input_files, output_files = [], [] 3278 pdfinput = self.get_pdf_input_filename() 3279 if os.path.exists(pdfinput): 3280 input_files.append(pdfinput) 3281 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events')) 3282 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat')) 3283 input_files.append(args[0]) 3284 output_files.append('%s.rwgt' % os.path.basename(args[0])) 3285 output_files.append('reweight_xsec_events.output') 3286 output_files.append('scale_pdf_dependence.dat') 3287 3288 return self.cluster.submit2(exe, args, cwd=cwd, 3289 input_files=input_files, output_files=output_files, 3290 required_output=output_files) 3291 3292 elif 'ajob' in exe: 3293 # the 'standard' amcatnlo job 3294 # check if args is a list of string 3295 if type(args[0]) == str: 3296 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd, args) 3297 #submitting 3298 self.cluster.submit2(exe, args, cwd=cwd, 3299 input_files=input_files, 
output_files=output_files, 3300 required_output=required_output) 3301 3302 # keep track of folders and arguments for splitted evt gen 3303 subfolder=output_files[-1].split('/')[0] 3304 if len(args) == 4 and '_' in subfolder: 3305 self.split_folders[pjoin(cwd,subfolder)] = [exe] + args 3306 3307 elif 'shower' in exe: 3308 # a shower job 3309 # args are [shower, output(HEP or TOP), run_name] 3310 # cwd is the shower rundir, where the executable are found 3311 input_files, output_files = [], [] 3312 shower = args[0] 3313 # the input files 3314 if shower == 'PYTHIA8': 3315 input_files.append(pjoin(cwd, 'Pythia8.exe')) 3316 input_files.append(pjoin(cwd, 'Pythia8.cmd')) 3317 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 3318 input_files.append(pjoin(cwd, 'config.sh')) 3319 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc')) 3320 else: 3321 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc')) 3322 else: 3323 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower)) 3324 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower)) 3325 if shower == 'HERWIGPP': 3326 input_files.append(pjoin(cwd, 'Herwig++')) 3327 input_files.append(pjoin(cwd, 'HepMCFortran.so')) 3328 if len(args) == 3: 3329 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')): 3330 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')) 3331 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')): 3332 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')) 3333 else: 3334 raise aMCatNLOError, 'Event file not present in %s' % \ 3335 pjoin(self.me_dir, 'Events', self.run_name) 3336 else: 3337 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3])) 3338 # the output files 3339 if len(args) == 3: 3340 output_files.append('mcatnlo_run.log') 3341 else: 3342 output_files.append('mcatnlo_run_%s.log' % args[3]) 3343 if args[1] == 'HEP': 3344 if len(args) == 
3: 3345 fname = 'events' 3346 else: 3347 fname = 'events_%s' % args[3] 3348 if shower in ['PYTHIA8', 'HERWIGPP']: 3349 output_files.append(fname + '.hepmc.gz') 3350 else: 3351 output_files.append(fname + '.hep.gz') 3352 elif args[1] == 'TOP': 3353 if len(args) == 3: 3354 fname = 'topfile' 3355 else: 3356 fname = 'topfile_%s' % args[3] 3357 output_files.append(fname + '.tar') 3358 else: 3359 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1] 3360 #submitting 3361 self.cluster.submit2(exe, args, cwd=cwd, 3362 input_files=input_files, output_files=output_files) 3363 3364 else: 3365 return self.cluster.submit(exe, args, cwd=cwd)
3366
3367 - def getIO_ajob(self,exe,cwd, args):
3368 # use local disk if possible => need to stands what are the 3369 # input/output files 3370 3371 keep_fourth_arg = False 3372 output_files = [] 3373 required_output = [] 3374 input_files = [pjoin(self.me_dir, 'MGMEVersion.txt'), 3375 pjoin(self.me_dir, 'SubProcesses', 'randinit'), 3376 pjoin(cwd, 'symfact.dat'), 3377 pjoin(cwd, 'iproc.dat'), 3378 pjoin(cwd, 'initial_states_map.dat'), 3379 pjoin(cwd, 'configs_and_props_info.dat'), 3380 pjoin(cwd, 'leshouche_info.dat'), 3381 pjoin(cwd, 'param_card.dat'), 3382 pjoin(cwd, 'FKS_params.dat')] 3383 3384 if os.path.exists(pjoin(cwd,'nevents.tar')): 3385 input_files.append(pjoin(cwd,'nevents.tar')) 3386 3387 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')): 3388 input_files.append(pjoin(cwd, 'OLE_order.olc')) 3389 3390 # File for the loop (might not be present if MadLoop is not used) 3391 if os.path.exists(pjoin(cwd,'MadLoop5_resources')): 3392 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 3393 if not os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')): 3394 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz', 3395 dereference=True) 3396 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources') 3397 tf.close() 3398 3399 Ire = re.compile("for i in ([\d\s]*) ; do") 3400 try : 3401 fsock = open(exe) 3402 except IOError: 3403 fsock = open(pjoin(cwd,exe)) 3404 text = fsock.read() 3405 data = Ire.findall(text) 3406 subdir = ' '.join(data).split() 3407 3408 if args[0] == '0': 3409 # MADEVENT MINT FO MODE 3410 input_files.append(pjoin(cwd, 'madevent_mintFO')) 3411 input_files.append(pjoin(self.me_dir, 'SubProcesses','madin.%s' % args[1])) 3412 #j=$2\_G$i 3413 for i in subdir: 3414 current = '%s_G%s' % (args[1],i) 3415 if os.path.exists(pjoin(cwd,current)): 3416 input_files.append(pjoin(cwd, current)) 3417 output_files.append(current) 3418 3419 required_output.append('%s/results.dat' % current) 3420 required_output.append('%s/log.txt' % current) 3421 
required_output.append('%s/mint_grids' % current) 3422 required_output.append('%s/grid.MC_integer' % current) 3423 if len(args) == 4: 3424 required_output.append('%s/scale_pdf_dependence.dat' % current) 3425 args[2] = '-1' 3426 # use a grid train on another part 3427 base = '%s_G%s' % (args[3],i) 3428 if args[0] == '0': 3429 to_move = ['grid.MC_integer','mint_grids'] 3430 elif args[0] == '1': 3431 to_move = ['mint_grids', 'grid.MC_integer'] 3432 else: 3433 to_move = [] 3434 if self.run_card['iappl'] =='2': 3435 for grid in glob.glob(pjoin(cwd,base,'grid_obs_*_in.root')): 3436 to_move.append(grid) 3437 if not os.path.exists(pjoin(cwd,current)): 3438 os.mkdir(pjoin(cwd,current)) 3439 input_files.append(pjoin(cwd, current)) 3440 for name in to_move: 3441 files.cp(pjoin(cwd,base, name), 3442 pjoin(cwd,current)) 3443 files.cp(pjoin(cwd,base, 'grid.MC_integer'), 3444 pjoin(cwd,current)) 3445 3446 elif args[0] == '2': 3447 # MINTMC MODE 3448 input_files.append(pjoin(cwd, 'madevent_mintMC')) 3449 if args[2] in ['0','2']: 3450 input_files.append(pjoin(self.me_dir, 'SubProcesses','madinMMC_%s.2' % args[1])) 3451 3452 for i in subdir: 3453 current = 'G%s%s' % (args[1], i) 3454 if os.path.exists(pjoin(cwd,current)): 3455 input_files.append(pjoin(cwd, current)) 3456 output_files.append(current) 3457 if len(args) == 4 and args[3] in ['H','S','V','B','F']: 3458 # use a grid train on another part 3459 base = '%s_%s' % (args[3],i) 3460 files.ln(pjoin(cwd,base,'mint_grids'), name = 'preset_mint_grids', 3461 starting_dir=pjoin(cwd,current)) 3462 files.ln(pjoin(cwd,base,'grid.MC_integer'), 3463 starting_dir=pjoin(cwd,current)) 3464 elif len(args) ==4: 3465 keep_fourth_arg = True 3466 # this is for the split event generation 3467 output_files.append('G%s%s_%s' % (args[1], i, args[3])) 3468 required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1], i, args[3],args[2])) 3469 3470 else: 3471 required_output.append('%s/log_MINT%s.txt' % (current,args[2])) 3472 if args[2] in ['0','1']: 
3473 required_output.append('%s/results.dat' % current) 3474 if args[2] == '1': 3475 output_files.append('%s/results.dat' % current) 3476 3477 else: 3478 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args)) 3479 3480 #Find the correct PDF input file 3481 pdfinput = self.get_pdf_input_filename() 3482 if os.path.exists(pdfinput): 3483 input_files.append(pdfinput) 3484 3485 if len(args) == 4 and not keep_fourth_arg: 3486 args = args[:3] 3487 3488 return input_files, output_files, required_output, args
3489
3490 - def write_madinMMC_file(self, path, run_mode, mint_mode):
3491 """writes the madinMMC_?.2 file""" 3492 #check the validity of the arguments 3493 run_modes = ['born', 'virt', 'novi', 'all', 'viSB', 'novB'] 3494 if run_mode not in run_modes: 3495 raise aMCatNLOError('%s is not a valid mode for run. Please use one of the following: %s' \ 3496 % (run_mode, ', '.join(run_modes))) 3497 mint_modes = [0, 1, 2] 3498 if mint_mode not in mint_modes: 3499 raise aMCatNLOError('%s is not a valid mode for mintMC. Please use one of the following: %s' \ 3500 % (mint_mode, ', '.join(mint_modes))) 3501 if run_mode in ['born']: 3502 name_suffix = 'B' 3503 elif run_mode in ['virt', 'viSB']: 3504 name_suffix = 'V' 3505 else: 3506 name_suffix = 'F' 3507 3508 content = \ 3509 """-1 12 ! points, iterations 3510 0.03 ! desired fractional accuracy 3511 1 -0.1 ! alpha, beta for Gsoft 3512 -1 -0.1 ! alpha, beta for Gazi 3513 1 ! Suppress amplitude (0 no, 1 yes)? 3514 1 ! Exact helicity sum (0 yes, n = number/event)? 3515 1 ! Enter Configuration Number: 3516 %1d ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events 3517 1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij 3518 %s ! all, born, real, virt 3519 """ \ 3520 % (mint_mode, run_mode) 3521 file = open(pjoin(path, 'madinMMC_%s.2' % name_suffix), 'w') 3522 file.write(content) 3523 file.close()
3524
3525 - def write_madin_file(self, path, run_mode, vegas_mode, npoints, niters, accuracy='0'):
3526 """writes the madin.run_mode file""" 3527 #check the validity of the arguments 3528 run_modes = ['born', 'virt', 'novi', 'all', 'viSB', 'novB', 'grid'] 3529 if run_mode not in run_modes: 3530 raise aMCatNLOError('%s is not a valid mode for run. Please use one of the following: %s' \ 3531 % (run_mode, ', '.join(run_modes))) 3532 name_suffix = run_mode 3533 3534 content = \ 3535 """%s %s ! points, iterations 3536 %s ! accuracy 3537 2 ! 0 fixed grid 2 adjust 3538 1 ! 1 suppress amp, 0 doesnt 3539 1 ! 0 for exact hel sum 3540 1 ! hel configuration numb 3541 'test' 3542 1 ! 1 to save grids 3543 %s ! 0 to exclude, 1 for new run, 2 to restart, 3 to reset w/ keeping grid 3544 %s ! all, born, real, virt 3545 """ \ 3546 % (npoints,niters,accuracy,vegas_mode,run_mode) 3547 file = open(pjoin(path, 'madin.%s' % name_suffix), 'w') 3548 file.write(content) 3549 file.close()
3550
3551 - def compile(self, mode, options):
3552 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 3553 specified in mode""" 3554 3555 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 3556 3557 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 3558 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 3559 3560 self.get_characteristics(pjoin(self.me_dir, 3561 'SubProcesses', 'proc_characteristics')) 3562 3563 #define a bunch of log files 3564 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 3565 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 3566 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 3567 test_log = pjoin(self.me_dir, 'test.log') 3568 3569 self.update_status('Compiling the code', level=None, update_results=True) 3570 3571 3572 libdir = pjoin(self.me_dir, 'lib') 3573 sourcedir = pjoin(self.me_dir, 'Source') 3574 3575 #clean files 3576 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 3577 #define which executable/tests to compile 3578 if '+' in mode: 3579 mode = mode.split('+')[0] 3580 if mode in ['NLO', 'LO']: 3581 exe = 'madevent_mintFO' 3582 tests = ['test_ME'] 3583 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 3584 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 3585 exe = 'madevent_mintMC' 3586 tests = ['test_ME', 'test_MC'] 3587 # write an analyse_opts with a dummy analysis so that compilation goes through 3588 open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w').write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o\n') 3589 3590 #directory where to compile exe 3591 p_dirs = [d for d in \ 3592 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 3593 # create param_card.inc and run_card.inc 3594 self.do_treatcards('', amcatnlo=True) 3595 # if --nocompile option is specified, check here that all exes exists. 
3596 # If they exists, return 3597 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \ 3598 for p_dir in p_dirs]) and options['nocompile']: 3599 return 3600 3601 # rm links to lhapdflib/ PDFsets if exist 3602 if os.path.exists(pjoin(libdir, 'PDFsets')): 3603 files.rm(pjoin(libdir, 'PDFsets')) 3604 3605 # read the run_card to find if lhapdf is used or not 3606 if self.run_card['pdlabel'] == 'lhapdf' and \ 3607 (self.banner.get_detail('run_card', 'lpp1') != '0' or \ 3608 self.banner.get_detail('run_card', 'lpp1') != '0'): 3609 3610 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs]) 3611 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 3612 lhaid_list = [int(self.run_card['lhaid'])] 3613 if self.run_card['reweight_PDF'].lower() == '.true.': 3614 lhaid_list.append(int(self.run_card['PDF_set_min'])) 3615 lhaid_list.append(int(self.run_card['PDF_set_max'])) 3616 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 3617 3618 else: 3619 if self.run_card['lpp1'] == '1' == self.run_card['lpp2']: 3620 logger.info('Using built-in libraries for PDFs') 3621 if self.run_card['lpp1'] == '0' == self.run_card['lpp2']: 3622 logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.') 3623 try: 3624 del os.environ['lhapdf'] 3625 except KeyError: 3626 pass 3627 3628 # read the run_card to find if applgrid is used or not 3629 if self.run_card['iappl'] != '0': 3630 os.environ['applgrid'] = 'True' 3631 # check versions of applgrid and amcfast 3632 for code in ['applgrid','amcfast']: 3633 try: 3634 p = subprocess.Popen([self.options[code], '--version'], \ 3635 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 3636 except OSError: 3637 raise aMCatNLOError(('No valid %s installation found. 
\n' + \ 3638 'Please set the path to %s-config by using \n' + \ 3639 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code)) 3640 else: 3641 output, _ = p.communicate() 3642 if code is 'applgrid' and output < '1.4.63': 3643 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\ 3644 +' You are using %s',output) 3645 if code is 'amcfast' and output < '1.1.1': 3646 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\ 3647 +' You are using %s',output) 3648 3649 # set-up the Source/make_opts with the correct applgrid-config file 3650 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 3651 % (self.options['amcfast'],self.options['applgrid']) 3652 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 3653 text_out=[] 3654 for line in text: 3655 if line.strip().startswith('APPLLIBS=$'): 3656 line=appllibs 3657 text_out.append(line) 3658 open(pjoin(self.me_dir,'Source','make_opts'),'w').writelines(text_out) 3659 else: 3660 try: 3661 del os.environ['applgrid'] 3662 except KeyError: 3663 pass 3664 3665 try: 3666 os.environ['fastjet_config'] = self.options['fastjet'] 3667 except (TypeError, KeyError): 3668 if 'fastjet_config' in os.environ: 3669 del os.environ['fastjet_config'] 3670 os.unsetenv('fastjet_config') 3671 3672 # make Source 3673 self.update_status('Compiling source...', level=None) 3674 misc.compile(['clean4pdf'], cwd = sourcedir) 3675 misc.compile(cwd = sourcedir) 3676 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 3677 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 3678 and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 3679 and os.path.exists(pjoin(libdir, 'libpdf.a')): 3680 logger.info(' ...done, continuing with P* directories') 3681 else: 3682 raise aMCatNLOError('Compilation failed') 3683 3684 # make StdHep (only necessary with MG option output_dependencies='internal') 3685 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 3686 if not 
os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 3687 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 3688 if os.path.exists(pjoin(sourcedir,'StdHEP')): 3689 logger.info('Compiling StdHEP (can take a couple of minutes) ...') 3690 misc.compile(['StdHEP'], cwd = sourcedir) 3691 logger.info(' ...done.') 3692 else: 3693 raise aMCatNLOError('Could not compile StdHEP because its'+\ 3694 ' source directory could not be found in the SOURCE folder.\n'+\ 3695 " Check the MG5_aMC option 'output_dependencies.'") 3696 3697 # make CutTools (only necessary with MG option output_dependencies='internal') 3698 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 3699 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 3700 if os.path.exists(pjoin(sourcedir,'CutTools')): 3701 logger.info('Compiling CutTools (can take a couple of minutes) ...') 3702 misc.compile(['CutTools'], cwd = sourcedir) 3703 logger.info(' ...done.') 3704 else: 3705 raise aMCatNLOError('Could not compile CutTools because its'+\ 3706 ' source directory could not be found in the SOURCE folder.\n'+\ 3707 " Check the MG5_aMC option 'output_dependencies.'") 3708 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 3709 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 3710 raise aMCatNLOError('CutTools compilation failed.') 3711 3712 # Verify compatibility between current compiler and the one which was 3713 # used when last compiling CutTools (if specified). 
3714 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 3715 libdir, 'libcts.a')))),'compiler_version.log') 3716 if os.path.exists(compiler_log_path): 3717 compiler_version_used = open(compiler_log_path,'r').read() 3718 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 3719 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 3720 if os.path.exists(pjoin(sourcedir,'CutTools')): 3721 logger.info('CutTools was compiled with a different fortran'+\ 3722 ' compiler. Re-compiling it now...') 3723 misc.compile(['cleanCT'], cwd = sourcedir) 3724 misc.compile(['CutTools'], cwd = sourcedir) 3725 logger.info(' ...done.') 3726 else: 3727 raise aMCatNLOError("CutTools installation in %s"\ 3728 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 3729 " seems to have been compiled with a different compiler than"+\ 3730 " the one specified in MG5_aMC. Please recompile CutTools.") 3731 3732 # make IREGI (only necessary with MG option output_dependencies='internal') 3733 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 3734 and os.path.exists(pjoin(sourcedir,'IREGI')): 3735 logger.info('Compiling IREGI (can take a couple of minutes) ...') 3736 misc.compile(['IREGI'], cwd = sourcedir) 3737 logger.info(' ...done.') 3738 3739 if os.path.exists(pjoin(libdir, 'libiregi.a')): 3740 # Verify compatibility between current compiler and the one which was 3741 # used when last compiling IREGI (if specified). 3742 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 3743 libdir, 'libiregi.a')))),'compiler_version.log') 3744 if os.path.exists(compiler_log_path): 3745 compiler_version_used = open(compiler_log_path,'r').read() 3746 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 3747 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 3748 if os.path.exists(pjoin(sourcedir,'IREGI')): 3749 logger.info('IREGI was compiled with a different fortran'+\ 3750 ' compiler. 
Re-compiling it now...') 3751 misc.compile(['cleanIR'], cwd = sourcedir) 3752 misc.compile(['IREGI'], cwd = sourcedir) 3753 logger.info(' ...done.') 3754 else: 3755 raise aMCatNLOError("IREGI installation in %s"\ 3756 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 3757 " seems to have been compiled with a different compiler than"+\ 3758 " the one specified in MG5_aMC. Please recompile IREGI.") 3759 3760 # check if MadLoop virtuals have been generated 3761 if self.proc_characteristics['has_loops'].lower() == 'true' and \ 3762 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 3763 os.environ['madloop'] = 'true' 3764 if mode in ['NLO', 'aMC@NLO', 'noshower']: 3765 tests.append('check_poles') 3766 else: 3767 os.unsetenv('madloop') 3768 3769 # make and run tests (if asked for), gensym and make madevent in each dir 3770 self.update_status('Compiling directories...', level=None) 3771 3772 for test in tests: 3773 self.write_test_input(test) 3774 3775 try: 3776 import multiprocessing 3777 if not self.nb_core: 3778 try: 3779 self.nb_core = int(self.options['nb_core']) 3780 except TypeError: 3781 self.nb_core = multiprocessing.cpu_count() 3782 except ImportError: 3783 self.nb_core = 1 3784 3785 compile_options = copy.copy(self.options) 3786 compile_options['nb_core'] = self.nb_core 3787 compile_cluster = cluster.MultiCore(**compile_options) 3788 logger.info('Compiling on %d cores' % self.nb_core) 3789 3790 update_status = lambda i, r, f: self.donothing(i,r,f) 3791 for p_dir in p_dirs: 3792 compile_cluster.submit(prog = compile_dir, 3793 argument = [self.me_dir, p_dir, mode, options, 3794 tests, exe, self.options['run_mode']]) 3795 try: 3796 compile_cluster.wait(self.me_dir, update_status) 3797 3798 except: 3799 compile_cluster.remove() 3800 self.quit() 3801 3802 logger.info('Checking test output:') 3803 for p_dir in p_dirs: 3804 logger.info(p_dir) 3805 for test in tests: 3806 logger.info(' Result for %s:' % test) 3807 3808 this_dir = pjoin(self.me_dir, 'SubProcesses', 
p_dir) 3809 #check that none of the tests failed 3810 self.check_tests(test, this_dir)
3811 3812
3813 - def donothing(*args):
3814 pass
3815 3816
3817 - def check_tests(self, test, dir):
3818 """just call the correct parser for the test log""" 3819 if test in ['test_ME', 'test_MC']: 3820 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test)) 3821 elif test == 'check_poles': 3822 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
3823 3824
3825 - def parse_test_mx_log(self, log):
3826 """read and parse the test_ME/MC.log file""" 3827 content = open(log).read() 3828 if 'FAILED' in content: 3829 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK') 3830 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \ 3831 'Please check that widths of final state particles (e.g. top) have been' + \ 3832 ' set to 0 in the param_card.dat.') 3833 else: 3834 lines = [l for l in content.split('\n') if 'PASSED' in l] 3835 logger.info(' Passed.') 3836 logger.debug('\n'+'\n'.join(lines))
3837 3838
3839 - def parse_check_poles_log(self, log):
3840 """reads and parse the check_poles.log file""" 3841 content = open(log).read() 3842 npass = 0 3843 nfail = 0 3844 for line in content.split('\n'): 3845 if 'PASSED' in line: 3846 npass +=1 3847 tolerance = float(line.split()[1]) 3848 if 'FAILED' in line: 3849 nfail +=1 3850 tolerance = float(line.split()[1]) 3851 3852 if nfail + npass == 0: 3853 logger.warning('0 points have been tried') 3854 return 3855 3856 if float(nfail)/float(nfail+npass) > 0.1: 3857 raise aMCatNLOError('Poles do not cancel, run cannot continue') 3858 else: 3859 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \ 3860 %(npass, nfail+npass, tolerance))
3861 3862
3863 - def write_test_input(self, test):
3864 """write the input files to run test_ME/MC or check_poles""" 3865 if test in ['test_ME', 'test_MC']: 3866 content = "-2 -2\n" #generate randomly energy/angle 3867 content+= "100 100\n" #run 100 points for soft and collinear tests 3868 content+= "0\n" #sum over helicities 3869 content+= "0\n" #all FKS configs 3870 content+= '\n'.join(["-1"] * 50) #random diagram 3871 elif test == 'check_poles': 3872 content = '20 \n -1\n' 3873 3874 file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w') 3875 if test == 'test_MC': 3876 shower = self.run_card['parton_shower'] 3877 MC_header = "%s\n " % shower + \ 3878 "1 \n1 -0.1\n-1 -0.1\n" 3879 file.write(MC_header + content) 3880 else: 3881 file.write(content) 3882 file.close()
3883 3884 3885 3886 ############################################################################
3887 - def find_model_name(self):
3888 """ return the model name """ 3889 if hasattr(self, 'model_name'): 3890 return self.model_name 3891 3892 model = 'sm' 3893 proc = [] 3894 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')): 3895 line = line.split('#')[0] 3896 #line = line.split('=')[0] 3897 if line.startswith('import') and 'model' in line: 3898 model = line.split()[2] 3899 proc = [] 3900 elif line.startswith('generate'): 3901 proc.append(line.split(None,1)[1]) 3902 elif line.startswith('add process'): 3903 proc.append(line.split(None,2)[2]) 3904 3905 self.model = model 3906 self.process = proc 3907 return model
3908 3909 3910 3911 ############################################################################
3912 - def ask_run_configuration(self, mode, options, switch={}):
3913 """Ask the question when launching generate_events/multi_run""" 3914 3915 if 'parton' not in options: 3916 options['parton'] = False 3917 if 'reweightonly' not in options: 3918 options['reweightonly'] = False 3919 3920 3921 void = 'NOT INSTALLED' 3922 switch_order = ['order', 'fixed_order', 'shower','madspin'] 3923 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void, 3924 'madspin': void} 3925 if not switch: 3926 switch = switch_default 3927 else: 3928 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch)) 3929 3930 default_switch = ['ON', 'OFF'] 3931 allowed_switch_value = {'order': ['LO', 'NLO'], 3932 'fixed_order': default_switch, 3933 'shower': default_switch, 3934 'madspin': default_switch} 3935 3936 description = {'order': 'Perturbative order of the calculation:', 3937 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):', 3938 'shower': 'Shower the generated events:', 3939 'madspin': 'Decay particles with the MadSpin module:' } 3940 3941 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'}, 3942 ('madspin', 'ON'): {'fixed_order':'OFF'}, 3943 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF'} 3944 } 3945 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] 3946 3947 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void ) 3948 3949 3950 if mode == 'auto': 3951 mode = None 3952 if not mode and (options['parton'] or options['reweightonly']): 3953 mode = 'noshower' 3954 3955 # Init the switch value according to the current status 3956 available_mode = ['0', '1', '2'] 3957 available_mode.append('3') 3958 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')): 3959 switch['shower'] = 'ON' 3960 else: 3961 switch['shower'] = 'OFF' 3962 3963 if not aMCatNLO or self.options['mg5_path']: 3964 available_mode.append('4') 3965 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 3966 
switch['madspin'] = 'ON' 3967 else: 3968 switch['madspin'] = 'OFF' 3969 3970 answers = list(available_mode) + ['auto', 'done'] 3971 alias = {} 3972 for id, key in enumerate(switch_order): 3973 if switch[key] != void: 3974 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]] 3975 #allow lower case for on/off 3976 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s)) 3977 for s in allowed_switch_value[key])) 3978 answers += special_values 3979 3980 def create_question(switch): 3981 switch_format = " %i %-60s %12s=%s\n" 3982 question = "The following switches determine which operations are executed:\n" 3983 for id, key in enumerate(switch_order): 3984 question += switch_format % (id+1, description[key], key, switch[key]) 3985 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1) 3986 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n' 3987 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n' 3988 return question
        # NOTE(review): this span is the visible tail of an enclosing method
        # (presumably ask_run_configuration -- it re-invokes
        # self.ask_run_configuration(mode, options) below and ends with
        # 'return mode'); its 'def' line lies outside this view.

        def modify_switch(mode, answer, switch):
            """Apply the user's *answer* to the *switch* dict in place.

            *answer* can be:
              * 'key=status'      -> set that switch; if (key, status) appears
                                     in the closure dict ``force_switch``, also
                                     force the dependent switches for coherence
                                     (``void`` marks an unavailable switch);
              * '0'/'auto'/'done' -> final answer, change nothing;
              * one of ``special_values`` ('LO', 'NLO', 'aMC@NLO', 'aMC@LO',
                'noshower', 'noshowerLO') -> set order/fixed_order directly
                and route shower/madspin through ``assign_switch``.

            Returns the switch dict to keep the question loop going, or None
            when the answer is final or *mode* was fixed by the caller.
            """
            if '=' in answer:
                key, status = answer.split('=')
                switch[key] = status
                # Some settings force others for coherence (closure variable
                # force_switch); skip switches already at the forced value or
                # marked unavailable (void).
                if (key, status) in force_switch:
                    for key2, status2 in force_switch[(key, status)].items():
                        if switch[key2] not in [status2, void]:
                            logger.info('For coherence \'%s\' is set to \'%s\''
                                        % (key2, status2), '$MG:color:BLACK')
                            switch[key2] = status2
            elif answer in ['0', 'auto', 'done']:
                return
            elif answer in special_values:
                logger.info('Enter mode value: Go to the related mode', '$MG:color:BLACK')
                # Each named mode is shorthand for a full switch assignment.
                if answer == 'LO':
                    switch['order'] = 'LO'
                    switch['fixed_order'] = 'ON'
                    assign_switch('shower', 'OFF')
                    assign_switch('madspin', 'OFF')
                elif answer == 'NLO':
                    switch['order'] = 'NLO'
                    switch['fixed_order'] = 'ON'
                    assign_switch('shower', 'OFF')
                    assign_switch('madspin', 'OFF')
                elif answer == 'aMC@NLO':
                    switch['order'] = 'NLO'
                    switch['fixed_order'] = 'OFF'
                    assign_switch('shower', 'ON')
                    assign_switch('madspin', 'OFF')
                elif answer == 'aMC@LO':
                    switch['order'] = 'LO'
                    switch['fixed_order'] = 'OFF'
                    assign_switch('shower', 'ON')
                    assign_switch('madspin', 'OFF')
                elif answer == 'noshower':
                    switch['order'] = 'NLO'
                    switch['fixed_order'] = 'OFF'
                    assign_switch('shower', 'OFF')
                    assign_switch('madspin', 'OFF')
                elif answer == 'noshowerLO':
                    switch['order'] = 'LO'
                    switch['fixed_order'] = 'OFF'
                    assign_switch('shower', 'OFF')
                    assign_switch('madspin', 'OFF')
            # When the caller already fixed a mode there is nothing to loop on.
            if mode:
                return
            return switch

        # Seed the switches from the mode of the previous run.
        modify_switch(mode, self.last_mode, switch)
        # A madspin_card present on disk re-enables MadSpin even if the
        # previous run had it off.
        if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir, 'Cards', 'madspin_card.dat')):
            assign_switch('madspin', 'ON')

        if not self.force:
            answer = ''
            while answer not in ['0', 'done', 'auto', 'onlyshower']:
                question = create_question(switch)
                if mode:
                    answer = mode
                else:
                    answer = self.ask(question, '0', answers, alias=alias)
                # A bare digit toggles the corresponding switch between its
                # two allowed values.
                if answer.isdigit() and answer != '0':
                    key = switch_order[int(answer) - 1]
                    opt1 = allowed_switch_value[key][0]
                    opt2 = allowed_switch_value[key][1]
                    answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2)

                # modify_switch returns None when the answer is final.
                if not modify_switch(mode, answer, switch):
                    break

        # assign the mode depending of the switch
        if not mode or mode == 'auto':
            if switch['order'] == 'LO':
                if switch['shower'] == 'ON':
                    mode = 'aMC@LO'
                elif switch['fixed_order'] == 'ON':
                    mode = 'LO'
                else:
                    mode = 'noshowerLO'
            elif switch['order'] == 'NLO':
                if switch['shower'] == 'ON':
                    mode = 'aMC@NLO'
                elif switch['fixed_order'] == 'ON':
                    mode = 'NLO'
                else:
                    mode = 'noshower'
        logger.info('will run in mode: %s' % mode)

        if mode == 'noshower':
            logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please, shower the Les Houches events before using them for physics analyses.""")

        # specify the cards which are needed for this run.
        cards = ['param_card.dat', 'run_card.dat']
        ignore = []
        if mode in ['LO', 'NLO']:
            options['parton'] = True
            ignore = ['shower_card.dat', 'madspin_card.dat']
            cards.append('FO_analyse_card.dat')
        elif switch['madspin'] == 'ON':
            cards.append('madspin_card.dat')
        if 'aMC@' in mode:
            cards.append('shower_card.dat')
        if mode == 'onlyshower':
            cards = ['shower_card.dat']
        if options['reweightonly']:
            cards = ['run_card.dat']

        self.keep_cards(cards, ignore)

        if mode == 'onlyshower':
            cards = ['shower_card.dat']

        if not options['force'] and not self.force:
            self.ask_edit_cards(cards, plot=False)

        self.banner = banner_mod.Banner()

        # store the cards in the banner
        for card in cards:
            self.banner.add(pjoin(self.me_dir, 'Cards', card))
        # and the run settings
        run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
        self.banner.add_text('run_settings', run_settings)

        if not mode == 'onlyshower':
            self.run_card = self.banner.charge_card('run_card')
            self.run_tag = self.run_card['run_tag']
            # this is if the user did not provide a name for the current run
            if not hasattr(self, 'run_name') or not self.run_name:
                self.run_name = self.find_available_run_name(self.me_dir)
                # add a tag in the run_name for distinguish run_type
                if self.run_name.startswith('run_'):
                    if mode in ['LO', 'aMC@LO', 'noshowerLO']:
                        self.run_name += '_LO'
            self.set_run_name(self.run_name, self.run_tag, 'parton')
            if int(self.run_card['ickkw']) == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
                logger.error("""FxFx merging (ickkw=3) not allowed at LO""")
                # NOTE(review): no assignment to 'error' is visible before
                # this point, so this raise would itself fail with a
                # NameError -- confirm and pass an explicit message instead.
                raise self.InvalidCmd(error)
            elif int(self.run_card['ickkw']) == 3 and mode in ['aMC@NLO', 'noshower']:
                logger.warning("""You are running with FxFx merging enabled. To be able to merge
    samples of various multiplicities without double counting, you
    have to remove some events after showering 'by hand'. Please
    read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
                if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
                    logger.error("""FxFx merging does not work with Q-squared ordered showers.""")
                    # NOTE(review): same undefined 'error' issue as above.
                    raise self.InvalidCmd(error)
                elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8':
                    question = "FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
                        "Type \'n\' to stop or \'y\' to continue"
                    answers = ['n', 'y']
                    answer = self.ask(question, 'n', answers, alias=alias)
                    if answer == 'n':
                        # NOTE(review): 'opertation' is a typo in this runtime
                        # string; since the raise below is commented out the
                        # message is unused and the configuration question is
                        # simply asked again.
                        error = '''Stop opertation'''
                        self.ask_run_configuration(mode, options)
#                        raise aMCatNLOError(error)

        if 'aMC@' in mode or mode == 'onlyshower':
            self.shower_card = self.banner.charge_card('shower_card')

        elif mode in ['LO', 'NLO']:
            # NOTE(review): analyse_card_path is assigned but not used in this
            # span -- confirm whether it is dead code.
            analyse_card_path = pjoin(self.me_dir, 'Cards', 'FO_analyse_card.dat')
            self.analyse_card = self.banner.charge_card('FO_analyse_card')

        return mode


#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
    """The command line processor of MadGraph: the aMCatNLOCmd command set
    exposed through the interactive cmd.CmdShell interface."""
# ------------------------------------------------------------------
# Usage messages and option parsers for the user-level commands:
# compile / launch / generate_events / calculate_xsect / shower.
# ------------------------------------------------------------------

# 'compile' command ------------------------------------------------
_compile_usage = (
    "compile [MODE] [options]\n"
    "-- compiles aMC@NLO \n"
    "   MODE can be either FO, for fixed-order computations, \n"
    "   or MC for matching with parton-shower monte-carlos. \n"
    "   (if omitted, it is set to MC)\n"
)
_compile_parser = misc.OptionParser(usage=_compile_usage)
_compile_parser.add_option(
    "-f", "--force", default=False, action='store_true',
    help="Use the card present in the directory for the launch, without editing them")

# 'launch' command -------------------------------------------------
_launch_usage = (
    "launch [MODE] [options]\n"
    "-- execute aMC@NLO \n"
    "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n"
    "   If mode is set to LO/NLO, no event generation will be performed, but only the \n"
    "   computation of the total cross-section and the filling of parton-level histograms \n"
    "   specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n"
    "   If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n"
    "   event file is generated which will be showered with the MonteCarlo specified \n"
    "   in the run_card.dat\n"
)
_launch_parser = misc.OptionParser(usage=_launch_usage)
_launch_parser.add_option(
    "-f", "--force", default=False, action='store_true',
    help="Use the card present in the directory for the launch, without editing them")
_launch_parser.add_option(
    "-c", "--cluster", default=False, action='store_true',
    help="Submit the jobs on the cluster")
_launch_parser.add_option(
    "-m", "--multicore", default=False, action='store_true',
    help="Submit the jobs on multicore mode")
_launch_parser.add_option(
    "-x", "--nocompile", default=False, action='store_true',
    help="Skip compilation. Ignored if no executable is found")
_launch_parser.add_option(
    "-r", "--reweightonly", default=False, action='store_true',
    help="Skip integration and event generation, just run reweight on the"
         " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_launch_parser.add_option(
    "-p", "--parton", default=False, action='store_true',
    help="Stop the run after the parton level file generation (you need "
         "to shower the file in order to get physical results)")
_launch_parser.add_option(
    "-o", "--only_generation", default=False, action='store_true',
    help="Skip grid set up, just generate events starting from "
         "the last available results")
_launch_parser.add_option(
    "-n", "--name", default=False, dest='run_name',
    help="Provide a name to the run")
_launch_parser.add_option(
    "-a", "--appl_start_grid", default=False, dest='appl_start_grid',
    help="For use with APPLgrid only: start from existing grids")

# 'generate_events' command ----------------------------------------
_generate_events_usage = (
    "generate_events [MODE] [options]\n"
    "-- execute aMC@NLO \n"
    "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n"
    "   If mode is set to LO/NLO, no event generation will be performed, but only the \n"
    "   computation of the total cross-section and the filling of parton-level histograms \n"
    "   specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n"
    "   If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n"
    "   event file is generated which will be showered with the MonteCarlo specified \n"
    "   in the run_card.dat\n"
)
_generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
_generate_events_parser.add_option(
    "-f", "--force", default=False, action='store_true',
    help="Use the card present in the directory for the generate_events, without editing them")
_generate_events_parser.add_option(
    "-c", "--cluster", default=False, action='store_true',
    help="Submit the jobs on the cluster")
_generate_events_parser.add_option(
    "-m", "--multicore", default=False, action='store_true',
    help="Submit the jobs on multicore mode")
_generate_events_parser.add_option(
    "-x", "--nocompile", default=False, action='store_true',
    help="Skip compilation. Ignored if no executable is found")
_generate_events_parser.add_option(
    "-r", "--reweightonly", default=False, action='store_true',
    help="Skip integration and event generation, just run reweight on the"
         " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_generate_events_parser.add_option(
    "-p", "--parton", default=False, action='store_true',
    help="Stop the run after the parton level file generation (you need "
         "to shower the file in order to get physical results)")
_generate_events_parser.add_option(
    "-o", "--only_generation", default=False, action='store_true',
    help="Skip grid set up, just generate events starting from "
         "the last available results")
_generate_events_parser.add_option(
    "-n", "--name", default=False, dest='run_name',
    help="Provide a name to the run")

# 'calculate_xsect' command ----------------------------------------
_calculate_xsect_usage = (
    "calculate_xsect [ORDER] [options]\n"
    "-- calculate cross-section up to ORDER.\n"
    "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"
)
_calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
_calculate_xsect_parser.add_option(
    "-f", "--force", default=False, action='store_true',
    help="Use the card present in the directory for the launch, without editing them")
_calculate_xsect_parser.add_option(
    "-c", "--cluster", default=False, action='store_true',
    help="Submit the jobs on the cluster")
_calculate_xsect_parser.add_option(
    "-m", "--multicore", default=False, action='store_true',
    help="Submit the jobs on multicore mode")
_calculate_xsect_parser.add_option(
    "-x", "--nocompile", default=False, action='store_true',
    help="Skip compilation. Ignored if no executable is found")
_calculate_xsect_parser.add_option(
    "-n", "--name", default=False, dest='run_name',
    help="Provide a name to the run")
_calculate_xsect_parser.add_option(
    "-a", "--appl_start_grid", default=False, dest='appl_start_grid',
    help="For use with APPLgrid only: start from existing grids")
_calculate_xsect_parser.add_option(
    "-o", "--only_generation", default=False, action='store_true',
    help="Skip grid set up, just generate events starting from "
         "the last available results")

# 'shower' command -------------------------------------------------
_shower_usage = (
    'shower run_name [options]\n'
    '-- do shower/hadronization on parton-level file generated for run run_name\n'
    '   all the information (e.g. number of events, MonteCarlo, ...\n'
    '   are directly read from the header of the event file\n'
)
_shower_parser = misc.OptionParser(usage=_shower_usage)
_shower_parser.add_option(
    "-f", "--force", default=False, action='store_true',
    help="Use the shower_card present in the directory for the launch, without editing")