
Source Code for Module madgraph.interface.amcatnlo_run_interface

################################################################################
#
# Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
"""A user friendly command line interface to access MadGraph5_aMC@NLO features.
   Uses the cmd package for command interpretation and tab completion.
"""
from __future__ import division

import atexit
import copy
import datetime
import glob
import logging
import math
import optparse
import os
import pydoc
import random
import re
import shutil
import signal
import StringIO
import subprocess
import sys
import tarfile
import time
import traceback

try:
    import readline
    GNU_SPLITTING = ('GNU' in readline.__doc__)
except:
    GNU_SPLITTING = True

root_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]
root_path = os.path.split(root_path)[0]
sys.path.insert(0, os.path.join(root_path, 'bin'))

# useful shortcut
pjoin = os.path.join
# Special loggers for the Cmd Interface
logger = logging.getLogger('madgraph.stdout')        # -> stdout
logger_stderr = logging.getLogger('madgraph.stderr') # -> stderr

try:
    import madgraph
except ImportError:
    aMCatNLO = True
    import internal.extended_cmd as cmd
    import internal.common_run_interface as common_run
    import internal.banner as banner_mod
    import internal.misc as misc
    from internal import InvalidCmd, MadGraph5Error
    import internal.files as files
    import internal.cluster as cluster
    import internal.save_load_object as save_load_object
    import internal.gen_crossxhtml as gen_crossxhtml
    import internal.sum_html as sum_html
    import internal.shower_card as shower_card
    import internal.FO_analyse_card as analyse_card
    import internal.histograms as histograms
else:
    # import from the madgraph directory
    aMCatNLO = False
    import madgraph.interface.extended_cmd as cmd
    import madgraph.interface.common_run_interface as common_run
    import madgraph.iolibs.files as files
    import madgraph.iolibs.save_load_object as save_load_object
    import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
    import madgraph.madevent.sum_html as sum_html
    import madgraph.various.banner as banner_mod
    import madgraph.various.cluster as cluster
    import madgraph.various.misc as misc
    import madgraph.various.shower_card as shower_card
    import madgraph.various.FO_analyse_card as analyse_card
    import madgraph.various.histograms as histograms
    from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error

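# Editor's note -- illustrative sketch, not part of the original module: the
# try/except block above makes the same file importable both from a standalone
# process directory (helpers under ./internal) and from a full MadGraph5
# source tree (helpers under the madgraph package). The pattern in miniature:
#
#   try:
#       import madgraph                       # full source tree available?
#   except ImportError:
#       import internal.misc as misc          # standalone process directory
#   else:
#       import madgraph.various.misc as misc  # madgraph package layout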
class aMCatNLOError(Exception):
    pass

def compile_dir(*arguments):
    """compile the directory p_dir
    arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
    this function does not need to be a class method in order to do
    the compilation on multicore"""

    if len(arguments) == 1:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
    elif len(arguments) == 7:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
    else:
        raise aMCatNLOError, 'wrong number of arguments'
    logger.info(' Compiling %s...' % p_dir)

    this_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        # compile and run tests
        for test in tests:
            # skip check_poles for LOonly dirs
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
                continue
            misc.compile([test], cwd=this_dir, job_specs=False)
            input = pjoin(me_dir, '%s_input.txt' % test)
            # this can be improved/better written to handle the output
            misc.call(['./%s' % (test)], cwd=this_dir,
                      stdin=open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'))
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'MadLoop5_resources')):
                tf = tarfile.open(pjoin(this_dir, 'MadLoop5_resources.tar.gz'), 'w:gz',
                                  dereference=True)
                tf.add(pjoin(this_dir, 'MadLoop5_resources'), arcname='MadLoop5_resources')
                tf.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=this_dir, job_specs=False)
            open(pjoin(this_dir, 'gensym_input.txt'), 'w').write('%s\n' % run_mode)
            misc.call(['./gensym'], cwd=this_dir,
                      stdin=open(pjoin(this_dir, 'gensym_input.txt')),
                      stdout=open(pjoin(this_dir, 'gensym.log'), 'w'))
            # compile madevent_mintMC/mintFO
            misc.compile([exe], cwd=this_dir, job_specs=False)
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs=False)

        logger.info('    %s done.' % p_dir)
        return 0
    except MadGraph5Error, msg:
        return msg

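# Editor's note -- illustrative sketch, not part of the original module:
# compile_dir accepts either a single 7-tuple or seven positional arguments
# precisely so that it can be handed to a multiprocessing pool, which passes
# one tuple per task. `me_dir`, `p_dirs` and `opts` are assumed placeholders.
#
#   import multiprocessing
#   pool = multiprocessing.Pool(processes=4)
#   tasks = [(me_dir, p_dir, 'aMC@NLO', opts, ['test_ME'], 'madevent_mintMC', 0)
#            for p_dir in p_dirs]
#   for ret in pool.map(compile_dir, tasks):
#       if ret != 0:
#           logger.error(ret)  # compile_dir returns the error message on failure
#   pool.close(); pool.join()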
def check_compiler(options, block=False):
    """check that the current fortran compiler is gfortran 4.6 or later.
    If block, stop the execution, otherwise just print a warning"""

    msg = 'In order to be able to run MadGraph5_aMC@NLO at NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected\n' + \
          'Note that you can still run all MadEvent runs without any problem!'
    # first check that gfortran is installed
    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        if not ''.join(curr_version.split('.')) >= '46':
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
            else:
                logger.warning(msg % (compiler + ' ' + curr_version))


#===============================================================================
# CmdExtended
#===============================================================================
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    # suggested list of commands
    next_possibility = {
        'start': [],
    }

    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'

    keyboard_stop_msg = """stopping all operations
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Define the Error
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError

    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # Tag allowing/forbidding questions
        self.force = False

        # If possible, build an info line with current version number
        # and date, from the VERSION text file
        info = misc.get_pkg_info()
        info_line = ""
        if info and info.has_key('version') and info.has_key('date'):
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#*         VERSION %s %s %s         *\n" % \
                            (info['version'],
                             (30 - len_version - len_date) * ' ',
                             info['date'])
        else:
            version = open(pjoin(root_path, 'MGMEVersion.txt')).readline().strip()
            info_line = "#*         VERSION %s %s                *\n" % \
                            (version, (24 - len(version)) * ' ')

        # Create a header for the history file.
        # Remember to fill in time at writeout time!
        self.history_header = \
        '#************************************************************\n' + \
        '#*                  MadGraph5_aMC@NLO                       *\n' + \
        '#*                                                          *\n' + \
        "#*                *                       *                 *\n" + \
        "#*                  *        * *        *                   *\n" + \
        "#*                    * * * * 5 * * * *                     *\n" + \
        "#*                  *        * *        *                   *\n" + \
        "#*                *                       *                 *\n" + \
        "#*                                                          *\n" + \
        "#*                                                          *\n" + \
        info_line + \
        "#*                                                          *\n" + \
        "#*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \
        "#*    https://server06.fynu.ucl.ac.be/projects/madgraph     *\n" + \
        "#*                           and                            *\n" + \
        "#*                 http://amcatnlo.cern.ch                  *\n" + \
        '#*                                                          *\n' + \
        '#************************************************************\n' + \
        '#*                                                          *\n' + \
        '#*             Command File for aMCatNLO                    *\n' + \
        '#*                                                          *\n' + \
        '#*             run as ./bin/aMCatNLO.py filename            *\n' + \
        '#*                                                          *\n' + \
        '#************************************************************\n'

        if info_line:
            info_line = info_line[1:]

        logger.info(\
        "************************************************************\n" + \
        "*                                                          *\n" + \
        "*           W E L C O M E to M A D G R A P H 5             *\n" + \
        "*                      a M C @ N L O                       *\n" + \
        "*                                                          *\n" + \
        "*                *                       *                 *\n" + \
        "*                  *        * *        *                   *\n" + \
        "*                    * * * * 5 * * * *                     *\n" + \
        "*                  *        * *        *                   *\n" + \
        "*                *                       *                 *\n" + \
        "*                                                          *\n" + \
        info_line + \
        "*                                                          *\n" + \
        "*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \
        "*                 http://amcatnlo.cern.ch                  *\n" + \
        "*                                                          *\n" + \
        "*               Type 'help' for in-line help.              *\n" + \
        "*                                                          *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)

    def get_history_header(self):
        """return the history header"""
        return self.history_header % misc.get_time_info()

    def stop_on_keyboard_stop(self):
        """action to perform to close nicely on a keyboard interrupt"""
        try:
            if hasattr(self, 'cluster'):
                logger.info('rm jobs on queue')
                self.cluster.remove()
            if hasattr(self, 'results'):
                self.update_status('Stop by the user', level=None, makehtml=True, error=True)
                self.add_error_log_in_html(KeyboardInterrupt)
        except:
            pass

    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # relaxing the tag forbidding questions
        self.force = False

        if not self.use_rawinput:
            return stop

        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit', 'quit', 'EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            misc.sprint('self.update_status fails', log=logger)
            pass

    def nice_user_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_user_error(self, error, line)

    def nice_config_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_config_error(self, error, line)

    def nice_error_handling(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_error_handling(self, error, line)


#===============================================================================
# HelpToCmd
#===============================================================================
class HelpToCmd(object):
    """The series of help routines for the aMCatNLOCmd"""

    def help_launch(self):
        """help for the launch command"""
        _launch_parser.print_help()

    def help_banner_run(self):
        logger.info("syntax: banner_run Path|RUN [--run_options]")
        logger.info("-- Reproduce a run following a given banner")
        logger.info("   One of the following arguments is required:")
        logger.info("   Path should be the path of a valid banner.")
        logger.info("   RUN should be the name of a run of the current directory")
        self.run_options_help([('-f', 'answer all questions by default'),
                               ('--name=X', 'Define the name associated with the new run')])

    def help_compile(self):
        """help for the compile command"""
        _compile_parser.print_help()

    def help_generate_events(self):
        """help for the generate_events command"""
        _generate_events_parser.print_help()

    def help_calculate_xsect(self):
        """help for the calculate_xsect command"""
        _calculate_xsect_parser.print_help()

    def help_shower(self):
        """help for the shower command"""
        _shower_parser.print_help()

    def help_open(self):
        logger.info("syntax: open FILE")
        logger.info("-- open a file with the appropriate editor.")
        logger.info('   If FILE is one of index.html, param_card.dat or run_card.dat,')
        logger.info('   the path to the last created/used directory is used')

    def run_options_help(self, data):
        if data:
            logger.info('-- local options:')
            for name, info in data:
                logger.info('      %s : %s' % (name, info))

        logger.info("-- session options:")
        logger.info("   Note that these options will be kept for the current session")
        logger.info("   --cluster   : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
        logger.info("   --multicore : Run in multi-core configuration")
        logger.info("   --nb_core=X : limit the number of cores to use to X.")


#===============================================================================
# CheckValidForCmd
#===============================================================================
class CheckValidForCmd(object):
    """The series of check routines for the aMCatNLOCmd"""

    def check_shower(self, args, options):
        """Check the validity of the line. args[0] is the run_directory"""

        if options['force']:
            self.force = True

        if len(args) == 0:
            self.help_shower()
            raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
        if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
            raise self.InvalidCmd, 'Directory %s does not exist' % \
                pjoin(os.getcwd(), 'Events', args[0])

        self.set_run_name(args[0], level='shower')
        args[0] = pjoin(self.me_dir, 'Events', args[0])

    def check_plot(self, args):
        """Check the arguments for the plot command
        plot run_name modes"""

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir or not td:
            logger.info('Retry to read configuration file to find madanalysis/td')
            self.set_configuration()

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir:
            error_msg = 'No Madanalysis path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)
        if not td:
            error_msg = 'No path to td directory correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        if len(args) == 0:
            if not hasattr(self, 'run_name') or not self.run_name:
                self.help_plot()
                raise self.InvalidCmd('No run name currently defined. Please add this information.')
            args.append('all')
            return

        if args[0] not in self._plot_mode:
            self.set_run_name(args[0], level='plot')
            del args[0]
            if len(args) == 0:
                args.append('all')
        elif not self.run_name:
            self.help_plot()
            raise self.InvalidCmd('No run name currently defined. Please add this information.')

        for arg in args:
            if arg not in self._plot_mode and arg != self.run_name:
                self.help_plot()
                raise self.InvalidCmd('unknown option %s' % arg)

    def check_pgs(self, arg):
        """Check the arguments for the pgs command
        syntax: pgs [NAME]
        Note that other options have already been removed at this point
        """

        # If no pythia-pgs path
        if not self.options['pythia-pgs_path']:
            logger.info('Retry to read configuration file to find pythia-pgs path')
            self.set_configuration()

        if not self.options['pythia-pgs_path'] or not \
           os.path.exists(pjoin(self.options['pythia-pgs_path'], 'src')):
            error_msg = 'No pythia-pgs path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently defined. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file pythia_events.hep currently available.
Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'pgs')
            filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
                                        'events_*.hep.gz'))
            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s. ' % (self.run_name, prev_tag))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file])
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'pgs')

        return lock

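    # Editor's note -- illustrative command forms accepted by the check above
    # (sketch; run_01 and tag_1 are assumed example names):
    #
    #   pgs                     # reuse self.run_name or results.lastrun
    #   pgs run_01              # explicit run name
    #   pgs run_01 --tag=tag_1  # --tag is stripped before the name checks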
    def check_delphes(self, arg):
        """Check the arguments for the delphes command
        syntax: delphes [NAME]
        Note that other options have already been removed at this point
        """

        # If no delphes path
        if not self.options['delphes_path']:
            logger.info('Retry to read configuration file to find delphes path')
            self.set_configuration()

        if not self.options['delphes_path']:
            error_msg = 'No delphes path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently defined. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file pythia_events.hep currently available.
Please specify a valid run_name''')

        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'delphes')
            filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
                                        'events_*.hep.gz'))
            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s: %s ' \
                    % (self.run_name, prev_tag,
                       pjoin(self.me_dir, 'Events', self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file])
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'delphes')

    def check_calculate_xsect(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_calculate_xsect()
            raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[0]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                                   ' are not compatible. Please choose one.'

    def check_generate_events(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_generate_events()
            raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[0]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                                   ' are not compatible. Please choose one.'

    def check_banner_run(self, args):
        """check the validity of line"""

        if len(args) == 0:
            self.help_banner_run()
            raise self.InvalidCmd('banner_run requires at least one argument.')

        tag = [a[6:] for a in args if a.startswith('--tag=')]

        if os.path.exists(args[0]):
            type = 'banner'
            format = self.detect_card_type(args[0])
            if format != 'banner':
                raise self.InvalidCmd('The file is not a valid banner.')
        elif tag:
            args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                                (args[0], tag[0]))
            if not os.path.exists(args[0]):
                raise self.InvalidCmd('No banner associated with this name and tag.')
        else:
            name = args[0]
            type = 'run'
            banners = glob.glob(pjoin(self.me_dir, 'Events', args[0], '*_banner.txt'))
            if not banners:
                raise self.InvalidCmd('No banner associated with this name.')
            elif len(banners) == 1:
                args[0] = banners[0]
            else:
                # list the tags and propose them to the user
                tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
                tag = self.ask('which tag do you want to use?', tags[0], tags)
                args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                                    (args[0], tag))

        run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
        if run_name:
            try:
                self.exec_cmd('remove %s all banner -f' % run_name[0])
            except Exception:
                pass
            self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
        elif type == 'banner':
            self.set_run_name(self.find_available_run_name(self.me_dir))
        elif type == 'run':
            if not self.results[name].is_empty():
                run_name = self.find_available_run_name(self.me_dir)
                logger.info('Run %s is not empty so will use run_name: %s' % \
                                (name, run_name))
                self.set_run_name(run_name)
            else:
                try:
                    self.exec_cmd('remove %s all banner -f' % name)
                except Exception:
                    pass
                self.set_run_name(name)

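    # Editor's note -- illustrative command forms accepted by the check above
    # (sketch; run_01, tag_1 and new_run are assumed example names):
    #
    #   banner_run Events/run_01/run_01_tag_1_banner.txt  # path to a banner
    #   banner_run run_01 --tag=tag_1                     # run name plus tag
    #   banner_run run_01 --name=new_run                  # rerun under a new name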
    def check_launch(self, args, options):
        """check the validity of the line. args is MODE
        MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('auto')
            return

        if len(args) > 1:
            self.help_launch()
            raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'

        elif len(args) == 1:
            if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'auto']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                                   ' are not compatible. Please choose one.'
        if mode == 'NLO' and options['reweightonly']:
            raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'

    def check_compile(self, args, options):
        """check the validity of the line. args is MODE
        MODE being FO or MC. If no mode is passed, MC is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('MC')
            return

        if len(args) > 1:
            self.help_compile()
            raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'

        elif len(args) == 1:
            if not args[0] in ['MC', 'FO']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
            mode = args[0]

        # check for incompatible options/modes


#===============================================================================
# CompleteForCmd
#===============================================================================
class CompleteForCmd(CheckValidForCmd):
    """The series of completion routines for the aMCatNLOCmd"""

    def complete_launch(self, text, line, begidx, endidx):
        """auto-completion for the launch command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['LO', 'NLO', 'aMC@NLO', 'aMC@LO'], line)
        elif len(args) == 2 and line[begidx-1] == '@':
            return self.list_completion(text, ['LO', 'NLO'], line)
        else:
            opts = []
            for opt in _launch_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_banner_run(self, text, line, begidx, endidx):
        "Complete the banner_run command"
        try:
            args = self.split_arg(line[0:begidx], error=False)

            if args[-1].endswith(os.path.sep):
                return self.path_completion(text,
                            os.path.join('.', *[a for a in args \
                                if a.endswith(os.path.sep)]))

            if len(args) > 1:
                # only options are possible
                tags = glob.glob(pjoin(self.me_dir, 'Events', args[1], '%s_*_banner.txt' % args[1]))
                tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]

                if args[-1] != '--tag=':
                    tags = ['--tag=%s' % t for t in tags]
                else:
                    return self.list_completion(text, tags)
                return self.list_completion(text, tags + ['--name=', '-f'], line)

            # First argument
            possibilities = {}

            comp = self.path_completion(text, os.path.join('.', *[a for a in args \
                                            if a.endswith(os.path.sep)]))
            if os.path.sep in line:
                return comp
            else:
                possibilities['Path from ./'] = comp

            run_list = glob.glob(pjoin(self.me_dir, 'Events', '*', '*_banner.txt'))
            run_list = [n.rsplit('/', 2)[1] for n in run_list]
            possibilities['RUN Name'] = self.list_completion(text, run_list)

            return self.deal_multiple_categories(possibilities)

        except Exception, error:
            print error

    def complete_compile(self, text, line, begidx, endidx):
        """auto-completion for the compile command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['FO', 'MC'], line)
        else:
            opts = []
            for opt in _compile_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_calculate_xsect(self, text, line, begidx, endidx):
        """auto-completion for the calculate_xsect command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['LO', 'NLO'], line)
        else:
            opts = []
            for opt in _calculate_xsect_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_generate_events(self, text, line, begidx, endidx):
        """auto-completion for the generate_events command:
        just calls the completion for launch"""
        return self.complete_launch(text, line, begidx, endidx)

    def complete_shower(self, text, line, begidx, endidx):
        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return valid run_name
            data = glob.glob(pjoin(self.me_dir, 'Events', '*', 'events.lhe.gz'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

    def complete_plot(self, text, line, begidx, endidx):
        """ Complete the plot command """

        args = self.split_arg(line[0:begidx], error=False)

        if len(args) == 1:
            # return valid run_name
            data = glob.glob(pjoin(self.me_dir, 'Events', '*', 'events.lhe*'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

        if len(args) > 1:
            return self.list_completion(text, self._plot_mode)

    def complete_pgs(self, text, line, begidx, endidx):
        "Complete the pgs command"
        args = self.split_arg(line[0:begidx], error=False)
        if len(args) == 1:
            # return valid run_name
            data = glob.glob(pjoin(self.me_dir, 'Events', '*', 'events_*.hep.gz'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1
            else:
                tmp2 = self.list_completion(text, self._run_options + ['-f',
                                            '--tag=', '--no_default'], line)
                return tmp1 + tmp2
        else:
            return self.list_completion(text, self._run_options + ['-f',
                                        '--tag=', '--no_default'], line)

    complete_delphes = complete_pgs

class aMCatNLOAlreadyRunning(InvalidCmd):
    pass


#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # Truth values
    true = ['T', '.true.', True, 'true']
    # Options and formats available
    _run_options = ['--cluster', '--multicore', '--nb_core=', '--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level', 'fortran_compiler', 'cpp_compiler', 'timeout']
    _plot_mode = ['all', 'parton', 'shower', 'pgs', 'delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']
    # survey options, dict from name to type, default value, and help text
    # Variables to store object information
    web = False
    cluster_mode = 0
    queue = 'madgraph'
    nb_core = None
    make_opts_var = {}

    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower': ['generate_events [OPTIONS]']
    }

    ############################################################################
    def __init__(self, me_dir=None, options={}, *completekey, **stdin):
        """ add information to the cmd """

        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
        #common_run.CommonRunCmd.__init__(self, me_dir, options)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        self.prompt = "%s>" % os.path.basename(pjoin(self.me_dir))

        self.load_results_db()
        self.results.def_web_mode(self.web)
        # check that the compiler is gfortran 4.6 or later if virtuals have been exported
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)


    ############################################################################
    def do_shower(self, line):
        """ run the shower on a given parton level file """
        argss = self.split_arg(line)
        (options, argss) = _launch_parser.parse_args(argss)
        # check argument validity and normalise arguments
        options = options.__dict__
        options['reweightonly'] = False
        self.check_shower(argss, options)
        evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
        self.ask_run_configuration('onlyshower', options)
        self.run_mcatnlo(evt_file)

        self.update_status('', level='all', update_results=True)

    ############################################################################
    def do_plot(self, line):
        """Create the plot for a given run"""

        # Since in principle, all plots are already done automatically
        args = self.split_arg(line)
        # Check argument's validity
        self.check_plot(args)
        logger.info('plot for run %s' % self.run_name)

        if not self.force:
            self.ask_edit_cards([], args, plot=True)

        if any([arg in ['parton'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                logger.info('Found events.lhe file for run %s' % self.run_name)
                shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
                self.create_plot('parton')
                shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
                misc.gzip(filename)

        if any([arg in ['all', 'parton'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
            if os.path.exists(filename):
                logger.info('Found MADatNLO.top file for run %s' % \
                            self.run_name)
                output = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton.html')
                plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')

                if not os.path.isdir(plot_dir):
                    os.makedirs(plot_dir)
                top_file = pjoin(plot_dir, 'plots.top')
                files.cp(filename, top_file)
                madir = self.options['madanalysis_path']
                tag = self.run_card['run_tag']
                td = self.options['td_path']
                misc.call(['%s/plot' % self.dirbin, madir, td],
                          stdout=open(pjoin(plot_dir, 'plot.log'), 'a'),
                          stderr=subprocess.STDOUT,
                          cwd=plot_dir)

                misc.call(['%s/plot_page-pl' % self.dirbin,
                           os.path.basename(plot_dir),
                           'parton'],
                          stdout=open(pjoin(plot_dir, 'plot.log'), 'a'),
                          stderr=subprocess.STDOUT,
                          cwd=pjoin(self.me_dir, 'HTML', self.run_name))
                shutil.move(pjoin(self.me_dir, 'HTML', self.run_name, 'plots.html'),
                            output)

                os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))

        if any([arg in ['all', 'shower'] for arg in args]):
            filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
                                        'events_*.lhe.gz'))
            if len(filenames) != 1:
                filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
                                            'events_*.hep.gz'))
                if len(filenames) != 1:
                    logger.info('No shower level file found for run %s' % \
                                self.run_name)
                    return
                filename = filenames[0]
                misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events', 'pythia_events.hep'))

                if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
                    if aMCatNLO and not self.options['mg5_path']:
                        raise aMCatNLOError('plotting NLO HEP file needs MG5 utilities')

                    files.cp(pjoin(self.options['mg5_path'], 'Template', 'LO', 'Cards', 'pythia_card_default.dat'),
                             pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
                self.run_hep2lhe()
            else:
                filename = filenames[0]
                misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events', 'pythia_events.hep'))

            self.create_plot('shower')
            lhe_file_name = filename.replace('.hep.gz', '.lhe')
            shutil.move(pjoin(self.me_dir, 'Events', 'pythia_events.lhe'),
                        lhe_file_name)
            misc.gzip(lhe_file_name)

        if any([arg in ['all', 'pgs'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name,
                             '%s_pgs_events.lhco' % self.run_tag)
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                self.create_plot('PGS')
                misc.gzip(filename)
            else:
                logger.info('No valid files for pgs plot')

        if any([arg in ['all', 'delphes'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name,
                             '%s_delphes_events.lhco' % self.run_tag)
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                #shutil.move(filename, pjoin(self.me_dir, 'Events','delphes_events.lhco'))
                self.create_plot('Delphes')
                #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename)
                misc.gzip(filename)
            else:
                logger.info('No valid files for delphes plot')


    ############################################################################
    def do_calculate_xsect(self, line):
        """Main commands: calculates the LO/NLO cross section, using madevent_mintFO;
        this function wraps the do_launch one"""

        self.start_time = time.time()
        argss = self.split_arg(line)
        # check argument validity and normalise arguments
        (options, argss) = _calculate_xsect_parser.parse_args(argss)
        options = options.__dict__
        options['reweightonly'] = False
        options['parton'] = True
        self.check_calculate_xsect(argss, options)
        self.do_launch(line, options, argss)

    ############################################################################
    def do_banner_run(self, line):
        """Make a run from the banner file"""

        args = self.split_arg(line)
        # check the validity of the arguments
        self.check_banner_run(args)

        # Remove previous cards
        for name in ['shower_card.dat', 'madspin_card.dat']:
            try:
                os.remove(pjoin(self.me_dir, 'Cards', name))
            except Exception:
                pass

        banner_mod.split_banner(args[0], self.me_dir, proc_card=False)

        # Check if we want to modify the run
        if not self.force:
            ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y', 'n'])
            if ans == 'n':
                self.force = True

        # Compute run mode:
        if self.force:
            mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin': False, 'shower': True}
            banner = banner_mod.Banner(args[0])
            for line in banner['run_settings']:
                if '=' in line:
                    mode, value = [t.strip() for t in line.split('=')]
                    mode_status[mode] = value
        else:
            mode_status = {}

        # Call Generate events
        self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
                       switch=mode_status)

    ############################################################################
    def do_generate_events(self, line):
        """Main commands: generate events;
        this function just wraps the do_launch one"""
        self.do_launch(line)


    ############################################################################
    def do_treatcards(self, line, amcatnlo=True):
        """Advanced commands: create the correct run_card.inc from the NLO format"""
        # check that no 'Auto' entries are present in the file
        self.check_param_card(pjoin(self.me_dir, 'Cards', 'param_card.dat'))
        return super(aMCatNLOCmd, self).do_treatcards(line, amcatnlo)

    ############################################################################
    def set_configuration(self, amcatnlo=True, **opt):
        """assign all configuration variables from file;
        loop over the different config files if config_file is not defined"""
        return super(aMCatNLOCmd, self).set_configuration(amcatnlo=amcatnlo, **opt)

    ############################################################################
    def do_launch(self, line, options={}, argss=[], switch={}):
        """Main commands: launch the full chain.
        options and args are relevant if the function is called from other
        functions, such as generate_events or calculate_xsect.
        mode gives the list of switches needed for the computation (useful for banner_run)
        """

        if not argss and not options:
            self.start_time = time.time()
            argss = self.split_arg(line)
            # check argument validity and normalise arguments
            (options, argss) = _launch_parser.parse_args(argss)
            options = options.__dict__
            self.check_launch(argss, options)

        if 'run_name' in options.keys() and options['run_name']:
            self.run_name = options['run_name']
            # if a dir with the given run_name already exists
            # remove it and warn the user
            if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
                logger.warning('Removing old run information in \n' +
                               pjoin(self.me_dir, 'Events', self.run_name))
                files.rm(pjoin(self.me_dir, 'Events', self.run_name))
                self.results.delete_run(self.run_name)
        else:
            self.run_name = ''  # will be set later

        if options['multicore']:
            self.cluster_mode = 2
        elif options['cluster']:
            self.cluster_mode = 1

        if not switch:
            mode = argss[0]

            if mode in ['LO', 'NLO']:
                options['parton'] = True
            mode = self.ask_run_configuration(mode, options)
        else:
            mode = self.ask_run_configuration('auto', options, switch)

        self.results.add_detail('run_mode', mode)

        self.update_status('Starting run', level=None, update_results=True)

        if self.options['automatic_html_opening']:
            misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
            self.options['automatic_html_opening'] = False

        if '+' in mode:
            mode = mode.split('+')[0]
        self.compile(mode, options)
        evt_file = self.run(mode, options)

        if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
            logger.info('No event file generated: grids have been set up with a ' \
                        'relative precision of %s' % self.run_card['req_acc'])
            return

        if not mode in ['LO', 'NLO']:
            assert evt_file == pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'), \
                '%s != %s' % (evt_file, pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
            self.exec_cmd('reweight -from_cards', postcmd=False)
            self.exec_cmd('decay_events -from_cards', postcmd=False)
            evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')

        if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
           and not options['parton']:
            self.run_mcatnlo(evt_file)
        elif mode == 'noshower':
            logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please, shower the Les Houches events before using them for physics analyses.""")

        self.update_status('', level='all', update_results=True)
        if self.run_card['ickkw'] == 3 and \
           (mode in ['noshower'] or \
            (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
            logger.warning("""You are running with FxFx merging enabled.
To be able to merge samples of various multiplicities without double counting,
you have to remove some events after showering 'by hand'.
Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")

        self.store_result()
        # check if the param_card defines a scan.
        if self.param_card_iterator:
            param_card_iterator = self.param_card_iterator
            self.param_card_iterator = []  # avoid the next generate going through here again
            param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
            orig_name = self.run_name
            # go through the scan
            with misc.TMP_variable(self, 'allow_notification_center', False):
                for i, card in enumerate(param_card_iterator):
                    card.write(pjoin(self.me_dir, 'Cards', 'param_card.dat'))
                    if not options['force']:
                        options['force'] = True
                    if options['run_name']:
                        options['run_name'] = '%s_%s' % (orig_name, i + 1)
                    if not argss:
                        argss = [mode, "-f"]
                    elif argss[0] == "auto":
                        argss[0] = mode
                    self.do_launch("", options=options, argss=argss, switch=switch)
                    #self.exec_cmd("launch -f ", precmd=True, postcmd=True, errorhandling=False)
                    param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
            # restore the original param_card
            param_card_iterator.write(pjoin(self.me_dir, 'Cards', 'param_card.dat'))
            name = misc.get_scan_name(orig_name, self.run_name)
            path = pjoin(self.me_dir, 'Events', 'scan_%s.txt' % name)
            logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK')
            param_card_iterator.write_summary(path)

        if self.allow_notification_center:
            misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
                              '%s: %s +- %s ' % (self.results.current['run_name'],
                                                 self.results.current['cross'],
                                                 self.results.current['error']))
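    # Editor's note -- illustrative usage (sketch; my_run is an assumed run
    # name):
    #
    #   launch aMC@NLO -f -n my_run   # full chain without questions, named run
    #
    # If param_card.dat defines a scan, the loop above re-invokes do_launch
    # once per scan point and collects the cross sections in Events/scan_*.txt.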

    ############################################################################
    def do_compile(self, line):
        """Advanced commands: just compile the executables"""
        argss = self.split_arg(line)
        # check argument validity and normalise arguments
        (options, argss) = _compile_parser.parse_args(argss)
        options = options.__dict__
        options['reweightonly'] = False
        options['nocompile'] = False
        self.check_compile(argss, options)

        mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
        self.ask_run_configuration(mode, options)
        self.compile(mode, options)

        self.update_status('', level='all', update_results=True)

    def update_random_seed(self):
        """Update the random number seed with the value from the run_card.
        If this is 0, take the last seed used and increment it by one."""
        iseed = self.run_card['iseed']
        if iseed == 0:
            randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
            iseed = int(randinit.read()[2:]) + 1
            randinit.close()
        randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
        randinit.write('r=%d' % iseed)
        randinit.close()

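    # Editor's note -- sketch of the SubProcesses/randinit file read and
    # written above. It holds a single line such as
    #
    #   r=33
    #
    # so read()[2:] skips the 'r=' prefix, and the previous seed is simply
    # incremented when iseed is 0 in the run_card.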
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created"""
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # for the second step in applgrid mode, do only the event generation step
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()
        # find and keep track of all the jobs
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                        'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        p_dirs = [d for d in \
                  open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # Clean previous results
        self.clean_previous_results(options, p_dirs, folder_names[mode])

        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']

        if options['reweightonly']:
            event_norm = self.run_card['event_norm']
            nevents = self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        devnull = os.open(os.devnull, os.O_RDWR)

        if mode in ['LO', 'NLO']:
            # this is for fixed order runs
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # Re-distribute the grids for the 2nd step of the applgrid running
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options, mode_dict[mode], p_dirs)

            # create a list of dictionaries "jobs_to_run" with all the
            # jobs that need to be run
            integration_step = -1
            jobs_to_run, jobs_to_collect, integration_step = self.create_jobs_to_run(options, p_dirs, \
                            req_acc, mode_dict[mode], integration_step, mode, fixed_order=True)
            self.prepare_directories(jobs_to_run, mode)

            # loop over the integration steps. After every step, check
            # if we have the required accuracy. If this is the case,
            # stop running, else do another step.
            while True:
                integration_step = integration_step + 1
                self.run_all_jobs(jobs_to_run, integration_step)
                self.collect_log_files(jobs_to_run, integration_step)
                jobs_to_run, jobs_to_collect = self.collect_the_results(options, req_acc, jobs_to_run, \
                                jobs_to_collect, integration_step, mode, mode_dict[mode])
                if not jobs_to_run:
                    # there are no more jobs to run (jobs_to_run is empty)
                    break
            # We are done.
            self.finalise_run_FO(folder_names[mode], jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born', \
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            if nevents == 0 and req_acc < 0:
                raise aMCatNLOError('Cannot determine the required accuracy from the number ' \
                                    'of events, because 0 events were requested. Please set ' \
                                    'the "req_acc" parameter in the run_card to a value ' \
                                    'between 0 and 1')
            elif req_acc > 1 or req_acc == 0:
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should ' \
                                    'be larger than 0 and smaller than 1, ' \
                                    'or set to -1 for automatic determination. Current ' \
                                    'value is %f' % req_acc)
            # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card)
            elif req_acc < 0 and nevents > 1000000:
                req_acc = 0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. ' \
                                    'Please use one of the following: %s' \
                                    % (shower, ', '.join(shower_list)))

            # check that PYTHIA6PT is not used for processes with FSR
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower', 'noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            jobs_to_run, jobs_to_collect, integration_step = self.create_jobs_to_run(options, p_dirs, \
                            req_acc, mode_dict[mode], 1, mode, fixed_order=False)

            # Make sure to update all the jobs to be ready for the event generation step
            if options['only_generation']:
                jobs_to_run, jobs_to_collect = self.collect_the_results(options, req_acc, jobs_to_run, \
                                jobs_to_collect, 1, mode, mode_dict[mode], fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run, mode, fixed_order=False)

            # Main loop over the three MINT generation steps:
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run, mint_step, fixed_order=False)
                self.collect_log_files(jobs_to_run, mint_step)
                if mint_step + 1 == 2 and nevents == 0:
                    self.print_summary(options, 2, mode)
                    return
                jobs_to_run, jobs_to_collect = self.collect_the_results(options, req_acc, jobs_to_run, \
                                jobs_to_collect, mint_step, mode, mode_dict[mode], fixed_order=False)
            # Sanity check on the event files. If there is an error the jobs are resubmitted
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                # if cluster run, wait 10 sec so that event files are transferred back
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm = self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

1447 - def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\ 1448 integration_step,mode,fixed_order=True):
1449 """Creates a list of dictionaries with all the jobs to be run""" 1450 jobs_to_run=[] 1451 if not options['only_generation']: 1452 # Fresh, new run. Check all the P*/channels.txt files 1453 # (created by the 'gensym' executable) to set-up all the 1454 # jobs using the default inputs. 1455 npoints = self.run_card['npoints_FO_grid'] 1456 niters = self.run_card['niters_FO_grid'] 1457 for p_dir in p_dirs: 1458 try: 1459 with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file: 1460 channels=chan_file.readline().split() 1461 except IOError: 1462 logger.warning('No integration channels found for contribution %s' % p_dir) 1463 continue 1464 for channel in channels: 1465 job={} 1466 job['p_dir']=p_dir 1467 job['channel']=channel 1468 job['split']=0 1469 if fixed_order and req_acc == -1: 1470 job['accuracy']=0 1471 job['niters']=niters 1472 job['npoints']=npoints 1473 elif fixed_order and req_acc > 0: 1474 job['accuracy']=0.10 1475 job['niters']=6 1476 job['npoints']=-1 1477 elif not fixed_order: 1478 job['accuracy']=0.03 1479 job['niters']=12 1480 job['npoints']=-1 1481 else: 1482 raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+ 1483 'between 0 and 1 or set it equal to -1.') 1484 job['mint_mode']=0 1485 job['run_mode']=run_mode 1486 job['wgt_frac']=1.0 1487 jobs_to_run.append(job) 1488 jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs 1489 else: 1490 # if options['only_generation'] is true, we need to loop 1491 # over all the existing G* directories and create the jobs 1492 # from there. 1493 name_suffix={'born' :'B', 'all':'F'} 1494 for p_dir in p_dirs: 1495 for chan_dir in os.listdir(pjoin(self.me_dir,'SubProcesses',p_dir)): 1496 if ((chan_dir.startswith(run_mode+'_G') and fixed_order) or\ 1497 (chan_dir.startswith('G'+name_suffix[run_mode]) and (not fixed_order))) and \ 1498 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)) or \ 1499 os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir))): 1500 job={} 1501 job['p_dir']=p_dir 1502 if fixed_order: 1503 channel=chan_dir.split('_')[1] 1504 job['channel']=channel[1:] # remove the 'G' 1505 if len(chan_dir.split('_')) == 3: 1506 split=int(chan_dir.split('_')[2]) 1507 else: 1508 split=0 1509 else: 1510 if len(chan_dir.split('_')) == 2: 1511 split=int(chan_dir.split('_')[1]) 1512 channel=chan_dir.split('_')[0] 1513 job['channel']=channel[2:] # remove the 'G' 1514 else: 1515 job['channel']=chan_dir[2:] # remove the 'G' 1516 split=0 1517 job['split']=split 1518 job['run_mode']=run_mode 1519 job['dirname']=pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir) 1520 job['wgt_frac']=1.0 1521 if not fixed_order: job['mint_mode']=1 1522 jobs_to_run.append(job) 1523 jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs 1524 if fixed_order: 1525 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, 1526 jobs_to_collect,integration_step,mode,run_mode) 1527 # Update the integration_step to make sure that nothing will be overwritten 1528 integration_step=1 1529 for job in jobs_to_run: 1530 while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)): 1531 integration_step=integration_step+1 1532 integration_step=integration_step-1 1533 else: 1534 self.append_the_results(jobs_to_collect,integration_step) 1535 return jobs_to_run,jobs_to_collect,integration_step
1536
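    # Illustrative sketch (hypothetical names and values): for a fresh
    # fixed-order run with req_acc == -1, the job dictionaries built above
    # look like
    #
    #   {'p_dir': 'P0_gg_ttx', 'channel': '1', 'split': 0,
    #    'accuracy': 0, 'niters': <niters_FO_grid>, 'npoints': <npoints_FO_grid>,
    #    'mint_mode': 0, 'run_mode': 'all', 'wgt_frac': 1.0}
    #
    # where 'P0_gg_ttx' stands for one of the P* contribution directories and
    # the channel number comes from its channels.txt file.
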
    def prepare_directories(self, jobs_to_run, mode, fixed_order=True):
1538 """Set-up the G* directories for running""" 1539 name_suffix={'born' :'B' , 'all':'F'} 1540 for job in jobs_to_run: 1541 if job['split'] == 0: 1542 if fixed_order : 1543 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1544 job['run_mode']+'_G'+job['channel']) 1545 else: 1546 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1547 'G'+name_suffix[job['run_mode']]+job['channel']) 1548 else: 1549 if fixed_order : 1550 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1551 job['run_mode']+'_G'+job['channel']+'_'+str(job['split'])) 1552 else: 1553 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1554 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split'])) 1555 job['dirname']=dirname 1556 if not os.path.isdir(dirname): 1557 os.makedirs(dirname) 1558 self.write_input_file(job,fixed_order) 1559 if not fixed_order: 1560 # copy the grids from the base directory to the split directory: 1561 if job['split'] != 0: 1562 for f in ['grid.MC_integer','mint_grids','res_1']: 1563 if not os.path.isfile(pjoin(job['dirname'],f)): 1564 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
1565 1566
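    # The directory names produced above follow these patterns (channel '12'
    # and split index 2 are hypothetical examples):
    #   fixed order, unsplit : SubProcesses/P*/all_G12   (or born_G12)
    #   fixed order, split 2 : SubProcesses/P*/all_G12_2
    #   (N)LO+PS, unsplit    : SubProcesses/P*/GF12      (or GB12)
    #   (N)LO+PS, split 2    : SubProcesses/P*/GF12_2
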
    def write_input_file(self, job, fixed_order):
1568 """write the input file for the madevent_mint* executable in the appropriate directory""" 1569 if fixed_order: 1570 content= \ 1571 """NPOINTS = %(npoints)s 1572 NITERATIONS = %(niters)s 1573 ACCURACY = %(accuracy)s 1574 ADAPT_GRID = 2 1575 MULTICHANNEL = 1 1576 SUM_HELICITY = 1 1577 CHANNEL = %(channel)s 1578 SPLIT = %(split)s 1579 RUN_MODE = %(run_mode)s 1580 RESTART = %(mint_mode)s 1581 """ \ 1582 % job 1583 else: 1584 content = \ 1585 """-1 12 ! points, iterations 1586 %(accuracy)s ! desired fractional accuracy 1587 1 -0.1 ! alpha, beta for Gsoft 1588 -1 -0.1 ! alpha, beta for Gazi 1589 1 ! Suppress amplitude (0 no, 1 yes)? 1590 1 ! Exact helicity sum (0 yes, n = number/event)? 1591 %(channel)s ! Enter Configuration Number: 1592 %(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events 1593 1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij 1594 %(run_mode)s ! all, born, real, virt 1595 """ \ 1596 % job 1597 with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file: 1598 input_file.write(content)
1599 1600
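    # For example, a fixed-order job dictionary with the hypothetical values
    # npoints=1000, niters=6, accuracy=0.10, channel='1', split=0,
    # run_mode='all' and mint_mode=0 is rendered by the first template as
    #
    #   NPOINTS = 1000
    #   NITERATIONS = 6
    #   ACCURACY = 0.1
    #   ADAPT_GRID = 2
    #   MULTICHANNEL = 1
    #   SUM_HELICITY = 1
    #   CHANNEL = 1
    #   SPLIT = 0
    #   RUN_MODE = all
    #   RESTART = 0
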
    def run_all_jobs(self, jobs_to_run, integration_step, fixed_order=True):
1602 """Loops over the jobs_to_run and executes them using the function 'run_exe'""" 1603 if fixed_order: 1604 if integration_step == 0: 1605 self.update_status('Setting up grids', level=None) 1606 else: 1607 self.update_status('Refining results, step %i' % integration_step, level=None) 1608 self.ijob = 0 1609 name_suffix={'born' :'B', 'all':'F'} 1610 if fixed_order: 1611 run_type="Fixed order integration step %s" % integration_step 1612 else: 1613 run_type="MINT step %s" % integration_step 1614 self.njobs=len(jobs_to_run) 1615 for job in jobs_to_run: 1616 executable='ajob1' 1617 if fixed_order: 1618 arguments=[job['channel'],job['run_mode'], \ 1619 str(job['split']),str(integration_step)] 1620 else: 1621 arguments=[job['channel'],name_suffix[job['run_mode']], \ 1622 str(job['split']),str(integration_step)] 1623 self.run_exe(executable,arguments,run_type, 1624 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir'])) 1625 1626 if self.cluster_mode == 2: 1627 time.sleep(1) # security to allow all jobs to be launched 1628 self.wait_for_complete(run_type)
1629 1630
    def collect_the_results(self, options, req_acc, jobs_to_run, jobs_to_collect,\
                            integration_step, mode, run_mode, fixed_order=True):
1633 """Collect the results, make HTML pages, print the summary and 1634 determine if there are more jobs to run. Returns the list 1635 of the jobs that still need to be run, as well as the 1636 complete list of jobs that need to be collected to get the 1637 final answer. 1638 """ 1639 # Get the results of the current integration/MINT step 1640 self.append_the_results(jobs_to_run,integration_step) 1641 self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step) 1642 # Update HTML pages 1643 if fixed_order: 1644 cross, error = sum_html.make_all_html_results(self, ['%s*' % run_mode]) 1645 else: 1646 name_suffix={'born' :'B' , 'all':'F'} 1647 cross, error = sum_html.make_all_html_results(self, ['G%s*' % name_suffix[run_mode]]) 1648 self.results.add_detail('cross', cross) 1649 self.results.add_detail('error', error) 1650 # Set-up jobs for the next iteration/MINT step 1651 jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order) 1652 # if there are no more jobs, we are done! 1653 # Print summary 1654 if (not jobs_to_run_new) and fixed_order: 1655 # print final summary of results (for fixed order) 1656 scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect) 1657 self.print_summary(options,integration_step,mode,scale_pdf_info,done=True) 1658 return jobs_to_run_new,jobs_to_collect 1659 elif jobs_to_run_new: 1660 # print intermediate summary of results 1661 scale_pdf_info=[] 1662 self.print_summary(options,integration_step,mode,scale_pdf_info,done=False) 1663 else: 1664 # When we are done for (N)LO+PS runs, do not print 1665 # anything yet. This will be done after the reweighting 1666 # and collection of the events 1667 scale_pdf_info=[] 1668 # Prepare for the next integration/MINT step 1669 if (not fixed_order) and integration_step+1 == 2 : 1670 # next step is event generation (mint_step 2) 1671 jobs_to_run_new,jobs_to_collect_new= \ 1672 self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect) 1673 self.prepare_directories(jobs_to_run_new,mode,fixed_order) 1674 self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect) 1675 self.write_nevts_files(jobs_to_run_new) 1676 else: 1677 self.prepare_directories(jobs_to_run_new,mode,fixed_order) 1678 jobs_to_collect_new=jobs_to_collect 1679 return jobs_to_run_new,jobs_to_collect_new
1680 1681
    def write_nevents_unweighted_file(self, jobs, jobs0events):
1683 """writes the nevents_unweighted file in the SubProcesses directory. 1684 We also need to write the jobs that will generate 0 events, 1685 because that makes sure that the cross section from those channels 1686 is taken into account in the event weights (by collect_events.f). 1687 """ 1688 content=[] 1689 for job in jobs: 1690 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 1691 lhefile=pjoin(path,'events.lhe') 1692 content.append(' %s %d %9e %9e' % \ 1693 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac'])) 1694 for job in jobs0events: 1695 if job['nevents']==0: 1696 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 1697 lhefile=pjoin(path,'events.lhe') 1698 content.append(' %s %d %9e %9e' % \ 1699 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.)) 1700 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f: 1701 f.write('\n'.join(content)+'\n')
1702
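    # A hypothetical line of the resulting nevents_unweighted file (with the
    # path column left-justified to 40 characters, spacing schematic) is
    #
    #    P0_gg_ttx/GF1_2/events.lhe               1500 1.234500e+00 5.000000e-01
    #
    # i.e. the LHE file, its number of events, the ABS cross section carried
    # by that file (resultABS*wgt_frac), and the weight fraction itself.
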
    def write_nevts_files(self, jobs):
1704 """write the nevts files in the SubProcesses/P*/G*/ directories""" 1705 for job in jobs: 1706 with open(pjoin(job['dirname'],'nevts'),'w') as f: 1707 f.write('%i\n' % job['nevents'])
1708
    def check_the_need_to_split(self, jobs_to_run, jobs_to_collect):
1710 """Looks in the jobs_to_run to see if there is the need to split the 1711 event generation step. Updates jobs_to_run and 1712 jobs_to_collect to replace the split-job by its 1713 splits. Also removes jobs that do not need any events. 1714 """ 1715 nevt_job=self.run_card['nevt_job'] 1716 if nevt_job > 0: 1717 jobs_to_collect_new=copy.copy(jobs_to_collect) 1718 for job in jobs_to_run: 1719 nevents=job['nevents'] 1720 if nevents == 0: 1721 jobs_to_collect_new.remove(job) 1722 elif nevents > nevt_job: 1723 jobs_to_collect_new.remove(job) 1724 if nevents % nevt_job != 0 : 1725 nsplit=int(nevents/nevt_job)+1 1726 else: 1727 nsplit=int(nevents/nevt_job) 1728 for i in range(1,nsplit+1): 1729 job_new=copy.copy(job) 1730 left_over=nevents % nsplit 1731 if i <= left_over: 1732 job_new['nevents']=int(nevents/nsplit)+1 1733 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 1734 else: 1735 job_new['nevents']=int(nevents/nsplit) 1736 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 1737 job_new['split']=i 1738 job_new['dirname']=job['dirname']+'_%i' % job_new['split'] 1739 jobs_to_collect_new.append(job_new) 1740 jobs_to_run_new=copy.copy(jobs_to_collect_new) 1741 else: 1742 jobs_to_run_new=copy.copy(jobs_to_collect) 1743 for job in jobs_to_collect: 1744 if job['nevents'] == 0: 1745 jobs_to_run_new.remove(job) 1746 jobs_to_collect_new=copy.copy(jobs_to_run_new) 1747 1748 return jobs_to_run_new,jobs_to_collect_new
1749 1750
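    # Worked example of the splitting logic above (hypothetical numbers):
    # with nevents=250 and nevt_job=100, nsplit = int(250/100)+1 = 3 and
    # left_over = 250 % 3 = 1, so the three splits generate 84, 83 and 83
    # events with wgt_frac 84/250, 83/250 and 83/250 respectively; the
    # per-split events add back up to the original 250.
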
    def update_jobs_to_run(self, req_acc, step, jobs, fixed_order=True):
1752 """ 1753 For (N)LO+PS: determines the number of events and/or the required 1754 accuracy per job. 1755 For fixed order: determines which jobs need higher precision and 1756 returns those with the newly requested precision. 1757 """ 1758 err=self.cross_sect_dict['errt'] 1759 tot=self.cross_sect_dict['xsect'] 1760 errABS=self.cross_sect_dict['erra'] 1761 totABS=self.cross_sect_dict['xseca'] 1762 jobs_new=[] 1763 if fixed_order: 1764 if req_acc == -1: 1765 if step+1 == 1: 1766 npoints = self.run_card['npoints_FO'] 1767 niters = self.run_card['niters_FO'] 1768 for job in jobs: 1769 job['mint_mode']=-1 1770 job['niters']=niters 1771 job['npoints']=npoints 1772 jobs_new.append(job) 1773 elif step+1 == 2: 1774 pass 1775 elif step+1 > 2: 1776 raise aMCatNLOError('Cannot determine number of iterations and PS points '+ 1777 'for integration step %i' % step ) 1778 elif ( req_acc > 0 and err/tot > req_acc*1.2 ) or step <= 0: 1779 req_accABS=req_acc*abs(tot)/totABS # overal relative required accuracy on ABS Xsec. 1780 for job in jobs: 1781 job['mint_mode']=-1 1782 # Determine relative required accuracy on the ABS for this job 1783 job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS']) 1784 # If already accurate enough, skip the job (except when doing the first 1785 # step for the iappl=2 run: we need to fill all the applgrid grids!) 1786 if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \ 1787 and not (step==-1 and self.run_card['iappl'] == 2): 1788 continue 1789 # Update the number of PS points based on errorABS, ncall and accuracy 1790 itmax_fl=job['niters_done']*math.pow(job['errorABS']/ 1791 (job['accuracy']*job['resultABS']),2) 1792 if itmax_fl <= 4.0 : 1793 job['niters']=max(int(round(itmax_fl)),2) 1794 job['npoints']=job['npoints_done']*2 1795 elif itmax_fl > 4.0 and itmax_fl <= 16.0 : 1796 job['niters']=4 1797 job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2 1798 else: 1799 if itmax_fl > 100.0 : itmax_fl=50.0 1800 job['niters']=int(round(math.sqrt(itmax_fl))) 1801 job['npoints']=int(round(job['npoints_done']*itmax_fl/ 1802 round(math.sqrt(itmax_fl))))*2 1803 # Add the job to the list of jobs that need to be run 1804 jobs_new.append(job) 1805 return jobs_new 1806 elif step+1 <= 2: 1807 nevents=self.run_card['nevents'] 1808 # Total required accuracy for the upper bounding envelope 1809 if req_acc<0: 1810 req_acc2_inv=nevents 1811 else: 1812 req_acc2_inv=1/(req_acc*req_acc) 1813 if step+1 == 1 or step+1 == 2 : 1814 # determine the req. accuracy for each of the jobs for Mint-step = 1 1815 for job in jobs: 1816 accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2) 1817 job['accuracy']=accuracy 1818 if step+1 == 2: 1819 # Randomly (based on the relative ABS Xsec of the job) determine the 1820 # number of events each job needs to generate for MINT-step = 2. 1821 r=self.get_randinit_seed() 1822 random.seed(r) 1823 totevts=nevents 1824 for job in jobs: 1825 job['nevents'] = 0 1826 while totevts : 1827 target = random.random() * totABS 1828 crosssum = 0. 1829 i = 0 1830 while i<len(jobs) and crosssum < target: 1831 job = jobs[i] 1832 crosssum += job['resultABS'] 1833 i += 1 1834 totevts -= 1 1835 i -= 1 1836 jobs[i]['nevents'] += 1 1837 for job in jobs: 1838 job['mint_mode']=step+1 # next step 1839 return jobs 1840 else: 1841 return []
1842 1843
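    # The fixed-order channel accuracies set above follow
    #     accuracy_j = req_accABS * sqrt(totABS / resultABS_j),
    # so channels carrying a larger share of the ABS cross section get a
    # tighter relative accuracy. Hypothetical example: with req_accABS=0.01
    # and totABS=100 pb, a channel with resultABS_j=25 pb gets
    # accuracy_j = 0.01*sqrt(100/25) = 0.02 (2% relative accuracy). Since
    # the resultABS_j add up to totABS, the absolute errors
    # resultABS_j*accuracy_j then combine in quadrature to req_accABS*totABS.
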
    def get_randinit_seed(self):
1845 """ Get the random number seed from the randinit file """ 1846 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit: 1847 # format of the file is "r=%d". 1848 iseed = int(randinit.read()[2:]) 1849 return iseed
1850 1851
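    # The randinit file read above is a one-line file of the form "r=33"
    # (hypothetical seed), so everything after the first two characters is
    # parsed as the integer seed.
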
    def append_the_results(self, jobs, integration_step):
1853 """Appends the results for each of the jobs in the job list""" 1854 error_found=False 1855 for job in jobs: 1856 try: 1857 if integration_step >= 0 : 1858 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file: 1859 results=res_file.readline().split() 1860 else: 1861 # should only be here when doing fixed order with the 'only_generation' 1862 # option equal to True. Take the results from the final run done. 1863 with open(pjoin(job['dirname'],'res.dat')) as res_file: 1864 results=res_file.readline().split() 1865 except IOError: 1866 if not error_found: 1867 error_found=True 1868 error_log=[] 1869 error_log.append(pjoin(job['dirname'],'log.txt')) 1870 continue 1871 job['resultABS']=float(results[0]) 1872 job['errorABS']=float(results[1]) 1873 job['result']=float(results[2]) 1874 job['error']=float(results[3]) 1875 job['niters_done']=int(results[4]) 1876 job['npoints_done']=int(results[5]) 1877 job['time_spend']=float(results[6]) 1878 job['err_percABS'] = job['errorABS']/job['resultABS']*100. 1879 job['err_perc'] = job['error']/job['result']*100. 1880 if error_found: 1881 raise aMCatNLOError('An error occurred during the collection of results.\n' + 1882 'Please check the .log files inside the directories which failed:\n' + 1883 '\n'.join(error_log)+'\n')
1884 1885 1886
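    # The res_*.dat files read above are single-line files with seven
    # whitespace-separated numbers, parsed positionally as
    #   resultABS errorABS result error niters_done npoints_done time_spend
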
    def write_res_txt_file(self, jobs, integration_step):
1888 """writes the res.txt files in the SubProcess dir""" 1889 jobs.sort(key = lambda job: -job['errorABS']) 1890 content=[] 1891 content.append('\n\nCross section per integration channel:') 1892 for job in jobs: 1893 content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job) 1894 content.append('\n\nABS cross section per integration channel:') 1895 for job in jobs: 1896 content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job) 1897 totABS=0 1898 errABS=0 1899 tot=0 1900 err=0 1901 for job in jobs: 1902 totABS+= job['resultABS']*job['wgt_frac'] 1903 errABS+= math.pow(job['errorABS'],2)*job['wgt_frac'] 1904 tot+= job['result']*job['wgt_frac'] 1905 err+= math.pow(job['error'],2)*job['wgt_frac'] 1906 if jobs: 1907 content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\ 1908 (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\ 1909 tot, math.sqrt(err), math.sqrt(err)/tot *100.)) 1910 with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file: 1911 res_file.write('\n'.join(content)) 1912 randinit=self.get_randinit_seed() 1913 return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\ 1914 'erra':math.sqrt(errABS),'randinit':randinit}
1915 1916
    def collect_scale_pdf_info(self, options, jobs):
1918 """read the scale_pdf_dependence.dat files and collects there results""" 1919 scale_pdf_info=[] 1920 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 1921 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 1922 data_files=[] 1923 for job in jobs: 1924 data_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat')) 1925 scale_pdf_info = self.pdf_scale_from_reweighting(data_files) 1926 return scale_pdf_info
1927 1928
    def combine_plots_FO(self, folder_name, jobs):
1930 """combines the plots and puts then in the Events/run* directory""" 1931 devnull = os.open(os.devnull, os.O_RDWR) 1932 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer': 1933 misc.call(['./combine_plots_FO.sh'] + folder_name, \ 1934 stdout=devnull, 1935 cwd=pjoin(self.me_dir, 'SubProcesses')) 1936 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'), 1937 pjoin(self.me_dir, 'Events', self.run_name)) 1938 logger.info('The results of this run and the TopDrawer file with the plots' + \ 1939 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 1940 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu': 1941 out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO') 1942 self.combine_plots_HwU(jobs,out) 1943 try: 1944 misc.call(['gnuplot','MADatNLO.gnuplot'],\ 1945 stdout=devnull,stderr=devnull,\ 1946 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 1947 except Exception: 1948 pass 1949 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \ 1950 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 1951 elif self.analyse_card['fo_analysis_format'].lower() == 'root': 1952 misc.call(['./combine_root.sh'] + folder_name, \ 1953 stdout=devnull, 1954 cwd=pjoin(self.me_dir, 'SubProcesses')) 1955 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'), 1956 pjoin(self.me_dir, 'Events', self.run_name)) 1957 logger.info('The results of this run and the ROOT file with the plots' + \ 1958 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 1959 else: 1960 logger.info('The results of this run' + \ 1961 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1962 1963
    def combine_plots_HwU(self, jobs, out, normalisation=None):
1965 """Sums all the plots in the HwU format.""" 1966 logger.debug('Combining HwU plots.') 1967 1968 command = [] 1969 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py')) 1970 for job in jobs: 1971 if job['dirname'].endswith('.HwU'): 1972 command.append(job['dirname']) 1973 else: 1974 command.append(pjoin(job['dirname'],'MADatNLO.HwU')) 1975 command.append("--out="+out) 1976 command.append("--gnuplot") 1977 command.append("--band=[]") 1978 command.append("--lhapdf-config="+self.options['lhapdf']) 1979 if normalisation: 1980 command.append("--multiply="+(','.join([str(n) for n in normalisation]))) 1981 command.append("--sum") 1982 command.append("--no_open") 1983 1984 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir) 1985 1986 while p.poll() is None: 1987 line = p.stdout.readline() 1988 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']): 1989 print line[:-1] 1990 elif __debug__ and line: 1991 logger.debug(line[:-1])
1992 1993
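    # A hypothetical command assembled above, for two jobs and no extra
    # normalisation, is equivalent to running (from within me_dir):
    #
    #   <me_dir>/bin/internal/histograms.py \
    #       <G-dir-1>/MADatNLO.HwU <G-dir-2>/MADatNLO.HwU \
    #       --out=<me_dir>/Events/<run_name>/MADatNLO --gnuplot --band=[] \
    #       --lhapdf-config=<lhapdf-config> --sum --no_open
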
    def applgrid_combine(self, cross, error, jobs):
1995 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories""" 1996 logger.debug('Combining APPLgrids \n') 1997 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'), 1998 'applgrid-combine') 1999 all_jobs=[] 2000 for job in jobs: 2001 all_jobs.append(job['dirname']) 2002 ngrids=len(all_jobs) 2003 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")]) 2004 for obs in range(0,nobs): 2005 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs] 2006 # combine APPLgrids from different channels for observable 'obs' 2007 if self.run_card["iappl"] == 1: 2008 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name, 2009 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir) 2010 elif self.run_card["iappl"] == 2: 2011 unc2_inv=pow(cross/error,2) 2012 unc2_inv_ngrids=pow(cross/error,2)*ngrids 2013 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events", 2014 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s', 2015 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir) 2016 for job in all_jobs: 2017 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root")) 2018 else: 2019 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2') 2020 # after combining, delete the original grids 2021 for ggdir in gdir: 2022 os.remove(ggdir)
2023 2024
    def applgrid_distribute(self, options, mode, p_dirs):
2026 """Distributes the APPLgrids ready to be filled by a second run of the code""" 2027 # if no appl_start_grid argument given, guess it from the time stamps 2028 # of the starting grid files 2029 if not('appl_start_grid' in options.keys() and options['appl_start_grid']): 2030 gfiles=glob.glob(pjoin(self.me_dir, 'Events','*', 2031 'aMCfast_obs_0_starting_grid.root')) 2032 time_stamps={} 2033 for root_file in gfiles: 2034 time_stamps[root_file]=os.path.getmtime(root_file) 2035 options['appl_start_grid']= \ 2036 max(time_stamps.iterkeys(), key=(lambda key: 2037 time_stamps[key])).split('/')[-2] 2038 logger.info('No --appl_start_grid option given. '+\ 2039 'Guessing that start grid from run "%s" should be used.' \ 2040 % options['appl_start_grid']) 2041 2042 if 'appl_start_grid' in options.keys() and options['appl_start_grid']: 2043 self.appl_start_grid = options['appl_start_grid'] 2044 start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid) 2045 # check that this dir exists and at least one grid file is there 2046 if not os.path.exists(pjoin(start_grid_dir, 2047 'aMCfast_obs_0_starting_grid.root')): 2048 raise self.InvalidCmd('APPLgrid file not found: %s' % \ 2049 pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root')) 2050 else: 2051 all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \ 2052 start_grid_dir) if name.endswith("_starting_grid.root")] 2053 nobs =len(all_grids) 2054 gstring=" ".join(all_grids) 2055 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid: 2056 raise self.InvalidCmd('No APPLgrid name currently defined.'+ 2057 'Please provide this information.') 2058 #copy the grid to all relevant directories 2059 for pdir in p_dirs: 2060 g_dirs = [file for file in os.listdir(pjoin(self.me_dir, 2061 "SubProcesses",pdir)) if file.startswith(mode+'_G') and 2062 os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))] 2063 for g_dir in g_dirs: 2064 for grid in all_grids: 2065 obs=grid.split('_')[-3] 2066 files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir, 2067 'grid_obs_'+obs+'_in.root'))
2068 2069 2070 2071
    def collect_log_files(self, jobs, integration_step):
2073 """collect the log files and put them in a single, html-friendly file 2074 inside the Events/run_.../ directory""" 2075 log_file = pjoin(self.me_dir, 'Events', self.run_name, 2076 'alllogs_%d.html' % integration_step) 2077 outfile = open(log_file, 'w') 2078 2079 content = '' 2080 content += '<HTML><BODY>\n<font face="courier" size=2>' 2081 for job in jobs: 2082 # put an anchor 2083 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step) 2084 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace( 2085 pjoin(self.me_dir,'SubProcesses'),'')) 2086 # and put some nice header 2087 content += '<font color="red">\n' 2088 content += '<br>LOG file for integration channel %s, %s <br>' % \ 2089 (os.path.dirname(log).replace(pjoin(self.me_dir, 2090 'SubProcesses'), ''), 2091 integration_step) 2092 content += '</font>\n' 2093 #then just flush the content of the small log inside the big log 2094 #the PRE tag prints everything verbatim 2095 content += '<PRE>\n' + open(log).read() + '\n</PRE>' 2096 content +='<br>\n' 2097 outfile.write(content) 2098 content='' 2099 2100 outfile.write('</font>\n</BODY></HTML>\n') 2101 outfile.close()
2102 2103
    def finalise_run_FO(self, folder_name, jobs):
2105 """Combine the plots and put the res*.txt files in the Events/run.../ folder.""" 2106 # Copy the res_*.txt files to the Events/run* folder 2107 res_files=glob.glob(pjoin(self.me_dir, 'SubProcesses', 'res_*.txt')) 2108 for res_file in res_files: 2109 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2110 # Collect the plots and put them in the Events/run* folder 2111 self.combine_plots_FO(folder_name,jobs) 2112 # If doing the applgrid-stuff, also combine those grids 2113 # and put those in the Events/run* folder 2114 if self.run_card['iappl'] != 0: 2115 cross=self.cross_sect_dict['xsect'] 2116 error=self.cross_sect_dict['errt'] 2117 self.applgrid_combine(cross,error,jobs)
2118 2119
    def setup_cluster_or_multicore(self):
2121 """setup the number of cores for multicore, and the cluster-type for cluster runs""" 2122 if self.cluster_mode == 1: 2123 cluster_name = self.options['cluster_type'] 2124 self.cluster = cluster.from_name[cluster_name](**self.options) 2125 if self.cluster_mode == 2: 2126 try: 2127 import multiprocessing 2128 if not self.nb_core: 2129 try: 2130 self.nb_core = int(self.options['nb_core']) 2131 except TypeError: 2132 self.nb_core = multiprocessing.cpu_count() 2133 logger.info('Using %d cores' % self.nb_core) 2134 except ImportError: 2135 self.nb_core = 1 2136 logger.warning('Impossible to detect the number of cores => Using One.\n'+ 2137 'Use set nb_core X in order to set this number and be able to'+ 2138 'run in multicore.') 2139 2140 self.cluster = cluster.MultiCore(**self.options)
2141 2142
    def clean_previous_results(self, options, p_dirs, folder_name):
2144 """Clean previous results. 2145 o. If doing only the reweighting step, do not delete anything and return directlty. 2146 o. Always remove all the G*_* files (from split event generation). 2147 o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only.""" 2148 if options['reweightonly']: 2149 return 2150 if not options['only_generation']: 2151 self.update_status('Cleaning previous results', level=None) 2152 for dir in p_dirs: 2153 #find old folders to be removed 2154 for obj in folder_name: 2155 # list all the G* (or all_G* or born_G*) directories 2156 to_rm = [file for file in \ 2157 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 2158 if file.startswith(obj[:-1]) and \ 2159 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 2160 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 2161 # list all the G*_* directories (from split event generation) 2162 to_always_rm = [file for file in \ 2163 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 2164 if file.startswith(obj[:-1]) and 2165 '_' in file and not '_G' in file and \ 2166 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 2167 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 2168 2169 if not options['only_generation']: 2170 to_always_rm.extend(to_rm) 2171 if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')): 2172 to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')) 2173 files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm]) 2174 return
2175 2176
    def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
2178 """print a summary of the results contained in self.cross_sect_dict. 2179 step corresponds to the mintMC step, if =2 (i.e. after event generation) 2180 some additional infos are printed""" 2181 # find process name 2182 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n') 2183 process = '' 2184 for line in proc_card_lines: 2185 if line.startswith('generate') or line.startswith('add process'): 2186 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; ' 2187 lpp = {0:'l', 1:'p', -1:'pbar'} 2188 if self.ninitial == 1: 2189 proc_info = '\n Process %s' % process[:-3] 2190 else: 2191 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \ 2192 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']], 2193 self.run_card['ebeam1'], self.run_card['ebeam2']) 2194 2195 if self.ninitial == 1: 2196 self.cross_sect_dict['unit']='GeV' 2197 self.cross_sect_dict['xsec_string']='(Partial) decay width' 2198 self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)' 2199 else: 2200 self.cross_sect_dict['unit']='pb' 2201 self.cross_sect_dict['xsec_string']='Total cross section' 2202 self.cross_sect_dict['axsec_string']='Total abs(cross section)' 2203 2204 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 2205 status = ['Determining the number of unweighted events per channel', 2206 'Updating the number of unweighted events per channel', 2207 'Summary:'] 2208 computed='(computed from LHE events)' 2209 elif mode in ['NLO', 'LO']: 2210 status = ['Results after grid setup:','Current results:', 2211 'Final results and run summary:'] 2212 computed='(computed from histogram information)' 2213 2214 if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 2215 message = status[step] + '\n\n Intermediate results:' + \ 2216 ('\n Random seed: %(randinit)d' + \ 2217 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \ 2218 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \ 2219 % self.cross_sect_dict 2220 elif mode in ['NLO','LO'] and not done: 2221 if step == 0: 2222 message = '\n ' + status[0] + \ 2223 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2224 self.cross_sect_dict 2225 else: 2226 message = '\n ' + status[1] + \ 2227 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2228 self.cross_sect_dict 2229 2230 else: 2231 message = '\n --------------------------------------------------------------' 2232 message = message + \ 2233 '\n ' + status[2] + proc_info + \ 2234 '\n Number of events generated: %s' % self.run_card['nevents'] +\ 2235 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2236 self.cross_sect_dict 2237 message = message + \ 2238 '\n --------------------------------------------------------------' 2239 if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']): 2240 if scale_pdf_info[0]: 2241 # scale uncertainties 2242 message = message + '\n Scale variation %s:' % computed 2243 for s in scale_pdf_info[0]: 2244 if s['unc']: 2245 if self.run_card['ickkw'] != -1: 2246 message = message + \ 2247 ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\ 2248 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s 2249 else: 2250 message = message + \ 2251 ('\n Soft and hard scale dependence (added in quadrature): '\ 2252 '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s 2253 2254 else: 2255 message = message + \ 2256 ('\n Dynamical_scale_choice %(label)i: '\ 2257 '\n %(cen)8.3e 
pb') % s 2258 2259 if scale_pdf_info[1]: 2260 message = message + '\n PDF variation %s:' % computed 2261 for p in scale_pdf_info[1]: 2262 if p['unc']=='none': 2263 message = message + \ 2264 ('\n %(name)s (central value only): '\ 2265 '\n %(cen)8.3e pb') % p 2266 2267 elif p['unc']=='unknown': 2268 message = message + \ 2269 ('\n %(name)s (%(size)s members; combination method unknown): '\ 2270 '\n %(cen)8.3e pb') % p 2271 else: 2272 message = message + \ 2273 ('\n %(name)s (%(size)s members; using %(unc)s method): '\ 2274 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p 2275 # pdf uncertainties 2276 message = message + \ 2277 '\n --------------------------------------------------------------' 2278 2279 2280 if (mode in ['NLO', 'LO'] and not done) or \ 2281 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2): 2282 logger.info(message+'\n') 2283 return 2284 2285 # Some advanced general statistics are shown in the debug message at the 2286 # end of the run 2287 # Make sure it never stops a run 2288 # Gather some basic statistics for the run and extracted from the log files. 2289 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 2290 log_GV_files = glob.glob(pjoin(self.me_dir, \ 2291 'SubProcesses', 'P*','G*','log_MINT*.txt')) 2292 all_log_files = log_GV_files 2293 elif mode == 'NLO': 2294 log_GV_files = glob.glob(pjoin(self.me_dir, \ 2295 'SubProcesses', 'P*','all_G*','log_MINT*.txt')) 2296 all_log_files = log_GV_files 2297 2298 elif mode == 'LO': 2299 log_GV_files = '' 2300 all_log_files = glob.glob(pjoin(self.me_dir, \ 2301 'SubProcesses', 'P*','born_G*','log_MINT*.txt')) 2302 else: 2303 raise aMCatNLOError, 'Running mode %s not supported.'%mode 2304 2305 try: 2306 message, debug_msg = \ 2307 self.compile_advanced_stats(log_GV_files, all_log_files, message) 2308 except Exception as e: 2309 debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e) 2310 err_string = StringIO.StringIO() 2311 traceback.print_exc(limit=4, file=err_string) 2312 debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\ 2313 %err_string.getvalue() 2314 2315 logger.debug(debug_msg+'\n') 2316 logger.info(message+'\n') 2317 2318 # Now copy relevant information in the Events/Run_<xxx> directory 2319 evt_path = pjoin(self.me_dir, 'Events', self.run_name) 2320 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n') 2321 open(pjoin(evt_path, '.full_summary.txt'), 2322 'w').write(message+'\n\n'+debug_msg+'\n') 2323 2324 self.archive_files(evt_path,mode)
2325
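    # A hypothetical final (N)LO+PS summary assembled above (made-up numbers,
    # whitespace schematic) reads:
    #
    #   --------------------------------------------------------------
    #   Summary:
    #   Process p p > t t~ [QCD]
    #   Run at p-p collider (6500.0 + 6500.0 GeV)
    #   Number of events generated: 10000
    #   Total cross section: 4.532e+02 +- 2.1e+00 pb
    #   --------------------------------------------------------------
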
    def archive_files(self, evt_path, mode):
2327 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 2328 the run.""" 2329 2330 files_to_arxiv = [pjoin('Cards','param_card.dat'), 2331 pjoin('Cards','MadLoopParams.dat'), 2332 pjoin('Cards','FKS_params.dat'), 2333 pjoin('Cards','run_card.dat'), 2334 pjoin('Subprocesses','setscales.f'), 2335 pjoin('Subprocesses','cuts.f')] 2336 2337 if mode in ['NLO', 'LO']: 2338 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 2339 2340 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 2341 os.mkdir(pjoin(evt_path,'RunMaterial')) 2342 2343 for path in files_to_arxiv: 2344 if os.path.isfile(pjoin(self.me_dir,path)): 2345 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 2346 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 2347 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
2348
    def compile_advanced_stats(self, log_GV_files, all_log_files, message):
2350 """ This functions goes through the log files given in arguments and 2351 compiles statistics about MadLoop stability, virtual integration 2352 optimization and detection of potential error messages into a nice 2353 debug message to printed at the end of the run """ 2354 2355 def safe_float(str_float): 2356 try: 2357 return float(str_float) 2358 except ValueError: 2359 logger.debug('Could not convert the following float during'+ 2360 ' advanced statistics printout: %s'%str(str_float)) 2361 return -1.0
2362 2363 2364 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 2365 # > Errors is a list of tuples with this format (log_file,nErrors) 2366 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 2367 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 2368 2369 # ================================== 2370 # == MadLoop stability statistics == 2371 # ================================== 2372 2373 # Recuperate the fraction of unstable PS points found in the runs for 2374 # the virtuals 2375 UPS_stat_finder = re.compile( 2376 r"Satistics from MadLoop:.*"+\ 2377 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 2378 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 2379 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 2380 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 2381 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 2382 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 2383 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 2384 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 2385 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 2386 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 2387 2388 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 2389 1 : 'CutTools (double precision)', 2390 2 : 'PJFry++', 2391 3 : 'IREGI', 2392 4 : 'Golem95', 2393 5 : 'Samurai', 2394 6 : 'Ninja (double precision)', 2395 8 : 'Ninja (quadruple precision)', 2396 9 : 'CutTools (quadruple precision)'} 2397 RetUnit_finder =re.compile( 2398 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 2399 #Unit 2400 2401 for gv_log in log_GV_files: 2402 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 2403 log=open(gv_log,'r').read() 2404 UPS_stats = re.search(UPS_stat_finder,log) 2405 for retunit_stats in re.finditer(RetUnit_finder, log): 2406 if channel_name not in stats['UPS'].keys(): 2407 stats['UPS'][channel_name] = [0]*10+[[0]*10] 2408 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 2409 += int(retunit_stats.group('n_occurences')) 2410 if not UPS_stats is None: 2411 try: 2412 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 2413 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 2414 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 2415 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 2416 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 2417 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 2418 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 2419 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 2420 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 2421 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 2422 except KeyError: 2423 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 2424 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 2425 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 2426 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 2427 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 2428 int(UPS_stats.group('n10')),[0]*10] 2429 debug_msg = "" 2430 if len(stats['UPS'].keys())>0: 2431 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 2432 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 2433 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 2434 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 2435 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 2436 nTotddp = sum([chan[5] for chan in 
stats['UPS'].values()],0) 2437 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 2438 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 2439 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 2440 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 2441 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 2442 for i in range(10)] 2443 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 2444 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 2445 maxUPS = max(UPSfracs, key = lambda w: w[1]) 2446 2447 tmpStr = "" 2448 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 2449 tmpStr += '\n Stability unknown: %d'%nTotsun 2450 tmpStr += '\n Stable PS point: %d'%nTotsps 2451 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 2452 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 2453 tmpStr += '\n Only double precision used: %d'%nTotddp 2454 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 2455 tmpStr += '\n Initialization phase-space points: %d'%nTotini 2456 tmpStr += '\n Reduction methods used:' 2457 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 2458 unit_code_meaning.keys() if nTot1[i]>0] 2459 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 2460 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 2461 if nTot100 != 0: 2462 debug_msg += '\n Unknown return code (100): %d'%nTot100 2463 if nTot10 != 0: 2464 debug_msg += '\n Unknown return code (10): %d'%nTot10 2465 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 2466 not in unit_code_meaning.keys()) 2467 if nUnknownUnit != 0: 2468 debug_msg += '\n Unknown return code (1): %d'\ 2469 %nUnknownUnit 2470 2471 if maxUPS[1]>0.001: 2472 message += tmpStr 2473 message += '\n Total number of unstable PS point detected:'+\ 2474 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS) 2475 message += '\n Maximum fraction of UPS points in '+\ 2476 'channel %s (%4.2f%%)'%maxUPS 2477 message += '\n Please report this to the authors while '+\ 2478 'providing the file' 2479 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 2480 maxUPS[0],'UPS.log')) 2481 else: 2482 debug_msg += tmpStr 2483 2484 2485 # ==================================================== 2486 # == aMC@NLO virtual integration optimization stats == 2487 # ==================================================== 2488 2489 virt_tricks_finder = re.compile( 2490 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 2491 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 2492 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 2493 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 2494 2495 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 2496 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 2497 2498 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 2499 2500 channel_contr_list = {} 2501 for gv_log in log_GV_files: 2502 logfile=open(gv_log,'r') 2503 log = logfile.read() 2504 logfile.close() 2505 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2506 vf_stats = None 2507 for vf_stats in re.finditer(virt_frac_finder, log): 2508 pass 2509 if not vf_stats is None: 2510 v_frac = safe_float(vf_stats.group('v_frac')) 2511 v_average = safe_float(vf_stats.group('v_average')) 2512 try: 2513 if v_frac < stats['virt_stats']['v_frac_min'][0]: 2514 
stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 2515 if v_frac > stats['virt_stats']['v_frac_max'][0]: 2516 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 2517 stats['virt_stats']['v_frac_avg'][0] += v_frac 2518 stats['virt_stats']['v_frac_avg'][1] += 1 2519 except KeyError: 2520 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 2521 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 2522 stats['virt_stats']['v_frac_avg']=[v_frac,1] 2523 2524 2525 ccontr_stats = None 2526 for ccontr_stats in re.finditer(channel_contr_finder, log): 2527 pass 2528 if not ccontr_stats is None: 2529 contrib = safe_float(ccontr_stats.group('v_contr')) 2530 try: 2531 if contrib>channel_contr_list[channel_name]: 2532 channel_contr_list[channel_name]=contrib 2533 except KeyError: 2534 channel_contr_list[channel_name]=contrib 2535 2536 2537 # Now build the list of relevant virt log files to look for the maxima 2538 # of virt fractions and such. 2539 average_contrib = 0.0 2540 for value in channel_contr_list.values(): 2541 average_contrib += value 2542 if len(channel_contr_list.values()) !=0: 2543 average_contrib = average_contrib / len(channel_contr_list.values()) 2544 2545 relevant_log_GV_files = [] 2546 excluded_channels = set([]) 2547 all_channels = set([]) 2548 for log_file in log_GV_files: 2549 channel_name = '/'.join(log_file.split('/')[-3:-1]) 2550 all_channels.add(channel_name) 2551 try: 2552 if channel_contr_list[channel_name] > (0.1*average_contrib): 2553 relevant_log_GV_files.append(log_file) 2554 else: 2555 excluded_channels.add(channel_name) 2556 except KeyError: 2557 relevant_log_GV_files.append(log_file) 2558 2559 # Now we want to use the latest occurence of accumulated result in the log file 2560 for gv_log in relevant_log_GV_files: 2561 logfile=open(gv_log,'r') 2562 log = logfile.read() 2563 logfile.close() 2564 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2565 2566 vt_stats = None 2567 for vt_stats in re.finditer(virt_tricks_finder, log): 2568 pass 2569 if not vt_stats is None: 2570 vt_stats_group = vt_stats.groupdict() 2571 v_ratio = safe_float(vt_stats.group('v_ratio')) 2572 v_ratio_err = safe_float(vt_stats.group('v_ratio_err')) 2573 v_contr = safe_float(vt_stats.group('v_abs_contr')) 2574 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err')) 2575 try: 2576 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 2577 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 2578 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 2579 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 2580 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 2581 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 2582 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 2583 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 2584 if v_contr < stats['virt_stats']['v_contr_min'][0]: 2585 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 2586 if v_contr > stats['virt_stats']['v_contr_max'][0]: 2587 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 2588 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 2589 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 2590 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 2591 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 2592 except KeyError: 2593 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 2594 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 2595 
stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 2596 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 2597 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 2598 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 2599 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 2600 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 2601 2602 vf_stats = None 2603 for vf_stats in re.finditer(virt_frac_finder, log): 2604 pass 2605 if not vf_stats is None: 2606 v_frac = safe_float(vf_stats.group('v_frac')) 2607 v_average = safe_float(vf_stats.group('v_average')) 2608 try: 2609 if v_average < stats['virt_stats']['v_average_min'][0]: 2610 stats['virt_stats']['v_average_min']=(v_average,channel_name) 2611 if v_average > stats['virt_stats']['v_average_max'][0]: 2612 stats['virt_stats']['v_average_max']=(v_average,channel_name) 2613 stats['virt_stats']['v_average_avg'][0] += v_average 2614 stats['virt_stats']['v_average_avg'][1] += 1 2615 except KeyError: 2616 stats['virt_stats']['v_average_min']=[v_average,channel_name] 2617 stats['virt_stats']['v_average_max']=[v_average,channel_name] 2618 stats['virt_stats']['v_average_avg']=[v_average,1] 2619 2620 try: 2621 debug_msg += '\n\n Statistics on virtual integration optimization : ' 2622 2623 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 2624 %tuple(stats['virt_stats']['v_frac_max']) 2625 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 2626 %tuple(stats['virt_stats']['v_frac_min']) 2627 debug_msg += '\n Average virt fraction computed %.3f'\ 2628 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1])) 2629 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 2630 (len(excluded_channels),len(all_channels)) 2631 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 2632 %tuple(stats['virt_stats']['v_average_max']) 2633 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 2634 %tuple(stats['virt_stats']['v_ratio_max']) 2635 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 2636 %tuple(stats['virt_stats']['v_ratio_err_max']) 2637 debug_msg += tmpStr 2638 # After all it was decided that it is better not to alarm the user unecessarily 2639 # with such printout of the statistics. 2640 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 2641 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 2642 # message += "\n Suspiciously large MC error in :" 2643 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 2644 # message += tmpStr 2645 2646 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 2647 %tuple(stats['virt_stats']['v_contr_err_max']) 2648 debug_msg += tmpStr 2649 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 2650 # message += tmpStr 2651 2652 2653 except KeyError: 2654 debug_msg += '\n Could not find statistics on the integration optimization. 
' 2655 2656 # ======================================= 2657 # == aMC@NLO timing profile statistics == 2658 # ======================================= 2659 2660 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 2661 "(?P<time>[\d\+-Eed\.]*)\s*") 2662 2663 for logf in log_GV_files: 2664 logfile=open(logf,'r') 2665 log = logfile.read() 2666 logfile.close() 2667 channel_name = '/'.join(logf.split('/')[-3:-1]) 2668 mint = re.search(mint_search,logf) 2669 if not mint is None: 2670 channel_name = channel_name+' [step %s]'%mint.group('ID') 2671 2672 for time_stats in re.finditer(timing_stat_finder, log): 2673 try: 2674 stats['timings'][time_stats.group('name')][channel_name]+=\ 2675 safe_float(time_stats.group('time')) 2676 except KeyError: 2677 if time_stats.group('name') not in stats['timings'].keys(): 2678 stats['timings'][time_stats.group('name')] = {} 2679 stats['timings'][time_stats.group('name')][channel_name]=\ 2680 safe_float(time_stats.group('time')) 2681 2682 # useful inline function 2683 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 2684 try: 2685 totTimeList = [(time, chan) for chan, time in \ 2686 stats['timings']['Total'].items()] 2687 except KeyError: 2688 totTimeList = [] 2689 2690 totTimeList.sort() 2691 if len(totTimeList)>0: 2692 debug_msg += '\n\n Inclusive timing profile :' 2693 debug_msg += '\n Overall slowest channel %s (%s)'%\ 2694 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 2695 debug_msg += '\n Average channel running time %s'%\ 2696 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 2697 debug_msg += '\n Aggregated total running time %s'%\ 2698 Tstr(sum([el[0] for el in totTimeList])) 2699 else: 2700 debug_msg += '\n\n Inclusive timing profile non available.' 2701 2702 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 2703 sum(stats['timings'][stat].values()), reverse=True) 2704 for name in sorted_keys: 2705 if name=='Total': 2706 continue 2707 if sum(stats['timings'][name].values())<=0.0: 2708 debug_msg += '\n Zero time record for %s.'%name 2709 continue 2710 try: 2711 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 2712 chan) for chan, time in stats['timings'][name].items()] 2713 except KeyError, ZeroDivisionError: 2714 debug_msg += '\n\n Timing profile for %s unavailable.'%name 2715 continue 2716 TimeList.sort() 2717 debug_msg += '\n Timing profile for <%s> :'%name 2718 try: 2719 debug_msg += '\n Overall fraction of time %.3f %%'%\ 2720 safe_float((100.0*(sum(stats['timings'][name].values())/ 2721 sum(stats['timings']['Total'].values())))) 2722 except KeyError, ZeroDivisionError: 2723 debug_msg += '\n Overall fraction of time unavailable.' 2724 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 2725 (TimeList[-1][0],TimeList[-1][1]) 2726 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 2727 (TimeList[0][0],TimeList[0][1]) 2728 2729 # ============================= 2730 # == log file eror detection == 2731 # ============================= 2732 2733 # Find the number of potential errors found in all log files 2734 # This re is a simple match on a case-insensitve 'error' but there is 2735 # also some veto added for excluding the sentence 2736 # "See Section 6 of paper for error calculation." 2737 # which appear in the header of lhapdf in the logs. 
2738 err_finder = re.compile(\ 2739 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 2740 for log in all_log_files: 2741 logfile=open(log,'r') 2742 nErrors = len(re.findall(err_finder, logfile.read())) 2743 logfile.close() 2744 if nErrors != 0: 2745 stats['Errors'].append((str(log),nErrors)) 2746 2747 nErrors = sum([err[1] for err in stats['Errors']],0) 2748 if nErrors != 0: 2749 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 2750 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 2751 'found in the following log file%s:'%('s' if \ 2752 len(stats['Errors'])>1 else '') 2753 for error in stats['Errors'][:3]: 2754 log_name = '/'.join(error[0].split('/')[-5:]) 2755 debug_msg += '\n > %d error%s in %s'%\ 2756 (error[1],'s' if error[1]>1 else '',log_name) 2757 if len(stats['Errors'])>3: 2758 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 2759 nRemainingLogs = len(stats['Errors'])-3 2760 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 2761 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 2762 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 2763 2764 return message, debug_msg 2765 2766
    def reweight_and_collect_events(self, options, mode, nevents, event_norm):
2768 """this function calls the reweighting routines and creates the event file in the 2769 Event dir. Return the name of the event file created 2770 """ 2771 scale_pdf_info=[] 2772 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 2773 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 2774 scale_pdf_info = self.run_reweight(options['reweightonly']) 2775 self.update_status('Collecting events', level='parton', update_results=True) 2776 misc.compile(['collect_events'], 2777 cwd=pjoin(self.me_dir, 'SubProcesses')) 2778 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'), 2779 stdin=subprocess.PIPE, 2780 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w')) 2781 if event_norm.lower() == 'sum': 2782 p.communicate(input = '1\n') 2783 elif event_norm.lower() == 'unity': 2784 p.communicate(input = '3\n') 2785 else: 2786 p.communicate(input = '2\n') 2787 2788 #get filename from collect events 2789 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1] 2790 2791 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)): 2792 raise aMCatNLOError('An error occurred during event generation. ' + \ 2793 'The event file has not been created. Check collect_events.log') 2794 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 2795 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file) 2796 if not options['reweightonly']: 2797 self.print_summary(options, 2, mode, scale_pdf_info) 2798 res_files=glob.glob(pjoin(self.me_dir, 'SubProcesses', 'res*.txt')) 2799 for res_file in res_files: 2800 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2801 2802 logger.info('The %s file has been generated.\n' % (evt_file)) 2803 self.results.add_detail('nb_event', nevents) 2804 self.update_status('Events generated', level='parton', update_results=True) 2805 return evt_file[:-3]
2806 2807
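    # The stdin replies sent to collect_events above map the run_card
    # 'event_norm' setting onto the executable's normalisation menu:
    # 'sum' -> '1', 'unity' -> '3', and any other value (typically
    # 'average') -> '2'.
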
    def run_mcatnlo(self, evt_file):
2809 """runs mcatnlo on the generated event file, to produce showered-events 2810 """ 2811 logger.info('Preparing MCatNLO run') 2812 try: 2813 misc.gunzip(evt_file) 2814 except Exception: 2815 pass 2816 2817 self.banner = banner_mod.Banner(evt_file) 2818 shower = self.banner.get_detail('run_card', 'parton_shower').upper() 2819 2820 #check that the number of split event files divides the number of 2821 # events, otherwise set it to 1 2822 if int(self.banner.get_detail('run_card', 'nevents') / \ 2823 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \ 2824 != self.banner.get_detail('run_card', 'nevents'): 2825 logger.warning(\ 2826 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \ 2827 'Setting it to 1.') 2828 self.shower_card['nsplit_jobs'] = 1 2829 2830 # don't split jobs if the user asks to shower only a part of the events 2831 if self.shower_card['nevents'] > 0 and \ 2832 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \ 2833 self.shower_card['nsplit_jobs'] != 1: 2834 logger.warning(\ 2835 'Only a part of the events will be showered.\n' + \ 2836 'Setting nsplit_jobs in the shower_card to 1.') 2837 self.shower_card['nsplit_jobs'] = 1 2838 2839 self.banner_to_mcatnlo(evt_file) 2840 2841 # if fastjet has to be linked (in extralibs) then 2842 # add lib /include dirs for fastjet if fastjet-config is present on the 2843 # system, otherwise add fjcore to the files to combine 2844 if 'fastjet' in self.shower_card['extralibs']: 2845 #first, check that stdc++ is also linked 2846 if not 'stdc++' in self.shower_card['extralibs']: 2847 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS') 2848 self.shower_card['extralibs'] += ' stdc++' 2849 # then check if options[fastjet] corresponds to a valid fj installation 2850 try: 2851 #this is for a complete fj installation 2852 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \ 2853 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 2854 output, error = p.communicate() 2855 #remove the line break from output (last character) 2856 output = output[:-1] 2857 # add lib/include paths 2858 if not pjoin(output, 'lib') in self.shower_card['extrapaths']: 2859 logger.warning('Linking FastJet: updating EXTRAPATHS') 2860 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib') 2861 if not pjoin(output, 'include') in self.shower_card['includepaths']: 2862 logger.warning('Linking FastJet: updating INCLUDEPATHS') 2863 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include') 2864 # to be changed in the fortran wrapper 2865 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ' 2866 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ' 2867 except Exception: 2868 logger.warning('Linking FastJet: using fjcore') 2869 # this is for FJcore, so no FJ library has to be linked 2870 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '') 2871 if not 'fjcore.o' in self.shower_card['analyse']: 2872 self.shower_card['analyse'] += ' fjcore.o' 2873 # to be changed in the fortran wrapper 2874 include_line = '#include "fjcore.hh"//INCLUDE_FJ' 2875 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ' 2876 # change the fortran wrapper with the correct namespaces/include 2877 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n') 2878 for line in fjwrapper_lines: 2879 if '//INCLUDE_FJ' in line: 2880 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line 2881 if '//NAMESPACE_FJ' in 
line:
2882                  fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
2883          open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w').write(\
2884              '\n'.join(fjwrapper_lines) + '\n')
2885
2886          extrapaths = self.shower_card['extrapaths'].split()
2887
2888          # check that the paths needed by HW++ and PY8 are set if one of these showers is used
2889          if shower in ['HERWIGPP', 'PYTHIA8']:
2890              path_dict = {'HERWIGPP': ['hepmc_path',
2891                                        'thepeg_path',
2892                                        'hwpp_path'],
2893                           'PYTHIA8': ['pythia8_path']}
2894
2895              if not all([self.options[ppath] for ppath in path_dict[shower]]):
2896                  raise aMCatNLOError('Some paths are missing in the configuration file.\n' + \
2897                          ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))
2898
2899          if shower == 'HERWIGPP':
2900              extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
2901
2902          if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
2903              extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))
2904
2905          if 'LD_LIBRARY_PATH' in os.environ.keys():
2906              ldlibrarypath = os.environ['LD_LIBRARY_PATH']
2907          else:
2908              ldlibrarypath = ''
2909          ldlibrarypath += ':' + ':'.join(extrapaths)
2910          os.putenv('LD_LIBRARY_PATH', ldlibrarypath)
2911
2912          shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
2913          self.shower_card.write_card(shower, shower_card_path)
2914
2915          # overwrite if shower_card_set.dat exists in MCatNLO
2916          if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
2917              files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
2918                       pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))
2919
2920          mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
2921          self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')
2922
2923
2924          # libdl may be needed for pythia 8.2xx
2925          if shower == 'PYTHIA8' and not \
2926              os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \
2927              'dl' not in self.shower_card['extralibs'].split():
2928              # 'dl' has to be linked with the extralibs
2929              self.shower_card['extralibs'] += ' dl'
2930              logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \
2931                             "It is needed for the correct running of PY8.2xx.\n" + \
2932                             "If this library cannot be found on your system, a crash will occur.")
2933
2934          misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
2935                    stderr=open(mcatnlo_log, 'w'),
2936                    cwd=pjoin(self.me_dir, 'MCatNLO'))
2937
2938          exe = 'MCATNLO_%s_EXE' % shower
2939          if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
2940             not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
2941              print open(mcatnlo_log).read()
2942              raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
2943          logger.info(' ... 
done') 2944 2945 # create an empty dir where to run 2946 count = 1 2947 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 2948 (shower, count))): 2949 count += 1 2950 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 2951 (shower, count)) 2952 os.mkdir(rundir) 2953 files.cp(shower_card_path, rundir) 2954 2955 #look for the event files (don't resplit if one asks for the 2956 # same number of event files as in the previous run) 2957 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 2958 'events_*.lhe')) 2959 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']: 2960 logger.info('Cleaning old files and splitting the event file...') 2961 #clean the old files 2962 files.rm([f for f in event_files if 'events.lhe' not in f]) 2963 if self.shower_card['nsplit_jobs'] > 1: 2964 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities')) 2965 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')], 2966 stdin=subprocess.PIPE, 2967 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'), 2968 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 2969 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs']) 2970 logger.info('Splitting done.') 2971 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 2972 'events_*.lhe')) 2973 2974 event_files.sort() 2975 2976 self.update_status('Showering events...', level='shower') 2977 logger.info('(Running in %s)' % rundir) 2978 if shower != 'PYTHIA8': 2979 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir) 2980 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir) 2981 else: 2982 # special treatment for pythia8 2983 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir) 2984 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir) 2985 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx 2986 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir) 2987 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir) 2988 else: # this is PY8.2xxx 2989 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir) 2990 #link the hwpp exe in the rundir 2991 if shower == 'HERWIGPP': 2992 try: 2993 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir) 2994 except Exception: 2995 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.') 2996 2997 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')): 2998 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir) 2999 3000 files.ln(evt_file, rundir, 'events.lhe') 3001 for i, f in enumerate(event_files): 3002 files.ln(f, rundir,'events_%d.lhe' % (i + 1)) 3003 3004 if not self.shower_card['analyse']: 3005 # an hep/hepmc file as output 3006 out_id = 'HEP' 3007 else: 3008 # one or more .top file(s) as output 3009 if "HwU" in self.shower_card['analyse']: 3010 out_id = 'HWU' 3011 else: 3012 out_id = 'TOP' 3013 3014 # write the executable 3015 open(pjoin(rundir, 'shower.sh'), 'w').write(\ 3016 open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \ 3017 % {'extralibs': ':'.join(extrapaths)}) 3018 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')]) 3019 3020 if event_files: 3021 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \ 3022 for i in range(len(event_files))] 3023 else: 3024 arg_list = [[shower, out_id, self.run_name]] 3025 3026 self.run_all({rundir: 'shower.sh'}, 
arg_list, 'shower')
3027          self.njobs = 1
3028          self.wait_for_complete('shower')
3029
3030          # now collect the results
3031          message = ''
3032          warning = ''
3033          to_gzip = [evt_file]
3034          if out_id == 'HEP':
3035              #copy the showered stdhep/hepmc file back in events
3036              if shower in ['PYTHIA8', 'HERWIGPP']:
3037                  hep_format = 'HEPMC'
3038                  ext = 'hepmc'
3039              else:
3040                  hep_format = 'StdHEP'
3041                  ext = 'hep'
3042
3043              hep_file = '%s_%s_0.%s.gz' % \
3044                      (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
3045              count = 0
3046
3047              # find the first available name for the output:
3048              # check existing results with or without event splitting
3049              while os.path.exists(hep_file) or \
3050                    os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)):
3051                  count += 1
3052                  hep_file = '%s_%s_%d.%s.gz' % \
3053                          (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)
3054
3055              try:
3056                  if self.shower_card['nsplit_jobs'] == 1:
3057                      files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
3058                      message = ('The file %s has been generated. \nIt contains showered' + \
3059                                 ' and hadronized events in the %s format obtained' + \
3060                                 ' showering the parton-level event file %s.gz with %s') % \
3061                                 (hep_file, hep_format, evt_file, shower)
3062                  else:
3063                      hep_list = []
3064                      for i in range(self.shower_card['nsplit_jobs']):
3065                          hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
3066                          files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
3067                      message = ('The following files have been generated:\n   %s\nThey contain showered' + \
3068                                 ' and hadronized events in the %s format obtained' + \
3069                                 ' showering the (split) parton-level event file %s.gz with %s') % \
3070                                 ('\n   '.join(hep_list), hep_format, evt_file, shower)
3071
3072              except (OSError, IOError):
3073                  raise aMCatNLOError('No file has been generated, an error occurred.'+\
3074                          ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))
3075
3076              # run the plot creation in a secure way
3077              if hep_format == 'StdHEP':
3078                  try:
3079                      self.do_plot('%s -f' % self.run_name)
3080                  except Exception, error:
3081                      logger.info("Failed to make the plot. Continuing...")
3082                      pass
3083
3084          elif out_id == 'TOP' or out_id == 'HWU':
3085              #copy the topdrawer or HwU file(s) back in events
3086              if out_id=='TOP':
3087                  ext='top'
3088              elif out_id=='HWU':
3089                  ext='HwU'
3090              topfiles = []
3091              top_tars = [tarfile.TarFile(f) for f in glob.glob(pjoin(rundir, 'histfile*.tar'))]
3092              for top_tar in top_tars:
3093                  topfiles.extend(top_tar.getnames())
3094
3095              # safety check
3096              if len(top_tars) != self.shower_card['nsplit_jobs']:
3097                  raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
3098                                       (self.shower_card['nsplit_jobs'], len(top_tars)))
3099
3100              # find the first available name for the output:
3101              # check existing results with or without event splitting
3102              filename = 'plot_%s_%d_' % (shower, 1)
3103              count = 1
3104              while os.path.exists(pjoin(self.me_dir, 'Events',
3105                        self.run_name, '%s0.%s' % (filename,ext))) or \
3106                    os.path.exists(pjoin(self.me_dir, 'Events',
3107                        self.run_name, '%s0__1.%s' % (filename,ext))):
3108                  count += 1
3109                  filename = 'plot_%s_%d_' % (shower, count)
3110
3111              if out_id=='TOP':
3112                  hist_format='TopDrawer format'
3113              elif out_id=='HWU':
3114                  hist_format='HwU and GnuPlot formats'
3115
3116              if not topfiles:
3117                  # if no topfiles are found just warn the user
3118                  warning = 'No .top file has been generated. 
For the results of your ' +\ 3119 'run, please check inside %s' % rundir 3120 elif self.shower_card['nsplit_jobs'] == 1: 3121 # only one job for the shower 3122 top_tars[0].extractall(path = rundir) 3123 plotfiles = [] 3124 for i, file in enumerate(topfiles): 3125 if out_id=='TOP': 3126 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3127 '%s%d.top' % (filename, i)) 3128 files.mv(pjoin(rundir, file), plotfile) 3129 elif out_id=='HWU': 3130 out=pjoin(self.me_dir,'Events', 3131 self.run_name,'%s%d'% (filename,i)) 3132 histos=[{'dirname':pjoin(rundir,file)}] 3133 self.combine_plots_HwU(histos,out) 3134 try: 3135 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\ 3136 stdout=os.open(os.devnull, os.O_RDWR),\ 3137 stderr=os.open(os.devnull, os.O_RDWR),\ 3138 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3139 except Exception: 3140 pass 3141 plotfile=pjoin(self.me_dir,'Events',self.run_name, 3142 '%s%d.HwU'% (filename,i)) 3143 plotfiles.append(plotfile) 3144 3145 ffiles = 'files' 3146 have = 'have' 3147 if len(plotfiles) == 1: 3148 ffiles = 'file' 3149 have = 'has' 3150 3151 message = ('The %s %s %s been generated, with histograms in the' + \ 3152 ' %s, obtained by showering the parton-level' + \ 3153 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \ 3154 hist_format, evt_file, shower) 3155 else: 3156 # many jobs for the shower have been run 3157 topfiles_set = set(topfiles) 3158 plotfiles = [] 3159 for j, top_tar in enumerate(top_tars): 3160 top_tar.extractall(path = rundir) 3161 for i, file in enumerate(topfiles_set): 3162 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3163 '%s%d__%d.%s' % (filename, i, j + 1,ext)) 3164 files.mv(pjoin(rundir, file), plotfile) 3165 plotfiles.append(plotfile) 3166 3167 # check if the user asked to combine the .top into a single file 3168 if self.shower_card['combine_td']: 3169 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities')) 3170 3171 if self.banner.get('run_card', 'event_norm').lower() == 'sum': 3172 norm = 1. 
3173 elif self.banner.get('run_card', 'event_norm').lower() == 'average': 3174 norm = 1./float(self.shower_card['nsplit_jobs']) 3175 3176 plotfiles2 = [] 3177 for i, file in enumerate(topfiles_set): 3178 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \ 3179 for j in range(self.shower_card['nsplit_jobs'])] 3180 if out_id=='TOP': 3181 infile="%d\n%s\n%s\n" % \ 3182 (self.shower_card['nsplit_jobs'], 3183 '\n'.join(filelist), 3184 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs'])) 3185 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')], 3186 stdin=subprocess.PIPE, 3187 stdout=os.open(os.devnull, os.O_RDWR), 3188 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3189 p.communicate(input = infile) 3190 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'), 3191 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 3192 elif out_id=='HWU': 3193 out=pjoin(self.me_dir,'Events', 3194 self.run_name,'%s%d'% (filename,i)) 3195 histos=[] 3196 norms=[] 3197 for plotfile in plotfiles: 3198 histos.append({'dirname':plotfile}) 3199 norms.append(norm) 3200 self.combine_plots_HwU(histos,out,normalisation=norms) 3201 try: 3202 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\ 3203 stdout=os.open(os.devnull, os.O_RDWR),\ 3204 stderr=os.open(os.devnull, os.O_RDWR),\ 3205 cwd=pjoin(self.me_dir, 'Events',self.run_name)) 3206 except Exception: 3207 pass 3208 3209 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext))) 3210 tar = tarfile.open( 3211 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz') 3212 for f in filelist: 3213 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f) 3214 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist]) 3215 3216 tar.close() 3217 3218 ffiles = 'files' 3219 have = 'have' 3220 if len(plotfiles2) == 1: 3221 ffiles = 'file' 3222 have = 'has' 3223 3224 message = ('The %s %s %s been generated, with histograms in the' + \ 3225 ' %s, obtained by showering the parton-level' + \ 3226 ' file %s.gz with %s.\n' + \ 3227 'The files from the different shower ' + \ 3228 'jobs (before combining them) can be found inside %s.') % \ 3229 (ffiles, ', '.join(plotfiles2), have, hist_format,\ 3230 evt_file, shower, 3231 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2])) 3232 3233 else: 3234 message = ('The following files have been generated:\n %s\n' + \ 3235 'They contain histograms in the' + \ 3236 ' %s, obtained by showering the parton-level' + \ 3237 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \ 3238 hist_format, evt_file, shower) 3239 3240 # Now arxiv the shower card used if RunMaterial is present 3241 run_dir_path = pjoin(rundir, self.run_name) 3242 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')): 3243 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path) 3244 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'), 3245 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\ 3246 %(shower, count))) 3247 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], 3248 cwd=run_dir_path) 3249 shutil.rmtree(pjoin(run_dir_path,'RunMaterial')) 3250 # end of the run, gzip files and print out the message/warning 3251 for f in to_gzip: 3252 misc.gzip(f) 3253 if message: 3254 logger.info(message) 3255 if warning: 3256 logger.warning(warning) 3257 3258 self.update_status('Run complete', level='shower', update_results=True)
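The shower driver above repeatedly uses the same first-free-name scan, both for the RUN_<shower>_<n> run directories and for the events_<shower>_<n>.<ext>.gz output files. A minimal standalone sketch of that pattern, with a hypothetical template name:

import os

def first_free_name(template, start):
    # increment the counter until the candidate path does not exist yet,
    # mirroring the while-loops used for rundir and hep_file above
    n = start
    while os.path.exists(template % n):
        n += 1
    return template % n

# e.g. first_free_name('RUN_PYTHIA8_%d', 1) gives 'RUN_PYTHIA8_1' in a fresh
# MCatNLO directory, and 'RUN_PYTHIA8_2' if the first run already exists.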
3259 3260 3261 ############################################################################
3262 - def set_run_name(self, name, tag=None, level='parton', reload_card=False):
3263 """define the run name, the run_tag, the banner and the results.""" 3264 3265 # when are we force to change the tag new_run:previous run requiring changes 3266 upgrade_tag = {'parton': ['parton','pythia','pgs','delphes','shower'], 3267 'pythia': ['pythia','pgs','delphes'], 3268 'shower': ['shower'], 3269 'pgs': ['pgs'], 3270 'delphes':['delphes'], 3271 'plot':[]} 3272 3273 3274 3275 if name == self.run_name: 3276 if reload_card: 3277 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 3278 self.run_card = banner_mod.RunCardNLO(run_card) 3279 3280 #check if we need to change the tag 3281 if tag: 3282 self.run_card['run_tag'] = tag 3283 self.run_tag = tag 3284 self.results.add_run(self.run_name, self.run_card) 3285 else: 3286 for tag in upgrade_tag[level]: 3287 if getattr(self.results[self.run_name][-1], tag): 3288 tag = self.get_available_tag() 3289 self.run_card['run_tag'] = tag 3290 self.run_tag = tag 3291 self.results.add_run(self.run_name, self.run_card) 3292 break 3293 return # Nothing to do anymore 3294 3295 # save/clean previous run 3296 if self.run_name: 3297 self.store_result() 3298 # store new name 3299 self.run_name = name 3300 3301 # Read run_card 3302 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 3303 self.run_card = banner_mod.RunCardNLO(run_card) 3304 3305 new_tag = False 3306 # First call for this run -> set the banner 3307 self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag) 3308 if tag: 3309 self.run_card['run_tag'] = tag 3310 new_tag = True 3311 elif not self.run_name in self.results and level =='parton': 3312 pass # No results yet, so current tag is fine 3313 elif not self.run_name in self.results: 3314 #This is only for case when you want to trick the interface 3315 logger.warning('Trying to run data on unknown run.') 3316 self.results.add_run(name, self.run_card) 3317 self.results.update('add run %s' % name, 'all', makehtml=True) 3318 else: 3319 for tag in upgrade_tag[level]: 3320 3321 if getattr(self.results[self.run_name][-1], tag): 3322 # LEVEL is already define in the last tag -> need to switch tag 3323 tag = self.get_available_tag() 3324 self.run_card['run_tag'] = tag 3325 new_tag = True 3326 break 3327 if not new_tag: 3328 # We can add the results to the current run 3329 tag = self.results[self.run_name][-1]['tag'] 3330 self.run_card['run_tag'] = tag # ensure that run_tag is correct 3331 3332 3333 if name in self.results and not new_tag: 3334 self.results.def_current(self.run_name) 3335 else: 3336 self.results.add_run(self.run_name, self.run_card) 3337 3338 self.run_tag = self.run_card['run_tag'] 3339 3340 # Return the tag of the previous run having the required data for this 3341 # tag/run to working wel. 3342 if level == 'parton': 3343 return 3344 elif level == 'pythia': 3345 return self.results[self.run_name][0]['tag'] 3346 else: 3347 for i in range(-1,-len(self.results[self.run_name])-1,-1): 3348 tagRun = self.results[self.run_name][i] 3349 if tagRun.pythia: 3350 return tagRun['tag']
3351 3352
3353 - def store_result(self):
3354 """ tar the pythia results. This is done when we are quite sure that 3355 the pythia output will not be use anymore """ 3356 3357 if not self.run_name: 3358 return 3359 3360 self.results.save() 3361 3362 if not self.to_store: 3363 return 3364 3365 if 'event' in self.to_store: 3366 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')): 3367 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')): 3368 self.update_status('gzipping output file: events.lhe', level='parton', error=True) 3369 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 3370 else: 3371 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 3372 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')): 3373 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe')) 3374 3375 3376 tag = self.run_card['run_tag'] 3377 3378 self.to_store = []
3379 3380
3381 - def get_init_dict(self, evt_file):
3382 """reads the info in the init block and returns them in a dictionary""" 3383 ev_file = open(evt_file) 3384 init = "" 3385 found = False 3386 while True: 3387 line = ev_file.readline() 3388 if "<init>" in line: 3389 found = True 3390 elif found and not line.startswith('#'): 3391 init += line 3392 if "</init>" in line or "<event>" in line: 3393 break 3394 ev_file.close() 3395 3396 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 3397 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 3398 # these are not included (so far) in the init_dict 3399 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 3400 3401 init_dict = {} 3402 init_dict['idbmup1'] = int(init.split()[0]) 3403 init_dict['idbmup2'] = int(init.split()[1]) 3404 init_dict['ebmup1'] = float(init.split()[2]) 3405 init_dict['ebmup2'] = float(init.split()[3]) 3406 init_dict['pdfgup1'] = int(init.split()[4]) 3407 init_dict['pdfgup2'] = int(init.split()[5]) 3408 init_dict['pdfsup1'] = int(init.split()[6]) 3409 init_dict['pdfsup2'] = int(init.split()[7]) 3410 init_dict['idwtup'] = int(init.split()[8]) 3411 init_dict['nprup'] = int(init.split()[9]) 3412 3413 return init_dict
3414 3415
3416 - def banner_to_mcatnlo(self, evt_file):
3417 """creates the mcatnlo input script using the values set in the header of the event_file. 3418 It also checks if the lhapdf library is used""" 3419 shower = self.banner.get('run_card', 'parton_shower').upper() 3420 pdlabel = self.banner.get('run_card', 'pdlabel') 3421 itry = 0 3422 nevents = self.shower_card['nevents'] 3423 init_dict = self.get_init_dict(evt_file) 3424 3425 if nevents < 0 or \ 3426 nevents > self.banner.get_detail('run_card', 'nevents'): 3427 nevents = self.banner.get_detail('run_card', 'nevents') 3428 3429 nevents = nevents / self.shower_card['nsplit_jobs'] 3430 3431 mcmass_dict = {} 3432 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]: 3433 pdg = int(line.split()[0]) 3434 mass = float(line.split()[1]) 3435 mcmass_dict[pdg] = mass 3436 3437 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1]) 3438 content += 'NEVENTS=%d\n' % nevents 3439 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\ 3440 self.shower_card['nsplit_jobs']) 3441 content += 'MCMODE=%s\n' % shower 3442 content += 'PDLABEL=%s\n' % pdlabel 3443 content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value 3444 #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid') 3445 #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3446 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value 3447 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value 3448 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value 3449 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value 3450 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value 3451 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value 3452 try: 3453 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value 3454 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value 3455 except KeyError: 3456 content += 'HGGMASS=120.\n' 3457 content += 'HGGWIDTH=0.00575308848\n' 3458 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1') 3459 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2') 3460 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1') 3461 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2') 3462 content += 'DMASS=%s\n' % mcmass_dict[1] 3463 content += 'UMASS=%s\n' % mcmass_dict[2] 3464 content += 'SMASS=%s\n' % mcmass_dict[3] 3465 content += 'CMASS=%s\n' % mcmass_dict[4] 3466 content += 'BMASS=%s\n' % mcmass_dict[5] 3467 try: 3468 content += 'EMASS=%s\n' % mcmass_dict[11] 3469 content += 'MUMASS=%s\n' % mcmass_dict[13] 3470 content += 'TAUMASS=%s\n' % mcmass_dict[15] 3471 except KeyError: 3472 # this is for backward compatibility 3473 mcmass_lines = [l for l in \ 3474 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper()) 3475 ).read().split('\n') if l] 3476 new_mcmass_dict = {} 3477 for l in mcmass_lines: 3478 key, val = l.split('=') 3479 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip() 3480 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)'] 3481 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)'] 3482 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)'] 3483 3484 content += 'GMASS=%s\n' % mcmass_dict[21] 3485 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower() 3486 # 
check if we need to link lhapdf
3487          if int(self.shower_card['pdfcode']) > 1 or \
3488              (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1):
3489              # Use LHAPDF (should be correctly installed, because
3490              # either events were already generated with them, or the
3491              # user explicitly gives an LHAPDF number in the
3492              # shower_card).
3493              self.link_lhapdf(pjoin(self.me_dir, 'lib'))
3494              lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
3495                                            stdout = subprocess.PIPE).stdout.read().strip()
3496              content += 'LHAPDFPATH=%s\n' % lhapdfpath
3497              pdfsetsdir = self.get_lhapdf_pdfsetsdir()
3498              if self.shower_card['pdfcode']==1:
3499                  lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
3500                  content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
3501              else:
3502                  lhaid_list = [abs(int(self.shower_card['pdfcode']))]
3503                  content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
3504              self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
3505          elif int(self.shower_card['pdfcode'])==1:
3506              # Try to use LHAPDF because the user wants to use the same PDF
3507              # as was used for the event generation. However, for the
3508              # event generation LHAPDF was not used, so it is non-trivial to
3509              # see if LHAPDF is available with the corresponding PDF
3510              # set. If not found, give a warning and use the built-in PDF
3511              # set instead.
3512              try:
3513                  lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
3514                                                stdout = subprocess.PIPE).stdout.read().strip()
3515                  self.link_lhapdf(pjoin(self.me_dir, 'lib'))
3516                  content += 'LHAPDFPATH=%s\n' % lhapdfpath
3517                  pdfsetsdir = self.get_lhapdf_pdfsetsdir()
3518                  lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
3519                  content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
3520                  self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
3521              except Exception:
3522                  logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
3523                                 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
3524                                 ' needed PDF set. Will use the default internal PDF for the shower instead. To use the'+\
3525                                 ' same set as was used in the event generation, install LHAPDF and set the path using'+\
3526                                 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
3527                  content += 'LHAPDFPATH=\n'
3528                  content += 'PDFCODE=0\n'
3529          else:
3530              content += 'LHAPDFPATH=\n'
3531              content += 'PDFCODE=0\n'
3532
3533          content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
3534          content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
3535          # add the pythia8/hwpp path(s)
3536          if self.options['pythia8_path']:
3537              content+='PY8PATH=%s\n' % self.options['pythia8_path']
3538          if self.options['hwpp_path']:
3539              content+='HWPPPATH=%s\n' % self.options['hwpp_path']
3540          if self.options['thepeg_path']:
3541              content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
3542          if self.options['hepmc_path']:
3543              content+='HEPMCPATH=%s\n' % self.options['hepmc_path']
3544
3545          output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
3546          output.write(content)
3547          output.close()
3548          return shower
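The resulting MCatNLO/banner.dat is a flat key=value script sourced by the shower driver. An abridged example of what it might contain (all values illustrative, not taken from a real run):

EVPREFIX=events.lhe
NEVENTS=2500
NEVENTS_TOT=2500
MCMODE=HERWIG6
PDLABEL=nn23nlo
TMASS=173.0
ZMASS=91.188
WMASS=80.419
BEAM1=1
BEAM2=1
EVENT_NORM=average
LHAPDFPATH=
PDFCODE=0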
3549 3550
3551 - def run_reweight(self, only):
3552 """runs the reweight_xsec_events executables on each sub-event file generated 3553 to compute on the fly scale and/or PDF uncertainities""" 3554 logger.info(' Doing reweight') 3555 3556 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted') 3557 # if only doing reweight, copy back the nevents_unweighted file 3558 if only: 3559 if os.path.exists(nev_unw + '.orig'): 3560 files.cp(nev_unw + '.orig', nev_unw) 3561 else: 3562 raise aMCatNLOError('Cannot find event file information') 3563 3564 #read the nevents_unweighted file to get the list of event files 3565 file = open(nev_unw) 3566 lines = file.read().split('\n') 3567 file.close() 3568 # make copy of the original nevent_unweighted file 3569 files.cp(nev_unw, nev_unw + '.orig') 3570 # loop over lines (all but the last one whith is empty) and check that the 3571 # number of events is not 0 3572 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0'] 3573 #prepare the job_dict 3574 job_dict = {} 3575 exe = 'reweight_xsec_events.local' 3576 for i, evt_file in enumerate(evt_files): 3577 path, evt = os.path.split(evt_file) 3578 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \ 3579 pjoin(self.me_dir, 'SubProcesses', path)) 3580 job_dict[path] = [exe] 3581 3582 self.run_all(job_dict, [[evt, '1']], 'Running reweight') 3583 3584 #check that the new event files are complete 3585 for evt_file in evt_files: 3586 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \ 3587 pjoin(self.me_dir, 'SubProcesses', evt_file)], \ 3588 stdout = subprocess.PIPE).stdout.read().strip() 3589 if last_line != "</LesHouchesEvents>": 3590 raise aMCatNLOError('An error occurred during reweight. Check the' + \ 3591 '\'reweight_xsec_events.output\' files inside the ' + \ 3592 '\'SubProcesses/P*/G*/ directories for details') 3593 3594 #update file name in nevents_unweighted 3595 newfile = open(nev_unw, 'w') 3596 for line in lines: 3597 if line: 3598 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n') 3599 newfile.close() 3600 3601 return self.pdf_scale_from_reweighting(evt_files)
3602
3603 - def pdf_scale_from_reweighting(self, evt_files):
3604 """This function takes the files with the scale and pdf values 3605 written by the reweight_xsec_events.f code 3606 (P*/G*/pdf_scale_dependence.dat) and computes the overall 3607 scale and PDF uncertainty (the latter is computed using the 3608 Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000)) 3609 and returns it in percents. The expected format of the file 3610 is: n_scales xsec_scale_central xsec_scale1 ... n_pdf 3611 xsec_pdf0 xsec_pdf1 ....""" 3612 scales=[] 3613 pdfs=[] 3614 for evt_file in evt_files: 3615 path, evt=os.path.split(evt_file) 3616 with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f: 3617 data_line=f.readline() 3618 if "scale variations:" in data_line: 3619 for i,scale in enumerate(self.run_card['dynamical_scale_choice']): 3620 data_line = f.readline().split() 3621 scales_this = [float(val) for val in f.readline().replace("D", "E").split()] 3622 try: 3623 scales[i] = [a + b for a, b in zip(scales[i], scales_this)] 3624 except IndexError: 3625 scales+=[scales_this] 3626 data_line=f.readline() 3627 if "pdf variations:" in data_line: 3628 for i,pdf in enumerate(self.run_card['lhaid']): 3629 data_line = f.readline().split() 3630 pdfs_this = [float(val) for val in f.readline().replace("D", "E").split()] 3631 try: 3632 pdfs[i] = [a + b for a, b in zip(pdfs[i], pdfs_this)] 3633 except IndexError: 3634 pdfs+=[pdfs_this] 3635 3636 # get the scale uncertainty in percent 3637 scale_info=[] 3638 for j,scale in enumerate(scales): 3639 s_cen=scale[0] 3640 if s_cen != 0.0 and self.run_card['reweight_scale'][j]: 3641 # max and min of the full envelope 3642 s_max=(max(scale)/s_cen-1)*100 3643 s_min=(1-min(scale)/s_cen)*100 3644 # ren and fac scale dependence added in quadrature 3645 ren_var=[] 3646 fac_var=[] 3647 for i in range(len(self.run_card['rw_rscale'])): 3648 ren_var.append(scale[i]-s_cen) # central fac scale 3649 for i in range(len(self.run_card['rw_fscale'])): 3650 fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen) # central ren scale 3651 s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100 3652 s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100 3653 s_size=len(scale) 3654 else: 3655 s_max=0.0 3656 s_min=0.0 3657 s_max_q=0.0 3658 s_min_q=0.0 3659 s_size=len(scale) 3660 scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \ 3661 'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \ 3662 'label':self.run_card['dynamical_scale_choice'][j], \ 3663 'unc':self.run_card['reweight_scale'][j]}) 3664 3665 # check if we can use LHAPDF to compute the PDF uncertainty 3666 if any(self.run_card['reweight_pdf']): 3667 use_lhapdf=False 3668 lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\ 3669 stdout=subprocess.PIPE).stdout.read().strip() 3670 3671 try: 3672 candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \ 3673 if os.path.isdir(pjoin(lhapdf_libdir,dirname))] 3674 except OSError: 3675 candidates=[] 3676 for candidate in candidates: 3677 if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')): 3678 sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages')) 3679 try: 3680 import lhapdf 3681 use_lhapdf=True 3682 break 3683 except ImportError: 3684 sys.path.pop(0) 3685 continue 3686 3687 if not use_lhapdf: 3688 try: 3689 candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \ 3690 if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))] 3691 except OSError: 3692 candidates=[] 3693 for 
candidate in candidates: 3694 if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')): 3695 sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages')) 3696 try: 3697 import lhapdf 3698 use_lhapdf=True 3699 break 3700 except ImportError: 3701 sys.path.pop(0) 3702 continue 3703 3704 if not use_lhapdf: 3705 try: 3706 import lhapdf 3707 use_lhapdf=True 3708 except ImportError: 3709 logger.warning("Failed to access python version of LHAPDF: "\ 3710 "cannot compute PDF uncertainty from the "\ 3711 "weights in the events. The weights in the LHE " \ 3712 "event files will still cover all PDF set members, "\ 3713 "but there will be no PDF uncertainty printed in the run summary. \n "\ 3714 "If the python interface to LHAPDF is available on your system, try "\ 3715 "adding its location to the PYTHONPATH environment variable and the"\ 3716 "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).") 3717 use_lhapdf=False 3718 3719 # turn off lhapdf printing any messages 3720 if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0) 3721 3722 pdf_info=[] 3723 for j,pdfset in enumerate(pdfs): 3724 p_cen=pdfset[0] 3725 if p_cen != 0.0 and self.run_card['reweight_pdf'][j]: 3726 if use_lhapdf: 3727 pdfsetname=self.run_card['lhapdfsetname'][j] 3728 try: 3729 p=lhapdf.getPDFSet(pdfsetname) 3730 ep=p.uncertainty(pdfset,-1) 3731 p_cen=ep.central 3732 p_min=abs(ep.errminus/p_cen)*100 3733 p_max=abs(ep.errplus/p_cen)*100 3734 p_type=p.errorType 3735 p_size=p.size 3736 p_conf=p.errorConfLevel 3737 except: 3738 logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname) 3739 p_min=0.0 3740 p_max=0.0 3741 p_type='unknown' 3742 p_conf='unknown' 3743 p_size=len(pdfset) 3744 else: 3745 p_min=0.0 3746 p_max=0.0 3747 p_type='unknown' 3748 p_conf='unknown' 3749 p_size=len(pdfset) 3750 pdfsetname=self.run_card['lhaid'][j] 3751 else: 3752 p_min=0.0 3753 p_max=0.0 3754 p_type='none' 3755 p_conf='unknown' 3756 p_size=len(pdfset) 3757 pdfsetname=self.run_card['lhaid'][j] 3758 pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \ 3759 'unc':p_type, 'name':pdfsetname, 'size':p_size, \ 3760 'label':self.run_card['lhaid'][j], 'conf':p_conf}) 3761 3762 scale_pdf_info=[scale_info,pdf_info] 3763 return scale_pdf_info
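The scale envelope is simply the spread of the reweighted cross sections around the central one, quoted in percent. A worked example of the formulas s_max=(max/cen-1)*100 and s_min=(1-min/cen)*100 with made-up numbers:

xsec = [10.0, 10.8, 9.5]              # central value first, then variations (made-up, in pb)
cen = xsec[0]
s_max = (max(xsec) / cen - 1) * 100   # about +8 -> upper edge of the envelope, in percent
s_min = (1 - min(xsec) / cen) * 100   # about +5 -> lower edge of the envelope, in percent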
3764 3765
3766 - def wait_for_complete(self, run_type):
3767 """this function waits for jobs on cluster to complete their run.""" 3768 starttime = time.time() 3769 #logger.info(' Waiting for submitted jobs to complete') 3770 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 3771 starttime=starttime, level='parton', update_results=True) 3772 try: 3773 self.cluster.wait(self.me_dir, update_status) 3774 except: 3775 self.cluster.remove() 3776 raise
3777
3778 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
3779 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 3780 self.ijob = 0 3781 if run_type != 'shower': 3782 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 3783 for args in arg_list: 3784 for Pdir, jobs in job_dict.items(): 3785 for job in jobs: 3786 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 3787 if self.cluster_mode == 2: 3788 time.sleep(1) # security to allow all jobs to be launched 3789 else: 3790 self.njobs = len(arg_list) 3791 for args in arg_list: 3792 [(cwd, exe)] = job_dict.items() 3793 self.run_exe(exe, args, run_type, cwd) 3794 3795 self.wait_for_complete(run_type)
3796 3797 3798
3799 - def check_event_files(self,jobs):
3800 """check the integrity of the event files after splitting, and resubmit 3801 those which are not nicely terminated""" 3802 jobs_to_resubmit = [] 3803 for job in jobs: 3804 last_line = '' 3805 try: 3806 last_line = subprocess.Popen( 3807 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \ 3808 stdout = subprocess.PIPE).stdout.read().strip() 3809 except IOError: 3810 pass 3811 if last_line != "</LesHouchesEvents>": 3812 jobs_to_resubmit.append(job) 3813 self.njobs = 0 3814 if jobs_to_resubmit: 3815 run_type = 'Resubmitting broken jobs' 3816 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 3817 for job in jobs_to_resubmit: 3818 logger.debug('Resubmitting ' + job['dirname'] + '\n') 3819 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
3820 3821
3822 - def find_jobs_to_split(self, pdir, job, arg):
3823 """looks into the nevents_unweighed_splitted file to check how many 3824 split jobs are needed for this (pdir, job). arg is F, B or V""" 3825 # find the number of the integration channel 3826 splittings = [] 3827 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 3828 pattern = re.compile('for i in (\d+) ; do') 3829 match = re.search(pattern, ajob) 3830 channel = match.groups()[0] 3831 # then open the nevents_unweighted_splitted file and look for the 3832 # number of splittings to be done 3833 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 3834 # This skips the channels with zero events, because they are 3835 # not of the form GFXX_YY, but simply GFXX 3836 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 3837 pjoin(pdir, 'G%s%s' % (arg,channel))) 3838 matches = re.findall(pattern, nevents_file) 3839 for m in matches: 3840 splittings.append(m) 3841 return splittings
3842 3843
3844 - def run_exe(self, exe, args, run_type, cwd=None):
3845 """this basic function launch locally/on cluster exe with args as argument. 3846 """ 3847 3848 # first test that exe exists: 3849 execpath = None 3850 if cwd and os.path.exists(pjoin(cwd, exe)): 3851 execpath = pjoin(cwd, exe) 3852 elif not cwd and os.path.exists(exe): 3853 execpath = exe 3854 else: 3855 raise aMCatNLOError('Cannot find executable %s in %s' \ 3856 % (exe, os.getcwd())) 3857 # check that the executable has exec permissions 3858 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK): 3859 subprocess.call(['chmod', '+x', exe], cwd=cwd) 3860 # finally run it 3861 if self.cluster_mode == 0: 3862 #this is for the serial run 3863 misc.call(['./'+exe] + args, cwd=cwd) 3864 self.ijob += 1 3865 self.update_status((max([self.njobs - self.ijob - 1, 0]), 3866 min([1, self.njobs - self.ijob]), 3867 self.ijob, run_type), level='parton') 3868 3869 #this is for the cluster/multicore run 3870 elif 'reweight' in exe: 3871 # a reweight run 3872 # Find the correct PDF input file 3873 input_files, output_files = [], [] 3874 pdfinput = self.get_pdf_input_filename() 3875 if os.path.exists(pdfinput): 3876 input_files.append(pdfinput) 3877 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events')) 3878 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat')) 3879 input_files.append(args[0]) 3880 output_files.append('%s.rwgt' % os.path.basename(args[0])) 3881 output_files.append('reweight_xsec_events.output') 3882 output_files.append('scale_pdf_dependence.dat') 3883 3884 return self.cluster.submit2(exe, args, cwd=cwd, 3885 input_files=input_files, output_files=output_files, 3886 required_output=output_files) 3887 3888 elif 'ajob' in exe: 3889 # the 'standard' amcatnlo job 3890 # check if args is a list of string 3891 if type(args[0]) == str: 3892 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args) 3893 #submitting 3894 self.cluster.submit2(exe, args, cwd=cwd, 3895 input_files=input_files, output_files=output_files, 3896 required_output=required_output) 3897 3898 # # keep track of folders and arguments for splitted evt gen 3899 # subfolder=output_files[-1].split('/')[0] 3900 # if len(args) == 4 and '_' in subfolder: 3901 # self.split_folders[pjoin(cwd,subfolder)] = [exe] + args 3902 3903 elif 'shower' in exe: 3904 # a shower job 3905 # args are [shower, output(HEP or TOP), run_name] 3906 # cwd is the shower rundir, where the executable are found 3907 input_files, output_files = [], [] 3908 shower = args[0] 3909 # the input files 3910 if shower == 'PYTHIA8': 3911 input_files.append(pjoin(cwd, 'Pythia8.exe')) 3912 input_files.append(pjoin(cwd, 'Pythia8.cmd')) 3913 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 3914 input_files.append(pjoin(cwd, 'config.sh')) 3915 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc')) 3916 else: 3917 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc')) 3918 else: 3919 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower)) 3920 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower)) 3921 if shower == 'HERWIGPP': 3922 input_files.append(pjoin(cwd, 'Herwig++')) 3923 input_files.append(pjoin(cwd, 'HepMCFortran.so')) 3924 if len(args) == 3: 3925 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')): 3926 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')) 3927 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')): 3928 
input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
3929                  else:
3930                      raise aMCatNLOError, 'Event file not present in %s' % \
3931                              pjoin(self.me_dir, 'Events', self.run_name)
3932              else:
3933                  input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
3934              # the output files
3935              if len(args) == 3:
3936                  output_files.append('mcatnlo_run.log')
3937              else:
3938                  output_files.append('mcatnlo_run_%s.log' % args[3])
3939              if args[1] == 'HEP':
3940                  if len(args) == 3:
3941                      fname = 'events'
3942                  else:
3943                      fname = 'events_%s' % args[3]
3944                  if shower in ['PYTHIA8', 'HERWIGPP']:
3945                      output_files.append(fname + '.hepmc.gz')
3946                  else:
3947                      output_files.append(fname + '.hep.gz')
3948              elif args[1] == 'TOP' or args[1] == 'HWU':
3949                  if len(args) == 3:
3950                      fname = 'histfile'
3951                  else:
3952                      fname = 'histfile_%s' % args[3]
3953                  output_files.append(fname + '.tar')
3954              else:
3955                  raise aMCatNLOError, 'Not a valid output argument for shower job : %s' % args[1]
3956              #submitting
3957              self.cluster.submit2(exe, args, cwd=cwd,
3958                      input_files=input_files, output_files=output_files)
3959
3960          else:
3961              return self.cluster.submit(exe, args, cwd=cwd)
3962
3963 - def getIO_ajob(self,exe,cwd, args):
3964 # use local disk if possible => need to stands what are the 3965 # input/output files 3966 3967 output_files = [] 3968 required_output = [] 3969 input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'), 3970 pjoin(cwd, 'symfact.dat'), 3971 pjoin(cwd, 'iproc.dat'), 3972 pjoin(cwd, 'initial_states_map.dat'), 3973 pjoin(cwd, 'configs_and_props_info.dat'), 3974 pjoin(cwd, 'leshouche_info.dat'), 3975 pjoin(cwd, 'FKS_params.dat')] 3976 3977 # For GoSam interface, we must copy the SLHA card as well 3978 if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')): 3979 input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat')) 3980 3981 if os.path.exists(pjoin(cwd,'nevents.tar')): 3982 input_files.append(pjoin(cwd,'nevents.tar')) 3983 3984 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')): 3985 input_files.append(pjoin(cwd, 'OLE_order.olc')) 3986 3987 # File for the loop (might not be present if MadLoop is not used) 3988 if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \ 3989 cluster.need_transfer(self.options): 3990 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 3991 elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \ 3992 cluster.need_transfer(self.options): 3993 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz', 3994 dereference=True) 3995 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources') 3996 tf.close() 3997 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 3998 3999 if args[1] == 'born' or args[1] == 'all': 4000 # MADEVENT MINT FO MODE 4001 input_files.append(pjoin(cwd, 'madevent_mintFO')) 4002 if args[2] == '0': 4003 current = '%s_G%s' % (args[1],args[0]) 4004 else: 4005 current = '%s_G%s_%s' % (args[1],args[0],args[2]) 4006 if os.path.exists(pjoin(cwd,current)): 4007 input_files.append(pjoin(cwd, current)) 4008 output_files.append(current) 4009 4010 required_output.append('%s/results.dat' % current) 4011 required_output.append('%s/res_%s.dat' % (current,args[3])) 4012 required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 4013 required_output.append('%s/mint_grids' % current) 4014 required_output.append('%s/grid.MC_integer' % current) 4015 if args[3] != '0': 4016 required_output.append('%s/scale_pdf_dependence.dat' % current) 4017 4018 elif args[1] == 'F' or args[1] == 'B': 4019 # MINTMC MODE 4020 input_files.append(pjoin(cwd, 'madevent_mintMC')) 4021 4022 if args[2] == '0': 4023 current = 'G%s%s' % (args[1],args[0]) 4024 else: 4025 current = 'G%s%s_%s' % (args[1],args[0],args[2]) 4026 if os.path.exists(pjoin(cwd,current)): 4027 input_files.append(pjoin(cwd, current)) 4028 output_files.append(current) 4029 if args[2] > '0': 4030 # this is for the split event generation 4031 output_files.append('G%s%s_%s' % (args[1], args[0], args[2])) 4032 required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3])) 4033 4034 else: 4035 required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 4036 if args[3] in ['0','1']: 4037 required_output.append('%s/results.dat' % current) 4038 if args[3] == '1': 4039 output_files.append('%s/results.dat' % current) 4040 4041 else: 4042 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args)) 4043 4044 #Find the correct PDF input file 4045 pdfinput = self.get_pdf_input_filename() 4046 if os.path.exists(pdfinput): 4047 input_files.append(pdfinput) 4048 return input_files, output_files, required_output, args
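A short illustration of the directory-naming and required-output conventions built in getIO_ajob, for the fixed-order branch (argument values hypothetical):

# hypothetical fixed-order call: channel 2, mode 'all', no split, MINT step 1
args = ['2', 'all', '0', '1']
current = '%s_G%s' % (args[1], args[0])        # -> 'all_G2' (no split suffix since args[2] == '0')
required = ['%s/results.dat' % current,
            '%s/res_%s.dat' % (current, args[3]),
            '%s/log_MINT%s.txt' % (current, args[3])]
# -> ['all_G2/results.dat', 'all_G2/res_1.dat', 'all_G2/log_MINT1.txt']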
4049 4050
4051 - def compile(self, mode, options):
4052 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 4053 specified in mode""" 4054 4055 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 4056 4057 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 4058 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 4059 4060 self.get_characteristics(pjoin(self.me_dir, 4061 'SubProcesses', 'proc_characteristics')) 4062 4063 #define a bunch of log files 4064 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 4065 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 4066 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 4067 test_log = pjoin(self.me_dir, 'test.log') 4068 4069 # environmental variables to be included in make_opts 4070 self.make_opts_var = {} 4071 if self.proc_characteristics['has_loops'] and \ 4072 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4073 self.make_opts_var['madloop'] = 'true' 4074 4075 self.update_status('Compiling the code', level=None, update_results=True) 4076 4077 libdir = pjoin(self.me_dir, 'lib') 4078 sourcedir = pjoin(self.me_dir, 'Source') 4079 4080 #clean files 4081 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 4082 #define which executable/tests to compile 4083 if '+' in mode: 4084 mode = mode.split('+')[0] 4085 if mode in ['NLO', 'LO']: 4086 exe = 'madevent_mintFO' 4087 tests = ['test_ME'] 4088 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 4089 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 4090 exe = 'madevent_mintMC' 4091 tests = ['test_ME', 'test_MC'] 4092 # write an analyse_opts with a dummy analysis so that compilation goes through 4093 open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w').write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n') 4094 4095 #directory where to compile exe 4096 p_dirs = [d for d in \ 4097 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 4098 # create param_card.inc and run_card.inc 4099 self.do_treatcards('', amcatnlo=True) 4100 # if --nocompile option is specified, check here that all exes exists. 
4101          # If they exist, return
4102          if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
4103                  for p_dir in p_dirs]) and options['nocompile']:
4104              return
4105
4106          # rm links to lhapdflib/ PDFsets if they exist
4107          if os.path.exists(pjoin(libdir, 'PDFsets')):
4108              files.rm(pjoin(libdir, 'PDFsets'))
4109
4110          # read the run_card to find if lhapdf is used or not
4111          if self.run_card['pdlabel'] == 'lhapdf' and \
4112                  (self.banner.get_detail('run_card', 'lpp1') != 0 or \
4113                   self.banner.get_detail('run_card', 'lpp2') != 0):
4114
4115              self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
4116              pdfsetsdir = self.get_lhapdf_pdfsetsdir()
4117              lhaid_list = self.run_card['lhaid']
4118              self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
4119
4120          else:
4121              if self.run_card['lpp1'] == 1 == self.run_card['lpp2']:
4122                  logger.info('Using built-in libraries for PDFs')
4123              if self.run_card['lpp1'] == 0 == self.run_card['lpp2']:
4124                  logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.')
4125
4126          # read the run_card to find if applgrid is used or not
4127          if self.run_card['iappl'] != 0:
4128              self.make_opts_var['applgrid'] = 'True'
4129              # check versions of applgrid and amcfast
4130              for code in ['applgrid','amcfast']:
4131                  try:
4132                      p = subprocess.Popen([self.options[code], '--version'], \
4133                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
4134                  except OSError:
4135                      raise aMCatNLOError(('No valid %s installation found. \n' + \
4136                              'Please set the path to %s-config by using \n' + \
4137                              'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
4138                  else:
4139                      output, _ = p.communicate()
4140                      if code == 'applgrid' and output < '1.4.63':
4141                          raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\
4142                                  +' You are using %s' % output)
4143                      if code == 'amcfast' and output < '1.1.1':
4144                          raise aMCatNLOError('Version of aMCfast is too old. 
Use 1.1.1 or later.'\ 4145 +' You are using %s',output) 4146 4147 # set-up the Source/make_opts with the correct applgrid-config file 4148 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 4149 % (self.options['amcfast'],self.options['applgrid']) 4150 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 4151 text_out=[] 4152 for line in text: 4153 if line.strip().startswith('APPLLIBS=$'): 4154 line=appllibs 4155 text_out.append(line) 4156 open(pjoin(self.me_dir,'Source','make_opts'),'w').writelines(text_out) 4157 4158 if 'fastjet' in self.options.keys() and self.options['fastjet']: 4159 self.make_opts_var['fastjet_config'] = self.options['fastjet'] 4160 4161 # add the make_opts_var to make_opts 4162 self.update_make_opts() 4163 4164 # make Source 4165 self.update_status('Compiling source...', level=None) 4166 misc.compile(['clean4pdf'], cwd = sourcedir) 4167 misc.compile(cwd = sourcedir) 4168 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 4169 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 4170 and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 4171 and os.path.exists(pjoin(libdir, 'libpdf.a')): 4172 logger.info(' ...done, continuing with P* directories') 4173 else: 4174 raise aMCatNLOError('Compilation failed') 4175 4176 # make StdHep (only necessary with MG option output_dependencies='internal') 4177 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 4178 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 4179 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 4180 if os.path.exists(pjoin(sourcedir,'StdHEP')): 4181 logger.info('Compiling StdHEP (can take a couple of minutes) ...') 4182 misc.compile(['StdHEP'], cwd = sourcedir) 4183 logger.info(' ...done.') 4184 else: 4185 raise aMCatNLOError('Could not compile StdHEP because its'+\ 4186 ' source directory could not be found in the SOURCE folder.\n'+\ 4187 " Check the MG5_aMC option 'output_dependencies.'") 4188 4189 # make CutTools (only necessary with MG option output_dependencies='internal') 4190 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 4191 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 4192 if os.path.exists(pjoin(sourcedir,'CutTools')): 4193 logger.info('Compiling CutTools (can take a couple of minutes) ...') 4194 misc.compile(['CutTools'], cwd = sourcedir) 4195 logger.info(' ...done.') 4196 else: 4197 raise aMCatNLOError('Could not compile CutTools because its'+\ 4198 ' source directory could not be found in the SOURCE folder.\n'+\ 4199 " Check the MG5_aMC option 'output_dependencies.'") 4200 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 4201 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 4202 raise aMCatNLOError('CutTools compilation failed.') 4203 4204 # Verify compatibility between current compiler and the one which was 4205 # used when last compiling CutTools (if specified). 4206 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 4207 libdir, 'libcts.a')))),'compiler_version.log') 4208 if os.path.exists(compiler_log_path): 4209 compiler_version_used = open(compiler_log_path,'r').read() 4210 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 4211 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 4212 if os.path.exists(pjoin(sourcedir,'CutTools')): 4213 logger.info('CutTools was compiled with a different fortran'+\ 4214 ' compiler. 
Re-compiling it now...') 4215 misc.compile(['cleanCT'], cwd = sourcedir) 4216 misc.compile(['CutTools'], cwd = sourcedir) 4217 logger.info(' ...done.') 4218 else: 4219 raise aMCatNLOError("CutTools installation in %s"\ 4220 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 4221 " seems to have been compiled with a different compiler than"+\ 4222 " the one specified in MG5_aMC. Please recompile CutTools.") 4223 4224 # make IREGI (only necessary with MG option output_dependencies='internal') 4225 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 4226 and os.path.exists(pjoin(sourcedir,'IREGI')): 4227 logger.info('Compiling IREGI (can take a couple of minutes) ...') 4228 misc.compile(['IREGI'], cwd = sourcedir) 4229 logger.info(' ...done.') 4230 4231 if os.path.exists(pjoin(libdir, 'libiregi.a')): 4232 # Verify compatibility between current compiler and the one which was 4233 # used when last compiling IREGI (if specified). 4234 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 4235 libdir, 'libiregi.a')))),'compiler_version.log') 4236 if os.path.exists(compiler_log_path): 4237 compiler_version_used = open(compiler_log_path,'r').read() 4238 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 4239 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 4240 if os.path.exists(pjoin(sourcedir,'IREGI')): 4241 logger.info('IREGI was compiled with a different fortran'+\ 4242 ' compiler. Re-compiling it now...') 4243 misc.compile(['cleanIR'], cwd = sourcedir) 4244 misc.compile(['IREGI'], cwd = sourcedir) 4245 logger.info(' ...done.') 4246 else: 4247 raise aMCatNLOError("IREGI installation in %s"\ 4248 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 4249 " seems to have been compiled with a different compiler than"+\ 4250 " the one specified in MG5_aMC. Please recompile IREGI.") 4251 4252 # check if MadLoop virtuals have been generated 4253 if self.proc_characteristics['has_loops'] and \ 4254 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4255 if mode in ['NLO', 'aMC@NLO', 'noshower']: 4256 tests.append('check_poles') 4257 4258 # make and run tests (if asked for), gensym and make madevent in each dir 4259 self.update_status('Compiling directories...', level=None) 4260 4261 for test in tests: 4262 self.write_test_input(test) 4263 4264 try: 4265 import multiprocessing 4266 if not self.nb_core: 4267 try: 4268 self.nb_core = int(self.options['nb_core']) 4269 except TypeError: 4270 self.nb_core = multiprocessing.cpu_count() 4271 except ImportError: 4272 self.nb_core = 1 4273 4274 compile_options = copy.copy(self.options) 4275 compile_options['nb_core'] = self.nb_core 4276 compile_cluster = cluster.MultiCore(**compile_options) 4277 logger.info('Compiling on %d cores' % self.nb_core) 4278 4279 update_status = lambda i, r, f: self.donothing(i,r,f) 4280 for p_dir in p_dirs: 4281 compile_cluster.submit(prog = compile_dir, 4282 argument = [self.me_dir, p_dir, mode, options, 4283 tests, exe, self.options['run_mode']]) 4284 try: 4285 compile_cluster.wait(self.me_dir, update_status) 4286 except Exception, error: 4287 logger.warning("Fail to compile the Subprocesses") 4288 if __debug__: 4289 raise 4290 compile_cluster.remove() 4291 self.do_quit('') 4292 4293 logger.info('Checking test output:') 4294 for p_dir in p_dirs: 4295 logger.info(p_dir) 4296 for test in tests: 4297 logger.info(' Result for %s:' % test) 4298 4299 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir) 4300 #check that none of the tests failed 4301 self.check_tests(test, this_dir)
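The compiler-consistency tests for CutTools and IREGI above reduce to a plain substring check between the logged and the currently detected compiler version. A minimal sketch with hypothetical values:

# hypothetical contents illustrating the substring test used above
compiler_version_used = 'gfortran 4.8.5'   # as read from compiler_version.log
current_version = '5.4.0'                  # as detected from Source/make_opts
needs_recompile = current_version not in compiler_version_used
# -> True here: the libraries would be cleaned and recompiled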
4302 4303
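    # Illustrative sketch (not part of the original source): the loop above
    # compiles every P* subprocess directory in parallel through
    # cluster.MultiCore; conceptually it is equivalent to running
    #     for p_dir in p_dirs:
    #         compile_dir(me_dir, p_dir, mode, options, tests, exe, run_mode)
    # on self.nb_core cores, with any failure surfacing via compile_cluster.wait().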
    def donothing(*args):
        """dummy status-update callback: absorbs and ignores its arguments"""
        pass


    def check_tests(self, test, dir):
        """Call the correct parser for the test log.
        Skip check_poles for LOonly folders."""
        if test in ['test_ME', 'test_MC']:
            return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
        elif test == 'check_poles' and not os.path.exists(pjoin(dir, 'parton_lum_0.f')):
            return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))


    def parse_test_mx_log(self, log):
        """Read and parse the test_ME.log/test_MC.log file."""
        content = open(log).read()
        if 'FAILED' in content:
            logger.info('Output of the failing test:\n'+content[:-1], '$MG:color:BLACK')
            raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
                'Please check that widths of final state particles (e.g. top) have been' + \
                ' set to 0 in the param_card.dat.')
        else:
            lines = [l for l in content.split('\n') if 'PASSED' in l]
            logger.info('   Passed.')
            logger.debug('\n'+'\n'.join(lines))


    def parse_check_poles_log(self, log):
        """Read and parse the check_poles.log file."""
        content = open(log).read()
        npass = 0
        nfail = 0
        for line in content.split('\n'):
            if 'PASSED' in line:
                npass += 1
                tolerance = float(line.split()[1])
            if 'FAILED' in line:
                nfail += 1
                tolerance = float(line.split()[1])

        if nfail + npass == 0:
            logger.warning('0 points have been tried')
            return

        if float(nfail)/float(nfail+npass) > 0.1:
            raise aMCatNLOError('Poles do not cancel, run cannot continue')
        else:
            logger.info('   Poles successfully cancel for %d points out of %d (tolerance=%2.1e)' \
                        % (npass, nfail+npass, tolerance))


    def write_test_input(self, test):
        """Write the input files to run test_ME/test_MC or check_poles."""
        if test in ['test_ME', 'test_MC']:
            content = "-2 -2\n"               # generate energy/angle randomly
            content += "100 100\n"            # run 100 points for soft and collinear tests
            content += "0\n"                  # sum over helicities
            content += "0\n"                  # all FKS configs
            content += '\n'.join(["-1"] * 50) # random diagram
        elif test == 'check_poles':
            content = '20 \n -1\n'

        file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w')
        if test == 'test_MC':
            shower = self.run_card['parton_shower']
            MC_header = "%s\n " % shower + \
                        "1 \n1 -0.1\n-1 -0.1\n"
            file.write(MC_header + content)
        else:
            file.write(content)
        file.close()



    ############################################################################
    def find_model_name(self):
        """Return the model name."""
        if hasattr(self, 'model_name'):
            return self.model_name

        model = 'sm'
        proc = []
        for line in open(os.path.join(self.me_dir, 'Cards', 'proc_card_mg5.dat')):
            line = line.split('#')[0]
            #line = line.split('=')[0]
            if line.startswith('import') and 'model' in line:
                model = line.split()[2]
                proc = []
            elif line.startswith('generate'):
                proc.append(line.split(None, 1)[1])
            elif line.startswith('add process'):
                proc.append(line.split(None, 2)[2])

        self.model = model
        self.process = proc
        return model



    ############################################################################
    def ask_run_configuration(self, mode, options, switch={}):
        """Ask the questions when launching generate_events/multi_run."""

        if 'parton' not in options:
            options['parton'] = False
        if 'reweightonly' not in options:
            options['reweightonly'] = False

        void = 'NOT INSTALLED'
        switch_order = ['order', 'fixed_order', 'shower', 'madspin', 'reweight']
        switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void,
                          'madspin': void, 'reweight': 'OFF'}
        if not switch:
            switch = switch_default
        else:
            switch.update(dict((k, value) for k, value in switch_default.items()
                               if k not in switch))
        default_switch = ['ON', 'OFF']

        allowed_switch_value = {'order': ['LO', 'NLO'],
                                'fixed_order': default_switch,
                                'shower': default_switch,
                                'madspin': default_switch,
                                'reweight': default_switch}

        description = {'order': 'Perturbative order of the calculation:',
                       'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):',
                       'shower': 'Shower the generated events:',
                       'madspin': 'Decay particles with the MadSpin module:',
                       'reweight': 'Add weights to the events based on changing model parameters:'}

        force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'},
                        ('madspin', 'ON'): {'fixed_order': 'OFF'},
                        ('reweight', 'ON'): {'fixed_order': 'OFF'},
                        ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF', 'reweight': 'OFF'}
                        }
        special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']

        assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void)

        if self.proc_characteristics['ninitial'] == 1:
            # decay process: shower, MadSpin and reweighting are not available
            switch['fixed_order'] = 'ON'
            switch['shower'] = 'Not available for decay'
            switch['madspin'] = 'Not available for decay'
            switch['reweight'] = 'Not available for decay'
            allowed_switch_value['fixed_order'] = ['ON']
            allowed_switch_value['shower'] = ['OFF']
            allowed_switch_value['madspin'] = ['OFF']
            allowed_switch_value['reweight'] = ['OFF']
            available_mode = ['0', '1']
            special_values = ['LO', 'NLO']
        else:
            # Init the switch values according to the current status
            available_mode = ['0', '1', '2', '3']

        if mode == 'auto':
            mode = None
        if not mode and (options['parton'] or options['reweightonly']):
            mode = 'noshower'

        if '3' in available_mode:
            if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
                switch['shower'] = 'ON'
            else:
                switch['shower'] = 'OFF'

        if (not aMCatNLO or self.options['mg5_path']) and '3' in available_mode:
            available_mode.append('4')
            if os.path.exists(pjoin(self.me_dir, 'Cards', 'madspin_card.dat')):
                switch['madspin'] = 'ON'
            else:
                switch['madspin'] = 'OFF'
            if misc.has_f2py() or self.options['f2py_compiler']:
                available_mode.append('5')
                if os.path.exists(pjoin(self.me_dir, 'Cards', 'reweight_card.dat')):
                    switch['reweight'] = 'ON'
                else:
                    switch['reweight'] = 'OFF'
            else:
                switch['reweight'] = 'Not available (requires NumPy)'

        if 'do_reweight' in options and options['do_reweight'] and '3' in available_mode:
            if switch['reweight'] == "OFF":
                switch['reweight'] = "ON"
            elif switch['reweight'] != "ON":
                logger.critical("Cannot run REWEIGHT: %s" % switch['reweight'])
        if 'do_madspin' in options and options['do_madspin']:
            if switch['madspin'] == "OFF":
                switch['madspin'] = 'ON'
            elif switch['madspin'] != "ON":
logger.critical("Cannot run MadSpin module: %s" % switch['reweight']) 4497 4498 answers = list(available_mode) + ['auto', 'done'] 4499 alias = {} 4500 for id, key in enumerate(switch_order): 4501 if switch[key] != void and switch[key] in allowed_switch_value[key] and \ 4502 len(allowed_switch_value[key]) >1: 4503 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]] 4504 #allow lower case for on/off 4505 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s)) 4506 for s in allowed_switch_value[key])) 4507 answers += special_values 4508 4509 def create_question(switch): 4510 switch_format = " %i %-61s %12s=%s\n" 4511 question = "The following switches determine which operations are executed:\n" 4512 for id, key in enumerate(switch_order): 4513 question += switch_format % (id+1, description[key], key, switch[key]) 4514 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1) 4515 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n' 4516 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n' 4517 return question

        def modify_switch(mode, answer, switch):
            if '=' in answer:
                key, status = answer.split('=')
                switch[key] = status
                if (key, status) in force_switch:
                    for key2, status2 in force_switch[(key, status)].items():
                        if switch[key2] not in [status2, void]:
                            logger.info('For consistency, \'%s\' is set to \'%s\''
                                        % (key2, status2), '$MG:color:BLACK')
                            switch[key2] = status2
            elif answer in ['0', 'auto', 'done']:
                return
            elif answer in special_values:
                logger.info('Mode value %s entered. Going to the related mode' % answer, '$MG:color:BLACK')
                #assign_switch('reweight', 'OFF')
                #assign_switch('madspin', 'OFF')
                if answer == 'LO':
                    switch['order'] = 'LO'
                    switch['fixed_order'] = 'ON'
                    assign_switch('shower', 'OFF')
                elif answer == 'NLO':
                    switch['order'] = 'NLO'
                    switch['fixed_order'] = 'ON'
                    assign_switch('shower', 'OFF')
                elif answer == 'aMC@NLO':
                    switch['order'] = 'NLO'
                    switch['fixed_order'] = 'OFF'
                    assign_switch('shower', 'ON')
                elif answer == 'aMC@LO':
                    switch['order'] = 'LO'
                    switch['fixed_order'] = 'OFF'
                    assign_switch('shower', 'ON')
                elif answer == 'noshower':
                    switch['order'] = 'NLO'
                    switch['fixed_order'] = 'OFF'
                    assign_switch('shower', 'OFF')
                elif answer == 'noshowerLO':
                    switch['order'] = 'LO'
                    switch['fixed_order'] = 'OFF'
                    assign_switch('shower', 'OFF')
            if mode:
                return
            return switch

        modify_switch(mode, self.last_mode, switch)
        if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir, 'Cards', 'madspin_card.dat')):
            assign_switch('madspin', 'ON')

        if not self.force:
            answer = ''
            while answer not in ['0', 'done', 'auto', 'onlyshower']:
                question = create_question(switch)
                if mode:
                    answer = mode
                else:
                    answer = self.ask(question, '0', answers, alias=alias)
                if answer.isdigit() and answer != '0':
                    key = switch_order[int(answer) - 1]
                    opt1 = allowed_switch_value[key][0]
                    opt2 = allowed_switch_value[key][1]
                    answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2)

                if not modify_switch(mode, answer, switch):
                    break

        # assign the run mode depending on the switches
        if not mode or mode == 'auto':
            if switch['order'] == 'LO':
                if switch['shower'] == 'ON':
                    mode = 'aMC@LO'
                elif switch['fixed_order'] == 'ON':
                    mode = 'LO'
                else:
                    mode = 'noshowerLO'
            elif switch['order'] == 'NLO':
                if switch['shower'] == 'ON':
                    mode = 'aMC@NLO'
                elif switch['fixed_order'] == 'ON':
                    mode = 'NLO'
                else:
                    mode = 'noshower'
        logger.info('will run in mode: %s' % mode)

        if mode == 'noshower':
            logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please shower the Les Houches events before using them for physics analyses.""")

        # specify the cards which are needed for this run.
        cards = ['param_card.dat', 'run_card.dat']
        ignore = []
        if mode in ['LO', 'NLO']:
            options['parton'] = True
            ignore = ['shower_card.dat', 'madspin_card.dat']
            cards.append('FO_analyse_card.dat')
        else:
            if switch['madspin'] == 'ON':
                cards.append('madspin_card.dat')
            if switch['reweight'] == 'ON':
                cards.append('reweight_card.dat')
            if 'aMC@' in mode:
                cards.append('shower_card.dat')
        if mode == 'onlyshower':
            cards = ['shower_card.dat']
        if options['reweightonly']:
            cards = ['run_card.dat']

        self.keep_cards(cards, ignore)

        if mode == 'onlyshower':
            cards = ['shower_card.dat']

        # automatically switch to keep_wgt option
        first_cmd = []  # commands used to force some switches

        if not options['force'] and not self.force:
            self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd)

        self.banner = banner_mod.Banner()

        # store the cards in the banner
        for card in cards:
            self.banner.add(pjoin(self.me_dir, 'Cards', card))
        # and the run settings
        run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
        self.banner.add_text('run_settings', run_settings)

        if not mode == 'onlyshower':
            self.run_card = self.banner.charge_card('run_card')
            self.run_tag = self.run_card['run_tag']
            # this is if the user did not provide a name for the current run
            if not hasattr(self, 'run_name') or not self.run_name:
                self.run_name = self.find_available_run_name(self.me_dir)
                # add a tag to the run_name to distinguish the run type
                if self.run_name.startswith('run_'):
                    if mode in ['LO', 'aMC@LO', 'noshowerLO']:
                        self.run_name += '_LO'
            self.set_run_name(self.run_name, self.run_tag, 'parton')
            if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
                raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
            elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']:
                logger.warning("""You are running with FxFx merging enabled. To be able to merge
    samples of various multiplicities without double counting, you
    have to remove some events after showering 'by hand'. Please
    read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
                if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
                    raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""")
                elif self.run_card['parton_shower'].upper() != 'HERWIG6' and \
                     self.run_card['parton_shower'].upper() != 'PYTHIA8':
                    question = "FxFx merging not tested for %s shower. Do you want to continue?\n" \
                               % self.run_card['parton_shower'] + \
                               "Type \'n\' to stop or \'y\' to continue"
                    answers = ['n', 'y']
                    answer = self.ask(question, 'n', answers, alias=alias)
                    if answer == 'n':
                        error = '''Stop operation'''
                        self.ask_run_configuration(mode, options)
                        # raise aMCatNLOError(error)
            elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
                # NNLL+NLO jet veto is only possible for LO event generation or fNLO runs.
                raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) are only possible for fNLO or LO.""")

        if 'aMC@' in mode or mode == 'onlyshower':
            self.shower_card = self.banner.charge_card('shower_card')

        elif mode in ['LO', 'NLO']:
            analyse_card_path = pjoin(self.me_dir, 'Cards', 'FO_analyse_card.dat')
            self.analyse_card = self.banner.charge_card('FO_analyse_card')

        return mode


#===============================================================================
# aMCatNLOCmdShell
#===============================================================================
class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
    """The command line processor of MadGraph5_aMC@NLO"""

_compile_usage = "compile [MODE] [options]\n" + \
                 "-- compiles aMC@NLO \n" + \
                 "   MODE can be either FO, for fixed-order computations, \n" + \
                 "   or MC for matching with parton-shower Monte Carlos. \n" + \
                 "   (if omitted, it is set to MC)\n"
_compile_parser = misc.OptionParser(usage=_compile_usage)
_compile_parser.add_option("-f", "--force", default=False, action='store_true',
                           help="Use the cards present in the directory for the launch, without editing them")

_launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the Monte Carlo specified \n" + \
                "     in the run_card.dat\n"

_launch_parser = misc.OptionParser(usage=_launch_usage)
_launch_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for the launch, without editing them")
_launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs in multicore mode")
_launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                          help="Skip integration and event generation, just run reweight on the" + \
                               " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                          help="Stop the run after the parton-level file generation (you need " + \
                               "to shower the file in order to get physical results)")
_launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")
_launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name to the run")
_launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                          help="For use with APPLgrid only: start from existing grids")
_launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                          help="Run the reweight module (reweighting by different model parameters)")
_launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                          help="Run the MadSpin package")


_generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the Monte Carlo specified \n" + \
                "     in the run_card.dat\n"

_generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
_generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for generate_events, without editing them")
_generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs in multicore mode")
_generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                          help="Skip integration and event generation, just run reweight on the" + \
                               " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                          help="Stop the run after the parton-level file generation (you need " + \
                               "to shower the file in order to get physical results)")
_generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")
_generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name to the run")


_calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

_calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
_calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for the launch, without editing them")
_calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs in multicore mode")
_calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name to the run")
_calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                          help="For use with APPLgrid only: start from existing grids")
_calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")

_shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on the parton-level file generated for run run_name\n' + \
                '   all the information (e.g. number of events, Monte Carlo, ...)\n' + \
                '   is directly read from the header of the event file\n'
_shower_parser = misc.OptionParser(usage=_shower_usage)
_shower_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the shower_card present in the directory for the launch, without editing")

if '__main__' == __name__:
    # Launch the interface without checking whether another instance is already
    # running.  This can ONLY run a single command!
    import sys
    if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
        sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\
                 'Please upgrade your version of python.')

    import os
    import optparse
    # Get the directory of the script's real path (bin)
    # and add it to the current PYTHONPATH
    root_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    sys.path.insert(0, root_path)
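
    # Illustrative usage (not part of the original source): this entry point
    # supports one-shot, non-interactive commands such as
    #     ./bin/aMCatNLO launch aMC@NLO -f -n my_run
    # ('my_run' is a hypothetical run name); the command is parsed below and
    # dispatched through cmd_line.run_cmd().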
    class MyOptParser(optparse.OptionParser):
        class InvalidOption(Exception):
            pass
        def error(self, msg=''):
            raise MyOptParser.InvalidOption(msg)
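
    # Note (illustrative, not part of the original source): overriding
    # OptionParser.error() makes a bad option raise MyOptParser.InvalidOption
    # instead of printing a message and exiting; the retry loop below relies on
    # catching that exception to decide how much of argv is parsable.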
    # Write out a nice usage message if called with -h or --help
    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    parser.add_option("", "--web", action="store_true", default=False, dest='web',
                      help='force the code to run in secure mode')
    parser.add_option("", "--debug", action="store_true", default=False, dest='debug',
                      help='force to launch debug mode')
    parser_error = ''
    done = False

    for i in range(len(sys.argv)-1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
            done = True
        except MyOptParser.InvalidOption, error:
            pass
        else:
            args += sys.argv[len(sys.argv)-i:]
        if done:
            # stop at the first (longest) prefix of the arguments that parses
            break
    if not done:
        # raise the correct error:
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption, error:
            print error
            sys.exit(2)

    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config
    # Set logging level according to the logging level given by options
    #logging.basicConfig(level=vars(logging)[options.logging])
    import internal.coloring_logging
    try:
        if __debug__ and options.logging == 'INFO':
            options.logging = 'DEBUG'
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            level = eval('logging.' + options.logging)
        print os.path.join(root_path, 'internal', 'me5_logging.conf')
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        raise

    # Call the cmd interface main loop
    try:
        if args:
            # a single command is provided
            if '--web' in args:
                i = args.index('--web')
                args.pop(i)
                cmd_line = aMCatNLOCmd(force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(force_run=True)

            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print parser_error
                    print 'and %s can not be interpreted as a valid command.' % args[0]
                else:
                    print 'ERROR: %s is not a valid command. Please retry.' % args[0]
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print 'quit on KeyboardInterrupt'