Package madgraph :: Package interface :: Module amcatnlo_run_interface
[hide private]
[frames] | no frames]

Source Code for Module madgraph.interface.amcatnlo_run_interface

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
  18  from __future__ import division 
  19   
  20  import atexit 
  21  import glob 
  22  import logging 
  23  import math 
  24  import optparse 
  25  import os 
  26  import pydoc 
  27  import random 
  28  import re 
  29  import shutil 
  30  import subprocess 
  31  import sys 
  32  import traceback 
  33  import time 
  34  import signal 
  35  import tarfile 
  36  import copy 
  37  import datetime 
  38  import tarfile 
  39   
  40  try: 
  41      import readline 
  42      GNU_SPLITTING = ('GNU' in readline.__doc__) 
  43  except: 
  44      GNU_SPLITTING = True 
  45   
  46  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  47  root_path = os.path.split(root_path)[0] 
  48  sys.path.insert(0, os.path.join(root_path,'bin')) 
  49   
  50  # usefull shortcut 
  51  pjoin = os.path.join 
  52  # Special logger for the Cmd Interface 
  53  logger = logging.getLogger('madgraph.stdout') # -> stdout 
  54  logger_stderr = logging.getLogger('madgraph.stderr') # ->stderr 
  55    
  56  try: 
  57      import madgraph 
  58  except ImportError:  
  59      aMCatNLO = True  
  60      import internal.extended_cmd as cmd 
  61      import internal.common_run_interface as common_run 
  62      import internal.banner as banner_mod 
  63      import internal.misc as misc     
  64      from internal import InvalidCmd, MadGraph5Error 
  65      import internal.files as files 
  66      import internal.cluster as cluster 
  67      import internal.save_load_object as save_load_object 
  68      import internal.gen_crossxhtml as gen_crossxhtml 
  69      import internal.sum_html as sum_html 
  70      import internal.shower_card as shower_card 
  71      import internal.FO_analyse_card as analyse_card  
  72      import internal.histograms as histograms 
  73  else: 
  74      # import from madgraph directory 
  75      aMCatNLO = False 
  76      import madgraph.interface.extended_cmd as cmd 
  77      import madgraph.interface.common_run_interface as common_run 
  78      import madgraph.iolibs.files as files 
  79      import madgraph.iolibs.save_load_object as save_load_object 
  80      import madgraph.madevent.gen_crossxhtml as gen_crossxhtml 
  81      import madgraph.madevent.sum_html as sum_html 
  82      import madgraph.various.banner as banner_mod 
  83      import madgraph.various.cluster as cluster 
  84      import madgraph.various.misc as misc 
  85      import madgraph.various.shower_card as shower_card 
  86      import madgraph.various.FO_analyse_card as analyse_card 
  87      import madgraph.various.histograms as histograms 
  88      from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error 
  89   
90 -class aMCatNLOError(Exception):
91 pass
92 93
94 -def compile_dir(*arguments):
95 """compile the direcory p_dir 96 arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode) 97 this function needs not to be a class method in order to do 98 the compilation on multicore""" 99 100 if len(arguments) == 1: 101 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0] 102 elif len(arguments)==7: 103 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments 104 else: 105 raise aMCatNLOError, 'not correct number of argument' 106 logger.info(' Compiling %s...' % p_dir) 107 108 this_dir = pjoin(me_dir, 'SubProcesses', p_dir) 109 110 try: 111 #compile everything 112 # compile and run tests 113 for test in tests: 114 # skip check_poles for LOonly dirs 115 if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')): 116 continue 117 misc.compile([test], cwd = this_dir, job_specs = False) 118 input = pjoin(me_dir, '%s_input.txt' % test) 119 #this can be improved/better written to handle the output 120 misc.call(['./%s' % (test)], cwd=this_dir, 121 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w')) 122 if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')) : 123 tf=tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz', 124 dereference=True) 125 tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources') 126 tf.close() 127 128 if not options['reweightonly']: 129 misc.compile(['gensym'], cwd=this_dir, job_specs = False) 130 open(pjoin(this_dir, 'gensym_input.txt'), 'w').write('%s\n' % run_mode) 131 misc.call(['./gensym'],cwd= this_dir, 132 stdin=open(pjoin(this_dir, 'gensym_input.txt')), 133 stdout=open(pjoin(this_dir, 'gensym.log'), 'w')) 134 #compile madevent_mintMC/mintFO 135 misc.compile([exe], cwd=this_dir, job_specs = False) 136 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 137 misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False) 138 139 logger.info(' %s done.' % p_dir) 140 return 0 141 except MadGraph5Error, msg: 142 return msg
143 144
145 -def check_compiler(options, block=False):
146 """check that the current fortran compiler is gfortran 4.6 or later. 147 If block, stops the execution, otherwise just print a warning""" 148 149 msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \ 150 'gfortran 4.6 or later installed.\n%s has been detected\n'+\ 151 'Note that You can still run all MadEvent run without any problem!' 152 #first check that gfortran is installed 153 if options['fortran_compiler']: 154 compiler = options['fortran_compiler'] 155 elif misc.which('gfortran'): 156 compiler = 'gfortran' 157 else: 158 compiler = '' 159 160 if 'gfortran' not in compiler: 161 if block: 162 raise aMCatNLOError(msg % compiler) 163 else: 164 logger.warning(msg % compiler) 165 else: 166 curr_version = misc.get_gfortran_version(compiler) 167 if not ''.join(curr_version.split('.')) >= '46': 168 if block: 169 raise aMCatNLOError(msg % (compiler + ' ' + curr_version)) 170 else: 171 logger.warning(msg % (compiler + ' ' + curr_version))
172 173 174 175 #=============================================================================== 176 # CmdExtended 177 #===============================================================================
178 -class CmdExtended(common_run.CommonRunCmd):
179 """Particularisation of the cmd command for aMCatNLO""" 180 181 #suggested list of command 182 next_possibility = { 183 'start': [], 184 } 185 186 debug_output = 'ME5_debug' 187 error_debug = 'Please report this bug on https://bugs.launchpad.net/madgraph5\n' 188 error_debug += 'More information is found in \'%(debug)s\'.\n' 189 error_debug += 'Please attach this file to your report.' 190 191 config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/madgraph5\n' 192 193 194 keyboard_stop_msg = """stopping all operation 195 in order to quit MadGraph5_aMC@NLO please enter exit""" 196 197 # Define the Error 198 InvalidCmd = InvalidCmd 199 ConfigurationError = aMCatNLOError 200
201 - def __init__(self, me_dir, options, *arg, **opt):
202 """Init history and line continuation""" 203 204 # Tag allowing/forbiding question 205 self.force = False 206 207 # If possible, build an info line with current version number 208 # and date, from the VERSION text file 209 info = misc.get_pkg_info() 210 info_line = "" 211 if info and info.has_key('version') and info.has_key('date'): 212 len_version = len(info['version']) 213 len_date = len(info['date']) 214 if len_version + len_date < 30: 215 info_line = "#* VERSION %s %s %s *\n" % \ 216 (info['version'], 217 (30 - len_version - len_date) * ' ', 218 info['date']) 219 else: 220 version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip() 221 info_line = "#* VERSION %s %s *\n" % \ 222 (version, (24 - len(version)) * ' ') 223 224 # Create a header for the history file. 225 # Remember to fill in time at writeout time! 226 self.history_header = \ 227 '#************************************************************\n' + \ 228 '#* MadGraph5_aMC@NLO *\n' + \ 229 '#* *\n' + \ 230 "#* * * *\n" + \ 231 "#* * * * * *\n" + \ 232 "#* * * * * 5 * * * * *\n" + \ 233 "#* * * * * *\n" + \ 234 "#* * * *\n" + \ 235 "#* *\n" + \ 236 "#* *\n" + \ 237 info_line + \ 238 "#* *\n" + \ 239 "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \ 240 "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \ 241 "#* and *\n" + \ 242 "#* http://amcatnlo.cern.ch *\n" + \ 243 '#* *\n' + \ 244 '#************************************************************\n' + \ 245 '#* *\n' + \ 246 '#* Command File for aMCatNLO *\n' + \ 247 '#* *\n' + \ 248 '#* run as ./bin/aMCatNLO.py filename *\n' + \ 249 '#* *\n' + \ 250 '#************************************************************\n' 251 252 if info_line: 253 info_line = info_line[1:] 254 255 logger.info(\ 256 "************************************************************\n" + \ 257 "* *\n" + \ 258 "* W E L C O M E to M A D G R A P H 5 *\n" + \ 259 "* a M C @ N L O *\n" + \ 260 "* *\n" + \ 261 "* * * *\n" + \ 262 "* * * * * *\n" + \ 263 "* * * * * 5 * * * * *\n" + \ 264 "* * * * * *\n" + \ 265 "* * * *\n" + \ 266 "* *\n" + \ 267 info_line + \ 268 "* *\n" + \ 269 "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \ 270 "* http://amcatnlo.cern.ch *\n" + \ 271 "* *\n" + \ 272 "* Type 'help' for in-line help. *\n" + \ 273 "* *\n" + \ 274 "************************************************************") 275 super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
276 277
278 - def get_history_header(self):
279 """return the history header""" 280 return self.history_header % misc.get_time_info()
281
282 - def stop_on_keyboard_stop(self):
283 """action to perform to close nicely on a keyboard interupt""" 284 try: 285 if hasattr(self, 'cluster'): 286 logger.info('rm jobs on queue') 287 self.cluster.remove() 288 if hasattr(self, 'results'): 289 self.update_status('Stop by the user', level=None, makehtml=True, error=True) 290 self.add_error_log_in_html(KeyboardInterrupt) 291 except: 292 pass
293
294 - def postcmd(self, stop, line):
295 """ Update the status of the run for finishing interactive command """ 296 297 # relaxing the tag forbidding question 298 self.force = False 299 300 if not self.use_rawinput: 301 return stop 302 303 304 arg = line.split() 305 if len(arg) == 0: 306 return stop 307 elif str(arg[0]) in ['exit','quit','EOF']: 308 return stop 309 310 try: 311 self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0], 312 level=None, error=True) 313 except Exception: 314 misc.sprint('self.update_status fails', log=logger) 315 pass
316
317 - def nice_user_error(self, error, line):
318 """If a ME run is currently running add a link in the html output""" 319 320 self.add_error_log_in_html() 321 cmd.Cmd.nice_user_error(self, error, line)
322
323 - def nice_config_error(self, error, line):
324 """If a ME run is currently running add a link in the html output""" 325 326 self.add_error_log_in_html() 327 cmd.Cmd.nice_config_error(self, error, line)
328
329 - def nice_error_handling(self, error, line):
330 """If a ME run is currently running add a link in the html output""" 331 332 self.add_error_log_in_html() 333 cmd.Cmd.nice_error_handling(self, error, line)
334 335 336 337 #=============================================================================== 338 # HelpToCmd 339 #===============================================================================
340 -class HelpToCmd(object):
341 """ The Series of help routine for the aMCatNLOCmd""" 342
343 - def help_launch(self):
344 """help for launch command""" 345 _launch_parser.print_help()
346
347 - def help_banner_run(self):
348 logger.info("syntax: banner_run Path|RUN [--run_options]") 349 logger.info("-- Reproduce a run following a given banner") 350 logger.info(" One of the following argument is require:") 351 logger.info(" Path should be the path of a valid banner.") 352 logger.info(" RUN should be the name of a run of the current directory") 353 self.run_options_help([('-f','answer all question by default'), 354 ('--name=X', 'Define the name associated with the new run')])
355 356
357 - def help_compile(self):
358 """help for compile command""" 359 _compile_parser.print_help()
360
361 - def help_generate_events(self):
362 """help for generate_events commandi 363 just call help_launch""" 364 _generate_events_parser.print_help()
365 366
367 - def help_calculate_xsect(self):
368 """help for generate_events command""" 369 _calculate_xsect_parser.print_help()
370
371 - def help_shower(self):
372 """help for shower command""" 373 _shower_parser.print_help()
374 375
376 - def help_open(self):
377 logger.info("syntax: open FILE ") 378 logger.info("-- open a file with the appropriate editor.") 379 logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat') 380 logger.info(' the path to the last created/used directory is used')
381
382 - def run_options_help(self, data):
383 if data: 384 logger.info('-- local options:') 385 for name, info in data: 386 logger.info(' %s : %s' % (name, info)) 387 388 logger.info("-- session options:") 389 logger.info(" Note that those options will be kept for the current session") 390 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type']) 391 logger.info(" --multicore : Run in multi-core configuration") 392 logger.info(" --nb_core=X : limit the number of core to use to X.")
393 394 395 396 397 #=============================================================================== 398 # CheckValidForCmd 399 #===============================================================================
400 -class CheckValidForCmd(object):
401 """ The Series of check routine for the aMCatNLOCmd""" 402
403 - def check_shower(self, args, options):
404 """Check the validity of the line. args[0] is the run_directory""" 405 406 if options['force']: 407 self.force = True 408 409 if len(args) == 0: 410 self.help_shower() 411 raise self.InvalidCmd, 'Invalid syntax, please specify the run name' 412 if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])): 413 raise self.InvalidCmd, 'Directory %s does not exists' % \ 414 pjoin(os.getcwd(), 'Events', args[0]) 415 416 self.set_run_name(args[0], level= 'shower') 417 args[0] = pjoin(self.me_dir, 'Events', args[0])
418
419 - def check_plot(self, args):
420 """Check the argument for the plot command 421 plot run_name modes""" 422 423 424 madir = self.options['madanalysis_path'] 425 td = self.options['td_path'] 426 427 if not madir or not td: 428 logger.info('Retry to read configuration file to find madanalysis/td') 429 self.set_configuration() 430 431 madir = self.options['madanalysis_path'] 432 td = self.options['td_path'] 433 434 if not madir: 435 error_msg = 'No Madanalysis path correctly set.' 436 error_msg += 'Please use the set command to define the path and retry.' 437 error_msg += 'You can also define it in the configuration file.' 438 raise self.InvalidCmd(error_msg) 439 if not td: 440 error_msg = 'No path to td directory correctly set.' 441 error_msg += 'Please use the set command to define the path and retry.' 442 error_msg += 'You can also define it in the configuration file.' 443 raise self.InvalidCmd(error_msg) 444 445 if len(args) == 0: 446 if not hasattr(self, 'run_name') or not self.run_name: 447 self.help_plot() 448 raise self.InvalidCmd('No run name currently define. Please add this information.') 449 args.append('all') 450 return 451 452 453 if args[0] not in self._plot_mode: 454 self.set_run_name(args[0], level='plot') 455 del args[0] 456 if len(args) == 0: 457 args.append('all') 458 elif not self.run_name: 459 self.help_plot() 460 raise self.InvalidCmd('No run name currently define. Please add this information.') 461 462 for arg in args: 463 if arg not in self._plot_mode and arg != self.run_name: 464 self.help_plot() 465 raise self.InvalidCmd('unknown options %s' % arg)
466
467 - def check_pgs(self, arg):
468 """Check the argument for pythia command 469 syntax: pgs [NAME] 470 Note that other option are already remove at this point 471 """ 472 473 # If not pythia-pgs path 474 if not self.options['pythia-pgs_path']: 475 logger.info('Retry to read configuration file to find pythia-pgs path') 476 self.set_configuration() 477 478 if not self.options['pythia-pgs_path'] or not \ 479 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')): 480 error_msg = 'No pythia-pgs path correctly set.' 481 error_msg += 'Please use the set command to define the path and retry.' 482 error_msg += 'You can also define it in the configuration file.' 483 raise self.InvalidCmd(error_msg) 484 485 tag = [a for a in arg if a.startswith('--tag=')] 486 if tag: 487 arg.remove(tag[0]) 488 tag = tag[0][6:] 489 490 491 if len(arg) == 0 and not self.run_name: 492 if self.results.lastrun: 493 arg.insert(0, self.results.lastrun) 494 else: 495 raise self.InvalidCmd('No run name currently define. Please add this information.') 496 497 if len(arg) == 1 and self.run_name == arg[0]: 498 arg.pop(0) 499 500 if not len(arg) and \ 501 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')): 502 self.help_pgs() 503 raise self.InvalidCmd('''No file file pythia_events.hep currently available 504 Please specify a valid run_name''') 505 506 lock = None 507 if len(arg) == 1: 508 prev_tag = self.set_run_name(arg[0], tag, 'pgs') 509 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 510 'events_*.hep.gz')) 511 if not filenames: 512 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag)) 513 else: 514 input_file = filenames[0] 515 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep') 516 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'), 517 argument=['-c', input_file]) 518 else: 519 if tag: 520 self.run_card['run_tag'] = tag 521 self.set_run_name(self.run_name, tag, 'pgs') 522 523 return lock
524 525
526 - def check_delphes(self, arg):
527 """Check the argument for pythia command 528 syntax: delphes [NAME] 529 Note that other option are already remove at this point 530 """ 531 532 # If not pythia-pgs path 533 if not self.options['delphes_path']: 534 logger.info('Retry to read configuration file to find delphes path') 535 self.set_configuration() 536 537 if not self.options['delphes_path']: 538 error_msg = 'No delphes path correctly set.' 539 error_msg += 'Please use the set command to define the path and retry.' 540 error_msg += 'You can also define it in the configuration file.' 541 raise self.InvalidCmd(error_msg) 542 543 tag = [a for a in arg if a.startswith('--tag=')] 544 if tag: 545 arg.remove(tag[0]) 546 tag = tag[0][6:] 547 548 549 if len(arg) == 0 and not self.run_name: 550 if self.results.lastrun: 551 arg.insert(0, self.results.lastrun) 552 else: 553 raise self.InvalidCmd('No run name currently define. Please add this information.') 554 555 if len(arg) == 1 and self.run_name == arg[0]: 556 arg.pop(0) 557 558 if not len(arg) and \ 559 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')): 560 self.help_pgs() 561 raise self.InvalidCmd('''No file file pythia_events.hep currently available 562 Please specify a valid run_name''') 563 564 if len(arg) == 1: 565 prev_tag = self.set_run_name(arg[0], tag, 'delphes') 566 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 567 'events_*.hep.gz')) 568 if not filenames: 569 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\ 570 % (self.run_name, prev_tag, 571 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag))) 572 else: 573 input_file = filenames[0] 574 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep') 575 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'), 576 argument=['-c', input_file]) 577 else: 578 if tag: 579 self.run_card['run_tag'] = tag 580 self.set_run_name(self.run_name, tag, 'delphes')
581
582 - def check_calculate_xsect(self, args, options):
583 """check the validity of the line. args is ORDER, 584 ORDER being LO or NLO. If no mode is passed, NLO is used""" 585 # modify args in order to be DIR 586 # mode being either standalone or madevent 587 588 if options['force']: 589 self.force = True 590 591 if not args: 592 args.append('NLO') 593 return 594 595 if len(args) > 1: 596 self.help_calculate_xsect() 597 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 598 599 elif len(args) == 1: 600 if not args[0] in ['NLO', 'LO']: 601 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1] 602 mode = args[0] 603 604 # check for incompatible options/modes 605 if options['multicore'] and options['cluster']: 606 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 607 ' are not compatible. Please choose one.'
608 609
610 - def check_generate_events(self, args, options):
611 """check the validity of the line. args is ORDER, 612 ORDER being LO or NLO. If no mode is passed, NLO is used""" 613 # modify args in order to be DIR 614 # mode being either standalone or madevent 615 616 if not args: 617 args.append('NLO') 618 return 619 620 if len(args) > 1: 621 self.help_generate_events() 622 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 623 624 elif len(args) == 1: 625 if not args[0] in ['NLO', 'LO']: 626 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1] 627 mode = args[0] 628 629 # check for incompatible options/modes 630 if options['multicore'] and options['cluster']: 631 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 632 ' are not compatible. Please choose one.'
633
634 - def check_banner_run(self, args):
635 """check the validity of line""" 636 637 if len(args) == 0: 638 self.help_banner_run() 639 raise self.InvalidCmd('banner_run requires at least one argument.') 640 641 tag = [a[6:] for a in args if a.startswith('--tag=')] 642 643 644 if os.path.exists(args[0]): 645 type ='banner' 646 format = self.detect_card_type(args[0]) 647 if format != 'banner': 648 raise self.InvalidCmd('The file is not a valid banner.') 649 elif tag: 650 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \ 651 (args[0], tag)) 652 if not os.path.exists(args[0]): 653 raise self.InvalidCmd('No banner associates to this name and tag.') 654 else: 655 name = args[0] 656 type = 'run' 657 banners = glob.glob(pjoin(self.me_dir,'Events', args[0], '*_banner.txt')) 658 if not banners: 659 raise self.InvalidCmd('No banner associates to this name.') 660 elif len(banners) == 1: 661 args[0] = banners[0] 662 else: 663 #list the tag and propose those to the user 664 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners] 665 tag = self.ask('which tag do you want to use?', tags[0], tags) 666 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \ 667 (args[0], tag)) 668 669 run_name = [arg[7:] for arg in args if arg.startswith('--name=')] 670 if run_name: 671 try: 672 self.exec_cmd('remove %s all banner -f' % run_name) 673 except Exception: 674 pass 675 self.set_run_name(args[0], tag=None, level='parton', reload_card=True) 676 elif type == 'banner': 677 self.set_run_name(self.find_available_run_name(self.me_dir)) 678 elif type == 'run': 679 if not self.results[name].is_empty(): 680 run_name = self.find_available_run_name(self.me_dir) 681 logger.info('Run %s is not empty so will use run_name: %s' % \ 682 (name, run_name)) 683 self.set_run_name(run_name) 684 else: 685 try: 686 self.exec_cmd('remove %s all banner -f' % run_name) 687 except Exception: 688 pass 689 self.set_run_name(name)
690 691 692
693 - def check_launch(self, args, options):
694 """check the validity of the line. args is MODE 695 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used""" 696 # modify args in order to be DIR 697 # mode being either standalone or madevent 698 699 if options['force']: 700 self.force = True 701 702 703 if not args: 704 args.append('auto') 705 return 706 707 if len(args) > 1: 708 self.help_launch() 709 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 710 711 elif len(args) == 1: 712 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']: 713 raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0] 714 mode = args[0] 715 716 # check for incompatible options/modes 717 if options['multicore'] and options['cluster']: 718 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 719 ' are not compatible. Please choose one.' 720 if mode == 'NLO' and options['reweightonly']: 721 raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'
722 723
724 - def check_compile(self, args, options):
725 """check the validity of the line. args is MODE 726 MODE being FO or MC. If no mode is passed, MC is used""" 727 # modify args in order to be DIR 728 # mode being either standalone or madevent 729 730 if options['force']: 731 self.force = True 732 733 if not args: 734 args.append('MC') 735 return 736 737 if len(args) > 1: 738 self.help_compile() 739 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 740 741 elif len(args) == 1: 742 if not args[0] in ['MC', 'FO']: 743 raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0] 744 mode = args[0]
745 746 # check for incompatible options/modes 747 748 749 #=============================================================================== 750 # CompleteForCmd 751 #===============================================================================
752 -class CompleteForCmd(CheckValidForCmd):
753 """ The Series of help routine for the MadGraphCmd""" 754
755 - def complete_launch(self, text, line, begidx, endidx):
756 """auto-completion for launch command""" 757 758 args = self.split_arg(line[0:begidx]) 759 if len(args) == 1: 760 #return mode 761 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line) 762 elif len(args) == 2 and line[begidx-1] == '@': 763 return self.list_completion(text,['LO','NLO'],line) 764 else: 765 opts = [] 766 for opt in _launch_parser.option_list: 767 opts += opt._long_opts + opt._short_opts 768 return self.list_completion(text, opts, line)
769
770 - def complete_banner_run(self, text, line, begidx, endidx):
771 "Complete the banner run command" 772 try: 773 774 775 args = self.split_arg(line[0:begidx], error=False) 776 777 if args[-1].endswith(os.path.sep): 778 return self.path_completion(text, 779 os.path.join('.',*[a for a in args \ 780 if a.endswith(os.path.sep)])) 781 782 783 if len(args) > 1: 784 # only options are possible 785 tags = glob.glob(pjoin(self.me_dir, 'Events' , args[1],'%s_*_banner.txt' % args[1])) 786 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags] 787 788 if args[-1] != '--tag=': 789 tags = ['--tag=%s' % t for t in tags] 790 else: 791 return self.list_completion(text, tags) 792 return self.list_completion(text, tags +['--name=','-f'], line) 793 794 # First argument 795 possibilites = {} 796 797 comp = self.path_completion(text, os.path.join('.',*[a for a in args \ 798 if a.endswith(os.path.sep)])) 799 if os.path.sep in line: 800 return comp 801 else: 802 possibilites['Path from ./'] = comp 803 804 run_list = glob.glob(pjoin(self.me_dir, 'Events', '*','*_banner.txt')) 805 run_list = [n.rsplit('/',2)[1] for n in run_list] 806 possibilites['RUN Name'] = self.list_completion(text, run_list) 807 808 return self.deal_multiple_categories(possibilites) 809 810 811 except Exception, error: 812 print error
813 814
815 - def complete_compile(self, text, line, begidx, endidx):
816 """auto-completion for launch command""" 817 818 args = self.split_arg(line[0:begidx]) 819 if len(args) == 1: 820 #return mode 821 return self.list_completion(text,['FO','MC'],line) 822 else: 823 opts = [] 824 for opt in _compile_parser.option_list: 825 opts += opt._long_opts + opt._short_opts 826 return self.list_completion(text, opts, line)
827
828 - def complete_calculate_xsect(self, text, line, begidx, endidx):
829 """auto-completion for launch command""" 830 831 args = self.split_arg(line[0:begidx]) 832 if len(args) == 1: 833 #return mode 834 return self.list_completion(text,['LO','NLO'],line) 835 else: 836 opts = [] 837 for opt in _calculate_xsect_parser.option_list: 838 opts += opt._long_opts + opt._short_opts 839 return self.list_completion(text, opts, line)
840
841 - def complete_generate_events(self, text, line, begidx, endidx):
842 """auto-completion for generate_events command 843 call the compeltion for launch""" 844 self.complete_launch(text, line, begidx, endidx)
845 846
847 - def complete_shower(self, text, line, begidx, endidx):
848 args = self.split_arg(line[0:begidx]) 849 if len(args) == 1: 850 #return valid run_name 851 data = glob.glob(pjoin(self.me_dir, 'Events', '*','events.lhe.gz')) 852 data = [n.rsplit('/',2)[1] for n in data] 853 tmp1 = self.list_completion(text, data) 854 if not self.run_name: 855 return tmp1
856
857 - def complete_plot(self, text, line, begidx, endidx):
858 """ Complete the plot command """ 859 860 args = self.split_arg(line[0:begidx], error=False) 861 862 if len(args) == 1: 863 #return valid run_name 864 data = glob.glob(pjoin(self.me_dir, 'Events', '*','events.lhe*')) 865 data = [n.rsplit('/',2)[1] for n in data] 866 tmp1 = self.list_completion(text, data) 867 if not self.run_name: 868 return tmp1 869 870 if len(args) > 1: 871 return self.list_completion(text, self._plot_mode)
872
873 - def complete_pgs(self,text, line, begidx, endidx):
874 "Complete the pgs command" 875 args = self.split_arg(line[0:begidx], error=False) 876 if len(args) == 1: 877 #return valid run_name 878 data = glob.glob(pjoin(self.me_dir, 'Events', '*', 'events_*.hep.gz')) 879 data = [n.rsplit('/',2)[1] for n in data] 880 tmp1 = self.list_completion(text, data) 881 if not self.run_name: 882 return tmp1 883 else: 884 tmp2 = self.list_completion(text, self._run_options + ['-f', 885 '--tag=' ,'--no_default'], line) 886 return tmp1 + tmp2 887 else: 888 return self.list_completion(text, self._run_options + ['-f', 889 '--tag=','--no_default'], line)
890 891 complete_delphes = complete_pgs
892
893 -class aMCatNLOAlreadyRunning(InvalidCmd):
894 pass
895 896 #=============================================================================== 897 # aMCatNLOCmd 898 #===============================================================================
899 -class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
900 """The command line processor of MadGraph""" 901 902 # Truth values 903 true = ['T','.true.',True,'true'] 904 # Options and formats available 905 _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m'] 906 _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes'] 907 _calculate_decay_options = ['-f', '--accuracy=0.'] 908 _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout'] 909 _plot_mode = ['all', 'parton','shower','pgs','delphes'] 910 _clean_mode = _plot_mode + ['channel', 'banner'] 911 _display_opts = ['run_name', 'options', 'variable'] 912 # survey options, dict from name to type, default value, and help text 913 # Variables to store object information 914 web = False 915 cluster_mode = 0 916 queue = 'madgraph' 917 nb_core = None 918 919 next_possibility = { 920 'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]', 921 'help generate_events'], 922 'generate_events': ['generate_events [OPTIONS]', 'shower'], 923 'launch': ['launch [OPTIONS]', 'shower'], 924 'shower' : ['generate_events [OPTIONS]'] 925 } 926 927 928 ############################################################################
929 - def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
930 """ add information to the cmd """ 931 932 self.start_time = 0 933 CmdExtended.__init__(self, me_dir, options, *completekey, **stdin) 934 #common_run.CommonRunCmd.__init__(self, me_dir, options) 935 936 self.mode = 'aMCatNLO' 937 self.nb_core = 0 938 self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir)) 939 940 # load the current status of the directory 941 if os.path.exists(pjoin(self.me_dir,'HTML','results.pkl')): 942 self.results = save_load_object.load_from_file(pjoin(self.me_dir,'HTML','results.pkl')) 943 self.results.resetall(self.me_dir) 944 self.last_mode = self.results[self.results.lastrun][-1]['run_mode'] 945 else: 946 model = self.find_model_name() 947 process = self.process # define in find_model_name 948 self.results = gen_crossxhtml.AllResultsNLO(model, process, self.me_dir) 949 self.last_mode = '' 950 self.results.def_web_mode(self.web) 951 # check that compiler is gfortran 4.6 or later if virtuals have been exported 952 proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read() 953 954 if not '[real=QCD]' in proc_card: 955 check_compiler(self.options, block=True)
956 957 958 ############################################################################
959 - def do_shower(self, line):
960 """ run the shower on a given parton level file """ 961 argss = self.split_arg(line) 962 (options, argss) = _launch_parser.parse_args(argss) 963 # check argument validity and normalise argument 964 options = options.__dict__ 965 options['reweightonly'] = False 966 self.check_shower(argss, options) 967 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe') 968 self.ask_run_configuration('onlyshower', options) 969 self.run_mcatnlo(evt_file) 970 971 self.update_status('', level='all', update_results=True)
972 973 ################################################################################
974 - def do_plot(self, line):
975 """Create the plot for a given run""" 976 977 # Since in principle, all plot are already done automaticaly 978 args = self.split_arg(line) 979 # Check argument's validity 980 self.check_plot(args) 981 logger.info('plot for run %s' % self.run_name) 982 983 if not self.force: 984 self.ask_edit_cards([], args, plot=True) 985 986 if any([arg in ['parton'] for arg in args]): 987 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe') 988 if os.path.exists(filename+'.gz'): 989 misc.gunzip(filename) 990 if os.path.exists(filename): 991 logger.info('Found events.lhe file for run %s' % self.run_name) 992 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe')) 993 self.create_plot('parton') 994 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename) 995 misc.gzip(filename) 996 997 if any([arg in ['all','parton'] for arg in args]): 998 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top') 999 if os.path.exists(filename): 1000 logger.info('Found MADatNLO.top file for run %s' % \ 1001 self.run_name) 1002 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html') 1003 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton') 1004 1005 if not os.path.isdir(plot_dir): 1006 os.makedirs(plot_dir) 1007 top_file = pjoin(plot_dir, 'plots.top') 1008 files.cp(filename, top_file) 1009 madir = self.options['madanalysis_path'] 1010 tag = self.run_card['run_tag'] 1011 td = self.options['td_path'] 1012 misc.call(['%s/plot' % self.dirbin, madir, td], 1013 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1014 stderr = subprocess.STDOUT, 1015 cwd=plot_dir) 1016 1017 misc.call(['%s/plot_page-pl' % self.dirbin, 1018 os.path.basename(plot_dir), 1019 'parton'], 1020 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1021 stderr = subprocess.STDOUT, 1022 cwd=pjoin(self.me_dir, 'HTML', self.run_name)) 1023 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'), 1024 output) 1025 1026 os.remove(pjoin(self.me_dir, 'Events', 'plots.top')) 1027 1028 if any([arg in ['all','shower'] for arg in args]): 1029 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 1030 'events_*.lhe.gz')) 1031 if len(filenames) != 1: 1032 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 1033 'events_*.hep.gz')) 1034 if len(filenames) != 1: 1035 logger.info('No shower level file found for run %s' % \ 1036 self.run_name) 1037 return 1038 filename = filenames[0] 1039 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1040 1041 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')): 1042 if aMCatNLO and not self.options['mg5_path']: 1043 raise "plotting NLO HEP file needs MG5 utilities" 1044 1045 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'), 1046 pjoin(self.me_dir, 'Cards', 'pythia_card.dat')) 1047 self.run_hep2lhe() 1048 else: 1049 filename = filenames[0] 1050 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1051 1052 self.create_plot('shower') 1053 lhe_file_name = filename.replace('.hep.gz', '.lhe') 1054 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'), 1055 lhe_file_name) 1056 misc.gzip(lhe_file_name) 1057 1058 if any([arg in ['all','pgs'] for arg in args]): 1059 filename = pjoin(self.me_dir, 'Events', self.run_name, 1060 '%s_pgs_events.lhco' % self.run_tag) 1061 if os.path.exists(filename+'.gz'): 1062 misc.gunzip(filename) 1063 if os.path.exists(filename): 1064 self.create_plot('PGS') 1065 misc.gzip(filename) 1066 else: 1067 logger.info('No valid files for pgs plot') 1068 1069 if any([arg in ['all','delphes'] for arg in args]): 1070 filename = pjoin(self.me_dir, 'Events', self.run_name, 1071 '%s_delphes_events.lhco' % self.run_tag) 1072 if os.path.exists(filename+'.gz'): 1073 misc.gunzip(filename) 1074 if os.path.exists(filename): 1075 #shutil.move(filename, pjoin(self.me_dir, 'Events','delphes_events.lhco')) 1076 self.create_plot('Delphes') 1077 #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename) 1078 misc.gzip(filename) 1079 else: 1080 logger.info('No valid files for delphes plot')
1081 1082 1083 ############################################################################
1084 - def do_calculate_xsect(self, line):
1085 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO 1086 this function wraps the do_launch one""" 1087 1088 self.start_time = time.time() 1089 argss = self.split_arg(line) 1090 # check argument validity and normalise argument 1091 (options, argss) = _calculate_xsect_parser.parse_args(argss) 1092 options = options.__dict__ 1093 options['reweightonly'] = False 1094 options['parton'] = True 1095 self.check_calculate_xsect(argss, options) 1096 self.do_launch(line, options, argss)
1097 1098 ############################################################################
1099 - def do_banner_run(self, line):
1100 """Make a run from the banner file""" 1101 1102 args = self.split_arg(line) 1103 #check the validity of the arguments 1104 self.check_banner_run(args) 1105 1106 # Remove previous cards 1107 for name in ['shower_card.dat', 'madspin_card.dat']: 1108 try: 1109 os.remove(pjoin(self.me_dir, 'Cards', name)) 1110 except Exception: 1111 pass 1112 1113 banner_mod.split_banner(args[0], self.me_dir, proc_card=False) 1114 1115 # Check if we want to modify the run 1116 if not self.force: 1117 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n']) 1118 if ans == 'n': 1119 self.force = True 1120 1121 # Compute run mode: 1122 if self.force: 1123 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True} 1124 banner = banner_mod.Banner(args[0]) 1125 for line in banner['run_settings']: 1126 if '=' in line: 1127 mode, value = [t.strip() for t in line.split('=')] 1128 mode_status[mode] = value 1129 else: 1130 mode_status = {} 1131 1132 # Call Generate events 1133 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''), 1134 switch=mode_status)
1135 1136 ############################################################################
1137 - def do_generate_events(self, line):
1138 """Main commands: generate events 1139 this function just wraps the do_launch one""" 1140 self.do_launch(line)
1141 1142 1143 ############################################################################
1144 - def do_treatcards(self, line, amcatnlo=True):
1145 """Advanced commands: this is for creating the correct run_card.inc from the nlo format""" 1146 #check if no 'Auto' are present in the file 1147 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat')) 1148 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1149 1150 ############################################################################
1151 - def set_configuration(self, amcatnlo=True, **opt):
1152 """assign all configuration variable from file 1153 loop over the different config file if config_file not define """ 1154 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1155 1156 ############################################################################
1157 - def do_launch(self, line, options={}, argss=[], switch={}):
1158 """Main commands: launch the full chain 1159 options and args are relevant if the function is called from other 1160 functions, such as generate_events or calculate_xsect 1161 mode gives the list of switch needed for the computation (usefull for banner_run) 1162 """ 1163 1164 if not argss and not options: 1165 self.start_time = time.time() 1166 argss = self.split_arg(line) 1167 # check argument validity and normalise argument 1168 (options, argss) = _launch_parser.parse_args(argss) 1169 options = options.__dict__ 1170 self.check_launch(argss, options) 1171 1172 1173 if 'run_name' in options.keys() and options['run_name']: 1174 self.run_name = options['run_name'] 1175 # if a dir with the given run_name already exists 1176 # remove it and warn the user 1177 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)): 1178 logger.warning('Removing old run information in \n'+ 1179 pjoin(self.me_dir, 'Events', self.run_name)) 1180 files.rm(pjoin(self.me_dir, 'Events', self.run_name)) 1181 self.results.delete_run(self.run_name) 1182 else: 1183 self.run_name = '' # will be set later 1184 1185 if options['multicore']: 1186 self.cluster_mode = 2 1187 elif options['cluster']: 1188 self.cluster_mode = 1 1189 1190 if not switch: 1191 mode = argss[0] 1192 1193 if mode in ['LO', 'NLO']: 1194 options['parton'] = True 1195 mode = self.ask_run_configuration(mode, options) 1196 else: 1197 mode = self.ask_run_configuration('auto', options, switch) 1198 1199 self.results.add_detail('run_mode', mode) 1200 1201 self.update_status('Starting run', level=None, update_results=True) 1202 1203 if self.options['automatic_html_opening']: 1204 misc.open_file(os.path.join(self.me_dir, 'crossx.html')) 1205 self.options['automatic_html_opening'] = False 1206 1207 if '+' in mode: 1208 mode = mode.split('+')[0] 1209 self.compile(mode, options) 1210 evt_file = self.run(mode, options) 1211 1212 if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']: 1213 logger.info('No event file generated: grids have been set-up with a '\ 1214 'relative precision of %s' % self.run_card['req_acc']) 1215 return 1216 1217 if not mode in ['LO', 'NLO']: 1218 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')) 1219 self.exec_cmd('reweight -from_cards', postcmd=False) 1220 self.exec_cmd('decay_events -from_cards', postcmd=False) 1221 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe') 1222 1223 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \ 1224 and not options['parton']: 1225 self.run_mcatnlo(evt_file) 1226 elif mode == 'noshower': 1227 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical. 1228 Please, shower the Les Houches events before using them for physics analyses.""") 1229 1230 1231 self.update_status('', level='all', update_results=True) 1232 if self.run_card['ickkw'] == 3 and mode in ['noshower', 'aMC@NLO']: 1233 logger.warning("""You are running with FxFx merging enabled. 1234 To be able to merge samples of various multiplicities without double counting, 1235 you have to remove some events after showering 'by hand'. 1236 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 1237 1238 1239 #check if the param_card defines a scan. 1240 if self.param_card_iterator: 1241 param_card_iterator = self.param_card_iterator 1242 self.param_card_iterator = [] #avoid to next generate go trough here 1243 param_card_iterator.store_entry(self.run_name, self.results.current['cross']) 1244 orig_name = self.run_name 1245 #go trough the scal 1246 with misc.TMP_variable(self, 'allow_notification_center', False): 1247 for i,card in enumerate(param_card_iterator): 1248 card.write(pjoin(self.me_dir,'Cards','param_card.dat')) 1249 if not options['force']: 1250 options['force'] = True 1251 if options['run_name']: 1252 options['run_name'] = '%s_%s' % (orig_name, i+1) 1253 if not argss: 1254 argss = [mode, "-f"] 1255 elif argss[0] == "auto": 1256 argss[0] = mode 1257 self.do_launch("", options=options, argss=argss, switch=switch) 1258 #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False) 1259 param_card_iterator.store_entry(self.run_name, self.results.current['cross']) 1260 #restore original param_card 1261 param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat')) 1262 name = misc.get_scan_name(orig_name, self.run_name) 1263 path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name) 1264 logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK') 1265 param_card_iterator.write_summary(path) 1266 1267 if self.allow_notification_center: 1268 misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir), 1269 '%s: %s +- %s ' % (self.results.current['run_name'], 1270 self.results.current['cross'], 1271 self.results.current['error']))
1272 1273 1274 ############################################################################
1275 - def do_compile(self, line):
1276 """Advanced commands: just compile the executables """ 1277 argss = self.split_arg(line) 1278 # check argument validity and normalise argument 1279 (options, argss) = _compile_parser.parse_args(argss) 1280 options = options.__dict__ 1281 options['reweightonly'] = False 1282 options['nocompile'] = False 1283 self.check_compile(argss, options) 1284 1285 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]] 1286 self.ask_run_configuration(mode, options) 1287 self.compile(mode, options) 1288 1289 1290 self.update_status('', level='all', update_results=True)
1291 1292
1293 - def update_random_seed(self):
1294 """Update random number seed with the value from the run_card. 1295 If this is 0, update the number according to a fresh one""" 1296 iseed = self.run_card['iseed'] 1297 if iseed == 0: 1298 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit')) 1299 iseed = int(randinit.read()[2:]) + 1 1300 randinit.close() 1301 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w') 1302 randinit.write('r=%d' % iseed) 1303 randinit.close()
1304 1305
1306 - def run(self, mode, options):
1307 """runs aMC@NLO. Returns the name of the event file created""" 1308 logger.info('Starting run') 1309 1310 if not 'only_generation' in options.keys(): 1311 options['only_generation'] = False 1312 1313 # for second step in applgrid mode, do only the event generation step 1314 if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']: 1315 options['only_generation'] = True 1316 self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics')) 1317 self.setup_cluster_or_multicore() 1318 self.update_random_seed() 1319 #find and keep track of all the jobs 1320 folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'], 1321 'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']} 1322 folder_names['noshower'] = folder_names['aMC@NLO'] 1323 folder_names['noshowerLO'] = folder_names['aMC@LO'] 1324 p_dirs = [d for d in \ 1325 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 1326 #Clean previous results 1327 self.clean_previous_results(options,p_dirs,folder_names[mode]) 1328 1329 mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events'] 1330 1331 1332 if options['reweightonly']: 1333 event_norm=self.run_card['event_norm'] 1334 nevents=self.run_card['nevents'] 1335 return self.reweight_and_collect_events(options, mode, nevents, event_norm) 1336 1337 devnull = os.open(os.devnull, os.O_RDWR) 1338 1339 if mode in ['LO', 'NLO']: 1340 # this is for fixed order runs 1341 mode_dict = {'NLO': 'all', 'LO': 'born'} 1342 logger.info('Doing fixed order %s' % mode) 1343 req_acc = self.run_card['req_acc_FO'] 1344 1345 # Re-distribute the grids for the 2nd step of the applgrid 1346 # running 1347 if self.run_card['iappl'] == 2: 1348 self.applgrid_distribute(options,mode_dict[mode],p_dirs) 1349 1350 # create a list of dictionaries "jobs_to_run" with all the 1351 # jobs that need to be run 1352 integration_step=-1 1353 jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \ 1354 req_acc,mode_dict[mode],integration_step,mode,fixed_order=True) 1355 self.prepare_directories(jobs_to_run,mode) 1356 1357 # loop over the integration steps. After every step, check 1358 # if we have the required accuracy. If this is the case, 1359 # stop running, else do another step. 1360 while True: 1361 integration_step=integration_step+1 1362 self.run_all_jobs(jobs_to_run,integration_step) 1363 self.collect_log_files(jobs_to_run,integration_step) 1364 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1365 jobs_to_collect,integration_step,mode,mode_dict[mode]) 1366 if not jobs_to_run: 1367 # there are no more jobs to run (jobs_to_run is empty) 1368 break 1369 # We are done. 1370 self.finalise_run_FO(folder_names[mode],jobs_to_collect) 1371 self.update_status('Run complete', level='parton', update_results=True) 1372 return 1373 1374 elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']: 1375 if self.ninitial == 1: 1376 raise aMCatNLOError('Decay processes can only be run at fixed order.') 1377 mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\ 1378 'noshower': 'all', 'noshowerLO': 'born'} 1379 shower = self.run_card['parton_shower'].upper() 1380 nevents = self.run_card['nevents'] 1381 req_acc = self.run_card['req_acc'] 1382 if nevents == 0 and req_acc < 0 : 1383 raise aMCatNLOError('Cannot determine the required accuracy from the number '\ 1384 'of events, because 0 events requested. Please set '\ 1385 'the "req_acc" parameter in the run_card to a value '\ 1386 'between 0 and 1') 1387 elif req_acc >1 or req_acc == 0 : 1388 raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\ 1389 'be between larger than 0 and smaller than 1, '\ 1390 'or set to -1 for automatic determination. Current '\ 1391 'value is %f' % req_acc) 1392 # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card) 1393 elif req_acc < 0 and nevents > 1000000 : 1394 req_acc=0.001 1395 1396 shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8'] 1397 1398 if not shower in shower_list: 1399 raise aMCatNLOError('%s is not a valid parton shower. '\ 1400 'Please use one of the following: %s' \ 1401 % (shower, ', '.join(shower_list))) 1402 1403 # check that PYTHIA6PT is not used for processes with FSR 1404 if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']: 1405 raise aMCatNLOError('PYTHIA6PT does not support processes with FSR') 1406 1407 if mode in ['aMC@NLO', 'aMC@LO']: 1408 logger.info('Doing %s matched to parton shower' % mode[4:]) 1409 elif mode in ['noshower','noshowerLO']: 1410 logger.info('Generating events without running the shower.') 1411 elif options['only_generation']: 1412 logger.info('Generating events starting from existing results') 1413 1414 jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \ 1415 req_acc,mode_dict[mode],1,mode,fixed_order=False) 1416 1417 # Make sure to update all the jobs to be ready for the event generation step 1418 if options['only_generation']: 1419 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1420 jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False) 1421 else: 1422 self.prepare_directories(jobs_to_run,mode,fixed_order=False) 1423 1424 1425 # Main loop over the three MINT generation steps: 1426 for mint_step, status in enumerate(mcatnlo_status): 1427 if options['only_generation'] and mint_step < 2: 1428 continue 1429 self.update_status(status, level='parton') 1430 self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False) 1431 self.collect_log_files(jobs_to_run,mint_step) 1432 if mint_step+1==2 and nevents==0: 1433 self.print_summary(options,2,mode) 1434 return 1435 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1436 jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False) 1437 # Sanity check on the event files. If error the jobs are resubmitted 1438 self.check_event_files(jobs_to_collect) 1439 1440 if self.cluster_mode == 1: 1441 #if cluster run, wait 10 sec so that event files are transferred back 1442 self.update_status( 1443 'Waiting while files are transferred back from the cluster nodes', 1444 level='parton') 1445 time.sleep(10) 1446 1447 event_norm=self.run_card['event_norm'] 1448 return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1449
1450 - def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\ 1451 integration_step,mode,fixed_order=True):
1452 """Creates a list of dictionaries with all the jobs to be run""" 1453 jobs_to_run=[] 1454 if not options['only_generation']: 1455 # Fresh, new run. Check all the P*/channels.txt files 1456 # (created by the 'gensym' executable) to set-up all the 1457 # jobs using the default inputs. 1458 npoints = self.run_card['npoints_FO_grid'] 1459 niters = self.run_card['niters_FO_grid'] 1460 for p_dir in p_dirs: 1461 try: 1462 with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file: 1463 channels=chan_file.readline().split() 1464 except IOError: 1465 logger.warning('No integration channels found for contribution %s' % p_dir) 1466 continue 1467 for channel in channels: 1468 job={} 1469 job['p_dir']=p_dir 1470 job['channel']=channel 1471 job['split']=0 1472 if fixed_order and req_acc == -1: 1473 job['accuracy']=0 1474 job['niters']=niters 1475 job['npoints']=npoints 1476 elif fixed_order and req_acc > 0: 1477 job['accuracy']=0.10 1478 job['niters']=6 1479 job['npoints']=-1 1480 elif not fixed_order: 1481 job['accuracy']=0.03 1482 job['niters']=12 1483 job['npoints']=-1 1484 else: 1485 raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+ 1486 'between 0 and 1 or set it equal to -1.') 1487 job['mint_mode']=0 1488 job['run_mode']=run_mode 1489 job['wgt_frac']=1.0 1490 jobs_to_run.append(job) 1491 jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs 1492 else: 1493 # if options['only_generation'] is true, we need to loop 1494 # over all the existing G* directories and create the jobs 1495 # from there. 1496 name_suffix={'born' :'B', 'all':'F'} 1497 for p_dir in p_dirs: 1498 for chan_dir in os.listdir(pjoin(self.me_dir,'SubProcesses',p_dir)): 1499 if ((chan_dir.startswith(run_mode+'_G') and fixed_order) or\ 1500 (chan_dir.startswith('G'+name_suffix[run_mode]) and (not fixed_order))) and \ 1501 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)) or \ 1502 os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir))): 1503 job={} 1504 job['p_dir']=p_dir 1505 if fixed_order: 1506 channel=chan_dir.split('_')[1] 1507 job['channel']=channel[1:] # remove the 'G' 1508 if len(chan_dir.split('_')) == 3: 1509 split=int(chan_dir.split('_')[2]) 1510 else: 1511 split=0 1512 else: 1513 if len(chan_dir.split('_')) == 2: 1514 split=int(chan_dir.split('_')[1]) 1515 channel=chan_dir.split('_')[0] 1516 job['channel']=channel[2:] # remove the 'G' 1517 else: 1518 job['channel']=chan_dir[2:] # remove the 'G' 1519 split=0 1520 job['split']=split 1521 job['run_mode']=run_mode 1522 job['dirname']=pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir) 1523 job['wgt_frac']=1.0 1524 if not fixed_order: job['mint_mode']=1 1525 jobs_to_run.append(job) 1526 jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs 1527 if fixed_order: 1528 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, 1529 jobs_to_collect,integration_step,mode,run_mode) 1530 # Update the integration_step to make sure that nothing will be overwritten 1531 integration_step=1 1532 for job in jobs_to_run: 1533 while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)): 1534 integration_step=integration_step+1 1535 integration_step=integration_step-1 1536 else: 1537 self.append_the_results(jobs_to_collect,integration_step) 1538 return jobs_to_run,jobs_to_collect,integration_step
1539
1540 - def prepare_directories(self,jobs_to_run,mode,fixed_order=True):
1541 """Set-up the G* directories for running""" 1542 name_suffix={'born' :'B' , 'all':'F'} 1543 for job in jobs_to_run: 1544 if job['split'] == 0: 1545 if fixed_order : 1546 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1547 job['run_mode']+'_G'+job['channel']) 1548 else: 1549 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1550 'G'+name_suffix[job['run_mode']]+job['channel']) 1551 else: 1552 if fixed_order : 1553 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1554 job['run_mode']+'_G'+job['channel']+'_'+str(job['split'])) 1555 else: 1556 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1557 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split'])) 1558 job['dirname']=dirname 1559 if not os.path.isdir(dirname): 1560 os.makedirs(dirname) 1561 self.write_input_file(job,fixed_order) 1562 if not fixed_order: 1563 # copy the grids from the base directory to the split directory: 1564 if job['split'] != 0: 1565 for f in ['grid.MC_integer','mint_grids','res_1']: 1566 if not os.path.isfile(pjoin(job['dirname'],f)): 1567 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
1568 1569
1570 - def write_input_file(self,job,fixed_order):
1571 """write the input file for the madevent_mint* executable in the appropriate directory""" 1572 if fixed_order: 1573 content= \ 1574 """NPOINTS = %(npoints)s 1575 NITERATIONS = %(niters)s 1576 ACCURACY = %(accuracy)s 1577 ADAPT_GRID = 2 1578 MULTICHANNEL = 1 1579 SUM_HELICITY = 1 1580 CHANNEL = %(channel)s 1581 SPLIT = %(split)s 1582 RUN_MODE = %(run_mode)s 1583 RESTART = %(mint_mode)s 1584 """ \ 1585 % job 1586 else: 1587 content = \ 1588 """-1 12 ! points, iterations 1589 %(accuracy)s ! desired fractional accuracy 1590 1 -0.1 ! alpha, beta for Gsoft 1591 -1 -0.1 ! alpha, beta for Gazi 1592 1 ! Suppress amplitude (0 no, 1 yes)? 1593 1 ! Exact helicity sum (0 yes, n = number/event)? 1594 %(channel)s ! Enter Configuration Number: 1595 %(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events 1596 1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij 1597 %(run_mode)s ! all, born, real, virt 1598 """ \ 1599 % job 1600 with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file: 1601 input_file.write(content)
1602 1603
1604 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
1605 """Loops over the jobs_to_run and executes them using the function 'run_exe'""" 1606 if fixed_order: 1607 if integration_step == 0: 1608 self.update_status('Setting up grids', level=None) 1609 else: 1610 self.update_status('Refining results, step %i' % integration_step, level=None) 1611 self.ijob = 0 1612 name_suffix={'born' :'B', 'all':'F'} 1613 if fixed_order: 1614 run_type="Fixed order integration step %s" % integration_step 1615 else: 1616 run_type="MINT step %s" % integration_step 1617 for job in jobs_to_run: 1618 executable='ajob1' 1619 if fixed_order: 1620 arguments=[job['channel'],job['run_mode'], \ 1621 str(job['split']),str(integration_step)] 1622 else: 1623 arguments=[job['channel'],name_suffix[job['run_mode']], \ 1624 str(job['split']),str(integration_step)] 1625 self.run_exe(executable,arguments,run_type, 1626 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir'])) 1627 1628 if self.cluster_mode == 2: 1629 time.sleep(1) # security to allow all jobs to be launched 1630 self.njobs=len(jobs_to_run) 1631 self.wait_for_complete(run_type)
1632 1633
1634 - def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\ 1635 integration_step,mode,run_mode,fixed_order=True):
1636 """Collect the results, make HTML pages, print the summary and 1637 determine if there are more jobs to run. Returns the list 1638 of the jobs that still need to be run, as well as the 1639 complete list of jobs that need to be collected to get the 1640 final answer. 1641 """ 1642 # Get the results of the current integration/MINT step 1643 self.append_the_results(jobs_to_run,integration_step) 1644 self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step) 1645 # Update HTML pages 1646 if fixed_order: 1647 cross, error = sum_html.make_all_html_results(self, ['%s*' % run_mode]) 1648 else: 1649 name_suffix={'born' :'B' , 'all':'F'} 1650 cross, error = sum_html.make_all_html_results(self, ['G%s*' % name_suffix[run_mode]]) 1651 self.results.add_detail('cross', cross) 1652 self.results.add_detail('error', error) 1653 # Set-up jobs for the next iteration/MINT step 1654 jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order) 1655 # if there are no more jobs, we are done! 1656 # Print summary 1657 if (not jobs_to_run_new) and fixed_order: 1658 # print final summary of results (for fixed order) 1659 scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect) 1660 self.print_summary(options,integration_step,mode,scale_pdf_info,done=True) 1661 return jobs_to_run_new,jobs_to_collect 1662 elif jobs_to_run_new: 1663 # print intermediate summary of results 1664 scale_pdf_info={} 1665 self.print_summary(options,integration_step,mode,scale_pdf_info,done=False) 1666 else: 1667 # When we are done for (N)LO+PS runs, do not print 1668 # anything yet. This will be done after the reweighting 1669 # and collection of the events 1670 scale_pdf_info={} 1671 # Prepare for the next integration/MINT step 1672 if (not fixed_order) and integration_step+1 == 2 : 1673 # next step is event generation (mint_step 2) 1674 jobs_to_run_new,jobs_to_collect_new= \ 1675 self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect) 1676 self.prepare_directories(jobs_to_run_new,mode,fixed_order) 1677 self.write_nevents_unweighted_file(jobs_to_collect_new) 1678 self.write_nevts_files(jobs_to_run_new) 1679 else: 1680 self.prepare_directories(jobs_to_run_new,mode,fixed_order) 1681 jobs_to_collect_new=jobs_to_collect 1682 return jobs_to_run_new,jobs_to_collect_new
1683 1684
1685 - def write_nevents_unweighted_file(self,jobs):
1686 """writes the nevents_unweighted file in the SubProcesses directory""" 1687 content=[] 1688 for job in jobs: 1689 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 1690 lhefile=pjoin(path,'events.lhe') 1691 content.append(' %s %d %9e %9e' % \ 1692 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac'])) 1693 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f: 1694 f.write('\n'.join(content)+'\n')
1695
1696 - def write_nevts_files(self,jobs):
1697 """write the nevts files in the SubProcesses/P*/G*/ directories""" 1698 for job in jobs: 1699 with open(pjoin(job['dirname'],'nevts'),'w') as f: 1700 f.write('%i\n' % job['nevents'])
1701
1702 - def check_the_need_to_split(self,jobs_to_run,jobs_to_collect):
1703 """Looks in the jobs_to_run to see if there is the need to split the 1704 event generation step. Updates jobs_to_run and 1705 jobs_to_collect to replace the split-job by its 1706 splits. Also removes jobs that do not need any events. 1707 """ 1708 nevt_job=self.run_card['nevt_job'] 1709 if nevt_job > 0: 1710 jobs_to_collect_new=copy.copy(jobs_to_collect) 1711 for job in jobs_to_run: 1712 nevents=job['nevents'] 1713 if nevents == 0: 1714 jobs_to_collect_new.remove(job) 1715 elif nevents > nevt_job: 1716 jobs_to_collect_new.remove(job) 1717 if nevents % nevt_job != 0 : 1718 nsplit=int(nevents/nevt_job)+1 1719 else: 1720 nsplit=int(nevents/nevt_job) 1721 for i in range(1,nsplit+1): 1722 job_new=copy.copy(job) 1723 left_over=nevents % nsplit 1724 if i <= left_over: 1725 job_new['nevents']=int(nevents/nsplit)+1 1726 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 1727 else: 1728 job_new['nevents']=int(nevents/nsplit) 1729 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 1730 job_new['split']=i 1731 job_new['dirname']=job['dirname']+'_%i' % job_new['split'] 1732 jobs_to_collect_new.append(job_new) 1733 jobs_to_run_new=copy.copy(jobs_to_collect_new) 1734 else: 1735 jobs_to_run_new=copy.copy(jobs_to_collect) 1736 for job in jobs_to_collect: 1737 if job['nevents'] == 0: 1738 jobs_to_run_new.remove(job) 1739 jobs_to_collect_new=copy.copy(jobs_to_run_new) 1740 1741 return jobs_to_run_new,jobs_to_collect_new
1742 1743
1744 - def update_jobs_to_run(self,req_acc,step,jobs,fixed_order=True):
1745 """ 1746 For (N)LO+PS: determines the number of events and/or the required 1747 accuracy per job. 1748 For fixed order: determines which jobs need higher precision and 1749 returns those with the newly requested precision. 1750 """ 1751 err=self.cross_sect_dict['errt'] 1752 tot=self.cross_sect_dict['xsect'] 1753 errABS=self.cross_sect_dict['erra'] 1754 totABS=self.cross_sect_dict['xseca'] 1755 jobs_new=[] 1756 if fixed_order: 1757 if req_acc == -1: 1758 if step+1 == 1: 1759 npoints = self.run_card['npoints_FO'] 1760 niters = self.run_card['niters_FO'] 1761 for job in jobs: 1762 job['mint_mode']=-1 1763 job['niters']=niters 1764 job['npoints']=npoints 1765 jobs_new.append(job) 1766 elif step+1 == 2: 1767 pass 1768 elif step+1 > 2: 1769 raise aMCatNLOError('Cannot determine number of iterations and PS points '+ 1770 'for integration step %i' % step ) 1771 elif ( req_acc > 0 and err/tot > req_acc*1.2 ) or step == 0: 1772 req_accABS=req_acc*abs(tot)/totABS # overal relative required accuracy on ABS Xsec. 1773 for job in jobs: 1774 job['mint_mode']=-1 1775 # Determine relative required accuracy on the ABS for this job 1776 job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS']) 1777 # If already accurate enough, skip running 1778 if job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0: 1779 continue 1780 # Update the number of PS points based on errorABS, ncall and accuracy 1781 itmax_fl=job['niters_done']*math.pow(job['errorABS']/ 1782 (job['accuracy']*job['resultABS']),2) 1783 if itmax_fl <= 4.0 : 1784 job['niters']=max(int(round(itmax_fl)),2) 1785 job['npoints']=job['npoints_done']*2 1786 elif itmax_fl > 4.0 and itmax_fl <= 16.0 : 1787 job['niters']=4 1788 job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2 1789 else: 1790 if itmax_fl > 100.0 : itmax_fl=50.0 1791 job['niters']=int(round(math.sqrt(itmax_fl))) 1792 job['npoints']=int(round(job['npoints_done']*itmax_fl/ 1793 round(math.sqrt(itmax_fl))))*2 1794 # Add the job to the list of jobs that need to be run 1795 jobs_new.append(job) 1796 return jobs_new 1797 elif step+1 <= 2: 1798 nevents=self.run_card['nevents'] 1799 # Total required accuracy for the upper bounding envelope 1800 if req_acc<0: 1801 req_acc2_inv=nevents 1802 else: 1803 req_acc2_inv=1/(req_acc*req_acc) 1804 if step+1 == 1 or step+1 == 2 : 1805 # determine the req. accuracy for each of the jobs for Mint-step = 1 1806 for job in jobs: 1807 accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2) 1808 job['accuracy']=accuracy 1809 if step+1 == 2: 1810 # Randomly (based on the relative ABS Xsec of the job) determine the 1811 # number of events each job needs to generate for MINT-step = 2. 1812 r=self.get_randinit_seed() 1813 random.seed(r) 1814 totevts=nevents 1815 for job in jobs: 1816 job['nevents'] = 0 1817 while totevts : 1818 target = random.random() * totABS 1819 crosssum = 0. 1820 i = 0 1821 while i<len(jobs) and crosssum < target: 1822 job = jobs[i] 1823 crosssum += job['resultABS'] 1824 i += 1 1825 totevts -= 1 1826 i -= 1 1827 jobs[i]['nevents'] += 1 1828 for job in jobs: 1829 job['mint_mode']=step+1 # next step 1830 return jobs 1831 else: 1832 return []
1833 1834
1835 - def get_randinit_seed(self):
1836 """ Get the random number seed from the randinit file """ 1837 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit: 1838 # format of the file is "r=%d". 1839 iseed = int(randinit.read()[2:]) 1840 return iseed
1841 1842
1843 - def append_the_results(self,jobs,integration_step):
1844 """Appends the results for each of the jobs in the job list""" 1845 error_found=False 1846 for job in jobs: 1847 try: 1848 if integration_step >= 0 : 1849 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file: 1850 results=res_file.readline().split() 1851 else: 1852 # should only be here when doing fixed order with the 'only_generation' 1853 # option equal to True. Take the results from the final run done. 1854 with open(pjoin(job['dirname'],'res.dat')) as res_file: 1855 results=res_file.readline().split() 1856 except IOError: 1857 if not error_found: 1858 error_found=True 1859 error_log=[] 1860 error_log.append(pjoin(job['dirname'],'log.txt')) 1861 continue 1862 job['resultABS']=float(results[0]) 1863 job['errorABS']=float(results[1]) 1864 job['result']=float(results[2]) 1865 job['error']=float(results[3]) 1866 job['niters_done']=int(results[4]) 1867 job['npoints_done']=int(results[5]) 1868 job['time_spend']=float(results[6]) 1869 job['err_percABS'] = job['errorABS']/job['resultABS']*100. 1870 job['err_perc'] = job['error']/job['result']*100. 1871 if error_found: 1872 raise aMCatNLOError('An error occurred during the collection of results.\n' + 1873 'Please check the .log files inside the directories which failed:\n' + 1874 '\n'.join(error_log)+'\n')
1875 1876 1877
1878 - def write_res_txt_file(self,jobs,integration_step):
1879 """writes the res.txt files in the SubProcess dir""" 1880 jobs.sort(key = lambda job: -job['errorABS']) 1881 content=[] 1882 content.append('\n\nCross-section per integration channel:') 1883 for job in jobs: 1884 content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job) 1885 content.append('\n\nABS cross-section per integration channel:') 1886 for job in jobs: 1887 content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job) 1888 totABS=0 1889 errABS=0 1890 tot=0 1891 err=0 1892 for job in jobs: 1893 totABS+= job['resultABS'] 1894 errABS+= math.pow(job['errorABS'],2) 1895 tot+= job['result'] 1896 err+= math.pow(job['error'],2) 1897 if jobs: 1898 content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\ 1899 (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\ 1900 tot, math.sqrt(err), math.sqrt(err)/tot *100.)) 1901 with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file: 1902 res_file.write('\n'.join(content)) 1903 randinit=self.get_randinit_seed() 1904 return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\ 1905 'erra':math.sqrt(errABS),'randinit':randinit}
1906 1907
1908 - def collect_scale_pdf_info(self,options,jobs):
1909 """read the scale_pdf_dependence.dat files and collects there results""" 1910 scale_pdf_info={} 1911 if self.run_card['reweight_scale'] or self.run_card['reweight_PDF']: 1912 data_files=[] 1913 for job in jobs: 1914 data_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat')) 1915 scale_pdf_info = self.pdf_scale_from_reweighting(data_files) 1916 return scale_pdf_info
1917 1918
1919 - def combine_plots_FO(self,folder_name,jobs):
1920 """combines the plots and puts then in the Events/run* directory""" 1921 devnull = os.open(os.devnull, os.O_RDWR) 1922 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer': 1923 misc.call(['./combine_plots_FO.sh'] + folder_name, \ 1924 stdout=devnull, 1925 cwd=pjoin(self.me_dir, 'SubProcesses')) 1926 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'), 1927 pjoin(self.me_dir, 'Events', self.run_name)) 1928 logger.info('The results of this run and the TopDrawer file with the plots' + \ 1929 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 1930 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu': 1931 self.combine_plots_HwU(jobs) 1932 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.HwU'), 1933 pjoin(self.me_dir, 'Events', self.run_name)) 1934 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.gnuplot'), 1935 pjoin(self.me_dir, 'Events', self.run_name)) 1936 try: 1937 misc.call(['gnuplot','MADatNLO.gnuplot'],\ 1938 stdout=devnull,stderr=devnull,\ 1939 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 1940 except Exception: 1941 pass 1942 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \ 1943 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 1944 elif self.analyse_card['fo_analysis_format'].lower() == 'root': 1945 misc.call(['./combine_root.sh'] + folder_name, \ 1946 stdout=devnull, 1947 cwd=pjoin(self.me_dir, 'SubProcesses')) 1948 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'), 1949 pjoin(self.me_dir, 'Events', self.run_name)) 1950 logger.info('The results of this run and the ROOT file with the plots' + \ 1951 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 1952 else: 1953 logger.info('The results of this run' + \ 1954 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1955 1956
1957 - def combine_plots_HwU(self,jobs):
1958 """Sums all the plots in the HwU format.""" 1959 logger.debug('Combining HwU plots.') 1960 all_histo_paths=[] 1961 for job in jobs: 1962 all_histo_paths.append(pjoin(job['dirname'],"MADatNLO.HwU")) 1963 histogram_list = histograms.HwUList(all_histo_paths[0]) 1964 for histo_path in all_histo_paths[1:]: 1965 for i, histo in enumerate(histograms.HwUList(histo_path)): 1966 # First make sure the plots have the same weight labels and such 1967 histo.test_plot_compability(histogram_list[i]) 1968 # Now let the histogram module do the magic and add them. 1969 histogram_list[i] += histo 1970 1971 # And now output the finalized list 1972 histogram_list.output(pjoin(self.me_dir,'SubProcesses',"MADatNLO"), 1973 format = 'gnuplot')
1974
1975 - def applgrid_combine(self,cross,error,jobs):
1976 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories""" 1977 logger.debug('Combining APPLgrids \n') 1978 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'), 1979 'applgrid-combine') 1980 all_jobs=[] 1981 for job in jobs: 1982 all_jobs.append(job['dirname']) 1983 ngrids=len(all_jobs) 1984 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")]) 1985 for obs in range(0,nobs): 1986 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs] 1987 # combine APPLgrids from different channels for observable 'obs' 1988 if self.run_card["iappl"] == 1: 1989 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name, 1990 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir) 1991 elif self.run_card["iappl"] == 2: 1992 unc2_inv=pow(cross/error,2) 1993 unc2_inv_ngrids=pow(cross/error,2)*ngrids 1994 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events", 1995 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s', 1996 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir) 1997 for job in all_jobs: 1998 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root")) 1999 else: 2000 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2') 2001 # after combining, delete the original grids 2002 for ggdir in gdir: 2003 os.remove(ggdir)
2004 2005
2006 - def applgrid_distribute(self,options,mode,p_dirs):
2007 """Distributes the APPLgrids ready to be filled by a second run of the code""" 2008 # if no appl_start_grid argument given, guess it from the time stamps 2009 # of the starting grid files 2010 if not('appl_start_grid' in options.keys() and options['appl_start_grid']): 2011 gfiles=glob.glob(pjoin(self.me_dir, 'Events','*', 2012 'aMCfast_obs_0_starting_grid.root')) 2013 time_stamps={} 2014 for root_file in gfiles: 2015 time_stamps[root_file]=os.path.getmtime(root_file) 2016 options['appl_start_grid']= \ 2017 max(time_stamps.iterkeys(), key=(lambda key: 2018 time_stamps[key])).split('/')[-2] 2019 logger.info('No --appl_start_grid option given. '+\ 2020 'Guessing that start grid from run "%s" should be used.' \ 2021 % options['appl_start_grid']) 2022 2023 if 'appl_start_grid' in options.keys() and options['appl_start_grid']: 2024 self.appl_start_grid = options['appl_start_grid'] 2025 start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid) 2026 # check that this dir exists and at least one grid file is there 2027 if not os.path.exists(pjoin(start_grid_dir, 2028 'aMCfast_obs_0_starting_grid.root')): 2029 raise self.InvalidCmd('APPLgrid file not found: %s' % \ 2030 pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root')) 2031 else: 2032 all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \ 2033 start_grid_dir) if name.endswith("_starting_grid.root")] 2034 nobs =len(all_grids) 2035 gstring=" ".join(all_grids) 2036 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid: 2037 raise self.InvalidCmd('No APPLgrid name currently defined.'+ 2038 'Please provide this information.') 2039 #copy the grid to all relevant directories 2040 for pdir in p_dirs: 2041 g_dirs = [file for file in os.listdir(pjoin(self.me_dir, 2042 "SubProcesses",pdir)) if file.startswith(mode+'_G') and 2043 os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))] 2044 for g_dir in g_dirs: 2045 for grid in all_grids: 2046 obs=grid.split('_')[-3] 2047 files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir, 2048 'grid_obs_'+obs+'_in.root'))
2049 2050 2051 2052
2053 - def collect_log_files(self, jobs, integration_step):
2054 """collect the log files and put them in a single, html-friendly file 2055 inside the Events/run_.../ directory""" 2056 log_file = pjoin(self.me_dir, 'Events', self.run_name, 2057 'alllogs_%d.html' % integration_step) 2058 outfile = open(log_file, 'w') 2059 2060 content = '' 2061 content += '<HTML><BODY>\n<font face="courier" size=2>' 2062 for job in jobs: 2063 # put an anchor 2064 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step) 2065 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace( 2066 pjoin(self.me_dir,'SubProcesses'),'')) 2067 # and put some nice header 2068 content += '<font color="red">\n' 2069 content += '<br>LOG file for integration channel %s, %s <br>' % \ 2070 (os.path.dirname(log).replace(pjoin(self.me_dir, 2071 'SubProcesses'), ''), 2072 integration_step) 2073 content += '</font>\n' 2074 #then just flush the content of the small log inside the big log 2075 #the PRE tag prints everything verbatim 2076 content += '<PRE>\n' + open(log).read() + '\n</PRE>' 2077 content +='<br>\n' 2078 outfile.write(content) 2079 content='' 2080 2081 outfile.write('</font>\n</BODY></HTML>\n') 2082 outfile.close()
2083 2084
2085 - def finalise_run_FO(self,folder_name,jobs):
2086 """Combine the plots and put the res*.txt files in the Events/run.../ folder.""" 2087 # Copy the res_*.txt files to the Events/run* folder 2088 res_files=glob.glob(pjoin(self.me_dir, 'SubProcesses', 'res_*.txt')) 2089 for res_file in res_files: 2090 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2091 # Collect the plots and put them in the Events/run* folder 2092 self.combine_plots_FO(folder_name,jobs) 2093 # If doing the applgrid-stuff, also combine those grids 2094 # and put those in the Events/run* folder 2095 if self.run_card['iappl'] != 0: 2096 cross=self.cross_sect_dict['xsect'] 2097 error=self.cross_sect_dict['errt'] 2098 self.applgrid_combine(cross,error,jobs)
2099 2100
2101 - def setup_cluster_or_multicore(self):
2102 """setup the number of cores for multicore, and the cluster-type for cluster runs""" 2103 if self.cluster_mode == 1: 2104 cluster_name = self.options['cluster_type'] 2105 self.cluster = cluster.from_name[cluster_name](**self.options) 2106 if self.cluster_mode == 2: 2107 try: 2108 import multiprocessing 2109 if not self.nb_core: 2110 try: 2111 self.nb_core = int(self.options['nb_core']) 2112 except TypeError: 2113 self.nb_core = multiprocessing.cpu_count() 2114 logger.info('Using %d cores' % self.nb_core) 2115 except ImportError: 2116 self.nb_core = 1 2117 logger.warning('Impossible to detect the number of cores => Using One.\n'+ 2118 'Use set nb_core X in order to set this number and be able to'+ 2119 'run in multicore.') 2120 2121 self.cluster = cluster.MultiCore(**self.options)
2122 2123
2124 - def clean_previous_results(self,options,p_dirs,folder_name):
2125 """Clean previous results. 2126 o. If doing only the reweighting step, do not delete anything and return directlty. 2127 o. Always remove all the G*_* files (from split event generation). 2128 o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only.""" 2129 if options['reweightonly']: 2130 return 2131 if not options['only_generation']: 2132 self.update_status('Cleaning previous results', level=None) 2133 for dir in p_dirs: 2134 #find old folders to be removed 2135 for obj in folder_name: 2136 # list all the G* (or all_G* or born_G*) directories 2137 to_rm = [file for file in \ 2138 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 2139 if file.startswith(obj[:-1]) and \ 2140 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 2141 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 2142 # list all the G*_* directories (from split event generation) 2143 to_always_rm = [file for file in \ 2144 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 2145 if file.startswith(obj[:-1]) and 2146 '_' in file and not '_G' in file and \ 2147 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 2148 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 2149 2150 if not options['only_generation']: 2151 to_always_rm.extend(to_rm) 2152 if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')): 2153 to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')) 2154 files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm]) 2155 return
2156 2157
2158 - def print_summary(self, options, step, mode, scale_pdf_info={}, done=True):
2159 """print a summary of the results contained in self.cross_sect_dict. 2160 step corresponds to the mintMC step, if =2 (i.e. after event generation) 2161 some additional infos are printed""" 2162 # find process name 2163 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n') 2164 process = '' 2165 for line in proc_card_lines: 2166 if line.startswith('generate') or line.startswith('add process'): 2167 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; ' 2168 lpp = {0:'l', 1:'p', -1:'pbar'} 2169 if self.ninitial == 1: 2170 proc_info = '\n Process %s' % process[:-3] 2171 else: 2172 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \ 2173 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']], 2174 self.run_card['ebeam1'], self.run_card['ebeam2']) 2175 2176 if self.ninitial == 1: 2177 self.cross_sect_dict['unit']='GeV' 2178 self.cross_sect_dict['xsec_string']='(Partial) decay width' 2179 self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)' 2180 else: 2181 self.cross_sect_dict['unit']='pb' 2182 self.cross_sect_dict['xsec_string']='Total cross-section' 2183 self.cross_sect_dict['axsec_string']='Total abs(cross-section)' 2184 # Gather some basic statistics for the run and extracted from the log files. 2185 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 2186 log_GV_files = glob.glob(pjoin(self.me_dir, \ 2187 'SubProcesses', 'P*','G*','log_MINT*.txt')) 2188 all_log_files = log_GV_files 2189 elif mode == 'NLO': 2190 log_GV_files = glob.glob(pjoin(self.me_dir, \ 2191 'SubProcesses', 'P*','all_G*','log_MINT*.txt')) 2192 all_log_files = log_GV_files 2193 2194 elif mode == 'LO': 2195 log_GV_files = '' 2196 all_log_files = glob.glob(pjoin(self.me_dir, \ 2197 'SubProcesses', 'P*','born_G*','log_MINT*.txt')) 2198 else: 2199 raise aMCatNLOError, 'Running mode %s not supported.'%mode 2200 2201 2202 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 2203 status = ['Determining the number of unweighted events per channel', 2204 'Updating the number of unweighted events per channel', 2205 'Summary:'] 2206 if step != 2: 2207 message = status[step] + '\n\n Intermediate results:' + \ 2208 ('\n Random seed: %(randinit)d' + \ 2209 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \ 2210 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \ 2211 % self.cross_sect_dict 2212 else: 2213 2214 message = '\n ' + status[step] + proc_info + \ 2215 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2216 self.cross_sect_dict 2217 2218 if self.run_card['nevents']>=10000 and self.run_card['reweight_scale']: 2219 message = message + \ 2220 ('\n Ren. and fac. scale uncertainty: +%0.1f%% -%0.1f%%') % \ 2221 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low']) 2222 if self.run_card['nevents']>=10000 and self.run_card['reweight_PDF']: 2223 message = message + \ 2224 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \ 2225 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low']) 2226 2227 neg_frac = (self.cross_sect_dict['xseca'] - self.cross_sect_dict['xsect'])/\ 2228 (2. * self.cross_sect_dict['xseca']) 2229 message = message + \ 2230 ('\n Number of events generated: %s' + \ 2231 '\n Parton shower to be used: %s' + \ 2232 '\n Fraction of negative weights: %4.2f' + \ 2233 '\n Total running time : %s') % \ 2234 (self.run_card['nevents'], 2235 self.run_card['parton_shower'].upper(), 2236 neg_frac, 2237 misc.format_timer(time.time()-self.start_time)) 2238 2239 elif mode in ['NLO', 'LO']: 2240 status = ['Results after grid setup:','Current results:', 2241 'Final results and run summary:'] 2242 if (not done) and (step == 0): 2243 message = '\n ' + status[0] + \ 2244 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2245 self.cross_sect_dict 2246 elif not done: 2247 message = '\n ' + status[1] + \ 2248 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2249 self.cross_sect_dict 2250 elif done: 2251 message = '\n ' + status[2] + proc_info + \ 2252 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2253 self.cross_sect_dict 2254 if self.run_card['reweight_scale']: 2255 if self.run_card['ickkw'] != -1: 2256 message = message + \ 2257 ('\n Ren. and fac. scale uncertainty: +%0.1f%% -%0.1f%%') % \ 2258 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low']) 2259 else: 2260 message = message + \ 2261 ('\n Soft and hard scale dependence (added in quadrature): +%0.1f%% -%0.1f%%') % \ 2262 (scale_pdf_info['scale_upp_quad'], scale_pdf_info['scale_low_quad']) 2263 if self.run_card['reweight_PDF']: 2264 message = message + \ 2265 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \ 2266 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low']) 2267 2268 if (mode in ['NLO', 'LO'] and not done) or \ 2269 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2): 2270 logger.info(message+'\n') 2271 return 2272 2273 # Some advanced general statistics are shown in the debug message at the 2274 # end of the run 2275 # Make sure it never stops a run 2276 try: 2277 message, debug_msg = \ 2278 self.compile_advanced_stats(log_GV_files, all_log_files, message) 2279 except Exception as e: 2280 debug_msg = 'Advanced statistics collection failed with error "%s"'%str(e) 2281 2282 logger.debug(debug_msg+'\n') 2283 logger.info(message+'\n') 2284 2285 # Now copy relevant information in the Events/Run_<xxx> directory 2286 evt_path = pjoin(self.me_dir, 'Events', self.run_name) 2287 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n') 2288 open(pjoin(evt_path, '.full_summary.txt'), 2289 'w').write(message+'\n\n'+debug_msg+'\n') 2290 2291 self.archive_files(evt_path,mode)
2292
2293 - def archive_files(self, evt_path, mode):
2294 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 2295 the run.""" 2296 2297 files_to_arxiv = [pjoin('Cards','param_card.dat'), 2298 pjoin('Cards','MadLoopParams.dat'), 2299 pjoin('Cards','FKS_params.dat'), 2300 pjoin('Cards','run_card.dat'), 2301 pjoin('Subprocesses','setscales.f'), 2302 pjoin('Subprocesses','cuts.f')] 2303 2304 if mode in ['NLO', 'LO']: 2305 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 2306 2307 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 2308 os.mkdir(pjoin(evt_path,'RunMaterial')) 2309 2310 for path in files_to_arxiv: 2311 if os.path.isfile(pjoin(self.me_dir,path)): 2312 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 2313 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 2314 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
2315
2316 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
2317 """ This functions goes through the log files given in arguments and 2318 compiles statistics about MadLoop stability, virtual integration 2319 optimization and detection of potential error messages into a nice 2320 debug message to printed at the end of the run """ 2321 2322 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 2323 # > Errors is a list of tuples with this format (log_file,nErrors) 2324 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 2325 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 2326 2327 # ================================== 2328 # == MadLoop stability statistics == 2329 # ================================== 2330 2331 # Recuperate the fraction of unstable PS points found in the runs for 2332 # the virtuals 2333 UPS_stat_finder = re.compile( 2334 r"Satistics from MadLoop:.*"+\ 2335 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 2336 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 2337 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 2338 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 2339 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 2340 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 2341 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 2342 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 2343 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 2344 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 2345 2346 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 2347 1 : 'CutTools (double precision)', 2348 2 : 'PJFry++', 2349 3 : 'IREGI', 2350 4 : 'Golem95', 2351 9 : 'CutTools (quadruple precision)'} 2352 RetUnit_finder =re.compile( 2353 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 2354 #Unit 2355 2356 for gv_log in log_GV_files: 2357 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 2358 log=open(gv_log,'r').read() 2359 UPS_stats = re.search(UPS_stat_finder,log) 2360 for retunit_stats in re.finditer(RetUnit_finder, log): 2361 if channel_name not in stats['UPS'].keys(): 2362 stats['UPS'][channel_name] = [0]*10+[[0]*10] 2363 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 2364 += int(retunit_stats.group('n_occurences')) 2365 if not UPS_stats is None: 2366 try: 2367 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 2368 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 2369 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 2370 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 2371 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 2372 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 2373 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 2374 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 2375 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 2376 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 2377 except KeyError: 2378 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 2379 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 2380 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 2381 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 2382 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 2383 int(UPS_stats.group('n10')),[0]*10] 2384 debug_msg = "" 2385 if len(stats['UPS'].keys())>0: 2386 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 2387 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 2388 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 2389 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 2390 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 2391 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0) 2392 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 2393 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 2394 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 2395 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 2396 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 2397 for i in range(10)] 2398 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 2399 float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 2400 maxUPS = max(UPSfracs, key = lambda w: w[1]) 2401 2402 tmpStr = "" 2403 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 2404 tmpStr += '\n Stability unknown: %d'%nTotsun 2405 tmpStr += '\n Stable PS point: %d'%nTotsps 2406 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 2407 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 2408 tmpStr += '\n Only double precision used: %d'%nTotddp 2409 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 2410 tmpStr += '\n Initialization phase-space points: %d'%nTotini 2411 tmpStr += '\n Reduction methods used:' 2412 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 2413 unit_code_meaning.keys() if nTot1[i]>0] 2414 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 2415 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 2416 if nTot100 != 0: 2417 debug_msg += '\n Unknown return code (100): %d'%nTot100 2418 if nTot10 != 0: 2419 debug_msg += '\n Unknown return code (10): %d'%nTot10 2420 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 2421 not in unit_code_meaning.keys()) 2422 if nUnknownUnit != 0: 2423 debug_msg += '\n Unknown return code (1): %d'\ 2424 %nUnknownUnit 2425 2426 if maxUPS[1]>0.001: 2427 message += tmpStr 2428 message += '\n Total number of unstable PS point detected:'+\ 2429 ' %d (%4.2f%%)'%(nToteps,float(100*nToteps)/nTotPS) 2430 message += '\n Maximum fraction of UPS points in '+\ 2431 'channel %s (%4.2f%%)'%maxUPS 2432 message += '\n Please report this to the authors while '+\ 2433 'providing the file' 2434 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 2435 maxUPS[0],'UPS.log')) 2436 else: 2437 debug_msg += tmpStr 2438 2439 2440 # ==================================================== 2441 # == aMC@NLO virtual integration optimization stats == 2442 # ==================================================== 2443 2444 virt_tricks_finder = re.compile( 2445 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 2446 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 2447 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 2448 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 2449 2450 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 2451 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 2452 2453 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 2454 2455 channel_contr_list = {} 2456 for gv_log in log_GV_files: 2457 logfile=open(gv_log,'r') 2458 log = logfile.read() 2459 logfile.close() 2460 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2461 vf_stats = None 2462 for vf_stats in re.finditer(virt_frac_finder, log): 2463 pass 2464 if not vf_stats is None: 2465 v_frac = float(vf_stats.group('v_frac')) 2466 v_average = float(vf_stats.group('v_average')) 2467 try: 2468 if v_frac < stats['virt_stats']['v_frac_min'][0]: 2469 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 2470 if v_frac > stats['virt_stats']['v_frac_max'][0]: 2471 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 2472 stats['virt_stats']['v_frac_avg'][0] += v_frac 2473 stats['virt_stats']['v_frac_avg'][1] += 1 2474 except KeyError: 2475 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 2476 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 2477 stats['virt_stats']['v_frac_avg']=[v_frac,1] 2478 2479 2480 ccontr_stats = None 2481 for ccontr_stats in re.finditer(channel_contr_finder, log): 2482 pass 2483 if not ccontr_stats is None: 2484 contrib = float(ccontr_stats.group('v_contr')) 2485 try: 2486 if contrib>channel_contr_list[channel_name]: 2487 channel_contr_list[channel_name]=contrib 2488 except KeyError: 2489 channel_contr_list[channel_name]=contrib 2490 2491 2492 # Now build the list of relevant virt log files to look for the maxima 2493 # of virt fractions and such. 2494 average_contrib = 0.0 2495 for value in channel_contr_list.values(): 2496 average_contrib += value 2497 if len(channel_contr_list.values()) !=0: 2498 average_contrib = average_contrib / len(channel_contr_list.values()) 2499 2500 relevant_log_GV_files = [] 2501 excluded_channels = set([]) 2502 all_channels = set([]) 2503 for log_file in log_GV_files: 2504 channel_name = '/'.join(log_file.split('/')[-3:-1]) 2505 all_channels.add(channel_name) 2506 try: 2507 if channel_contr_list[channel_name] > (0.1*average_contrib): 2508 relevant_log_GV_files.append(log_file) 2509 else: 2510 excluded_channels.add(channel_name) 2511 except KeyError: 2512 relevant_log_GV_files.append(log_file) 2513 2514 # Now we want to use the latest occurence of accumulated result in the log file 2515 for gv_log in relevant_log_GV_files: 2516 logfile=open(gv_log,'r') 2517 log = logfile.read() 2518 logfile.close() 2519 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2520 2521 vt_stats = None 2522 for vt_stats in re.finditer(virt_tricks_finder, log): 2523 pass 2524 if not vt_stats is None: 2525 vt_stats_group = vt_stats.groupdict() 2526 v_ratio = float(vt_stats.group('v_ratio')) 2527 v_ratio_err = float(vt_stats.group('v_ratio_err')) 2528 v_contr = float(vt_stats.group('v_abs_contr')) 2529 v_contr_err = float(vt_stats.group('v_abs_contr_err')) 2530 try: 2531 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 2532 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 2533 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 2534 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 2535 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 2536 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 2537 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 2538 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 2539 if v_contr < stats['virt_stats']['v_contr_min'][0]: 2540 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 2541 if v_contr > stats['virt_stats']['v_contr_max'][0]: 2542 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 2543 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 2544 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 2545 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 2546 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 2547 except KeyError: 2548 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 2549 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 2550 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 2551 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 2552 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 2553 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 2554 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 2555 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 2556 2557 vf_stats = None 2558 for vf_stats in re.finditer(virt_frac_finder, log): 2559 pass 2560 if not vf_stats is None: 2561 v_frac = float(vf_stats.group('v_frac')) 2562 v_average = float(vf_stats.group('v_average')) 2563 try: 2564 if v_average < stats['virt_stats']['v_average_min'][0]: 2565 stats['virt_stats']['v_average_min']=(v_average,channel_name) 2566 if v_average > stats['virt_stats']['v_average_max'][0]: 2567 stats['virt_stats']['v_average_max']=(v_average,channel_name) 2568 stats['virt_stats']['v_average_avg'][0] += v_average 2569 stats['virt_stats']['v_average_avg'][1] += 1 2570 except KeyError: 2571 stats['virt_stats']['v_average_min']=[v_average,channel_name] 2572 stats['virt_stats']['v_average_max']=[v_average,channel_name] 2573 stats['virt_stats']['v_average_avg']=[v_average,1] 2574 2575 try: 2576 debug_msg += '\n\n Statistics on virtual integration optimization : ' 2577 2578 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 2579 %tuple(stats['virt_stats']['v_frac_max']) 2580 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 2581 %tuple(stats['virt_stats']['v_frac_min']) 2582 debug_msg += '\n Average virt fraction computed %.3f'\ 2583 %float(stats['virt_stats']['v_frac_avg'][0]/float(stats['virt_stats']['v_frac_avg'][1])) 2584 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 2585 (len(excluded_channels),len(all_channels)) 2586 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 2587 %tuple(stats['virt_stats']['v_average_max']) 2588 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 2589 %tuple(stats['virt_stats']['v_ratio_max']) 2590 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 2591 %tuple(stats['virt_stats']['v_ratio_err_max']) 2592 debug_msg += tmpStr 2593 # After all it was decided that it is better not to alarm the user unecessarily 2594 # with such printout of the statistics. 2595 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 2596 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 2597 # message += "\n Suspiciously large MC error in :" 2598 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 2599 # message += tmpStr 2600 2601 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 2602 %tuple(stats['virt_stats']['v_contr_err_max']) 2603 debug_msg += tmpStr 2604 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 2605 # message += tmpStr 2606 2607 2608 except KeyError: 2609 debug_msg += '\n Could not find statistics on the integration optimization. ' 2610 2611 # ======================================= 2612 # == aMC@NLO timing profile statistics == 2613 # ======================================= 2614 2615 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 2616 "(?P<time>[\d\+-Eed\.]*)\s*") 2617 2618 for logf in log_GV_files: 2619 logfile=open(logf,'r') 2620 log = logfile.read() 2621 logfile.close() 2622 channel_name = '/'.join(logf.split('/')[-3:-1]) 2623 mint = re.search(mint_search,logf) 2624 if not mint is None: 2625 channel_name = channel_name+' [step %s]'%mint.group('ID') 2626 2627 for time_stats in re.finditer(timing_stat_finder, log): 2628 try: 2629 stats['timings'][time_stats.group('name')][channel_name]+=\ 2630 float(time_stats.group('time')) 2631 except KeyError: 2632 if time_stats.group('name') not in stats['timings'].keys(): 2633 stats['timings'][time_stats.group('name')] = {} 2634 stats['timings'][time_stats.group('name')][channel_name]=\ 2635 float(time_stats.group('time')) 2636 2637 # useful inline function 2638 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 2639 try: 2640 totTimeList = [(time, chan) for chan, time in \ 2641 stats['timings']['Total'].items()] 2642 except KeyError: 2643 totTimeList = [] 2644 2645 totTimeList.sort() 2646 if len(totTimeList)>0: 2647 debug_msg += '\n\n Inclusive timing profile :' 2648 debug_msg += '\n Overall slowest channel %s (%s)'%\ 2649 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 2650 debug_msg += '\n Average channel running time %s'%\ 2651 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 2652 debug_msg += '\n Aggregated total running time %s'%\ 2653 Tstr(sum([el[0] for el in totTimeList])) 2654 else: 2655 debug_msg += '\n\n Inclusive timing profile non available.' 2656 2657 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 2658 sum(stats['timings'][stat].values()), reverse=True) 2659 for name in sorted_keys: 2660 if name=='Total': 2661 continue 2662 if sum(stats['timings'][name].values())<=0.0: 2663 debug_msg += '\n Zero time record for %s.'%name 2664 continue 2665 try: 2666 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 2667 chan) for chan, time in stats['timings'][name].items()] 2668 except KeyError, ZeroDivisionError: 2669 debug_msg += '\n\n Timing profile for %s unavailable.'%name 2670 continue 2671 TimeList.sort() 2672 debug_msg += '\n Timing profile for <%s> :'%name 2673 try: 2674 debug_msg += '\n Overall fraction of time %.3f %%'%\ 2675 float((100.0*(sum(stats['timings'][name].values())/ 2676 sum(stats['timings']['Total'].values())))) 2677 except KeyError, ZeroDivisionError: 2678 debug_msg += '\n Overall fraction of time unavailable.' 2679 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 2680 (TimeList[-1][0],TimeList[-1][1]) 2681 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 2682 (TimeList[0][0],TimeList[0][1]) 2683 2684 # ============================= 2685 # == log file eror detection == 2686 # ============================= 2687 2688 # Find the number of potential errors found in all log files 2689 # This re is a simple match on a case-insensitve 'error' but there is 2690 # also some veto added for excluding the sentence 2691 # "See Section 6 of paper for error calculation." 2692 # which appear in the header of lhapdf in the logs. 2693 err_finder = re.compile(\ 2694 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 2695 for log in all_log_files: 2696 logfile=open(log,'r') 2697 nErrors = len(re.findall(err_finder, logfile.read())) 2698 logfile.close() 2699 if nErrors != 0: 2700 stats['Errors'].append((str(log),nErrors)) 2701 2702 nErrors = sum([err[1] for err in stats['Errors']],0) 2703 if nErrors != 0: 2704 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 2705 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 2706 'found in the following log file%s:'%('s' if \ 2707 len(stats['Errors'])>1 else '') 2708 for error in stats['Errors'][:3]: 2709 log_name = '/'.join(error[0].split('/')[-5:]) 2710 debug_msg += '\n > %d error%s in %s'%\ 2711 (error[1],'s' if error[1]>1 else '',log_name) 2712 if len(stats['Errors'])>3: 2713 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 2714 nRemainingLogs = len(stats['Errors'])-3 2715 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 2716 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 2717 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 2718 2719 return message, debug_msg
2720 2721
2722 - def reweight_and_collect_events(self, options, mode, nevents, event_norm):
2723 """this function calls the reweighting routines and creates the event file in the 2724 Event dir. Return the name of the event file created 2725 """ 2726 scale_pdf_info={} 2727 if self.run_card['reweight_scale'] or self.run_card['reweight_PDF'] : 2728 scale_pdf_info = self.run_reweight(options['reweightonly']) 2729 self.update_status('Collecting events', level='parton', update_results=True) 2730 misc.compile(['collect_events'], 2731 cwd=pjoin(self.me_dir, 'SubProcesses')) 2732 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'), 2733 stdin=subprocess.PIPE, 2734 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w')) 2735 if event_norm.lower() == 'sum': 2736 p.communicate(input = '1\n') 2737 elif event_norm.lower() == 'unity': 2738 p.communicate(input = '3\n') 2739 else: 2740 p.communicate(input = '2\n') 2741 2742 #get filename from collect events 2743 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1] 2744 2745 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)): 2746 raise aMCatNLOError('An error occurred during event generation. ' + \ 2747 'The event file has not been created. Check collect_events.log') 2748 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 2749 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file) 2750 if not options['reweightonly']: 2751 self.print_summary(options, 2, mode, scale_pdf_info) 2752 res_files=glob.glob(pjoin(self.me_dir, 'SubProcesses', 'res*.txt')) 2753 for res_file in res_files: 2754 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2755 2756 logger.info('The %s file has been generated.\n' % (evt_file)) 2757 self.results.add_detail('nb_event', nevents) 2758 self.update_status('Events generated', level='parton', update_results=True) 2759 return evt_file[:-3]
2760 2761
2762 - def run_mcatnlo(self, evt_file):
2763 """runs mcatnlo on the generated event file, to produce showered-events 2764 """ 2765 logger.info('Preparing MCatNLO run') 2766 try: 2767 misc.gunzip(evt_file) 2768 except Exception: 2769 pass 2770 2771 self.banner = banner_mod.Banner(evt_file) 2772 shower = self.banner.get_detail('run_card', 'parton_shower').upper() 2773 2774 #check that the number of split event files divides the number of 2775 # events, otherwise set it to 1 2776 if int(self.banner.get_detail('run_card', 'nevents') / \ 2777 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \ 2778 != self.banner.get_detail('run_card', 'nevents'): 2779 logger.warning(\ 2780 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \ 2781 'Setting it to 1.') 2782 self.shower_card['nsplit_jobs'] = 1 2783 2784 # don't split jobs if the user asks to shower only a part of the events 2785 if self.shower_card['nevents'] > 0 and \ 2786 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \ 2787 self.shower_card['nsplit_jobs'] != 1: 2788 logger.warning(\ 2789 'Only a part of the events will be showered.\n' + \ 2790 'Setting nsplit_jobs in the shower_card to 1.') 2791 self.shower_card['nsplit_jobs'] = 1 2792 2793 self.banner_to_mcatnlo(evt_file) 2794 2795 # if fastjet has to be linked (in extralibs) then 2796 # add lib /include dirs for fastjet if fastjet-config is present on the 2797 # system, otherwise add fjcore to the files to combine 2798 if 'fastjet' in self.shower_card['extralibs']: 2799 #first, check that stdc++ is also linked 2800 if not 'stdc++' in self.shower_card['extralibs']: 2801 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS') 2802 self.shower_card['extralibs'] += ' stdc++' 2803 # then check if options[fastjet] corresponds to a valid fj installation 2804 try: 2805 #this is for a complete fj installation 2806 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \ 2807 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 2808 output, error = p.communicate() 2809 #remove the line break from output (last character) 2810 output = output[:-1] 2811 # add lib/include paths 2812 if not pjoin(output, 'lib') in self.shower_card['extrapaths']: 2813 logger.warning('Linking FastJet: updating EXTRAPATHS') 2814 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib') 2815 if not pjoin(output, 'include') in self.shower_card['includepaths']: 2816 logger.warning('Linking FastJet: updating INCLUDEPATHS') 2817 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include') 2818 # to be changed in the fortran wrapper 2819 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ' 2820 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ' 2821 except Exception: 2822 logger.warning('Linking FastJet: using fjcore') 2823 # this is for FJcore, so no FJ library has to be linked 2824 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '') 2825 if not 'fjcore.o' in self.shower_card['analyse']: 2826 self.shower_card['analyse'] += ' fjcore.o' 2827 # to be changed in the fortran wrapper 2828 include_line = '#include "fjcore.hh"//INCLUDE_FJ' 2829 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ' 2830 # change the fortran wrapper with the correct namespaces/include 2831 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n') 2832 for line in fjwrapper_lines: 2833 if '//INCLUDE_FJ' in line: 2834 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line 2835 if '//NAMESPACE_FJ' in line: 2836 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line 2837 open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w').write(\ 2838 '\n'.join(fjwrapper_lines) + '\n') 2839 2840 extrapaths = self.shower_card['extrapaths'].split() 2841 2842 # check that the path needed by HW++ and PY8 are set if one uses these shower 2843 if shower in ['HERWIGPP', 'PYTHIA8']: 2844 path_dict = {'HERWIGPP': ['hepmc_path', 2845 'thepeg_path', 2846 'hwpp_path'], 2847 'PYTHIA8': ['pythia8_path']} 2848 2849 if not all([self.options[ppath] for ppath in path_dict[shower]]): 2850 raise aMCatNLOError('Some paths are missing in the configuration file.\n' + \ 2851 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower]))) 2852 2853 if shower == 'HERWIGPP': 2854 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib')) 2855 2856 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 2857 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib')) 2858 2859 if 'LD_LIBRARY_PATH' in os.environ.keys(): 2860 ldlibrarypath = os.environ['LD_LIBRARY_PATH'] 2861 else: 2862 ldlibrarypath = '' 2863 ldlibrarypath += ':' + ':'.join(extrapaths) 2864 os.putenv('LD_LIBRARY_PATH', ldlibrarypath) 2865 2866 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat') 2867 self.shower_card.write_card(shower, shower_card_path) 2868 2869 # overwrite if shower_card_set.dat exists in MCatNLO 2870 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')): 2871 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'), 2872 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')) 2873 2874 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log') 2875 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower') 2876 2877 2878 # libdl may be needded for pythia 82xx 2879 if shower == 'PYTHIA8' and not \ 2880 os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \ 2881 'dl' not in self.shower_card['extralibs'].split(): 2882 # 'dl' has to be linked with the extralibs 2883 self.shower_card['extralibs'] += ' dl' 2884 logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \ 2885 "It is needed for the correct running of PY8.2xx.\n" + \ 2886 "If this library cannot be found on your system, a crash will occur.") 2887 2888 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'), 2889 stderr=open(mcatnlo_log, 'w'), 2890 cwd=pjoin(self.me_dir, 'MCatNLO')) 2891 2892 exe = 'MCATNLO_%s_EXE' % shower 2893 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \ 2894 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')): 2895 print open(mcatnlo_log).read() 2896 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log) 2897 logger.info(' ... done') 2898 2899 # create an empty dir where to run 2900 count = 1 2901 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 2902 (shower, count))): 2903 count += 1 2904 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 2905 (shower, count)) 2906 os.mkdir(rundir) 2907 files.cp(shower_card_path, rundir) 2908 2909 #look for the event files (don't resplit if one asks for the 2910 # same number of event files as in the previous run) 2911 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 2912 'events_*.lhe')) 2913 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']: 2914 logger.info('Cleaning old files and splitting the event file...') 2915 #clean the old files 2916 files.rm([f for f in event_files if 'events.lhe' not in f]) 2917 if self.shower_card['nsplit_jobs'] > 1: 2918 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities')) 2919 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')], 2920 stdin=subprocess.PIPE, 2921 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'), 2922 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 2923 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs']) 2924 logger.info('Splitting done.') 2925 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name, 2926 'events_*.lhe')) 2927 2928 event_files.sort() 2929 2930 self.update_status('Showering events...', level='shower') 2931 logger.info('(Running in %s)' % rundir) 2932 if shower != 'PYTHIA8': 2933 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir) 2934 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir) 2935 else: 2936 # special treatment for pythia8 2937 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir) 2938 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir) 2939 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx 2940 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir) 2941 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir) 2942 else: # this is PY8.2xxx 2943 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir) 2944 #link the hwpp exe in the rundir 2945 if shower == 'HERWIGPP': 2946 try: 2947 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir) 2948 except Exception: 2949 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.') 2950 2951 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')): 2952 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir) 2953 2954 files.ln(evt_file, rundir, 'events.lhe') 2955 for i, f in enumerate(event_files): 2956 files.ln(f, rundir,'events_%d.lhe' % (i + 1)) 2957 2958 if not self.shower_card['analyse']: 2959 # an hep/hepmc file as output 2960 out_id = 'HEP' 2961 else: 2962 # one or more .top file(s) as output 2963 if "HwU" in self.shower_card['analyse']: 2964 out_id = 'HWU' 2965 else: 2966 out_id = 'TOP' 2967 2968 # write the executable 2969 open(pjoin(rundir, 'shower.sh'), 'w').write(\ 2970 open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \ 2971 % {'extralibs': ':'.join(extrapaths)}) 2972 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')]) 2973 2974 if event_files: 2975 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \ 2976 for i in range(len(event_files))] 2977 else: 2978 arg_list = [[shower, out_id, self.run_name]] 2979 2980 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower') 2981 self.njobs = 1 2982 self.wait_for_complete('shower') 2983 2984 # now collect the results 2985 message = '' 2986 warning = '' 2987 to_gzip = [evt_file] 2988 if out_id == 'HEP': 2989 #copy the showered stdhep/hepmc file back in events 2990 if shower in ['PYTHIA8', 'HERWIGPP']: 2991 hep_format = 'HEPMC' 2992 ext = 'hepmc' 2993 else: 2994 hep_format = 'StdHEP' 2995 ext = 'hep' 2996 2997 hep_file = '%s_%s_0.%s.gz' % \ 2998 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext) 2999 count = 0 3000 3001 # find the first available name for the output: 3002 # check existing results with or without event splitting 3003 while os.path.exists(hep_file) or \ 3004 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) : 3005 count +=1 3006 hep_file = '%s_%s_%d.%s.gz' % \ 3007 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext) 3008 3009 try: 3010 if self.shower_card['nsplit_jobs'] == 1: 3011 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file) 3012 message = ('The file %s has been generated. \nIt contains showered' + \ 3013 ' and hadronized events in the %s format obtained' + \ 3014 ' showering the parton-level event file %s.gz with %s') % \ 3015 (hep_file, hep_format, evt_file, shower) 3016 else: 3017 hep_list = [] 3018 for i in range(self.shower_card['nsplit_jobs']): 3019 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext))) 3020 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1]) 3021 message = ('The following files have been generated:\n %s\nThey contain showered' + \ 3022 ' and hadronized events in the %s format obtained' + \ 3023 ' showering the (split) parton-level event file %s.gz with %s') % \ 3024 ('\n '.join(hep_list), hep_format, evt_file, shower) 3025 3026 except OSError, IOError: 3027 raise aMCatNLOError('No file has been generated, an error occurred.'+\ 3028 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log')) 3029 3030 # run the plot creation in a secure way 3031 if hep_format == 'StdHEP': 3032 try: 3033 self.do_plot('%s -f' % self.run_name) 3034 except Exception, error: 3035 logger.info("Fail to make the plot. Continue...") 3036 pass 3037 3038 elif out_id == 'TOP' or out_id == 'HWU': 3039 #copy the topdrawer or HwU file(s) back in events 3040 if out_id=='TOP': 3041 ext='top' 3042 elif out_id=='HWU': 3043 ext='HwU' 3044 topfiles = [] 3045 top_tars = [tarfile.TarFile(f) for f in glob.glob(pjoin(rundir, 'histfile*.tar'))] 3046 for top_tar in top_tars: 3047 topfiles.extend(top_tar.getnames()) 3048 3049 # safety check 3050 if len(top_tars) != self.shower_card['nsplit_jobs']: 3051 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \ 3052 (self.shower_card['nsplit_jobs'], len(top_tars))) 3053 3054 # find the first available name for the output: 3055 # check existing results with or without event splitting 3056 filename = 'plot_%s_%d_' % (shower, 1) 3057 count = 1 3058 while os.path.exists(pjoin(self.me_dir, 'Events', 3059 self.run_name, '%s0.%s' % (filename,ext))) or \ 3060 os.path.exists(pjoin(self.me_dir, 'Events', 3061 self.run_name, '%s0__1.%s' % (filename,ext))): 3062 count += 1 3063 filename = 'plot_%s_%d_' % (shower, count) 3064 3065 if out_id=='TOP': 3066 hist_format='TopDrawer format' 3067 elif out_id=='HWU': 3068 hist_format='HwU and GnuPlot formats' 3069 3070 if not topfiles: 3071 # if no topfiles are found just warn the user 3072 warning = 'No .top file has been generated. For the results of your ' +\ 3073 'run, please check inside %s' % rundir 3074 elif self.shower_card['nsplit_jobs'] == 1: 3075 # only one job for the shower 3076 top_tars[0].extractall(path = rundir) 3077 plotfiles = [] 3078 for i, file in enumerate(topfiles): 3079 if out_id=='TOP': 3080 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3081 '%s%d.top' % (filename, i)) 3082 files.mv(pjoin(rundir, file), plotfile) 3083 elif out_id=='HWU': 3084 histogram_list=histograms.HwUList(pjoin(rundir,file)) 3085 histogram_list.output(pjoin(self.me_dir,'Events',self.run_name, 3086 '%s%d'% (filename,i)),format = 'gnuplot') 3087 try: 3088 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\ 3089 stdout=os.open(os.devnull, os.O_RDWR),\ 3090 stderr=os.open(os.devnull, os.O_RDWR),\ 3091 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3092 except Exception: 3093 pass 3094 plotfile=pjoin(self.me_dir,'Events',self.run_name, 3095 '%s%d.HwU'% (filename,i)) 3096 plotfiles.append(plotfile) 3097 3098 ffiles = 'files' 3099 have = 'have' 3100 if len(plotfiles) == 1: 3101 ffiles = 'file' 3102 have = 'has' 3103 3104 message = ('The %s %s %s been generated, with histograms in the' + \ 3105 ' %s, obtained by showering the parton-level' + \ 3106 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \ 3107 hist_format, evt_file, shower) 3108 else: 3109 # many jobs for the shower have been run 3110 topfiles_set = set(topfiles) 3111 plotfiles = [] 3112 for j, top_tar in enumerate(top_tars): 3113 top_tar.extractall(path = rundir) 3114 for i, file in enumerate(topfiles_set): 3115 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3116 '%s%d__%d.%s' % (filename, i, j + 1,ext)) 3117 files.mv(pjoin(rundir, file), plotfile) 3118 plotfiles.append(plotfile) 3119 3120 # check if the user asked to combine the .top into a single file 3121 if self.shower_card['combine_td']: 3122 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities')) 3123 3124 if self.banner.get('run_card', 'event_norm').lower() == 'sum': 3125 norm = 1. 3126 elif self.banner.get('run_card', 'event_norm').lower() == 'average': 3127 norm = 1./float(self.shower_card['nsplit_jobs']) 3128 3129 plotfiles2 = [] 3130 for i, file in enumerate(topfiles_set): 3131 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \ 3132 for j in range(self.shower_card['nsplit_jobs'])] 3133 if out_id=='TOP': 3134 infile="%d\n%s\n%s\n" % \ 3135 (self.shower_card['nsplit_jobs'], 3136 '\n'.join(filelist), 3137 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs'])) 3138 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')], 3139 stdin=subprocess.PIPE, 3140 stdout=os.open(os.devnull, os.O_RDWR), 3141 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3142 p.communicate(input = infile) 3143 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'), 3144 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 3145 elif out_id=='HWU': 3146 histogram_list=histograms.HwUList(plotfiles[0]) 3147 for ii, histo in enumerate(histogram_list): 3148 histogram_list[ii] = histo*norm 3149 for histo_path in plotfiles[1:]: 3150 for ii, histo in enumerate(histograms.HwUList(histo_path)): 3151 # First make sure the plots have the same weight labels and such 3152 histo.test_plot_compability(histogram_list[ii]) 3153 # Now let the histogram module do the magic and add them. 3154 histogram_list[ii] += histo*norm 3155 # And now output the finalized list 3156 histogram_list.output(pjoin(self.me_dir,'Events',self.run_name,'%s%d'% (filename, i)), 3157 format = 'gnuplot') 3158 try: 3159 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\ 3160 stdout=os.open(os.devnull, os.O_RDWR),\ 3161 stderr=os.open(os.devnull, os.O_RDWR),\ 3162 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3163 except Exception: 3164 pass 3165 3166 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext))) 3167 tar = tarfile.open( 3168 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz') 3169 for f in filelist: 3170 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f) 3171 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist]) 3172 3173 tar.close() 3174 3175 ffiles = 'files' 3176 have = 'have' 3177 if len(plotfiles2) == 1: 3178 ffiles = 'file' 3179 have = 'has' 3180 3181 message = ('The %s %s %s been generated, with histograms in the' + \ 3182 ' %s, obtained by showering the parton-level' + \ 3183 ' file %s.gz with %s.\n' + \ 3184 'The files from the different shower ' + \ 3185 'jobs (before combining them) can be found inside %s.') % \ 3186 (ffiles, ', '.join(plotfiles2), have, hist_format,\ 3187 evt_file, shower, 3188 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2])) 3189 3190 else: 3191 message = ('The following files have been generated:\n %s\n' + \ 3192 'They contain histograms in the' + \ 3193 ' %s, obtained by showering the parton-level' + \ 3194 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \ 3195 hist_format, evt_file, shower) 3196 3197 # Now arxiv the shower card used if RunMaterial is present 3198 run_dir_path = pjoin(rundir, self.run_name) 3199 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')): 3200 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path) 3201 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'), 3202 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\ 3203 %(shower, count))) 3204 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], 3205 cwd=run_dir_path) 3206 shutil.rmtree(pjoin(run_dir_path,'RunMaterial')) 3207 # end of the run, gzip files and print out the message/warning 3208 for f in to_gzip: 3209 misc.gzip(f) 3210 if message: 3211 logger.info(message) 3212 if warning: 3213 logger.warning(warning) 3214 3215 self.update_status('Run complete', level='shower', update_results=True)
3216 3217 3218 ############################################################################
3219 - def set_run_name(self, name, tag=None, level='parton', reload_card=False):
3220 """define the run name, the run_tag, the banner and the results.""" 3221 3222 # when are we force to change the tag new_run:previous run requiring changes 3223 upgrade_tag = {'parton': ['parton','pythia','pgs','delphes','shower'], 3224 'pythia': ['pythia','pgs','delphes'], 3225 'shower': ['shower'], 3226 'pgs': ['pgs'], 3227 'delphes':['delphes'], 3228 'plot':[]} 3229 3230 3231 3232 if name == self.run_name: 3233 if reload_card: 3234 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 3235 self.run_card = banner_mod.RunCardNLO(run_card) 3236 3237 #check if we need to change the tag 3238 if tag: 3239 self.run_card['run_tag'] = tag 3240 self.run_tag = tag 3241 self.results.add_run(self.run_name, self.run_card) 3242 else: 3243 for tag in upgrade_tag[level]: 3244 if getattr(self.results[self.run_name][-1], tag): 3245 tag = self.get_available_tag() 3246 self.run_card['run_tag'] = tag 3247 self.run_tag = tag 3248 self.results.add_run(self.run_name, self.run_card) 3249 break 3250 return # Nothing to do anymore 3251 3252 # save/clean previous run 3253 if self.run_name: 3254 self.store_result() 3255 # store new name 3256 self.run_name = name 3257 3258 # Read run_card 3259 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 3260 self.run_card = banner_mod.RunCardNLO(run_card) 3261 3262 new_tag = False 3263 # First call for this run -> set the banner 3264 self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag) 3265 if tag: 3266 self.run_card['run_tag'] = tag 3267 new_tag = True 3268 elif not self.run_name in self.results and level =='parton': 3269 pass # No results yet, so current tag is fine 3270 elif not self.run_name in self.results: 3271 #This is only for case when you want to trick the interface 3272 logger.warning('Trying to run data on unknown run.') 3273 self.results.add_run(name, self.run_card) 3274 self.results.update('add run %s' % name, 'all', makehtml=True) 3275 else: 3276 for tag in upgrade_tag[level]: 3277 3278 if getattr(self.results[self.run_name][-1], tag): 3279 # LEVEL is already define in the last tag -> need to switch tag 3280 tag = self.get_available_tag() 3281 self.run_card['run_tag'] = tag 3282 new_tag = True 3283 break 3284 if not new_tag: 3285 # We can add the results to the current run 3286 tag = self.results[self.run_name][-1]['tag'] 3287 self.run_card['run_tag'] = tag # ensure that run_tag is correct 3288 3289 3290 if name in self.results and not new_tag: 3291 self.results.def_current(self.run_name) 3292 else: 3293 self.results.add_run(self.run_name, self.run_card) 3294 3295 self.run_tag = self.run_card['run_tag'] 3296 3297 # Return the tag of the previous run having the required data for this 3298 # tag/run to working wel. 3299 if level == 'parton': 3300 return 3301 elif level == 'pythia': 3302 return self.results[self.run_name][0]['tag'] 3303 else: 3304 for i in range(-1,-len(self.results[self.run_name])-1,-1): 3305 tagRun = self.results[self.run_name][i] 3306 if tagRun.pythia: 3307 return tagRun['tag']
3308 3309
3310 - def store_result(self):
3311 """ tar the pythia results. This is done when we are quite sure that 3312 the pythia output will not be use anymore """ 3313 3314 if not self.run_name: 3315 return 3316 3317 self.results.save() 3318 3319 if not self.to_store: 3320 return 3321 3322 tag = self.run_card['run_tag'] 3323 3324 self.to_store = []
3325 3326
3327 - def get_init_dict(self, evt_file):
3328 """reads the info in the init block and returns them in a dictionary""" 3329 ev_file = open(evt_file) 3330 init = "" 3331 found = False 3332 while True: 3333 line = ev_file.readline() 3334 if "<init>" in line: 3335 found = True 3336 elif found and not line.startswith('#'): 3337 init += line 3338 if "</init>" in line or "<event>" in line: 3339 break 3340 ev_file.close() 3341 3342 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 3343 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 3344 # these are not included (so far) in the init_dict 3345 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 3346 3347 init_dict = {} 3348 init_dict['idbmup1'] = int(init.split()[0]) 3349 init_dict['idbmup2'] = int(init.split()[1]) 3350 init_dict['ebmup1'] = float(init.split()[2]) 3351 init_dict['ebmup2'] = float(init.split()[3]) 3352 init_dict['pdfgup1'] = int(init.split()[4]) 3353 init_dict['pdfgup2'] = int(init.split()[5]) 3354 init_dict['pdfsup1'] = int(init.split()[6]) 3355 init_dict['pdfsup2'] = int(init.split()[7]) 3356 init_dict['idwtup'] = int(init.split()[8]) 3357 init_dict['nprup'] = int(init.split()[9]) 3358 3359 return init_dict
3360 3361
3362 - def banner_to_mcatnlo(self, evt_file):
3363 """creates the mcatnlo input script using the values set in the header of the event_file. 3364 It also checks if the lhapdf library is used""" 3365 shower = self.banner.get('run_card', 'parton_shower').upper() 3366 pdlabel = self.banner.get('run_card', 'pdlabel') 3367 itry = 0 3368 nevents = self.shower_card['nevents'] 3369 init_dict = self.get_init_dict(evt_file) 3370 3371 if nevents < 0 or \ 3372 nevents > self.banner.get_detail('run_card', 'nevents'): 3373 nevents = self.banner.get_detail('run_card', 'nevents') 3374 3375 nevents = nevents / self.shower_card['nsplit_jobs'] 3376 3377 mcmass_dict = {} 3378 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]: 3379 pdg = int(line.split()[0]) 3380 mass = float(line.split()[1]) 3381 mcmass_dict[pdg] = mass 3382 3383 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1]) 3384 content += 'NEVENTS=%d\n' % nevents 3385 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\ 3386 self.shower_card['nsplit_jobs']) 3387 content += 'MCMODE=%s\n' % shower 3388 content += 'PDLABEL=%s\n' % pdlabel 3389 content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value 3390 #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid') 3391 #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3392 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value 3393 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value 3394 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value 3395 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value 3396 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value 3397 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value 3398 try: 3399 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value 3400 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value 3401 except KeyError: 3402 content += 'HGGMASS=120.\n' 3403 content += 'HGGWIDTH=0.00575308848\n' 3404 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1') 3405 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2') 3406 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1') 3407 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2') 3408 content += 'DMASS=%s\n' % mcmass_dict[1] 3409 content += 'UMASS=%s\n' % mcmass_dict[2] 3410 content += 'SMASS=%s\n' % mcmass_dict[3] 3411 content += 'CMASS=%s\n' % mcmass_dict[4] 3412 content += 'BMASS=%s\n' % mcmass_dict[5] 3413 try: 3414 content += 'EMASS=%s\n' % mcmass_dict[11] 3415 content += 'MUMASS=%s\n' % mcmass_dict[13] 3416 content += 'TAUMASS=%s\n' % mcmass_dict[15] 3417 except KeyError: 3418 # this is for backward compatibility 3419 mcmass_lines = [l for l in \ 3420 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper()) 3421 ).read().split('\n') if l] 3422 new_mcmass_dict = {} 3423 for l in mcmass_lines: 3424 key, val = l.split('=') 3425 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip() 3426 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)'] 3427 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)'] 3428 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)'] 3429 3430 content += 'GMASS=%s\n' % mcmass_dict[21] 3431 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower() 3432 # check if need to link lhapdf 3433 if int(self.shower_card['pdfcode']) > 1 or \ 3434 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1): 3435 # Use LHAPDF (should be correctly installed, because 3436 # either events were already generated with them, or the 3437 # user explicitly gives an LHAPDF number in the 3438 # shower_card). 3439 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 3440 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 3441 stdout = subprocess.PIPE).stdout.read().strip() 3442 content += 'LHAPDFPATH=%s\n' % lhapdfpath 3443 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 3444 if self.shower_card['pdfcode']==1: 3445 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 3446 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3447 else: 3448 lhaid_list = [abs(int(self.shower_card['pdfcode']))] 3449 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode'] 3450 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 3451 elif int(self.shower_card['pdfcode'])==1: 3452 # Try to use LHAPDF because user wants to use the same PDF 3453 # as was used for the event generation. However, for the 3454 # event generation, LHAPDF was not used, so non-trivial to 3455 # see if if LHAPDF is available with the corresponding PDF 3456 # set. If not found, give a warning and use build-in PDF 3457 # set instead. 3458 try: 3459 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 3460 stdout = subprocess.PIPE).stdout.read().strip() 3461 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 3462 content += 'LHAPDFPATH=%s\n' % lhapdfpath 3463 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 3464 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 3465 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3466 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 3467 except Exception: 3468 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\ 3469 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\ 3470 ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\ 3471 ' same set as was used in the event generation install LHAPDF and set the path using'+\ 3472 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell') 3473 content += 'LHAPDFPATH=\n' 3474 content += 'PDFCODE=0\n' 3475 else: 3476 content += 'LHAPDFPATH=\n' 3477 content += 'PDFCODE=0\n' 3478 3479 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw') 3480 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj') 3481 # add the pythia8/hwpp path(s) 3482 if self.options['pythia8_path']: 3483 content+='PY8PATH=%s\n' % self.options['pythia8_path'] 3484 if self.options['hwpp_path']: 3485 content+='HWPPPATH=%s\n' % self.options['hwpp_path'] 3486 if self.options['thepeg_path']: 3487 content+='THEPEGPATH=%s\n' % self.options['thepeg_path'] 3488 if self.options['hepmc_path']: 3489 content+='HEPMCPATH=%s\n' % self.options['hepmc_path'] 3490 3491 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w') 3492 output.write(content) 3493 output.close() 3494 return shower
3495 3496
3497 - def run_reweight(self, only):
3498 """runs the reweight_xsec_events executables on each sub-event file generated 3499 to compute on the fly scale and/or PDF uncertainities""" 3500 logger.info(' Doing reweight') 3501 3502 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted') 3503 # if only doing reweight, copy back the nevents_unweighted file 3504 if only: 3505 if os.path.exists(nev_unw + '.orig'): 3506 files.cp(nev_unw + '.orig', nev_unw) 3507 else: 3508 raise aMCatNLOError('Cannot find event file information') 3509 3510 #read the nevents_unweighted file to get the list of event files 3511 file = open(nev_unw) 3512 lines = file.read().split('\n') 3513 file.close() 3514 # make copy of the original nevent_unweighted file 3515 files.cp(nev_unw, nev_unw + '.orig') 3516 # loop over lines (all but the last one whith is empty) and check that the 3517 # number of events is not 0 3518 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0'] 3519 #prepare the job_dict 3520 job_dict = {} 3521 exe = 'reweight_xsec_events.local' 3522 for i, evt_file in enumerate(evt_files): 3523 path, evt = os.path.split(evt_file) 3524 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \ 3525 pjoin(self.me_dir, 'SubProcesses', path)) 3526 job_dict[path] = [exe] 3527 3528 self.run_all(job_dict, [[evt, '1']], 'Running reweight') 3529 3530 #check that the new event files are complete 3531 for evt_file in evt_files: 3532 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \ 3533 pjoin(self.me_dir, 'SubProcesses', evt_file)], \ 3534 stdout = subprocess.PIPE).stdout.read().strip() 3535 if last_line != "</LesHouchesEvents>": 3536 raise aMCatNLOError('An error occurred during reweight. Check the' + \ 3537 '\'reweight_xsec_events.output\' files inside the ' + \ 3538 '\'SubProcesses/P*/G*/ directories for details') 3539 3540 #update file name in nevents_unweighted 3541 newfile = open(nev_unw, 'w') 3542 for line in lines: 3543 if line: 3544 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n') 3545 newfile.close() 3546 3547 return self.pdf_scale_from_reweighting(evt_files)
3548
3549 - def pdf_scale_from_reweighting(self, evt_files):
3550 """This function takes the files with the scale and pdf values 3551 written by the reweight_xsec_events.f code 3552 (P*/G*/pdf_scale_dependence.dat) and computes the overall 3553 scale and PDF uncertainty (the latter is computed using the 3554 Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000)) 3555 and returns it in percents. The expected format of the file 3556 is: n_scales xsec_scale_central xsec_scale1 ... n_pdf 3557 xsec_pdf0 xsec_pdf1 ....""" 3558 scale_pdf_info={} 3559 scales=[] 3560 pdfs=[] 3561 numofpdf = 0 3562 numofscales = 0 3563 for evt_file in evt_files: 3564 path, evt=os.path.split(evt_file) 3565 data_file=open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat')).read() 3566 lines = data_file.replace("D", "E").split("\n") 3567 if not numofscales: 3568 numofscales = int(lines[0]) 3569 if not numofpdf: 3570 numofpdf = int(lines[2]) 3571 scales_this = [float(val) for val in lines[1].split()] 3572 pdfs_this = [float(val) for val in lines[3].split()] 3573 3574 if numofscales != len(scales_this) or numofpdf !=len(pdfs_this): 3575 # the +1 takes the 0th (central) set into account 3576 logger.info(data_file) 3577 logger.info((' Expected # of scales: %d\n'+ 3578 ' Found # of scales: %d\n'+ 3579 ' Expected # of pdfs: %d\n'+ 3580 ' Found # of pdfs: %d\n') % 3581 (numofscales, len(scales_this), numofpdf, len(pdfs_this))) 3582 raise aMCatNLOError('inconsistent scale_pdf_dependence.dat') 3583 if not scales: 3584 scales = [0.] * numofscales 3585 if not pdfs: 3586 pdfs = [0.] * numofpdf 3587 3588 scales = [a + b for a, b in zip(scales, scales_this)] 3589 pdfs = [a + b for a, b in zip(pdfs, pdfs_this)] 3590 3591 # get the central value 3592 if numofscales>0 and numofpdf==0: 3593 cntrl_val=scales[0] 3594 elif numofpdf>0 and numofscales==0: 3595 cntrl_val=pdfs[0] 3596 elif numofpdf>0 and numofscales>0: 3597 if abs(1-scales[0]/pdfs[0])>0.0001: 3598 raise aMCatNLOError('Central values for scale and PDF variation not identical') 3599 else: 3600 cntrl_val=scales[0] 3601 3602 # get the scale uncertainty in percent 3603 if numofscales>0: 3604 if cntrl_val != 0.0: 3605 # max and min of the full envelope 3606 scale_pdf_info['scale_upp'] = (max(scales)/cntrl_val-1)*100 3607 scale_pdf_info['scale_low'] = (1-min(scales)/cntrl_val)*100 3608 # ren and fac scale dependence added in quadrature 3609 scale_pdf_info['scale_upp_quad'] = ((cntrl_val+math.sqrt(math.pow(max(scales[0]-cntrl_val,scales[1]-cntrl_val,scales[2]-cntrl_val),2)+math.pow(max(scales[0]-cntrl_val,scales[3]-cntrl_val,scales[6]-cntrl_val),2)))/cntrl_val-1)*100 3610 scale_pdf_info['scale_low_quad'] = (1-(cntrl_val-math.sqrt(math.pow(min(scales[0]-cntrl_val,scales[1]-cntrl_val,scales[2]-cntrl_val),2)+math.pow(min(scales[0]-cntrl_val,scales[3]-cntrl_val,scales[6]-cntrl_val),2)))/cntrl_val)*100 3611 else: 3612 scale_pdf_info['scale_upp'] = 0.0 3613 scale_pdf_info['scale_low'] = 0.0 3614 3615 # get the pdf uncertainty in percent (according to the Hessian method) 3616 lhaid=self.run_card['lhaid'] 3617 pdf_upp=0.0 3618 pdf_low=0.0 3619 if lhaid <= 90000: 3620 # use Hessian method (CTEQ & MSTW) 3621 if numofpdf>1: 3622 for i in range(int(numofpdf/2)): 3623 pdf_upp=pdf_upp+math.pow(max(0.0,pdfs[2*i+1]-cntrl_val,pdfs[2*i+2]-cntrl_val),2) 3624 pdf_low=pdf_low+math.pow(max(0.0,cntrl_val-pdfs[2*i+1],cntrl_val-pdfs[2*i+2]),2) 3625 if cntrl_val != 0.0: 3626 scale_pdf_info['pdf_upp'] = math.sqrt(pdf_upp)/cntrl_val*100 3627 scale_pdf_info['pdf_low'] = math.sqrt(pdf_low)/cntrl_val*100 3628 else: 3629 scale_pdf_info['pdf_upp'] = 0.0 3630 scale_pdf_info['pdf_low'] = 0.0 3631 else: 3632 # use Gaussian method (NNPDF) 3633 pdf_stdev=0.0 3634 for i in range(int(numofpdf-1)): 3635 pdf_stdev = pdf_stdev + pow(pdfs[i+1] - cntrl_val,2) 3636 pdf_stdev = math.sqrt(pdf_stdev/int(numofpdf-2)) 3637 if cntrl_val != 0.0: 3638 scale_pdf_info['pdf_upp'] = pdf_stdev/cntrl_val*100 3639 else: 3640 scale_pdf_info['pdf_upp'] = 0.0 3641 scale_pdf_info['pdf_low'] = scale_pdf_info['pdf_upp'] 3642 return scale_pdf_info
3643 3644
3645 - def wait_for_complete(self, run_type):
3646 """this function waits for jobs on cluster to complete their run.""" 3647 starttime = time.time() 3648 #logger.info(' Waiting for submitted jobs to complete') 3649 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 3650 starttime=starttime, level='parton', update_results=True) 3651 try: 3652 self.cluster.wait(self.me_dir, update_status) 3653 except: 3654 self.cluster.remove() 3655 raise
3656
3657 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
3658 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 3659 self.ijob = 0 3660 if run_type != 'shower': 3661 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 3662 for args in arg_list: 3663 for Pdir, jobs in job_dict.items(): 3664 for job in jobs: 3665 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 3666 if self.cluster_mode == 2: 3667 time.sleep(1) # security to allow all jobs to be launched 3668 else: 3669 self.njobs = len(arg_list) 3670 for args in arg_list: 3671 [(cwd, exe)] = job_dict.items() 3672 self.run_exe(exe, args, run_type, cwd) 3673 3674 self.wait_for_complete(run_type)
3675 3676 3677
3678 - def check_event_files(self,jobs):
3679 """check the integrity of the event files after splitting, and resubmit 3680 those which are not nicely terminated""" 3681 jobs_to_resubmit = [] 3682 for job in jobs: 3683 last_line = '' 3684 try: 3685 last_line = subprocess.Popen( 3686 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \ 3687 stdout = subprocess.PIPE).stdout.read().strip() 3688 except IOError: 3689 pass 3690 if last_line != "</LesHouchesEvents>": 3691 jobs_to_resubmit.append(job) 3692 self.njobs = 0 3693 if jobs_to_resubmit: 3694 run_type = 'Resubmitting broken jobs' 3695 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 3696 for job in jobs_to_resubmit: 3697 logger.debug('Resubmitting ' + job['dirname'] + '\n') 3698 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
3699 3700
3701 - def find_jobs_to_split(self, pdir, job, arg):
3702 """looks into the nevents_unweighed_splitted file to check how many 3703 split jobs are needed for this (pdir, job). arg is F, B or V""" 3704 # find the number of the integration channel 3705 splittings = [] 3706 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 3707 pattern = re.compile('for i in (\d+) ; do') 3708 match = re.search(pattern, ajob) 3709 channel = match.groups()[0] 3710 # then open the nevents_unweighted_splitted file and look for the 3711 # number of splittings to be done 3712 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 3713 # This skips the channels with zero events, because they are 3714 # not of the form GFXX_YY, but simply GFXX 3715 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 3716 pjoin(pdir, 'G%s%s' % (arg,channel))) 3717 matches = re.findall(pattern, nevents_file) 3718 for m in matches: 3719 splittings.append(m) 3720 return splittings
3721 3722
3723 - def run_exe(self, exe, args, run_type, cwd=None):
3724 """this basic function launch locally/on cluster exe with args as argument. 3725 """ 3726 3727 # first test that exe exists: 3728 execpath = None 3729 if cwd and os.path.exists(pjoin(cwd, exe)): 3730 execpath = pjoin(cwd, exe) 3731 elif not cwd and os.path.exists(exe): 3732 execpath = exe 3733 else: 3734 raise aMCatNLOError('Cannot find executable %s in %s' \ 3735 % (exe, os.getcwd())) 3736 # check that the executable has exec permissions 3737 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK): 3738 subprocess.call(['chmod', '+x', exe], cwd=cwd) 3739 # finally run it 3740 if self.cluster_mode == 0: 3741 #this is for the serial run 3742 misc.call(['./'+exe] + args, cwd=cwd) 3743 self.ijob += 1 3744 self.update_status((max([self.njobs - self.ijob - 1, 0]), 3745 min([1, self.njobs - self.ijob]), 3746 self.ijob, run_type), level='parton') 3747 3748 #this is for the cluster/multicore run 3749 elif 'reweight' in exe: 3750 # a reweight run 3751 # Find the correct PDF input file 3752 input_files, output_files = [], [] 3753 pdfinput = self.get_pdf_input_filename() 3754 if os.path.exists(pdfinput): 3755 input_files.append(pdfinput) 3756 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events')) 3757 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat')) 3758 input_files.append(args[0]) 3759 output_files.append('%s.rwgt' % os.path.basename(args[0])) 3760 output_files.append('reweight_xsec_events.output') 3761 output_files.append('scale_pdf_dependence.dat') 3762 3763 return self.cluster.submit2(exe, args, cwd=cwd, 3764 input_files=input_files, output_files=output_files, 3765 required_output=output_files) 3766 3767 elif 'ajob' in exe: 3768 # the 'standard' amcatnlo job 3769 # check if args is a list of string 3770 if type(args[0]) == str: 3771 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args) 3772 #submitting 3773 self.cluster.submit2(exe, args, cwd=cwd, 3774 input_files=input_files, output_files=output_files, 3775 required_output=required_output) 3776 3777 # # keep track of folders and arguments for splitted evt gen 3778 # subfolder=output_files[-1].split('/')[0] 3779 # if len(args) == 4 and '_' in subfolder: 3780 # self.split_folders[pjoin(cwd,subfolder)] = [exe] + args 3781 3782 elif 'shower' in exe: 3783 # a shower job 3784 # args are [shower, output(HEP or TOP), run_name] 3785 # cwd is the shower rundir, where the executable are found 3786 input_files, output_files = [], [] 3787 shower = args[0] 3788 # the input files 3789 if shower == 'PYTHIA8': 3790 input_files.append(pjoin(cwd, 'Pythia8.exe')) 3791 input_files.append(pjoin(cwd, 'Pythia8.cmd')) 3792 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 3793 input_files.append(pjoin(cwd, 'config.sh')) 3794 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc')) 3795 else: 3796 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc')) 3797 else: 3798 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower)) 3799 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower)) 3800 if shower == 'HERWIGPP': 3801 input_files.append(pjoin(cwd, 'Herwig++')) 3802 input_files.append(pjoin(cwd, 'HepMCFortran.so')) 3803 if len(args) == 3: 3804 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')): 3805 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')) 3806 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')): 3807 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')) 3808 else: 3809 raise aMCatNLOError, 'Event file not present in %s' % \ 3810 pjoin(self.me_dir, 'Events', self.run_name) 3811 else: 3812 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3])) 3813 # the output files 3814 if len(args) == 3: 3815 output_files.append('mcatnlo_run.log') 3816 else: 3817 output_files.append('mcatnlo_run_%s.log' % args[3]) 3818 if args[1] == 'HEP': 3819 if len(args) == 3: 3820 fname = 'events' 3821 else: 3822 fname = 'events_%s' % args[3] 3823 if shower in ['PYTHIA8', 'HERWIGPP']: 3824 output_files.append(fname + '.hepmc.gz') 3825 else: 3826 output_files.append(fname + '.hep.gz') 3827 elif args[1] == 'TOP' or args[1] == 'HWU': 3828 if len(args) == 3: 3829 fname = 'histfile' 3830 else: 3831 fname = 'histfile_%s' % args[3] 3832 output_files.append(fname + '.tar') 3833 else: 3834 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1] 3835 #submitting 3836 self.cluster.submit2(exe, args, cwd=cwd, 3837 input_files=input_files, output_files=output_files) 3838 3839 else: 3840 return self.cluster.submit(exe, args, cwd=cwd)
3841
3842 - def getIO_ajob(self,exe,cwd, args):
3843 # use local disk if possible => need to stands what are the 3844 # input/output files 3845 3846 output_files = [] 3847 required_output = [] 3848 input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'), 3849 pjoin(cwd, 'symfact.dat'), 3850 pjoin(cwd, 'iproc.dat'), 3851 pjoin(cwd, 'initial_states_map.dat'), 3852 pjoin(cwd, 'configs_and_props_info.dat'), 3853 pjoin(cwd, 'leshouche_info.dat'), 3854 pjoin(cwd, 'FKS_params.dat')] 3855 3856 # For GoSam interface, we must copy the SLHA card as well 3857 if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')): 3858 input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat')) 3859 3860 if os.path.exists(pjoin(cwd,'nevents.tar')): 3861 input_files.append(pjoin(cwd,'nevents.tar')) 3862 3863 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')): 3864 input_files.append(pjoin(cwd, 'OLE_order.olc')) 3865 3866 # File for the loop (might not be present if MadLoop is not used) 3867 if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \ 3868 cluster.need_transfer(self.options): 3869 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 3870 elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \ 3871 cluster.need_transfer(self.options): 3872 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz', 3873 dereference=True) 3874 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources') 3875 tf.close() 3876 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 3877 3878 if args[1] == 'born' or args[1] == 'all': 3879 # MADEVENT MINT FO MODE 3880 input_files.append(pjoin(cwd, 'madevent_mintFO')) 3881 if args[2] == '0': 3882 current = '%s_G%s' % (args[1],args[0]) 3883 else: 3884 current = '%s_G%s_%s' % (args[1],args[0],args[2]) 3885 if os.path.exists(pjoin(cwd,current)): 3886 input_files.append(pjoin(cwd, current)) 3887 output_files.append(current) 3888 3889 required_output.append('%s/results.dat' % current) 3890 required_output.append('%s/res_%s.dat' % (current,args[3])) 3891 required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 3892 required_output.append('%s/mint_grids' % current) 3893 required_output.append('%s/grid.MC_integer' % current) 3894 if args[3] != '0': 3895 required_output.append('%s/scale_pdf_dependence.dat' % current) 3896 3897 elif args[1] == 'F' or args[1] == 'B': 3898 # MINTMC MODE 3899 input_files.append(pjoin(cwd, 'madevent_mintMC')) 3900 3901 if args[2] == '0': 3902 current = 'G%s%s' % (args[1],args[0]) 3903 else: 3904 current = 'G%s%s_%s' % (args[1],args[0],args[2]) 3905 if os.path.exists(pjoin(cwd,current)): 3906 input_files.append(pjoin(cwd, current)) 3907 output_files.append(current) 3908 if args[2] > '0': 3909 # this is for the split event generation 3910 output_files.append('G%s%s_%s' % (args[1], args[0], args[2])) 3911 required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3])) 3912 3913 else: 3914 required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 3915 if args[3] in ['0','1']: 3916 required_output.append('%s/results.dat' % current) 3917 if args[3] == '1': 3918 output_files.append('%s/results.dat' % current) 3919 3920 else: 3921 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args)) 3922 3923 #Find the correct PDF input file 3924 pdfinput = self.get_pdf_input_filename() 3925 if os.path.exists(pdfinput): 3926 input_files.append(pdfinput) 3927 return input_files, output_files, required_output, args
3928 3929
3930 - def compile(self, mode, options):
3931 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 3932 specified in mode""" 3933 3934 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 3935 3936 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 3937 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 3938 3939 self.get_characteristics(pjoin(self.me_dir, 3940 'SubProcesses', 'proc_characteristics')) 3941 3942 #define a bunch of log files 3943 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 3944 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 3945 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 3946 test_log = pjoin(self.me_dir, 'test.log') 3947 3948 self.update_status('Compiling the code', level=None, update_results=True) 3949 3950 3951 libdir = pjoin(self.me_dir, 'lib') 3952 sourcedir = pjoin(self.me_dir, 'Source') 3953 3954 #clean files 3955 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 3956 #define which executable/tests to compile 3957 if '+' in mode: 3958 mode = mode.split('+')[0] 3959 if mode in ['NLO', 'LO']: 3960 exe = 'madevent_mintFO' 3961 tests = ['test_ME'] 3962 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 3963 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 3964 exe = 'madevent_mintMC' 3965 tests = ['test_ME', 'test_MC'] 3966 # write an analyse_opts with a dummy analysis so that compilation goes through 3967 open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w').write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n') 3968 3969 #directory where to compile exe 3970 p_dirs = [d for d in \ 3971 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 3972 # create param_card.inc and run_card.inc 3973 self.do_treatcards('', amcatnlo=True) 3974 # if --nocompile option is specified, check here that all exes exists. 3975 # If they exists, return 3976 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \ 3977 for p_dir in p_dirs]) and options['nocompile']: 3978 return 3979 3980 # rm links to lhapdflib/ PDFsets if exist 3981 if os.path.exists(pjoin(libdir, 'PDFsets')): 3982 files.rm(pjoin(libdir, 'PDFsets')) 3983 3984 # read the run_card to find if lhapdf is used or not 3985 if self.run_card['pdlabel'] == 'lhapdf' and \ 3986 (self.banner.get_detail('run_card', 'lpp1') != 0 or \ 3987 self.banner.get_detail('run_card', 'lpp2') != 0): 3988 3989 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs]) 3990 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 3991 lhaid_list = [self.run_card['lhaid']] 3992 if self.run_card['reweight_PDF']: 3993 lhaid_list.append(self.run_card['PDF_set_min']) 3994 lhaid_list.append(self.run_card['PDF_set_max']) 3995 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 3996 3997 else: 3998 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']: 3999 logger.info('Using built-in libraries for PDFs') 4000 if self.run_card['lpp1'] == 0 == self.run_card['lpp2']: 4001 logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.') 4002 try: 4003 del os.environ['lhapdf'] 4004 except KeyError: 4005 pass 4006 4007 # read the run_card to find if applgrid is used or not 4008 if self.run_card['iappl'] != 0: 4009 os.environ['applgrid'] = 'True' 4010 # check versions of applgrid and amcfast 4011 for code in ['applgrid','amcfast']: 4012 try: 4013 p = subprocess.Popen([self.options[code], '--version'], \ 4014 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 4015 except OSError: 4016 raise aMCatNLOError(('No valid %s installation found. \n' + \ 4017 'Please set the path to %s-config by using \n' + \ 4018 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code)) 4019 else: 4020 output, _ = p.communicate() 4021 if code is 'applgrid' and output < '1.4.63': 4022 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\ 4023 +' You are using %s',output) 4024 if code is 'amcfast' and output < '1.1.1': 4025 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\ 4026 +' You are using %s',output) 4027 4028 # set-up the Source/make_opts with the correct applgrid-config file 4029 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 4030 % (self.options['amcfast'],self.options['applgrid']) 4031 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 4032 text_out=[] 4033 for line in text: 4034 if line.strip().startswith('APPLLIBS=$'): 4035 line=appllibs 4036 text_out.append(line) 4037 open(pjoin(self.me_dir,'Source','make_opts'),'w').writelines(text_out) 4038 else: 4039 try: 4040 del os.environ['applgrid'] 4041 except KeyError: 4042 pass 4043 4044 try: 4045 os.environ['fastjet_config'] = self.options['fastjet'] 4046 except (TypeError, KeyError): 4047 if 'fastjet_config' in os.environ: 4048 del os.environ['fastjet_config'] 4049 os.unsetenv('fastjet_config') 4050 4051 # make Source 4052 self.update_status('Compiling source...', level=None) 4053 misc.compile(['clean4pdf'], cwd = sourcedir) 4054 misc.compile(cwd = sourcedir) 4055 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 4056 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 4057 and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 4058 and os.path.exists(pjoin(libdir, 'libpdf.a')): 4059 logger.info(' ...done, continuing with P* directories') 4060 else: 4061 raise aMCatNLOError('Compilation failed') 4062 4063 # make StdHep (only necessary with MG option output_dependencies='internal') 4064 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 4065 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 4066 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 4067 if os.path.exists(pjoin(sourcedir,'StdHEP')): 4068 logger.info('Compiling StdHEP (can take a couple of minutes) ...') 4069 misc.compile(['StdHEP'], cwd = sourcedir) 4070 logger.info(' ...done.') 4071 else: 4072 raise aMCatNLOError('Could not compile StdHEP because its'+\ 4073 ' source directory could not be found in the SOURCE folder.\n'+\ 4074 " Check the MG5_aMC option 'output_dependencies.'") 4075 4076 # make CutTools (only necessary with MG option output_dependencies='internal') 4077 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 4078 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 4079 if os.path.exists(pjoin(sourcedir,'CutTools')): 4080 logger.info('Compiling CutTools (can take a couple of minutes) ...') 4081 misc.compile(['CutTools'], cwd = sourcedir) 4082 logger.info(' ...done.') 4083 else: 4084 raise aMCatNLOError('Could not compile CutTools because its'+\ 4085 ' source directory could not be found in the SOURCE folder.\n'+\ 4086 " Check the MG5_aMC option 'output_dependencies.'") 4087 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 4088 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 4089 raise aMCatNLOError('CutTools compilation failed.') 4090 4091 # Verify compatibility between current compiler and the one which was 4092 # used when last compiling CutTools (if specified). 4093 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 4094 libdir, 'libcts.a')))),'compiler_version.log') 4095 if os.path.exists(compiler_log_path): 4096 compiler_version_used = open(compiler_log_path,'r').read() 4097 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 4098 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 4099 if os.path.exists(pjoin(sourcedir,'CutTools')): 4100 logger.info('CutTools was compiled with a different fortran'+\ 4101 ' compiler. Re-compiling it now...') 4102 misc.compile(['cleanCT'], cwd = sourcedir) 4103 misc.compile(['CutTools'], cwd = sourcedir) 4104 logger.info(' ...done.') 4105 else: 4106 raise aMCatNLOError("CutTools installation in %s"\ 4107 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 4108 " seems to have been compiled with a different compiler than"+\ 4109 " the one specified in MG5_aMC. Please recompile CutTools.") 4110 4111 # make IREGI (only necessary with MG option output_dependencies='internal') 4112 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 4113 and os.path.exists(pjoin(sourcedir,'IREGI')): 4114 logger.info('Compiling IREGI (can take a couple of minutes) ...') 4115 misc.compile(['IREGI'], cwd = sourcedir) 4116 logger.info(' ...done.') 4117 4118 if os.path.exists(pjoin(libdir, 'libiregi.a')): 4119 # Verify compatibility between current compiler and the one which was 4120 # used when last compiling IREGI (if specified). 4121 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 4122 libdir, 'libiregi.a')))),'compiler_version.log') 4123 if os.path.exists(compiler_log_path): 4124 compiler_version_used = open(compiler_log_path,'r').read() 4125 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 4126 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 4127 if os.path.exists(pjoin(sourcedir,'IREGI')): 4128 logger.info('IREGI was compiled with a different fortran'+\ 4129 ' compiler. Re-compiling it now...') 4130 misc.compile(['cleanIR'], cwd = sourcedir) 4131 misc.compile(['IREGI'], cwd = sourcedir) 4132 logger.info(' ...done.') 4133 else: 4134 raise aMCatNLOError("IREGI installation in %s"\ 4135 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 4136 " seems to have been compiled with a different compiler than"+\ 4137 " the one specified in MG5_aMC. Please recompile IREGI.") 4138 4139 # check if MadLoop virtuals have been generated 4140 if self.proc_characteristics['has_loops'] and \ 4141 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4142 os.environ['madloop'] = 'true' 4143 if mode in ['NLO', 'aMC@NLO', 'noshower']: 4144 tests.append('check_poles') 4145 else: 4146 os.unsetenv('madloop') 4147 4148 # make and run tests (if asked for), gensym and make madevent in each dir 4149 self.update_status('Compiling directories...', level=None) 4150 4151 for test in tests: 4152 self.write_test_input(test) 4153 4154 try: 4155 import multiprocessing 4156 if not self.nb_core: 4157 try: 4158 self.nb_core = int(self.options['nb_core']) 4159 except TypeError: 4160 self.nb_core = multiprocessing.cpu_count() 4161 except ImportError: 4162 self.nb_core = 1 4163 4164 compile_options = copy.copy(self.options) 4165 compile_options['nb_core'] = self.nb_core 4166 compile_cluster = cluster.MultiCore(**compile_options) 4167 logger.info('Compiling on %d cores' % self.nb_core) 4168 4169 update_status = lambda i, r, f: self.donothing(i,r,f) 4170 for p_dir in p_dirs: 4171 compile_cluster.submit(prog = compile_dir, 4172 argument = [self.me_dir, p_dir, mode, options, 4173 tests, exe, self.options['run_mode']]) 4174 try: 4175 compile_cluster.wait(self.me_dir, update_status) 4176 except Exception, error: 4177 logger.warning("Fail to compile the Subprocesses") 4178 if __debug__: 4179 raise 4180 compile_cluster.remove() 4181 self.do_quit('') 4182 4183 logger.info('Checking test output:') 4184 for p_dir in p_dirs: 4185 logger.info(p_dir) 4186 for test in tests: 4187 logger.info(' Result for %s:' % test) 4188 4189 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir) 4190 #check that none of the tests failed 4191 self.check_tests(test, this_dir)
4192 4193
4194 - def donothing(*args):
4195 pass
4196 4197
4198 - def check_tests(self, test, dir):
4199 """just call the correct parser for the test log. 4200 Skip check_poles for LOonly folders""" 4201 if test in ['test_ME', 'test_MC']: 4202 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test)) 4203 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')): 4204 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
4205 4206
4207 - def parse_test_mx_log(self, log):
4208 """read and parse the test_ME/MC.log file""" 4209 content = open(log).read() 4210 if 'FAILED' in content: 4211 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK') 4212 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \ 4213 'Please check that widths of final state particles (e.g. top) have been' + \ 4214 ' set to 0 in the param_card.dat.') 4215 else: 4216 lines = [l for l in content.split('\n') if 'PASSED' in l] 4217 logger.info(' Passed.') 4218 logger.debug('\n'+'\n'.join(lines))
4219 4220
4221 - def parse_check_poles_log(self, log):
4222 """reads and parse the check_poles.log file""" 4223 content = open(log).read() 4224 npass = 0 4225 nfail = 0 4226 for line in content.split('\n'): 4227 if 'PASSED' in line: 4228 npass +=1 4229 tolerance = float(line.split()[1]) 4230 if 'FAILED' in line: 4231 nfail +=1 4232 tolerance = float(line.split()[1]) 4233 4234 if nfail + npass == 0: 4235 logger.warning('0 points have been tried') 4236 return 4237 4238 if float(nfail)/float(nfail+npass) > 0.1: 4239 raise aMCatNLOError('Poles do not cancel, run cannot continue') 4240 else: 4241 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \ 4242 %(npass, nfail+npass, tolerance))
4243 4244
4245 - def write_test_input(self, test):
4246 """write the input files to run test_ME/MC or check_poles""" 4247 if test in ['test_ME', 'test_MC']: 4248 content = "-2 -2\n" #generate randomly energy/angle 4249 content+= "100 100\n" #run 100 points for soft and collinear tests 4250 content+= "0\n" #sum over helicities 4251 content+= "0\n" #all FKS configs 4252 content+= '\n'.join(["-1"] * 50) #random diagram 4253 elif test == 'check_poles': 4254 content = '20 \n -1\n' 4255 4256 file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w') 4257 if test == 'test_MC': 4258 shower = self.run_card['parton_shower'] 4259 MC_header = "%s\n " % shower + \ 4260 "1 \n1 -0.1\n-1 -0.1\n" 4261 file.write(MC_header + content) 4262 else: 4263 file.write(content) 4264 file.close()
4265 4266 4267 4268 ############################################################################
4269 - def find_model_name(self):
4270 """ return the model name """ 4271 if hasattr(self, 'model_name'): 4272 return self.model_name 4273 4274 model = 'sm' 4275 proc = [] 4276 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')): 4277 line = line.split('#')[0] 4278 #line = line.split('=')[0] 4279 if line.startswith('import') and 'model' in line: 4280 model = line.split()[2] 4281 proc = [] 4282 elif line.startswith('generate'): 4283 proc.append(line.split(None,1)[1]) 4284 elif line.startswith('add process'): 4285 proc.append(line.split(None,2)[2]) 4286 4287 self.model = model 4288 self.process = proc 4289 return model
4290 4291 4292 4293 ############################################################################
4294 - def ask_run_configuration(self, mode, options, switch={}):
4295 """Ask the question when launching generate_events/multi_run""" 4296 4297 if 'parton' not in options: 4298 options['parton'] = False 4299 if 'reweightonly' not in options: 4300 options['reweightonly'] = False 4301 4302 4303 void = 'NOT INSTALLED' 4304 switch_order = ['order', 'fixed_order', 'shower','madspin', 'reweight'] 4305 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void, 4306 'madspin': void,'reweight':'OFF'} 4307 if not switch: 4308 switch = switch_default 4309 else: 4310 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch)) 4311 default_switch = ['ON', 'OFF'] 4312 4313 4314 allowed_switch_value = {'order': ['LO', 'NLO'], 4315 'fixed_order': default_switch, 4316 'shower': default_switch, 4317 'madspin': default_switch, 4318 'reweight': default_switch} 4319 4320 4321 4322 4323 4324 description = {'order': 'Perturbative order of the calculation:', 4325 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):', 4326 'shower': 'Shower the generated events:', 4327 'madspin': 'Decay particles with the MadSpin module:', 4328 'reweight': 'Add weights to the events based on changing model parameters:'} 4329 4330 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'}, 4331 ('madspin', 'ON'): {'fixed_order':'OFF'}, 4332 ('reweight', 'ON'): {'fixed_order':'OFF'}, 4333 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF', 'reweight':'OFF'} 4334 } 4335 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] 4336 4337 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void ) 4338 4339 if self.proc_characteristics['ninitial'] == 1: 4340 switch['fixed_order'] = 'ON' 4341 switch['shower'] = 'Not available for decay' 4342 switch['madspin'] = 'Not available for decay' 4343 switch['reweight'] = 'Not available for decay' 4344 allowed_switch_value['fixed_order'] = ['ON'] 4345 allowed_switch_value['shower'] = ['OFF'] 4346 allowed_switch_value['madspin'] = ['OFF'] 4347 allowed_switch_value['reweight'] = ['OFF'] 4348 available_mode = ['0','1'] 4349 special_values = ['LO', 'NLO'] 4350 else: 4351 # Init the switch value according to the current status 4352 available_mode = ['0', '1', '2','3'] 4353 4354 if mode == 'auto': 4355 mode = None 4356 if not mode and (options['parton'] or options['reweightonly']): 4357 mode = 'noshower' 4358 4359 4360 if '3' in available_mode: 4361 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')): 4362 switch['shower'] = 'ON' 4363 else: 4364 switch['shower'] = 'OFF' 4365 4366 if (not aMCatNLO or self.options['mg5_path']) and '3' in available_mode: 4367 available_mode.append('4') 4368 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 4369 switch['madspin'] = 'ON' 4370 else: 4371 switch['madspin'] = 'OFF' 4372 if misc.has_f2py() or self.options['f2py_compiler']: 4373 available_mode.append('5') 4374 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')): 4375 switch['reweight'] = 'ON' 4376 else: 4377 switch['reweight'] = 'OFF' 4378 else: 4379 switch['reweight'] = 'Not available (requires NumPy)' 4380 4381 if 'do_reweight' in options and options['do_reweight'] and '3' in available_mode: 4382 if switch['reweight'] == "OFF": 4383 switch['reweight'] = "ON" 4384 elif switch['reweight'] != "ON": 4385 logger.critical("Cannot run REWEIGHT: %s" % switch['reweight']) 4386 if 'do_madspin' in options and options['do_madspin']: 4387 if switch['madspin'] == "OFF": 4388 switch['madspin'] = 'ON' 4389 elif switch['madspin'] != "ON": 4390 logger.critical("Cannot run MadSpin module: %s" % switch['reweight']) 4391 4392 answers = list(available_mode) + ['auto', 'done'] 4393 alias = {} 4394 for id, key in enumerate(switch_order): 4395 if switch[key] != void and switch[key] in allowed_switch_value[key] and \ 4396 len(allowed_switch_value[key]) >1: 4397 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]] 4398 #allow lower case for on/off 4399 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s)) 4400 for s in allowed_switch_value[key])) 4401 answers += special_values 4402 4403 def create_question(switch): 4404 switch_format = " %i %-61s %12s=%s\n" 4405 question = "The following switches determine which operations are executed:\n" 4406 for id, key in enumerate(switch_order): 4407 question += switch_format % (id+1, description[key], key, switch[key]) 4408 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1) 4409 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n' 4410 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n' 4411 return question
4412 4413 4414 def modify_switch(mode, answer, switch): 4415 if '=' in answer: 4416 key, status = answer.split('=') 4417 switch[key] = status 4418 if (key, status) in force_switch: 4419 for key2, status2 in force_switch[(key, status)].items(): 4420 if switch[key2] not in [status2, void]: 4421 logger.info('For coherence \'%s\' is set to \'%s\'' 4422 % (key2, status2), '$MG:color:BLACK') 4423 switch[key2] = status2 4424 elif answer in ['0', 'auto', 'done']: 4425 return 4426 elif answer in special_values: 4427 logger.info('Enter mode value: %s. Go to the related mode' % answer, '$MG:color:BLACK') 4428 #assign_switch('reweight', 'OFF') 4429 #assign_switch('madspin', 'OFF') 4430 if answer == 'LO': 4431 switch['order'] = 'LO' 4432 switch['fixed_order'] = 'ON' 4433 assign_switch('shower', 'OFF') 4434 elif answer == 'NLO': 4435 switch['order'] = 'NLO' 4436 switch['fixed_order'] = 'ON' 4437 assign_switch('shower', 'OFF') 4438 elif answer == 'aMC@NLO': 4439 switch['order'] = 'NLO' 4440 switch['fixed_order'] = 'OFF' 4441 assign_switch('shower', 'ON') 4442 elif answer == 'aMC@LO': 4443 switch['order'] = 'LO' 4444 switch['fixed_order'] = 'OFF' 4445 assign_switch('shower', 'ON') 4446 elif answer == 'noshower': 4447 switch['order'] = 'NLO' 4448 switch['fixed_order'] = 'OFF' 4449 assign_switch('shower', 'OFF') 4450 elif answer == 'noshowerLO': 4451 switch['order'] = 'LO' 4452 switch['fixed_order'] = 'OFF' 4453 assign_switch('shower', 'OFF') 4454 if mode: 4455 return 4456 return switch 4457 4458 modify_switch(mode, self.last_mode, switch) 4459 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 4460 assign_switch('madspin', 'ON') 4461 4462 if not self.force: 4463 answer = '' 4464 while answer not in ['0', 'done', 'auto', 'onlyshower']: 4465 question = create_question(switch) 4466 if mode: 4467 answer = mode 4468 else: 4469 answer = self.ask(question, '0', answers, alias=alias) 4470 if answer.isdigit() and answer != '0': 4471 key = switch_order[int(answer) - 1] 4472 opt1 = allowed_switch_value[key][0] 4473 opt2 = allowed_switch_value[key][1] 4474 answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2) 4475 4476 if not modify_switch(mode, answer, switch): 4477 break 4478 4479 #assign the mode depending of the switch 4480 if not mode or mode == 'auto': 4481 if switch['order'] == 'LO': 4482 if switch['shower'] == 'ON': 4483 mode = 'aMC@LO' 4484 elif switch['fixed_order'] == 'ON': 4485 mode = 'LO' 4486 else: 4487 mode = 'noshowerLO' 4488 elif switch['order'] == 'NLO': 4489 if switch['shower'] == 'ON': 4490 mode = 'aMC@NLO' 4491 elif switch['fixed_order'] == 'ON': 4492 mode = 'NLO' 4493 else: 4494 mode = 'noshower' 4495 logger.info('will run in mode: %s' % mode) 4496 4497 if mode == 'noshower': 4498 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical. 4499 Please, shower the Les Houches events before using them for physics analyses.""") 4500 4501 4502 # specify the cards which are needed for this run. 4503 cards = ['param_card.dat', 'run_card.dat'] 4504 ignore = [] 4505 if mode in ['LO', 'NLO']: 4506 options['parton'] = True 4507 ignore = ['shower_card.dat', 'madspin_card.dat'] 4508 cards.append('FO_analyse_card.dat') 4509 else: 4510 if switch['madspin'] == 'ON': 4511 cards.append('madspin_card.dat') 4512 if switch['reweight'] == 'ON': 4513 cards.append('reweight_card.dat') 4514 if 'aMC@' in mode: 4515 cards.append('shower_card.dat') 4516 if mode == 'onlyshower': 4517 cards = ['shower_card.dat'] 4518 if options['reweightonly']: 4519 cards = ['run_card.dat'] 4520 4521 self.keep_cards(cards, ignore) 4522 4523 if mode =='onlyshower': 4524 cards = ['shower_card.dat'] 4525 4526 if not options['force'] and not self.force: 4527 self.ask_edit_cards(cards, plot=False) 4528 4529 self.banner = banner_mod.Banner() 4530 4531 # store the cards in the banner 4532 for card in cards: 4533 self.banner.add(pjoin(self.me_dir, 'Cards', card)) 4534 # and the run settings 4535 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()]) 4536 self.banner.add_text('run_settings', run_settings) 4537 4538 if not mode =='onlyshower': 4539 self.run_card = self.banner.charge_card('run_card') 4540 self.run_tag = self.run_card['run_tag'] 4541 #this is if the user did not provide a name for the current run 4542 if not hasattr(self, 'run_name') or not self.run_name: 4543 self.run_name = self.find_available_run_name(self.me_dir) 4544 #add a tag in the run_name for distinguish run_type 4545 if self.run_name.startswith('run_'): 4546 if mode in ['LO','aMC@LO','noshowerLO']: 4547 self.run_name += '_LO' 4548 self.set_run_name(self.run_name, self.run_tag, 'parton') 4549 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']: 4550 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""") 4551 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']: 4552 logger.warning("""You are running with FxFx merging enabled. To be able to merge 4553 samples of various multiplicities without double counting, you 4554 have to remove some events after showering 'by hand'. Please 4555 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 4556 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q': 4557 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""") 4558 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8': 4559 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \ 4560 "Type \'n\' to stop or \'y\' to continue" 4561 answers = ['n','y'] 4562 answer = self.ask(question, 'n', answers, alias=alias) 4563 if answer == 'n': 4564 error = '''Stop opertation''' 4565 self.ask_run_configuration(mode, options) 4566 # raise aMCatNLOError(error) 4567 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']: 4568 # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs. 4569 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""") 4570 if 'aMC@' in mode or mode == 'onlyshower': 4571 self.shower_card = self.banner.charge_card('shower_card') 4572 4573 elif mode in ['LO', 'NLO']: 4574 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat') 4575 self.analyse_card = self.banner.charge_card('FO_analyse_card') 4576 4577 4578 return mode 4579 4580 4581 #=============================================================================== 4582 # aMCatNLOCmd 4583 #===============================================================================
4584 -class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
4585 """The command line processor of MadGraph"""
4586 4587 _compile_usage = "compile [MODE] [options]\n" + \ 4588 "-- compiles aMC@NLO \n" + \ 4589 " MODE can be either FO, for fixed-order computations, \n" + \ 4590 " or MC for matching with parton-shower monte-carlos. \n" + \ 4591 " (if omitted, it is set to MC)\n" 4592 _compile_parser = misc.OptionParser(usage=_compile_usage) 4593 _compile_parser.add_option("-f", "--force", default=False, action='store_true', 4594 help="Use the card present in the directory for the launch, without editing them") 4595 4596 _launch_usage = "launch [MODE] [options]\n" + \ 4597 "-- execute aMC@NLO \n" + \ 4598 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \ 4599 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \ 4600 " computation of the total cross-section and the filling of parton-level histograms \n" + \ 4601 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \ 4602 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \ 4603 " event file is generated which will be showered with the MonteCarlo specified \n" + \ 4604 " in the run_card.dat\n" 4605 4606 _launch_parser = misc.OptionParser(usage=_launch_usage) 4607 _launch_parser.add_option("-f", "--force", default=False, action='store_true', 4608 help="Use the card present in the directory for the launch, without editing them") 4609 _launch_parser.add_option("-c", "--cluster", default=False, action='store_true', 4610 help="Submit the jobs on the cluster") 4611 _launch_parser.add_option("-m", "--multicore", default=False, action='store_true', 4612 help="Submit the jobs on multicore mode") 4613 _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true', 4614 help="Skip compilation. Ignored if no executable is found") 4615 _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true', 4616 help="Skip integration and event generation, just run reweight on the" + \ 4617 " latest generated event files (see list in SubProcesses/nevents_unweighted)") 4618 _launch_parser.add_option("-p", "--parton", default=False, action='store_true', 4619 help="Stop the run after the parton level file generation (you need " + \ 4620 "to shower the file in order to get physical results)") 4621 _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true', 4622 help="Skip grid set up, just generate events starting from " + \ 4623 "the last available results") 4624 _launch_parser.add_option("-n", "--name", default=False, dest='run_name', 4625 help="Provide a name to the run") 4626 _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid', 4627 help="For use with APPLgrid only: start from existing grids") 4628 _launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true', 4629 help="Run the reweight module (reweighting by different model parameter") 4630 _launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true', 4631 help="Run the madspin package") 4632 4633 4634 4635 _generate_events_usage = "generate_events [MODE] [options]\n" + \ 4636 "-- execute aMC@NLO \n" + \ 4637 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \ 4638 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \ 4639 " computation of the total cross-section and the filling of parton-level histograms \n" + \ 4640 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \ 4641 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \ 4642 " event file is generated which will be showered with the MonteCarlo specified \n" + \ 4643 " in the run_card.dat\n" 4644 4645 _generate_events_parser = misc.OptionParser(usage=_generate_events_usage) 4646 _generate_events_parser.add_option("-f", "--force", default=False, action='store_true', 4647 help="Use the card present in the directory for the generate_events, without editing them") 4648 _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true', 4649 help="Submit the jobs on the cluster") 4650 _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true', 4651 help="Submit the jobs on multicore mode") 4652 _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true', 4653 help="Skip compilation. Ignored if no executable is found") 4654 _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true', 4655 help="Skip integration and event generation, just run reweight on the" + \ 4656 " latest generated event files (see list in SubProcesses/nevents_unweighted)") 4657 _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true', 4658 help="Stop the run after the parton level file generation (you need " + \ 4659 "to shower the file in order to get physical results)") 4660 _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true', 4661 help="Skip grid set up, just generate events starting from " + \ 4662 "the last available results") 4663 _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name', 4664 help="Provide a name to the run") 4665 4666 4667 4668 _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \ 4669 "-- calculate cross-section up to ORDER.\n" + \ 4670 " ORDER can be either LO or NLO (if omitted, it is set to NLO). \n" 4671 4672 _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage) 4673 _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true', 4674 help="Use the card present in the directory for the launch, without editing them") 4675 _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true', 4676 help="Submit the jobs on the cluster") 4677 _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true', 4678 help="Submit the jobs on multicore mode") 4679 _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true', 4680 help="Skip compilation. Ignored if no executable is found") 4681 _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name', 4682 help="Provide a name to the run") 4683 _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid', 4684 help="For use with APPLgrid only: start from existing grids") 4685 _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true', 4686 help="Skip grid set up, just generate events starting from " + \ 4687 "the last available results") 4688 4689 _shower_usage = 'shower run_name [options]\n' + \ 4690 '-- do shower/hadronization on parton-level file generated for run run_name\n' + \ 4691 ' all the information (e.g. number of events, MonteCarlo, ...\n' + \ 4692 ' are directly read from the header of the event file\n' 4693 _shower_parser = misc.OptionParser(usage=_shower_usage) 4694 _shower_parser.add_option("-f", "--force", default=False, action='store_true', 4695 help="Use the shower_card present in the directory for the launch, without editing") 4696