Package madgraph :: Package interface :: Module amcatnlo_run_interface
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.interface.amcatnlo_run_interface

   1   ################################################################################ 
   2  # 
   3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
  18  from __future__ import division 
  19   
  20  import atexit 
  21  import glob 
  22  import logging 
  23  import math 
  24  import optparse 
  25  import os 
  26  import pydoc 
  27  import random 
  28  import re 
  29  import shutil 
  30  import subprocess 
  31  import sys 
  32  import traceback 
  33  import time 
  34  import signal 
  35  import tarfile 
  36  import copy 
  37  import datetime 
  38  import tarfile 
  39  import traceback 
  40  import StringIO 
  41  try: 
  42      import cpickle as pickle 
  43  except: 
  44      import pickle 
  45   
  46  try: 
  47      import readline 
  48      GNU_SPLITTING = ('GNU' in readline.__doc__) 
  49  except: 
  50      GNU_SPLITTING = True 
  51   
  52  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  53  root_path = os.path.split(root_path)[0] 
  54  sys.path.insert(0, os.path.join(root_path,'bin')) 
  55   
  56  # usefull shortcut 
  57  pjoin = os.path.join 
  58  # Special logger for the Cmd Interface 
  59  logger = logging.getLogger('madgraph.stdout') # -> stdout 
  60  logger_stderr = logging.getLogger('madgraph.stderr') # ->stderr 
  61    
  62  try: 
  63      import madgraph 
  64  except ImportError:  
  65      aMCatNLO = True  
  66      import internal.extended_cmd as cmd 
  67      import internal.common_run_interface as common_run 
  68      import internal.banner as banner_mod 
  69      import internal.misc as misc     
  70      from internal import InvalidCmd, MadGraph5Error 
  71      import internal.files as files 
  72      import internal.cluster as cluster 
  73      import internal.save_load_object as save_load_object 
  74      import internal.gen_crossxhtml as gen_crossxhtml 
  75      import internal.sum_html as sum_html 
  76      import internal.shower_card as shower_card 
  77      import internal.FO_analyse_card as analyse_card  
  78      import internal.lhe_parser as lhe_parser 
  79  else: 
  80      # import from madgraph directory 
  81      aMCatNLO = False 
  82      import madgraph.interface.extended_cmd as cmd 
  83      import madgraph.interface.common_run_interface as common_run 
  84      import madgraph.iolibs.files as files 
  85      import madgraph.iolibs.save_load_object as save_load_object 
  86      import madgraph.madevent.gen_crossxhtml as gen_crossxhtml 
  87      import madgraph.madevent.sum_html as sum_html 
  88      import madgraph.various.banner as banner_mod 
  89      import madgraph.various.cluster as cluster 
  90      import madgraph.various.misc as misc 
  91      import madgraph.various.shower_card as shower_card 
  92      import madgraph.various.FO_analyse_card as analyse_card 
  93      import madgraph.various.lhe_parser as lhe_parser 
  94      from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR 
class aMCatNLOError(Exception):
    """Generic error raised by the aMC@NLO run interface.

    NOTE(review): this re-declaration shadows the aMCatNLOError imported
    from the madgraph package in the non-standalone import branch above —
    confirm this is intentional before removing either definition.
    """
    pass
98
def compile_dir(*arguments):
    """compile the direcory p_dir
    arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
    this function needs not to be a class method in order to do
    the compilation on multicore"""

    # accept either one packed tuple (multiprocessing map style) or the
    # seven values passed positionally
    if len(arguments) == 1:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
    elif len(arguments) == 7:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
    else:
        # modernised raise syntax (valid on python 2 and 3)
        raise aMCatNLOError('not correct number of argument')
    logger.info(' Compiling %s...' % p_dir)

    this_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        # compile and run tests
        for test in tests:
            # skip check_poles for LOonly dirs
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
                continue
            # both soft/collinear limit tests share one executable
            if test == 'test_ME' or test == 'test_MC':
                test_exe = 'test_soft_col_limits'
            else:
                test_exe = test
            misc.compile([test_exe], cwd=this_dir, job_specs=False)
            # bug fix: renamed 'input' (shadowed the builtin) and close the
            # stdin/stdout handles once the test has run (they leaked before)
            input_file = pjoin(me_dir, '%s_input.txt' % test)
            #this can be improved/better written to handle the output
            stdin_f = open(input_file)
            stdout_f = open(pjoin(this_dir, '%s.log' % test), 'w')
            try:
                misc.call(['./%s' % (test_exe)], cwd=this_dir,
                          stdin=stdin_f, stdout=stdout_f, close_fds=True)
            finally:
                stdin_f.close()
                stdout_f.close()
            # pack the MadLoop resources so they can be shipped to the nodes
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'MadLoop5_resources')):
                tf = tarfile.open(pjoin(this_dir, 'MadLoop5_resources.tar.gz'), 'w:gz',
                                  dereference=True)
                try:
                    tf.add(pjoin(this_dir, 'MadLoop5_resources'), arcname='MadLoop5_resources')
                finally:
                    tf.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=this_dir, job_specs=False)
            gensym_log = open(pjoin(this_dir, 'gensym.log'), 'w')
            try:
                misc.call(['./gensym'], cwd=this_dir,
                          stdout=gensym_log,
                          close_fds=True)
            finally:
                gensym_log.close()
            #compile madevent_mintMC/mintFO
            misc.compile([exe], cwd=this_dir, job_specs=False)
            if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
                misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs=False)

        logger.info(' %s done.' % p_dir)
        return 0
    except MadGraph5Error as msg:
        # return (not raise) so a failing directory can be reported back
        # from a multicore worker process
        return msg
153
def check_compiler(options, block=False):
    """check that the current fortran compiler is gfortran 4.6 or later.
    If block, stops the execution, otherwise just print a warning"""

    msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected\n'+\
          'Note that You can still run all MadEvent run without any problem!'
    #first check that gfortran is installed
    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        # bug fix: the old test compared ''.join(version.split('.')) >= '46'
        # as *strings*, which wrongly rejects e.g. gfortran 10.2 ('102' < '46')
        # and wrongly accepts 4.60-style collisions; compare numerically.
        try:
            version = tuple(int(i) for i in curr_version.split('.')[:2])
        except ValueError:
            # unparsable version string: fall through to the warning/raise
            version = ()
        if version < (4, 6):
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
            else:
                logger.warning(msg % (compiler + ' ' + curr_version))
182
#===============================================================================
# CmdExtended
#===============================================================================
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    #suggested list of command
    next_possibility = {
        'start': [],
    }

    # file used to store debug information on crash
    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'


    keyboard_stop_msg = """stopping all operation
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Define the Error
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError

    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # Tag allowing/forbiding question
        self.force = False

        # If possible, build an info line with current version number
        # and date, from the VERSION text file
        info = misc.get_pkg_info()
        info_line = ""
        # NOTE: dict.has_key is Python-2 only
        if info and info.has_key('version') and info.has_key('date'):
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#* VERSION %s %s %s *\n" % \
                            (info['version'],
                            (30 - len_version - len_date) * ' ',
                            info['date'])
        else:
            # fall back on the version stored in MGMEVersion.txt
            version = open(pjoin(root_path, 'MGMEVersion.txt')).readline().strip()
            info_line = "#* VERSION %s %s *\n" % \
                            (version, (24 - len(version)) * ' ')

        # Create a header for the history file.
        # Remember to fill in time at writeout time!
        self.history_header = \
        '#************************************************************\n' + \
        '#* MadGraph5_aMC@NLO *\n' + \
        '#* *\n' + \
        "#* * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * * * 5 * * * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * *\n" + \
        "#* *\n" + \
        "#* *\n" + \
        info_line + \
        "#* *\n" + \
        "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
        "#* and *\n" + \
        "#* http://amcatnlo.cern.ch *\n" + \
        '#* *\n' + \
        '#************************************************************\n' + \
        '#* *\n' + \
        '#* Command File for aMCatNLO *\n' + \
        '#* *\n' + \
        '#* run as ./bin/aMCatNLO.py filename *\n' + \
        '#* *\n' + \
        '#************************************************************\n'

        # drop the leading '#' when reusing the line in the welcome banner
        if info_line:
            info_line = info_line[1:]

        logger.info(\
        "************************************************************\n" + \
        "* *\n" + \
        "* W E L C O M E to M A D G R A P H 5 *\n" + \
        "* a M C @ N L O *\n" + \
        "* *\n" + \
        "* * * *\n" + \
        "* * * * * *\n" + \
        "* * * * * 5 * * * * *\n" + \
        "* * * * * *\n" + \
        "* * * *\n" + \
        "* *\n" + \
        info_line + \
        "* *\n" + \
        "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "* http://amcatnlo.cern.ch *\n" + \
        "* *\n" + \
        "* Type 'help' for in-line help. *\n" + \
        "* *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)

    def get_history_header(self):
        """return the history header"""
        # the header carries a %-style slot filled with the current time
        return self.history_header % misc.get_time_info()

    def stop_on_keyboard_stop(self):
        """action to perform to close nicely on a keyboard interupt"""
        try:
            if hasattr(self, 'cluster'):
                logger.info('rm jobs on queue')
                self.cluster.remove()
            if hasattr(self, 'results'):
                self.update_status('Stop by the user', level=None, makehtml=True, error=True)
                self.add_error_log_in_html(KeyboardInterrupt)
        except:
            # best-effort cleanup: never raise while already handling Ctrl-C
            pass

    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # relaxing the tag forbidding question
        self.force = False

        if not self.use_rawinput:
            return stop


        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit','quit','EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            # the html status update is cosmetic; do not abort the loop for it
            misc.sprint('self.update_status fails', log=logger)
            pass

    def nice_user_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_user_error(self, error, line)

    def nice_config_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_config_error(self, error, line)

    def nice_error_handling(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_error_handling(self, error, line)
#===============================================================================
# HelpToCmd
#===============================================================================
class HelpToCmd(object):
    """ The Series of help routine for the aMCatNLOCmd"""

    def help_launch(self):
        """help for launch command"""
        _launch_parser.print_help()

    def help_banner_run(self):
        """help for the banner_run command"""
        logger.info("syntax: banner_run Path|RUN [--run_options]")
        logger.info("-- Reproduce a run following a given banner")
        logger.info(" One of the following argument is require:")
        logger.info(" Path should be the path of a valid banner.")
        logger.info(" RUN should be the name of a run of the current directory")
        self.run_options_help([('-f','answer all question by default'),
                               ('--name=X', 'Define the name associated with the new run')])


    def help_compile(self):
        """help for compile command"""
        _compile_parser.print_help()

    def help_generate_events(self):
        """help for generate_events command
        just call help_launch"""
        _generate_events_parser.print_help()


    def help_calculate_xsect(self):
        """help for calculate_xsect command"""
        _calculate_xsect_parser.print_help()

    def help_shower(self):
        """help for shower command"""
        _shower_parser.print_help()


    def help_open(self):
        """help for the open command"""
        logger.info("syntax: open FILE ")
        logger.info("-- open a file with the appropriate editor.")
        logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
        logger.info(' the path to the last created/used directory is used')

    def run_options_help(self, data):
        """print the local options in 'data' plus the session-wide run options"""
        if data:
            logger.info('-- local options:')
            for name, info in data:
                logger.info(' %s : %s' % (name, info))

        logger.info("-- session options:")
        logger.info(" Note that those options will be kept for the current session")
        logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
        logger.info(" --multicore : Run in multi-core configuration")
        logger.info(" --nb_core=X : limit the number of core to use to X.")
#===============================================================================
# CheckValidForCmd
#===============================================================================
class CheckValidForCmd(object):
    """ The Series of check routine for the aMCatNLOCmd"""

    def check_shower(self, args, options):
        """Check the validity of the line. args[0] is the run_directory"""

        if options['force']:
            self.force = True

        if len(args) == 0:
            self.help_shower()
            raise self.InvalidCmd('Invalid syntax, please specify the run name')
        if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
            raise self.InvalidCmd('Directory %s does not exists' % \
                            pjoin(os.getcwd(), 'Events', args[0]))

        self.set_run_name(args[0], level='shower')
        # replace the run name by its full path for the caller
        args[0] = pjoin(self.me_dir, 'Events', args[0])

    def check_plot(self, args):
        """Check the argument for the plot command
        plot run_name modes"""

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir or not td:
            logger.info('Retry to read configuration file to find madanalysis/td')
            self.set_configuration()

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir:
            error_msg = 'No Madanalysis path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)
        if not td:
            error_msg = 'No path to td directory correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        if len(args) == 0:
            if not hasattr(self, 'run_name') or not self.run_name:
                self.help_plot()
                raise self.InvalidCmd('No run name currently define. Please add this information.')
            args.append('all')
            return

        # a leading non-mode argument is taken as the run name
        if args[0] not in self._plot_mode:
            self.set_run_name(args[0], level='plot')
            del args[0]
            if len(args) == 0:
                args.append('all')
        elif not self.run_name:
            self.help_plot()
            raise self.InvalidCmd('No run name currently define. Please add this information.')

        for arg in args:
            if arg not in self._plot_mode and arg != self.run_name:
                self.help_plot()
                raise self.InvalidCmd('unknown options %s' % arg)

    def check_pgs(self, arg):
        """Check the argument for pythia command
        syntax: pgs [NAME]
        Note that other option are already remove at this point
        """

        # If not pythia-pgs path
        if not self.options['pythia-pgs_path']:
            logger.info('Retry to read configuration file to find pythia-pgs path')
            self.set_configuration()

        if not self.options['pythia-pgs_path'] or not \
            os.path.exists(pjoin(self.options['pythia-pgs_path'], 'src')):
            error_msg = 'No pythia-pgs path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # extract an optional --tag=XXX option
        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'pgs')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s. ' % (self.run_name, prev_tag))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                # decompress asynchronously; the returned lock lets the
                # caller wait for completion
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'pgs')

        return lock

    def check_delphes(self, arg):
        """Check the argument for pythia command
        syntax: delphes [NAME]
        Note that other option are already remove at this point
        """

        # If no delphes path
        if not self.options['delphes_path']:
            logger.info('Retry to read configuration file to find delphes path')
            self.set_configuration()

        if not self.options['delphes_path']:
            error_msg = 'No delphes path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # extract an optional --tag=XXX option
        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        # bug fix: 'lock' was only assigned in one branch and never
        # returned, unlike the twin check_pgs; initialise and return it
        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'delphes')
            # bug fix: search inside the run directory (as check_pgs does);
            # globbing 'events_*.hep.gz' directly in Events/ can never match
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
                    % (self.run_name, prev_tag,
                       pjoin(self.me_dir, 'Events', self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'delphes')

        return lock

    def check_calculate_xsect(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_calculate_xsect()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                # bug fix: the message used args[1], which raised IndexError
                # here (len(args) == 1); report the offending args[0] instead
                raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')

    def check_generate_events(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_generate_events()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                # bug fix: the message used args[1], which raised IndexError
                # here (len(args) == 1); report the offending args[0] instead
                raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')

    def check_banner_run(self, args):
        """check the validity of line"""

        if len(args) == 0:
            self.help_banner_run()
            raise self.InvalidCmd('banner_run requires at least one argument.')

        tag = [a[6:] for a in args if a.startswith('--tag=')]

        if os.path.exists(args[0]):
            type = 'banner'
            format = self.detect_card_type(args[0])
            if format != 'banner':
                raise self.InvalidCmd('The file is not a valid banner.')
        elif tag:
            # bug fix: 'tag' is still a list at this point -> use its first
            # element; also record name/type (they were left unbound, which
            # could raise NameError further down)
            name = args[0]
            type = 'run'
            args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                            (args[0], tag[0]))
            if not os.path.exists(args[0]):
                raise self.InvalidCmd('No banner associates to this name and tag.')
        else:
            name = args[0]
            type = 'run'
            banners = misc.glob('*_banner.txt', pjoin(self.me_dir, 'Events', args[0]))
            if not banners:
                raise self.InvalidCmd('No banner associates to this name.')
            elif len(banners) == 1:
                args[0] = banners[0]
            else:
                #list the tag and propose those to the user
                tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
                tag = self.ask('which tag do you want to use?', tags[0], tags)
                args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                                (args[0], tag))

        run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
        if run_name:
            try:
                # bug fix: run_name is a list; format its first element,
                # not the list representation
                self.exec_cmd('remove %s all banner -f' % run_name[0])
            except Exception:
                pass
            self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
        elif type == 'banner':
            self.set_run_name(self.find_available_run_name(self.me_dir))
        elif type == 'run':
            if not self.results[name].is_empty():
                run_name = self.find_available_run_name(self.me_dir)
                logger.info('Run %s is not empty so will use run_name: %s' % \
                            (name, run_name))
                self.set_run_name(run_name)
            else:
                try:
                    # bug fix: 'run_name' is the empty list in this branch;
                    # the run to clean is 'name'
                    self.exec_cmd('remove %s all banner -f' % name)
                except Exception:
                    pass
                self.set_run_name(name)

    def check_launch(self, args, options):
        """check the validity of the line. args is MODE
        MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('auto')
            return

        if len(args) > 1:
            self.help_launch()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'auto']:
                raise self.InvalidCmd('%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')
        if mode == 'NLO' and options['reweightonly']:
            raise self.InvalidCmd('option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"')

    def check_compile(self, args, options):
        """check the validity of the line. args is MODE
        MODE being FO or MC. If no mode is passed, MC is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('MC')
            return

        if len(args) > 1:
            self.help_compile()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['MC', 'FO']:
                raise self.InvalidCmd('%s is not a valid mode, please use "FO" or "MC"' % args[0])
            mode = args[0]
759 # check for incompatible options/modes 760 761 762 #=============================================================================== 763 # CompleteForCmd 764 #=============================================================================== 765 -class CompleteForCmd(CheckValidForCmd):
766 """ The Series of help routine for the MadGraphCmd""" 767
768 - def complete_launch(self, text, line, begidx, endidx):
769 """auto-completion for launch command""" 770 771 args = self.split_arg(line[0:begidx]) 772 if len(args) == 1: 773 #return mode 774 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line) 775 elif len(args) == 2 and line[begidx-1] == '@': 776 return self.list_completion(text,['LO','NLO'],line) 777 else: 778 opts = [] 779 for opt in _launch_parser.option_list: 780 opts += opt._long_opts + opt._short_opts 781 return self.list_completion(text, opts, line)
782
783 - def complete_banner_run(self, text, line, begidx, endidx, formatting=True):
784 "Complete the banner run command" 785 try: 786 787 788 args = self.split_arg(line[0:begidx], error=False) 789 790 if args[-1].endswith(os.path.sep): 791 return self.path_completion(text, 792 os.path.join('.',*[a for a in args \ 793 if a.endswith(os.path.sep)])) 794 795 796 if len(args) > 1: 797 # only options are possible 798 tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1])) 799 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags] 800 801 if args[-1] != '--tag=': 802 tags = ['--tag=%s' % t for t in tags] 803 else: 804 return self.list_completion(text, tags) 805 return self.list_completion(text, tags +['--name=','-f'], line) 806 807 # First argument 808 possibilites = {} 809 810 comp = self.path_completion(text, os.path.join('.',*[a for a in args \ 811 if a.endswith(os.path.sep)])) 812 if os.path.sep in line: 813 return comp 814 else: 815 possibilites['Path from ./'] = comp 816 817 run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events')) 818 run_list = [n.rsplit('/',2)[1] for n in run_list] 819 possibilites['RUN Name'] = self.list_completion(text, run_list) 820 821 return self.deal_multiple_categories(possibilites, formatting) 822 823 824 except Exception, error: 825 print error
826 827
828 - def complete_compile(self, text, line, begidx, endidx):
829 """auto-completion for launch command""" 830 831 args = self.split_arg(line[0:begidx]) 832 if len(args) == 1: 833 #return mode 834 return self.list_completion(text,['FO','MC'],line) 835 else: 836 opts = [] 837 for opt in _compile_parser.option_list: 838 opts += opt._long_opts + opt._short_opts 839 return self.list_completion(text, opts, line)
840
841 - def complete_calculate_xsect(self, text, line, begidx, endidx):
842 """auto-completion for launch command""" 843 844 args = self.split_arg(line[0:begidx]) 845 if len(args) == 1: 846 #return mode 847 return self.list_completion(text,['LO','NLO'],line) 848 else: 849 opts = [] 850 for opt in _calculate_xsect_parser.option_list: 851 opts += opt._long_opts + opt._short_opts 852 return self.list_completion(text, opts, line)
853
854 - def complete_generate_events(self, text, line, begidx, endidx):
855 """auto-completion for generate_events command 856 call the compeltion for launch""" 857 self.complete_launch(text, line, begidx, endidx)
858 859
860 - def complete_shower(self, text, line, begidx, endidx):
861 args = self.split_arg(line[0:begidx]) 862 if len(args) == 1: 863 #return valid run_name 864 data = misc.glob(pjoin('*','events.lhe.gz', pjoin(self.me_dir, 'Events'))) 865 data = [n.rsplit('/',2)[1] for n in data] 866 tmp1 = self.list_completion(text, data) 867 if not self.run_name: 868 return tmp1
869
870 - def complete_plot(self, text, line, begidx, endidx):
871 """ Complete the plot command """ 872 873 args = self.split_arg(line[0:begidx], error=False) 874 875 if len(args) == 1: 876 #return valid run_name 877 data = misc.glob(pjoin('*','events.lhe*', pjoin(self.me_dir, 'Events'))) 878 data = [n.rsplit('/',2)[1] for n in data] 879 tmp1 = self.list_completion(text, data) 880 if not self.run_name: 881 return tmp1 882 883 if len(args) > 1: 884 return self.list_completion(text, self._plot_mode)
885
886 - def complete_pgs(self,text, line, begidx, endidx):
887 "Complete the pgs command" 888 args = self.split_arg(line[0:begidx], error=False) 889 if len(args) == 1: 890 #return valid run_name 891 data = misc.glob(pjoin('*', 'events_*.hep.gz'), 892 pjoin(self.me_dir, 'Events')) 893 data = [n.rsplit('/',2)[1] for n in data] 894 tmp1 = self.list_completion(text, data) 895 if not self.run_name: 896 return tmp1 897 else: 898 tmp2 = self.list_completion(text, self._run_options + ['-f', 899 '--tag=' ,'--no_default'], line) 900 return tmp1 + tmp2 901 else: 902 return self.list_completion(text, self._run_options + ['-f', 903 '--tag=','--no_default'], line)
904 905 complete_delphes = complete_pgs
906
class aMCatNLOAlreadyRunning(InvalidCmd):
    """InvalidCmd specialisation flagging that this process directory
    appears to be in use by another run (per the class name)."""
    pass
909
910 -class AskRunNLO(cmd.ControlSwitch):
911 912 to_control = [('order', 'Type of perturbative computation'), 913 ('fixed_order', 'No MC@[N]LO matching / event generation'), 914 ('shower', 'Shower the generated events'), 915 ('madspin', 'Decay onshell particles'), 916 ('reweight', 'Add weights to events for new hypp.'), 917 ('madanalysis','Run MadAnalysis5 on the events generated')] 918 919 quit_on = cmd.ControlSwitch.quit_on + ['onlyshower'] 920
    def __init__(self, question, line_args=[], mode=None, force=False,
                 *args, **opt):
        """Initialise the run-mode switch question from the state of the
        mother interface (an aMC@NLO run command instance)."""
        # NOTE(review): 'line_args=[]' is a mutable default argument; it is
        # unused in this body, but check callers/subclasses before changing it.

        self.check_available_module(opt['mother_interface'].options)
        self.me_dir = opt['mother_interface'].me_dir
        self.last_mode = opt['mother_interface'].last_mode
        self.proc_characteristics = opt['mother_interface'].proc_characteristics
        # read the current run_card fresh from the Cards directory
        self.run_card = banner_mod.RunCard(pjoin(self.me_dir, 'Cards', 'run_card.dat'))
        super(AskRunNLO, self).__init__(self.to_control, opt['mother_interface'],
                                        *args, **opt)
931 932 @property
933 - def answer(self):
934 935 out = super(AskRunNLO, self).answer 936 if out['shower'] == 'HERWIG7': 937 out['shower'] = 'HERWIGPP' 938 939 if out['shower'] not in self.get_allowed('shower') or out['shower'] =='OFF': 940 out['runshower'] = False 941 else: 942 out['runshower'] = True 943 return out
944 945
946 - def check_available_module(self, options):
947 948 self.available_module = set() 949 if options['madanalysis5_path']: 950 self.available_module.add('MA5') 951 if not aMCatNLO or ('mg5_path' in options and options['mg5_path']): 952 953 self.available_module.add('MadSpin') 954 if misc.has_f2py() or options['f2py_compiler']: 955 self.available_module.add('reweight') 956 if options['pythia8_path']: 957 self.available_module.add('PY8') 958 if options['hwpp_path'] and options['thepeg_path'] and options['hepmc_path']: 959 self.available_module.add('HW7')
960 # 961 # shorcut 962 #
963 - def ans_lo(self, value):
964 """ function called if the user type lo=value. or lo (then value is None)""" 965 966 if value is None: 967 self.switch['order'] = 'LO' 968 self.switch['fixed_order'] = 'ON' 969 self.set_switch('shower', 'OFF') 970 else: 971 logger.warning('Invalid command: lo=%s' % value)
972
973 - def ans_nlo(self, value):
974 if value is None: 975 self.switch['order'] = 'NLO' 976 self.switch['fixed_order'] = 'ON' 977 self.set_switch('shower', 'OFF') 978 else: 979 logger.warning('Invalid command: nlo=%s' % value)
980
981 - def ans_amc__at__nlo(self, value):
982 if value is None: 983 self.switch['order'] = 'NLO' 984 self.switch['fixed_order'] = 'OFF' 985 self.set_switch('shower', 'ON') 986 else: 987 logger.warning('Invalid command: aMC@NLO=%s' % value)
988
989 - def ans_amc__at__lo(self, value):
990 if value is None: 991 self.switch['order'] = 'LO' 992 self.switch['fixed_order'] = 'OFF' 993 self.set_switch('shower', 'ON') 994 else: 995 logger.warning('Invalid command: aMC@LO=%s' % value)
996
997 - def ans_noshower(self, value):
998 if value is None: 999 self.switch['order'] = 'NLO' 1000 self.switch['fixed_order'] = 'OFF' 1001 self.set_switch('shower', 'OFF') 1002 else: 1003 logger.warning('Invalid command: noshower=%s' % value)
1004
1005 - def ans_onlyshower(self, value):
1006 if value is None: 1007 self.switch['mode'] = 'onlyshower' 1008 self.switch['madspin'] = 'OFF' 1009 self.switch['reweight'] = 'OFF' 1010 else: 1011 logger.warning('Invalid command: onlyshower=%s' % value)
1012
1013 - def ans_noshowerlo(self, value):
1014 if value is None: 1015 self.switch['order'] = 'LO' 1016 self.switch['fixed_order'] = 'OFF' 1017 self.set_switch('shower', 'OFF') 1018 else: 1019 logger.warning('Invalid command: noshowerlo=%s' % value)
1020
1021 - def ans_madanalysis5(self, value):
1022 """ shortcut madanalysis5 -> madanalysis """ 1023 1024 if value is None: 1025 return self.onecmd('madanalysis') 1026 else: 1027 self.set_switch('madanalysis', value)
1028 # 1029 # ORDER 1030 #
1031 - def get_allowed_order(self):
1032 return ["LO", "NLO"]
1033
1034 - def set_default_order(self):
1035 1036 if self.last_mode in ['LO', 'aMC@L0', 'noshowerLO']: 1037 self.switch['order'] = 'LO' 1038 self.switch['order'] = 'NLO'
1039
1040 - def set_switch_off_order(self):
1041 return
1042 # 1043 # Fix order 1044 #
1045 - def get_allowed_fixed_order(self):
1046 """ """ 1047 if self.proc_characteristics['ninitial'] == 1: 1048 return ['ON'] 1049 else: 1050 return ['ON', 'OFF']
1051
1052 - def set_default_fixed_order(self):
1053 1054 if self.last_mode in ['LO', 'NLO']: 1055 self.switch['fixed_order'] = 'ON' 1056 self.switch['fixed_order'] = 'OFF' 1057
1058 - def color_for_fixed_order(self, switch_value):
1059 1060 if switch_value in ['OFF']: 1061 return self.green % switch_value 1062 else: 1063 return self.red % switch_value
1064
1065 - def color_for_shower(self, switch_value):
1066 1067 if switch_value in ['ON']: 1068 return self.green % switch_value 1069 elif switch_value in self.get_allowed('shower'): 1070 return self.green % switch_value 1071 else: 1072 return self.red % switch_value
1073
1074 - def consistency_fixed_order_shower(self, vfix, vshower):
1075 """ consistency_XX_YY(val_XX, val_YY) 1076 -> XX is the new key set by the user to a new value val_XX 1077 -> YY is another key set by the user. 1078 -> return value should be None or "replace_YY" 1079 """ 1080 1081 if vfix == 'ON' and vshower != 'OFF' : 1082 return 'OFF' 1083 return None
1084 1085 consistency_fixed_order_madspin = consistency_fixed_order_shower 1086 consistency_fixed_order_reweight = consistency_fixed_order_shower 1087
1088 - def consistency_fixed_order_madanalysis(self, vfix, vma5):
1089 1090 if vfix == 'ON' and vma5 == 'ON' : 1091 return 'OFF' 1092 return None
1093 1094
1095 - def consistency_shower_fixed_order(self, vshower, vfix):
1096 """ consistency_XX_YY(val_XX, val_YY) 1097 -> XX is the new key set by the user to a new value val_XX 1098 -> YY is another key set by the user. 1099 -> return value should be None or "replace_YY" 1100 """ 1101 1102 if vshower != 'OFF' and vfix == 'ON': 1103 return 'OFF' 1104 return None
1105 1106 consistency_madspin_fixed_order = consistency_shower_fixed_order 1107 consistency_reweight_fixed_order = consistency_shower_fixed_order 1108 consistency_madanalysis_fixed_order = consistency_shower_fixed_order 1109 1110 1111 # 1112 # Shower 1113 #
1114 - def get_allowed_shower(self):
1115 """ """ 1116 1117 if hasattr(self, 'allowed_shower'): 1118 return self.allowed_shower 1119 1120 if self.proc_characteristics['ninitial'] == 1: 1121 self.allowed_shower = ['OFF'] 1122 return ['OFF'] 1123 else: 1124 allowed = ['HERWIG6','OFF', 'PYTHIA6Q', 'PYTHIA6PT', ] 1125 if 'PY8' in self.available_module: 1126 allowed.append('PYTHIA8') 1127 if 'HW7' in self.available_module: 1128 allowed.append('HERWIGPP') 1129 1130 self.allowed_shower = allowed 1131 1132 return allowed
1133
1134 - def check_value_shower(self, value):
1135 """ """ 1136 1137 if value.upper() in self.get_allowed_shower(): 1138 return True 1139 if value.upper() in ['PYTHIA8', 'HERWIGPP']: 1140 return True 1141 if value.upper() == 'ON': 1142 return self.run_card['parton_shower'] 1143 if value.upper() in ['P8','PY8','PYTHIA_8']: 1144 return 'PYTHIA8' 1145 if value.upper() in ['PY6','P6','PY6PT', 'PYTHIA_6', 'PYTHIA_6PT','PYTHIA6PT','PYTHIA6_PT']: 1146 return 'PYTHIA6PT' 1147 if value.upper() in ['PY6Q', 'PYTHIA_6Q','PYTHIA6Q', 'PYTHIA6_Q']: 1148 return 'PYTHIA6Q' 1149 if value.upper() in ['HW7', 'HERWIG7']: 1150 return 'HERWIG7' 1151 if value.upper() in ['HW++', 'HWPP', 'HERWIG++']: 1152 return 'HERWIGPP' 1153 if value.upper() in ['HW6', 'HERWIG_6']: 1154 return 'HERWIG6'
1155
1156 - def set_default_shower(self):
1157 1158 if self.last_mode in ['LO', 'NLO', 'noshower', 'noshowerLO']: 1159 self.switch['shower'] = 'OFF' 1160 return 1161 1162 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')): 1163 self.switch['shower'] = self.run_card['parton_shower'] 1164 #self.switch['shower'] = 'ON' 1165 self.switch['fixed_order'] = "OFF" 1166 else: 1167 self.switch['shower'] = 'OFF' 1168
1169 - def consistency_shower_madanalysis(self, vshower, vma5):
1170 """ MA5 only possible with (N)LO+PS if shower is run""" 1171 1172 if vshower == 'OFF' and vma5 == 'ON': 1173 return 'OFF' 1174 return None
1175
1176 - def consistency_madanalysis_shower(self, vma5, vshower):
1177 1178 if vma5=='ON' and vshower == 'OFF': 1179 return 'ON' 1180 return None
1181
1182 - def get_cardcmd_for_shower(self, value):
1183 """ adpat run_card according to this setup. return list of cmd to run""" 1184 1185 if value != 'OFF': 1186 return ['set parton_shower %s' % self.switch['shower']] 1187 return []
1188 1189 # 1190 # madspin 1191 #
1192 - def get_allowed_madspin(self):
1193 """ """ 1194 1195 if hasattr(self, 'allowed_madspin'): 1196 return self.allowed_madspin 1197 1198 self.allowed_madspin = [] 1199 1200 1201 if 'MadSpin' not in self.available_module: 1202 return self.allowed_madspin 1203 if self.proc_characteristics['ninitial'] == 1: 1204 self.available_module.remove('MadSpin') 1205 self.allowed_madspin = ['OFF'] 1206 return self.allowed_madspin 1207 else: 1208 self.allowed_madspin = ['OFF', 'ON', 'onshell'] 1209 return self.allowed_madspin
1210
1211 - def check_value_madspin(self, value):
1212 """handle alias and valid option not present in get_allowed_madspin 1213 remember that this mode should always be OFF for 1>N. (ON not in allowed value)""" 1214 1215 if value.upper() in self.get_allowed_madspin(): 1216 if value == value.upper(): 1217 return True 1218 else: 1219 return value.upper() 1220 elif value.lower() in self.get_allowed_madspin(): 1221 if value == value.lower(): 1222 return True 1223 else: 1224 return value.lower() 1225 1226 if 'MadSpin' not in self.available_module or \ 1227 'ON' not in self.get_allowed_madspin(): 1228 return False 1229 1230 if value.lower() in ['madspin', 'full']: 1231 return 'full' 1232 elif value.lower() in ['none']: 1233 return 'none'
1234
1235 - def set_default_madspin(self):
1236 1237 if 'MadSpin' in self.available_module: 1238 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 1239 self.switch['madspin'] = 'ON' 1240 else: 1241 self.switch['madspin'] = 'OFF' 1242 else: 1243 self.switch['madspin'] = 'Not Avail.'
1244
1245 - def get_cardcmd_for_madspin(self, value):
1246 """set some command to run before allowing the user to modify the cards.""" 1247 1248 if value == 'onshell': 1249 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode onshell"] 1250 elif value in ['full', 'madspin']: 1251 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode madspin"] 1252 elif value == 'none': 1253 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode none"] 1254 else: 1255 return []
1256 1257 # 1258 # reweight 1259 #
1260 - def get_allowed_reweight(self):
1261 """set the valid (visible) options for reweight""" 1262 1263 if hasattr(self, 'allowed_reweight'): 1264 return getattr(self, 'allowed_reweight') 1265 1266 self.allowed_reweight = [] 1267 if 'reweight' not in self.available_module: 1268 return self.allowed_reweight 1269 if self.proc_characteristics['ninitial'] == 1: 1270 self.available_module.remove('reweight') 1271 self.allowed_reweight.append('OFF') 1272 return self.allowed_reweight 1273 else: 1274 self.allowed_reweight = [ 'OFF', 'ON', 'NLO', 'NLO_TREE','LO'] 1275 return self.allowed_reweight
1276
1277 - def set_default_reweight(self):
1278 """initialise the switch for reweight""" 1279 1280 if 'reweight' in self.available_module: 1281 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')): 1282 self.switch['reweight'] = 'ON' 1283 else: 1284 self.switch['reweight'] = 'OFF' 1285 else: 1286 self.switch['reweight'] = 'Not Avail.'
1287
1288 - def get_cardcmd_for_reweight(self, value):
1289 """ adpat run_card according to this setup. return list of cmd to run""" 1290 1291 if value == 'LO': 1292 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode LO"] 1293 elif value == 'NLO': 1294 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO", 1295 "set store_rwgt_info T"] 1296 elif value == 'NLO_TREE': 1297 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO_tree", 1298 "set store_rwgt_info T"] 1299 return []
1300 1301 # 1302 # MadAnalysis5 1303 # 1304 get_allowed_madanalysis = get_allowed_madspin 1305
1306 - def set_default_madanalysis(self):
1307 """initialise the switch for reweight""" 1308 1309 if 'MA5' not in self.available_module: 1310 self.switch['madanalysis'] = 'Not Avail.' 1311 elif os.path.exists(pjoin(self.me_dir,'Cards', 'madanalysis5_hadron_card.dat')): 1312 self.switch['madanalysis'] = 'ON' 1313 else: 1314 self.switch['madanalysis'] = 'OFF'
1315
1316 - def check_value_madanalysis(self, value):
1317 """check an entry is valid. return the valid entry in case of shortcut""" 1318 1319 if value.upper() in self.get_allowed('madanalysis'): 1320 return True 1321 value = value.lower() 1322 if value == 'hadron': 1323 return 'ON' if 'ON' in self.get_allowed_madanalysis5 else False 1324 else: 1325 return False
1326
#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph5_aMC@NLO: drives compilation,
    fixed-order runs, event generation, showering and post-processing for a
    given process directory."""

    # Truth values
    true = ['T','.true.',True,'true']
    # Options and formats available
    _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
    _plot_mode = ['all', 'parton','shower','pgs','delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']
    # survey options, dict from name to type, default value, and help text
    # Variables to store object information
    web = False
    # 0: sequential, 1: cluster, 2: multicore (see do_launch)
    cluster_mode = 0
    queue = 'madgraph'
    nb_core = None
    make_opts_var = {}

    # suggestions shown to the user for the next command
    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower' : ['generate_events [OPTIONS]']
    }


    ############################################################################
    def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
        """ add information to the cmd

        Initialise the interpreter for the process directory *me_dir*:
        set the prompt, load the results database and, when the process
        includes virtual corrections, check the fortran compiler.
        NOTE(review): 'options = {}' is a mutable default argument; it is
        not mutated here but remains fragile.
        """

        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
        #common_run.CommonRunCmd.__init__(self, me_dir, options)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        # prompt shows the process-directory name, e.g. "PROC_sm_0>"
        self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))


        self.load_results_db()
        self.results.def_web_mode(self.web)
        # check that compiler is gfortran 4.6 or later if virtuals have been exported
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        # '[real=QCD]' marks a real-emission-only process; otherwise virtual
        # corrections are present and the compiler check is mandatory
        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)
1381 1382 1383 ############################################################################
1384 - def do_shower(self, line):
1385 """ run the shower on a given parton level file """ 1386 argss = self.split_arg(line) 1387 (options, argss) = _launch_parser.parse_args(argss) 1388 # check argument validity and normalise argument 1389 options = options.__dict__ 1390 options['reweightonly'] = False 1391 self.check_shower(argss, options) 1392 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe') 1393 self.ask_run_configuration('onlyshower', options) 1394 self.run_mcatnlo(evt_file, options) 1395 1396 self.update_status('', level='all', update_results=True)
1397 1398 ################################################################################
1399 - def do_plot(self, line):
1400 """Create the plot for a given run""" 1401 1402 # Since in principle, all plot are already done automaticaly 1403 args = self.split_arg(line) 1404 # Check argument's validity 1405 self.check_plot(args) 1406 logger.info('plot for run %s' % self.run_name) 1407 1408 if not self.force: 1409 self.ask_edit_cards([], args, plot=True) 1410 1411 if any([arg in ['parton'] for arg in args]): 1412 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe') 1413 if os.path.exists(filename+'.gz'): 1414 misc.gunzip(filename) 1415 if os.path.exists(filename): 1416 logger.info('Found events.lhe file for run %s' % self.run_name) 1417 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe')) 1418 self.create_plot('parton') 1419 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename) 1420 misc.gzip(filename) 1421 1422 if any([arg in ['all','parton'] for arg in args]): 1423 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top') 1424 if os.path.exists(filename): 1425 logger.info('Found MADatNLO.top file for run %s' % \ 1426 self.run_name) 1427 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html') 1428 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton') 1429 1430 if not os.path.isdir(plot_dir): 1431 os.makedirs(plot_dir) 1432 top_file = pjoin(plot_dir, 'plots.top') 1433 files.cp(filename, top_file) 1434 madir = self.options['madanalysis_path'] 1435 tag = self.run_card['run_tag'] 1436 td = self.options['td_path'] 1437 misc.call(['%s/plot' % self.dirbin, madir, td], 1438 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1439 stderr = subprocess.STDOUT, 1440 cwd=plot_dir) 1441 1442 misc.call(['%s/plot_page-pl' % self.dirbin, 1443 os.path.basename(plot_dir), 1444 'parton'], 1445 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1446 stderr = subprocess.STDOUT, 1447 cwd=pjoin(self.me_dir, 'HTML', self.run_name)) 1448 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'), 1449 
output) 1450 1451 os.remove(pjoin(self.me_dir, 'Events', 'plots.top')) 1452 1453 if any([arg in ['all','shower'] for arg in args]): 1454 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name)) 1455 if len(filenames) != 1: 1456 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name)) 1457 if len(filenames) != 1: 1458 logger.info('No shower level file found for run %s' % \ 1459 self.run_name) 1460 return 1461 filename = filenames[0] 1462 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1463 1464 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')): 1465 if aMCatNLO and not self.options['mg5_path']: 1466 raise "plotting NLO HEP file needs MG5 utilities" 1467 1468 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'), 1469 pjoin(self.me_dir, 'Cards', 'pythia_card.dat')) 1470 self.run_hep2lhe() 1471 else: 1472 filename = filenames[0] 1473 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1474 1475 self.create_plot('shower') 1476 lhe_file_name = filename.replace('.hep.gz', '.lhe') 1477 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'), 1478 lhe_file_name) 1479 misc.gzip(lhe_file_name) 1480 1481 if any([arg in ['all','pgs'] for arg in args]): 1482 filename = pjoin(self.me_dir, 'Events', self.run_name, 1483 '%s_pgs_events.lhco' % self.run_tag) 1484 if os.path.exists(filename+'.gz'): 1485 misc.gunzip(filename) 1486 if os.path.exists(filename): 1487 self.create_plot('PGS') 1488 misc.gzip(filename) 1489 else: 1490 logger.info('No valid files for pgs plot') 1491 1492 if any([arg in ['all','delphes'] for arg in args]): 1493 filename = pjoin(self.me_dir, 'Events', self.run_name, 1494 '%s_delphes_events.lhco' % self.run_tag) 1495 if os.path.exists(filename+'.gz'): 1496 misc.gunzip(filename) 1497 if os.path.exists(filename): 1498 #shutil.move(filename, pjoin(self.me_dir, 
'Events','delphes_events.lhco')) 1499 self.create_plot('Delphes') 1500 #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename) 1501 misc.gzip(filename) 1502 else: 1503 logger.info('No valid files for delphes plot')
1504 1505 1506 ############################################################################
1507 - def do_calculate_xsect(self, line):
1508 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO 1509 this function wraps the do_launch one""" 1510 1511 self.start_time = time.time() 1512 argss = self.split_arg(line) 1513 # check argument validity and normalise argument 1514 (options, argss) = _calculate_xsect_parser.parse_args(argss) 1515 options = options.__dict__ 1516 options['reweightonly'] = False 1517 options['parton'] = True 1518 self.check_calculate_xsect(argss, options) 1519 self.do_launch(line, options, argss)
1520 1521 ############################################################################
    def do_banner_run(self, line):
        """Make a run from the banner file

        Split the given banner back into the individual cards, optionally
        let the user edit them, recover the run-mode switches stored in the
        banner, and finally delegate to do_launch.
        """

        args = self.split_arg(line)
        #check the validity of the arguments
        self.check_banner_run(args)

        # Remove previous cards
        for name in ['shower_card.dat', 'madspin_card.dat']:
            try:
                os.remove(pjoin(self.me_dir, 'Cards', name))
            except Exception:
                # card simply not present: nothing to clean
                pass

        banner_mod.split_banner(args[0], self.me_dir, proc_card=False)

        # Check if we want to modify the run
        if not self.force:
            ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
            if ans == 'n':
                self.force = True

        # Compute run mode:
        if self.force:
            mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
            banner = banner_mod.Banner(args[0])
            # NOTE(review): 'line' here shadows the method argument, and the
            # split assumes exactly one '=' per setting line -- verify
            for line in banner['run_settings']:
                if '=' in line:
                    mode, value = [t.strip() for t in line.split('=')]
                    mode_status[mode] = value
        else:
            mode_status = {}

        # Call Generate events
        self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
                       switch=mode_status)
1558 1559 ############################################################################
1560 - def do_generate_events(self, line):
1561 """Main commands: generate events 1562 this function just wraps the do_launch one""" 1563 self.do_launch(line)
1564 1565 1566 ############################################################################
1567 - def do_treatcards(self, line, amcatnlo=True,mode=''):
1568 """Advanced commands: this is for creating the correct run_card.inc from the nlo format""" 1569 #check if no 'Auto' are present in the file 1570 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat')) 1571 1572 # propagate the FO_card entry FO_LHE_weight_ratio to the run_card. 1573 # this variable is system only in the run_card 1574 # can not be done in EditCard since this parameter is not written in the 1575 # run_card directly. 1576 if mode in ['LO', 'NLO']: 1577 name = 'fo_lhe_weight_ratio' 1578 FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir,'Cards', 'FO_analyse_card.dat')) 1579 if name in FO_card: 1580 self.run_card.set(name, FO_card[name], user=False) 1581 name = 'fo_lhe_postprocessing' 1582 if name in FO_card: 1583 self.run_card.set(name, FO_card[name], user=False) 1584 1585 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1586 1587 ############################################################################
1588 - def set_configuration(self, amcatnlo=True, **opt):
1589 """assign all configuration variable from file 1590 loop over the different config file if config_file not define """ 1591 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1592 1593 ############################################################################
    def do_launch(self, line, options={}, argss=[], switch={}):
        """Main commands: launch the full chain
        options and args are relevant if the function is called from other
        functions, such as generate_events or calculate_xsect
        mode gives the list of switch needed for the computation (usefull for banner_run)
        """

        # parse the command line only when called directly by the user
        if not argss and not options:
            self.start_time = time.time()
            argss = self.split_arg(line)
            # check argument validity and normalise argument
            (options, argss) = _launch_parser.parse_args(argss)
            options = options.__dict__
            self.check_launch(argss, options)


        if 'run_name' in options.keys() and options['run_name']:
            self.run_name = options['run_name']
            # if a dir with the given run_name already exists
            # remove it and warn the user
            if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
                logger.warning('Removing old run information in \n'+
                                pjoin(self.me_dir, 'Events', self.run_name))
                files.rm(pjoin(self.me_dir, 'Events', self.run_name))
                self.results.delete_run(self.run_name)
        else:
            self.run_name = ''  # will be set later

        # cluster_mode: 0 sequential, 1 cluster, 2 multicore
        if options['multicore']:
            self.cluster_mode = 2
        elif options['cluster']:
            self.cluster_mode = 1

        # determine the run mode, either from the command line or from the
        # switch dictionary passed by banner_run
        if not switch:
            mode = argss[0]

            if mode in ['LO', 'NLO']:
                options['parton'] = True
            mode = self.ask_run_configuration(mode, options)
        else:
            mode = self.ask_run_configuration('auto', options, switch)

        self.results.add_detail('run_mode', mode)

        self.update_status('Starting run', level=None, update_results=True)

        if self.options['automatic_html_opening']:
            misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
            self.options['automatic_html_opening'] = False

        # strip any '+<shower>' suffix before compiling/running
        if '+' in mode:
            mode = mode.split('+')[0]
        self.compile(mode, options)
        evt_file = self.run(mode, options)

        # grid set-up only: no events were produced
        if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
            logger.info('No event file generated: grids have been set-up with a '\
                        'relative precision of %s' % self.run_card['req_acc'])
            return

        # post-process the event file (systematics, reweight, decay)
        if not mode in ['LO', 'NLO']:
            assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))

            if self.run_card['systematics_program'] == 'systematics':
                self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments'])))

            self.exec_cmd('reweight -from_cards', postcmd=False)
            self.exec_cmd('decay_events -from_cards', postcmd=False)
            evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')

        # run the shower (and MA5 at hadron level) unless parton-only
        if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
                                                  and not options['parton']:
            self.run_mcatnlo(evt_file, options)
            self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False)

        elif mode == 'noshower':
            logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please, shower the Les Houches events before using them for physics analyses.""")


        self.update_status('', level='all', update_results=True)
        # FxFx merging requires a PYTHIA8 shower to remove double counting
        if self.run_card['ickkw'] == 3 and \
           (mode in ['noshower'] or \
            (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
            logger.warning("""You are running with FxFx merging enabled.
To be able to merge samples of various multiplicities without double counting,
you have to remove some events after showering 'by hand'.
Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")

        self.store_result()
        #check if the param_card defines a scan.
        if self.param_card_iterator:
            param_card_iterator = self.param_card_iterator
            # reset the attribute so the recursive do_launch calls below do
            # not re-enter this scan block
            self.param_card_iterator = []
            param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                            error=self.results.current['error'])
            orig_name = self.run_name
            # loop over the remaining scan points, re-launching each time
            with misc.TMP_variable(self, 'allow_notification_center', False):
                for i,card in enumerate(param_card_iterator):
                    card.write(pjoin(self.me_dir,'Cards','param_card.dat'))
                    self.check_param_card(pjoin(self.me_dir,'Cards','param_card.dat'), dependent=True)
                    if not options['force']:
                        options['force'] = True
                    if options['run_name']:
                        options['run_name'] = '%s_%s' % (orig_name, i+1)
                    if not argss:
                        argss = [mode, "-f"]
                    elif argss[0] == "auto":
                        argss[0] = mode
                    self.do_launch("", options=options, argss=argss, switch=switch)
                    #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False)
                    param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                                    error=self.results.current['error'])
            #restore original param_card
            param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
            name = misc.get_scan_name(orig_name, self.run_name)
            path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
            logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK')
            param_card_iterator.write_summary(path)

        if self.allow_notification_center:
            misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
                              '%s: %s +- %s ' % (self.results.current['run_name'],
                                                 self.results.current['cross'],
                                                 self.results.current['error']))
1720 1721 1722 ############################################################################
1723 - def do_compile(self, line):
1724 """Advanced commands: just compile the executables """ 1725 argss = self.split_arg(line) 1726 # check argument validity and normalise argument 1727 (options, argss) = _compile_parser.parse_args(argss) 1728 options = options.__dict__ 1729 options['reweightonly'] = False 1730 options['nocompile'] = False 1731 self.check_compile(argss, options) 1732 1733 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]] 1734 self.ask_run_configuration(mode, options) 1735 self.compile(mode, options) 1736 1737 1738 self.update_status('', level='all', update_results=True)
1739 1740
1741 - def update_random_seed(self):
1742 """Update random number seed with the value from the run_card. 1743 If this is 0, update the number according to a fresh one""" 1744 iseed = self.run_card['iseed'] 1745 if iseed == 0: 1746 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit')) 1747 iseed = int(randinit.read()[2:]) + 1 1748 randinit.close() 1749 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w') 1750 randinit.write('r=%d' % iseed) 1751 randinit.close()
1752 1753
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created

        For fixed-order modes ('LO'/'NLO') the integration jobs are iterated
        until the requested accuracy is reached and nothing is returned.
        For event-generation modes ('aMC@NLO', 'aMC@LO', 'noshower',
        'noshowerLO') the three MINT steps are run and the path of the
        collected event file is returned.
        """
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # for second step in applgrid mode, do only the event generation step
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()
        #find and keep track of all the jobs
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                        'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        p_dirs = [d for d in \
                  open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        #Clean previous results
        self.clean_previous_results(options,p_dirs,folder_names[mode])

        # status messages for the three MINT steps
        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']


        if options['reweightonly']:
            event_norm=self.run_card['event_norm']
            nevents=self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        if mode in ['LO', 'NLO']:
            # this is for fixed order runs
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # Re-distribute the grids for the 2nd step of the applgrid
            # running
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options,mode_dict[mode],p_dirs)

            # create a list of dictionaries "jobs_to_run" with all the
            # jobs that need to be run
            integration_step=-1
            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
            self.prepare_directories(jobs_to_run,mode)

            # loop over the integration steps. After every step, check
            # if we have the required accuracy. If this is the case,
            # stop running, else do another step.
            while True:
                integration_step=integration_step+1
                self.run_all_jobs(jobs_to_run,integration_step)
                self.collect_log_files(jobs_to_run,integration_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,integration_step,mode,mode_dict[mode])
                if not jobs_to_run:
                    # there are no more jobs to run (jobs_to_run is empty)
                    break
            # We are done.
            self.finalise_run_FO(folder_names[mode],jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            # validate the nevents / req_acc combination
            if nevents == 0 and req_acc < 0 :
                raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                    'of events, because 0 events requested. Please set '\
                                    'the "req_acc" parameter in the run_card to a value '\
                                    'between 0 and 1')
            elif req_acc >1 or req_acc == 0 :
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                    'be between larger than 0 and smaller than 1, '\
                                    'or set to -1 for automatic determination. Current '\
                                    'value is %f' % req_acc)
            # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card)
            elif req_acc < 0 and nevents > 1000000 :
                req_acc=0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. '\
                                    'Please use one of the following: %s' \
                                    % (shower, ', '.join(shower_list)))

            # check that PYTHIA6PT is not used for processes with FSR
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower','noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                # NOTE(review): this branch looks unreachable here, since the
                # two tests above already cover all four modes -- verify
                logger.info('Generating events starting from existing results')

            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                req_acc,mode_dict[mode],1,mode,fixed_order=False)
            # Make sure to update all the jobs to be ready for the event generation step
            if options['only_generation']:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run,mode,fixed_order=False)


            # Main loop over the three MINT generation steps:
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
                self.collect_log_files(jobs_to_run,mint_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)
                if mint_step+1==2 and nevents==0:
                    # grid set-up only: print the summary and stop here
                    self.print_summary(options,2,mode)
                    return

            # Sanity check on the event files. If error the jobs are resubmitted
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                #if cluster run, wait 10 sec so that event files are transferred back
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm=self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1895
    def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
                           integration_step,mode,fixed_order=True):
        """Creates a list of dictionaries with all the jobs to be run.

        Returns the tuple (jobs_to_run, jobs_to_collect, integration_step):
        the jobs that still need running, the full list of jobs whose
        results enter the final answer, and the (possibly updated)
        integration step.
        """
        jobs_to_run=[]
        if not options['only_generation']:
            # Fresh, new run. Check all the P*/channels.txt files
            # (created by the 'gensym' executable) to set-up all the
            # jobs using the default inputs.
            npoints = self.run_card['npoints_FO_grid']
            niters = self.run_card['niters_FO_grid']
            for p_dir in p_dirs:
                try:
                    with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
                        channels=chan_file.readline().split()
                except IOError:
                    # contribution without channels: warn and skip it
                    logger.warning('No integration channels found for contribution %s' % p_dir)
                    continue
                if fixed_order:
                    lch=len(channels)
                    maxchannels=20 # combine up to 20 channels in a single job
                    # applgrid runs need one channel per job
                    if self.run_card['iappl'] != 0: maxchannels=1
                    # number of jobs needed to cover all channels with at
                    # most 'maxchannels' channels per job
                    njobs=(int(lch/maxchannels)+1 if lch%maxchannels!= 0 \
                           else int(lch/maxchannels))
                    for nj in range(1,njobs+1):
                        job={}
                        job['p_dir']=p_dir
                        job['channel']=str(nj)
                        # spread the channels as evenly as possible over the jobs
                        job['nchans']=(int(lch/njobs)+1 if nj <= lch%njobs else int(lch/njobs))
                        job['configs']=' '.join(channels[:job['nchans']])
                        # consume the channels assigned to this job
                        del channels[:job['nchans']]
                        job['split']=0
                        if req_acc == -1:
                            # fixed number of points/iterations from the run_card
                            job['accuracy']=0
                            job['niters']=niters
                            job['npoints']=npoints
                        elif req_acc > 0:
                            # adaptive running: start from a loose per-job accuracy
                            job['accuracy']=0.05
                            job['niters']=6
                            job['npoints']=-1
                        else:
                            raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
                                                'between 0 and 1 or set it equal to -1.')
                        job['mint_mode']=0
                        job['run_mode']=run_mode
                        job['wgt_frac']=1.0
                        job['wgt_mult']=1.0
                        jobs_to_run.append(job)
                    # all channels must have been consumed by the loop above
                    if channels:
                        raise aMCatNLOError('channels is not empty %s' % channels)
                else:
                    # (N)LO+PS running: one job per integration channel
                    for channel in channels:
                        job={}
                        job['p_dir']=p_dir
                        job['channel']=channel
                        job['split']=0
                        job['accuracy']=0.03
                        job['niters']=12
                        job['npoints']=-1
                        job['mint_mode']=0
                        job['run_mode']=run_mode
                        job['wgt_frac']=1.0
                        # NOTE(review): no 'wgt_mult' key here, unlike the
                        # fixed-order jobs above — presumably not needed by the
                        # (N)LO+PS input file; confirm against write_input_file.
                        jobs_to_run.append(job)
            jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs
        else:
            # if options['only_generation'] is true, just read the current jobs from file
            try:
                with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'rb') as f:
                    jobs_to_collect=pickle.load(f)
                    for job in jobs_to_collect:
                        # remap the saved paths onto the current process directory
                        job['dirname']=pjoin(self.me_dir,'SubProcesses',job['dirname'].rsplit('/SubProcesses/',1)[1])
                    jobs_to_run=copy.copy(jobs_to_collect)
            except:
                raise aMCatNLOError('Cannot reconstruct saved job status in %s' % \
                               pjoin(self.me_dir,'SubProcesses','job_status.pkl'))
            # Update cross sections and determine which jobs to run next
            if fixed_order:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
                                                jobs_to_collect,integration_step,mode,run_mode)
                # Update the integration_step to make sure that nothing will be overwritten
                integration_step=1
                for job in jobs_to_run:
                    # find the first free res_<step>.dat slot over all jobs
                    while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
                        integration_step=integration_step+1
                integration_step=integration_step-1
            else:
                self.append_the_results(jobs_to_collect,integration_step)
        return jobs_to_run,jobs_to_collect,integration_step
1983
1984 - def prepare_directories(self,jobs_to_run,mode,fixed_order=True):
1985 """Set-up the G* directories for running""" 1986 name_suffix={'born' :'B' , 'all':'F'} 1987 for job in jobs_to_run: 1988 if job['split'] == 0: 1989 if fixed_order : 1990 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1991 job['run_mode']+'_G'+job['channel']) 1992 else: 1993 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1994 'G'+name_suffix[job['run_mode']]+job['channel']) 1995 else: 1996 if fixed_order : 1997 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1998 job['run_mode']+'_G'+job['channel']+'_'+str(job['split'])) 1999 else: 2000 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2001 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split'])) 2002 job['dirname']=dirname 2003 if not os.path.isdir(dirname): 2004 os.makedirs(dirname) 2005 self.write_input_file(job,fixed_order) 2006 # link or copy the grids from the base directory to the split directory: 2007 if not fixed_order: 2008 if job['split'] != 0: 2009 for f in ['grid.MC_integer','mint_grids','res_1']: 2010 if not os.path.isfile(pjoin(job['dirname'],f)): 2011 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname']) 2012 else: 2013 if job['split'] != 0: 2014 for f in ['grid.MC_integer','mint_grids']: 2015 files.cp(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
2016 2017
2018 - def write_input_file(self,job,fixed_order):
2019 """write the input file for the madevent_mint* executable in the appropriate directory""" 2020 if fixed_order: 2021 content= \ 2022 """NPOINTS = %(npoints)s 2023 NITERATIONS = %(niters)s 2024 ACCURACY = %(accuracy)s 2025 ADAPT_GRID = 2 2026 MULTICHANNEL = 1 2027 SUM_HELICITY = 1 2028 NCHANS = %(nchans)s 2029 CHANNEL = %(configs)s 2030 SPLIT = %(split)s 2031 WGT_MULT= %(wgt_mult)s 2032 RUN_MODE = %(run_mode)s 2033 RESTART = %(mint_mode)s 2034 """ \ 2035 % job 2036 else: 2037 content = \ 2038 """-1 12 ! points, iterations 2039 %(accuracy)s ! desired fractional accuracy 2040 1 -0.1 ! alpha, beta for Gsoft 2041 1 -0.1 ! alpha, beta for Gazi 2042 1 ! Suppress amplitude (0 no, 1 yes)? 2043 1 ! Exact helicity sum (0 yes, n = number/event)? 2044 %(channel)s ! Enter Configuration Number: 2045 %(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events 2046 1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij 2047 %(run_mode)s ! all, born, real, virt 2048 """ \ 2049 % job 2050 with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file: 2051 input_file.write(content)
2052 2053
2054 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
2055 """Loops over the jobs_to_run and executes them using the function 'run_exe'""" 2056 if fixed_order: 2057 if integration_step == 0: 2058 self.update_status('Setting up grids', level=None) 2059 else: 2060 self.update_status('Refining results, step %i' % integration_step, level=None) 2061 self.ijob = 0 2062 name_suffix={'born' :'B', 'all':'F'} 2063 if fixed_order: 2064 run_type="Fixed order integration step %s" % integration_step 2065 else: 2066 run_type="MINT step %s" % integration_step 2067 self.njobs=len(jobs_to_run) 2068 for job in jobs_to_run: 2069 executable='ajob1' 2070 if fixed_order: 2071 arguments=[job['channel'],job['run_mode'], \ 2072 str(job['split']),str(integration_step)] 2073 else: 2074 arguments=[job['channel'],name_suffix[job['run_mode']], \ 2075 str(job['split']),str(integration_step)] 2076 self.run_exe(executable,arguments,run_type, 2077 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir'])) 2078 2079 if self.cluster_mode == 2: 2080 time.sleep(1) # security to allow all jobs to be launched 2081 self.wait_for_complete(run_type)
2082 2083
    def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
                            integration_step,mode,run_mode,fixed_order=True):
        """Collect the results, make HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list
        of the jobs that still need to be run, as well as the
        complete list of jobs that need to be collected to get the
        final answer.
        """
        # Get the results of the current integration/MINT step
        self.append_the_results(jobs_to_run,integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)
        # Update HTML pages
        if fixed_order:
            cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode],
                                                           jobs=jobs_to_collect)
        else:
            name_suffix={'born' :'B' , 'all':'F'}
            cross, error = self.make_make_all_html_results(folder_names=['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)
        # Combine grids from split fixed order jobs
        if fixed_order:
            jobs_to_run=self.combine_split_order_run(jobs_to_run)
        # Set-up jobs for the next iteration/MINT step
        jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)
        # IF THERE ARE NO MORE JOBS, WE ARE DONE!!!
        if fixed_order:
            # Write the jobs_to_collect directory to file so that we
            # can restart them later (with only-generation option)
            with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
                pickle.dump(jobs_to_collect,f)
        # Print summary
        if (not jobs_to_run_new) and fixed_order:
            # print final summary of results (for fixed order)
            scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
            # early return: nothing left to run
            return jobs_to_run_new,jobs_to_collect
        elif jobs_to_run_new:
            # print intermediate summary of results
            scale_pdf_info=[]
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
        else:
            # When we are done for (N)LO+PS runs, do not print
            # anything yet. This will be done after the reweighting
            # and collection of the events
            scale_pdf_info=[]
        # Prepare for the next integration/MINT step
        if (not fixed_order) and integration_step+1 == 2 :
            # Write the jobs_to_collect directory to file so that we
            # can restart them later (with only-generation option)
            with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
                pickle.dump(jobs_to_collect,f)
            # next step is event generation (mint_step 2)
            jobs_to_run_new,jobs_to_collect_new= \
                self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            # fixed-order refinement: split too-long jobs when running
            # adaptively (req_acc_FO > 0) and not in applgrid mode
            if fixed_order and self.run_card['iappl'] == 0 \
               and self.run_card['req_acc_FO'] > 0:
                jobs_to_run_new,jobs_to_collect= \
                    self.split_jobs_fixed_order(jobs_to_run_new,jobs_to_collect)
                self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            jobs_to_collect_new=jobs_to_collect
        return jobs_to_run_new,jobs_to_collect_new
2150 2151
2152 - def write_nevents_unweighted_file(self,jobs,jobs0events):
2153 """writes the nevents_unweighted file in the SubProcesses directory. 2154 We also need to write the jobs that will generate 0 events, 2155 because that makes sure that the cross section from those channels 2156 is taken into account in the event weights (by collect_events.f). 2157 """ 2158 content=[] 2159 for job in jobs: 2160 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 2161 lhefile=pjoin(path,'events.lhe') 2162 content.append(' %s %d %9e %9e' % \ 2163 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac'])) 2164 for job in jobs0events: 2165 if job['nevents']==0: 2166 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 2167 lhefile=pjoin(path,'events.lhe') 2168 content.append(' %s %d %9e %9e' % \ 2169 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.)) 2170 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f: 2171 f.write('\n'.join(content)+'\n')
2172
2173 - def write_nevts_files(self,jobs):
2174 """write the nevts files in the SubProcesses/P*/G*/ directories""" 2175 for job in jobs: 2176 with open(pjoin(job['dirname'],'nevts'),'w') as f: 2177 if self.run_card['event_norm'].lower()=='bias': 2178 f.write('%i %f\n' % (job['nevents'],self.cross_sect_dict['xseca'])) 2179 else: 2180 f.write('%i\n' % job['nevents'])
2181
2182 - def combine_split_order_run(self,jobs_to_run):
2183 """Combines jobs and grids from split jobs that have been run""" 2184 # combine the jobs that need to be combined in job 2185 # groups. Simply combine the ones that have the same p_dir and 2186 # same channel. 2187 jobgroups_to_combine=[] 2188 jobs_to_run_new=[] 2189 for job in jobs_to_run: 2190 if job['split'] == 0: 2191 job['combined']=1 2192 jobs_to_run_new.append(job) # this jobs wasn't split 2193 elif job['split'] == 1: 2194 jobgroups_to_combine.append(filter(lambda j: j['p_dir'] == job['p_dir'] and \ 2195 j['channel'] == job['channel'], jobs_to_run)) 2196 else: 2197 continue 2198 for job_group in jobgroups_to_combine: 2199 # Combine the grids (mint-grids & MC-integer grids) first 2200 self.combine_split_order_grids(job_group) 2201 jobs_to_run_new.append(self.combine_split_order_jobs(job_group)) 2202 return jobs_to_run_new
2203
2204 - def combine_split_order_jobs(self,job_group):
2205 """combine the jobs in job_group and return a single summed job""" 2206 # first copy one of the jobs in 'jobs' 2207 sum_job=copy.copy(job_group[0]) 2208 # update the information to have a 'non-split' job: 2209 sum_job['dirname']=pjoin(sum_job['dirname'].rsplit('_',1)[0]) 2210 sum_job['split']=0 2211 sum_job['wgt_mult']=1.0 2212 sum_job['combined']=len(job_group) 2213 # information to be summed: 2214 keys=['niters_done','npoints_done','niters','npoints',\ 2215 'result','resultABS','time_spend'] 2216 keys2=['error','errorABS'] 2217 # information to be summed in quadrature: 2218 for key in keys2: 2219 sum_job[key]=math.pow(sum_job[key],2) 2220 # Loop over the jobs and sum the information 2221 for i,job in enumerate(job_group): 2222 if i==0 : continue # skip the first 2223 for key in keys: 2224 sum_job[key]+=job[key] 2225 for key in keys2: 2226 sum_job[key]+=math.pow(job[key],2) 2227 for key in keys2: 2228 sum_job[key]=math.sqrt(sum_job[key]) 2229 sum_job['err_percABS'] = sum_job['errorABS']/sum_job['resultABS']*100. 2230 sum_job['err_perc'] = sum_job['error']/sum_job['result']*100. 2231 sum_job['niters']=int(sum_job['niters_done']/len(job_group)) 2232 sum_job['niters_done']=int(sum_job['niters_done']/len(job_group)) 2233 return sum_job
2234 2235
2236 - def combine_split_order_grids(self,job_group):
2237 """Combines the mint_grids and MC-integer grids from the split order 2238 jobs (fixed order only). 2239 """ 2240 files_mint_grids=[] 2241 files_MC_integer=[] 2242 location=None 2243 for job in job_group: 2244 files_mint_grids.append(open(pjoin(job['dirname'],'mint_grids'),'r+')) 2245 files_MC_integer.append(open(pjoin(job['dirname'],'grid.MC_integer'),'r+')) 2246 if not location: 2247 location=pjoin(job['dirname'].rsplit('_',1)[0]) 2248 else: 2249 if location != pjoin(job['dirname'].rsplit('_',1)[0]) : 2250 raise aMCatNLOError('Not all jobs have the same location. '\ 2251 +'Cannot combine them.') 2252 # Needed to average the grids (both xgrids, ave_virt and 2253 # MC_integer grids), but sum the cross section info. The 2254 # latter is only the only line that contains integers. 2255 for j,fs in enumerate([files_mint_grids,files_MC_integer]): 2256 linesoffiles=[f.readlines() for f in fs] 2257 to_write=[] 2258 for rowgrp in zip(*linesoffiles): 2259 try: 2260 # check that last element on the line is an 2261 # integer (will raise ValueError if not the 2262 # case). If integer, this is the line that 2263 # contains information that needs to be 2264 # summed. All other lines can be averaged. 2265 is_integer = [[int(row.strip().split()[-1])] for row in rowgrp] 2266 floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp] 2267 floatgrps = zip(*floatsbyfile) 2268 special=[] 2269 for i,floatgrp in enumerate(floatgrps): 2270 if i==0: # sum X-sec 2271 special.append(sum(floatgrp)) 2272 elif i==1: # sum unc in quadrature 2273 special.append(math.sqrt(sum([err**2 for err in floatgrp]))) 2274 elif i==2: # average number of PS per iteration 2275 special.append(int(sum(floatgrp)/len(floatgrp))) 2276 elif i==3: # sum the number of iterations 2277 special.append(int(sum(floatgrp))) 2278 elif i==4: # average the nhits_in_grids 2279 special.append(int(sum(floatgrp)/len(floatgrp))) 2280 else: 2281 raise aMCatNLOError('"mint_grids" files not in correct format. 
'+\ 2282 'Cannot combine them.') 2283 to_write.append(" ".join(str(s) for s in special) + "\n") 2284 except ValueError: 2285 # just average all 2286 floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp] 2287 floatgrps = zip(*floatsbyfile) 2288 averages = [sum(floatgrp)/len(floatgrp) for floatgrp in floatgrps] 2289 to_write.append(" ".join(str(a) for a in averages) + "\n") 2290 # close the files 2291 for f in fs: 2292 f.close 2293 # write the data over the master location 2294 if j==0: 2295 with open(pjoin(location,'mint_grids'),'w') as f: 2296 f.writelines(to_write) 2297 elif j==1: 2298 with open(pjoin(location,'grid.MC_integer'),'w') as f: 2299 f.writelines(to_write)
2300 2301
    def split_jobs_fixed_order(self,jobs_to_run,jobs_to_collect):
        """Looks in the jobs_to_run to see if there is the need to split the
        jobs, depending on the expected time they take. Updates
        jobs_to_run and jobs_to_collect to replace the split-job by
        its splits.
        """
        # determine the number jobs we should have (this is per p_dir)
        if self.options['run_mode'] ==2:
            nb_submit = int(self.options['nb_core'])
        elif self.options['run_mode'] ==1:
            nb_submit = int(self.options['cluster_size'])
        else:
            nb_submit =1
        # total expected aggregated running time, extrapolated from the
        # time already spent per point/iteration
        time_expected=0
        for job in jobs_to_run:
            time_expected+=job['time_spend']*(job['niters']*job['npoints'])/ \
                           (job['niters_done']*job['npoints_done'])
        # this means that we must expect the following per job (in
        # ideal conditions)
        time_per_job=time_expected/(nb_submit*(1+len(jobs_to_run)/2))
        jobs_to_run_new=[]
        jobs_to_collect_new=copy.copy(jobs_to_collect)
        for job in jobs_to_run:
            # remove current job from jobs_to_collect. Make sure
            # to remove all the split ones in case the original
            # job had been a split one (before it was re-combined)
            # NOTE(review): relies on Python-2 filter() returning a list,
            # so removing from jobs_to_collect_new while looping is safe.
            for j in filter(lambda j: j['p_dir'] == job['p_dir'] and \
                            j['channel'] == job['channel'], jobs_to_collect_new):
                jobs_to_collect_new.remove(j)
            time_expected=job['time_spend']*(job['niters']*job['npoints'])/ \
                          (job['niters_done']*job['npoints_done'])
            # if the time expected for this job is (much) larger than
            # the time spend in the previous iteration, and larger
            # than the expected time per job, split it
            if time_expected > max(2*job['time_spend']/job['combined'],time_per_job):
                # determine the number of splits needed (at least 2, at most
                # the number of simultaneous submissions)
                nsplit=min(max(int(time_expected/max(2*job['time_spend']/job['combined'],time_per_job)),2),nb_submit)
                for i in range(1,nsplit+1):
                    job_new=copy.copy(job)
                    job_new['split']=i
                    # each split carries 1/nsplit of the weight
                    job_new['wgt_mult']=1./float(nsplit)
                    job_new['dirname']=job['dirname']+'_%i' % job_new['split']
                    # loosen the per-split accuracy accordingly (capped at 0.1)
                    job_new['accuracy']=min(job['accuracy']*math.sqrt(float(nsplit)),0.1)
                    if nsplit >= job['niters']:
                        job_new['npoints']=int(job['npoints']*job['niters']/nsplit)
                        job_new['niters']=1
                    else:
                        job_new['npoints']=int(job['npoints']/nsplit)
                    jobs_to_collect_new.append(job_new)
                    jobs_to_run_new.append(job_new)
            else:
                # no split needed: keep the job as-is
                jobs_to_collect_new.append(job)
                jobs_to_run_new.append(job)
        return jobs_to_run_new,jobs_to_collect_new
2357 2358
2359 - def check_the_need_to_split(self,jobs_to_run,jobs_to_collect):
2360 """Looks in the jobs_to_run to see if there is the need to split the 2361 event generation step. Updates jobs_to_run and 2362 jobs_to_collect to replace the split-job by its 2363 splits. Also removes jobs that do not need any events. 2364 """ 2365 nevt_job=self.run_card['nevt_job'] 2366 if nevt_job > 0: 2367 jobs_to_collect_new=copy.copy(jobs_to_collect) 2368 for job in jobs_to_run: 2369 nevents=job['nevents'] 2370 if nevents == 0: 2371 jobs_to_collect_new.remove(job) 2372 elif nevents > nevt_job: 2373 jobs_to_collect_new.remove(job) 2374 if nevents % nevt_job != 0 : 2375 nsplit=int(nevents/nevt_job)+1 2376 else: 2377 nsplit=int(nevents/nevt_job) 2378 for i in range(1,nsplit+1): 2379 job_new=copy.copy(job) 2380 left_over=nevents % nsplit 2381 if i <= left_over: 2382 job_new['nevents']=int(nevents/nsplit)+1 2383 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 2384 else: 2385 job_new['nevents']=int(nevents/nsplit) 2386 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 2387 job_new['split']=i 2388 job_new['dirname']=job['dirname']+'_%i' % job_new['split'] 2389 jobs_to_collect_new.append(job_new) 2390 jobs_to_run_new=copy.copy(jobs_to_collect_new) 2391 else: 2392 jobs_to_run_new=copy.copy(jobs_to_collect) 2393 for job in jobs_to_collect: 2394 if job['nevents'] == 0: 2395 jobs_to_run_new.remove(job) 2396 jobs_to_collect_new=copy.copy(jobs_to_run_new) 2397 2398 return jobs_to_run_new,jobs_to_collect_new
2399 2400
    def update_jobs_to_run(self,req_acc,step,jobs,fixed_order=True):
        """
        For (N)LO+PS: determines the number of events and/or the required
        accuracy per job.
        For fixed order: determines which jobs need higher precision and
        returns those with the newly requested precision.
        """
        # totals of the previous step, filled by write_res_txt_file
        err=self.cross_sect_dict['errt']
        tot=self.cross_sect_dict['xsect']
        errABS=self.cross_sect_dict['erra']
        totABS=self.cross_sect_dict['xseca']
        jobs_new=[]
        if fixed_order:
            if req_acc == -1:
                # fixed statistics mode: one refinement step with the
                # run_card numbers, then we are done
                if step+1 == 1:
                    npoints = self.run_card['npoints_FO']
                    niters = self.run_card['niters_FO']
                    for job in jobs:
                        job['mint_mode']=-1
                        job['niters']=niters
                        job['npoints']=npoints
                        jobs_new.append(job)
                elif step+1 == 2:
                    pass
                elif step+1 > 2:
                    raise aMCatNLOError('Cannot determine number of iterations and PS points '+
                                        'for integration step %i' % step )
            elif ( req_acc > 0 and err/abs(tot) > req_acc*1.2 ) or step <= 0:
                # accuracy not yet reached (20% margin), or still in the
                # grid set-up steps: refine
                req_accABS=req_acc*abs(tot)/totABS # overal relative required accuracy on ABS Xsec.
                for job in jobs:
                    job['mint_mode']=-1
                    # Determine relative required accuracy on the ABS for this job
                    job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
                    # If already accurate enough, skip the job (except when doing the first
                    # step for the iappl=2 run: we need to fill all the applgrid grids!)
                    if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
                       and not (step==-1 and self.run_card['iappl'] == 2):
                        continue
                    # Update the number of PS points based on errorABS, ncall and accuracy
                    itmax_fl=job['niters_done']*math.pow(job['errorABS']/
                                                 (job['accuracy']*job['resultABS']),2)
                    if itmax_fl <= 4.0 :
                        job['niters']=max(int(round(itmax_fl)),2)
                        job['npoints']=job['npoints_done']*2
                    elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
                        job['niters']=4
                        job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
                    else:
                        # cap the iteration estimate to keep jobs bounded
                        if itmax_fl > 100.0 : itmax_fl=50.0
                        job['niters']=int(round(math.sqrt(itmax_fl)))
                        job['npoints']=int(round(job['npoints_done']*itmax_fl/
                                            round(math.sqrt(itmax_fl))))*2
                    # Add the job to the list of jobs that need to be run
                    jobs_new.append(job)
            return jobs_new
        elif step+1 <= 2:
            # (N)LO+PS running: set accuracies (MINT step 1) and
            # distribute the events over the jobs (MINT step 2)
            nevents=self.run_card['nevents']
            # Total required accuracy for the upper bounding envelope
            if req_acc<0:
                req_acc2_inv=nevents
            else:
                req_acc2_inv=1/(req_acc*req_acc)
            if step+1 == 1 or step+1 == 2 :
                # determine the req. accuracy for each of the jobs for Mint-step = 1
                for job in jobs:
                    accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
                    job['accuracy']=accuracy
            if step+1 == 2:
                # Randomly (based on the relative ABS Xsec of the job) determine the
                # number of events each job needs to generate for MINT-step = 2.
                r=self.get_randinit_seed()
                random.seed(r)
                totevts=nevents
                for job in jobs:
                    job['nevents'] = 0
                # draw one event at a time; each draw picks a job with
                # probability proportional to its ABS cross section
                while totevts :
                    target = random.random() * totABS
                    crosssum = 0.
                    i = 0
                    while i<len(jobs) and crosssum < target:
                        job = jobs[i]
                        crosssum += job['resultABS']
                        i += 1
                    totevts -= 1
                    i -= 1
                    jobs[i]['nevents'] += 1
            for job in jobs:
                job['mint_mode']=step+1 # next step
            return jobs
        else:
            # (N)LO+PS beyond MINT step 2: nothing left to run
            return []
2492 2493
2494 - def get_randinit_seed(self):
2495 """ Get the random number seed from the randinit file """ 2496 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit: 2497 # format of the file is "r=%d". 2498 iseed = int(randinit.read()[2:]) 2499 return iseed
2500 2501
2502 - def append_the_results(self,jobs,integration_step):
2503 """Appends the results for each of the jobs in the job list""" 2504 error_found=False 2505 for job in jobs: 2506 try: 2507 if integration_step >= 0 : 2508 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file: 2509 results=res_file.readline().split() 2510 else: 2511 # should only be here when doing fixed order with the 'only_generation' 2512 # option equal to True. Take the results from the final run done. 2513 with open(pjoin(job['dirname'],'res.dat')) as res_file: 2514 results=res_file.readline().split() 2515 except IOError: 2516 if not error_found: 2517 error_found=True 2518 error_log=[] 2519 error_log.append(pjoin(job['dirname'],'log.txt')) 2520 continue 2521 job['resultABS']=float(results[0]) 2522 job['errorABS']=float(results[1]) 2523 job['result']=float(results[2]) 2524 job['error']=float(results[3]) 2525 job['niters_done']=int(results[4]) 2526 job['npoints_done']=int(results[5]) 2527 job['time_spend']=float(results[6]) 2528 job['err_percABS'] = job['errorABS']/job['resultABS']*100. 2529 job['err_perc'] = job['error']/job['result']*100. 2530 if error_found: 2531 raise aMCatNLOError('An error occurred during the collection of results.\n' + 2532 'Please check the .log files inside the directories which failed:\n' + 2533 '\n'.join(error_log)+'\n')
2534 2535 2536
    def write_res_txt_file(self,jobs,integration_step):
        """Writes the res_<step>.txt file in the SubProcess dir and returns
        a dictionary with the total (ABS) cross sections and errors.

        NOTE: sorts 'jobs' in place (largest absolute error first), so the
        caller's list order is modified.
        """
        jobs.sort(key = lambda job: -job['errorABS'])
        content=[]
        content.append('\n\nCross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
        content.append('\n\nABS cross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
        # combine the channels: sums for the cross sections, quadrature
        # for the errors
        totABS=0
        errABS=0
        tot=0
        err=0
        for job in jobs:
            totABS+= job['resultABS']*job['wgt_frac']
            # NOTE(review): variance weighted by wgt_frac (not wgt_frac**2) —
            # presumably intended since wgt_frac is an event fraction; confirm.
            errABS+= math.pow(job['errorABS'],2)*job['wgt_frac']
            tot+= job['result']*job['wgt_frac']
            err+= math.pow(job['error'],2)*job['wgt_frac']
        if jobs:
            content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
                           (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
                            tot, math.sqrt(err), math.sqrt(err)/tot *100.))
        with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file:
            res_file.write('\n'.join(content))
        randinit=self.get_randinit_seed()
        return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\
                'erra':math.sqrt(errABS),'randinit':randinit}
2565 2566
2567 - def collect_scale_pdf_info(self,options,jobs):
2568 """read the scale_pdf_dependence.dat files and collects there results""" 2569 scale_pdf_info=[] 2570 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 2571 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 2572 evt_files=[] 2573 evt_wghts=[] 2574 for job in jobs: 2575 evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat')) 2576 evt_wghts.append(job['wgt_frac']) 2577 scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts) 2578 return scale_pdf_info
2579 2580
    def combine_plots_FO(self,folder_name,jobs):
        """Combines the plots and puts them in the Events/run* directory,
        dispatching on the 'fo_analysis_format' entry of the analyse card
        (topdrawer / HwU / root / lhe / anything else)."""
        # NOTE(review): devnull is opened but never closed in this method.
        devnull = open(os.devnull, 'w')

        if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
            misc.call(['./combine_plots_FO.sh'] + folder_name, \
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the TopDrawer file with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
            out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO')
            self.combine_plots_HwU(jobs,out)
            # gnuplot is optional: silently skip the plot rendering if the
            # call fails (e.g. gnuplot not installed)
            try:
                misc.call(['gnuplot','MADatNLO.gnuplot'],\
                          stdout=devnull,stderr=devnull,\
                          cwd=pjoin(self.me_dir, 'Events', self.run_name))
            except Exception:
                pass
            logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'root':
            misc.call(['./combine_root.sh'] + folder_name, \
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the ROOT file with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'lhe':
            self.combine_FO_lhe(jobs)
            logger.info('The results of this run and the LHE File (to be used for plotting only)' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        else:
            # no recognised plot format: only the raw results are kept
            logger.info('The results of this run' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2619
    def combine_FO_lhe(self, jobs):
        """combine the various lhe file generated in each directory.
        There are three steps:
        1) banner
        2) reweight each sample by the factor written at the end of each file
        3) concatenate each of the new files (gzip those).
        """

        logger.info('Combining lhe events for plotting analysis')
        start = time.time()
        # normalise the postprocessing options to lower case before the tests below
        self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']]
        output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        if os.path.exists(output):
            os.remove(output)

        # 1. write the banner
        text = open(pjoin(jobs[0]['dirname'],'header.txt'),'r').read()
        # keep only the body of the <initrwgt> block; 10 == len('<initrwgt>')
        i1, i2 = text.find('<initrwgt>'),text.find('</initrwgt>')
        self.banner['initrwgt'] = text[10+i1:i2]
        #
        # <init>
        # 2212 2212 6.500000e+03 6.500000e+03 0 0 247000 247000 -4 1
        # 8.430000e+02 2.132160e+00 8.430000e+02 1
        # <generator name='MadGraph5_aMC@NLO' version='2.5.2'>please cite 1405.0301 </generator>
        # </init>

        cross = sum(j['result'] for j in jobs)
        # NOTE(review): the per-channel errors are summed *linearly* under the
        # sqrt; for independent channels one would expect addition in
        # quadrature (sum of j['error']**2) -- confirm whether j['error']
        # already stores a squared error before changing this.
        error = math.sqrt(sum(j['error'] for j in jobs))
        self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross)
        # write the uncompressed banner first, then gzip it in place
        self.banner.write(output[:-3], close_tag=False)
        misc.gzip(output[:-3])

        # append all (reweighted) events to the gzipped banner file
        fsock = lhe_parser.EventFile(output,'a')
        if 'nogrouping' in self.run_card['fo_lhe_postprocessing']:
            fsock.eventgroup = False
        else:
            fsock.eventgroup = True

        if 'norandom' in self.run_card['fo_lhe_postprocessing']:
            # keep channel ordering: process each job's event file in turn
            for job in jobs:
                dirname = job['dirname']
                #read last line
                lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                # get normalisation ratio
                ratio = cross/sumwgt
                lhe = lhe_parser.EventFile(pjoin(dirname,'events.lhe'))
                lhe.eventgroup = True # read the events by eventgroup
                for eventsgroup in lhe:
                    neweventsgroup = []
                    for i,event in enumerate(eventsgroup):
                        event.rescale_weights(ratio)
                        # merge consecutive identical events within a group
                        # (unless the user disabled identification)
                        if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                               and event == neweventsgroup[-1]:
                            neweventsgroup[-1].wgt += event.wgt
                            for key in event.reweight_data:
                                neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                        else:
                            neweventsgroup.append(event)
                    fsock.write_events(neweventsgroup)
                lhe.close()
                os.remove(pjoin(dirname,'events.lhe'))
        else:
            # default mode: interleave events of all channels randomly
            # through a MultiEventFile
            lhe = []      # NOTE(review): unused in this branch
            lenlhe = []   # NOTE(review): unused in this branch
            misc.sprint('need to combine %s event file' % len(jobs))
            globallhe = lhe_parser.MultiEventFile()
            globallhe.eventgroup = True
            for job in jobs:
                dirname = job['dirname']
                lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                lastlhe = globallhe.add(pjoin(dirname,'events.lhe'),cross, 0, cross,
                                        nb_event=int(nb_event), scale=cross/sumwgt)
            for eventsgroup in globallhe:
                neweventsgroup = []
                for i,event in enumerate(eventsgroup):
                    # each sample carries its own normalisation factor
                    event.rescale_weights(event.sample_scale)
                    if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                           and event == neweventsgroup[-1]:
                        neweventsgroup[-1].wgt += event.wgt
                        for key in event.reweight_data:
                            neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                    else:
                        neweventsgroup.append(event)
                fsock.write_events(neweventsgroup)
            globallhe.close()
            fsock.write('</LesHouchesEvents>\n')
            fsock.close()
            misc.sprint('combining lhe file done in ', time.time()-start)
            for job in jobs:
                dirname = job['dirname']
                os.remove(pjoin(dirname,'events.lhe'))

        # NOTE(review): in the 'norandom' branch fsock is never explicitly
        # closed and no closing </LesHouchesEvents> tag is written -- confirm
        # whether lhe_parser.EventFile handles that on garbage collection.
        misc.sprint('combining lhe file done in ', time.time()-start)
2722 2723 2724 2725 2726 2727
2728 - def combine_plots_HwU(self,jobs,out,normalisation=None):
2729 """Sums all the plots in the HwU format.""" 2730 logger.debug('Combining HwU plots.') 2731 2732 command = [] 2733 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py')) 2734 for job in jobs: 2735 if job['dirname'].endswith('.HwU'): 2736 command.append(job['dirname']) 2737 else: 2738 command.append(pjoin(job['dirname'],'MADatNLO.HwU')) 2739 command.append("--out="+out) 2740 command.append("--gnuplot") 2741 command.append("--band=[]") 2742 command.append("--lhapdf-config="+self.options['lhapdf']) 2743 if normalisation: 2744 command.append("--multiply="+(','.join([str(n) for n in normalisation]))) 2745 command.append("--sum") 2746 command.append("--keep_all_weights") 2747 command.append("--no_open") 2748 2749 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir) 2750 2751 while p.poll() is None: 2752 line = p.stdout.readline() 2753 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']): 2754 print line[:-1] 2755 elif __debug__ and line: 2756 logger.debug(line[:-1])
2757 2758
2759 - def applgrid_combine(self,cross,error,jobs):
2760 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories""" 2761 logger.debug('Combining APPLgrids \n') 2762 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'), 2763 'applgrid-combine') 2764 all_jobs=[] 2765 for job in jobs: 2766 all_jobs.append(job['dirname']) 2767 ngrids=len(all_jobs) 2768 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")]) 2769 for obs in range(0,nobs): 2770 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs] 2771 # combine APPLgrids from different channels for observable 'obs' 2772 if self.run_card["iappl"] == 1: 2773 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name, 2774 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir) 2775 elif self.run_card["iappl"] == 2: 2776 unc2_inv=pow(cross/error,2) 2777 unc2_inv_ngrids=pow(cross/error,2)*ngrids 2778 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events", 2779 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s', 2780 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir) 2781 for job in all_jobs: 2782 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root")) 2783 else: 2784 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2') 2785 # after combining, delete the original grids 2786 for ggdir in gdir: 2787 os.remove(ggdir)
2788 2789
    def applgrid_distribute(self, options, mode, p_dirs):
        """Distributes the APPLgrids ready to be filled by a second run of the code"""
        # if no appl_start_grid argument given, guess it from the time stamps
        # of the starting grid files
        if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
            gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                               pjoin(self.me_dir,'Events'))

            time_stamps={}
            for root_file in gfiles:
                time_stamps[root_file]=os.path.getmtime(root_file)
            # the run name is the second-to-last path component of the most
            # recently modified starting grid (Events/<run_name>/<grid>.root)
            options['appl_start_grid']= \
                max(time_stamps.iterkeys(), key=(lambda key:
                                   time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. '+\
                        'Guessing that start grid from run "%s" should be used.' \
                        % options['appl_start_grid'])

        if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # check that this dir exists and at least one grid file is there
            if not os.path.exists(pjoin(start_grid_dir,
                                        'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' % \
                            pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
            else:
                all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
                            start_grid_dir) if name.endswith("_starting_grid.root")]
                nobs =len(all_grids)          # NOTE(review): unused below
                gstring=" ".join(all_grids)   # NOTE(review): unused below
        # self.appl_start_grid may also have been set by a previous call;
        # if it is still undefined there is nothing we can distribute
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined.'+
                                  'Please provide this information.')
        #copy the grid to all relevant directories
        for pdir in p_dirs:
            # integration channel directories of the form <mode>_G* in each P*
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                      "SubProcesses",pdir)) if file.startswith(mode+'_G') and
                      os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    # grids are named aMCfast_obs_<N>_starting_grid.root, so
                    # the observable index is the third-to-last '_' field
                    obs=grid.split('_')[-3]
                    files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
                                        'grid_obs_'+obs+'_in.root'))
2834 2835 2836 2837
2838 - def collect_log_files(self, jobs, integration_step):
2839 """collect the log files and put them in a single, html-friendly file 2840 inside the Events/run_.../ directory""" 2841 log_file = pjoin(self.me_dir, 'Events', self.run_name, 2842 'alllogs_%d.html' % integration_step) 2843 outfile = open(log_file, 'w') 2844 2845 content = '' 2846 content += '<HTML><BODY>\n<font face="courier" size=2>' 2847 for job in jobs: 2848 # put an anchor 2849 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step) 2850 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace( 2851 pjoin(self.me_dir,'SubProcesses'),'')) 2852 # and put some nice header 2853 content += '<font color="red">\n' 2854 content += '<br>LOG file for integration channel %s, %s <br>' % \ 2855 (os.path.dirname(log).replace(pjoin(self.me_dir, 2856 'SubProcesses'), ''), 2857 integration_step) 2858 content += '</font>\n' 2859 #then just flush the content of the small log inside the big log 2860 #the PRE tag prints everything verbatim 2861 with open(log) as l: 2862 content += '<PRE>\n' + l.read() + '\n</PRE>' 2863 content +='<br>\n' 2864 outfile.write(content) 2865 content='' 2866 2867 outfile.write('</font>\n</BODY></HTML>\n') 2868 outfile.close()
2869 2870
2871 - def finalise_run_FO(self,folder_name,jobs):
2872 """Combine the plots and put the res*.txt files in the Events/run.../ folder.""" 2873 # Copy the res_*.txt files to the Events/run* folder 2874 res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses')) 2875 for res_file in res_files: 2876 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2877 # Collect the plots and put them in the Events/run* folder 2878 self.combine_plots_FO(folder_name,jobs) 2879 # If doing the applgrid-stuff, also combine those grids 2880 # and put those in the Events/run* folder 2881 if self.run_card['iappl'] != 0: 2882 cross=self.cross_sect_dict['xsect'] 2883 error=self.cross_sect_dict['errt'] 2884 self.applgrid_combine(cross,error,jobs)
2885 2886
2887 - def setup_cluster_or_multicore(self):
2888 """setup the number of cores for multicore, and the cluster-type for cluster runs""" 2889 if self.cluster_mode == 1: 2890 cluster_name = self.options['cluster_type'] 2891 try: 2892 self.cluster = cluster.from_name[cluster_name](**self.options) 2893 except KeyError: 2894 # Check if a plugin define this type of cluster 2895 # check for PLUGIN format 2896 cluster_class = misc.from_plugin_import(self.plugin_path, 2897 'new_cluster', cluster_name, 2898 info = 'cluster handling will be done with PLUGIN: %{plug}s' ) 2899 if cluster_class: 2900 self.cluster = cluster_class(**self.options) 2901 2902 if self.cluster_mode == 2: 2903 try: 2904 import multiprocessing 2905 if not self.nb_core: 2906 try: 2907 self.nb_core = int(self.options['nb_core']) 2908 except TypeError: 2909 self.nb_core = multiprocessing.cpu_count() 2910 logger.info('Using %d cores' % self.nb_core) 2911 except ImportError: 2912 self.nb_core = 1 2913 logger.warning('Impossible to detect the number of cores => Using One.\n'+ 2914 'Use set nb_core X in order to set this number and be able to'+ 2915 'run in multicore.') 2916 2917 self.cluster = cluster.MultiCore(**self.options)
2918 2919
    def clean_previous_results(self, options, p_dirs, folder_name):
        """Clean previous results.
        o.  If doing only the reweighting step, do not delete anything and return directlty.
        o.  Always remove all the G*_* files (from split event generation).
        o.  Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only."""
        if options['reweightonly']:
            return
        if not options['only_generation']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            #find old folders to be removed
            for obj in folder_name:
                # obj is a glob-like pattern ('G*', 'born_G*' or 'all_G*');
                # obj[:-1] strips the trailing '*' to get the name prefix
                # list all the G* (or all_G* or born_G*) directories
                to_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                # list all the G*_* directories (from split event generation);
                # the "not '_G' in file" test keeps e.g. 'all_G1' out of this list
                to_always_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and
                            '_' in file and not '_G' in file and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                if not options['only_generation']:
                    to_always_rm.extend(to_rm)
                # stale MadLoop resource tarballs are always removed as well.
                # This entry is an absolute path, which os.path.join below
                # leaves intact when re-joined with the SubProcesses prefix.
                if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
                    to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
        return
2952 2953
    def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
        """print a summary of the results contained in self.cross_sect_dict.
        step corresponds to the mintMC step, if =2 (i.e. after event generation)
        some additional infos are printed

        NOTE(review): scale_pdf_info uses a mutable default argument; it is
        only read here, never mutated, so this is harmless as written.
        """
        # find process name
        proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
        process = ''
        for line in proc_card_lines:
            if line.startswith('generate') or line.startswith('add process'):
                process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
        # human-readable beam labels; process[:-3] drops the trailing ' ; '
        lpp = {0:'l', 1:'p', -1:'pbar'}
        if self.ninitial == 1:
            proc_info = '\n Process %s' % process[:-3]
        else:
            proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
                (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
                 self.run_card['ebeam1'], self.run_card['ebeam2'])

        # units and labels depend on decay (1 initial particle) vs collision
        if self.ninitial == 1:
            self.cross_sect_dict['unit']='GeV'
            self.cross_sect_dict['xsec_string']='(Partial) decay width'
            self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
        else:
            self.cross_sect_dict['unit']='pb'
            self.cross_sect_dict['xsec_string']='Total cross section'
            self.cross_sect_dict['axsec_string']='Total abs(cross section)'
        if self.run_card['event_norm'].lower()=='bias':
            self.cross_sect_dict['xsec_string']+=', incl. bias (DO NOT USE)'

        # status headers per integration step, and how the numbers were obtained
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            status = ['Determining the number of unweighted events per channel',
                      'Updating the number of unweighted events per channel',
                      'Summary:']
            computed='(computed from LHE events)'
        elif mode in ['NLO', 'LO']:
            status = ['Results after grid setup:','Current results:',
                      'Final results and run summary:']
            computed='(computed from histogram information)'

        if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            # intermediate event-generation report
            message = status[step] + '\n\n Intermediate results:' + \
                ('\n Random seed: %(randinit)d' + \
                 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
                 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
                 % self.cross_sect_dict
        elif mode in ['NLO','LO'] and not done:
            # intermediate fixed-order report
            if step == 0:
                message = '\n ' + status[0] + \
                    '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                    self.cross_sect_dict
            else:
                message = '\n ' + status[1] + \
                    '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                    self.cross_sect_dict

        else:
            # final summary, including scale/PDF uncertainties when available
            message = '\n --------------------------------------------------------------'
            message = message + \
                '\n ' + status[2] + proc_info
            if mode not in ['LO', 'NLO']:
                message = message + \
                    '\n Number of events generated: %s' % self.run_card['nevents']
            message = message + \
                '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                self.cross_sect_dict
            message = message + \
                '\n --------------------------------------------------------------'
            # uncertainties are only statistically meaningful with enough events
            if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
                if scale_pdf_info[0]:
                    # scale uncertainties
                    message = message + '\n Scale variation %s:' % computed
                    for s in scale_pdf_info[0]:
                        if s['unc']:
                            if self.run_card['ickkw'] != -1:
                                message = message + \
                                    ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
                                     '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
                            else:
                                message = message + \
                                    ('\n Soft and hard scale dependence (added in quadrature): '\
                                     '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s

                        else:
                            message = message + \
                                ('\n Dynamical_scale_choice %(label)i: '\
                                 '\n %(cen)8.3e pb') % s

                if scale_pdf_info[1]:
                    message = message + '\n PDF variation %s:' % computed
                    for p in scale_pdf_info[1]:
                        if p['unc']=='none':
                            message = message + \
                                ('\n %(name)s (central value only): '\
                                 '\n %(cen)8.3e pb') % p

                        elif p['unc']=='unknown':
                            message = message + \
                                ('\n %(name)s (%(size)s members; combination method unknown): '\
                                 '\n %(cen)8.3e pb') % p
                        else:
                            message = message + \
                                ('\n %(name)s (%(size)s members; using %(unc)s method): '\
                                 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
                # pdf uncertainties
                message = message + \
                    '\n --------------------------------------------------------------'


        # intermediate printouts stop here; the final summary continues below
        if (mode in ['NLO', 'LO'] and not done) or \
           (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
            logger.info(message+'\n')
            return

        # Some advanced general statistics are shown in the debug message at the
        # end of the run
        # Make sure it never stops a run
        # Gather some basic statistics for the run and extracted from the log files.
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'NLO':
            log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files

        elif mode == 'LO':
            # at pure LO there are no virtual corrections, hence no MadLoop logs
            log_GV_files = ''
            all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
                                      pjoin(self.me_dir, 'SubProcesses'))
        else:
            raise aMCatNLOError, 'Running mode %s not supported.'%mode

        try:
            message, debug_msg = \
                self.compile_advanced_stats(log_GV_files, all_log_files, message)
        except Exception as e:
            # statistics collection must never kill the run: record the
            # backtrace in the debug message and carry on
            debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
            err_string = StringIO.StringIO()
            traceback.print_exc(limit=4, file=err_string)
            debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
                %err_string.getvalue()

        logger.debug(debug_msg+'\n')
        logger.info(message+'\n')

        # Now copy relevant information in the Events/Run_<xxx> directory
        evt_path = pjoin(self.me_dir, 'Events', self.run_name)
        open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
        open(pjoin(evt_path, '.full_summary.txt'),
             'w').write(message+'\n\n'+debug_msg+'\n')

        self.archive_files(evt_path,mode)
3107
3108 - def archive_files(self, evt_path, mode):
3109 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 3110 the run.""" 3111 3112 files_to_arxiv = [pjoin('Cards','param_card.dat'), 3113 pjoin('Cards','MadLoopParams.dat'), 3114 pjoin('Cards','FKS_params.dat'), 3115 pjoin('Cards','run_card.dat'), 3116 pjoin('Subprocesses','setscales.f'), 3117 pjoin('Subprocesses','cuts.f')] 3118 3119 if mode in ['NLO', 'LO']: 3120 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 3121 3122 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 3123 os.mkdir(pjoin(evt_path,'RunMaterial')) 3124 3125 for path in files_to_arxiv: 3126 if os.path.isfile(pjoin(self.me_dir,path)): 3127 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 3128 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 3129 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
3130
3131 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
3132 """ This functions goes through the log files given in arguments and 3133 compiles statistics about MadLoop stability, virtual integration 3134 optimization and detection of potential error messages into a nice 3135 debug message to printed at the end of the run """ 3136 3137 def safe_float(str_float): 3138 try: 3139 return float(str_float) 3140 except ValueError: 3141 logger.debug('Could not convert the following float during'+ 3142 ' advanced statistics printout: %s'%str(str_float)) 3143 return -1.0
3144 3145 3146 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 3147 # > Errors is a list of tuples with this format (log_file,nErrors) 3148 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 3149 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 3150 3151 # ================================== 3152 # == MadLoop stability statistics == 3153 # ================================== 3154 3155 # Recuperate the fraction of unstable PS points found in the runs for 3156 # the virtuals 3157 UPS_stat_finder = re.compile( 3158 r"Satistics from MadLoop:.*"+\ 3159 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 3160 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 3161 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 3162 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 3163 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 3164 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 3165 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 3166 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 3167 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 3168 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 3169 3170 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 3171 1 : 'CutTools (double precision)', 3172 2 : 'PJFry++', 3173 3 : 'IREGI', 3174 4 : 'Golem95', 3175 5 : 'Samurai', 3176 6 : 'Ninja (double precision)', 3177 7 : 'COLLIER', 3178 8 : 'Ninja (quadruple precision)', 3179 9 : 'CutTools (quadruple precision)'} 3180 RetUnit_finder =re.compile( 3181 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 3182 #Unit 3183 3184 for gv_log in log_GV_files: 3185 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 3186 log=open(gv_log,'r').read() 3187 UPS_stats = re.search(UPS_stat_finder,log) 3188 for retunit_stats in re.finditer(RetUnit_finder, log): 3189 if channel_name not in stats['UPS'].keys(): 3190 stats['UPS'][channel_name] = [0]*10+[[0]*10] 3191 
stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 3192 += int(retunit_stats.group('n_occurences')) 3193 if not UPS_stats is None: 3194 try: 3195 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 3196 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 3197 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 3198 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 3199 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 3200 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 3201 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 3202 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 3203 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 3204 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 3205 except KeyError: 3206 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 3207 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 3208 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 3209 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 3210 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 3211 int(UPS_stats.group('n10')),[0]*10] 3212 debug_msg = "" 3213 if len(stats['UPS'].keys())>0: 3214 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 3215 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 3216 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 3217 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 3218 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 3219 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0) 3220 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 3221 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 3222 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 3223 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 3224 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 3225 
for i in range(10)] 3226 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 3227 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 3228 maxUPS = max(UPSfracs, key = lambda w: w[1]) 3229 3230 tmpStr = "" 3231 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 3232 tmpStr += '\n Stability unknown: %d'%nTotsun 3233 tmpStr += '\n Stable PS point: %d'%nTotsps 3234 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 3235 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 3236 tmpStr += '\n Only double precision used: %d'%nTotddp 3237 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 3238 tmpStr += '\n Initialization phase-space points: %d'%nTotini 3239 tmpStr += '\n Reduction methods used:' 3240 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 3241 unit_code_meaning.keys() if nTot1[i]>0] 3242 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 3243 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 3244 if nTot100 != 0: 3245 debug_msg += '\n Unknown return code (100): %d'%nTot100 3246 if nTot10 != 0: 3247 debug_msg += '\n Unknown return code (10): %d'%nTot10 3248 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 3249 not in unit_code_meaning.keys()) 3250 if nUnknownUnit != 0: 3251 debug_msg += '\n Unknown return code (1): %d'\ 3252 %nUnknownUnit 3253 3254 if maxUPS[1]>0.001: 3255 message += tmpStr 3256 message += '\n Total number of unstable PS point detected:'+\ 3257 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS) 3258 message += '\n Maximum fraction of UPS points in '+\ 3259 'channel %s (%4.2f%%)'%maxUPS 3260 message += '\n Please report this to the authors while '+\ 3261 'providing the file' 3262 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 3263 maxUPS[0],'UPS.log')) 3264 else: 3265 debug_msg += tmpStr 3266 3267 3268 # ==================================================== 3269 # == aMC@NLO virtual integration optimization stats == 3270 # 
==================================================== 3271 3272 virt_tricks_finder = re.compile( 3273 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 3274 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 3275 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 3276 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 3277 3278 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 3279 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 3280 3281 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 3282 3283 channel_contr_list = {} 3284 for gv_log in log_GV_files: 3285 logfile=open(gv_log,'r') 3286 log = logfile.read() 3287 logfile.close() 3288 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3289 vf_stats = None 3290 for vf_stats in re.finditer(virt_frac_finder, log): 3291 pass 3292 if not vf_stats is None: 3293 v_frac = safe_float(vf_stats.group('v_frac')) 3294 v_average = safe_float(vf_stats.group('v_average')) 3295 try: 3296 if v_frac < stats['virt_stats']['v_frac_min'][0]: 3297 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 3298 if v_frac > stats['virt_stats']['v_frac_max'][0]: 3299 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 3300 stats['virt_stats']['v_frac_avg'][0] += v_frac 3301 stats['virt_stats']['v_frac_avg'][1] += 1 3302 except KeyError: 3303 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 3304 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 3305 stats['virt_stats']['v_frac_avg']=[v_frac,1] 3306 3307 3308 ccontr_stats = None 3309 for ccontr_stats in re.finditer(channel_contr_finder, log): 3310 pass 3311 if not ccontr_stats is None: 3312 contrib = safe_float(ccontr_stats.group('v_contr')) 3313 try: 3314 if contrib>channel_contr_list[channel_name]: 3315 channel_contr_list[channel_name]=contrib 3316 except KeyError: 3317 
channel_contr_list[channel_name]=contrib 3318 3319 3320 # Now build the list of relevant virt log files to look for the maxima 3321 # of virt fractions and such. 3322 average_contrib = 0.0 3323 for value in channel_contr_list.values(): 3324 average_contrib += value 3325 if len(channel_contr_list.values()) !=0: 3326 average_contrib = average_contrib / len(channel_contr_list.values()) 3327 3328 relevant_log_GV_files = [] 3329 excluded_channels = set([]) 3330 all_channels = set([]) 3331 for log_file in log_GV_files: 3332 channel_name = '/'.join(log_file.split('/')[-3:-1]) 3333 all_channels.add(channel_name) 3334 try: 3335 if channel_contr_list[channel_name] > (0.1*average_contrib): 3336 relevant_log_GV_files.append(log_file) 3337 else: 3338 excluded_channels.add(channel_name) 3339 except KeyError: 3340 relevant_log_GV_files.append(log_file) 3341 3342 # Now we want to use the latest occurence of accumulated result in the log file 3343 for gv_log in relevant_log_GV_files: 3344 logfile=open(gv_log,'r') 3345 log = logfile.read() 3346 logfile.close() 3347 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3348 3349 vt_stats = None 3350 for vt_stats in re.finditer(virt_tricks_finder, log): 3351 pass 3352 if not vt_stats is None: 3353 vt_stats_group = vt_stats.groupdict() 3354 v_ratio = safe_float(vt_stats.group('v_ratio')) 3355 v_ratio_err = safe_float(vt_stats.group('v_ratio_err')) 3356 v_contr = safe_float(vt_stats.group('v_abs_contr')) 3357 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err')) 3358 try: 3359 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 3360 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 3361 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 3362 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 3363 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 3364 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 3365 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 3366 
stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 3367 if v_contr < stats['virt_stats']['v_contr_min'][0]: 3368 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 3369 if v_contr > stats['virt_stats']['v_contr_max'][0]: 3370 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 3371 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 3372 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 3373 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 3374 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 3375 except KeyError: 3376 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 3377 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 3378 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 3379 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 3380 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 3381 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 3382 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 3383 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 3384 3385 vf_stats = None 3386 for vf_stats in re.finditer(virt_frac_finder, log): 3387 pass 3388 if not vf_stats is None: 3389 v_frac = safe_float(vf_stats.group('v_frac')) 3390 v_average = safe_float(vf_stats.group('v_average')) 3391 try: 3392 if v_average < stats['virt_stats']['v_average_min'][0]: 3393 stats['virt_stats']['v_average_min']=(v_average,channel_name) 3394 if v_average > stats['virt_stats']['v_average_max'][0]: 3395 stats['virt_stats']['v_average_max']=(v_average,channel_name) 3396 stats['virt_stats']['v_average_avg'][0] += v_average 3397 stats['virt_stats']['v_average_avg'][1] += 1 3398 except KeyError: 3399 stats['virt_stats']['v_average_min']=[v_average,channel_name] 3400 stats['virt_stats']['v_average_max']=[v_average,channel_name] 3401 stats['virt_stats']['v_average_avg']=[v_average,1] 3402 3403 try: 3404 debug_msg += '\n\n 
Statistics on virtual integration optimization : ' 3405 3406 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 3407 %tuple(stats['virt_stats']['v_frac_max']) 3408 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 3409 %tuple(stats['virt_stats']['v_frac_min']) 3410 debug_msg += '\n Average virt fraction computed %.3f'\ 3411 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1])) 3412 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 3413 (len(excluded_channels),len(all_channels)) 3414 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 3415 %tuple(stats['virt_stats']['v_average_max']) 3416 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 3417 %tuple(stats['virt_stats']['v_ratio_max']) 3418 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 3419 %tuple(stats['virt_stats']['v_ratio_err_max']) 3420 debug_msg += tmpStr 3421 # After all it was decided that it is better not to alarm the user unecessarily 3422 # with such printout of the statistics. 3423 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 3424 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3425 # message += "\n Suspiciously large MC error in :" 3426 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3427 # message += tmpStr 3428 3429 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 3430 %tuple(stats['virt_stats']['v_contr_err_max']) 3431 debug_msg += tmpStr 3432 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 3433 # message += tmpStr 3434 3435 3436 except KeyError: 3437 debug_msg += '\n Could not find statistics on the integration optimization. 
' 3438 3439 # ======================================= 3440 # == aMC@NLO timing profile statistics == 3441 # ======================================= 3442 3443 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 3444 "(?P<time>[\d\+-Eed\.]*)\s*") 3445 3446 for logf in log_GV_files: 3447 logfile=open(logf,'r') 3448 log = logfile.read() 3449 logfile.close() 3450 channel_name = '/'.join(logf.split('/')[-3:-1]) 3451 mint = re.search(mint_search,logf) 3452 if not mint is None: 3453 channel_name = channel_name+' [step %s]'%mint.group('ID') 3454 3455 for time_stats in re.finditer(timing_stat_finder, log): 3456 try: 3457 stats['timings'][time_stats.group('name')][channel_name]+=\ 3458 safe_float(time_stats.group('time')) 3459 except KeyError: 3460 if time_stats.group('name') not in stats['timings'].keys(): 3461 stats['timings'][time_stats.group('name')] = {} 3462 stats['timings'][time_stats.group('name')][channel_name]=\ 3463 safe_float(time_stats.group('time')) 3464 3465 # useful inline function 3466 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 3467 try: 3468 totTimeList = [(time, chan) for chan, time in \ 3469 stats['timings']['Total'].items()] 3470 except KeyError: 3471 totTimeList = [] 3472 3473 totTimeList.sort() 3474 if len(totTimeList)>0: 3475 debug_msg += '\n\n Inclusive timing profile :' 3476 debug_msg += '\n Overall slowest channel %s (%s)'%\ 3477 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 3478 debug_msg += '\n Average channel running time %s'%\ 3479 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 3480 debug_msg += '\n Aggregated total running time %s'%\ 3481 Tstr(sum([el[0] for el in totTimeList])) 3482 else: 3483 debug_msg += '\n\n Inclusive timing profile non available.' 
3484 3485 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 3486 sum(stats['timings'][stat].values()), reverse=True) 3487 for name in sorted_keys: 3488 if name=='Total': 3489 continue 3490 if sum(stats['timings'][name].values())<=0.0: 3491 debug_msg += '\n Zero time record for %s.'%name 3492 continue 3493 try: 3494 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 3495 chan) for chan, time in stats['timings'][name].items()] 3496 except KeyError, ZeroDivisionError: 3497 debug_msg += '\n\n Timing profile for %s unavailable.'%name 3498 continue 3499 TimeList.sort() 3500 debug_msg += '\n Timing profile for <%s> :'%name 3501 try: 3502 debug_msg += '\n Overall fraction of time %.3f %%'%\ 3503 safe_float((100.0*(sum(stats['timings'][name].values())/ 3504 sum(stats['timings']['Total'].values())))) 3505 except KeyError, ZeroDivisionError: 3506 debug_msg += '\n Overall fraction of time unavailable.' 3507 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 3508 (TimeList[-1][0],TimeList[-1][1]) 3509 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 3510 (TimeList[0][0],TimeList[0][1]) 3511 3512 # ============================= 3513 # == log file eror detection == 3514 # ============================= 3515 3516 # Find the number of potential errors found in all log files 3517 # This re is a simple match on a case-insensitve 'error' but there is 3518 # also some veto added for excluding the sentence 3519 # "See Section 6 of paper for error calculation." 3520 # which appear in the header of lhapdf in the logs. 
3521 err_finder = re.compile(\ 3522 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 3523 for log in all_log_files: 3524 logfile=open(log,'r') 3525 nErrors = len(re.findall(err_finder, logfile.read())) 3526 logfile.close() 3527 if nErrors != 0: 3528 stats['Errors'].append((str(log),nErrors)) 3529 3530 nErrors = sum([err[1] for err in stats['Errors']],0) 3531 if nErrors != 0: 3532 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 3533 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 3534 'found in the following log file%s:'%('s' if \ 3535 len(stats['Errors'])>1 else '') 3536 for error in stats['Errors'][:3]: 3537 log_name = '/'.join(error[0].split('/')[-5:]) 3538 debug_msg += '\n > %d error%s in %s'%\ 3539 (error[1],'s' if error[1]>1 else '',log_name) 3540 if len(stats['Errors'])>3: 3541 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 3542 nRemainingLogs = len(stats['Errors'])-3 3543 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 3544 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 3545 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 3546 3547 return message, debug_msg 3548 3549
    def reweight_and_collect_events(self, options, mode, nevents, event_norm):
        """Run the reweighting routines (if requested by the run card) and
        collect the partial event files into a single gzipped LHE file in the
        Events/<run_name> directory.

        options: dict of run options; 'reweightonly' and 'nocompile' are read here.
        mode: run mode, forwarded to print_summary.
        nevents: number of generated events, recorded in the results database.
        event_norm: event-normalisation convention ('sum', 'unity', 'bias' or
            average) — selects the input fed to the collect_events executable.
        Returns the path of the (uncompressed) event file, i.e. the .gz path
        with the extension stripped.
        Raises aMCatNLOError if collect_events did not produce the event file.
        """
        scale_pdf_info=[]
        # reweighting is needed when scale/PDF variations or multiple
        # dynamical scales / PDF sets are requested in the run card
        if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
           len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
            scale_pdf_info = self.run_reweight(options['reweightonly'])
        self.update_status('Collecting events', level='parton', update_results=True)
        misc.compile(['collect_events'],
                    cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile'])
        p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
                stdin=subprocess.PIPE,
                stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
        # the integer written on stdin selects the normalisation convention
        # inside the fortran collect_events program
        if event_norm.lower() == 'sum':
            p.communicate(input = '1\n')
        elif event_norm.lower() == 'unity':
            p.communicate(input = '3\n')
        elif event_norm.lower() == 'bias':
            p.communicate(input = '0\n')
        else:
            p.communicate(input = '2\n')

        # get filename from collect events: the produced file name is the last
        # whitespace-separated token of the log
        filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]

        if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
            raise aMCatNLOError('An error occurred during event generation. ' + \
                    'The event file has not been created. Check collect_events.log')
        evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
        if not options['reweightonly']:
            self.print_summary(options, 2, mode, scale_pdf_info)
        # move the per-channel result files next to the event file
        res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses'))
        for res_file in res_files:
            files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))

        logger.info('The %s file has been generated.\n' % (evt_file))
        self.results.add_detail('nb_event', nevents)
        self.update_status('Events generated', level='parton', update_results=True)
        # strip the trailing '.gz' to return the uncompressed-file path
        return evt_file[:-3]
3593 - def run_mcatnlo(self, evt_file, options):
3594 """runs mcatnlo on the generated event file, to produce showered-events 3595 """ 3596 logger.info('Preparing MCatNLO run') 3597 try: 3598 misc.gunzip(evt_file) 3599 except Exception: 3600 pass 3601 3602 self.banner = banner_mod.Banner(evt_file) 3603 shower = self.banner.get_detail('run_card', 'parton_shower').upper() 3604 3605 #check that the number of split event files divides the number of 3606 # events, otherwise set it to 1 3607 if int(self.banner.get_detail('run_card', 'nevents') / \ 3608 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \ 3609 != self.banner.get_detail('run_card', 'nevents'): 3610 logger.warning(\ 3611 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \ 3612 'Setting it to 1.') 3613 self.shower_card['nsplit_jobs'] = 1 3614 3615 # don't split jobs if the user asks to shower only a part of the events 3616 if self.shower_card['nevents'] > 0 and \ 3617 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \ 3618 self.shower_card['nsplit_jobs'] != 1: 3619 logger.warning(\ 3620 'Only a part of the events will be showered.\n' + \ 3621 'Setting nsplit_jobs in the shower_card to 1.') 3622 self.shower_card['nsplit_jobs'] = 1 3623 3624 self.banner_to_mcatnlo(evt_file) 3625 3626 # if fastjet has to be linked (in extralibs) then 3627 # add lib /include dirs for fastjet if fastjet-config is present on the 3628 # system, otherwise add fjcore to the files to combine 3629 if 'fastjet' in self.shower_card['extralibs']: 3630 #first, check that stdc++ is also linked 3631 if not 'stdc++' in self.shower_card['extralibs']: 3632 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS') 3633 self.shower_card['extralibs'] += ' stdc++' 3634 # then check if options[fastjet] corresponds to a valid fj installation 3635 try: 3636 #this is for a complete fj installation 3637 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \ 3638 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 3639 
output, error = p.communicate() 3640 #remove the line break from output (last character) 3641 output = output[:-1] 3642 # add lib/include paths 3643 if not pjoin(output, 'lib') in self.shower_card['extrapaths']: 3644 logger.warning('Linking FastJet: updating EXTRAPATHS') 3645 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib') 3646 if not pjoin(output, 'include') in self.shower_card['includepaths']: 3647 logger.warning('Linking FastJet: updating INCLUDEPATHS') 3648 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include') 3649 # to be changed in the fortran wrapper 3650 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ' 3651 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ' 3652 except Exception: 3653 logger.warning('Linking FastJet: using fjcore') 3654 # this is for FJcore, so no FJ library has to be linked 3655 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '') 3656 if not 'fjcore.o' in self.shower_card['analyse']: 3657 self.shower_card['analyse'] += ' fjcore.o' 3658 # to be changed in the fortran wrapper 3659 include_line = '#include "fjcore.hh"//INCLUDE_FJ' 3660 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ' 3661 # change the fortran wrapper with the correct namespaces/include 3662 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n') 3663 for line in fjwrapper_lines: 3664 if '//INCLUDE_FJ' in line: 3665 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line 3666 if '//NAMESPACE_FJ' in line: 3667 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line 3668 with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock: 3669 fsock.write('\n'.join(fjwrapper_lines) + '\n') 3670 3671 extrapaths = self.shower_card['extrapaths'].split() 3672 3673 # check that the path needed by HW++ and PY8 are set if one uses these shower 3674 if shower in ['HERWIGPP', 'PYTHIA8']: 3675 path_dict = 
{'HERWIGPP': ['hepmc_path', 3676 'thepeg_path', 3677 'hwpp_path'], 3678 'PYTHIA8': ['pythia8_path']} 3679 3680 if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]): 3681 raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \ 3682 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower]))) 3683 3684 if shower == 'HERWIGPP': 3685 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib')) 3686 self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib') 3687 3688 # add the HEPMC path of the pythia8 installation 3689 if shower == 'PYTHIA8': 3690 hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'], 3691 stdout = subprocess.PIPE).stdout.read().strip() 3692 #this gives all the flags, i.e. 3693 #-I/Path/to/HepMC/include -L/Path/to/HepMC/lib -lHepMC 3694 # we just need the path to the HepMC libraries 3695 extrapaths.append(hepmc.split()[1].replace('-L', '')) 3696 3697 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 3698 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib')) 3699 3700 # set the PATH for the dynamic libraries 3701 if sys.platform == 'darwin': 3702 ld_library_path = 'DYLD_LIBRARY_PATH' 3703 else: 3704 ld_library_path = 'LD_LIBRARY_PATH' 3705 if ld_library_path in os.environ.keys(): 3706 paths = os.environ[ld_library_path] 3707 else: 3708 paths = '' 3709 paths += ':' + ':'.join(extrapaths) 3710 os.putenv(ld_library_path, paths) 3711 3712 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat') 3713 self.shower_card.write_card(shower, shower_card_path) 3714 3715 # overwrite if shower_card_set.dat exists in MCatNLO 3716 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')): 3717 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'), 3718 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')) 3719 
3720 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log') 3721 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower') 3722 3723 3724 # libdl may be needded for pythia 82xx 3725 #if shower == 'PYTHIA8' and not \ 3726 # os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \ 3727 # 'dl' not in self.shower_card['extralibs'].split(): 3728 # # 'dl' has to be linked with the extralibs 3729 # self.shower_card['extralibs'] += ' dl' 3730 # logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \ 3731 # "It is needed for the correct running of PY8.2xx.\n" + \ 3732 # "If this library cannot be found on your system, a crash will occur.") 3733 3734 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'), 3735 stderr=open(mcatnlo_log, 'w'), 3736 cwd=pjoin(self.me_dir, 'MCatNLO'), 3737 close_fds=True) 3738 3739 exe = 'MCATNLO_%s_EXE' % shower 3740 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \ 3741 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')): 3742 print open(mcatnlo_log).read() 3743 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log) 3744 logger.info(' ... 
done') 3745 3746 # create an empty dir where to run 3747 count = 1 3748 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3749 (shower, count))): 3750 count += 1 3751 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3752 (shower, count)) 3753 os.mkdir(rundir) 3754 files.cp(shower_card_path, rundir) 3755 3756 #look for the event files (don't resplit if one asks for the 3757 # same number of event files as in the previous run) 3758 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3759 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']: 3760 logger.info('Cleaning old files and splitting the event file...') 3761 #clean the old files 3762 files.rm([f for f in event_files if 'events.lhe' not in f]) 3763 if self.shower_card['nsplit_jobs'] > 1: 3764 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile']) 3765 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')], 3766 stdin=subprocess.PIPE, 3767 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'), 3768 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3769 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs']) 3770 logger.info('Splitting done.') 3771 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3772 3773 event_files.sort() 3774 3775 self.update_status('Showering events...', level='shower') 3776 logger.info('(Running in %s)' % rundir) 3777 if shower != 'PYTHIA8': 3778 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir) 3779 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir) 3780 else: 3781 # special treatment for pythia8 3782 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir) 3783 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir) 3784 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx 3785 
files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir) 3786 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir) 3787 else: # this is PY8.2xxx 3788 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir) 3789 #link the hwpp exe in the rundir 3790 if shower == 'HERWIGPP': 3791 try: 3792 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')): 3793 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir) 3794 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')): 3795 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir) 3796 except Exception: 3797 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.') 3798 3799 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')): 3800 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir) 3801 3802 files.ln(evt_file, rundir, 'events.lhe') 3803 for i, f in enumerate(event_files): 3804 files.ln(f, rundir,'events_%d.lhe' % (i + 1)) 3805 3806 if not self.shower_card['analyse']: 3807 # an hep/hepmc file as output 3808 out_id = 'HEP' 3809 else: 3810 # one or more .top file(s) as output 3811 if "HwU" in self.shower_card['analyse']: 3812 out_id = 'HWU' 3813 else: 3814 out_id = 'TOP' 3815 3816 # write the executable 3817 with open(pjoin(rundir, 'shower.sh'), 'w') as fsock: 3818 # set the PATH for the dynamic libraries 3819 if sys.platform == 'darwin': 3820 ld_library_path = 'DYLD_LIBRARY_PATH' 3821 else: 3822 ld_library_path = 'LD_LIBRARY_PATH' 3823 fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \ 3824 % {'ld_library_path': ld_library_path, 3825 'extralibs': ':'.join(extrapaths)}) 3826 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')]) 3827 3828 if event_files: 3829 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \ 3830 for i in range(len(event_files))] 3831 else: 3832 arg_list = [[shower, 
out_id, self.run_name]] 3833 3834 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower') 3835 self.njobs = 1 3836 self.wait_for_complete('shower') 3837 3838 # now collect the results 3839 message = '' 3840 warning = '' 3841 to_gzip = [evt_file] 3842 if out_id == 'HEP': 3843 #copy the showered stdhep/hepmc file back in events 3844 if shower in ['PYTHIA8', 'HERWIGPP']: 3845 hep_format = 'HEPMC' 3846 ext = 'hepmc' 3847 else: 3848 hep_format = 'StdHEP' 3849 ext = 'hep' 3850 3851 hep_file = '%s_%s_0.%s.gz' % \ 3852 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext) 3853 count = 0 3854 3855 # find the first available name for the output: 3856 # check existing results with or without event splitting 3857 while os.path.exists(hep_file) or \ 3858 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) : 3859 count +=1 3860 hep_file = '%s_%s_%d.%s.gz' % \ 3861 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext) 3862 3863 try: 3864 if self.shower_card['nsplit_jobs'] == 1: 3865 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file) 3866 message = ('The file %s has been generated. 
\nIt contains showered' + \ 3867 ' and hadronized events in the %s format obtained' + \ 3868 ' showering the parton-level event file %s.gz with %s') % \ 3869 (hep_file, hep_format, evt_file, shower) 3870 else: 3871 hep_list = [] 3872 for i in range(self.shower_card['nsplit_jobs']): 3873 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext))) 3874 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1]) 3875 message = ('The following files have been generated:\n %s\nThey contain showered' + \ 3876 ' and hadronized events in the %s format obtained' + \ 3877 ' showering the (split) parton-level event file %s.gz with %s') % \ 3878 ('\n '.join(hep_list), hep_format, evt_file, shower) 3879 3880 except OSError, IOError: 3881 raise aMCatNLOError('No file has been generated, an error occurred.'+\ 3882 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log')) 3883 3884 # run the plot creation in a secure way 3885 if hep_format == 'StdHEP': 3886 try: 3887 self.do_plot('%s -f' % self.run_name) 3888 except Exception, error: 3889 logger.info("Fail to make the plot. 
Continue...") 3890 pass 3891 3892 elif out_id == 'TOP' or out_id == 'HWU': 3893 #copy the topdrawer or HwU file(s) back in events 3894 if out_id=='TOP': 3895 ext='top' 3896 elif out_id=='HWU': 3897 ext='HwU' 3898 topfiles = [] 3899 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)] 3900 for top_tar in top_tars: 3901 topfiles.extend(top_tar.getnames()) 3902 3903 # safety check 3904 if len(top_tars) != self.shower_card['nsplit_jobs']: 3905 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \ 3906 (self.shower_card['nsplit_jobs'], len(top_tars))) 3907 3908 # find the first available name for the output: 3909 # check existing results with or without event splitting 3910 filename = 'plot_%s_%d_' % (shower, 1) 3911 count = 1 3912 while os.path.exists(pjoin(self.me_dir, 'Events', 3913 self.run_name, '%s0.%s' % (filename,ext))) or \ 3914 os.path.exists(pjoin(self.me_dir, 'Events', 3915 self.run_name, '%s0__1.%s' % (filename,ext))): 3916 count += 1 3917 filename = 'plot_%s_%d_' % (shower, count) 3918 3919 if out_id=='TOP': 3920 hist_format='TopDrawer format' 3921 elif out_id=='HWU': 3922 hist_format='HwU and GnuPlot formats' 3923 3924 if not topfiles: 3925 # if no topfiles are found just warn the user 3926 warning = 'No .top file has been generated. 
For the results of your ' +\ 3927 'run, please check inside %s' % rundir 3928 elif self.shower_card['nsplit_jobs'] == 1: 3929 # only one job for the shower 3930 top_tars[0].extractall(path = rundir) 3931 plotfiles = [] 3932 for i, file in enumerate(topfiles): 3933 if out_id=='TOP': 3934 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3935 '%s%d.top' % (filename, i)) 3936 files.mv(pjoin(rundir, file), plotfile) 3937 elif out_id=='HWU': 3938 out=pjoin(self.me_dir,'Events', 3939 self.run_name,'%s%d'% (filename,i)) 3940 histos=[{'dirname':pjoin(rundir,file)}] 3941 self.combine_plots_HwU(histos,out) 3942 try: 3943 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\ 3944 stdout=os.open(os.devnull, os.O_RDWR),\ 3945 stderr=os.open(os.devnull, os.O_RDWR),\ 3946 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3947 except Exception: 3948 pass 3949 plotfile=pjoin(self.me_dir,'Events',self.run_name, 3950 '%s%d.HwU'% (filename,i)) 3951 plotfiles.append(plotfile) 3952 3953 ffiles = 'files' 3954 have = 'have' 3955 if len(plotfiles) == 1: 3956 ffiles = 'file' 3957 have = 'has' 3958 3959 message = ('The %s %s %s been generated, with histograms in the' + \ 3960 ' %s, obtained by showering the parton-level' + \ 3961 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \ 3962 hist_format, evt_file, shower) 3963 else: 3964 # many jobs for the shower have been run 3965 topfiles_set = set(topfiles) 3966 plotfiles = [] 3967 for j, top_tar in enumerate(top_tars): 3968 top_tar.extractall(path = rundir) 3969 for i, file in enumerate(topfiles_set): 3970 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3971 '%s%d__%d.%s' % (filename, i, j + 1,ext)) 3972 files.mv(pjoin(rundir, file), plotfile) 3973 plotfiles.append(plotfile) 3974 3975 # check if the user asked to combine the .top into a single file 3976 if self.shower_card['combine_td']: 3977 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities')) 3978 3979 if self.banner.get('run_card', 
'event_norm').lower() == 'sum': 3980 norm = 1. 3981 else: 3982 norm = 1./float(self.shower_card['nsplit_jobs']) 3983 3984 plotfiles2 = [] 3985 for i, file in enumerate(topfiles_set): 3986 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \ 3987 for j in range(self.shower_card['nsplit_jobs'])] 3988 if out_id=='TOP': 3989 infile="%d\n%s\n%s\n" % \ 3990 (self.shower_card['nsplit_jobs'], 3991 '\n'.join(filelist), 3992 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs'])) 3993 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')], 3994 stdin=subprocess.PIPE, 3995 stdout=os.open(os.devnull, os.O_RDWR), 3996 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3997 p.communicate(input = infile) 3998 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'), 3999 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 4000 elif out_id=='HWU': 4001 out=pjoin(self.me_dir,'Events', 4002 self.run_name,'%s%d'% (filename,i)) 4003 histos=[] 4004 norms=[] 4005 for plotfile in plotfiles: 4006 histos.append({'dirname':plotfile}) 4007 norms.append(norm) 4008 self.combine_plots_HwU(histos,out,normalisation=norms) 4009 try: 4010 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\ 4011 stdout=os.open(os.devnull, os.O_RDWR),\ 4012 stderr=os.open(os.devnull, os.O_RDWR),\ 4013 cwd=pjoin(self.me_dir, 'Events',self.run_name)) 4014 except Exception: 4015 pass 4016 4017 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext))) 4018 tar = tarfile.open( 4019 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz') 4020 for f in filelist: 4021 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f) 4022 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist]) 4023 4024 tar.close() 4025 4026 ffiles = 'files' 4027 have = 'have' 4028 if len(plotfiles2) == 1: 4029 ffiles = 'file' 4030 have = 'has' 4031 4032 message = ('The %s %s %s been generated, with histograms in the' 
+ \ 4033 ' %s, obtained by showering the parton-level' + \ 4034 ' file %s.gz with %s.\n' + \ 4035 'The files from the different shower ' + \ 4036 'jobs (before combining them) can be found inside %s.') % \ 4037 (ffiles, ', '.join(plotfiles2), have, hist_format,\ 4038 evt_file, shower, 4039 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2])) 4040 4041 else: 4042 message = ('The following files have been generated:\n %s\n' + \ 4043 'They contain histograms in the' + \ 4044 ' %s, obtained by showering the parton-level' + \ 4045 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \ 4046 hist_format, evt_file, shower) 4047 4048 # Now arxiv the shower card used if RunMaterial is present 4049 run_dir_path = pjoin(rundir, self.run_name) 4050 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')): 4051 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path) 4052 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'), 4053 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\ 4054 %(shower, count))) 4055 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], 4056 cwd=run_dir_path) 4057 shutil.rmtree(pjoin(run_dir_path,'RunMaterial')) 4058 # end of the run, gzip files and print out the message/warning 4059 for f in to_gzip: 4060 misc.gzip(f) 4061 if message: 4062 logger.info(message) 4063 if warning: 4064 logger.warning(warning) 4065 4066 self.update_status('Run complete', level='shower', update_results=True)
4067 4068 ############################################################################
    def set_run_name(self, name, tag=None, level='parton', reload_card=False):
        """Define the run name, the run_tag, the banner and the results.

        name: name of the run.
        tag: explicit tag to use; if None a new tag is picked only when the
            requested level already has data in the last tag of this run.
        level: stage being (re)run ('parton', 'shower', 'delphes',
            'madanalysis5_hadron', 'plot') — controls which existing tags force
            a tag switch.
        reload_card: if True, re-read run_card.dat even when the name is
            unchanged.
        Returns None for level 'parton' (or when name is unchanged); otherwise
        the tag of the previous run providing the data required by this level.
        """

        # levels whose existing data force a tag change when re-running:
        # new_run -> previous-run levels requiring a fresh tag
        upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'],
                       'shower': ['shower','delphes','madanalysis5_hadron'],
                       'delphes':['delphes'],
                       'madanalysis5_hadron':['madanalysis5_hadron'],
                       'plot':[]}

        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            # check if we need to change the tag
            if tag:
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                for tag in upgrade_tag[level]:
                    if getattr(self.results[self.run_name][-1], tag):
                        # this level already has data -> pick a fresh tag
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return # Nothing to do anymore

        # save/clean previous run
        if self.run_name:
            self.store_result()
        # store new name
        self.run_name = name

        # Read run_card
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # First call for this run -> set the banner
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if 'mgruncard' in self.banner:
            # prefer the run card stored in the recovered banner
            self.run_card = self.banner.charge_card('run_card')
        if tag:
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level =='parton':
            pass # No results yet, so current tag is fine
        elif not self.run_name in self.results:
            # This is only for the case when you want to trick the interface
            logger.warning('Trying to run data on unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            for tag in upgrade_tag[level]:

                if getattr(self.results[self.run_name][-1], tag):
                    # LEVEL is already defined in the last tag -> need to switch tag
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # We can add the results to the current run
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag # ensure that run_tag is correct


        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # Return the tag of the previous run having the required data for this
        # tag/run to work well.
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            # walk the tags backwards and return the most recent one with
            # pythia-level data
            for i in range(-1,-len(self.results[self.run_name])-1,-1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']
4159 - def store_result(self):
4160 """ tar the pythia results. This is done when we are quite sure that 4161 the pythia output will not be use anymore """ 4162 4163 if not self.run_name: 4164 return 4165 4166 self.results.save() 4167 4168 if not self.to_store: 4169 return 4170 4171 if 'event' in self.to_store: 4172 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')): 4173 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')): 4174 self.update_status('gzipping output file: events.lhe', level='parton', error=True) 4175 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4176 else: 4177 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4178 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')): 4179 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe')) 4180 4181 4182 tag = self.run_card['run_tag'] 4183 4184 self.to_store = []
4185 4186 4187 ############################################################################
4188 - def get_Gdir(self, Pdir=None):
4189 """get the list of Gdirectory if not yet saved.""" 4190 4191 if hasattr(self, "Gdirs"): 4192 if self.me_dir in self.Gdirs: 4193 if Pdir is None: 4194 return sum(self.Gdirs.values()) 4195 else: 4196 return self.Gdirs[Pdir] 4197 4198 Pdirs = self.get_Pdir() 4199 Gdirs = {self.me_dir:[]} 4200 for P in Pdirs: 4201 Gdirs[P] = [pjoin(P,G) for G in os.listdir(P) if G.startswith('G') and 4202 os.path.isdir(pjoin(P,G))] 4203 4204 self.Gdirs = Gdirs 4205 return self.getGdir(Pdir)
4206 4207
4208 - def get_init_dict(self, evt_file):
4209 """reads the info in the init block and returns them in a dictionary""" 4210 ev_file = open(evt_file) 4211 init = "" 4212 found = False 4213 while True: 4214 line = ev_file.readline() 4215 if "<init>" in line: 4216 found = True 4217 elif found and not line.startswith('#'): 4218 init += line 4219 if "</init>" in line or "<event>" in line: 4220 break 4221 ev_file.close() 4222 4223 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 4224 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 4225 # these are not included (so far) in the init_dict 4226 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 4227 4228 init_dict = {} 4229 init_dict['idbmup1'] = int(init.split()[0]) 4230 init_dict['idbmup2'] = int(init.split()[1]) 4231 init_dict['ebmup1'] = float(init.split()[2]) 4232 init_dict['ebmup2'] = float(init.split()[3]) 4233 init_dict['pdfgup1'] = int(init.split()[4]) 4234 init_dict['pdfgup2'] = int(init.split()[5]) 4235 init_dict['pdfsup1'] = int(init.split()[6]) 4236 init_dict['pdfsup2'] = int(init.split()[7]) 4237 init_dict['idwtup'] = int(init.split()[8]) 4238 init_dict['nprup'] = int(init.split()[9]) 4239 4240 return init_dict
4241 4242
    def banner_to_mcatnlo(self, evt_file):
        """creates the mcatnlo input script using the values set in the header of the event_file.
        It also checks if the lhapdf library is used

        Writes MCatNLO/banner.dat as a sequence of KEY=value lines (shower
        mode, beam/PDF setup, masses and widths) and returns the shower
        name taken from the run_card.
        """

        shower = self.banner.get('run_card', 'parton_shower').upper()
        pdlabel = self.banner.get('run_card', 'pdlabel')
        itry = 0  # NOTE(review): appears unused in this method
        nevents = self.shower_card['nevents']
        init_dict = self.get_init_dict(evt_file)

        # cap the number of showered events at the number generated
        if nevents < 0 or \
           nevents > self.banner.get_detail('run_card', 'nevents'):
            nevents = self.banner.get_detail('run_card', 'nevents')

        # each split job showers an equal share of the events
        nevents = nevents / self.shower_card['nsplit_jobs']

        # Monte-Carlo masses as stored in the banner: "pdg mass" per line
        mcmass_dict = {}
        for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
            pdg = int(line.split()[0])
            mass = float(line.split()[1])
            mcmass_dict[pdg] = mass

        content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
        content += 'NEVENTS=%d\n' % nevents
        content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
                                             self.shower_card['nsplit_jobs'])
        content += 'MCMODE=%s\n' % shower
        content += 'PDLABEL=%s\n' % pdlabel
        content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value
        #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid')
        #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
        content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
        content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
        content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
        content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
        content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
        content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
        try:
            content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
            content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
        except KeyError:
            # no Higgs in the model/param_card: fall back to fixed defaults
            content += 'HGGMASS=120.\n'
            content += 'HGGWIDTH=0.00575308848\n'
        content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
        content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
        content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
        content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
        content += 'DMASS=%s\n' % mcmass_dict[1]
        content += 'UMASS=%s\n' % mcmass_dict[2]
        content += 'SMASS=%s\n' % mcmass_dict[3]
        content += 'CMASS=%s\n' % mcmass_dict[4]
        content += 'BMASS=%s\n' % mcmass_dict[5]
        try:
            content += 'EMASS=%s\n' % mcmass_dict[11]
            content += 'MUMASS=%s\n' % mcmass_dict[13]
            content += 'TAUMASS=%s\n' % mcmass_dict[15]
        except KeyError:
            # this is for backward compatibility: older banners lack lepton
            # masses, so read them from the shower-specific include file
            # (Fortran "d" exponents are converted to "e")
            mcmass_lines = [l for l in \
                    open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
                            ).read().split('\n') if l]
            new_mcmass_dict = {}
            for l in mcmass_lines:
                key, val = l.split('=')
                new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
            content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
            content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
            content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']

        content += 'GMASS=%s\n' % mcmass_dict[21]
        content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
        # check if need to link lhapdf
        if int(self.shower_card['pdfcode']) > 1 or \
            (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \
            shower=='HERWIGPP' :
            # Use LHAPDF (should be correctly installed, because
            # either events were already generated with them, or the
            # user explicitly gives an LHAPDF number in the
            # shower_card).
            self.link_lhapdf(pjoin(self.me_dir, 'lib'))
            lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                          stdout = subprocess.PIPE).stdout.read().strip()
            content += 'LHAPDFPATH=%s\n' % lhapdfpath
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            if self.shower_card['pdfcode']==0:
                lhaid_list = ''
                content += ''
            elif self.shower_card['pdfcode']==1:
                # pdfcode 1: reuse the PDF from the event generation
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
            else:
                lhaid_list = [abs(int(self.shower_card['pdfcode']))]
                content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
        elif int(self.shower_card['pdfcode'])==1 or \
            int(self.shower_card['pdfcode'])==-1 and True:
            # NOTE(review): `and` binds tighter than `or` here, so this reads
            # (pdfcode==1) or ((pdfcode==-1) and True) — confirm the trailing
            # `and True` is intentional.
            # Try to use LHAPDF because user wants to use the same PDF
            # as was used for the event generation. However, for the
            # event generation, LHAPDF was not used, so non-trivial to
            # see if if LHAPDF is available with the corresponding PDF
            # set. If not found, give a warning and use build-in PDF
            # set instead.
            try:
                lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                              stdout = subprocess.PIPE).stdout.read().strip()
                self.link_lhapdf(pjoin(self.me_dir, 'lib'))
                content += 'LHAPDFPATH=%s\n' % lhapdfpath
                pdfsetsdir = self.get_lhapdf_pdfsetsdir()
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
                self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
            except Exception:
                logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
                                ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
                                ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
                                ' same set as was used in the event generation install LHAPDF and set the path using'+\
                                ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
                content += 'LHAPDFPATH=\n'
                content += 'PDFCODE=0\n'
        else:
            # built-in PDFs for the shower
            content += 'LHAPDFPATH=\n'
            content += 'PDFCODE=0\n'

        content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
        content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
        # add the pythia8/hwpp path(s)
        if self.options['pythia8_path']:
            content+='PY8PATH=%s\n' % self.options['pythia8_path']
        if self.options['hwpp_path']:
            content+='HWPPPATH=%s\n' % self.options['hwpp_path']
        if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']:
            content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
        if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']:
            content+='HEPMCPATH=%s\n' % self.options['hepmc_path']

        output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
        output.write(content)
        output.close()
        return shower
4382 4383
    def run_reweight(self, only):
        """runs the reweight_xsec_events executables on each sub-event file generated
        to compute on the fly scale and/or PDF uncertainities

        :param only: if true, only the reweighting is being (re)done, so the
            original nevents_unweighted bookkeeping file is restored first.
        :returns: the result of pdf_scale_from_reweighting on the processed
            event files and their weights.
        :raises aMCatNLOError: if bookkeeping info is missing or a produced
            .rwgt file is not properly terminated.
        """
        logger.info('   Doing reweight')

        nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
        # if only doing reweight, copy back the nevents_unweighted file
        if only:
            if os.path.exists(nev_unw + '.orig'):
                files.cp(nev_unw + '.orig', nev_unw)
            else:
                raise aMCatNLOError('Cannot find event file information')

        #read the nevents_unweighted file to get the list of event files
        file = open(nev_unw)
        lines = file.read().split('\n')
        file.close()
        # make copy of the original nevent_unweighted file
        files.cp(nev_unw, nev_unw + '.orig')
        # loop over lines (all but the last one whith is empty) and check that the
        # number of events is not 0
        evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
        evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0']
        # for bias normalisation every event carries weight 1/nevents
        if self.run_card['event_norm'].lower()=='bias' and self.run_card['nevents'] != 0:
            evt_wghts[:]=[1./float(self.run_card['nevents']) for wgt in evt_wghts]
        #prepare the job_dict: one reweight job per channel directory,
        # with the executable linked into that directory
        job_dict = {}
        exe = 'reweight_xsec_events.local'
        for i, evt_file in enumerate(evt_files):
            path, evt = os.path.split(evt_file)
            files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
                     pjoin(self.me_dir, 'SubProcesses', path))
            job_dict[path] = [exe]

        self.run_all(job_dict, [[evt, '1']], 'Running reweight')

        #check that the new event files are complete (a valid LHE file must
        # end with the closing LesHouchesEvents tag)
        for evt_file in evt_files:
            last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
                    pjoin(self.me_dir, 'SubProcesses', evt_file)], \
                    stdout = subprocess.PIPE).stdout.read().strip()
            if last_line != "</LesHouchesEvents>":
                raise aMCatNLOError('An error occurred during reweight. Check the' + \
                        '\'reweight_xsec_events.output\' files inside the ' + \
                        '\'SubProcesses/P*/G*/ directories for details')

        #update file name in nevents_unweighted: event files now carry .rwgt
        newfile = open(nev_unw, 'w')
        for line in lines:
            if line:
                newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
        newfile.close()

        return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
4438
    def pdf_scale_from_reweighting(self, evt_files,evt_wghts):
        """This function takes the files with the scale and pdf values
        written by the reweight_xsec_events.f code
        (P*/G*/scale_pdf_dependence.dat) and computes the overall
        scale and PDF uncertainty (the latter is computed using the
        Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
        and returns it in percents.  The expected format of the file
        is: n_scales xsec_scale_central xsec_scale1 ...  n_pdf
        xsec_pdf0 xsec_pdf1 ....

        :param evt_files: event-file paths relative to SubProcesses
        :param evt_wghts: one weight per event file, applied to the
            cross sections read from the corresponding .dat file
        :returns: [scale_info, pdf_info] — two lists of dictionaries,
            one entry per dynamical-scale choice / PDF set.
        """

        # accumulate (weighted) cross sections across all event files,
        # per scale choice and per PDF set
        scales=[]
        pdfs=[]
        for i,evt_file in enumerate(evt_files):
            path, evt=os.path.split(evt_file)
            with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
                data_line=f.readline()
                if "scale variations:" in data_line:
                    for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
                        data_line = f.readline().split()
                        # Fortran double-precision exponents use "D"
                        scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
                        except IndexError:
                            # first file: initialise the accumulator
                            scales+=[scales_this]
                    data_line=f.readline()
                if "pdf variations:" in data_line:
                    for j,pdf in enumerate(self.run_card['lhaid']):
                        data_line = f.readline().split()
                        pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
                        except IndexError:
                            pdfs+=[pdfs_this]

        # get the scale uncertainty in percent
        scale_info=[]
        for j,scale in enumerate(scales):
            # first entry is the central-scale cross section
            s_cen=scale[0]
            if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
                # max and min of the full envelope
                s_max=(max(scale)/s_cen-1)*100
                s_min=(1-min(scale)/s_cen)*100
                # ren and fac scale dependence added in quadrature
                ren_var=[]
                fac_var=[]
                for i in range(len(self.run_card['rw_rscale'])):
                    ren_var.append(scale[i]-s_cen) # central fac scale
                for i in range(len(self.run_card['rw_fscale'])):
                    fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen) # central ren scale
                s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
                s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
                s_size=len(scale)
            else:
                s_max=0.0
                s_min=0.0
                s_max_q=0.0
                s_min_q=0.0
                s_size=len(scale)
            scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
                               'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
                               'label':self.run_card['dynamical_scale_choice'][j], \
                               'unc':self.run_card['reweight_scale'][j]})

        # check if we can use LHAPDF to compute the PDF uncertainty:
        # look for the python module in <libdir>/pythonX.Y/site-packages,
        # then in <libdir>64/..., then on the default sys.path
        if any(self.run_card['reweight_pdf']):
            use_lhapdf=False
            lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\
                                           stdout=subprocess.PIPE).stdout.read().strip()

            try:
                candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \
                            if os.path.isdir(pjoin(lhapdf_libdir,dirname))]
            except OSError:
                candidates=[]
            for candidate in candidates:
                if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')):
                    sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages'))
                    try:
                        import lhapdf
                        use_lhapdf=True
                        break
                    except ImportError:
                        # not importable from here: undo the path change
                        sys.path.pop(0)
                        continue

            if not use_lhapdf:
                try:
                    candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \
                                if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))]
                except OSError:
                    candidates=[]
                for candidate in candidates:
                    if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')):
                        sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages'))
                        try:
                            import lhapdf
                            use_lhapdf=True
                            break
                        except ImportError:
                            sys.path.pop(0)
                            continue

            if not use_lhapdf:
                # last resort: maybe lhapdf is already on the default path
                try:
                    import lhapdf
                    use_lhapdf=True
                except ImportError:
                    logger.warning("Failed to access python version of LHAPDF: "\
                                   "cannot compute PDF uncertainty from the "\
                                   "weights in the events. The weights in the LHE " \
                                   "event files will still cover all PDF set members, "\
                                   "but there will be no PDF uncertainty printed in the run summary. \n "\
                                   "If the python interface to LHAPDF is available on your system, try "\
                                   "adding its location to the PYTHONPATH environment variable and the"\
                                   "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
                    use_lhapdf=False

        # turn off lhapdf printing any messages
        if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)

        pdf_info=[]
        for j,pdfset in enumerate(pdfs):
            # first entry is the central-member cross section
            p_cen=pdfset[0]
            if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
                if use_lhapdf:
                    pdfsetname=self.run_card['lhapdfsetname'][j]
                    try:
                        # let LHAPDF combine the members according to the
                        # set's own error prescription
                        p=lhapdf.getPDFSet(pdfsetname)
                        ep=p.uncertainty(pdfset,-1)
                        p_cen=ep.central
                        p_min=abs(ep.errminus/p_cen)*100
                        p_max=abs(ep.errplus/p_cen)*100
                        p_type=p.errorType
                        p_size=p.size
                        p_conf=p.errorConfLevel
                    except:
                        logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                        p_min=0.0
                        p_max=0.0
                        p_type='unknown'
                        p_conf='unknown'
                        p_size=len(pdfset)
                else:
                    p_min=0.0
                    p_max=0.0
                    p_type='unknown'
                    p_conf='unknown'
                    p_size=len(pdfset)
                    pdfsetname=self.run_card['lhaid'][j]
            else:
                p_min=0.0
                p_max=0.0
                p_type='none'
                p_conf='unknown'
                p_size=len(pdfset)
                pdfsetname=self.run_card['lhaid'][j]
            pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
                             'unc':p_type, 'name':pdfsetname, 'size':p_size, \
                             'label':self.run_card['lhaid'][j], 'conf':p_conf})

        scale_pdf_info=[scale_info,pdf_info]
        return scale_pdf_info
4601 4602
4603 - def wait_for_complete(self, run_type):
4604 """this function waits for jobs on cluster to complete their run.""" 4605 starttime = time.time() 4606 #logger.info(' Waiting for submitted jobs to complete') 4607 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 4608 starttime=starttime, level='parton', update_results=True) 4609 try: 4610 self.cluster.wait(self.me_dir, update_status) 4611 except: 4612 self.cluster.remove() 4613 raise
4614
4615 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4616 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 4617 self.ijob = 0 4618 if run_type != 'shower': 4619 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 4620 for args in arg_list: 4621 for Pdir, jobs in job_dict.items(): 4622 for job in jobs: 4623 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 4624 if self.cluster_mode == 2: 4625 time.sleep(1) # security to allow all jobs to be launched 4626 else: 4627 self.njobs = len(arg_list) 4628 for args in arg_list: 4629 [(cwd, exe)] = job_dict.items() 4630 self.run_exe(exe, args, run_type, cwd) 4631 4632 self.wait_for_complete(run_type)
4633 4634 4635
4636 - def check_event_files(self,jobs):
4637 """check the integrity of the event files after splitting, and resubmit 4638 those which are not nicely terminated""" 4639 jobs_to_resubmit = [] 4640 for job in jobs: 4641 last_line = '' 4642 try: 4643 last_line = subprocess.Popen( 4644 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \ 4645 stdout = subprocess.PIPE).stdout.read().strip() 4646 except IOError: 4647 pass 4648 if last_line != "</LesHouchesEvents>": 4649 jobs_to_resubmit.append(job) 4650 self.njobs = 0 4651 if jobs_to_resubmit: 4652 run_type = 'Resubmitting broken jobs' 4653 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 4654 for job in jobs_to_resubmit: 4655 logger.debug('Resubmitting ' + job['dirname'] + '\n') 4656 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4657 4658
4659 - def find_jobs_to_split(self, pdir, job, arg):
4660 """looks into the nevents_unweighed_splitted file to check how many 4661 split jobs are needed for this (pdir, job). arg is F, B or V""" 4662 # find the number of the integration channel 4663 splittings = [] 4664 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 4665 pattern = re.compile('for i in (\d+) ; do') 4666 match = re.search(pattern, ajob) 4667 channel = match.groups()[0] 4668 # then open the nevents_unweighted_splitted file and look for the 4669 # number of splittings to be done 4670 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 4671 # This skips the channels with zero events, because they are 4672 # not of the form GFXX_YY, but simply GFXX 4673 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 4674 pjoin(pdir, 'G%s%s' % (arg,channel))) 4675 matches = re.findall(pattern, nevents_file) 4676 for m in matches: 4677 splittings.append(m) 4678 return splittings
4679 4680
    def run_exe(self, exe, args, run_type, cwd=None):
        """this basic function launch locally/on cluster exe with args as argument.

        Dispatches on cluster_mode (0 = serial local run) and on the
        executable name ('reweight', 'ajob', 'shower', or generic) to build
        the proper input/output file lists for cluster submission.
        :raises aMCatNLOError: if the executable cannot be found, or for
            invalid shower arguments / missing event files.
        """
        # first test that exe exists:
        execpath = None
        if cwd and os.path.exists(pjoin(cwd, exe)):
            execpath = pjoin(cwd, exe)
        elif not cwd and os.path.exists(exe):
            execpath = exe
        else:
            raise aMCatNLOError('Cannot find executable %s in %s' \
                % (exe, os.getcwd()))
        # check that the executable has exec permissions
        if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
            subprocess.call(['chmod', '+x', exe], cwd=cwd)
        # finally run it
        if self.cluster_mode == 0:
            #this is for the serial run
            misc.call(['./'+exe] + args, cwd=cwd)
            self.ijob += 1
            # progress report: (pending, running, done, run_type)
            self.update_status((max([self.njobs - self.ijob - 1, 0]),
                                min([1, self.njobs - self.ijob]),
                                self.ijob, run_type), level='parton')

        #this is for the cluster/multicore run
        elif 'reweight' in exe:
            # a reweight run
            # Find the correct PDF input file
            input_files, output_files = [], []
            pdfinput = self.get_pdf_input_filename()
            if os.path.exists(pdfinput):
                input_files.append(pdfinput)
            input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
            input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
            input_files.append(args[0])
            output_files.append('%s.rwgt' % os.path.basename(args[0]))
            output_files.append('reweight_xsec_events.output')
            output_files.append('scale_pdf_dependence.dat')

            return self.cluster.submit2(exe, args, cwd=cwd,
                    input_files=input_files, output_files=output_files,
                    required_output=output_files)

        elif 'ajob' in exe:
            # the 'standard' amcatnlo job
            # check if args is a list of string
            if type(args[0]) == str:
                input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
                #submitting
                self.cluster.submit2(exe, args, cwd=cwd,
                        input_files=input_files, output_files=output_files,
                        required_output=required_output)

#                # keep track of folders and arguments for splitted evt gen
#                subfolder=output_files[-1].split('/')[0]
#                if len(args) == 4 and '_' in subfolder:
#                    self.split_folders[pjoin(cwd,subfolder)] = [exe] + args

        elif 'shower' in exe:
            # a shower job
            # args are [shower, output(HEP or TOP), run_name]
            # cwd is the shower rundir, where the executable are found
            input_files, output_files = [], []
            shower = args[0]
            # the input files
            if shower == 'PYTHIA8':
                input_files.append(pjoin(cwd, 'Pythia8.exe'))
                input_files.append(pjoin(cwd, 'Pythia8.cmd'))
                # xmldoc location changed between Pythia8 versions
                if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
                    input_files.append(pjoin(cwd, 'config.sh'))
                    input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
                else:
                    input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
            else:
                input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
                input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
            if shower == 'HERWIGPP':
                # executable name differs between Herwig++ and Herwig 7
                if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
                    input_files.append(pjoin(cwd, 'Herwig++'))
                if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
                    input_files.append(pjoin(cwd, 'Herwig'))
                input_files.append(pjoin(cwd, 'HepMCFortran.so'))
            # len(args) == 3 means a single (unsplit) shower job
            if len(args) == 3:
                if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
                elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
                else:
                    raise aMCatNLOError, 'Event file not present in %s' % \
                            pjoin(self.me_dir, 'Events', self.run_name)
            else:
                # split job: args[3] is the split-job index
                input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
            # the output files
            if len(args) == 3:
                output_files.append('mcatnlo_run.log')
            else:
                output_files.append('mcatnlo_run_%s.log' % args[3])
            if args[1] == 'HEP':
                if len(args) == 3:
                    fname = 'events'
                else:
                    fname = 'events_%s' % args[3]
                if shower in ['PYTHIA8', 'HERWIGPP']:
                    output_files.append(fname + '.hepmc.gz')
                else:
                    output_files.append(fname + '.hep.gz')
            elif args[1] == 'TOP' or args[1] == 'HWU':
                if len(args) == 3:
                    fname = 'histfile'
                else:
                    fname = 'histfile_%s' % args[3]
                output_files.append(fname + '.tar')
            else:
                raise aMCatNLOError, 'Not a valid output argument for shower job :  %d' % args[1]
            #submitting
            self.cluster.submit2(exe, args, cwd=cwd,
                    input_files=input_files, output_files=output_files)

        else:
            return self.cluster.submit(exe, args, cwd=cwd)
4801
    def getIO_ajob(self,exe,cwd, args):
        """Build the input/output/required-output file lists for a standard
        'ajob' cluster submission.

        :param exe: the ajob executable name
        :param cwd: the P* subprocess directory the job runs in
        :param args: [channel, mode, split-index, MINT-step], where mode is
            'born'/'all' (fixed-order, madevent_mintFO) or 'F'/'B'
            (event generation, madevent_mintMC)
        :returns: (input_files, output_files, required_output, args)
        :raises aMCatNLOError: if args[1] is not one of the modes above.
        """
        # use local disk if possible => need to stands what are the 
        # input/output files
        
        output_files = []
        required_output = []
        input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
                     pjoin(cwd, 'symfact.dat'),
                     pjoin(cwd, 'iproc.dat'),
                     pjoin(cwd, 'initial_states_map.dat'),
                     pjoin(cwd, 'configs_and_props_info.dat'),
                     pjoin(cwd, 'leshouche_info.dat'),
                     pjoin(cwd, 'FKS_params.dat')]

        # For GoSam interface, we must copy the SLHA card as well
        if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
            input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        if os.path.exists(pjoin(cwd,'nevents.tar')):
            input_files.append(pjoin(cwd,'nevents.tar'))

        if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
            input_files.append(pjoin(cwd, 'OLE_order.olc'))

        # File for the loop (might not be present if MadLoop is not used)
        if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
                            cluster.need_transfer(self.options):
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
        elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
                            cluster.need_transfer(self.options):
            # pack the resources directory once so it can be shipped
            tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
                            dereference=True)
            tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
            tf.close()
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))

        if args[1] == 'born' or args[1] == 'all':
            # MADEVENT MINT FO MODE
            input_files.append(pjoin(cwd, 'madevent_mintFO'))
            # channel directory name: <mode>_G<channel>[_<split>]
            if args[2] == '0':
                current = '%s_G%s' % (args[1],args[0])
            else:
                current = '%s_G%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)

            required_output.append('%s/results.dat' % current)
            required_output.append('%s/res_%s.dat' % (current,args[3]))
            required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
            required_output.append('%s/mint_grids' % current)
            required_output.append('%s/grid.MC_integer' % current)
            if args[3] != '0':
                required_output.append('%s/scale_pdf_dependence.dat' % current)

        elif args[1] == 'F' or args[1] == 'B':
            # MINTMC MODE
            input_files.append(pjoin(cwd, 'madevent_mintMC'))

            if args[2] == '0':
                current = 'G%s%s' % (args[1],args[0])
            else:
                current = 'G%s%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)
            if args[2] > '0':
                # this is for the split event generation
                output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
                required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))

            else:
                required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
                if args[3] in ['0','1']:
                    required_output.append('%s/results.dat' % current)
                if args[3] == '1':
                    output_files.append('%s/results.dat' % current)

        else:
            raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))

        #Find the correct PDF input file
        pdfinput = self.get_pdf_input_filename()
        if os.path.exists(pdfinput):
            input_files.append(pdfinput)
        return input_files, output_files, required_output, args
4888 4889
4890 - def compile(self, mode, options):
4891 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 4892 specified in mode""" 4893 4894 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 4895 4896 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 4897 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 4898 4899 self.get_characteristics(pjoin(self.me_dir, 4900 'SubProcesses', 'proc_characteristics')) 4901 4902 #define a bunch of log files 4903 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 4904 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 4905 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 4906 test_log = pjoin(self.me_dir, 'test.log') 4907 4908 # environmental variables to be included in make_opts 4909 self.make_opts_var = {} 4910 if self.proc_characteristics['has_loops'] and \ 4911 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4912 self.make_opts_var['madloop'] = 'true' 4913 4914 self.update_status('Compiling the code', level=None, update_results=True) 4915 4916 libdir = pjoin(self.me_dir, 'lib') 4917 sourcedir = pjoin(self.me_dir, 'Source') 4918 4919 #clean files 4920 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 4921 #define which executable/tests to compile 4922 if '+' in mode: 4923 mode = mode.split('+')[0] 4924 if mode in ['NLO', 'LO']: 4925 exe = 'madevent_mintFO' 4926 tests = ['test_ME'] 4927 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 4928 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 4929 exe = 'madevent_mintMC' 4930 tests = ['test_ME', 'test_MC'] 4931 # write an analyse_opts with a dummy analysis so that compilation goes through 4932 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock: 4933 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n') 4934 4935 #directory where to compile exe 4936 p_dirs = [d for d in \ 4937 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 
4938 # create param_card.inc and run_card.inc 4939 self.do_treatcards('', amcatnlo=True, mode=mode) 4940 # if --nocompile option is specified, check here that all exes exists. 4941 # If they exists, return 4942 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \ 4943 for p_dir in p_dirs]) and options['nocompile']: 4944 return 4945 4946 # rm links to lhapdflib/ PDFsets if exist 4947 if os.path.exists(pjoin(libdir, 'PDFsets')): 4948 files.rm(pjoin(libdir, 'PDFsets')) 4949 4950 # read the run_card to find if lhapdf is used or not 4951 if self.run_card['pdlabel'] == 'lhapdf' and \ 4952 (self.banner.get_detail('run_card', 'lpp1') != 0 or \ 4953 self.banner.get_detail('run_card', 'lpp2') != 0): 4954 4955 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs]) 4956 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4957 lhaid_list = self.run_card['lhaid'] 4958 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4959 4960 else: 4961 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']: 4962 logger.info('Using built-in libraries for PDFs') 4963 4964 self.make_opts_var['lhapdf'] = "" 4965 4966 # read the run_card to find if applgrid is used or not 4967 if self.run_card['iappl'] != 0: 4968 self.make_opts_var['applgrid'] = 'True' 4969 # check versions of applgrid and amcfast 4970 for code in ['applgrid','amcfast']: 4971 try: 4972 p = subprocess.Popen([self.options[code], '--version'], \ 4973 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 4974 except OSError: 4975 raise aMCatNLOError(('No valid %s installation found. \n' + \ 4976 'Please set the path to %s-config by using \n' + \ 4977 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code)) 4978 else: 4979 output, _ = p.communicate() 4980 if code is 'applgrid' and output < '1.4.63': 4981 raise aMCatNLOError('Version of APPLgrid is too old. 
Use 1.4.69 or later.'\ 4982 +' You are using %s',output) 4983 if code is 'amcfast' and output < '1.1.1': 4984 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\ 4985 +' You are using %s',output) 4986 4987 # set-up the Source/make_opts with the correct applgrid-config file 4988 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 4989 % (self.options['amcfast'],self.options['applgrid']) 4990 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 4991 text_out=[] 4992 for line in text: 4993 if line.strip().startswith('APPLLIBS=$'): 4994 line=appllibs 4995 text_out.append(line) 4996 with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock: 4997 fsock.writelines(text_out) 4998 else: 4999 self.make_opts_var['applgrid'] = "" 5000 5001 if 'fastjet' in self.options.keys() and self.options['fastjet']: 5002 self.make_opts_var['fastjet_config'] = self.options['fastjet'] 5003 5004 # add the make_opts_var to make_opts 5005 self.update_make_opts() 5006 5007 # make Source 5008 self.update_status('Compiling source...', level=None) 5009 misc.compile(['clean4pdf'], cwd = sourcedir) 5010 misc.compile(cwd = sourcedir) 5011 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 5012 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 5013 and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 5014 and os.path.exists(pjoin(libdir, 'libpdf.a')): 5015 logger.info(' ...done, continuing with P* directories') 5016 else: 5017 raise aMCatNLOError('Compilation failed') 5018 5019 # make StdHep (only necessary with MG option output_dependencies='internal') 5020 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 5021 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 5022 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 5023 if os.path.exists(pjoin(sourcedir,'StdHEP')): 5024 logger.info('Compiling StdHEP (can take a couple of minutes) ...') 5025 misc.compile(['StdHEP'], cwd = sourcedir) 
5026 logger.info(' ...done.') 5027 else: 5028 raise aMCatNLOError('Could not compile StdHEP because its'+\ 5029 ' source directory could not be found in the SOURCE folder.\n'+\ 5030 " Check the MG5_aMC option 'output_dependencies.'") 5031 5032 # make CutTools (only necessary with MG option output_dependencies='internal') 5033 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 5034 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 5035 if os.path.exists(pjoin(sourcedir,'CutTools')): 5036 logger.info('Compiling CutTools (can take a couple of minutes) ...') 5037 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 5038 logger.info(' ...done.') 5039 else: 5040 raise aMCatNLOError('Could not compile CutTools because its'+\ 5041 ' source directory could not be found in the SOURCE folder.\n'+\ 5042 " Check the MG5_aMC option 'output_dependencies.'") 5043 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 5044 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 5045 raise aMCatNLOError('CutTools compilation failed.') 5046 5047 # Verify compatibility between current compiler and the one which was 5048 # used when last compiling CutTools (if specified). 5049 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 5050 libdir, 'libcts.a')))),'compiler_version.log') 5051 if os.path.exists(compiler_log_path): 5052 compiler_version_used = open(compiler_log_path,'r').read() 5053 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 5054 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 5055 if os.path.exists(pjoin(sourcedir,'CutTools')): 5056 logger.info('CutTools was compiled with a different fortran'+\ 5057 ' compiler. 
Re-compiling it now...') 5058 misc.compile(['cleanCT'], cwd = sourcedir) 5059 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 5060 logger.info(' ...done.') 5061 else: 5062 raise aMCatNLOError("CutTools installation in %s"\ 5063 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 5064 " seems to have been compiled with a different compiler than"+\ 5065 " the one specified in MG5_aMC. Please recompile CutTools.") 5066 5067 # make IREGI (only necessary with MG option output_dependencies='internal') 5068 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 5069 and os.path.exists(pjoin(sourcedir,'IREGI')): 5070 logger.info('Compiling IREGI (can take a couple of minutes) ...') 5071 misc.compile(['IREGI'], cwd = sourcedir) 5072 logger.info(' ...done.') 5073 5074 if os.path.exists(pjoin(libdir, 'libiregi.a')): 5075 # Verify compatibility between current compiler and the one which was 5076 # used when last compiling IREGI (if specified). 5077 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 5078 libdir, 'libiregi.a')))),'compiler_version.log') 5079 if os.path.exists(compiler_log_path): 5080 compiler_version_used = open(compiler_log_path,'r').read() 5081 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 5082 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 5083 if os.path.exists(pjoin(sourcedir,'IREGI')): 5084 logger.info('IREGI was compiled with a different fortran'+\ 5085 ' compiler. Re-compiling it now...') 5086 misc.compile(['cleanIR'], cwd = sourcedir) 5087 misc.compile(['IREGI'], cwd = sourcedir) 5088 logger.info(' ...done.') 5089 else: 5090 raise aMCatNLOError("IREGI installation in %s"\ 5091 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 5092 " seems to have been compiled with a different compiler than"+\ 5093 " the one specified in MG5_aMC. 
Please recompile IREGI.") 5094 5095 # check if MadLoop virtuals have been generated 5096 if self.proc_characteristics['has_loops'] and \ 5097 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 5098 if mode in ['NLO', 'aMC@NLO', 'noshower']: 5099 tests.append('check_poles') 5100 5101 # make and run tests (if asked for), gensym and make madevent in each dir 5102 self.update_status('Compiling directories...', level=None) 5103 5104 for test in tests: 5105 self.write_test_input(test) 5106 5107 try: 5108 import multiprocessing 5109 if not self.nb_core: 5110 try: 5111 self.nb_core = int(self.options['nb_core']) 5112 except TypeError: 5113 self.nb_core = multiprocessing.cpu_count() 5114 except ImportError: 5115 self.nb_core = 1 5116 5117 compile_options = copy.copy(self.options) 5118 compile_options['nb_core'] = self.nb_core 5119 compile_cluster = cluster.MultiCore(**compile_options) 5120 logger.info('Compiling on %d cores' % self.nb_core) 5121 5122 update_status = lambda i, r, f: self.donothing(i,r,f) 5123 for p_dir in p_dirs: 5124 compile_cluster.submit(prog = compile_dir, 5125 argument = [self.me_dir, p_dir, mode, options, 5126 tests, exe, self.options['run_mode']]) 5127 try: 5128 compile_cluster.wait(self.me_dir, update_status) 5129 except Exception, error: 5130 logger.warning("Fail to compile the Subprocesses") 5131 if __debug__: 5132 raise 5133 compile_cluster.remove() 5134 self.do_quit('') 5135 5136 logger.info('Checking test output:') 5137 for p_dir in p_dirs: 5138 logger.info(p_dir) 5139 for test in tests: 5140 logger.info(' Result for %s:' % test) 5141 5142 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir) 5143 #check that none of the tests failed 5144 self.check_tests(test, this_dir)
5145 5146
5147 - def donothing(*args):
5148 pass
5149 5150
5151 - def check_tests(self, test, dir):
5152 """just call the correct parser for the test log. 5153 Skip check_poles for LOonly folders""" 5154 if test in ['test_ME', 'test_MC']: 5155 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test)) 5156 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')): 5157 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
5158 5159
5160 - def parse_test_mx_log(self, log):
5161 """read and parse the test_ME/MC.log file""" 5162 content = open(log).read() 5163 if 'FAILED' in content: 5164 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK') 5165 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \ 5166 'Please check that widths of final state particles (e.g. top) have been' + \ 5167 ' set to 0 in the param_card.dat.') 5168 else: 5169 lines = [l for l in content.split('\n') if 'PASSED' in l] 5170 logger.info(' Passed.') 5171 logger.debug('\n'+'\n'.join(lines))
5172 5173
5174 - def parse_check_poles_log(self, log):
5175 """reads and parse the check_poles.log file""" 5176 content = open(log).read() 5177 npass = 0 5178 nfail = 0 5179 for line in content.split('\n'): 5180 if 'PASSED' in line: 5181 npass +=1 5182 tolerance = float(line.split()[1]) 5183 if 'FAILED' in line: 5184 nfail +=1 5185 tolerance = float(line.split()[1]) 5186 5187 if nfail + npass == 0: 5188 logger.warning('0 points have been tried') 5189 return 5190 5191 if float(nfail)/float(nfail+npass) > 0.1: 5192 raise aMCatNLOError('Poles do not cancel, run cannot continue') 5193 else: 5194 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \ 5195 %(npass, nfail+npass, tolerance))
5196 5197
5198 - def write_test_input(self, test):
5199 """write the input files to run test_ME/MC or check_poles""" 5200 if test in ['test_ME', 'test_MC']: 5201 content = "-2 -2\n" #generate randomly energy/angle 5202 content+= "100 100\n" #run 100 points for soft and collinear tests 5203 content+= "0\n" #all FKS configs 5204 content+= '\n'.join(["-1"] * 50) #random diagram (=first diagram) 5205 elif test == 'check_poles': 5206 content = '20 \n -1\n' 5207 5208 file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w') 5209 if test == 'test_MC': 5210 shower = self.run_card['parton_shower'] 5211 header = "1 \n %s\n 1 -0.1\n-1 -0.1\n" % shower 5212 file.write(header + content) 5213 elif test == 'test_ME': 5214 header = "2 \n" 5215 file.write(header + content) 5216 else: 5217 file.write(content) 5218 file.close()
    # Question class used by ask_run_configuration below to present the
    # interactive run-mode switch menu (order/shower/madspin/... toggles).
    action_switcher = AskRunNLO
    ############################################################################
5223 - def ask_run_configuration(self, mode, options, switch={}):
5224 """Ask the question when launching generate_events/multi_run""" 5225 5226 if 'parton' not in options: 5227 options['parton'] = False 5228 if 'reweightonly' not in options: 5229 options['reweightonly'] = False 5230 5231 if mode == 'auto': 5232 mode = None 5233 if not mode and (options['parton'] or options['reweightonly']): 5234 mode = 'noshower' 5235 5236 passing_cmd = [] 5237 for key,value in switch.keys(): 5238 passing_cmd.append('%s=%s' % (key,value)) 5239 5240 if 'do_reweight' in options and options['do_reweight']: 5241 passing_cmd.append('reweight=ON') 5242 if 'do_madspin' in options and options['do_madspin']: 5243 passing_cmd.append('madspin=ON') 5244 5245 force = self.force 5246 if mode == 'onlyshower': 5247 passing_cmd.append('onlyshower') 5248 force = True 5249 elif mode: 5250 passing_cmd.append(mode) 5251 5252 switch, cmd_switch = self.ask('', '0', [], ask_class = self.action_switcher, 5253 mode=mode, force=force, 5254 first_cmd=passing_cmd, 5255 return_instance=True) 5256 5257 if 'mode' in switch: 5258 mode = switch['mode'] 5259 5260 #assign the mode depending of the switch 5261 if not mode or mode == 'auto': 5262 if switch['order'] == 'LO': 5263 if switch['runshower']: 5264 mode = 'aMC@LO' 5265 elif switch['fixed_order'] == 'ON': 5266 mode = 'LO' 5267 else: 5268 mode = 'noshowerLO' 5269 elif switch['order'] == 'NLO': 5270 if switch['runshower']: 5271 mode = 'aMC@NLO' 5272 elif switch['fixed_order'] == 'ON': 5273 mode = 'NLO' 5274 else: 5275 mode = 'noshower' 5276 logger.info('will run in mode: %s' % mode) 5277 5278 if mode == 'noshower': 5279 if switch['shower'] == 'OFF': 5280 logger.warning("""You have chosen not to run a parton shower. 5281 NLO events without showering are NOT physical. 5282 Please, shower the LesHouches events before using them for physics analyses. 5283 You have to choose NOW which parton-shower you WILL use and specify it in the run_card.""") 5284 else: 5285 logger.info("""Your Parton-shower choice is not available for running. 
5286 The events will be generated for the associated Parton-Shower. 5287 Remember that NLO events without showering are NOT physical.""", '$MG:color:BLACK') 5288 5289 5290 # specify the cards which are needed for this run. 5291 cards = ['param_card.dat', 'run_card.dat'] 5292 ignore = [] 5293 if mode in ['LO', 'NLO']: 5294 options['parton'] = True 5295 ignore = ['shower_card.dat', 'madspin_card.dat'] 5296 cards.append('FO_analyse_card.dat') 5297 else: 5298 if switch['madspin'] != 'OFF': 5299 cards.append('madspin_card.dat') 5300 if switch['reweight'] != 'OFF': 5301 cards.append('reweight_card.dat') 5302 if switch['madanalysis'] == 'HADRON': 5303 cards.append('madanalysis5_hadron_card.dat') 5304 if 'aMC@' in mode: 5305 cards.append('shower_card.dat') 5306 if mode == 'onlyshower': 5307 cards = ['shower_card.dat'] 5308 if options['reweightonly']: 5309 cards = ['run_card.dat'] 5310 5311 self.keep_cards(cards, ignore) 5312 5313 if mode =='onlyshower': 5314 cards = ['shower_card.dat'] 5315 5316 5317 # automatically switch to keep_wgt option 5318 first_cmd = cmd_switch.get_cardcmd() 5319 5320 if not options['force'] and not self.force: 5321 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd) 5322 5323 self.banner = banner_mod.Banner() 5324 5325 # store the cards in the banner 5326 for card in cards: 5327 self.banner.add(pjoin(self.me_dir, 'Cards', card)) 5328 # and the run settings 5329 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()]) 5330 self.banner.add_text('run_settings', run_settings) 5331 5332 if not mode =='onlyshower': 5333 self.run_card = self.banner.charge_card('run_card') 5334 self.run_tag = self.run_card['run_tag'] 5335 #this is if the user did not provide a name for the current run 5336 if not hasattr(self, 'run_name') or not self.run_name: 5337 self.run_name = self.find_available_run_name(self.me_dir) 5338 #add a tag in the run_name for distinguish run_type 5339 if self.run_name.startswith('run_'): 5340 if mode in 
['LO','aMC@LO','noshowerLO']: 5341 self.run_name += '_LO' 5342 self.set_run_name(self.run_name, self.run_tag, 'parton') 5343 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']: 5344 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""") 5345 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']: 5346 logger.warning("""You are running with FxFx merging enabled. To be able to merge 5347 samples of various multiplicities without double counting, you 5348 have to remove some events after showering 'by hand'. Please 5349 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 5350 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q': 5351 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""") 5352 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8': 5353 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \ 5354 "Type \'n\' to stop or \'y\' to continue" 5355 answers = ['n','y'] 5356 answer = self.ask(question, 'n', answers, alias=alias) 5357 if answer == 'n': 5358 error = '''Stop opertation''' 5359 self.ask_run_configuration(mode, options) 5360 # raise aMCatNLOError(error) 5361 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']: 5362 # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs. 5363 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""") 5364 if 'aMC@' in mode or mode == 'onlyshower': 5365 self.shower_card = self.banner.charge_card('shower_card') 5366 5367 elif mode in ['LO', 'NLO']: 5368 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat') 5369 self.analyse_card = self.banner.charge_card('FO_analyse_card') 5370 5371 return mode
5372

#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
    """The command line processor of MadGraph: the aMCatNLOCmd run commands
    combined with the cmd.CmdShell front-end (used for local/interactive
    sessions; the plain aMCatNLOCmd is used for --web runs, see __main__)."""

# ------------------------------------------------------------------------------
# Usage strings and option parsers for the user-level commands (compile,
# launch, generate_events, calculate_xsect, shower).  Defined at module scope
# so the command classes can reference them for parsing and for help output.
# ------------------------------------------------------------------------------
_compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                "   MODE can be either FO, for fixed-order computations, \n" + \
                "   or MC for matching with parton-shower monte-carlos. \n" + \
                "   (if omitted, it is set to MC)\n"
_compile_parser = misc.OptionParser(usage=_compile_usage)
_compile_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")

_launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

_launch_parser = misc.OptionParser(usage=_launch_usage)
_launch_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
_launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
_launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
_launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
_launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
_launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                            help="Run the reweight module (reweighting by different model parameters)")
_launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                            help="Run the madspin package")



_generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

_generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
_generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the generate_events, without editing them")
_generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
_generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
_generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")



_calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

_calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
_calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
_calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
_calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
_calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")

_shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
                '   all the information (e.g. number of events, MonteCarlo, ...\n' + \
                '   are directly read from the header of the event file\n'
_shower_parser = misc.OptionParser(usage=_shower_usage)
_shower_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the shower_card present in the directory for the launch, without editing")

if '__main__' == __name__:
    # Launch the interface without any check if one code is already running.
    # This can ONLY run a single command !!
    import sys
    # Python-2-only guard: this module uses py2 syntax (print statements,
    # "except X, e"), so refuse to start on anything but 2.6+
    if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
        sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\
                 'Please upgrate your version of python.')

    import os
    import optparse
    # Get the directory of the script real path (bin)
    # and add it to the current PYTHONPATH
    root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ )))
    sys.path.insert(0, root_path)
5504 5505 - class MyOptParser(optparse.OptionParser):
5506 - class InvalidOption(Exception): pass
5507 - def error(self, msg=''):
5508 raise MyOptParser.InvalidOption(msg)
    # Write out nice usage message if called with -h or --help
    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    parser.add_option("","--web", action="store_true", default=False, dest='web', \
                     help='force toce to be in secure mode')
    parser.add_option("","--debug", action="store_true", default=False, dest='debug', \
                     help='force to launch debug mode')
    parser_error = ''
    done = False

    # Try progressively shorter prefixes of argv until optparse accepts them;
    # the tokens cut off the end are re-appended as the command to execute.
    for i in range(len(sys.argv)-1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
            done = True
        except MyOptParser.InvalidOption, error:
            pass
        else:
            args += sys.argv[len(sys.argv)-i:]
    if not done:
        # no prefix parsed cleanly: re-parse the full argv so the user sees
        # the actual optparse error, then exit
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption, error:
            print error
            sys.exit(2)

    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config
    # Set logging level according to the logging level given by options
    #logging.basicConfig(level=vars(logging)[options.logging])
    import internal.coloring_logging
    try:
        # under python (no -O flag) default INFO is promoted to DEBUG
        if __debug__ and options.logging == 'INFO':
            options.logging = 'DEBUG'
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            # map the level name to the numeric logging constant
            level = eval('logging.' + options.logging)
        print os.path.join(root_path, 'internal', 'me5_logging.conf')
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        raise
        pass

    # Call the cmd interface main loop
    try:
        if args:
            # a single command is provided
            if '--web' in args:
                # web mode: use the non-shell command processor
                i = args.index('--web')
                args.pop(i)
                cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path),force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path),force_run=True)

            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print parser_error
                    print 'and %s can not be interpreted as a valid command.' % args[0]
                else:
                    print 'ERROR: %s not a valid command. Please retry' % args[0]
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print 'quit on KeyboardInterrupt'
        pass