Package madgraph :: Package interface :: Module amcatnlo_run_interface
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.interface.amcatnlo_run_interface

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
  18  from __future__ import division 
  19   
  20  import atexit 
  21  import glob 
  22  import logging 
  23  import math 
  24  import optparse 
  25  import os 
  26  import pydoc 
  27  import random 
  28  import re 
  29  import shutil 
  30  import subprocess 
  31  import sys 
  32  import traceback 
  33  import time 
  34  import signal 
  35  import tarfile 
  36  import copy 
  37  import datetime 
  38  import tarfile 
  39  import traceback 
  40  import StringIO 
  41  try: 
  42      import cpickle as pickle 
  43  except: 
  44      import pickle 
  45   
  46  try: 
  47      import readline 
  48      GNU_SPLITTING = ('GNU' in readline.__doc__) 
  49  except: 
  50      GNU_SPLITTING = True 
  51   
  52  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  53  root_path = os.path.split(root_path)[0] 
  54  sys.path.insert(0, os.path.join(root_path,'bin')) 
  55   
  56  # usefull shortcut 
  57  pjoin = os.path.join 
  58  # Special logger for the Cmd Interface 
  59  logger = logging.getLogger('madgraph.stdout') # -> stdout 
  60  logger_stderr = logging.getLogger('madgraph.stderr') # ->stderr 
  61    
  62  try: 
  63      import madgraph 
  64  except ImportError:  
  65      aMCatNLO = True  
  66      import internal.extended_cmd as cmd 
  67      import internal.common_run_interface as common_run 
  68      import internal.banner as banner_mod 
  69      import internal.misc as misc     
  70      from internal import InvalidCmd, MadGraph5Error 
  71      import internal.files as files 
  72      import internal.cluster as cluster 
  73      import internal.save_load_object as save_load_object 
  74      import internal.gen_crossxhtml as gen_crossxhtml 
  75      import internal.sum_html as sum_html 
  76      import internal.shower_card as shower_card 
  77      import internal.FO_analyse_card as analyse_card  
  78      import internal.lhe_parser as lhe_parser 
  79  else: 
  80      # import from madgraph directory 
  81      aMCatNLO = False 
  82      import madgraph.interface.extended_cmd as cmd 
  83      import madgraph.interface.common_run_interface as common_run 
  84      import madgraph.iolibs.files as files 
  85      import madgraph.iolibs.save_load_object as save_load_object 
  86      import madgraph.madevent.gen_crossxhtml as gen_crossxhtml 
  87      import madgraph.madevent.sum_html as sum_html 
  88      import madgraph.various.banner as banner_mod 
  89      import madgraph.various.cluster as cluster 
  90      import madgraph.various.misc as misc 
  91      import madgraph.various.shower_card as shower_card 
  92      import madgraph.various.FO_analyse_card as analyse_card 
  93      import madgraph.various.lhe_parser as lhe_parser 
  94      from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR 
class aMCatNLOError(Exception):
    """Generic error raised by the aMC@NLO run interface."""
98
def compile_dir(*arguments):
    """Compile the directory p_dir.

    arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode),
    passed either as one 7-tuple or as seven positional arguments.
    This function needs not to be a class method in order to do
    the compilation on multicore.

    Returns 0 on success, or the MadGraph5Error message on failure (so a
    multicore worker can report the error instead of crashing the pool).
    """

    # Accept either compile_dir((7-tuple,)) -- as used by a multicore map --
    # or compile_dir(me_dir, p_dir, ...) called directly.
    if len(arguments) == 1:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
    elif len(arguments)==7:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
    else:
        raise aMCatNLOError, 'not correct number of argument'
    logger.info(' Compiling %s...' % p_dir)

    this_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        #compile everything
        # compile and run tests
        for test in tests:
            # skip check_poles for LOonly dirs
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
                continue
            # test_ME and test_MC share one executable
            if test == 'test_ME' or test == 'test_MC':
                test_exe='test_soft_col_limits'
            else:
                test_exe=test
            misc.compile([test_exe], cwd = this_dir, job_specs = False)
            # NOTE(review): 'input' shadows the builtin of the same name
            input = pjoin(me_dir, '%s_input.txt' % test)
            #this can be improved/better written to handle the output
            # NOTE(review): the open() handles passed below are never closed
            misc.call(['./%s' % (test_exe)], cwd=this_dir,
                    stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'),
                    close_fds=True)
            # archive the MadLoop resources produced by check_poles
            if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')) :
                tf=tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz',
                                dereference=True)
                tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources')
                tf.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=this_dir, job_specs = False)
            misc.call(['./gensym'],cwd= this_dir,
                     stdout=open(pjoin(this_dir, 'gensym.log'), 'w'),
                     close_fds=True)
            #compile madevent_mintMC/mintFO
            misc.compile([exe], cwd=this_dir, job_specs = False)
            if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
                misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False)

        logger.info(' %s done.' % p_dir)
        return 0
    except MadGraph5Error, msg:
        # returned (not re-raised) on purpose: see docstring
        return msg
154 155 -def check_compiler(options, block=False):
156 """check that the current fortran compiler is gfortran 4.6 or later. 157 If block, stops the execution, otherwise just print a warning""" 158 159 msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \ 160 'gfortran 4.6 or later installed.\n%s has been detected\n'+\ 161 'Note that You can still run all MadEvent run without any problem!' 162 #first check that gfortran is installed 163 if options['fortran_compiler']: 164 compiler = options['fortran_compiler'] 165 elif misc.which('gfortran'): 166 compiler = 'gfortran' 167 else: 168 compiler = '' 169 170 if 'gfortran' not in compiler: 171 if block: 172 raise aMCatNLOError(msg % compiler) 173 else: 174 logger.warning(msg % compiler) 175 else: 176 curr_version = misc.get_gfortran_version(compiler) 177 if not ''.join(curr_version.split('.')) >= '46': 178 if block: 179 raise aMCatNLOError(msg % (compiler + ' ' + curr_version)) 180 else: 181 logger.warning(msg % (compiler + ' ' + curr_version))
182
#===============================================================================
# CmdExtended
#===============================================================================
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    #suggested list of command
    next_possibility = {
        'start': [],
    }

    # name of the debug file and canned error/help messages
    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'


    keyboard_stop_msg = """stopping all operation
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Define the Error
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError

    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # Tag allowing/forbiding question
        self.force = False

        # If possible, build an info line with current version number
        # and date, from the VERSION text file
        info = misc.get_pkg_info()
        info_line = ""
        if info and info.has_key('version') and info.has_key('date'):
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#* VERSION %s %s %s *\n" % \
                            (info['version'],
                            (30 - len_version - len_date) * ' ',
                            info['date'])
        else:
            # no package info: fall back to the top-level MGMEVersion.txt
            version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
            info_line = "#* VERSION %s %s *\n" % \
                        (version, (24 - len(version)) * ' ')

        # Create a header for the history file.
        # Remember to fill in time at writeout time!
        self.history_header = \
        '#************************************************************\n' + \
        '#* MadGraph5_aMC@NLO *\n' + \
        '#* *\n' + \
        "#* * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * * * 5 * * * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * *\n" + \
        "#* *\n" + \
        "#* *\n" + \
        info_line + \
        "#* *\n" + \
        "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
        "#* and *\n" + \
        "#* http://amcatnlo.cern.ch *\n" + \
        '#* *\n' + \
        '#************************************************************\n' + \
        '#* *\n' + \
        '#* Command File for aMCatNLO *\n' + \
        '#* *\n' + \
        '#* run as ./bin/aMCatNLO.py filename *\n' + \
        '#* *\n' + \
        '#************************************************************\n'

        # drop the leading '#' when reusing the version line in the banner
        if info_line:
            info_line = info_line[1:]

        logger.info(\
        "************************************************************\n" + \
        "* *\n" + \
        "* W E L C O M E to M A D G R A P H 5 *\n" + \
        "* a M C @ N L O *\n" + \
        "* *\n" + \
        "* * * *\n" + \
        "* * * * * *\n" + \
        "* * * * * 5 * * * * *\n" + \
        "* * * * * *\n" + \
        "* * * *\n" + \
        "* *\n" + \
        info_line + \
        "* *\n" + \
        "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "* http://amcatnlo.cern.ch *\n" + \
        "* *\n" + \
        "* Type 'help' for in-line help. *\n" + \
        "* *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)


    def get_history_header(self):
        """return the history header"""
        return self.history_header % misc.get_time_info()

    def stop_on_keyboard_stop(self):
        """action to perform to close nicely on a keyboard interupt"""
        # best effort: never let cleanup raise during an interrupt
        try:
            if hasattr(self, 'cluster'):
                logger.info('rm jobs on queue')
                self.cluster.remove()
            if hasattr(self, 'results'):
                self.update_status('Stop by the user', level=None, makehtml=True, error=True)
                self.add_error_log_in_html(KeyboardInterrupt)
        except:
            pass

    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # relaxing the tag forbidding question
        self.force = False

        if not self.use_rawinput:
            return stop


        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit','quit','EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            # a failed status update must not kill the interactive session
            misc.sprint('self.update_status fails', log=logger)
            pass

    def nice_user_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_user_error(self, error, line)

    def nice_config_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_config_error(self, error, line)

    def nice_error_handling(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_error_handling(self, error, line)
#===============================================================================
# HelpToCmd
#===============================================================================
class HelpToCmd(object):
    """ The Series of help routine for the aMCatNLOCmd"""

    def help_launch(self):
        """help for launch command"""
        _launch_parser.print_help()

    def help_banner_run(self):
        """help for the banner_run command (reproduce a run from a banner)"""
        logger.info("syntax: banner_run Path|RUN [--run_options]")
        logger.info("-- Reproduce a run following a given banner")
        logger.info(" One of the following argument is require:")
        logger.info(" Path should be the path of a valid banner.")
        logger.info(" RUN should be the name of a run of the current directory")
        self.run_options_help([('-f','answer all question by default'),
                               ('--name=X', 'Define the name associated with the new run')])


    def help_compile(self):
        """help for compile command"""
        _compile_parser.print_help()

    def help_generate_events(self):
        """help for generate_events command,
        just call help_launch"""
        _generate_events_parser.print_help()


    def help_calculate_xsect(self):
        """help for calculate_xsect command"""
        _calculate_xsect_parser.print_help()

    def help_shower(self):
        """help for shower command"""
        _shower_parser.print_help()


    def help_open(self):
        """help for the open command (open a file in an editor)"""
        logger.info("syntax: open FILE ")
        logger.info("-- open a file with the appropriate editor.")
        logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
        logger.info(' the path to the last created/used directory is used')

    def run_options_help(self, data):
        """Print the local options given in *data* ((name, description) pairs),
        followed by the session-wide run options."""
        if data:
            logger.info('-- local options:')
            for name, info in data:
                logger.info(' %s : %s' % (name, info))

        logger.info("-- session options:")
        logger.info(" Note that those options will be kept for the current session")
        logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
        logger.info(" --multicore : Run in multi-core configuration")
        logger.info(" --nb_core=X : limit the number of core to use to X.")
403
#===============================================================================
# CheckValidForCmd
#===============================================================================
class CheckValidForCmd(object):
    """ The Series of check routine for the aMCatNLOCmd"""

    def check_shower(self, args, options):
        """Check the validity of the line. args[0] is the run_directory"""

        if options['force']:
            self.force = True

        if len(args) == 0:
            self.help_shower()
            raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
        if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
            # NOTE(review): message path uses os.getcwd() while the check above
            # uses self.me_dir -- may print a misleading path; confirm intent
            raise self.InvalidCmd, 'Directory %s does not exists' % \
                            pjoin(os.getcwd(), 'Events', args[0])

        self.set_run_name(args[0], level= 'shower')
        # normalise args[0] to the absolute events directory for the caller
        args[0] = pjoin(self.me_dir, 'Events', args[0])

    def check_plot(self, args):
        """Check the argument for the plot command
        plot run_name modes"""


        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir or not td:
            logger.info('Retry to read configuration file to find madanalysis/td')
            self.set_configuration()

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir:
            error_msg = 'No Madanalysis path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)
        if not td:
            error_msg = 'No path to td directory correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # no argument: default to plotting 'all' for the current run
        if len(args) == 0:
            if not hasattr(self, 'run_name') or not self.run_name:
                self.help_plot()
                raise self.InvalidCmd('No run name currently define. Please add this information.')
            args.append('all')
            return


        # first argument may be a run name rather than a plot mode
        if args[0] not in self._plot_mode:
            self.set_run_name(args[0], level='plot')
            del args[0]
            if len(args) == 0:
                args.append('all')
        elif not self.run_name:
            self.help_plot()
            raise self.InvalidCmd('No run name currently define. Please add this information.')

        for arg in args:
            if arg not in self._plot_mode and arg != self.run_name:
                self.help_plot()
                raise self.InvalidCmd('unknown options %s' % arg)

    def check_pgs(self, arg):
        """Check the argument for pythia command
        syntax: pgs [NAME]
        Note that other option are already remove at this point
        """

        # If not pythia-pgs path
        if not self.options['pythia-pgs_path']:
            logger.info('Retry to read configuration file to find pythia-pgs path')
            self.set_configuration()

        if not self.options['pythia-pgs_path'] or not \
            os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
            error_msg = 'No pythia-pgs path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # extract an optional --tag=XXX argument
        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]


        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        # NOTE(review): "file file" typo in the message below
        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'pgs')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                # gunzip asynchronously; the caller can wait on the returned lock
                lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'pgs')

        return lock


    def check_delphes(self, arg):
        """Check the argument for pythia command
        syntax: delphes [NAME]
        Note that other option are already remove at this point
        """

        # If not pythia-pgs path
        if not self.options['delphes_path']:
            logger.info('Retry to read configuration file to find delphes path')
            self.set_configuration()

        if not self.options['delphes_path']:
            error_msg = 'No delphes path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # extract an optional --tag=XXX argument
        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]


        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        # NOTE(review): calls self.help_pgs() -- presumably should be
        # help_delphes; also "file file" typo -- confirm against upstream
        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'delphes')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))


            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
                    % (self.run_name, prev_tag,
                       pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                # NOTE(review): unlike check_pgs, 'lock' is never returned here,
                # so the caller cannot wait for the gunzip to finish
                lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'delphes')

    def check_calculate_xsect(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_calculate_xsect()
            raise self.InvalidCmd, 'Invalid Syntax: Too many argument'

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                # BUG(review): args[1] raises IndexError here (len(args)==1);
                # should be args[0]
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.'


    def check_generate_events(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_generate_events()
            raise self.InvalidCmd, 'Invalid Syntax: Too many argument'

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                # BUG(review): args[1] raises IndexError here (len(args)==1);
                # should be args[0]
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.'

    def check_banner_run(self, args):
        """check the validity of line"""

        if len(args) == 0:
            self.help_banner_run()
            raise self.InvalidCmd('banner_run requires at least one argument.')

        tag = [a[6:] for a in args if a.startswith('--tag=')]


        if os.path.exists(args[0]):
            type ='banner'
            format = self.detect_card_type(args[0])
            if format != 'banner':
                raise self.InvalidCmd('The file is not a valid banner.')
        elif tag:
            args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
                            (args[0], tag))
            if not os.path.exists(args[0]):
                raise self.InvalidCmd('No banner associates to this name and tag.')
        else:
            name = args[0]
            type = 'run'
            banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0]))
            if not banners:
                raise self.InvalidCmd('No banner associates to this name.')
            elif len(banners) == 1:
                args[0] = banners[0]
            else:
                #list the tag and propose those to the user
                tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
                tag = self.ask('which tag do you want to use?', tags[0], tags)
                args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
                                (args[0], tag))

        run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
        if run_name:
            try:
                self.exec_cmd('remove %s all banner -f' % run_name)
            except Exception:
                pass
            self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
        elif type == 'banner':
            self.set_run_name(self.find_available_run_name(self.me_dir))
        elif type == 'run':
            if not self.results[name].is_empty():
                run_name = self.find_available_run_name(self.me_dir)
                logger.info('Run %s is not empty so will use run_name: %s' % \
                            (name, run_name))
                self.set_run_name(run_name)
            else:
                try:
                    self.exec_cmd('remove %s all banner -f' % run_name)
                except Exception:
                    pass
                self.set_run_name(name)



    def check_launch(self, args, options):
        """check the validity of the line. args is MODE
        MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True


        if not args:
            args.append('auto')
            return

        if len(args) > 1:
            self.help_launch()
            raise self.InvalidCmd, 'Invalid Syntax: Too many argument'

        elif len(args) == 1:
            if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.'
        if mode == 'NLO' and options['reweightonly']:
            raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'


    def check_compile(self, args, options):
        """check the validity of the line. args is MODE
        MODE being FO or MC. If no mode is passed, MC is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('MC')
            return

        if len(args) > 1:
            self.help_compile()
            raise self.InvalidCmd, 'Invalid Syntax: Too many argument'

        elif len(args) == 1:
            if not args[0] in ['MC', 'FO']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
            mode = args[0]
# check for incompatible options/modes


#===============================================================================
# CompleteForCmd
#===============================================================================
class CompleteForCmd(CheckValidForCmd):
    """ The Series of help routine for the MadGraphCmd"""

    def complete_launch(self, text, line, begidx, endidx):
        """auto-completion for launch command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
        elif len(args) == 2 and line[begidx-1] == '@':
            # completing the part after 'aMC@'
            return self.list_completion(text,['LO','NLO'],line)
        else:
            # otherwise propose the parser options
            opts = []
            for opt in _launch_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_banner_run(self, text, line, begidx, endidx, formatting=True):
        "Complete the banner run command"
        # NOTE(review): the whole body is wrapped in try/except with a bare
        # 'print error' -- completion failures are only printed, never raised
        try:


            args = self.split_arg(line[0:begidx], error=False)

            if args[-1].endswith(os.path.sep):
                return self.path_completion(text,
                                            os.path.join('.',*[a for a in args \
                                                    if a.endswith(os.path.sep)]))


            if len(args) > 1:
                # only options are possible
                tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1]))
                tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]

                if args[-1] != '--tag=':
                    tags = ['--tag=%s' % t for t in tags]
                else:
                    return self.list_completion(text, tags)
                return self.list_completion(text, tags +['--name=','-f'], line)

            # First argument
            possibilites = {}

            comp = self.path_completion(text, os.path.join('.',*[a for a in args \
                                                    if a.endswith(os.path.sep)]))
            if os.path.sep in line:
                return comp
            else:
                possibilites['Path from ./'] = comp

            run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events'))
            run_list = [n.rsplit('/',2)[1] for n in run_list]
            possibilites['RUN Name'] = self.list_completion(text, run_list)

            return self.deal_multiple_categories(possibilites, formatting)


        except Exception, error:
            print error


    def complete_compile(self, text, line, begidx, endidx):
        """auto-completion for compile command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['FO','MC'],line)
        else:
            opts = []
            for opt in _compile_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_calculate_xsect(self, text, line, begidx, endidx):
        """auto-completion for calculate_xsect command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['LO','NLO'],line)
        else:
            opts = []
            for opt in _calculate_xsect_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_generate_events(self, text, line, begidx, endidx):
        """auto-completion for generate_events command
        call the compeltion for launch"""
        # NOTE(review): result of complete_launch is discarded -- a 'return'
        # is almost certainly missing here, so no completions are proposed
        self.complete_launch(text, line, begidx, endidx)


    def complete_shower(self, text, line, begidx, endidx):
        """auto-completion for the shower command (valid run names)"""
        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return valid run_name
            # BUG(review): misplaced parenthesis -- the second pjoin is passed
            # as a component of the first instead of as misc.glob's second
            # argument; compare complete_pgs below
            data = misc.glob(pjoin('*','events.lhe.gz', pjoin(self.me_dir, 'Events')))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

    def complete_plot(self, text, line, begidx, endidx):
        """ Complete the plot command """

        args = self.split_arg(line[0:begidx], error=False)

        if len(args) == 1:
            #return valid run_name
            # BUG(review): same misplaced parenthesis as in complete_shower
            data = misc.glob(pjoin('*','events.lhe*', pjoin(self.me_dir, 'Events')))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

        if len(args) > 1:
            return self.list_completion(text, self._plot_mode)

    def complete_pgs(self,text, line, begidx, endidx):
        "Complete the pgs command"
        args = self.split_arg(line[0:begidx], error=False)
        if len(args) == 1:
            #return valid run_name
            data = misc.glob(pjoin('*', 'events_*.hep.gz'),
                             pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1
            else:
                tmp2 = self.list_completion(text, self._run_options + ['-f',
                                                '--tag=' ,'--no_default'], line)
                return tmp1 + tmp2
        else:
            return self.list_completion(text, self._run_options + ['-f',
                                                '--tag=','--no_default'], line)

    # delphes shares the same completion logic as pgs
    complete_delphes = complete_pgs
906
class aMCatNLOAlreadyRunning(InvalidCmd):
    # Raised when another aMC@NLO run is already in progress in this directory.
    pass
909
class AskRunNLO(cmd.ControlSwitch):
    """Interactive switch-board presented before a run: controls the
    perturbative order and the fixed-order / shower / madspin / reweight /
    madanalysis toggles listed in to_control."""

    # (switch name, description shown to the user)
    to_control = [('order', 'Type of perturbative computation'),
                  ('fixed_order', 'No MC@[N]LO matching / event generation'),
                  ('shower', 'Shower the generated events'),
                  ('madspin', 'Decay onshell particles'),
                  ('reweight', 'Add weights to events for new hypp.'),
                  ('madanalysis','Run MadAnalysis5 on the events generated')]

    # answers that end the question, in addition to the base-class ones
    quit_on = cmd.ControlSwitch.quit_on + ['onlyshower']
    def __init__(self, question, line_args=[], mode=None, force=False,
                 *args, **opt):
        """Init the switch-board from opt['mother_interface'] (the running
        aMC@NLO command interface), loading the current run_card."""
        # NOTE(review): mutable default 'line_args=[]' -- not mutated here,
        # but a None sentinel would be safer; confirm no caller relies on it

        self.me_dir = opt['mother_interface'].me_dir
        self.check_available_module(opt['mother_interface'].options)
        self.last_mode = opt['mother_interface'].last_mode
        self.proc_characteristics = opt['mother_interface'].proc_characteristics
        self.run_card = banner_mod.RunCard(pjoin(self.me_dir,'Cards', 'run_card.dat'))
        super(AskRunNLO,self).__init__(self.to_control, opt['mother_interface'],
                                       *args, **opt)
931 932 @property
933 - def answer(self):
934 935 out = super(AskRunNLO, self).answer 936 if out['shower'] == 'HERWIG7': 937 out['shower'] = 'HERWIGPP' 938 939 if out['shower'] not in self.get_allowed('shower') or out['shower'] =='OFF': 940 out['runshower'] = False 941 else: 942 out['runshower'] = True 943 return out
944 945
    def check_available_module(self, options):
        """Fill self.available_module with the external tools usable for
        this run, based on the configured paths in *options*."""

        self.available_module = set()
        if options['madanalysis5_path']:
            self.available_module.add('MA5')
        if not aMCatNLO or ('mg5_path' in options and options['mg5_path']):
            # MadSpin needs a full MG5 installation
            self.available_module.add('MadSpin')
        if misc.has_f2py() or options['f2py_compiler']:
            self.available_module.add('reweight')
        if options['pythia8_path']:
            self.available_module.add('PY8')
        if options['hwpp_path'] and options['thepeg_path'] and options['hepmc_path']:
            self.available_module.add('HW7')
        # the bundled HERWIG6/PYTHIA6 showers need the StdHEP library
        MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
        if os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))):
            self.available_module.add('StdHEP')
    #
    # shortcut
    #
    def ans_lo(self, value):
        """ function called if the user type lo=value. or lo (then value is None)"""

        if value is None:
            # fixed-order LO run: no event generation, no shower
            self.switch['order'] = 'LO'
            self.switch['fixed_order'] = 'ON'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: lo=%s' % value)
976
977 - def ans_nlo(self, value):
978 if value is None: 979 self.switch['order'] = 'NLO' 980 self.switch['fixed_order'] = 'ON' 981 self.set_switch('shower', 'OFF') 982 else: 983 logger.warning('Invalid command: nlo=%s' % value)
984
985 - def ans_amc__at__nlo(self, value):
986 if value is None: 987 self.switch['order'] = 'NLO' 988 self.switch['fixed_order'] = 'OFF' 989 self.set_switch('shower', 'ON') 990 else: 991 logger.warning('Invalid command: aMC@NLO=%s' % value)
992
993 - def ans_amc__at__lo(self, value):
994 if value is None: 995 self.switch['order'] = 'LO' 996 self.switch['fixed_order'] = 'OFF' 997 self.set_switch('shower', 'ON') 998 else: 999 logger.warning('Invalid command: aMC@LO=%s' % value)
1000
1001 - def ans_noshower(self, value):
1002 if value is None: 1003 self.switch['order'] = 'NLO' 1004 self.switch['fixed_order'] = 'OFF' 1005 self.set_switch('shower', 'OFF') 1006 else: 1007 logger.warning('Invalid command: noshower=%s' % value)
1008
1009 - def ans_onlyshower(self, value):
1010 if value is None: 1011 self.switch['mode'] = 'onlyshower' 1012 self.switch['madspin'] = 'OFF' 1013 self.switch['reweight'] = 'OFF' 1014 else: 1015 logger.warning('Invalid command: onlyshower=%s' % value)
1016
1017 - def ans_noshowerlo(self, value):
1018 if value is None: 1019 self.switch['order'] = 'LO' 1020 self.switch['fixed_order'] = 'OFF' 1021 self.set_switch('shower', 'OFF') 1022 else: 1023 logger.warning('Invalid command: noshowerlo=%s' % value)
1024
1025 - def ans_madanalysis5(self, value):
1026 """ shortcut madanalysis5 -> madanalysis """ 1027 1028 if value is None: 1029 return self.onecmd('madanalysis') 1030 else: 1031 self.set_switch('madanalysis', value)
    #
    # ORDER
    #
    def get_allowed_order(self):
        """Valid values for the 'order' switch."""
        return ["LO", "NLO"]
1037
1038 - def set_default_order(self):
1039 1040 if self.last_mode in ['LO', 'aMC@L0', 'noshowerLO']: 1041 self.switch['order'] = 'LO' 1042 self.switch['order'] = 'NLO'
1043
    def set_switch_off_order(self):
        """'order' has no OFF state: switching it off is a no-op."""
        return
1046 # 1047 # Fix order 1048 #
1049 - def get_allowed_fixed_order(self):
1050 """ """ 1051 if self.proc_characteristics['ninitial'] == 1: 1052 return ['ON'] 1053 else: 1054 return ['ON', 'OFF']
1055
1056 - def set_default_fixed_order(self):
1057 1058 if self.last_mode in ['LO', 'NLO']: 1059 self.switch['fixed_order'] = 'ON' 1060 if self.proc_characteristics['ninitial'] == 1: 1061 self.switch['fixed_order'] = 'ON' 1062 else: 1063 self.switch['fixed_order'] = 'OFF' 1064
1065 - def color_for_fixed_order(self, switch_value):
1066 1067 if switch_value in ['OFF']: 1068 return self.green % switch_value 1069 else: 1070 return self.red % switch_value
1071
1072 - def color_for_shower(self, switch_value):
1073 1074 if switch_value in ['ON']: 1075 return self.green % switch_value 1076 elif switch_value in self.get_allowed('shower'): 1077 return self.green % switch_value 1078 else: 1079 return self.red % switch_value
1080
    def consistency_fixed_order_shower(self, vfix, vshower):
        """ consistency_XX_YY(val_XX, val_YY)
            -> XX is the new key set by the user to a new value val_XX
            -> YY is another key set by the user.
            -> return value should be None or "replace_YY"
        """
        # a fixed-order run cannot be showered: force the shower to OFF
        if vfix == 'ON' and vshower != 'OFF' :
            return 'OFF'
        return None

    # madspin and reweight are likewise incompatible with fixed-order runs
    consistency_fixed_order_madspin = consistency_fixed_order_shower
    consistency_fixed_order_reweight = consistency_fixed_order_shower
1095 - def consistency_fixed_order_madanalysis(self, vfix, vma5):
1096 1097 if vfix == 'ON' and vma5 == 'ON' : 1098 return 'OFF' 1099 return None
1100 1101
    def consistency_shower_fixed_order(self, vshower, vfix):
        """ consistency_XX_YY(val_XX, val_YY)
            -> XX is the new key set by the user to a new value val_XX
            -> YY is another key set by the user.
            -> return value should be None or "replace_YY"
        """
        # asking for a shower (or any tool below) turns fixed-order OFF
        if vshower != 'OFF' and vfix == 'ON':
            return 'OFF'
        return None

    consistency_madspin_fixed_order = consistency_shower_fixed_order
    consistency_reweight_fixed_order = consistency_shower_fixed_order
    consistency_madanalysis_fixed_order = consistency_shower_fixed_order


    #
    # Shower
    #
    def get_allowed_shower(self):
        """Valid values for the 'shower' switch; the computed list is
        cached in self.allowed_shower."""

        if hasattr(self, 'allowed_shower'):
            return self.allowed_shower

        # the MCatNLO shower scripts rely on the 'bc' command line tool
        if not misc.which('bc'):
            return ['OFF']

        if self.proc_characteristics['ninitial'] == 1:
            # decay process: no shower possible
            self.allowed_shower = ['OFF']
            return ['OFF']
        else:
            if 'StdHEP' in self.available_module:
                allowed = ['HERWIG6','OFF', 'PYTHIA6Q', 'PYTHIA6PT', ]
            else:
                allowed = ['OFF']
            if 'PY8' in self.available_module:
                allowed.append('PYTHIA8')
            if 'HW7' in self.available_module:
                allowed.append('HERWIGPP')


            self.allowed_shower = allowed

            return allowed
1147
    def check_value_shower(self, value):
        """Validate/normalise a user value for the 'shower' switch.

        Returns True when the value is already canonical, the canonical
        name when *value* is a recognised alias, and — implicitly — None
        (falsy) for anything unrecognised.
        """

        if value.upper() in self.get_allowed_shower():
            return True
        if value.upper() in ['PYTHIA8', 'HERWIGPP']:
            return True
        if value.upper() == 'ON':
            # 'ON' means "use whatever shower the run_card requests"
            return self.run_card['parton_shower']
        if value.upper() in ['P8','PY8','PYTHIA_8']:
            return 'PYTHIA8'
        if value.upper() in ['PY6','P6','PY6PT', 'PYTHIA_6', 'PYTHIA_6PT','PYTHIA6PT','PYTHIA6_PT']:
            return 'PYTHIA6PT'
        if value.upper() in ['PY6Q', 'PYTHIA_6Q','PYTHIA6Q', 'PYTHIA6_Q']:
            return 'PYTHIA6Q'
        if value.upper() in ['HW7', 'HERWIG7']:
            return 'HERWIG7'
        if value.upper() in ['HW++', 'HWPP', 'HERWIG++']:
            return 'HERWIGPP'
        if value.upper() in ['HW6', 'HERWIG_6']:
            return 'HERWIG6'
1169
    def set_default_shower(self):
        """Initialise the 'shower' switch from the last run mode, the
        process type, tool availability and the presence of a shower card."""

        if self.last_mode in ['LO', 'NLO', 'noshower', 'noshowerLO']:
            self.switch['shower'] = 'OFF'
            return

        if self.proc_characteristics['ninitial'] == 1:
            # decay process: no shower possible
            self.switch['shower'] = 'OFF'
            return

        if not misc.which('bc'):
            logger.warning('bc command not available. Forbids to run the shower. please install it if you want to run the shower. (sudo apt-get install bc)')
            self.switch['shower'] = 'OFF'
            return

        if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
            # a shower card exists: default to the run_card's shower choice
            self.switch['shower'] = self.run_card['parton_shower']
            #self.switch['shower'] = 'ON'
            self.switch['fixed_order'] = "OFF"
        else:
            self.switch['shower'] = 'OFF'
1192 - def consistency_shower_madanalysis(self, vshower, vma5):
1193 """ MA5 only possible with (N)LO+PS if shower is run""" 1194 1195 if vshower == 'OFF' and vma5 == 'ON': 1196 return 'OFF' 1197 return None
1198
1199 - def consistency_madanalysis_shower(self, vma5, vshower):
1200 1201 if vma5=='ON' and vshower == 'OFF': 1202 return 'ON' 1203 return None
1204
1205 - def get_cardcmd_for_shower(self, value):
1206 """ adpat run_card according to this setup. return list of cmd to run""" 1207 1208 if value != 'OFF': 1209 return ['set parton_shower %s' % self.switch['shower']] 1210 return []
    #
    # madspin
    #
    def get_allowed_madspin(self):
        """Valid values for the 'madspin' switch; cached in
        self.allowed_madspin."""

        if hasattr(self, 'allowed_madspin'):
            return self.allowed_madspin

        self.allowed_madspin = []


        if 'MadSpin' not in self.available_module:
            return self.allowed_madspin
        if self.proc_characteristics['ninitial'] == 1:
            # decay process: MadSpin cannot be used at all
            self.available_module.remove('MadSpin')
            self.allowed_madspin = ['OFF']
            return self.allowed_madspin
        else:
            self.allowed_madspin = ['OFF', 'ON', 'onshell']
            return self.allowed_madspin
1233
    def check_value_madspin(self, value):
        """handle alias and valid option not present in get_allowed_madspin
        remember that this mode should always be OFF for 1>N. (ON not in allowed value)

        Returns True when already canonical, the canonical spelling for a
        recognised alias, False for invalid input (and implicitly None for
        unknown aliases).
        """

        if value.upper() in self.get_allowed_madspin():
            # normalise the case to the spelling stored in the allowed list
            if value == value.upper():
                return True
            else:
                return value.upper()
        elif value.lower() in self.get_allowed_madspin():
            if value == value.lower():
                return True
            else:
                return value.lower()

        if 'MadSpin' not in self.available_module or \
           'ON' not in self.get_allowed_madspin():
            return False

        if value.lower() in ['madspin', 'full']:
            return 'full'
        elif value.lower() in ['none']:
            return 'none'
1257
1258 - def set_default_madspin(self):
1259 1260 if 'MadSpin' in self.available_module: 1261 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 1262 self.switch['madspin'] = 'ON' 1263 else: 1264 self.switch['madspin'] = 'OFF' 1265 else: 1266 self.switch['madspin'] = 'Not Avail.'
1267
1268 - def get_cardcmd_for_madspin(self, value):
1269 """set some command to run before allowing the user to modify the cards.""" 1270 1271 if value == 'onshell': 1272 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode onshell"] 1273 elif value in ['full', 'madspin']: 1274 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode madspin"] 1275 elif value == 'none': 1276 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode none"] 1277 else: 1278 return []
    #
    # reweight
    #
    def get_allowed_reweight(self):
        """set the valid (visible) options for reweight"""

        if hasattr(self, 'allowed_reweight'):
            return getattr(self, 'allowed_reweight')

        self.allowed_reweight = []
        if 'reweight' not in self.available_module:
            return self.allowed_reweight
        if self.proc_characteristics['ninitial'] == 1:
            # decay process: reweighting is not supported
            self.available_module.remove('reweight')
            self.allowed_reweight.append('OFF')
            return self.allowed_reweight
        else:
            self.allowed_reweight = [ 'OFF', 'ON', 'NLO', 'NLO_TREE','LO']
            return self.allowed_reweight
1299
1300 - def set_default_reweight(self):
1301 """initialise the switch for reweight""" 1302 1303 if 'reweight' in self.available_module: 1304 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')): 1305 self.switch['reweight'] = 'ON' 1306 else: 1307 self.switch['reweight'] = 'OFF' 1308 else: 1309 self.switch['reweight'] = 'Not Avail.'
1310
    def get_cardcmd_for_reweight(self, value):
        """ adapt run_card according to this setup. return list of cmd to run"""

        # NLO(-tree) reweighting also needs the extra rwgt info in the events
        if value == 'LO':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode LO"]
        elif value == 'NLO':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO",
                    "set store_rwgt_info T"]
        elif value == 'NLO_TREE':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO_tree",
                    "set store_rwgt_info T"]
        return []
    #
    # MadAnalysis5
    #
    def get_allowed_madanalysis(self):
        """Valid values for the 'madanalysis' switch; cached in
        self.allowed_madanalysis."""

        if hasattr(self, 'allowed_madanalysis'):
            return self.allowed_madanalysis

        self.allowed_madanalysis = []


        if 'MA5' not in self.available_module:
            return self.allowed_madanalysis

        if self.proc_characteristics['ninitial'] == 1:
            # decay process: MadAnalysis5 not supported
            self.available_module.remove('MA5')
            self.allowed_madanalysis = ['OFF']
            return self.allowed_madanalysis
        else:
            self.allowed_madanalysis = ['OFF', 'ON']
            return self.allowed_madanalysis
1345
    def set_default_madanalysis(self):
        """initialise the switch for madanalysis5"""

        if 'MA5' not in self.available_module:
            self.switch['madanalysis'] = 'Not Avail.'
        elif os.path.exists(pjoin(self.me_dir,'Cards', 'madanalysis5_hadron_card.dat')):
            self.switch['madanalysis'] = 'ON'
        else:
            self.switch['madanalysis'] = 'OFF'
1355
1356 - def check_value_madanalysis(self, value):
1357 """check an entry is valid. return the valid entry in case of shortcut""" 1358 1359 if value.upper() in self.get_allowed('madanalysis'): 1360 return True 1361 value = value.lower() 1362 if value == 'hadron': 1363 return 'ON' if 'ON' in self.get_allowed_madanalysis5 else False 1364 else: 1365 return False
1366
#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # Truth values
    true = ['T','.true.',True,'true']
    # Options and formats available
    _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
    _plot_mode = ['all', 'parton','shower','pgs','delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']
    # survey options, dict from name to type, default value, and help text
    # Variables to store object information
    web = False
    cluster_mode = 0
    queue  = 'madgraph'
    nb_core = None
    make_opts_var = {}

    # suggestions printed to the user after each command
    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower' : ['generate_events [OPTIONS]']
    }
    def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
        """ add information to the cmd """

        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
        #common_run.CommonRunCmd.__init__(self, me_dir, options)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))


        self.load_results_db()
        self.results.def_web_mode(self.web)
        # check that compiler is gfortran 4.6 or later if virtuals have been exported
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)
1421 1422 1423 ############################################################################
1424 - def do_shower(self, line):
1425 """ run the shower on a given parton level file """ 1426 argss = self.split_arg(line) 1427 (options, argss) = _launch_parser.parse_args(argss) 1428 # check argument validity and normalise argument 1429 options = options.__dict__ 1430 options['reweightonly'] = False 1431 self.check_shower(argss, options) 1432 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe') 1433 self.ask_run_configuration('onlyshower', options) 1434 self.run_mcatnlo(evt_file, options) 1435 1436 self.update_status('', level='all', update_results=True)
1437 1438 ################################################################################
1439 - def do_plot(self, line):
1440 """Create the plot for a given run""" 1441 1442 # Since in principle, all plot are already done automaticaly 1443 args = self.split_arg(line) 1444 # Check argument's validity 1445 self.check_plot(args) 1446 logger.info('plot for run %s' % self.run_name) 1447 1448 if not self.force: 1449 self.ask_edit_cards([], args, plot=True) 1450 1451 if any([arg in ['parton'] for arg in args]): 1452 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe') 1453 if os.path.exists(filename+'.gz'): 1454 misc.gunzip(filename) 1455 if os.path.exists(filename): 1456 logger.info('Found events.lhe file for run %s' % self.run_name) 1457 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe')) 1458 self.create_plot('parton') 1459 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename) 1460 misc.gzip(filename) 1461 1462 if any([arg in ['all','parton'] for arg in args]): 1463 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top') 1464 if os.path.exists(filename): 1465 logger.info('Found MADatNLO.top file for run %s' % \ 1466 self.run_name) 1467 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html') 1468 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton') 1469 1470 if not os.path.isdir(plot_dir): 1471 os.makedirs(plot_dir) 1472 top_file = pjoin(plot_dir, 'plots.top') 1473 files.cp(filename, top_file) 1474 madir = self.options['madanalysis_path'] 1475 tag = self.run_card['run_tag'] 1476 td = self.options['td_path'] 1477 misc.call(['%s/plot' % self.dirbin, madir, td], 1478 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1479 stderr = subprocess.STDOUT, 1480 cwd=plot_dir) 1481 1482 misc.call(['%s/plot_page-pl' % self.dirbin, 1483 os.path.basename(plot_dir), 1484 'parton'], 1485 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1486 stderr = subprocess.STDOUT, 1487 cwd=pjoin(self.me_dir, 'HTML', self.run_name)) 1488 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'), 1489 
output) 1490 1491 os.remove(pjoin(self.me_dir, 'Events', 'plots.top')) 1492 1493 if any([arg in ['all','shower'] for arg in args]): 1494 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name)) 1495 if len(filenames) != 1: 1496 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name)) 1497 if len(filenames) != 1: 1498 logger.info('No shower level file found for run %s' % \ 1499 self.run_name) 1500 return 1501 filename = filenames[0] 1502 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1503 1504 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')): 1505 if aMCatNLO and not self.options['mg5_path']: 1506 raise "plotting NLO HEP file needs MG5 utilities" 1507 1508 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'), 1509 pjoin(self.me_dir, 'Cards', 'pythia_card.dat')) 1510 self.run_hep2lhe() 1511 else: 1512 filename = filenames[0] 1513 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1514 1515 self.create_plot('shower') 1516 lhe_file_name = filename.replace('.hep.gz', '.lhe') 1517 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'), 1518 lhe_file_name) 1519 misc.gzip(lhe_file_name) 1520 1521 if any([arg in ['all','pgs'] for arg in args]): 1522 filename = pjoin(self.me_dir, 'Events', self.run_name, 1523 '%s_pgs_events.lhco' % self.run_tag) 1524 if os.path.exists(filename+'.gz'): 1525 misc.gunzip(filename) 1526 if os.path.exists(filename): 1527 self.create_plot('PGS') 1528 misc.gzip(filename) 1529 else: 1530 logger.info('No valid files for pgs plot') 1531 1532 if any([arg in ['all','delphes'] for arg in args]): 1533 filename = pjoin(self.me_dir, 'Events', self.run_name, 1534 '%s_delphes_events.lhco' % self.run_tag) 1535 if os.path.exists(filename+'.gz'): 1536 misc.gunzip(filename) 1537 if os.path.exists(filename): 1538 #shutil.move(filename, pjoin(self.me_dir, 
'Events','delphes_events.lhco')) 1539 self.create_plot('Delphes') 1540 #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename) 1541 misc.gzip(filename) 1542 else: 1543 logger.info('No valid files for delphes plot')
1544 1545 1546 ############################################################################
    def do_calculate_xsect(self, line):
        """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
        this function wraps the do_launch one"""

        self.start_time = time.time()
        argss = self.split_arg(line)
        # check argument validity and normalise argument
        (options, argss) = _calculate_xsect_parser.parse_args(argss)
        options = options.__dict__
        options['reweightonly'] = False
        # fixed-order run: stop at the parton level, no showering
        options['parton'] = True
        self.check_calculate_xsect(argss, options)
        self.do_launch(line, options, argss)
1560 1561 ############################################################################
    def do_banner_run(self, line):
        """Make a run from the banner file"""

        args = self.split_arg(line)
        #check the validity of the arguments
        self.check_banner_run(args)

        # Remove previous cards
        for name in ['shower_card.dat', 'madspin_card.dat']:
            try:
                os.remove(pjoin(self.me_dir, 'Cards', name))
            except Exception:
                pass

        banner_mod.split_banner(args[0], self.me_dir, proc_card=False)

        # Check if we want to modify the run
        if not self.force:
            ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
            if ans == 'n':
                self.force = True

        # Compute run mode:
        if self.force:
            mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
            banner = banner_mod.Banner(args[0])
            # NOTE(review): the loop variable shadows the 'line' parameter;
            # harmless here since 'line' is not used afterwards
            for line in banner['run_settings']:
                if '=' in line:
                    mode, value = [t.strip() for t in line.split('=')]
                    mode_status[mode] = value
        else:
            mode_status = {}

        # Call Generate events
        self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
                       switch=mode_status)
1598 1599 ############################################################################
    def do_generate_events(self, line):
        """Main commands: generate events
        this function just wraps the do_launch one"""
        # kept as a separate command for backward compatibility of the syntax
        self.do_launch(line)
1604 1605 1606 ############################################################################
    def do_treatcards(self, line, amcatnlo=True,mode=''):
        """Advanced commands: this is for creating the correct run_card.inc from the nlo format

        mode: 'LO'/'NLO' triggers the propagation of the FO_card entries
        into the (system-only) run_card parameters before delegating to the
        common implementation.
        """
        #check if no 'Auto' are present in the file
        self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))

        # propagate the FO_card entry FO_LHE_weight_ratio to the run_card.
        # this variable is system only in the run_card
        # can not be done in EditCard since this parameter is not written in the
        # run_card directly.
        if mode in ['LO', 'NLO']:
            name = 'fo_lhe_weight_ratio'
            FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir,'Cards', 'FO_analyse_card.dat'))
            if name in FO_card:
                self.run_card.set(name, FO_card[name], user=False)
            name = 'fo_lhe_postprocessing'
            if name in FO_card:
                self.run_card.set(name, FO_card[name], user=False)

        return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1626 1627 ############################################################################
    def set_configuration(self, amcatnlo=True, **opt):
        """assign all configuration variable from file
            loop over the different config file if config_file not define """
        # simply force amcatnlo=True in the common implementation
        return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1632 1633 ############################################################################
    def do_launch(self, line, options={}, argss=[], switch={}):
        """Main commands: launch the full chain
        options and args are relevant if the function is called from other
        functions, such as generate_events or calculate_xsect
        mode gives the list of switch needed for the computation (usefull for banner_run)
        """

        # parse the command line only when not called programmatically
        if not argss and not options:
            self.start_time = time.time()
            argss = self.split_arg(line)
            # check argument validity and normalise argument
            (options, argss) = _launch_parser.parse_args(argss)
            options = options.__dict__
            self.check_launch(argss, options)


        if 'run_name' in options.keys() and options['run_name']:
            self.run_name = options['run_name']
            # if a dir with the given run_name already exists
            # remove it and warn the user
            if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
                logger.warning('Removing old run information in \n'+
                               pjoin(self.me_dir, 'Events', self.run_name))
                files.rm(pjoin(self.me_dir, 'Events', self.run_name))
                self.results.delete_run(self.run_name)
        else:
            self.run_name = '' # will be set later

        if options['multicore']:
            self.cluster_mode = 2
        elif options['cluster']:
            self.cluster_mode = 1

        # determine the run mode, either interactively or from 'switch'
        if not switch:
            mode = argss[0]

            if mode in ['LO', 'NLO']:
                options['parton'] = True
            mode = self.ask_run_configuration(mode, options)
        else:
            mode = self.ask_run_configuration('auto', options, switch)

        self.results.add_detail('run_mode', mode)

        self.update_status('Starting run', level=None, update_results=True)

        if self.options['automatic_html_opening']:
            misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
            self.options['automatic_html_opening'] = False

        if '+' in mode:
            mode = mode.split('+')[0]
        self.compile(mode, options)
        evt_file = self.run(mode, options)

        if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
            logger.info('No event file generated: grids have been set-up with a '\
                        'relative precision of %s' % self.run_card['req_acc'])
            return

        if not mode in ['LO', 'NLO']:
            # NOTE(review): the assert message mentions events.lhe.gz while the
            # condition checks events.lhe — confirm which one is intended
            assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))

            if self.run_card['systematics_program'] == 'systematics':
                self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments'])))

            self.exec_cmd('reweight -from_cards', postcmd=False)
            self.exec_cmd('decay_events -from_cards', postcmd=False)
            evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')

        if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
           and not options['parton']:
            self.run_mcatnlo(evt_file, options)
            self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False)

        elif mode == 'noshower':
            logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please, shower the Les Houches events before using them for physics analyses.""")


        self.update_status('', level='all', update_results=True)
        if self.run_card['ickkw'] == 3 and \
           (mode in ['noshower'] or \
            (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
            logger.warning("""You are running with FxFx merging enabled.
To be able to merge samples of various multiplicities without double counting,
you have to remove some events after showering 'by hand'.
Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")

        self.store_result()
        #check if the param_card defines a scan.
        if self.param_card_iterator:
            cpath = pjoin(self.me_dir,'Cards','param_card.dat')
            param_card_iterator = self.param_card_iterator
            self.param_card_iterator = [] #avoid to next generate go trough here
            param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                            error=self.results.current['error'],
                                            param_card_path=cpath)
            orig_name = self.run_name
            #go trough the scal
            with misc.TMP_variable(self, 'allow_notification_center', False):
                for i,card in enumerate(param_card_iterator):
                    card.write(cpath)
                    self.check_param_card(cpath, dependent=True)
                    if not options['force']:
                        options['force'] = True
                    if options['run_name']:
                        options['run_name'] = '%s_%s' % (orig_name, i+1)
                    if not argss:
                        argss = [mode, "-f"]
                    elif argss[0] == "auto":
                        argss[0] = mode
                    # re-enter this command for every point of the scan
                    self.do_launch("", options=options, argss=argss, switch=switch)
                    #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False)
                    param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                                    error=self.results.current['error'],
                                                    param_card_path=cpath)
            #restore original param_card
            param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
            name = misc.get_scan_name(orig_name, self.run_name)
            path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
            logger.info("write all cross-section results in %s" % path, '$MG:BOLD')
            param_card_iterator.write_summary(path)

        if self.allow_notification_center:
            misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
                              '%s: %s +- %s ' % (self.results.current['run_name'],
                                                 self.results.current['cross'],
                                                 self.results.current['error']))
1763 1764 1765 ############################################################################
1766 - def do_compile(self, line):
1767 """Advanced commands: just compile the executables """ 1768 argss = self.split_arg(line) 1769 # check argument validity and normalise argument 1770 (options, argss) = _compile_parser.parse_args(argss) 1771 options = options.__dict__ 1772 options['reweightonly'] = False 1773 options['nocompile'] = False 1774 self.check_compile(argss, options) 1775 1776 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]] 1777 self.ask_run_configuration(mode, options) 1778 self.compile(mode, options) 1779 1780 1781 self.update_status('', level='all', update_results=True)
1782 1783
1784 - def update_random_seed(self):
1785 """Update random number seed with the value from the run_card. 1786 If this is 0, update the number according to a fresh one""" 1787 iseed = self.run_card['iseed'] 1788 if iseed == 0: 1789 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit')) 1790 iseed = int(randinit.read()[2:]) + 1 1791 randinit.close() 1792 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w') 1793 randinit.write('r=%d' % iseed) 1794 randinit.close()
1795 1796
def run(self, mode, options):
    """runs aMC@NLO. Returns the name of the event file created
    (for the (N)LO+PS modes via reweight_and_collect_events); for the
    fixed-order modes ('LO'/'NLO') no event file exists and None is returned.

    mode    : one of 'LO', 'NLO', 'aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO'
    options : dict of run options; 'only_generation' and 'reweightonly'
              are read here (the former is defaulted to False if absent).
    """
    logger.info('Starting run')

    if not 'only_generation' in options.keys():
        options['only_generation'] = False

    # for second step in applgrid mode, do only the event generation step
    if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
        options['only_generation'] = True
    self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
    self.setup_cluster_or_multicore()
    self.update_random_seed()
    #find and keep track of all the jobs
    folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                    'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
    folder_names['noshower'] = folder_names['aMC@NLO']
    folder_names['noshowerLO'] = folder_names['aMC@LO']
    # one line per P* contribution directory in subproc.mg
    p_dirs = [d for d in \
              open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
    #Clean previous results
    self.clean_previous_results(options,p_dirs,folder_names[mode])

    # status messages for the three MINT steps (event generation modes)
    mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']

    if options['reweightonly']:
        # skip the integration entirely; only redo the reweighting/collection
        event_norm=self.run_card['event_norm']
        nevents=self.run_card['nevents']
        return self.reweight_and_collect_events(options, mode, nevents, event_norm)

    if mode in ['LO', 'NLO']:
        # this is for fixed order runs
        mode_dict = {'NLO': 'all', 'LO': 'born'}
        logger.info('Doing fixed order %s' % mode)
        req_acc = self.run_card['req_acc_FO']

        # Re-distribute the grids for the 2nd step of the applgrid
        # running
        if self.run_card['iappl'] == 2:
            self.applgrid_distribute(options,mode_dict[mode],p_dirs)

        # create a list of dictionaries "jobs_to_run" with all the
        # jobs that need to be run
        integration_step=-1
        jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                    req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
        self.prepare_directories(jobs_to_run,mode)

        # loop over the integration steps. After every step, check
        # if we have the required accuracy. If this is the case,
        # stop running, else do another step.
        while True:
            integration_step=integration_step+1
            self.run_all_jobs(jobs_to_run,integration_step)
            self.collect_log_files(jobs_to_run,integration_step)
            jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                            jobs_to_collect,integration_step,mode,mode_dict[mode])
            if not jobs_to_run:
                # there are no more jobs to run (jobs_to_run is empty)
                break
        # We are done.
        self.finalise_run_FO(folder_names[mode],jobs_to_collect)
        self.update_status('Run complete', level='parton', update_results=True)
        return

    elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
        if self.ninitial == 1:
            raise aMCatNLOError('Decay processes can only be run at fixed order.')
        mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
                     'noshower': 'all', 'noshowerLO': 'born'}
        shower = self.run_card['parton_shower'].upper()
        nevents = self.run_card['nevents']
        req_acc = self.run_card['req_acc']
        # validate the nevents/req_acc combination before starting anything
        if nevents == 0 and req_acc < 0 :
            raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                'of events, because 0 events requested. Please set '\
                                'the "req_acc" parameter in the run_card to a value '\
                                'between 0 and 1')
        elif req_acc >1 or req_acc == 0 :
            raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                'be between larger than 0 and smaller than 1, '\
                                'or set to -1 for automatic determination. Current '\
                                'value is %f' % req_acc)
        # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card)
        elif req_acc < 0 and nevents > 1000000 :
            req_acc=0.001

        shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

        if not shower in shower_list:
            raise aMCatNLOError('%s is not a valid parton shower. '\
                                'Please use one of the following: %s' \
                                % (shower, ', '.join(shower_list)))

        # check that PYTHIA6PT is not used for processes with FSR
        if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
            raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

        if mode in ['aMC@NLO', 'aMC@LO']:
            logger.info('Doing %s matched to parton shower' % mode[4:])
        elif mode in ['noshower','noshowerLO']:
            logger.info('Generating events without running the shower.')
        elif options['only_generation']:
            logger.info('Generating events starting from existing results')

        jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                    req_acc,mode_dict[mode],1,mode,fixed_order=False)
        # Make sure to update all the jobs to be ready for the event generation step
        if options['only_generation']:
            jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
        else:
            self.prepare_directories(jobs_to_run,mode,fixed_order=False)

        # Main loop over the three MINT generation steps:
        for mint_step, status in enumerate(mcatnlo_status):
            if options['only_generation'] and mint_step < 2:
                # restart directly at the event generation step
                continue
            self.update_status(status, level='parton')
            self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
            self.collect_log_files(jobs_to_run,mint_step)
            jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                     jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)
            if mint_step+1==2 and nevents==0:
                # only the cross section was requested: stop before generating events
                self.print_summary(options,2,mode)
                return

        # Sanity check on the event files. If error the jobs are resubmitted
        self.check_event_files(jobs_to_collect)

        if self.cluster_mode == 1:
            #if cluster run, wait 10 sec so that event files are transferred back
            self.update_status(
                'Waiting while files are transferred back from the cluster nodes',
                level='parton')
            time.sleep(10)

        event_norm=self.run_card['event_norm']
        return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1938
def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
                       integration_step,mode,fixed_order=True):
    """Creates a list of dictionaries with all the jobs to be run.

    For a fresh run the P*/channels.txt files (written by 'gensym') are
    read and one job dictionary is created per (group of) integration
    channel(s).  When options['only_generation'] is True, the job list
    is instead restored from SubProcesses/job_status.pkl.

    Returns (jobs_to_run, jobs_to_collect, integration_step).

    Fix w.r.t. previous version: the restore path used a bare 'except:',
    which also converted KeyboardInterrupt/SystemExit into an
    aMCatNLOError; it now catches Exception only.
    """
    jobs_to_run=[]
    if not options['only_generation']:
        # Fresh, new run. Check all the P*/channels.txt files
        # (created by the 'gensym' executable) to set-up all the
        # jobs using the default inputs.
        npoints = self.run_card['npoints_FO_grid']
        niters = self.run_card['niters_FO_grid']
        for p_dir in p_dirs:
            try:
                with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
                    channels=chan_file.readline().split()
            except IOError:
                logger.warning('No integration channels found for contribution %s' % p_dir)
                continue
            if fixed_order:
                lch=len(channels)
                maxchannels=20 # combine up to 20 channels in a single job
                if self.run_card['iappl'] != 0: maxchannels=1
                # number of jobs needed to cover all channels in groups of <= maxchannels
                njobs=(int(lch/maxchannels)+1 if lch%maxchannels!= 0 \
                       else int(lch/maxchannels))
                for nj in range(1,njobs+1):
                    job={}
                    job['p_dir']=p_dir
                    job['channel']=str(nj)
                    # distribute the channels as evenly as possible over the jobs
                    job['nchans']=(int(lch/njobs)+1 if nj <= lch%njobs else int(lch/njobs))
                    job['configs']=' '.join(channels[:job['nchans']])
                    del channels[:job['nchans']]
                    job['split']=0
                    if req_acc == -1:
                        # fixed number of points/iterations from the run_card
                        job['accuracy']=0
                        job['niters']=niters
                        job['npoints']=npoints
                    elif req_acc > 0:
                        # adaptive: start from a loose default accuracy
                        job['accuracy']=0.05
                        job['niters']=6
                        job['npoints']=-1
                    else:
                        raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
                                            'between 0 and 1 or set it equal to -1.')
                    job['mint_mode']=0
                    job['run_mode']=run_mode
                    job['wgt_frac']=1.0
                    job['wgt_mult']=1.0
                    jobs_to_run.append(job)
                if channels:
                    # all channels must have been consumed by the grouping above
                    raise aMCatNLOError('channels is not empty %s' % channels)
            else:
                # event generation: one job per integration channel
                for channel in channels:
                    job={}
                    job['p_dir']=p_dir
                    job['channel']=channel
                    job['split']=0
                    job['accuracy']=0.03
                    job['niters']=12
                    job['npoints']=-1
                    job['mint_mode']=0
                    job['run_mode']=run_mode
                    job['wgt_frac']=1.0
                    jobs_to_run.append(job)
        jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs
    else:
        # if options['only_generation'] is true, just read the current jobs from file
        try:
            with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'rb') as f:
                jobs_to_collect=pickle.load(f)
                # re-anchor the saved (possibly absolute) dirnames to the
                # current process directory, so a moved run still works
                for job in jobs_to_collect:
                    job['dirname']=pjoin(self.me_dir,'SubProcesses',job['dirname'].rsplit('/SubProcesses/',1)[1])
            jobs_to_run=copy.copy(jobs_to_collect)
        except Exception:
            # narrow from bare 'except:': do not swallow KeyboardInterrupt/SystemExit
            raise aMCatNLOError('Cannot reconstruct saved job status in %s' % \
                                pjoin(self.me_dir,'SubProcesses','job_status.pkl'))
        # Update cross sections and determine which jobs to run next
        if fixed_order:
            jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
                                jobs_to_collect,integration_step,mode,run_mode)
            # Update the integration_step to make sure that nothing will be overwritten
            integration_step=1
            for job in jobs_to_run:
                while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
                    integration_step=integration_step+1
            integration_step=integration_step-1
        else:
            self.append_the_results(jobs_to_collect,integration_step)
    return jobs_to_run,jobs_to_collect,integration_step
2026
def prepare_directories(self,jobs_to_run,mode,fixed_order=True):
    """Create and initialise the G* working directory of every job.

    Sets job['dirname'], creates the directory if needed, writes the
    'input_app.txt' steering file, and for split jobs seeds the
    directory with the grids of the parent (un-split) job.
    """
    name_suffix = {'born': 'B', 'all': 'F'}
    for job in jobs_to_run:
        # Base directory name: '<run_mode>_G<channel>' for fixed order,
        # 'G<B|F><channel>' for event-generation runs.
        if fixed_order:
            base = job['run_mode'] + '_G' + job['channel']
        else:
            base = 'G' + name_suffix[job['run_mode']] + job['channel']
        # split jobs get an extra '_<split>' suffix
        if job['split'] != 0:
            base = base + '_' + str(job['split'])
        dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'], base)
        job['dirname'] = dirname
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        self.write_input_file(job, fixed_order)
        # For split jobs, bring in the parent job's grids:
        # copy for fixed order, symlink for event generation.
        if job['split'] != 0:
            parent = job['dirname'].rsplit("_", 1)[0]
            if fixed_order:
                for gridfile in ['grid.MC_integer', 'mint_grids']:
                    files.cp(pjoin(parent, gridfile), job['dirname'])
            else:
                for gridfile in ['grid.MC_integer', 'mint_grids', 'res_1']:
                    if not os.path.isfile(pjoin(job['dirname'], gridfile)):
                        files.ln(pjoin(parent, gridfile), job['dirname'])
2059 2060
def write_input_file(self,job,fixed_order):
    """write the input file for the madevent_mint* executable in the appropriate directory

    job : dict providing the template fields (npoints, niters, accuracy,
          nchans, configs, split, wgt_mult, run_mode, mint_mode, channel)
          and 'dirname', the directory where 'input_app.txt' is written.
    fixed_order : selects the keyword-style fixed-order format (True) or
          the positional MINT format (False).
    """
    if fixed_order:
        # keyword-style input read by the fixed-order executable
        content= \
"""NPOINTS = %(npoints)s
NITERATIONS = %(niters)s
ACCURACY = %(accuracy)s
ADAPT_GRID = 2
MULTICHANNEL = 1
SUM_HELICITY = 1
NCHANS = %(nchans)s
CHANNEL = %(configs)s
SPLIT = %(split)s
WGT_MULT= %(wgt_mult)s
RUN_MODE = %(run_mode)s
RESTART = %(mint_mode)s
""" \
            % job
    else:
        # positional input (one entry per line) read by the MINT executable
        content = \
"""-1 12 ! points, iterations
%(accuracy)s ! desired fractional accuracy
1 -0.1 ! alpha, beta for Gsoft
-1 -0.1 ! alpha, beta for Gazi
1 ! Suppress amplitude (0 no, 1 yes)?
1 ! Exact helicity sum (0 yes, n = number/event)?
%(channel)s ! Enter Configuration Number:
%(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events
1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij
%(run_mode)s ! all, born, real, virt
""" \
            % job
    with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file:
        input_file.write(content)
2095 2096
def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
    """Submit every job in 'jobs_to_run' through 'run_exe' and wait until
    all of them have completed.

    integration_step is the current fixed-order iteration (or MINT step
    when fixed_order is False); it is passed to the 'ajob1' script.
    """
    name_suffix = {'born': 'B', 'all': 'F'}
    if fixed_order:
        # status line for the fixed-order iteration being run
        if integration_step == 0:
            self.update_status('Setting up grids', level=None)
        else:
            self.update_status('Refining results, step %i' % integration_step, level=None)
        run_type = "Fixed order integration step %s" % integration_step
    else:
        run_type = "MINT step %s" % integration_step
    # counters used by the job-monitoring machinery
    self.ijob = 0
    self.njobs = len(jobs_to_run)
    for job in jobs_to_run:
        # fixed order passes the run_mode verbatim; event generation
        # passes the single-letter suffix ('B'/'F') instead
        mode_arg = job['run_mode'] if fixed_order else name_suffix[job['run_mode']]
        self.run_exe('ajob1',
                     [job['channel'], mode_arg, str(job['split']), str(integration_step)],
                     run_type,
                     cwd=pjoin(self.me_dir, 'SubProcesses', job['p_dir']))

    if self.cluster_mode == 2:
        time.sleep(1) # security to allow all jobs to be launched
    self.wait_for_complete(run_type)
2125 2126
def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
                        integration_step,mode,run_mode,fixed_order=True):
    """Collect the results, make HTML pages, print the summary and
    determine if there are more jobs to run. Returns the list
    of the jobs that still need to be run, as well as the
    complete list of jobs that need to be collected to get the
    final answer.

    Side effects: sets self.cross_sect_dict, updates self.results,
    writes SubProcesses/job_status.pkl (so the run can be restarted
    with the only-generation option), and prepares the directories
    for the next iteration/MINT step.
    """
    # Get the results of the current integration/MINT step
    self.append_the_results(jobs_to_run,integration_step)
    self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)
    # Update HTML pages
    if fixed_order:
        cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode],
                                                       jobs=jobs_to_collect)
    else:
        name_suffix={'born' :'B' , 'all':'F'}
        cross, error = self.make_make_all_html_results(folder_names=['G%s*' % name_suffix[run_mode]])
    self.results.add_detail('cross', cross)
    self.results.add_detail('error', error)
    # Combine grids from split fixed order jobs
    if fixed_order:
        jobs_to_run=self.combine_split_order_run(jobs_to_run)
    # Set-up jobs for the next iteration/MINT step
    jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)
    # IF THERE ARE NO MORE JOBS, WE ARE DONE!!!
    if fixed_order:
        # Write the jobs_to_collect directory to file so that we
        # can restart them later (with only-generation option)
        with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
            pickle.dump(jobs_to_collect,f)
    # Print summary
    if (not jobs_to_run_new) and fixed_order:
        # print final summary of results (for fixed order)
        scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
        self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
        return jobs_to_run_new,jobs_to_collect
    elif jobs_to_run_new:
        # print intermediate summary of results
        scale_pdf_info=[]
        self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
    else:
        # When we are done for (N)LO+PS runs, do not print
        # anything yet. This will be done after the reweighting
        # and collection of the events
        scale_pdf_info=[]
    # Prepare for the next integration/MINT step
    if (not fixed_order) and integration_step+1 == 2 :
        # Write the jobs_to_collect directory to file so that we
        # can restart them later (with only-generation option)
        with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
            pickle.dump(jobs_to_collect,f)
        # next step is event generation (mint_step 2)
        jobs_to_run_new,jobs_to_collect_new= \
            self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
        self.prepare_directories(jobs_to_run_new,mode,fixed_order)
        self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect)
        self.write_nevts_files(jobs_to_run_new)
    else:
        # fixed-order refinement: possibly split long jobs over more cores
        # (not done for applgrid runs or when a fixed npoints/niters was requested)
        if fixed_order and self.run_card['iappl'] == 0 \
                       and self.run_card['req_acc_FO'] > 0:
            jobs_to_run_new,jobs_to_collect= \
                self.split_jobs_fixed_order(jobs_to_run_new,jobs_to_collect)
        self.prepare_directories(jobs_to_run_new,mode,fixed_order)
        jobs_to_collect_new=jobs_to_collect
    return jobs_to_run_new,jobs_to_collect_new
2193 2194
def write_nevents_unweighted_file(self,jobs,jobs0events):
    """writes the nevents_unweighted file in the SubProcesses directory.
    We also need to write the jobs that will generate 0 events,
    because that makes sure that the cross section from those channels
    is taken into account in the event weights (by collect_events.f).
    """
    def _entry(job, xsec, frac):
        # relative 'P*/G*/events.lhe' path, padded for readability
        p_part, g_part = job['dirname'].split('/')[-2:]
        lhefile = pjoin(pjoin(p_part, g_part), 'events.lhe')
        return ' %s %d %9e %9e' % (lhefile.ljust(40), job['nevents'], xsec, frac)

    entries = [_entry(job, job['resultABS']*job['wgt_frac'], job['wgt_frac'])
               for job in jobs]
    # zero-event channels enter with their full ABS cross section and unit fraction
    entries.extend(_entry(job, job['resultABS'], 1.)
                   for job in jobs0events if job['nevents'] == 0)
    with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f:
        f.write('\n'.join(entries)+'\n')
2215
def write_nevts_files(self,jobs):
    """write the nevts files in the SubProcesses/P*/G*/ directories

    Each file contains the number of events the job must generate; for
    'bias' event normalisation the total ABS cross section is appended
    on the same line.
    """
    bias_norm = self.run_card['event_norm'].lower() == 'bias'
    for job in jobs:
        with open(pjoin(job['dirname'],'nevts'),'w') as nevts_file:
            if bias_norm:
                nevts_file.write('%i %f\n' % (job['nevents'],self.cross_sect_dict['xseca']))
            else:
                nevts_file.write('%i\n' % job['nevents'])
2224
def combine_split_order_run(self,jobs_to_run):
    """Combines jobs and grids from split jobs that have been run.

    Jobs sharing the same p_dir and channel form one group; each group
    is merged back into a single equivalent un-split job.
    """
    split_groups = []
    merged_jobs = []
    for job in jobs_to_run:
        if job['split'] == 0:
            # this job was never split: keep it as-is
            job['combined'] = 1
            merged_jobs.append(job)
        elif job['split'] == 1:
            # first split of a group: gather every split with the
            # same p_dir/channel (including this one)
            split_groups.append([j for j in jobs_to_run
                                 if j['p_dir'] == job['p_dir'] and
                                    j['channel'] == job['channel']])
        # splits > 1 are already contained in their group
    for group in split_groups:
        # merge the mint/MC-integer grids first, then the job dictionaries
        self.combine_split_order_grids(group)
        merged_jobs.append(self.combine_split_order_jobs(group))
    return merged_jobs
2246
def combine_split_order_jobs(self,job_group):
    """combine the jobs in job_group and return a single summed job

    Additive quantities are summed, errors are combined in quadrature,
    and the iteration counters are averaged over the splits.
    """
    merged = copy.copy(job_group[0])
    # restore the 'un-split' bookkeeping on the merged job
    merged['dirname'] = pjoin(merged['dirname'].rsplit('_',1)[0])
    merged['split'] = 0
    merged['wgt_mult'] = 1.0
    merged['combined'] = len(job_group)
    # quantities that simply add up
    additive = ['niters_done','npoints_done','niters','npoints',
                'result','resultABS','time_spend']
    # quantities combined in quadrature
    quadratic = ['error','errorABS']
    for key in quadratic:
        merged[key] = math.pow(merged[key],2)
    for job in job_group[1:]:
        for key in additive:
            merged[key] += job[key]
        for key in quadratic:
            merged[key] += math.pow(job[key],2)
    for key in quadratic:
        merged[key] = math.sqrt(merged[key])
    merged['err_percABS'] = merged['errorABS']/merged['resultABS']*100.
    merged['err_perc'] = merged['error']/merged['result']*100.
    # average number of iterations over the splits
    merged['niters'] = int(merged['niters_done']/len(job_group))
    merged['niters_done'] = int(merged['niters_done']/len(job_group))
    return merged
2277 2278
def combine_split_order_grids(self,job_group):
    """Combines the mint_grids and MC-integer grids from the split order
    jobs (fixed order only).

    All jobs in job_group must live under the same parent directory
    (the dirname with the trailing '_<split>' stripped); the combined
    grids are written back to that parent location.
    """
    files_mint_grids=[]
    files_MC_integer=[]
    location=None
    for job in job_group:
        files_mint_grids.append(pjoin(job['dirname'],'mint_grids'))
        files_MC_integer.append(pjoin(job['dirname'],'grid.MC_integer'))
        if not location:
            location=pjoin(job['dirname'].rsplit('_',1)[0])
        else:
            if location != pjoin(job['dirname'].rsplit('_',1)[0]) :
                raise aMCatNLOError('Not all jobs have the same location. '\
                                    +'Cannot combine them.')
    # Needed to average the grids (both xgrids, ave_virt and
    # MC_integer grids), but sum the cross section info. The
    # latter is only the only line that contains integers.
    for j,fs in enumerate([files_mint_grids,files_MC_integer]):
        linesoffiles=[]
        for f in fs:
            with open(f,'r+') as fi:
                linesoffiles.append(fi.readlines())
        to_write=[]
        # process the files line-by-line in lockstep: rowgrp holds the
        # same line of every split file
        for rowgrp in zip(*linesoffiles):
            try:
                # check that last element on the line is an
                # integer (will raise ValueError if not the
                # case). If integer, this is the line that
                # contains information that needs to be
                # summed. All other lines can be averaged.
                is_integer = [[int(row.strip().split()[-1])] for row in rowgrp]
                floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                floatgrps = zip(*floatsbyfile)
                # combine the cross-section line column by column
                special=[]
                for i,floatgrp in enumerate(floatgrps):
                    if i==0: # sum X-sec
                        special.append(sum(floatgrp))
                    elif i==1: # sum unc in quadrature
                        special.append(math.sqrt(sum([err**2 for err in floatgrp])))
                    elif i==2: # average number of PS per iteration
                        special.append(int(sum(floatgrp)/len(floatgrp)))
                    elif i==3: # sum the number of iterations
                        special.append(int(sum(floatgrp)))
                    elif i==4: # average the nhits_in_grids
                        special.append(int(sum(floatgrp)/len(floatgrp)))
                    else:
                        raise aMCatNLOError('"mint_grids" files not in correct format. '+\
                                            'Cannot combine them.')
                to_write.append(" ".join(str(s) for s in special) + "\n")
            except ValueError:
                # just average all
                floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                floatgrps = zip(*floatsbyfile)
                averages = [sum(floatgrp)/len(floatgrp) for floatgrp in floatgrps]
                to_write.append(" ".join(str(a) for a in averages) + "\n")
        # write the data over the master location
        if j==0:
            with open(pjoin(location,'mint_grids'),'w') as f:
                f.writelines(to_write)
        elif j==1:
            with open(pjoin(location,'grid.MC_integer'),'w') as f:
                f.writelines(to_write)
2343 2344
def split_jobs_fixed_order(self,jobs_to_run,jobs_to_collect):
    """Looks in the jobs_to_run to see if there is the need to split the
    jobs, depending on the expected time they take. Updates
    jobs_to_run and jobs_to_collect to replace the split-job by
    its splits.

    Returns the new (jobs_to_run, jobs_to_collect) pair.
    """
    # determine the number jobs we should have (this is per p_dir)
    if self.options['run_mode'] ==2:
        nb_submit = int(self.options['nb_core'])
    elif self.options['run_mode'] ==1:
        nb_submit = int(self.options['cluster_size'])
    else:
        nb_submit =1
    # total expected aggregated running time, extrapolated from the
    # time already spent and the requested niters*npoints
    time_expected=0
    for job in jobs_to_run:
        time_expected+=job['time_spend']*(job['niters']*job['npoints'])/ \
                       (job['niters_done']*job['npoints_done'])
    # this means that we must expect the following per job (in
    # ideal conditions)
    time_per_job=time_expected/(nb_submit*(1+len(jobs_to_run)/2))
    jobs_to_run_new=[]
    jobs_to_collect_new=copy.copy(jobs_to_collect)
    for job in jobs_to_run:
        # remove current job from jobs_to_collect. Make sure
        # to remove all the split ones in case the original
        # job had been a split one (before it was re-combined)
        # (Python 2: filter() returns a list snapshot, so removing
        # elements while looping over it is safe here)
        for j in filter(lambda j: j['p_dir'] == job['p_dir'] and \
                        j['channel'] == job['channel'], jobs_to_collect_new):
            jobs_to_collect_new.remove(j)
        time_expected=job['time_spend']*(job['niters']*job['npoints'])/ \
                      (job['niters_done']*job['npoints_done'])
        # if the time expected for this job is (much) larger than
        # the time spend in the previous iteration, and larger
        # than the expected time per job, split it
        if time_expected > max(2*job['time_spend']/job['combined'],time_per_job):
            # determine the number of splits needed
            nsplit=min(max(int(time_expected/max(2*job['time_spend']/job['combined'],time_per_job)),2),nb_submit)
            for i in range(1,nsplit+1):
                job_new=copy.copy(job)
                job_new['split']=i
                # each split carries 1/nsplit of the total weight
                job_new['wgt_mult']=1./float(nsplit)
                job_new['dirname']=job['dirname']+'_%i' % job_new['split']
                # the per-split accuracy can be looser by sqrt(nsplit)
                job_new['accuracy']=min(job['accuracy']*math.sqrt(float(nsplit)),0.1)
                if nsplit >= job['niters']:
                    job_new['npoints']=int(job['npoints']*job['niters']/nsplit)
                    job_new['niters']=1
                else:
                    job_new['npoints']=int(job['npoints']/nsplit)
                jobs_to_collect_new.append(job_new)
                jobs_to_run_new.append(job_new)
        else:
            jobs_to_collect_new.append(job)
            jobs_to_run_new.append(job)
    return jobs_to_run_new,jobs_to_collect_new
2400 2401
def check_the_need_to_split(self,jobs_to_run,jobs_to_collect):
    """Looks in the jobs_to_run to see if there is the need to split the
    event generation step. Updates jobs_to_run and
    jobs_to_collect to replace the split-job by its
    splits. Also removes jobs that do not need any events.
    """
    nevt_job=self.run_card['nevt_job']
    if nevt_job <= 0:
        # splitting disabled: only drop the channels without events
        jobs_to_run_new = [job for job in jobs_to_collect if job['nevents'] != 0]
        jobs_to_collect_new = list(jobs_to_run_new)
        return jobs_to_run_new, jobs_to_collect_new

    jobs_to_collect_new = list(jobs_to_collect)
    for job in jobs_to_run:
        nevents = job['nevents']
        if nevents == 0:
            # nothing to generate for this channel
            jobs_to_collect_new.remove(job)
            continue
        if nevents <= nevt_job:
            # small enough to run as a single job
            continue
        # too many events for one job: replace it by nsplit splits
        jobs_to_collect_new.remove(job)
        nsplit = int(nevents/nevt_job)
        if nevents % nevt_job != 0:
            nsplit += 1
        base_evts = int(nevents/nsplit)
        left_over = nevents % nsplit
        for i in range(1, nsplit+1):
            split_job = copy.copy(job)
            # the first 'left_over' splits take one extra event
            split_job['nevents'] = base_evts+1 if i <= left_over else base_evts
            split_job['wgt_frac'] = float(split_job['nevents'])/float(nevents)
            split_job['split'] = i
            split_job['dirname'] = job['dirname']+'_%i' % i
            jobs_to_collect_new.append(split_job)
    jobs_to_run_new = list(jobs_to_collect_new)
    return jobs_to_run_new, jobs_to_collect_new
2442 2443
def update_jobs_to_run(self,req_acc,step,jobs,fixed_order=True):
    """
    For (N)LO+PS: determines the number of events and/or the required
    accuracy per job.
    For fixed order: determines which jobs need higher precision and
    returns those with the newly requested precision.

    Reads the totals of the last step from self.cross_sect_dict
    (set by write_res_txt_file).  'step' is the step that just
    finished; step+1 is the one being prepared.
    """
    err=self.cross_sect_dict['errt']
    tot=self.cross_sect_dict['xsect']
    errABS=self.cross_sect_dict['erra']
    totABS=self.cross_sect_dict['xseca']
    jobs_new=[]
    if fixed_order:
        if req_acc == -1:
            # fixed npoints/niters mode: exactly two steps (grid setup + run)
            if step+1 == 1:
                npoints = self.run_card['npoints_FO']
                niters = self.run_card['niters_FO']
                for job in jobs:
                    job['mint_mode']=-1
                    job['niters']=niters
                    job['npoints']=npoints
                    jobs_new.append(job)
            elif step+1 == 2:
                pass
            elif step+1 > 2:
                raise aMCatNLOError('Cannot determine number of iterations and PS points '+
                                    'for integration step %i' % step )
        elif ( req_acc > 0 and err/abs(tot) > req_acc*1.2 ) or step <= 0:
            req_accABS=req_acc*abs(tot)/totABS # overal relative required accuracy on ABS Xsec.
            for job in jobs:
                job['mint_mode']=-1
                # Determine relative required accuracy on the ABS for this job
                job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
                # If already accurate enough, skip the job (except when doing the first
                # step for the iappl=2 run: we need to fill all the applgrid grids!)
                if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
                   and not (step==-1 and self.run_card['iappl'] == 2):
                    continue
                # Update the number of PS points based on errorABS, ncall and accuracy
                itmax_fl=job['niters_done']*math.pow(job['errorABS']/
                                                     (job['accuracy']*job['resultABS']),2)
                if itmax_fl <= 4.0 :
                    job['niters']=max(int(round(itmax_fl)),2)
                    job['npoints']=job['npoints_done']*2
                elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
                    job['niters']=4
                    job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
                else:
                    if itmax_fl > 100.0 : itmax_fl=50.0
                    job['niters']=int(round(math.sqrt(itmax_fl)))
                    job['npoints']=int(round(job['npoints_done']*itmax_fl/
                                             round(math.sqrt(itmax_fl))))*2
                # Add the job to the list of jobs that need to be run
                jobs_new.append(job)
        return jobs_new
    elif step+1 <= 2:
        nevents=self.run_card['nevents']
        # Total required accuracy for the upper bounding envelope
        if req_acc<0:
            req_acc2_inv=nevents
        else:
            req_acc2_inv=1/(req_acc*req_acc)
        if step+1 == 1 or step+1 == 2 :
            # determine the req. accuracy for each of the jobs for Mint-step = 1
            for job in jobs:
                accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
                job['accuracy']=accuracy
        if step+1 == 2:
            # Randomly (based on the relative ABS Xsec of the job) determine the
            # number of events each job needs to generate for MINT-step = 2.
            r=self.get_randinit_seed()
            random.seed(r)
            totevts=nevents
            for job in jobs:
                job['nevents'] = 0
            # draw one event at a time: pick a job with probability
            # proportional to its ABS cross section
            while totevts :
                target = random.random() * totABS
                crosssum = 0.
                i = 0
                while i<len(jobs) and crosssum < target:
                    job = jobs[i]
                    crosssum += job['resultABS']
                    i += 1
                totevts -= 1
                i -= 1
                jobs[i]['nevents'] += 1
        for job in jobs:
            job['mint_mode']=step+1 # next step
        return jobs
    else:
        # event generation (step 2) was the last MINT step: nothing left to run
        return []
2535 2536
def get_randinit_seed(self):
    """Return the random-number seed stored in SubProcesses/randinit."""
    with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit_file:
        # the file holds a single entry of the form "r=<seed>"
        seed_txt = randinit_file.read()
    return int(seed_txt[2:])
2543 2544
def append_the_results(self,jobs,integration_step):
    """Appends the results for each of the jobs in the job list.

    Reads res_<step>.dat (or res.dat for a negative step, used by the
    fixed-order 'only_generation' restart) from each job directory and
    stores the parsed numbers on the job dictionary.  Raises
    aMCatNLOError listing the log files of all jobs whose result file
    is missing.
    """
    failed_logs=[]
    for job in jobs:
        if integration_step >= 0 :
            res_path=pjoin(job['dirname'],'res_%s.dat' % integration_step)
        else:
            # should only be here when doing fixed order with the 'only_generation'
            # option equal to True. Take the results from the final run done.
            res_path=pjoin(job['dirname'],'res.dat')
        try:
            with open(res_path) as res_file:
                results=res_file.readline().split()
        except IOError:
            # remember the failure, keep collecting the other jobs
            failed_logs.append(pjoin(job['dirname'],'log.txt'))
            continue
        # file layout: ABS-xsec, ABS-err, xsec, err, niters, npoints, time
        job['resultABS']=float(results[0])
        job['errorABS']=float(results[1])
        job['result']=float(results[2])
        job['error']=float(results[3])
        job['niters_done']=int(results[4])
        job['npoints_done']=int(results[5])
        job['time_spend']=float(results[6])
        job['err_percABS'] = job['errorABS']/job['resultABS']*100.
        job['err_perc'] = job['error']/job['result']*100.
    if failed_logs:
        raise aMCatNLOError('An error occurred during the collection of results.\n' +
                'Please check the .log files inside the directories which failed:\n' +
                '\n'.join(failed_logs)+'\n')
2577 2578 2579
def write_res_txt_file(self,jobs,integration_step):
    """writes the res.txt files in the SubProcess dir

    Sorts the jobs by decreasing ABS error, writes a human-readable
    summary to SubProcesses/res_<step>.txt and returns the totals as
    a dict: {'xsect','xseca','errt','erra','randinit'}.
    """
    jobs.sort(key = lambda job: -job['errorABS'])
    content=[]
    content.append('\n\nCross section per integration channel:')
    for job in jobs:
        content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
    content.append('\n\nABS cross section per integration channel:')
    for job in jobs:
        content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
    # totals: cross sections weighted by wgt_frac; errors summed in
    # quadrature (each squared error carries a single factor of wgt_frac)
    totABS=0
    errABS=0
    tot=0
    err=0
    for job in jobs:
        totABS+= job['resultABS']*job['wgt_frac']
        errABS+= math.pow(job['errorABS'],2)*job['wgt_frac']
        tot+= job['result']*job['wgt_frac']
        err+= math.pow(job['error'],2)*job['wgt_frac']
    if jobs:
        content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
                       (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
                        tot, math.sqrt(err), math.sqrt(err)/tot *100.))
    with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file:
        res_file.write('\n'.join(content))
    randinit=self.get_randinit_seed()
    return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\
            'erra':math.sqrt(errABS),'randinit':randinit}
2608 2609
def collect_scale_pdf_info(self,options,jobs):
    """read the scale_pdf_dependence.dat files and collects there results

    Returns [] when no scale/PDF reweighting information was requested
    in the run_card; otherwise the combined information from
    pdf_scale_from_reweighting.
    """
    card = self.run_card
    # reweighting info only exists when scale/PDF variation was requested
    reweighting_active = (any(card['reweight_scale']) or any(card['reweight_PDF'])
                          or len(card['dynamical_scale_choice']) > 1
                          or len(card['lhaid']) > 1)
    if not reweighting_active:
        return []
    evt_files = [pjoin(job['dirname'],'scale_pdf_dependence.dat') for job in jobs]
    evt_wghts = [job['wgt_frac'] for job in jobs]
    return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
2622 2623
2624 - def combine_plots_FO(self,folder_name,jobs):
2625 """combines the plots and puts then in the Events/run* directory""" 2626 devnull = open(os.devnull, 'w') 2627 2628 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer': 2629 topfiles = [] 2630 for job in jobs: 2631 if job['dirname'].endswith('.top'): 2632 topfiles.append(job['dirname']) 2633 else: 2634 topfiles.append(pjoin(job['dirname'],'MADatNLO.top')) 2635 misc.call(['./combine_plots_FO.sh'] + topfiles, \ 2636 stdout=devnull, 2637 cwd=pjoin(self.me_dir, 'SubProcesses')) 2638 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'), 2639 pjoin(self.me_dir, 'Events', self.run_name)) 2640 logger.info('The results of this run and the TopDrawer file with the plots' + \ 2641 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2642 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu': 2643 out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO') 2644 self.combine_plots_HwU(jobs,out) 2645 try: 2646 misc.call(['gnuplot','MADatNLO.gnuplot'],\ 2647 stdout=devnull,stderr=devnull,\ 2648 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 2649 except Exception: 2650 pass 2651 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \ 2652 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2653 elif self.analyse_card['fo_analysis_format'].lower() == 'root': 2654 rootfiles = [] 2655 for job in jobs: 2656 if job['dirname'].endswith('.root'): 2657 rootfiles.append(job['dirname']) 2658 else: 2659 rootfiles.append(pjoin(job['dirname'],'MADatNLO.root')) 2660 misc.call(['./combine_root.sh'] + folder_name + rootfiles, \ 2661 stdout=devnull, 2662 cwd=pjoin(self.me_dir, 'SubProcesses')) 2663 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'), 2664 pjoin(self.me_dir, 'Events', self.run_name)) 2665 logger.info('The results of this run and the ROOT file with the plots' + \ 2666 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2667 elif 
self.analyse_card['fo_analysis_format'].lower() == 'lhe': 2668 self.combine_FO_lhe(jobs) 2669 logger.info('The results of this run and the LHE File (to be used for plotting only)' + \ 2670 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2671 else: 2672 logger.info('The results of this run' + \ 2673 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2674
2675 - def combine_FO_lhe(self,jobs):
2676 """combine the various lhe file generated in each directory. 2677 They are two steps: 2678 1) banner 2679 2) reweight each sample by the factor written at the end of each file 2680 3) concatenate each of the new files (gzip those). 2681 """ 2682 2683 logger.info('Combining lhe events for plotting analysis') 2684 start = time.time() 2685 self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']] 2686 output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 2687 if os.path.exists(output): 2688 os.remove(output) 2689 2690 2691 2692 2693 # 1. write the banner 2694 text = open(pjoin(jobs[0]['dirname'],'header.txt'),'r').read() 2695 i1, i2 = text.find('<initrwgt>'),text.find('</initrwgt>') 2696 self.banner['initrwgt'] = text[10+i1:i2] 2697 # 2698 # <init> 2699 # 2212 2212 6.500000e+03 6.500000e+03 0 0 247000 247000 -4 1 2700 # 8.430000e+02 2.132160e+00 8.430000e+02 1 2701 # <generator name='MadGraph5_aMC@NLO' version='2.5.2'>please cite 1405.0301 </generator> 2702 # </init> 2703 2704 cross = sum(j['result'] for j in jobs) 2705 error = math.sqrt(sum(j['error'] for j in jobs)) 2706 self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross) 2707 self.banner.write(output[:-3], close_tag=False) 2708 misc.gzip(output[:-3]) 2709 2710 2711 2712 fsock = lhe_parser.EventFile(output,'a') 2713 if 'nogrouping' in self.run_card['fo_lhe_postprocessing']: 2714 fsock.eventgroup = False 2715 else: 2716 fsock.eventgroup = True 2717 2718 if 'norandom' in self.run_card['fo_lhe_postprocessing']: 2719 for job in jobs: 2720 dirname = job['dirname'] 2721 #read last line 2722 lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline() 2723 nb_event, sumwgt, cross = [float(i) for i in lastline.split()] 2724 # get normalisation ratio 2725 ratio = cross/sumwgt 2726 lhe = lhe_parser.EventFile(pjoin(dirname,'events.lhe')) 2727 lhe.eventgroup = True # read the events by eventgroup 2728 for eventsgroup in lhe: 
2729 neweventsgroup = [] 2730 for i,event in enumerate(eventsgroup): 2731 event.rescale_weights(ratio) 2732 if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \ 2733 and event == neweventsgroup[-1]: 2734 neweventsgroup[-1].wgt += event.wgt 2735 for key in event.reweight_data: 2736 neweventsgroup[-1].reweight_data[key] += event.reweight_data[key] 2737 else: 2738 neweventsgroup.append(event) 2739 fsock.write_events(neweventsgroup) 2740 lhe.close() 2741 os.remove(pjoin(dirname,'events.lhe')) 2742 else: 2743 lhe = [] 2744 lenlhe = [] 2745 misc.sprint('need to combine %s event file' % len(jobs)) 2746 globallhe = lhe_parser.MultiEventFile() 2747 globallhe.eventgroup = True 2748 for job in jobs: 2749 dirname = job['dirname'] 2750 lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline() 2751 nb_event, sumwgt, cross = [float(i) for i in lastline.split()] 2752 lastlhe = globallhe.add(pjoin(dirname,'events.lhe'),cross, 0, cross, 2753 nb_event=int(nb_event), scale=cross/sumwgt) 2754 for eventsgroup in globallhe: 2755 neweventsgroup = [] 2756 for i,event in enumerate(eventsgroup): 2757 event.rescale_weights(event.sample_scale) 2758 if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \ 2759 and event == neweventsgroup[-1]: 2760 neweventsgroup[-1].wgt += event.wgt 2761 for key in event.reweight_data: 2762 neweventsgroup[-1].reweight_data[key] += event.reweight_data[key] 2763 else: 2764 neweventsgroup.append(event) 2765 fsock.write_events(neweventsgroup) 2766 globallhe.close() 2767 fsock.write('</LesHouchesEvents>\n') 2768 fsock.close() 2769 misc.sprint('combining lhe file done in ', time.time()-start) 2770 for job in jobs: 2771 dirname = job['dirname'] 2772 os.remove(pjoin(dirname,'events.lhe')) 2773 2774 2775 2776 misc.sprint('combining lhe file done in ', time.time()-start)
2777 2778 2779 2780 2781 2782
2783 - def combine_plots_HwU(self,jobs,out,normalisation=None):
2784 """Sums all the plots in the HwU format.""" 2785 logger.debug('Combining HwU plots.') 2786 2787 command = [] 2788 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py')) 2789 for job in jobs: 2790 if job['dirname'].endswith('.HwU'): 2791 command.append(job['dirname']) 2792 else: 2793 command.append(pjoin(job['dirname'],'MADatNLO.HwU')) 2794 command.append("--out="+out) 2795 command.append("--gnuplot") 2796 command.append("--band=[]") 2797 command.append("--lhapdf-config="+self.options['lhapdf']) 2798 if normalisation: 2799 command.append("--multiply="+(','.join([str(n) for n in normalisation]))) 2800 command.append("--sum") 2801 command.append("--keep_all_weights") 2802 command.append("--no_open") 2803 2804 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir) 2805 2806 while p.poll() is None: 2807 line = p.stdout.readline() 2808 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']): 2809 print line[:-1] 2810 elif __debug__ and line: 2811 logger.debug(line[:-1])
2812 2813
2814 - def applgrid_combine(self,cross,error,jobs):
2815 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories""" 2816 logger.debug('Combining APPLgrids \n') 2817 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'), 2818 'applgrid-combine') 2819 all_jobs=[] 2820 for job in jobs: 2821 all_jobs.append(job['dirname']) 2822 ngrids=len(all_jobs) 2823 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")]) 2824 for obs in range(0,nobs): 2825 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs] 2826 # combine APPLgrids from different channels for observable 'obs' 2827 if self.run_card["iappl"] == 1: 2828 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name, 2829 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir) 2830 elif self.run_card["iappl"] == 2: 2831 unc2_inv=pow(cross/error,2) 2832 unc2_inv_ngrids=pow(cross/error,2)*ngrids 2833 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events", 2834 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s', 2835 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir) 2836 for job in all_jobs: 2837 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root")) 2838 else: 2839 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2') 2840 # after combining, delete the original grids 2841 for ggdir in gdir: 2842 os.remove(ggdir)
2843 2844
    def applgrid_distribute(self, options, mode, p_dirs):
        """Distributes the APPLgrids ready to be filled by a second run of the code.

        options -- dict of run options; 'appl_start_grid' names the run whose
                   starting grids should be used (guessed from mtimes if absent).
        mode    -- integration-folder prefix ('all', 'born', ...); only
                   directories starting with '<mode>_G' receive the grids.
        p_dirs  -- list of SubProcesses/P* directory names.
        """
        # if no appl_start_grid argument given, guess it from the time stamps
        # of the starting grid files
        if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
            gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                               pjoin(self.me_dir,'Events'))

            time_stamps={}
            for root_file in gfiles:
                time_stamps[root_file]=os.path.getmtime(root_file)
            # the [-2] path component is the run directory of the newest grid.
            # NOTE(review): assumes '/' separators -- not Windows-safe.
            options['appl_start_grid']= \
                max(time_stamps.iterkeys(), key=(lambda key:
                    time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. '+\
                    'Guessing that start grid from run "%s" should be used.' \
                    % options['appl_start_grid'])

        if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # check that this dir exists and at least one grid file is there
            if not os.path.exists(pjoin(start_grid_dir,
                                        'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' % \
                        pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
            else:
                # collect every observable's starting grid in that run dir
                all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
                        start_grid_dir) if name.endswith("_starting_grid.root")]
                nobs =len(all_grids)
                gstring=" ".join(all_grids)
        # self.appl_start_grid may also have been set by a previous call;
        # only fail when it was never defined anywhere.
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined.'+
                                  'Please provide this information.')
        #copy the grid to all relevant directories
        for pdir in p_dirs:
            # integration channel dirs matching '<mode>_G*' inside this P* dir
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                      "SubProcesses",pdir)) if file.startswith(mode+'_G') and
                      os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    # the observable index is the third-to-last '_' field of
                    # 'aMCfast_obs_<N>_starting_grid.root'
                    obs=grid.split('_')[-3]
                    files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
                                        'grid_obs_'+obs+'_in.root'))
2889 2890 2891 2892
2893 - def collect_log_files(self, jobs, integration_step):
2894 """collect the log files and put them in a single, html-friendly file 2895 inside the Events/run_.../ directory""" 2896 log_file = pjoin(self.me_dir, 'Events', self.run_name, 2897 'alllogs_%d.html' % integration_step) 2898 outfile = open(log_file, 'w') 2899 2900 content = '' 2901 content += '<HTML><BODY>\n<font face="courier" size=2>' 2902 for job in jobs: 2903 # put an anchor 2904 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step) 2905 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace( 2906 pjoin(self.me_dir,'SubProcesses'),'')) 2907 # and put some nice header 2908 content += '<font color="red">\n' 2909 content += '<br>LOG file for integration channel %s, %s <br>' % \ 2910 (os.path.dirname(log).replace(pjoin(self.me_dir, 2911 'SubProcesses'), ''), 2912 integration_step) 2913 content += '</font>\n' 2914 #then just flush the content of the small log inside the big log 2915 #the PRE tag prints everything verbatim 2916 with open(log) as l: 2917 content += '<PRE>\n' + l.read() + '\n</PRE>' 2918 content +='<br>\n' 2919 outfile.write(content) 2920 content='' 2921 2922 outfile.write('</font>\n</BODY></HTML>\n') 2923 outfile.close()
2924 2925
2926 - def finalise_run_FO(self,folder_name,jobs):
2927 """Combine the plots and put the res*.txt files in the Events/run.../ folder.""" 2928 # Copy the res_*.txt files to the Events/run* folder 2929 res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses')) 2930 for res_file in res_files: 2931 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2932 # Collect the plots and put them in the Events/run* folder 2933 self.combine_plots_FO(folder_name,jobs) 2934 # If doing the applgrid-stuff, also combine those grids 2935 # and put those in the Events/run* folder 2936 if self.run_card['iappl'] != 0: 2937 cross=self.cross_sect_dict['xsect'] 2938 error=self.cross_sect_dict['errt'] 2939 self.applgrid_combine(cross,error,jobs)
2940 2941
2942 - def setup_cluster_or_multicore(self):
2943 """setup the number of cores for multicore, and the cluster-type for cluster runs""" 2944 if self.cluster_mode == 1: 2945 cluster_name = self.options['cluster_type'] 2946 try: 2947 self.cluster = cluster.from_name[cluster_name](**self.options) 2948 except KeyError: 2949 # Check if a plugin define this type of cluster 2950 # check for PLUGIN format 2951 cluster_class = misc.from_plugin_import(self.plugin_path, 2952 'new_cluster', cluster_name, 2953 info = 'cluster handling will be done with PLUGIN: %{plug}s' ) 2954 if cluster_class: 2955 self.cluster = cluster_class(**self.options) 2956 2957 if self.cluster_mode == 2: 2958 try: 2959 import multiprocessing 2960 if not self.nb_core: 2961 try: 2962 self.nb_core = int(self.options['nb_core']) 2963 except TypeError: 2964 self.nb_core = multiprocessing.cpu_count() 2965 logger.info('Using %d cores' % self.nb_core) 2966 except ImportError: 2967 self.nb_core = 1 2968 logger.warning('Impossible to detect the number of cores => Using One.\n'+ 2969 'Use set nb_core X in order to set this number and be able to'+ 2970 'run in multicore.') 2971 2972 self.cluster = cluster.MultiCore(**self.options)
2973 2974
2975 - def clean_previous_results(self,options,p_dirs,folder_name):
2976 """Clean previous results. 2977 o. If doing only the reweighting step, do not delete anything and return directlty. 2978 o. Always remove all the G*_* files (from split event generation). 2979 o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only.""" 2980 if options['reweightonly']: 2981 return 2982 if not options['only_generation']: 2983 self.update_status('Cleaning previous results', level=None) 2984 for dir in p_dirs: 2985 #find old folders to be removed 2986 for obj in folder_name: 2987 # list all the G* (or all_G* or born_G*) directories 2988 to_rm = [file for file in \ 2989 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 2990 if file.startswith(obj[:-1]) and \ 2991 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 2992 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 2993 # list all the G*_* directories (from split event generation) 2994 to_always_rm = [file for file in \ 2995 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 2996 if file.startswith(obj[:-1]) and 2997 '_' in file and not '_G' in file and \ 2998 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 2999 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 3000 3001 if not options['only_generation']: 3002 to_always_rm.extend(to_rm) 3003 if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')): 3004 to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')) 3005 files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm]) 3006 return
3007 3008
    def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
        """print a summary of the results contained in self.cross_sect_dict.
        step corresponds to the mintMC step, if =2 (i.e. after event generation)
        some additional infos are printed"""
        # NOTE(review): the mutable default scale_pdf_info=[] is only read,
        # never mutated, so the shared-default pitfall does not bite here.
        # find process name
        proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
        process = ''
        for line in proc_card_lines:
            if line.startswith('generate') or line.startswith('add process'):
                process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
        # human-readable beam labels, keyed by the run card's lpp values
        lpp = {0:'l', 1:'p', -1:'pbar', 2:'elastic photon from p', 3:'elastic photon from e'}
        if self.ninitial == 1:
            # decay process: no collider line; [:-3] strips the trailing ' ; '
            proc_info = '\n Process %s' % process[:-3]
        else:
            proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
                (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
                 self.run_card['ebeam1'], self.run_card['ebeam2'])

        # choose units/labels: decay widths (GeV) vs cross sections (pb)
        if self.ninitial == 1:
            self.cross_sect_dict['unit']='GeV'
            self.cross_sect_dict['xsec_string']='(Partial) decay width'
            self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
        else:
            self.cross_sect_dict['unit']='pb'
            self.cross_sect_dict['xsec_string']='Total cross section'
            self.cross_sect_dict['axsec_string']='Total abs(cross section)'
        if self.run_card['event_norm'].lower()=='bias':
            self.cross_sect_dict['xsec_string']+=', incl. bias (DO NOT USE)'

        # per-mode status labels indexed by mint step (0, 1, final)
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            status = ['Determining the number of unweighted events per channel',
                      'Updating the number of unweighted events per channel',
                      'Summary:']
            computed='(computed from LHE events)'
        elif mode in ['NLO', 'LO']:
            status = ['Results after grid setup:','Current results:',
                      'Final results and run summary:']
            computed='(computed from histogram information)'

        if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            # intermediate event-generation step: short report with the seed
            message = status[step] + '\n\n Intermediate results:' + \
                ('\n Random seed: %(randinit)d' + \
                 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
                 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
                 % self.cross_sect_dict
        elif mode in ['NLO','LO'] and not done:
            # intermediate fixed-order step: current cross section only
            if step == 0:
                message = '\n ' + status[0] + \
                    '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                    self.cross_sect_dict
            else:
                message = '\n ' + status[1] + \
                    '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                    self.cross_sect_dict
        else:
            # final summary block
            message = '\n --------------------------------------------------------------'
            message = message + \
                '\n ' + status[2] + proc_info
            if mode not in ['LO', 'NLO']:
                message = message + \
                    '\n Number of events generated: %s' % self.run_card['nevents']
            message = message + \
                '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                self.cross_sect_dict
            message = message + \
                '\n --------------------------------------------------------------'
            # scale/PDF variation summary; skipped for small event samples
            # where the reweighting statistics are unreliable
            if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
                if scale_pdf_info[0]:
                    # scale uncertainties
                    message = message + '\n Scale variation %s:' % computed
                    for s in scale_pdf_info[0]:
                        if s['unc']:
                            if self.run_card['ickkw'] != -1:
                                message = message + \
                                    ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
                                     '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
                            else:
                                message = message + \
                                    ('\n Soft and hard scale dependence (added in quadrature): '\
                                     '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s
                        else:
                            message = message + \
                                ('\n Dynamical_scale_choice %(label)i: '\
                                 '\n %(cen)8.3e pb') % s

                if scale_pdf_info[1]:
                    message = message + '\n PDF variation %s:' % computed
                    for p in scale_pdf_info[1]:
                        if p['unc']=='none':
                            message = message + \
                                ('\n %(name)s (central value only): '\
                                 '\n %(cen)8.3e pb') % p
                        elif p['unc']=='unknown':
                            message = message + \
                                ('\n %(name)s (%(size)s members; combination method unknown): '\
                                 '\n %(cen)8.3e pb') % p
                        else:
                            message = message + \
                                ('\n %(name)s (%(size)s members; using %(unc)s method): '\
                                 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
                # pdf uncertainties
                message = message + \
                    '\n --------------------------------------------------------------'

        # for intermediate steps, print and stop here
        if (mode in ['NLO', 'LO'] and not done) or \
           (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
            logger.info(message+'\n')
            return

        # Some advanced general statistics are shown in the debug message at the
        # end of the run
        # Make sure it never stops a run
        # Gather some basic statistics for the run and extracted from the log files.
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'NLO':
            log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files

        elif mode == 'LO':
            # no virtuals at LO: only born channel logs are scanned
            log_GV_files = ''
            all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
                                      pjoin(self.me_dir, 'SubProcesses'))
        else:
            raise aMCatNLOError, 'Running mode %s not supported.'%mode

        # statistics collection is best-effort: any failure is reported in the
        # debug message instead of aborting the run
        try:
            message, debug_msg = \
                self.compile_advanced_stats(log_GV_files, all_log_files, message)
        except Exception as e:
            debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
            err_string = StringIO.StringIO()
            traceback.print_exc(limit=4, file=err_string)
            debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
                %err_string.getvalue()

        logger.debug(debug_msg+'\n')
        logger.info(message+'\n')

        # Now copy relevant information in the Events/Run_<xxx> directory
        evt_path = pjoin(self.me_dir, 'Events', self.run_name)
        open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
        open(pjoin(evt_path, '.full_summary.txt'),
             'w').write(message+'\n\n'+debug_msg+'\n')

        self.archive_files(evt_path,mode)
3162
3163 - def archive_files(self, evt_path, mode):
3164 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 3165 the run.""" 3166 3167 files_to_arxiv = [pjoin('Cards','param_card.dat'), 3168 pjoin('Cards','MadLoopParams.dat'), 3169 pjoin('Cards','FKS_params.dat'), 3170 pjoin('Cards','run_card.dat'), 3171 pjoin('Subprocesses','setscales.f'), 3172 pjoin('Subprocesses','cuts.f')] 3173 3174 if mode in ['NLO', 'LO']: 3175 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 3176 3177 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 3178 os.mkdir(pjoin(evt_path,'RunMaterial')) 3179 3180 for path in files_to_arxiv: 3181 if os.path.isfile(pjoin(self.me_dir,path)): 3182 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 3183 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 3184 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
3185
3186 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
3187 """ This functions goes through the log files given in arguments and 3188 compiles statistics about MadLoop stability, virtual integration 3189 optimization and detection of potential error messages into a nice 3190 debug message to printed at the end of the run """ 3191 3192 def safe_float(str_float): 3193 try: 3194 return float(str_float) 3195 except ValueError: 3196 logger.debug('Could not convert the following float during'+ 3197 ' advanced statistics printout: %s'%str(str_float)) 3198 return -1.0
3199 3200 3201 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 3202 # > Errors is a list of tuples with this format (log_file,nErrors) 3203 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 3204 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 3205 3206 # ================================== 3207 # == MadLoop stability statistics == 3208 # ================================== 3209 3210 # Recuperate the fraction of unstable PS points found in the runs for 3211 # the virtuals 3212 UPS_stat_finder = re.compile( 3213 r"Satistics from MadLoop:.*"+\ 3214 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 3215 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 3216 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 3217 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 3218 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 3219 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 3220 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 3221 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 3222 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 3223 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 3224 3225 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 3226 1 : 'CutTools (double precision)', 3227 2 : 'PJFry++', 3228 3 : 'IREGI', 3229 4 : 'Golem95', 3230 5 : 'Samurai', 3231 6 : 'Ninja (double precision)', 3232 7 : 'COLLIER', 3233 8 : 'Ninja (quadruple precision)', 3234 9 : 'CutTools (quadruple precision)'} 3235 RetUnit_finder =re.compile( 3236 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 3237 #Unit 3238 3239 for gv_log in log_GV_files: 3240 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 3241 log=open(gv_log,'r').read() 3242 UPS_stats = re.search(UPS_stat_finder,log) 3243 for retunit_stats in re.finditer(RetUnit_finder, log): 3244 if channel_name not in stats['UPS'].keys(): 3245 stats['UPS'][channel_name] = [0]*10+[[0]*10] 3246 
stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 3247 += int(retunit_stats.group('n_occurences')) 3248 if not UPS_stats is None: 3249 try: 3250 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 3251 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 3252 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 3253 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 3254 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 3255 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 3256 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 3257 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 3258 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 3259 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 3260 except KeyError: 3261 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 3262 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 3263 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 3264 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 3265 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 3266 int(UPS_stats.group('n10')),[0]*10] 3267 debug_msg = "" 3268 if len(stats['UPS'].keys())>0: 3269 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 3270 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 3271 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 3272 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 3273 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 3274 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0) 3275 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 3276 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 3277 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 3278 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 3279 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 3280 
for i in range(10)] 3281 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 3282 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 3283 maxUPS = max(UPSfracs, key = lambda w: w[1]) 3284 3285 tmpStr = "" 3286 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 3287 tmpStr += '\n Stability unknown: %d'%nTotsun 3288 tmpStr += '\n Stable PS point: %d'%nTotsps 3289 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 3290 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 3291 tmpStr += '\n Only double precision used: %d'%nTotddp 3292 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 3293 tmpStr += '\n Initialization phase-space points: %d'%nTotini 3294 tmpStr += '\n Reduction methods used:' 3295 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 3296 unit_code_meaning.keys() if nTot1[i]>0] 3297 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 3298 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 3299 if nTot100 != 0: 3300 debug_msg += '\n Unknown return code (100): %d'%nTot100 3301 if nTot10 != 0: 3302 debug_msg += '\n Unknown return code (10): %d'%nTot10 3303 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 3304 not in unit_code_meaning.keys()) 3305 if nUnknownUnit != 0: 3306 debug_msg += '\n Unknown return code (1): %d'\ 3307 %nUnknownUnit 3308 3309 if maxUPS[1]>0.001: 3310 message += tmpStr 3311 message += '\n Total number of unstable PS point detected:'+\ 3312 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS) 3313 message += '\n Maximum fraction of UPS points in '+\ 3314 'channel %s (%4.2f%%)'%maxUPS 3315 message += '\n Please report this to the authors while '+\ 3316 'providing the file' 3317 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 3318 maxUPS[0],'UPS.log')) 3319 else: 3320 debug_msg += tmpStr 3321 3322 3323 # ==================================================== 3324 # == aMC@NLO virtual integration optimization stats == 3325 # 
==================================================== 3326 3327 virt_tricks_finder = re.compile( 3328 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 3329 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 3330 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 3331 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 3332 3333 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 3334 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 3335 3336 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 3337 3338 channel_contr_list = {} 3339 for gv_log in log_GV_files: 3340 logfile=open(gv_log,'r') 3341 log = logfile.read() 3342 logfile.close() 3343 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3344 vf_stats = None 3345 for vf_stats in re.finditer(virt_frac_finder, log): 3346 pass 3347 if not vf_stats is None: 3348 v_frac = safe_float(vf_stats.group('v_frac')) 3349 v_average = safe_float(vf_stats.group('v_average')) 3350 try: 3351 if v_frac < stats['virt_stats']['v_frac_min'][0]: 3352 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 3353 if v_frac > stats['virt_stats']['v_frac_max'][0]: 3354 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 3355 stats['virt_stats']['v_frac_avg'][0] += v_frac 3356 stats['virt_stats']['v_frac_avg'][1] += 1 3357 except KeyError: 3358 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 3359 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 3360 stats['virt_stats']['v_frac_avg']=[v_frac,1] 3361 3362 3363 ccontr_stats = None 3364 for ccontr_stats in re.finditer(channel_contr_finder, log): 3365 pass 3366 if not ccontr_stats is None: 3367 contrib = safe_float(ccontr_stats.group('v_contr')) 3368 try: 3369 if contrib>channel_contr_list[channel_name]: 3370 channel_contr_list[channel_name]=contrib 3371 except KeyError: 3372 
channel_contr_list[channel_name]=contrib 3373 3374 3375 # Now build the list of relevant virt log files to look for the maxima 3376 # of virt fractions and such. 3377 average_contrib = 0.0 3378 for value in channel_contr_list.values(): 3379 average_contrib += value 3380 if len(channel_contr_list.values()) !=0: 3381 average_contrib = average_contrib / len(channel_contr_list.values()) 3382 3383 relevant_log_GV_files = [] 3384 excluded_channels = set([]) 3385 all_channels = set([]) 3386 for log_file in log_GV_files: 3387 channel_name = '/'.join(log_file.split('/')[-3:-1]) 3388 all_channels.add(channel_name) 3389 try: 3390 if channel_contr_list[channel_name] > (0.1*average_contrib): 3391 relevant_log_GV_files.append(log_file) 3392 else: 3393 excluded_channels.add(channel_name) 3394 except KeyError: 3395 relevant_log_GV_files.append(log_file) 3396 3397 # Now we want to use the latest occurence of accumulated result in the log file 3398 for gv_log in relevant_log_GV_files: 3399 logfile=open(gv_log,'r') 3400 log = logfile.read() 3401 logfile.close() 3402 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3403 3404 vt_stats = None 3405 for vt_stats in re.finditer(virt_tricks_finder, log): 3406 pass 3407 if not vt_stats is None: 3408 vt_stats_group = vt_stats.groupdict() 3409 v_ratio = safe_float(vt_stats.group('v_ratio')) 3410 v_ratio_err = safe_float(vt_stats.group('v_ratio_err')) 3411 v_contr = safe_float(vt_stats.group('v_abs_contr')) 3412 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err')) 3413 try: 3414 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 3415 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 3416 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 3417 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 3418 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 3419 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 3420 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 3421 
stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 3422 if v_contr < stats['virt_stats']['v_contr_min'][0]: 3423 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 3424 if v_contr > stats['virt_stats']['v_contr_max'][0]: 3425 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 3426 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 3427 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 3428 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 3429 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 3430 except KeyError: 3431 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 3432 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 3433 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 3434 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 3435 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 3436 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 3437 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 3438 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 3439 3440 vf_stats = None 3441 for vf_stats in re.finditer(virt_frac_finder, log): 3442 pass 3443 if not vf_stats is None: 3444 v_frac = safe_float(vf_stats.group('v_frac')) 3445 v_average = safe_float(vf_stats.group('v_average')) 3446 try: 3447 if v_average < stats['virt_stats']['v_average_min'][0]: 3448 stats['virt_stats']['v_average_min']=(v_average,channel_name) 3449 if v_average > stats['virt_stats']['v_average_max'][0]: 3450 stats['virt_stats']['v_average_max']=(v_average,channel_name) 3451 stats['virt_stats']['v_average_avg'][0] += v_average 3452 stats['virt_stats']['v_average_avg'][1] += 1 3453 except KeyError: 3454 stats['virt_stats']['v_average_min']=[v_average,channel_name] 3455 stats['virt_stats']['v_average_max']=[v_average,channel_name] 3456 stats['virt_stats']['v_average_avg']=[v_average,1] 3457 3458 try: 3459 debug_msg += '\n\n 
Statistics on virtual integration optimization : ' 3460 3461 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 3462 %tuple(stats['virt_stats']['v_frac_max']) 3463 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 3464 %tuple(stats['virt_stats']['v_frac_min']) 3465 debug_msg += '\n Average virt fraction computed %.3f'\ 3466 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1])) 3467 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 3468 (len(excluded_channels),len(all_channels)) 3469 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 3470 %tuple(stats['virt_stats']['v_average_max']) 3471 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 3472 %tuple(stats['virt_stats']['v_ratio_max']) 3473 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 3474 %tuple(stats['virt_stats']['v_ratio_err_max']) 3475 debug_msg += tmpStr 3476 # After all it was decided that it is better not to alarm the user unecessarily 3477 # with such printout of the statistics. 3478 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 3479 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3480 # message += "\n Suspiciously large MC error in :" 3481 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3482 # message += tmpStr 3483 3484 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 3485 %tuple(stats['virt_stats']['v_contr_err_max']) 3486 debug_msg += tmpStr 3487 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 3488 # message += tmpStr 3489 3490 3491 except KeyError: 3492 debug_msg += '\n Could not find statistics on the integration optimization. 
' 3493 3494 # ======================================= 3495 # == aMC@NLO timing profile statistics == 3496 # ======================================= 3497 3498 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 3499 "(?P<time>[\d\+-Eed\.]*)\s*") 3500 3501 for logf in log_GV_files: 3502 logfile=open(logf,'r') 3503 log = logfile.read() 3504 logfile.close() 3505 channel_name = '/'.join(logf.split('/')[-3:-1]) 3506 mint = re.search(mint_search,logf) 3507 if not mint is None: 3508 channel_name = channel_name+' [step %s]'%mint.group('ID') 3509 3510 for time_stats in re.finditer(timing_stat_finder, log): 3511 try: 3512 stats['timings'][time_stats.group('name')][channel_name]+=\ 3513 safe_float(time_stats.group('time')) 3514 except KeyError: 3515 if time_stats.group('name') not in stats['timings'].keys(): 3516 stats['timings'][time_stats.group('name')] = {} 3517 stats['timings'][time_stats.group('name')][channel_name]=\ 3518 safe_float(time_stats.group('time')) 3519 3520 # useful inline function 3521 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 3522 try: 3523 totTimeList = [(time, chan) for chan, time in \ 3524 stats['timings']['Total'].items()] 3525 except KeyError: 3526 totTimeList = [] 3527 3528 totTimeList.sort() 3529 if len(totTimeList)>0: 3530 debug_msg += '\n\n Inclusive timing profile :' 3531 debug_msg += '\n Overall slowest channel %s (%s)'%\ 3532 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 3533 debug_msg += '\n Average channel running time %s'%\ 3534 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 3535 debug_msg += '\n Aggregated total running time %s'%\ 3536 Tstr(sum([el[0] for el in totTimeList])) 3537 else: 3538 debug_msg += '\n\n Inclusive timing profile non available.' 
3539 3540 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 3541 sum(stats['timings'][stat].values()), reverse=True) 3542 for name in sorted_keys: 3543 if name=='Total': 3544 continue 3545 if sum(stats['timings'][name].values())<=0.0: 3546 debug_msg += '\n Zero time record for %s.'%name 3547 continue 3548 try: 3549 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 3550 chan) for chan, time in stats['timings'][name].items()] 3551 except KeyError, ZeroDivisionError: 3552 debug_msg += '\n\n Timing profile for %s unavailable.'%name 3553 continue 3554 TimeList.sort() 3555 debug_msg += '\n Timing profile for <%s> :'%name 3556 try: 3557 debug_msg += '\n Overall fraction of time %.3f %%'%\ 3558 safe_float((100.0*(sum(stats['timings'][name].values())/ 3559 sum(stats['timings']['Total'].values())))) 3560 except KeyError, ZeroDivisionError: 3561 debug_msg += '\n Overall fraction of time unavailable.' 3562 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 3563 (TimeList[-1][0],TimeList[-1][1]) 3564 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 3565 (TimeList[0][0],TimeList[0][1]) 3566 3567 # ============================= 3568 # == log file eror detection == 3569 # ============================= 3570 3571 # Find the number of potential errors found in all log files 3572 # This re is a simple match on a case-insensitve 'error' but there is 3573 # also some veto added for excluding the sentence 3574 # "See Section 6 of paper for error calculation." 3575 # which appear in the header of lhapdf in the logs. 
3576 err_finder = re.compile(\ 3577 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 3578 for log in all_log_files: 3579 logfile=open(log,'r') 3580 nErrors = len(re.findall(err_finder, logfile.read())) 3581 logfile.close() 3582 if nErrors != 0: 3583 stats['Errors'].append((str(log),nErrors)) 3584 3585 nErrors = sum([err[1] for err in stats['Errors']],0) 3586 if nErrors != 0: 3587 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 3588 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 3589 'found in the following log file%s:'%('s' if \ 3590 len(stats['Errors'])>1 else '') 3591 for error in stats['Errors'][:3]: 3592 log_name = '/'.join(error[0].split('/')[-5:]) 3593 debug_msg += '\n > %d error%s in %s'%\ 3594 (error[1],'s' if error[1]>1 else '',log_name) 3595 if len(stats['Errors'])>3: 3596 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 3597 nRemainingLogs = len(stats['Errors'])-3 3598 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 3599 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 3600 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 3601 3602 return message, debug_msg 3603 3604
3605 - def reweight_and_collect_events(self, options, mode, nevents, event_norm):
3606 """this function calls the reweighting routines and creates the event file in the 3607 Event dir. Return the name of the event file created 3608 """ 3609 scale_pdf_info=[] 3610 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 3611 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1\ 3612 or self.run_card['store_rwgt_info']: 3613 scale_pdf_info = self.run_reweight(options['reweightonly']) 3614 self.update_status('Collecting events', level='parton', update_results=True) 3615 misc.compile(['collect_events'], 3616 cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile']) 3617 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'), 3618 stdin=subprocess.PIPE, 3619 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w')) 3620 if event_norm.lower() == 'sum': 3621 p.communicate(input = '1\n') 3622 elif event_norm.lower() == 'unity': 3623 p.communicate(input = '3\n') 3624 elif event_norm.lower() == 'bias': 3625 p.communicate(input = '0\n') 3626 else: 3627 p.communicate(input = '2\n') 3628 3629 #get filename from collect events 3630 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1] 3631 3632 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)): 3633 raise aMCatNLOError('An error occurred during event generation. ' + \ 3634 'The event file has not been created. 
Check collect_events.log') 3635 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 3636 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file) 3637 if not options['reweightonly']: 3638 self.print_summary(options, 2, mode, scale_pdf_info) 3639 res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses')) 3640 for res_file in res_files: 3641 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 3642 3643 logger.info('The %s file has been generated.\n' % (evt_file)) 3644 self.results.add_detail('nb_event', nevents) 3645 self.update_status('Events generated', level='parton', update_results=True) 3646 return evt_file[:-3]
3647 3648
3649 - def run_mcatnlo(self, evt_file, options):
3650 """runs mcatnlo on the generated event file, to produce showered-events 3651 """ 3652 logger.info('Preparing MCatNLO run') 3653 try: 3654 misc.gunzip(evt_file) 3655 except Exception: 3656 pass 3657 3658 self.banner = banner_mod.Banner(evt_file) 3659 shower = self.banner.get_detail('run_card', 'parton_shower').upper() 3660 3661 #check that the number of split event files divides the number of 3662 # events, otherwise set it to 1 3663 if int(self.banner.get_detail('run_card', 'nevents') / \ 3664 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \ 3665 != self.banner.get_detail('run_card', 'nevents'): 3666 logger.warning(\ 3667 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \ 3668 'Setting it to 1.') 3669 self.shower_card['nsplit_jobs'] = 1 3670 3671 # don't split jobs if the user asks to shower only a part of the events 3672 if self.shower_card['nevents'] > 0 and \ 3673 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \ 3674 self.shower_card['nsplit_jobs'] != 1: 3675 logger.warning(\ 3676 'Only a part of the events will be showered.\n' + \ 3677 'Setting nsplit_jobs in the shower_card to 1.') 3678 self.shower_card['nsplit_jobs'] = 1 3679 3680 self.banner_to_mcatnlo(evt_file) 3681 3682 # if fastjet has to be linked (in extralibs) then 3683 # add lib /include dirs for fastjet if fastjet-config is present on the 3684 # system, otherwise add fjcore to the files to combine 3685 if 'fastjet' in self.shower_card['extralibs']: 3686 #first, check that stdc++ is also linked 3687 if not 'stdc++' in self.shower_card['extralibs']: 3688 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS') 3689 self.shower_card['extralibs'] += ' stdc++' 3690 # then check if options[fastjet] corresponds to a valid fj installation 3691 try: 3692 #this is for a complete fj installation 3693 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \ 3694 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 3695 
output, error = p.communicate() 3696 #remove the line break from output (last character) 3697 output = output[:-1] 3698 # add lib/include paths 3699 if not pjoin(output, 'lib') in self.shower_card['extrapaths']: 3700 logger.warning('Linking FastJet: updating EXTRAPATHS') 3701 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib') 3702 if not pjoin(output, 'include') in self.shower_card['includepaths']: 3703 logger.warning('Linking FastJet: updating INCLUDEPATHS') 3704 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include') 3705 # to be changed in the fortran wrapper 3706 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ' 3707 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ' 3708 except Exception: 3709 logger.warning('Linking FastJet: using fjcore') 3710 # this is for FJcore, so no FJ library has to be linked 3711 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '') 3712 if not 'fjcore.o' in self.shower_card['analyse']: 3713 self.shower_card['analyse'] += ' fjcore.o' 3714 # to be changed in the fortran wrapper 3715 include_line = '#include "fjcore.hh"//INCLUDE_FJ' 3716 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ' 3717 # change the fortran wrapper with the correct namespaces/include 3718 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n') 3719 for line in fjwrapper_lines: 3720 if '//INCLUDE_FJ' in line: 3721 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line 3722 if '//NAMESPACE_FJ' in line: 3723 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line 3724 with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock: 3725 fsock.write('\n'.join(fjwrapper_lines) + '\n') 3726 3727 extrapaths = self.shower_card['extrapaths'].split() 3728 3729 # check that the path needed by HW++ and PY8 are set if one uses these shower 3730 if shower in ['HERWIGPP', 'PYTHIA8']: 3731 path_dict = 
{'HERWIGPP': ['hepmc_path', 3732 'thepeg_path', 3733 'hwpp_path'], 3734 'PYTHIA8': ['pythia8_path']} 3735 3736 if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]): 3737 raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \ 3738 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower]))) 3739 3740 if shower == 'HERWIGPP': 3741 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib')) 3742 self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib') 3743 3744 # add the HEPMC path of the pythia8 installation 3745 if shower == 'PYTHIA8': 3746 hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'], 3747 stdout = subprocess.PIPE).stdout.read().strip() 3748 #this gives all the flags, i.e. 3749 #-I/Path/to/HepMC/include -L/Path/to/HepMC/lib -lHepMC 3750 # we just need the path to the HepMC libraries 3751 extrapaths.append(hepmc.split()[1].replace('-L', '')) 3752 3753 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 3754 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib')) 3755 3756 # set the PATH for the dynamic libraries 3757 if sys.platform == 'darwin': 3758 ld_library_path = 'DYLD_LIBRARY_PATH' 3759 else: 3760 ld_library_path = 'LD_LIBRARY_PATH' 3761 if ld_library_path in os.environ.keys(): 3762 paths = os.environ[ld_library_path] 3763 else: 3764 paths = '' 3765 paths += ':' + ':'.join(extrapaths) 3766 os.putenv(ld_library_path, paths) 3767 3768 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat') 3769 self.shower_card.write_card(shower, shower_card_path) 3770 3771 # overwrite if shower_card_set.dat exists in MCatNLO 3772 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')): 3773 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'), 3774 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')) 3775 
3776 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log') 3777 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower') 3778 3779 3780 # libdl may be needded for pythia 82xx 3781 #if shower == 'PYTHIA8' and not \ 3782 # os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \ 3783 # 'dl' not in self.shower_card['extralibs'].split(): 3784 # # 'dl' has to be linked with the extralibs 3785 # self.shower_card['extralibs'] += ' dl' 3786 # logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \ 3787 # "It is needed for the correct running of PY8.2xx.\n" + \ 3788 # "If this library cannot be found on your system, a crash will occur.") 3789 3790 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'), 3791 stderr=open(mcatnlo_log, 'w'), 3792 cwd=pjoin(self.me_dir, 'MCatNLO'), 3793 close_fds=True) 3794 3795 exe = 'MCATNLO_%s_EXE' % shower 3796 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \ 3797 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')): 3798 print open(mcatnlo_log).read() 3799 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log) 3800 logger.info(' ... 
done') 3801 3802 # create an empty dir where to run 3803 count = 1 3804 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3805 (shower, count))): 3806 count += 1 3807 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3808 (shower, count)) 3809 os.mkdir(rundir) 3810 files.cp(shower_card_path, rundir) 3811 3812 #look for the event files (don't resplit if one asks for the 3813 # same number of event files as in the previous run) 3814 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3815 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']: 3816 logger.info('Cleaning old files and splitting the event file...') 3817 #clean the old files 3818 files.rm([f for f in event_files if 'events.lhe' not in f]) 3819 if self.shower_card['nsplit_jobs'] > 1: 3820 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile']) 3821 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')], 3822 stdin=subprocess.PIPE, 3823 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'), 3824 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3825 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs']) 3826 logger.info('Splitting done.') 3827 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3828 3829 event_files.sort() 3830 3831 self.update_status('Showering events...', level='shower') 3832 logger.info('(Running in %s)' % rundir) 3833 if shower != 'PYTHIA8': 3834 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir) 3835 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir) 3836 else: 3837 # special treatment for pythia8 3838 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir) 3839 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir) 3840 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx 3841 
files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir) 3842 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir) 3843 else: # this is PY8.2xxx 3844 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir) 3845 #link the hwpp exe in the rundir 3846 if shower == 'HERWIGPP': 3847 try: 3848 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')): 3849 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir) 3850 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')): 3851 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir) 3852 except Exception: 3853 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.') 3854 3855 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')): 3856 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir) 3857 3858 files.ln(evt_file, rundir, 'events.lhe') 3859 for i, f in enumerate(event_files): 3860 files.ln(f, rundir,'events_%d.lhe' % (i + 1)) 3861 3862 if not self.shower_card['analyse']: 3863 # an hep/hepmc file as output 3864 out_id = 'HEP' 3865 else: 3866 # one or more .top file(s) as output 3867 if "HwU" in self.shower_card['analyse']: 3868 out_id = 'HWU' 3869 else: 3870 out_id = 'TOP' 3871 3872 # write the executable 3873 with open(pjoin(rundir, 'shower.sh'), 'w') as fsock: 3874 # set the PATH for the dynamic libraries 3875 if sys.platform == 'darwin': 3876 ld_library_path = 'DYLD_LIBRARY_PATH' 3877 else: 3878 ld_library_path = 'LD_LIBRARY_PATH' 3879 fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \ 3880 % {'ld_library_path': ld_library_path, 3881 'extralibs': ':'.join(extrapaths)}) 3882 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')]) 3883 3884 if event_files: 3885 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \ 3886 for i in range(len(event_files))] 3887 else: 3888 arg_list = [[shower, 
out_id, self.run_name]] 3889 3890 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower') 3891 self.njobs = 1 3892 self.wait_for_complete('shower') 3893 3894 # now collect the results 3895 message = '' 3896 warning = '' 3897 to_gzip = [evt_file] 3898 if out_id == 'HEP': 3899 #copy the showered stdhep/hepmc file back in events 3900 if shower in ['PYTHIA8', 'HERWIGPP']: 3901 hep_format = 'HEPMC' 3902 ext = 'hepmc' 3903 else: 3904 hep_format = 'StdHEP' 3905 ext = 'hep' 3906 3907 hep_file = '%s_%s_0.%s.gz' % \ 3908 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext) 3909 count = 0 3910 3911 # find the first available name for the output: 3912 # check existing results with or without event splitting 3913 while os.path.exists(hep_file) or \ 3914 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) : 3915 count +=1 3916 hep_file = '%s_%s_%d.%s.gz' % \ 3917 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext) 3918 3919 try: 3920 if self.shower_card['nsplit_jobs'] == 1: 3921 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file) 3922 message = ('The file %s has been generated. 
\nIt contains showered' + \ 3923 ' and hadronized events in the %s format obtained' + \ 3924 ' showering the parton-level event file %s.gz with %s') % \ 3925 (hep_file, hep_format, evt_file, shower) 3926 else: 3927 hep_list = [] 3928 for i in range(self.shower_card['nsplit_jobs']): 3929 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext))) 3930 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1]) 3931 message = ('The following files have been generated:\n %s\nThey contain showered' + \ 3932 ' and hadronized events in the %s format obtained' + \ 3933 ' showering the (split) parton-level event file %s.gz with %s') % \ 3934 ('\n '.join(hep_list), hep_format, evt_file, shower) 3935 3936 except OSError, IOError: 3937 raise aMCatNLOError('No file has been generated, an error occurred.'+\ 3938 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log')) 3939 3940 # run the plot creation in a secure way 3941 if hep_format == 'StdHEP': 3942 try: 3943 self.do_plot('%s -f' % self.run_name) 3944 except Exception, error: 3945 logger.info("Fail to make the plot. 
Continue...") 3946 pass 3947 3948 elif out_id == 'TOP' or out_id == 'HWU': 3949 #copy the topdrawer or HwU file(s) back in events 3950 if out_id=='TOP': 3951 ext='top' 3952 elif out_id=='HWU': 3953 ext='HwU' 3954 topfiles = [] 3955 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)] 3956 for top_tar in top_tars: 3957 topfiles.extend(top_tar.getnames()) 3958 3959 # safety check 3960 if len(top_tars) != self.shower_card['nsplit_jobs']: 3961 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \ 3962 (self.shower_card['nsplit_jobs'], len(top_tars))) 3963 3964 # find the first available name for the output: 3965 # check existing results with or without event splitting 3966 filename = 'plot_%s_%d_' % (shower, 1) 3967 count = 1 3968 while os.path.exists(pjoin(self.me_dir, 'Events', 3969 self.run_name, '%s0.%s' % (filename,ext))) or \ 3970 os.path.exists(pjoin(self.me_dir, 'Events', 3971 self.run_name, '%s0__1.%s' % (filename,ext))): 3972 count += 1 3973 filename = 'plot_%s_%d_' % (shower, count) 3974 3975 if out_id=='TOP': 3976 hist_format='TopDrawer format' 3977 elif out_id=='HWU': 3978 hist_format='HwU and GnuPlot formats' 3979 3980 if not topfiles: 3981 # if no topfiles are found just warn the user 3982 warning = 'No .top file has been generated. 
For the results of your ' +\ 3983 'run, please check inside %s' % rundir 3984 elif self.shower_card['nsplit_jobs'] == 1: 3985 # only one job for the shower 3986 top_tars[0].extractall(path = rundir) 3987 plotfiles = [] 3988 for i, file in enumerate(topfiles): 3989 if out_id=='TOP': 3990 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3991 '%s%d.top' % (filename, i)) 3992 files.mv(pjoin(rundir, file), plotfile) 3993 elif out_id=='HWU': 3994 out=pjoin(self.me_dir,'Events', 3995 self.run_name,'%s%d'% (filename,i)) 3996 histos=[{'dirname':pjoin(rundir,file)}] 3997 self.combine_plots_HwU(histos,out) 3998 try: 3999 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\ 4000 stdout=os.open(os.devnull, os.O_RDWR),\ 4001 stderr=os.open(os.devnull, os.O_RDWR),\ 4002 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 4003 except Exception: 4004 pass 4005 plotfile=pjoin(self.me_dir,'Events',self.run_name, 4006 '%s%d.HwU'% (filename,i)) 4007 plotfiles.append(plotfile) 4008 4009 ffiles = 'files' 4010 have = 'have' 4011 if len(plotfiles) == 1: 4012 ffiles = 'file' 4013 have = 'has' 4014 4015 message = ('The %s %s %s been generated, with histograms in the' + \ 4016 ' %s, obtained by showering the parton-level' + \ 4017 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \ 4018 hist_format, evt_file, shower) 4019 else: 4020 # many jobs for the shower have been run 4021 topfiles_set = set(topfiles) 4022 plotfiles = [] 4023 for j, top_tar in enumerate(top_tars): 4024 top_tar.extractall(path = rundir) 4025 for i, file in enumerate(topfiles_set): 4026 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 4027 '%s%d__%d.%s' % (filename, i, j + 1,ext)) 4028 files.mv(pjoin(rundir, file), plotfile) 4029 plotfiles.append(plotfile) 4030 4031 # check if the user asked to combine the .top into a single file 4032 if self.shower_card['combine_td']: 4033 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities')) 4034 4035 if self.banner.get('run_card', 
'event_norm').lower() == 'sum': 4036 norm = 1. 4037 else: 4038 norm = 1./float(self.shower_card['nsplit_jobs']) 4039 4040 plotfiles2 = [] 4041 for i, file in enumerate(topfiles_set): 4042 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \ 4043 for j in range(self.shower_card['nsplit_jobs'])] 4044 if out_id=='TOP': 4045 infile="%d\n%s\n%s\n" % \ 4046 (self.shower_card['nsplit_jobs'], 4047 '\n'.join(filelist), 4048 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs'])) 4049 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')], 4050 stdin=subprocess.PIPE, 4051 stdout=os.open(os.devnull, os.O_RDWR), 4052 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 4053 p.communicate(input = infile) 4054 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'), 4055 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 4056 elif out_id=='HWU': 4057 out=pjoin(self.me_dir,'Events', 4058 self.run_name,'%s%d'% (filename,i)) 4059 histos=[] 4060 norms=[] 4061 for plotfile in plotfiles: 4062 histos.append({'dirname':plotfile}) 4063 norms.append(norm) 4064 self.combine_plots_HwU(histos,out,normalisation=norms) 4065 try: 4066 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\ 4067 stdout=os.open(os.devnull, os.O_RDWR),\ 4068 stderr=os.open(os.devnull, os.O_RDWR),\ 4069 cwd=pjoin(self.me_dir, 'Events',self.run_name)) 4070 except Exception: 4071 pass 4072 4073 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext))) 4074 tar = tarfile.open( 4075 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz') 4076 for f in filelist: 4077 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f) 4078 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist]) 4079 4080 tar.close() 4081 4082 ffiles = 'files' 4083 have = 'have' 4084 if len(plotfiles2) == 1: 4085 ffiles = 'file' 4086 have = 'has' 4087 4088 message = ('The %s %s %s been generated, with histograms in the' 
+ \ 4089 ' %s, obtained by showering the parton-level' + \ 4090 ' file %s.gz with %s.\n' + \ 4091 'The files from the different shower ' + \ 4092 'jobs (before combining them) can be found inside %s.') % \ 4093 (ffiles, ', '.join(plotfiles2), have, hist_format,\ 4094 evt_file, shower, 4095 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2])) 4096 4097 else: 4098 message = ('The following files have been generated:\n %s\n' + \ 4099 'They contain histograms in the' + \ 4100 ' %s, obtained by showering the parton-level' + \ 4101 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \ 4102 hist_format, evt_file, shower) 4103 4104 # Now arxiv the shower card used if RunMaterial is present 4105 run_dir_path = pjoin(rundir, self.run_name) 4106 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')): 4107 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path) 4108 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'), 4109 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\ 4110 %(shower, count))) 4111 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], 4112 cwd=run_dir_path) 4113 shutil.rmtree(pjoin(run_dir_path,'RunMaterial')) 4114 # end of the run, gzip files and print out the message/warning 4115 for f in to_gzip: 4116 misc.gzip(f) 4117 if message: 4118 logger.info(message) 4119 if warning: 4120 logger.warning(warning) 4121 4122 self.update_status('Run complete', level='shower', update_results=True)
4123 4124 ############################################################################
    def set_run_name(self, name, tag=None, level='parton', reload_card=False,**opts):
        """Define the run name, the run_tag, the banner and the results.

        name:        the run name to switch to (may be the current one)
        tag:         explicit run tag to use; if None an available one is
                     chosen when the level requires a new tag
        level:       stage of the run ('parton', 'shower', 'delphes',
                     'madanalysis5_hadron', 'plot', 'pythia')
        reload_card: if True, re-read Cards/run_card.dat even when the run
                     name is unchanged
        opts:        ignored; accepted for interface compatibility

        Returns None for level 'parton' (or when the name is unchanged); for
        other levels, returns the tag of the previous run that holds the data
        required by this tag/run (may return None if no such tag exists).
        """

        # map: level -> tags that, if already present in the last results entry,
        # force switching to a fresh tag (a new run at this level would
        # otherwise overwrite them)
        upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'],
                       'shower': ['shower','delphes','madanalysis5_hadron'],
                       'delphes':['delphes'],
                       'madanalysis5_hadron':['madanalysis5_hadron'],
                       'plot':[]}

        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            #check if we need to change the tag
            if tag:
                # explicit tag given: adopt it and register the run
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                # pick a fresh tag only if the last entry already has data
                # at a level this run would clobber
                for tag in upgrade_tag[level]:
                    if getattr(self.results[self.run_name][-1], tag):
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return # Nothing to do anymore

        # save/clean previous run
        if self.run_name:
            self.store_result()
        # store new name
        self.run_name = name

        # Read run_card
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # First call for this run -> set the banner
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if 'mgruncard' in self.banner:
            # prefer the run_card stored in the recovered banner
            self.run_card = self.banner.charge_card('run_card')
        if tag:
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level =='parton':
            pass # No results yet, so current tag is fine
        elif not self.run_name in self.results:
            #This is only for case when you want to trick the interface
            logger.warning('Trying to run data on unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            for tag in upgrade_tag[level]:

                if getattr(self.results[self.run_name][-1], tag):
                    # LEVEL is already define in the last tag -> need to switch tag
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # We can add the results to the current run
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag # ensure that run_tag is correct


        # register the run in the results database (reuse the entry if the
        # run already exists and no new tag was created)
        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # Return the tag of the previous run having the required data for this
        # tag/run to working wel.
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            # walk the run's tags from newest to oldest and return the first
            # one that has pythia-level data
            for i in range(-1,-len(self.results[self.run_name])-1,-1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']
4213 4214
4215 - def store_result(self):
4216 """ tar the pythia results. This is done when we are quite sure that 4217 the pythia output will not be use anymore """ 4218 4219 if not self.run_name: 4220 return 4221 4222 self.results.save() 4223 4224 if not self.to_store: 4225 return 4226 4227 if 'event' in self.to_store: 4228 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')): 4229 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')): 4230 self.update_status('gzipping output file: events.lhe', level='parton', error=True) 4231 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4232 else: 4233 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4234 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')): 4235 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe')) 4236 4237 4238 tag = self.run_card['run_tag'] 4239 4240 self.to_store = []
4241 4242 4243 ############################################################################
4244 - def get_Gdir(self, Pdir=None):
4245 """get the list of Gdirectory if not yet saved.""" 4246 4247 if hasattr(self, "Gdirs"): 4248 if self.me_dir in self.Gdirs: 4249 if Pdir is None: 4250 return sum(self.Gdirs.values()) 4251 else: 4252 return self.Gdirs[Pdir] 4253 4254 Pdirs = self.get_Pdir() 4255 Gdirs = {self.me_dir:[]} 4256 for P in Pdirs: 4257 Gdirs[P] = [pjoin(P,G) for G in os.listdir(P) if G.startswith('G') and 4258 os.path.isdir(pjoin(P,G))] 4259 4260 self.Gdirs = Gdirs 4261 return self.getGdir(Pdir)
4262 4263
4264 - def get_init_dict(self, evt_file):
4265 """reads the info in the init block and returns them in a dictionary""" 4266 ev_file = open(evt_file) 4267 init = "" 4268 found = False 4269 while True: 4270 line = ev_file.readline() 4271 if "<init>" in line: 4272 found = True 4273 elif found and not line.startswith('#'): 4274 init += line 4275 if "</init>" in line or "<event>" in line: 4276 break 4277 ev_file.close() 4278 4279 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 4280 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 4281 # these are not included (so far) in the init_dict 4282 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 4283 4284 init_dict = {} 4285 init_dict['idbmup1'] = int(init.split()[0]) 4286 init_dict['idbmup2'] = int(init.split()[1]) 4287 init_dict['ebmup1'] = float(init.split()[2]) 4288 init_dict['ebmup2'] = float(init.split()[3]) 4289 init_dict['pdfgup1'] = int(init.split()[4]) 4290 init_dict['pdfgup2'] = int(init.split()[5]) 4291 init_dict['pdfsup1'] = int(init.split()[6]) 4292 init_dict['pdfsup2'] = int(init.split()[7]) 4293 init_dict['idwtup'] = int(init.split()[8]) 4294 init_dict['nprup'] = int(init.split()[9]) 4295 4296 return init_dict
4297 4298
4299 - def banner_to_mcatnlo(self, evt_file):
4300 """creates the mcatnlo input script using the values set in the header of the event_file. 4301 It also checks if the lhapdf library is used""" 4302 4303 shower = self.banner.get('run_card', 'parton_shower').upper() 4304 pdlabel = self.banner.get('run_card', 'pdlabel') 4305 itry = 0 4306 nevents = self.shower_card['nevents'] 4307 init_dict = self.get_init_dict(evt_file) 4308 4309 if nevents < 0 or \ 4310 nevents > self.banner.get_detail('run_card', 'nevents'): 4311 nevents = self.banner.get_detail('run_card', 'nevents') 4312 4313 nevents = nevents / self.shower_card['nsplit_jobs'] 4314 4315 mcmass_dict = {} 4316 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]: 4317 pdg = int(line.split()[0]) 4318 mass = float(line.split()[1]) 4319 mcmass_dict[pdg] = mass 4320 4321 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1]) 4322 content += 'NEVENTS=%d\n' % nevents 4323 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\ 4324 self.shower_card['nsplit_jobs']) 4325 content += 'MCMODE=%s\n' % shower 4326 content += 'PDLABEL=%s\n' % pdlabel 4327 4328 try: 4329 aewm1 = self.banner.get_detail('param_card', 'sminputs', 1).value 4330 raise KeyError 4331 except KeyError: 4332 mod = self.get_model() 4333 if not hasattr(mod, 'parameter_dict'): 4334 from models import model_reader 4335 mod = model_reader.ModelReader(mod) 4336 mod.set_parameters_and_couplings(self.banner.param_card) 4337 aewm1 = 0 4338 for key in ['aEWM1', 'AEWM1', 'aEWm1', 'aewm1']: 4339 if key in mod['parameter_dict']: 4340 aewm1 = mod['parameter_dict'][key] 4341 break 4342 elif 'mdl_%s' % key in mod['parameter_dict']: 4343 aewm1 = mod['parameter_dict']['mod_%s' % key] 4344 break 4345 else: 4346 for key in ['aEW', 'AEW', 'aEw', 'aew']: 4347 if key in mod['parameter_dict']: 4348 aewm1 = 1./mod['parameter_dict'][key] 4349 break 4350 elif 'mdl_%s' % key in mod['parameter_dict']: 4351 aewm1 = 1./mod['parameter_dict']['mod_%s' % key] 4352 break 4353 4354 
content += 'ALPHAEW=%s\n' % aewm1 4355 #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid') 4356 #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4357 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value 4358 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value 4359 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value 4360 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value 4361 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value 4362 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value 4363 try: 4364 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value 4365 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value 4366 except KeyError: 4367 content += 'HGGMASS=120.\n' 4368 content += 'HGGWIDTH=0.00575308848\n' 4369 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1') 4370 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2') 4371 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1') 4372 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2') 4373 content += 'DMASS=%s\n' % mcmass_dict[1] 4374 content += 'UMASS=%s\n' % mcmass_dict[2] 4375 content += 'SMASS=%s\n' % mcmass_dict[3] 4376 content += 'CMASS=%s\n' % mcmass_dict[4] 4377 content += 'BMASS=%s\n' % mcmass_dict[5] 4378 try: 4379 content += 'EMASS=%s\n' % mcmass_dict[11] 4380 content += 'MUMASS=%s\n' % mcmass_dict[13] 4381 content += 'TAUMASS=%s\n' % mcmass_dict[15] 4382 except KeyError: 4383 # this is for backward compatibility 4384 mcmass_lines = [l for l in \ 4385 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper()) 4386 ).read().split('\n') if l] 4387 new_mcmass_dict = {} 4388 for l in mcmass_lines: 4389 key, val = l.split('=') 4390 
new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip() 4391 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)'] 4392 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)'] 4393 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)'] 4394 4395 content += 'GMASS=%s\n' % mcmass_dict[21] 4396 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower() 4397 # check if need to link lhapdf 4398 if int(self.shower_card['pdfcode']) > 1 or \ 4399 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \ 4400 shower=='HERWIGPP' : 4401 # Use LHAPDF (should be correctly installed, because 4402 # either events were already generated with them, or the 4403 # user explicitly gives an LHAPDF number in the 4404 # shower_card). 4405 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 4406 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 4407 stdout = subprocess.PIPE).stdout.read().strip() 4408 content += 'LHAPDFPATH=%s\n' % lhapdfpath 4409 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4410 if self.shower_card['pdfcode']==0: 4411 lhaid_list = '' 4412 content += '' 4413 elif self.shower_card['pdfcode']==1: 4414 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 4415 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4416 else: 4417 lhaid_list = [abs(int(self.shower_card['pdfcode']))] 4418 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode'] 4419 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4420 elif int(self.shower_card['pdfcode'])==1 or \ 4421 int(self.shower_card['pdfcode'])==-1 and True: 4422 # Try to use LHAPDF because user wants to use the same PDF 4423 # as was used for the event generation. However, for the 4424 # event generation, LHAPDF was not used, so non-trivial to 4425 # see if if LHAPDF is available with the corresponding PDF 4426 # set. If not found, give a warning and use build-in PDF 4427 # set instead. 
4428 try: 4429 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 4430 stdout = subprocess.PIPE).stdout.read().strip() 4431 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 4432 content += 'LHAPDFPATH=%s\n' % lhapdfpath 4433 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4434 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 4435 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4436 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4437 except Exception: 4438 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\ 4439 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\ 4440 ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\ 4441 ' same set as was used in the event generation install LHAPDF and set the path using'+\ 4442 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell') 4443 content += 'LHAPDFPATH=\n' 4444 content += 'PDFCODE=0\n' 4445 else: 4446 content += 'LHAPDFPATH=\n' 4447 content += 'PDFCODE=0\n' 4448 4449 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw') 4450 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj') 4451 # add the pythia8/hwpp path(s) 4452 if self.options['pythia8_path']: 4453 content+='PY8PATH=%s\n' % self.options['pythia8_path'] 4454 if self.options['hwpp_path']: 4455 content+='HWPPPATH=%s\n' % self.options['hwpp_path'] 4456 if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']: 4457 content+='THEPEGPATH=%s\n' % self.options['thepeg_path'] 4458 if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']: 4459 content+='HEPMCPATH=%s\n' % self.options['hepmc_path'] 4460 4461 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w') 4462 output.write(content) 4463 output.close() 4464 return shower
4465 4466
4467 - def run_reweight(self, only):
4468 """runs the reweight_xsec_events executables on each sub-event file generated 4469 to compute on the fly scale and/or PDF uncertainities""" 4470 logger.info(' Doing reweight') 4471 4472 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted') 4473 # if only doing reweight, copy back the nevents_unweighted file 4474 if only: 4475 if os.path.exists(nev_unw + '.orig'): 4476 files.cp(nev_unw + '.orig', nev_unw) 4477 else: 4478 raise aMCatNLOError('Cannot find event file information') 4479 4480 #read the nevents_unweighted file to get the list of event files 4481 file = open(nev_unw) 4482 lines = file.read().split('\n') 4483 file.close() 4484 # make copy of the original nevent_unweighted file 4485 files.cp(nev_unw, nev_unw + '.orig') 4486 # loop over lines (all but the last one whith is empty) and check that the 4487 # number of events is not 0 4488 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0'] 4489 evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0'] 4490 if self.run_card['event_norm'].lower()=='bias' and self.run_card['nevents'] != 0: 4491 evt_wghts[:]=[1./float(self.run_card['nevents']) for wgt in evt_wghts] 4492 #prepare the job_dict 4493 job_dict = {} 4494 exe = 'reweight_xsec_events.local' 4495 for i, evt_file in enumerate(evt_files): 4496 path, evt = os.path.split(evt_file) 4497 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \ 4498 pjoin(self.me_dir, 'SubProcesses', path)) 4499 job_dict[path] = [exe] 4500 4501 self.run_all(job_dict, [[evt, '1']], 'Running reweight') 4502 4503 #check that the new event files are complete 4504 for evt_file in evt_files: 4505 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \ 4506 pjoin(self.me_dir, 'SubProcesses', evt_file)], \ 4507 stdout = subprocess.PIPE).stdout.read().strip() 4508 if last_line != "</LesHouchesEvents>": 4509 raise aMCatNLOError('An error occurred during reweight. 
Check the' + \ 4510 '\'reweight_xsec_events.output\' files inside the ' + \ 4511 '\'SubProcesses/P*/G*/ directories for details') 4512 4513 #update file name in nevents_unweighted 4514 newfile = open(nev_unw, 'w') 4515 for line in lines: 4516 if line: 4517 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n') 4518 newfile.close() 4519 4520 return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
4521
    def pdf_scale_from_reweighting(self, evt_files,evt_wghts):
        """This function takes the files with the scale and pdf values
        written by the reweight_xsec_events.f code
        (P*/G*/scale_pdf_dependence.dat) and computes the overall
        scale and PDF uncertainty (the latter is computed using the
        Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
        and returns it in percents.  The expected format of the file
        is: n_scales xsec_scale_central xsec_scale1 ...  n_pdf
        xsec_pdf0 xsec_pdf1 ....

        Returns [scale_info, pdf_info]: two lists of dictionaries, one entry
        per dynamical scale choice / per PDF set.
        """
        # accumulate the weighted cross sections over all event files;
        # scales[j]/pdfs[j] is the list of variation cross sections for the
        # j-th dynamical scale choice / j-th PDF set
        scales=[]
        pdfs=[]
        for i,evt_file in enumerate(evt_files):
            path, evt=os.path.split(evt_file)
            with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
                data_line=f.readline()
                if "scale variations:" in data_line:
                    for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
                        data_line = f.readline().split()
                        # Fortran 'D' exponents -> python 'E'; weight by the
                        # per-file event weight before summing over files
                        scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            # add to the running sum for this scale choice
                            scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
                        except IndexError:
                            # first event file: initialise the sum
                            scales+=[scales_this]
                    data_line=f.readline()
                if "pdf variations:" in data_line:
                    for j,pdf in enumerate(self.run_card['lhaid']):
                        data_line = f.readline().split()
                        pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
                        except IndexError:
                            pdfs+=[pdfs_this]

        # get the scale uncertainty in percent
        scale_info=[]
        for j,scale in enumerate(scales):
            # entry 0 is the central-scale cross section
            s_cen=scale[0]
            if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
                # max and min of the full envelope
                s_max=(max(scale)/s_cen-1)*100
                s_min=(1-min(scale)/s_cen)*100
                # ren and fac scale dependence added in quadrature
                ren_var=[]
                fac_var=[]
                for i in range(len(self.run_card['rw_rscale'])):
                    ren_var.append(scale[i]-s_cen) # central fac scale
                for i in range(len(self.run_card['rw_fscale'])):
                    fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen) # central ren scale
                s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
                s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
                s_size=len(scale)
            else:
                s_max=0.0
                s_min=0.0
                s_max_q=0.0
                s_min_q=0.0
                s_size=len(scale)
            scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
                               'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
                               'label':self.run_card['dynamical_scale_choice'][j], \
                               'unc':self.run_card['reweight_scale'][j]})

        # check if we can use LHAPDF to compute the PDF uncertainty
        if any(self.run_card['reweight_pdf']):
            use_lhapdf=False
            lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\
                                           stdout=subprocess.PIPE).stdout.read().strip()

            # probe <libdir>/python*/site-packages for the lhapdf module
            try:
                candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \
                            if os.path.isdir(pjoin(lhapdf_libdir,dirname))]
            except OSError:
                candidates=[]
            for candidate in candidates:
                if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')):
                    sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages'))
                    try:
                        import lhapdf
                        use_lhapdf=True
                        break
                    except ImportError:
                        # not importable from here: undo the path change
                        sys.path.pop(0)
                        continue

            # same probe for the lib64 variant of the library directory
            if not use_lhapdf:
                try:
                    candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \
                                if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))]
                except OSError:
                    candidates=[]
                for candidate in candidates:
                    if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')):
                        sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages'))
                        try:
                            import lhapdf
                            use_lhapdf=True
                            break
                        except ImportError:
                            sys.path.pop(0)
                            continue

            # last resort: hope lhapdf is already on the default python path
            if not use_lhapdf:
                try:
                    import lhapdf
                    use_lhapdf=True
                except ImportError:
                    logger.warning("Failed to access python version of LHAPDF: "\
                                   "cannot compute PDF uncertainty from the "\
                                   "weights in the events. The weights in the LHE " \
                                   "event files will still cover all PDF set members, "\
                                   "but there will be no PDF uncertainty printed in the run summary. \n "\
                                   "If the python interface to LHAPDF is available on your system, try "\
                                   "adding its location to the PYTHONPATH environment variable and the"\
                                   "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
                    use_lhapdf=False

        # turn off lhapdf printing any messages
        if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)

        pdf_info=[]
        for j,pdfset in enumerate(pdfs):
            # entry 0 is the central-member cross section
            p_cen=pdfset[0]
            if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
                if use_lhapdf:
                    pdfsetname=self.run_card['lhapdfsetname'][j]
                    try:
                        # let LHAPDF combine the member cross sections into
                        # a central value and asymmetric errors
                        p=lhapdf.getPDFSet(pdfsetname)
                        ep=p.uncertainty(pdfset,-1)
                        p_cen=ep.central
                        p_min=abs(ep.errminus/p_cen)*100
                        p_max=abs(ep.errplus/p_cen)*100
                        p_type=p.errorType
                        p_size=p.size
                        p_conf=p.errorConfLevel
                    except:
                        logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                        p_min=0.0
                        p_max=0.0
                        p_type='unknown'
                        p_conf='unknown'
                        p_size=len(pdfset)
                else:
                    p_min=0.0
                    p_max=0.0
                    p_type='unknown'
                    p_conf='unknown'
                    p_size=len(pdfset)
                    pdfsetname=self.run_card['lhaid'][j]
            else:
                p_min=0.0
                p_max=0.0
                p_type='none'
                p_conf='unknown'
                p_size=len(pdfset)
                pdfsetname=self.run_card['lhaid'][j]
            pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
                             'unc':p_type, 'name':pdfsetname, 'size':p_size, \
                             'label':self.run_card['lhaid'][j], 'conf':p_conf})

        scale_pdf_info=[scale_info,pdf_info]
        return scale_pdf_info
4684 4685
4686 - def wait_for_complete(self, run_type):
4687 """this function waits for jobs on cluster to complete their run.""" 4688 starttime = time.time() 4689 #logger.info(' Waiting for submitted jobs to complete') 4690 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 4691 starttime=starttime, level='parton', update_results=True) 4692 try: 4693 self.cluster.wait(self.me_dir, update_status) 4694 except: 4695 self.cluster.remove() 4696 raise
4697
4698 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4699 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 4700 self.ijob = 0 4701 if run_type != 'shower': 4702 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 4703 for args in arg_list: 4704 for Pdir, jobs in job_dict.items(): 4705 for job in jobs: 4706 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 4707 if self.cluster_mode == 2: 4708 time.sleep(1) # security to allow all jobs to be launched 4709 else: 4710 self.njobs = len(arg_list) 4711 for args in arg_list: 4712 [(cwd, exe)] = job_dict.items() 4713 self.run_exe(exe, args, run_type, cwd) 4714 4715 self.wait_for_complete(run_type)
4716 4717 4718
4719 - def check_event_files(self,jobs):
4720 """check the integrity of the event files after splitting, and resubmit 4721 those which are not nicely terminated""" 4722 jobs_to_resubmit = [] 4723 for job in jobs: 4724 last_line = '' 4725 try: 4726 last_line = subprocess.Popen( 4727 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \ 4728 stdout = subprocess.PIPE).stdout.read().strip() 4729 except IOError: 4730 pass 4731 if last_line != "</LesHouchesEvents>": 4732 jobs_to_resubmit.append(job) 4733 self.njobs = 0 4734 if jobs_to_resubmit: 4735 run_type = 'Resubmitting broken jobs' 4736 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 4737 for job in jobs_to_resubmit: 4738 logger.debug('Resubmitting ' + job['dirname'] + '\n') 4739 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4740 4741
4742 - def find_jobs_to_split(self, pdir, job, arg):
4743 """looks into the nevents_unweighed_splitted file to check how many 4744 split jobs are needed for this (pdir, job). arg is F, B or V""" 4745 # find the number of the integration channel 4746 splittings = [] 4747 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 4748 pattern = re.compile('for i in (\d+) ; do') 4749 match = re.search(pattern, ajob) 4750 channel = match.groups()[0] 4751 # then open the nevents_unweighted_splitted file and look for the 4752 # number of splittings to be done 4753 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 4754 # This skips the channels with zero events, because they are 4755 # not of the form GFXX_YY, but simply GFXX 4756 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 4757 pjoin(pdir, 'G%s%s' % (arg,channel))) 4758 matches = re.findall(pattern, nevents_file) 4759 for m in matches: 4760 splittings.append(m) 4761 return splittings
4762 4763
4764 - def run_exe(self, exe, args, run_type, cwd=None):
4765 """this basic function launch locally/on cluster exe with args as argument. 4766 """ 4767 # first test that exe exists: 4768 execpath = None 4769 if cwd and os.path.exists(pjoin(cwd, exe)): 4770 execpath = pjoin(cwd, exe) 4771 elif not cwd and os.path.exists(exe): 4772 execpath = exe 4773 else: 4774 raise aMCatNLOError('Cannot find executable %s in %s' \ 4775 % (exe, os.getcwd())) 4776 # check that the executable has exec permissions 4777 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK): 4778 subprocess.call(['chmod', '+x', exe], cwd=cwd) 4779 # finally run it 4780 if self.cluster_mode == 0: 4781 #this is for the serial run 4782 misc.call(['./'+exe] + args, cwd=cwd) 4783 self.ijob += 1 4784 self.update_status((max([self.njobs - self.ijob - 1, 0]), 4785 min([1, self.njobs - self.ijob]), 4786 self.ijob, run_type), level='parton') 4787 4788 #this is for the cluster/multicore run 4789 elif 'reweight' in exe: 4790 # a reweight run 4791 # Find the correct PDF input file 4792 input_files, output_files = [], [] 4793 pdfinput = self.get_pdf_input_filename() 4794 if os.path.exists(pdfinput): 4795 input_files.append(pdfinput) 4796 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events')) 4797 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat')) 4798 input_files.append(args[0]) 4799 output_files.append('%s.rwgt' % os.path.basename(args[0])) 4800 output_files.append('reweight_xsec_events.output') 4801 output_files.append('scale_pdf_dependence.dat') 4802 4803 return self.cluster.submit2(exe, args, cwd=cwd, 4804 input_files=input_files, output_files=output_files, 4805 required_output=output_files) 4806 4807 elif 'ajob' in exe: 4808 # the 'standard' amcatnlo job 4809 # check if args is a list of string 4810 if type(args[0]) == str: 4811 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args) 4812 #submitting 4813 self.cluster.submit2(exe, args, cwd=cwd, 4814 input_files=input_files, 
output_files=output_files, 4815 required_output=required_output) 4816 4817 # # keep track of folders and arguments for splitted evt gen 4818 # subfolder=output_files[-1].split('/')[0] 4819 # if len(args) == 4 and '_' in subfolder: 4820 # self.split_folders[pjoin(cwd,subfolder)] = [exe] + args 4821 4822 elif 'shower' in exe: 4823 # a shower job 4824 # args are [shower, output(HEP or TOP), run_name] 4825 # cwd is the shower rundir, where the executable are found 4826 input_files, output_files = [], [] 4827 shower = args[0] 4828 # the input files 4829 if shower == 'PYTHIA8': 4830 input_files.append(pjoin(cwd, 'Pythia8.exe')) 4831 input_files.append(pjoin(cwd, 'Pythia8.cmd')) 4832 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 4833 input_files.append(pjoin(cwd, 'config.sh')) 4834 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc')) 4835 else: 4836 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc')) 4837 else: 4838 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower)) 4839 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower)) 4840 if shower == 'HERWIGPP': 4841 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')): 4842 input_files.append(pjoin(cwd, 'Herwig++')) 4843 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')): 4844 input_files.append(pjoin(cwd, 'Herwig')) 4845 input_files.append(pjoin(cwd, 'HepMCFortran.so')) 4846 if len(args) == 3: 4847 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')): 4848 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')) 4849 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')): 4850 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')) 4851 else: 4852 raise aMCatNLOError, 'Event file not present in %s' % \ 4853 pjoin(self.me_dir, 'Events', self.run_name) 4854 else: 4855 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3])) 
4856 # the output files 4857 if len(args) == 3: 4858 output_files.append('mcatnlo_run.log') 4859 else: 4860 output_files.append('mcatnlo_run_%s.log' % args[3]) 4861 if args[1] == 'HEP': 4862 if len(args) == 3: 4863 fname = 'events' 4864 else: 4865 fname = 'events_%s' % args[3] 4866 if shower in ['PYTHIA8', 'HERWIGPP']: 4867 output_files.append(fname + '.hepmc.gz') 4868 else: 4869 output_files.append(fname + '.hep.gz') 4870 elif args[1] == 'TOP' or args[1] == 'HWU': 4871 if len(args) == 3: 4872 fname = 'histfile' 4873 else: 4874 fname = 'histfile_%s' % args[3] 4875 output_files.append(fname + '.tar') 4876 else: 4877 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1] 4878 #submitting 4879 self.cluster.submit2(exe, args, cwd=cwd, 4880 input_files=input_files, output_files=output_files) 4881 4882 else: 4883 return self.cluster.submit(exe, args, cwd=cwd)
4884
    def getIO_ajob(self,exe,cwd, args):
        """Build the input-file, output-file and required-output manifests
        for an 'ajob' cluster submission.

        args[1] selects the mode ('born'/'all' for fixed-order MINT,
        'F'/'B' for MINTMC event generation), args[0] is the channel,
        args[2] the split index and args[3] the MINT step.  Returns
        (input_files, output_files, required_output, args).
        """
        # use local disk if possible => need to stands what are the
        # input/output files
        output_files = []
        required_output = []
        # files every job needs, independent of the mode
        input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
                       pjoin(cwd, 'symfact.dat'),
                       pjoin(cwd, 'iproc.dat'),
                       pjoin(cwd, 'initial_states_map.dat'),
                       pjoin(cwd, 'configs_and_props_info.dat'),
                       pjoin(cwd, 'leshouche_info.dat'),
                       pjoin(cwd, 'FKS_params.dat')]

        # For GoSam interface, we must copy the SLHA card as well
        if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
            input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        if os.path.exists(pjoin(cwd,'nevents.tar')):
            input_files.append(pjoin(cwd,'nevents.tar'))

        if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
            input_files.append(pjoin(cwd, 'OLE_order.olc'))

        # File for the loop (might not be present if MadLoop is not used)
        if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
                            cluster.need_transfer(self.options):
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
        elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
                            cluster.need_transfer(self.options):
            # pack the resources directory once so it can be shipped
            tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
                                                             dereference=True)
            tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
            tf.close()
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))

        if args[1] == 'born' or args[1] == 'all':
            # MADEVENT MINT FO MODE
            input_files.append(pjoin(cwd, 'madevent_mintFO'))
            # channel directory name: <mode>_G<channel>[_<split>]
            if args[2] == '0':
                current = '%s_G%s' % (args[1],args[0])
            else:
                current = '%s_G%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)

            required_output.append('%s/results.dat' % current)
            required_output.append('%s/res_%s.dat' % (current,args[3]))
            required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
            required_output.append('%s/mint_grids' % current)
            required_output.append('%s/grid.MC_integer' % current)
            if args[3] != '0':
                required_output.append('%s/scale_pdf_dependence.dat' % current)

        elif args[1] == 'F' or args[1] == 'B':
            # MINTMC MODE
            input_files.append(pjoin(cwd, 'madevent_mintMC'))

            # channel directory name: G<F|B><channel>[_<split>]
            if args[2] == '0':
                current = 'G%s%s' % (args[1],args[0])
            else:
                current = 'G%s%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)
            if args[2] > '0':
                # this is for the split event generation
                output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
                required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))

            else:
                required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
                if args[3] in ['0','1']:
                    required_output.append('%s/results.dat' % current)
                if args[3] == '1':
                    output_files.append('%s/results.dat' % current)

        else:
            raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))

        #Find the correct PDF input file
        pdfinput = self.get_pdf_input_filename()
        if os.path.exists(pdfinput):
            input_files.append(pdfinput)
        return input_files, output_files, required_output, args
4971 4972
4973 - def compile(self, mode, options):
4974 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 4975 specified in mode""" 4976 4977 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 4978 4979 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 4980 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 4981 4982 self.get_characteristics(pjoin(self.me_dir, 4983 'SubProcesses', 'proc_characteristics')) 4984 4985 #define a bunch of log files 4986 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 4987 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 4988 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 4989 test_log = pjoin(self.me_dir, 'test.log') 4990 4991 # environmental variables to be included in make_opts 4992 self.make_opts_var = {} 4993 if self.proc_characteristics['has_loops'] and \ 4994 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4995 self.make_opts_var['madloop'] = 'true' 4996 4997 self.update_status('Compiling the code', level=None, update_results=True) 4998 4999 libdir = pjoin(self.me_dir, 'lib') 5000 sourcedir = pjoin(self.me_dir, 'Source') 5001 5002 #clean files 5003 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 5004 #define which executable/tests to compile 5005 if '+' in mode: 5006 mode = mode.split('+')[0] 5007 if mode in ['NLO', 'LO']: 5008 exe = 'madevent_mintFO' 5009 tests = ['test_ME'] 5010 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 5011 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 5012 exe = 'madevent_mintMC' 5013 tests = ['test_ME', 'test_MC'] 5014 # write an analyse_opts with a dummy analysis so that compilation goes through 5015 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock: 5016 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n') 5017 5018 #directory where to compile exe 5019 p_dirs = [d for d in \ 5020 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 
5021 # create param_card.inc and run_card.inc 5022 self.do_treatcards('', amcatnlo=True, mode=mode) 5023 # if --nocompile option is specified, check here that all exes exists. 5024 # If they exists, return 5025 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \ 5026 for p_dir in p_dirs]) and options['nocompile']: 5027 return 5028 5029 # rm links to lhapdflib/ PDFsets if exist 5030 if os.path.exists(pjoin(libdir, 'PDFsets')): 5031 files.rm(pjoin(libdir, 'PDFsets')) 5032 5033 # read the run_card to find if lhapdf is used or not 5034 if self.run_card['pdlabel'] == 'lhapdf' and \ 5035 (self.banner.get_detail('run_card', 'lpp1') != 0 or \ 5036 self.banner.get_detail('run_card', 'lpp2') != 0): 5037 5038 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs]) 5039 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 5040 lhaid_list = self.run_card['lhaid'] 5041 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 5042 5043 else: 5044 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']: 5045 logger.info('Using built-in libraries for PDFs') 5046 5047 self.make_opts_var['lhapdf'] = "" 5048 5049 # read the run_card to find if applgrid is used or not 5050 if self.run_card['iappl'] != 0: 5051 self.make_opts_var['applgrid'] = 'True' 5052 # check versions of applgrid and amcfast 5053 for code in ['applgrid','amcfast']: 5054 try: 5055 p = subprocess.Popen([self.options[code], '--version'], \ 5056 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 5057 except OSError: 5058 raise aMCatNLOError(('No valid %s installation found. \n' + \ 5059 'Please set the path to %s-config by using \n' + \ 5060 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code)) 5061 else: 5062 output, _ = p.communicate() 5063 if code is 'applgrid' and output < '1.4.63': 5064 raise aMCatNLOError('Version of APPLgrid is too old. 
Use 1.4.69 or later.'\ 5065 +' You are using %s',output) 5066 if code is 'amcfast' and output < '1.1.1': 5067 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\ 5068 +' You are using %s',output) 5069 5070 # set-up the Source/make_opts with the correct applgrid-config file 5071 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 5072 % (self.options['amcfast'],self.options['applgrid']) 5073 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 5074 text_out=[] 5075 for line in text: 5076 if line.strip().startswith('APPLLIBS=$'): 5077 line=appllibs 5078 text_out.append(line) 5079 with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock: 5080 fsock.writelines(text_out) 5081 else: 5082 self.make_opts_var['applgrid'] = "" 5083 5084 if 'fastjet' in self.options.keys() and self.options['fastjet']: 5085 self.make_opts_var['fastjet_config'] = self.options['fastjet'] 5086 5087 # add the make_opts_var to make_opts 5088 self.update_make_opts() 5089 5090 # make Source 5091 self.update_status('Compiling source...', level=None) 5092 misc.compile(['clean4pdf'], cwd = sourcedir) 5093 misc.compile(cwd = sourcedir) 5094 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 5095 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 5096 and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 5097 and os.path.exists(pjoin(libdir, 'libpdf.a')): 5098 logger.info(' ...done, continuing with P* directories') 5099 else: 5100 raise aMCatNLOError('Compilation failed') 5101 5102 # make StdHep (only necessary with MG option output_dependencies='internal') 5103 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 5104 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 5105 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 5106 if os.path.exists(pjoin(sourcedir,'StdHEP')): 5107 logger.info('Compiling StdHEP (can take a couple of minutes) ...') 5108 try: 5109 misc.compile(['StdHEP'], cwd = 
sourcedir) 5110 except Exception as error: 5111 logger.debug(str(error)) 5112 logger.warning("StdHep failed to compiled. This forbids to run NLO+PS with PY6 and Herwig6") 5113 logger.info("details on the compilation error are available if the code is run with --debug flag") 5114 else: 5115 logger.info(' ...done.') 5116 else: 5117 logger.warning('Could not compile StdHEP because its'+\ 5118 ' source directory could not be found in the SOURCE folder.\n'+\ 5119 " Check the MG5_aMC option 'output_dependencies'.\n"+\ 5120 " This will prevent the use of HERWIG6/Pythia6 shower.") 5121 5122 5123 # make CutTools (only necessary with MG option output_dependencies='internal') 5124 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 5125 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 5126 if os.path.exists(pjoin(sourcedir,'CutTools')): 5127 logger.info('Compiling CutTools (can take a couple of minutes) ...') 5128 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 5129 logger.info(' ...done.') 5130 else: 5131 raise aMCatNLOError('Could not compile CutTools because its'+\ 5132 ' source directory could not be found in the SOURCE folder.\n'+\ 5133 " Check the MG5_aMC option 'output_dependencies.'") 5134 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 5135 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 5136 raise aMCatNLOError('CutTools compilation failed.') 5137 5138 # Verify compatibility between current compiler and the one which was 5139 # used when last compiling CutTools (if specified). 
5140 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 5141 libdir, 'libcts.a')))),'compiler_version.log') 5142 if os.path.exists(compiler_log_path): 5143 compiler_version_used = open(compiler_log_path,'r').read() 5144 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 5145 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 5146 if os.path.exists(pjoin(sourcedir,'CutTools')): 5147 logger.info('CutTools was compiled with a different fortran'+\ 5148 ' compiler. Re-compiling it now...') 5149 misc.compile(['cleanCT'], cwd = sourcedir) 5150 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 5151 logger.info(' ...done.') 5152 else: 5153 raise aMCatNLOError("CutTools installation in %s"\ 5154 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 5155 " seems to have been compiled with a different compiler than"+\ 5156 " the one specified in MG5_aMC. Please recompile CutTools.") 5157 5158 # make IREGI (only necessary with MG option output_dependencies='internal') 5159 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 5160 and os.path.exists(pjoin(sourcedir,'IREGI')): 5161 logger.info('Compiling IREGI (can take a couple of minutes) ...') 5162 misc.compile(['IREGI'], cwd = sourcedir) 5163 logger.info(' ...done.') 5164 5165 if os.path.exists(pjoin(libdir, 'libiregi.a')): 5166 # Verify compatibility between current compiler and the one which was 5167 # used when last compiling IREGI (if specified). 5168 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 5169 libdir, 'libiregi.a')))),'compiler_version.log') 5170 if os.path.exists(compiler_log_path): 5171 compiler_version_used = open(compiler_log_path,'r').read() 5172 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 5173 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 5174 if os.path.exists(pjoin(sourcedir,'IREGI')): 5175 logger.info('IREGI was compiled with a different fortran'+\ 5176 ' compiler. 
Re-compiling it now...') 5177 misc.compile(['cleanIR'], cwd = sourcedir) 5178 misc.compile(['IREGI'], cwd = sourcedir) 5179 logger.info(' ...done.') 5180 else: 5181 raise aMCatNLOError("IREGI installation in %s"\ 5182 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 5183 " seems to have been compiled with a different compiler than"+\ 5184 " the one specified in MG5_aMC. Please recompile IREGI.") 5185 5186 # check if MadLoop virtuals have been generated 5187 if self.proc_characteristics['has_loops'] and \ 5188 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 5189 if mode in ['NLO', 'aMC@NLO', 'noshower']: 5190 tests.append('check_poles') 5191 5192 # make and run tests (if asked for), gensym and make madevent in each dir 5193 self.update_status('Compiling directories...', level=None) 5194 5195 for test in tests: 5196 self.write_test_input(test) 5197 5198 try: 5199 import multiprocessing 5200 if not self.nb_core: 5201 try: 5202 self.nb_core = int(self.options['nb_core']) 5203 except TypeError: 5204 self.nb_core = multiprocessing.cpu_count() 5205 except ImportError: 5206 self.nb_core = 1 5207 5208 compile_options = copy.copy(self.options) 5209 compile_options['nb_core'] = self.nb_core 5210 compile_cluster = cluster.MultiCore(**compile_options) 5211 logger.info('Compiling on %d cores' % self.nb_core) 5212 5213 update_status = lambda i, r, f: self.donothing(i,r,f) 5214 for p_dir in p_dirs: 5215 compile_cluster.submit(prog = compile_dir, 5216 argument = [self.me_dir, p_dir, mode, options, 5217 tests, exe, self.options['run_mode']]) 5218 try: 5219 compile_cluster.wait(self.me_dir, update_status) 5220 except Exception, error: 5221 logger.warning("Fail to compile the Subprocesses") 5222 if __debug__: 5223 raise 5224 compile_cluster.remove() 5225 self.do_quit('') 5226 5227 logger.info('Checking test output:') 5228 for p_dir in p_dirs: 5229 logger.info(p_dir) 5230 for test in tests: 5231 logger.info(' Result for %s:' % test) 5232 5233 this_dir = pjoin(self.me_dir, 
'SubProcesses', p_dir) 5234 #check that none of the tests failed 5235 self.check_tests(test, this_dir)
5236 5237
5238 - def donothing(*args):
5239 pass
5240 5241
5242 - def check_tests(self, test, dir):
5243 """just call the correct parser for the test log. 5244 Skip check_poles for LOonly folders""" 5245 if test in ['test_ME', 'test_MC']: 5246 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test)) 5247 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')): 5248 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
5249 5250
5251 - def parse_test_mx_log(self, log):
5252 """read and parse the test_ME/MC.log file""" 5253 content = open(log).read() 5254 if 'FAILED' in content: 5255 logger.info('Output of the failing test:\n'+content[:-1],'$MG:BOLD') 5256 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \ 5257 'Please check that widths of final state particles (e.g. top) have been' + \ 5258 ' set to 0 in the param_card.dat.') 5259 else: 5260 lines = [l for l in content.split('\n') if 'PASSED' in l] 5261 logger.info(' Passed.') 5262 logger.debug('\n'+'\n'.join(lines))
5263 5264
5265 - def parse_check_poles_log(self, log):
5266 """reads and parse the check_poles.log file""" 5267 content = open(log).read() 5268 npass = 0 5269 nfail = 0 5270 for line in content.split('\n'): 5271 if 'PASSED' in line: 5272 npass +=1 5273 tolerance = float(line.split()[1]) 5274 if 'FAILED' in line: 5275 nfail +=1 5276 tolerance = float(line.split()[1]) 5277 5278 if nfail + npass == 0: 5279 logger.warning('0 points have been tried') 5280 return 5281 5282 if float(nfail)/float(nfail+npass) > 0.1: 5283 raise aMCatNLOError('Poles do not cancel, run cannot continue') 5284 else: 5285 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \ 5286 %(npass, nfail+npass, tolerance))
5287 5288
5289 - def write_test_input(self, test):
5290 """write the input files to run test_ME/MC or check_poles""" 5291 if test in ['test_ME', 'test_MC']: 5292 content = "-2 -2\n" #generate randomly energy/angle 5293 content+= "100 100\n" #run 100 points for soft and collinear tests 5294 content+= "0\n" #all FKS configs 5295 content+= '\n'.join(["-1"] * 50) #random diagram (=first diagram) 5296 elif test == 'check_poles': 5297 content = '20 \n -1\n' 5298 5299 file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w') 5300 if test == 'test_MC': 5301 shower = self.run_card['parton_shower'] 5302 header = "1 \n %s\n 1 -0.1\n-1 -0.1\n" % shower 5303 file.write(header + content) 5304 elif test == 'test_ME': 5305 header = "2 \n" 5306 file.write(header + content) 5307 else: 5308 file.write(content) 5309 file.close()
    # Class used to pose the interactive run-configuration switch question
    # (consumed below via ask_class=self.action_switcher).
    action_switcher = AskRunNLO
    ############################################################################
5314 - def ask_run_configuration(self, mode, options, switch={}):
5315 """Ask the question when launching generate_events/multi_run""" 5316 5317 if 'parton' not in options: 5318 options['parton'] = False 5319 if 'reweightonly' not in options: 5320 options['reweightonly'] = False 5321 5322 if mode == 'auto': 5323 mode = None 5324 if not mode and (options['parton'] or options['reweightonly']): 5325 mode = 'noshower' 5326 5327 passing_cmd = [] 5328 for key,value in switch.keys(): 5329 passing_cmd.append('%s=%s' % (key,value)) 5330 5331 if 'do_reweight' in options and options['do_reweight']: 5332 passing_cmd.append('reweight=ON') 5333 if 'do_madspin' in options and options['do_madspin']: 5334 passing_cmd.append('madspin=ON') 5335 5336 force = self.force 5337 if mode == 'onlyshower': 5338 passing_cmd.append('onlyshower') 5339 force = True 5340 elif mode: 5341 passing_cmd.append(mode) 5342 5343 switch, cmd_switch = self.ask('', '0', [], ask_class = self.action_switcher, 5344 mode=mode, force=force, 5345 first_cmd=passing_cmd, 5346 return_instance=True) 5347 5348 if 'mode' in switch: 5349 mode = switch['mode'] 5350 5351 #assign the mode depending of the switch 5352 if not mode or mode == 'auto': 5353 if switch['order'] == 'LO': 5354 if switch['runshower']: 5355 mode = 'aMC@LO' 5356 elif switch['fixed_order'] == 'ON': 5357 mode = 'LO' 5358 else: 5359 mode = 'noshowerLO' 5360 elif switch['order'] == 'NLO': 5361 if switch['runshower']: 5362 mode = 'aMC@NLO' 5363 elif switch['fixed_order'] == 'ON': 5364 mode = 'NLO' 5365 else: 5366 mode = 'noshower' 5367 logger.info('will run in mode: %s' % mode) 5368 5369 if mode == 'noshower': 5370 if switch['shower'] == 'OFF': 5371 logger.warning("""You have chosen not to run a parton shower. 5372 NLO events without showering are NOT physical. 5373 Please, shower the LesHouches events before using them for physics analyses. 5374 You have to choose NOW which parton-shower you WILL use and specify it in the run_card.""") 5375 else: 5376 logger.info("""Your Parton-shower choice is not available for running. 
5377 The events will be generated for the associated Parton-Shower. 5378 Remember that NLO events without showering are NOT physical.""", '$MG:BOLD') 5379 5380 5381 # specify the cards which are needed for this run. 5382 cards = ['param_card.dat', 'run_card.dat'] 5383 ignore = [] 5384 if mode in ['LO', 'NLO']: 5385 options['parton'] = True 5386 ignore = ['shower_card.dat', 'madspin_card.dat'] 5387 cards.append('FO_analyse_card.dat') 5388 else: 5389 if switch['madspin'] != 'OFF': 5390 cards.append('madspin_card.dat') 5391 if switch['reweight'] != 'OFF': 5392 cards.append('reweight_card.dat') 5393 if switch['madanalysis'] in ['HADRON', 'ON']: 5394 cards.append('madanalysis5_hadron_card.dat') 5395 if 'aMC@' in mode: 5396 cards.append('shower_card.dat') 5397 if mode == 'onlyshower': 5398 cards = ['shower_card.dat'] 5399 if options['reweightonly']: 5400 cards = ['run_card.dat'] 5401 5402 self.keep_cards(cards, ignore) 5403 5404 if mode =='onlyshower': 5405 cards = ['shower_card.dat'] 5406 5407 5408 # automatically switch to keep_wgt option 5409 first_cmd = cmd_switch.get_cardcmd() 5410 5411 if not options['force'] and not self.force: 5412 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd) 5413 5414 self.banner = banner_mod.Banner() 5415 5416 # store the cards in the banner 5417 for card in cards: 5418 self.banner.add(pjoin(self.me_dir, 'Cards', card)) 5419 # and the run settings 5420 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()]) 5421 self.banner.add_text('run_settings', run_settings) 5422 5423 if not mode =='onlyshower': 5424 self.run_card = self.banner.charge_card('run_card') 5425 self.run_tag = self.run_card['run_tag'] 5426 #this is if the user did not provide a name for the current run 5427 if not hasattr(self, 'run_name') or not self.run_name: 5428 self.run_name = self.find_available_run_name(self.me_dir) 5429 #add a tag in the run_name for distinguish run_type 5430 if self.run_name.startswith('run_'): 5431 if mode in 
['LO','aMC@LO','noshowerLO']: 5432 self.run_name += '_LO' 5433 self.set_run_name(self.run_name, self.run_tag, 'parton') 5434 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']: 5435 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""") 5436 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']: 5437 logger.warning("""You are running with FxFx merging enabled. To be able to merge 5438 samples of various multiplicities without double counting, you 5439 have to remove some events after showering 'by hand'. Please 5440 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 5441 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q': 5442 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""") 5443 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8' and self.run_card['parton_shower'].upper() != 'HERWIGPP': 5444 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \ 5445 "Type \'n\' to stop or \'y\' to continue" 5446 answers = ['n','y'] 5447 answer = self.ask(question, 'n', answers) 5448 if answer == 'n': 5449 error = '''Stop opertation''' 5450 self.ask_run_configuration(mode, options) 5451 # raise aMCatNLOError(error) 5452 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']: 5453 # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs. 5454 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""") 5455 if 'aMC@' in mode or mode == 'onlyshower': 5456 self.shower_card = self.banner.charge_card('shower_card') 5457 5458 elif mode in ['LO', 'NLO']: 5459 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat') 5460 self.analyse_card = self.banner.charge_card('FO_analyse_card') 5461 5462 return mode
5463
#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
    """The command line processor of MadGraph, interactive-shell flavour:
    combines the aMC@NLO run commands (aMCatNLOCmd) with the terminal
    front-end mixin (cmd.CmdShell). Used below in __main__ whenever the
    --web flag is absent."""
# ------------------------------------------------------------------------------
# Module-level usage strings and option parsers for the compile / launch /
# generate_events / calculate_xsect / shower commands.
# NOTE(review): these live at module scope; presumably the do_* command
# handlers read them as globals -- confirm against the class body above.
# ------------------------------------------------------------------------------
_compile_usage = "compile [MODE] [options]\n" + \
                 "-- compiles aMC@NLO \n" + \
                 "   MODE can be either FO, for fixed-order computations, \n" + \
                 "   or MC for matching with parton-shower monte-carlos. \n" + \
                 "   (if omitted, it is set to MC)\n"
_compile_parser = misc.OptionParser(usage=_compile_usage)
_compile_parser.add_option("-f", "--force", default=False, action='store_true',
                           help="Use the card present in the directory for the launch, without editing them")

_launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

_launch_parser = misc.OptionParser(usage=_launch_usage)
_launch_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the card present in the directory for the launch, without editing them")
_launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs on multicore mode")
_launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                          help="Skip integration and event generation, just run reweight on the" + \
                               " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                          help="Stop the run after the parton level file generation (you need " + \
                               "to shower the file in order to get physical results)")
_launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")
_launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name to the run")
_launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                          help="For use with APPLgrid only: start from existing grids")
_launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                          help="Run the reweight module (reweighting by different model parameters)")
_launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                          help="Run the madspin package")


_generate_events_usage = "generate_events [MODE] [options]\n" + \
                         "-- execute aMC@NLO \n" + \
                         "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                         "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                         "     computation of the total cross section and the filling of parton-level histograms \n" + \
                         "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                         "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                         "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                         "     in the run_card.dat\n"

_generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
_generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                                   help="Use the card present in the directory for the generate_events, without editing them")
_generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                                   help="Submit the jobs on the cluster")
_generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                                   help="Submit the jobs on multicore mode")
_generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                                   help="Skip compilation. Ignored if no executable is found")
_generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                                   help="Skip integration and event generation, just run reweight on the" + \
                                        " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                                   help="Stop the run after the parton level file generation (you need " + \
                                        "to shower the file in order to get physical results)")
_generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                                   help="Skip grid set up, just generate events starting from " + \
                                        "the last available results")
_generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                                   help="Provide a name to the run")


_calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                         "-- calculate cross section up to ORDER.\n" + \
                         "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

_calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
_calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                                   help="Use the card present in the directory for the launch, without editing them")
_calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                                   help="Submit the jobs on the cluster")
_calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                                   help="Submit the jobs on multicore mode")
_calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                                   help="Skip compilation. Ignored if no executable is found")
_calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                                   help="Provide a name to the run")
_calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                                   help="For use with APPLgrid only: start from existing grids")
_calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                                   help="Skip grid set up, just generate events starting from " + \
                                        "the last available results")

_shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
                '   all the information (e.g. number of events, MonteCarlo, ...\n' + \
                '   are directly read from the header of the event file\n'
_shower_parser = misc.OptionParser(usage=_shower_usage)
_shower_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the shower_card present in the directory for the launch, without editing")

if '__main__' == __name__:
    # Launch the interface without any check if one code is already running.
    # This can ONLY run a single command !!
    import sys
    # Hard requirement: Python 2.6/2.7 only (the module uses py2-only syntax).
    if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
        sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n' + \
                 'Please upgrate your version of python.')

    import os
    import optparse
    # Get the directory of the script real path (bin)
    # and add it to the current PYTHONPATH
    root_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    sys.path.insert(0, root_path)

    class MyOptParser(optparse.OptionParser):
        """OptionParser variant that raises instead of exiting on bad options."""
        class InvalidOption(Exception): pass
        def error(self, msg=''):
            raise MyOptParser.InvalidOption(msg)
    # Write out nice usage message if called with -h or --help
    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    # NOTE(review): 'toce' in the --web help text looks like a typo for 'code';
    # it is a runtime string, so it is left unchanged here.
    parser.add_option("", "--web", action="store_true", default=False, dest='web', \
                      help='force toce to be in secure mode')
    parser.add_option("", "--debug", action="store_true", default=False, dest='debug', \
                      help='force to launch debug mode')
    parser_error = ''
    done = False

    # Retry parsing with progressively fewer trailing argv entries so that
    # trailing command words that are not valid options do not abort parsing;
    # the stripped tail is appended back to args on success.
    for i in range(len(sys.argv) - 1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv) - i])
            done = True
        except MyOptParser.InvalidOption, error:
            pass
        else:
            args += sys.argv[len(sys.argv) - i:]
    if not done:
        # raise correct error:
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption, error:
            print error
            sys.exit(2)

    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config
    # Set logging level according to the logging level given by options
    #logging.basicConfig(level=vars(logging)[options.logging])
    import internal.coloring_logging
    try:
        # Debug builds default to DEBUG-level logging unless overridden.
        if __debug__ and options.logging == 'INFO':
            options.logging = 'DEBUG'
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            level = eval('logging.' + options.logging)
        print os.path.join(root_path, 'internal', 'me5_logging.conf')
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        # NOTE(review): 'raise' re-raises immediately, so the trailing 'pass'
        # is unreachable dead code -- the except clause is effectively a no-op.
        raise
        pass

    # Call the cmd interface main loop
    try:
        if args:
            # a single command is provided
            if '--web' in args:
                # web mode: use the non-shell command processor
                i = args.index('--web')
                args.pop(i)
                cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path), force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path), force_run=True)

            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print parser_error
                    print 'and %s can not be interpreted as a valid command.' % args[0]
                else:
                    print 'ERROR: %s not a valid command. Please retry' % args[0]
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print 'quit on KeyboardInterrupt'
        pass