Package madgraph :: Package interface :: Module amcatnlo_run_interface
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.interface.amcatnlo_run_interface

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
  18  from __future__ import division 
  19   
  20  import atexit 
  21  import glob 
  22  import logging 
  23  import math 
  24  import optparse 
  25  import os 
  26  import pydoc 
  27  import random 
  28  import re 
  29  import shutil 
  30  import subprocess 
  31  import sys 
  32  import traceback 
  33  import time 
  34  import signal 
  35  import tarfile 
  36  import copy 
  37  import datetime 
  38  import tarfile 
  39  import traceback 
  40  import StringIO 
  41   
  42  try: 
  43      import readline 
  44      GNU_SPLITTING = ('GNU' in readline.__doc__) 
  45  except: 
  46      GNU_SPLITTING = True 
  47   
# Locate the MG5_aMC root directory: two levels above this file
# (interface/ -> madgraph/ -> root), then make its bin/ importable.
root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
root_path = os.path.split(root_path)[0]
sys.path.insert(0, os.path.join(root_path,'bin'))

# usefull shortcut
pjoin = os.path.join
# Special logger for the Cmd Interface
logger = logging.getLogger('madgraph.stdout') # -> stdout
logger_stderr = logging.getLogger('madgraph.stderr') # ->stderr
  57    
  58  try: 
  59      import madgraph 
  60  except ImportError:  
  61      aMCatNLO = True  
  62      import internal.extended_cmd as cmd 
  63      import internal.common_run_interface as common_run 
  64      import internal.banner as banner_mod 
  65      import internal.misc as misc     
  66      from internal import InvalidCmd, MadGraph5Error 
  67      import internal.files as files 
  68      import internal.cluster as cluster 
  69      import internal.save_load_object as save_load_object 
  70      import internal.gen_crossxhtml as gen_crossxhtml 
  71      import internal.sum_html as sum_html 
  72      import internal.shower_card as shower_card 
  73      import internal.FO_analyse_card as analyse_card  
  74      import internal.histograms as histograms 
  75  else: 
  76      # import from madgraph directory 
  77      aMCatNLO = False 
  78      import madgraph.interface.extended_cmd as cmd 
  79      import madgraph.interface.common_run_interface as common_run 
  80      import madgraph.iolibs.files as files 
  81      import madgraph.iolibs.save_load_object as save_load_object 
  82      import madgraph.madevent.gen_crossxhtml as gen_crossxhtml 
  83      import madgraph.madevent.sum_html as sum_html 
  84      import madgraph.various.banner as banner_mod 
  85      import madgraph.various.cluster as cluster 
  86      import madgraph.various.misc as misc 
  87      import madgraph.various.shower_card as shower_card 
  88      import madgraph.various.FO_analyse_card as analyse_card 
  89      import madgraph.various.histograms as histograms 
  90      from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR 
  91   
class aMCatNLOError(Exception):
    """Generic error raised by the aMC@NLO run interface."""
    pass
94 95
def compile_dir(*arguments):
    """Compile the subprocess directory p_dir.

    ``arguments`` is either one tuple, or seven positional values:
    (me_dir, p_dir, mode, options, tests, exe, run_mode).
    Kept as a module-level function (not a method) so that it can be
    dispatched to worker processes when compiling on multicore.
    Returns 0 on success, or the MadGraph5Error message on failure.
    """
    if len(arguments) == 1:
        me_dir, p_dir, mode, options, tests, exe, run_mode = arguments[0]
    elif len(arguments) == 7:
        me_dir, p_dir, mode, options, tests, exe, run_mode = arguments
    else:
        raise aMCatNLOError('not correct number of argument')
    logger.info(' Compiling %s...' % p_dir)

    proc_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        # build and run the requested sanity-check executables first
        for test in tests:
            # skip check_poles for LOonly dirs
            if test == 'check_poles' and os.path.exists(pjoin(proc_dir, 'parton_lum_0.f')):
                continue
            misc.compile([test], cwd=proc_dir, job_specs=False)
            stdin_path = pjoin(me_dir, '%s_input.txt' % test)
            # this can be improved/better written to handle the output
            misc.call(['./%s' % (test)], cwd=proc_dir,
                      stdin=open(stdin_path),
                      stdout=open(pjoin(proc_dir, '%s.log' % test), 'w'),
                      close_fds=True)
            if test == 'check_poles' and os.path.exists(pjoin(proc_dir, 'MadLoop5_resources')):
                # archive the MadLoop resources produced by check_poles
                archive = tarfile.open(pjoin(proc_dir, 'MadLoop5_resources.tar.gz'),
                                       'w:gz', dereference=True)
                archive.add(pjoin(proc_dir, 'MadLoop5_resources'),
                            arcname='MadLoop5_resources')
                archive.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=proc_dir, job_specs=False)
            open(pjoin(proc_dir, 'gensym_input.txt'), 'w').write('%s\n' % run_mode)
            misc.call(['./gensym'], cwd=proc_dir,
                      stdin=open(pjoin(proc_dir, 'gensym_input.txt')),
                      stdout=open(pjoin(proc_dir, 'gensym.log'), 'w'),
                      close_fds=True)
            # compile madevent_mintMC/mintFO
            misc.compile([exe], cwd=proc_dir, job_specs=False)
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            misc.compile(['reweight_xsec_events'], cwd=proc_dir, job_specs=False)

        logger.info('    %s done.' % p_dir)
        return 0
    except MadGraph5Error as msg:
        # report the failure to the caller instead of aborting the pool
        return msg
147 148
def check_compiler(options, block=False):
    """Check that the configured Fortran compiler is gfortran 4.6 or later.

    options: options dict; only 'fortran_compiler' is read.
    block:   if True, raise aMCatNLOError on failure; otherwise only log
             a warning.
    """
    msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected\n' + \
          'Note that You can still run all MadEvent run without any problem!'
    #first check that gfortran is installed
    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        # BUGFIX: compare the version numerically.  The previous test,
        # ''.join(curr_version.split('.')) >= '46', was a lexicographic
        # string comparison which wrongly rejected e.g. gfortran 4.10
        # ('4100' < '46') and gfortran 10.x ('10...' < '46').
        if _gfortran_version_tuple(curr_version) < (4, 6):
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
            else:
                logger.warning(msg % (compiler + ' ' + curr_version))


def _gfortran_version_tuple(version):
    """Return the leading numeric components of a dotted version string
    as a tuple of ints (e.g. '4.10.1' -> (4, 10, 1)); parsing stops at
    the first non-numeric component, so '' or garbage yields () which
    compares below any real requirement."""
    parts = []
    for piece in version.split('.'):
        try:
            parts.append(int(piece))
        except ValueError:
            break
    return tuple(parts)
176 177 178 179 #=============================================================================== 180 # CmdExtended 181 #===============================================================================
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    #suggested list of command
    next_possibility = {
        'start': [],
    }

    # file receiving the debug trace written on a crash
    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'


    keyboard_stop_msg = """stopping all operation
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Define the Error
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError

    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # Tag allowing/forbiding question
        self.force = False

        # If possible, build an info line with current version number
        # and date, from the VERSION text file
        info = misc.get_pkg_info()
        info_line = ""
        if info and info.has_key('version') and info.has_key('date'):
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#* VERSION %s %s %s *\n" % \
                            (info['version'],
                            (30 - len_version - len_date) * ' ',
                            info['date'])
        else:
            # fall back on the top-level MGMEVersion.txt file
            version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
            info_line = "#* VERSION %s %s *\n" % \
                            (version, (24 - len(version)) * ' ')

        # Create a header for the history file.
        # Remember to fill in time at writeout time!
        self.history_header = \
        '#************************************************************\n' + \
        '#* MadGraph5_aMC@NLO *\n' + \
        '#* *\n' + \
        "#* * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * * * 5 * * * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * *\n" + \
        "#* *\n" + \
        "#* *\n" + \
        info_line + \
        "#* *\n" + \
        "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
        "#* and *\n" + \
        "#* http://amcatnlo.cern.ch *\n" + \
        '#* *\n' + \
        '#************************************************************\n' + \
        '#* *\n' + \
        '#* Command File for aMCatNLO *\n' + \
        '#* *\n' + \
        '#* run as ./bin/aMCatNLO.py filename *\n' + \
        '#* *\n' + \
        '#************************************************************\n'

        if info_line:
            # drop the leading '#' so the same line fits the splash banner
            info_line = info_line[1:]

        logger.info(\
        "************************************************************\n" + \
        "* *\n" + \
        "* W E L C O M E to M A D G R A P H 5 *\n" + \
        "* a M C @ N L O *\n" + \
        "* *\n" + \
        "* * * *\n" + \
        "* * * * * *\n" + \
        "* * * * * 5 * * * * *\n" + \
        "* * * * * *\n" + \
        "* * * *\n" + \
        "* *\n" + \
        info_line + \
        "* *\n" + \
        "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "* http://amcatnlo.cern.ch *\n" + \
        "* *\n" + \
        "* Type 'help' for in-line help. *\n" + \
        "* *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)

    def get_history_header(self):
        """return the history header"""
        return self.history_header % misc.get_time_info()

    def stop_on_keyboard_stop(self):
        """action to perform to close nicely on a keyboard interupt"""
        try:
            if hasattr(self, 'cluster'):
                logger.info('rm jobs on queue')
                self.cluster.remove()
            if hasattr(self, 'results'):
                self.update_status('Stop by the user', level=None, makehtml=True, error=True)
                self.add_error_log_in_html(KeyboardInterrupt)
        except:
            # best-effort shutdown: never raise from the interrupt handler
            pass

    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # relaxing the tag forbidding question
        self.force = False

        # non-interactive (scripted) sessions skip the html status update
        if not self.use_rawinput:
            return stop


        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit','quit','EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            # the status update is cosmetic only: log and keep the CLI alive
            misc.sprint('self.update_status fails', log=logger)
            pass

    def nice_user_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_user_error(self, error, line)

    def nice_config_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_config_error(self, error, line)

    def nice_error_handling(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_error_handling(self, error, line)



#===============================================================================
# HelpToCmd
#===============================================================================
class HelpToCmd(object):
    """ The Series of help routine for the aMCatNLOCmd"""

    def help_launch(self):
        """help for launch command"""
        _launch_parser.print_help()

    def help_banner_run(self):
        # help text for banner_run; option list delegated to run_options_help
        logger.info("syntax: banner_run Path|RUN [--run_options]")
        logger.info("-- Reproduce a run following a given banner")
        logger.info(" One of the following argument is require:")
        logger.info(" Path should be the path of a valid banner.")
        logger.info(" RUN should be the name of a run of the current directory")
        self.run_options_help([('-f','answer all question by default'),
                               ('--name=X', 'Define the name associated with the new run')])

    def help_compile(self):
        """help for compile command"""
        _compile_parser.print_help()

    def help_generate_events(self):
        """help for generate_events commandi
        just call help_launch"""
        _generate_events_parser.print_help()

    def help_calculate_xsect(self):
        """help for generate_events command"""
        _calculate_xsect_parser.print_help()

    def help_shower(self):
        """help for shower command"""
        _shower_parser.print_help()

    def help_open(self):
        # help text for the open command
        logger.info("syntax: open FILE ")
        logger.info("-- open a file with the appropriate editor.")
        logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
        logger.info(' the path to the last created/used directory is used')

    def run_options_help(self, data):
        # print the command-local options given in `data`, then the
        # session-wide run options shared by every run command
        if data:
            logger.info('-- local options:')
            for name, info in data:
                logger.info(' %s : %s' % (name, info))

        logger.info("-- session options:")
        logger.info(" Note that those options will be kept for the current session")
        logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
        logger.info(" --multicore : Run in multi-core configuration")
        logger.info(" --nb_core=X : limit the number of core to use to X.")
397 398 399 400 401 #=============================================================================== 402 # CheckValidForCmd 403 #===============================================================================
class CheckValidForCmd(object):
    """ The Series of check routine for the aMCatNLOCmd"""

    def check_shower(self, args, options):
        """Check the validity of the line. args[0] is the run_directory"""

        if options['force']:
            self.force = True

        if len(args) == 0:
            self.help_shower()
            raise self.InvalidCmd('Invalid syntax, please specify the run name')
        if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
            # NOTE(review): the existence test uses self.me_dir while the
            # message is built from os.getcwd() -- the two may differ.
            raise self.InvalidCmd('Directory %s does not exists' % \
                            pjoin(os.getcwd(), 'Events', args[0]))

        self.set_run_name(args[0], level= 'shower')
        # normalise args[0] into the absolute path of the run directory
        args[0] = pjoin(self.me_dir, 'Events', args[0])

    def check_plot(self, args):
        """Check the argument for the plot command
        plot run_name modes"""


        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir or not td:
            logger.info('Retry to read configuration file to find madanalysis/td')
            self.set_configuration()

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir:
            error_msg = 'No Madanalysis path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)
        if not td:
            error_msg = 'No path to td directory correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        if len(args) == 0:
            if not hasattr(self, 'run_name') or not self.run_name:
                self.help_plot()
                raise self.InvalidCmd('No run name currently define. Please add this information.')
            args.append('all')
            return


        if args[0] not in self._plot_mode:
            # first argument is a run name: record it and keep only modes
            self.set_run_name(args[0], level='plot')
            del args[0]
            if len(args) == 0:
                args.append('all')
        elif not self.run_name:
            self.help_plot()
            raise self.InvalidCmd('No run name currently define. Please add this information.')

        for arg in args:
            if arg not in self._plot_mode and arg != self.run_name:
                self.help_plot()
                raise self.InvalidCmd('unknown options %s' % arg)

    def check_pgs(self, arg):
        """Check the argument for pythia command
        syntax: pgs [NAME]
        Note that other option are already remove at this point
        """

        # If not pythia-pgs path
        if not self.options['pythia-pgs_path']:
            logger.info('Retry to read configuration file to find pythia-pgs path')
            self.set_configuration()

        if not self.options['pythia-pgs_path'] or not \
            os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
            error_msg = 'No pythia-pgs path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # extract an optional --tag=XXX argument
        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]


        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'pgs')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                # gunzip asynchronously; the caller waits on the returned lock
                lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'pgs')

        return lock


    def check_delphes(self, arg):
        """Check the argument for pythia command
        syntax: delphes [NAME]
        Note that other option are already remove at this point
        """

        # If not pythia-pgs path
        if not self.options['delphes_path']:
            logger.info('Retry to read configuration file to find delphes path')
            self.set_configuration()

        if not self.options['delphes_path']:
            error_msg = 'No delphes path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        # extract an optional --tag=XXX argument
        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]


        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
            # NOTE(review): calls help_pgs, not a delphes-specific help -- confirm intended
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'delphes')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))


            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
                        % (self.run_name, prev_tag,
                           pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                # NOTE(review): unlike check_pgs, the lock is not returned here
                lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'delphes')

    def check_calculate_xsect(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_calculate_xsect()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                # BUGFIX: this used args[1], which raises IndexError when
                # len(args) == 1; the offending mode is args[0]
                raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.')


    def check_generate_events(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_generate_events()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                # BUGFIX: this used args[1], which raises IndexError when
                # len(args) == 1; the offending mode is args[0]
                raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.')

    def check_banner_run(self, args):
        """check the validity of line"""

        if len(args) == 0:
            self.help_banner_run()
            raise self.InvalidCmd('banner_run requires at least one argument.')

        tag = [a[6:] for a in args if a.startswith('--tag=')]


        if os.path.exists(args[0]):
            # direct path to a banner file
            input_type = 'banner'
            card_format = self.detect_card_type(args[0])
            if card_format != 'banner':
                raise self.InvalidCmd('The file is not a valid banner.')
        elif tag:
            # NOTE(review): `tag` is still a list here, so the %s below
            # formats its repr -- confirm against upstream behaviour.
            args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
                                    (args[0], tag))
            if not os.path.exists(args[0]):
                raise self.InvalidCmd('No banner associates to this name and tag.')
        else:
            name = args[0]
            input_type = 'run'
            banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0]))
            if not banners:
                raise self.InvalidCmd('No banner associates to this name.')
            elif len(banners) == 1:
                args[0] = banners[0]
            else:
                #list the tag and propose those to the user
                tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
                tag = self.ask('which tag do you want to use?', tags[0], tags)
                args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
                                    (args[0], tag))

        run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
        if run_name:
            try:
                # NOTE(review): run_name is a list at this point -- the %s
                # formats its repr; confirm against upstream behaviour.
                self.exec_cmd('remove %s all banner -f' % run_name)
            except Exception:
                pass
            self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
        elif input_type == 'banner':
            self.set_run_name(self.find_available_run_name(self.me_dir))
        elif input_type == 'run':
            if not self.results[name].is_empty():
                run_name = self.find_available_run_name(self.me_dir)
                logger.info('Run %s is not empty so will use run_name: %s' % \
                                                        (name, run_name))
                self.set_run_name(run_name)
            else:
                try:
                    self.exec_cmd('remove %s all banner -f' % run_name)
                except Exception:
                    pass
                self.set_run_name(name)



    def check_launch(self, args, options):
        """check the validity of the line. args is MODE
        MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True


        if not args:
            args.append('auto')
            return

        if len(args) > 1:
            self.help_launch()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
                raise self.InvalidCmd('%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.')
        if mode == 'NLO' and options['reweightonly']:
            raise self.InvalidCmd('option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"')


    def check_compile(self, args, options):
        """check the validity of the line. args is MODE
        MODE being FO or MC. If no mode is passed, MC is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('MC')
            return

        if len(args) > 1:
            self.help_compile()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['MC', 'FO']:
                raise self.InvalidCmd('%s is not a valid mode, please use "FO" or "MC"' % args[0])
            mode = args[0]
752 753 # check for incompatible options/modes 754 755 756 #=============================================================================== 757 # CompleteForCmd 758 #===============================================================================
class CompleteForCmd(CheckValidForCmd):
    """ The Series of help routine for the MadGraphCmd"""

    def complete_launch(self, text, line, begidx, endidx):
        """auto-completion for launch command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
        elif len(args) == 2 and line[begidx-1] == '@':
            # completing the part after 'aMC@'
            return self.list_completion(text,['LO','NLO'],line)
        else:
            # propose the launch command-line options
            opts = []
            for opt in _launch_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_banner_run(self, text, line, begidx, endidx, formatting=True):
        "Complete the banner run command"
        try:


            args = self.split_arg(line[0:begidx], error=False)

            if args[-1].endswith(os.path.sep):
                # directory-style completion inside a path argument
                return self.path_completion(text,
                        os.path.join('.',*[a for a in args \
                                           if a.endswith(os.path.sep)]))


            if len(args) > 1:
                # only options are possible
                tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1]))
                tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]

                if args[-1] != '--tag=':
                    tags = ['--tag=%s' % t for t in tags]
                else:
                    return self.list_completion(text, tags)
                return self.list_completion(text, tags +['--name=','-f'], line)

            # First argument
            possibilites = {}

            comp = self.path_completion(text, os.path.join('.',*[a for a in args \
                                                if a.endswith(os.path.sep)]))
            if os.path.sep in line:
                return comp
            else:
                possibilites['Path from ./'] = comp

            run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events'))
            run_list = [n.rsplit('/',2)[1] for n in run_list]
            possibilites['RUN Name'] = self.list_completion(text, run_list)

            return self.deal_multiple_categories(possibilites, formatting)


        except Exception as error:
            # completion must never raise into readline; show and swallow
            print(error)


    def complete_compile(self, text, line, begidx, endidx):
        """auto-completion for launch command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['FO','MC'],line)
        else:
            opts = []
            for opt in _compile_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_calculate_xsect(self, text, line, begidx, endidx):
        """auto-completion for launch command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return mode
            return self.list_completion(text,['LO','NLO'],line)
        else:
            opts = []
            for opt in _calculate_xsect_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_generate_events(self, text, line, begidx, endidx):
        """auto-completion for generate_events command
        call the compeltion for launch"""
        self.complete_launch(text, line, begidx, endidx)


    def complete_shower(self, text, line, begidx, endidx):
        # complete with the run directories that contain a parton-level file
        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            #return valid run_name
            # BUGFIX: the Events path was previously passed as a third
            # argument *inside* pjoin, globbing the nonsense pattern
            # '*/events.lhe.gz/<me_dir>/Events' (never matching); it must
            # be misc.glob's separate path argument, as in complete_pgs.
            data = misc.glob(pjoin('*','events.lhe.gz'), pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

    def complete_plot(self, text, line, begidx, endidx):
        """ Complete the plot command """

        args = self.split_arg(line[0:begidx], error=False)

        if len(args) == 1:
            #return valid run_name
            # BUGFIX: same misplaced parenthesis as in complete_shower
            data = misc.glob(pjoin('*','events.lhe*'), pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

        if len(args) > 1:
            return self.list_completion(text, self._plot_mode)

    def complete_pgs(self,text, line, begidx, endidx):
        "Complete the pgs command"
        args = self.split_arg(line[0:begidx], error=False)
        if len(args) == 1:
            #return valid run_name
            data = misc.glob(pjoin('*', 'events_*.hep.gz'),
                             pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/',2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1
            else:
                tmp2 = self.list_completion(text, self._run_options + ['-f',
                                                '--tag=' ,'--no_default'], line)
                return tmp1 + tmp2
        else:
            return self.list_completion(text, self._run_options + ['-f',
                                                '--tag=','--no_default'], line)

    # delphes completion behaves exactly like pgs completion
    complete_delphes = complete_pgs
900
class aMCatNLOAlreadyRunning(InvalidCmd):
    """Raised when an aMC@NLO run is launched while another is in progress."""
    pass
903 904 #=============================================================================== 905 # aMCatNLOCmd 906 #===============================================================================
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # Truth values
    true = ['T','.true.',True,'true']
    # Options and formats available
    _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
    _plot_mode = ['all', 'parton','shower','pgs','delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']
    # survey options, dict from name to type, default value, and help text
    # Variables to store object information
    web = False
    cluster_mode = 0
    queue = 'madgraph'
    nb_core = None
    make_opts_var = {}

    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower' : ['generate_events [OPTIONS]']
    }


    ############################################################################
    def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
        """ add information to the cmd

        me_dir: path of the process directory this interface controls.
        options: configuration dictionary forwarded to CmdExtended.
        NOTE(review): 'options' uses a mutable default argument -- safe only
        as long as callers never mutate the shared default dict.
        """

        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
        #common_run.CommonRunCmd.__init__(self, me_dir, options)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        # prompt shows the name of the process directory
        self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))


        self.load_results_db()
        self.results.def_web_mode(self.web)
        # check that compiler is gfortran 4.6 or later if virtuals have been exported
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)
957 958 959 ############################################################################
960 - def do_shower(self, line):
961 """ run the shower on a given parton level file """ 962 argss = self.split_arg(line) 963 (options, argss) = _launch_parser.parse_args(argss) 964 # check argument validity and normalise argument 965 options = options.__dict__ 966 options['reweightonly'] = False 967 self.check_shower(argss, options) 968 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe') 969 self.ask_run_configuration('onlyshower', options) 970 self.run_mcatnlo(evt_file, options) 971 972 self.update_status('', level='all', update_results=True)
973 974 ################################################################################
975 - def do_plot(self, line):
976 """Create the plot for a given run""" 977 978 # Since in principle, all plot are already done automaticaly 979 args = self.split_arg(line) 980 # Check argument's validity 981 self.check_plot(args) 982 logger.info('plot for run %s' % self.run_name) 983 984 if not self.force: 985 self.ask_edit_cards([], args, plot=True) 986 987 if any([arg in ['parton'] for arg in args]): 988 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe') 989 if os.path.exists(filename+'.gz'): 990 misc.gunzip(filename) 991 if os.path.exists(filename): 992 logger.info('Found events.lhe file for run %s' % self.run_name) 993 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe')) 994 self.create_plot('parton') 995 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename) 996 misc.gzip(filename) 997 998 if any([arg in ['all','parton'] for arg in args]): 999 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top') 1000 if os.path.exists(filename): 1001 logger.info('Found MADatNLO.top file for run %s' % \ 1002 self.run_name) 1003 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html') 1004 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton') 1005 1006 if not os.path.isdir(plot_dir): 1007 os.makedirs(plot_dir) 1008 top_file = pjoin(plot_dir, 'plots.top') 1009 files.cp(filename, top_file) 1010 madir = self.options['madanalysis_path'] 1011 tag = self.run_card['run_tag'] 1012 td = self.options['td_path'] 1013 misc.call(['%s/plot' % self.dirbin, madir, td], 1014 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1015 stderr = subprocess.STDOUT, 1016 cwd=plot_dir) 1017 1018 misc.call(['%s/plot_page-pl' % self.dirbin, 1019 os.path.basename(plot_dir), 1020 'parton'], 1021 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1022 stderr = subprocess.STDOUT, 1023 cwd=pjoin(self.me_dir, 'HTML', self.run_name)) 1024 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'), 1025 output) 1026 1027 
os.remove(pjoin(self.me_dir, 'Events', 'plots.top')) 1028 1029 if any([arg in ['all','shower'] for arg in args]): 1030 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name)) 1031 if len(filenames) != 1: 1032 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name)) 1033 if len(filenames) != 1: 1034 logger.info('No shower level file found for run %s' % \ 1035 self.run_name) 1036 return 1037 filename = filenames[0] 1038 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1039 1040 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')): 1041 if aMCatNLO and not self.options['mg5_path']: 1042 raise "plotting NLO HEP file needs MG5 utilities" 1043 1044 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'), 1045 pjoin(self.me_dir, 'Cards', 'pythia_card.dat')) 1046 self.run_hep2lhe() 1047 else: 1048 filename = filenames[0] 1049 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1050 1051 self.create_plot('shower') 1052 lhe_file_name = filename.replace('.hep.gz', '.lhe') 1053 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'), 1054 lhe_file_name) 1055 misc.gzip(lhe_file_name) 1056 1057 if any([arg in ['all','pgs'] for arg in args]): 1058 filename = pjoin(self.me_dir, 'Events', self.run_name, 1059 '%s_pgs_events.lhco' % self.run_tag) 1060 if os.path.exists(filename+'.gz'): 1061 misc.gunzip(filename) 1062 if os.path.exists(filename): 1063 self.create_plot('PGS') 1064 misc.gzip(filename) 1065 else: 1066 logger.info('No valid files for pgs plot') 1067 1068 if any([arg in ['all','delphes'] for arg in args]): 1069 filename = pjoin(self.me_dir, 'Events', self.run_name, 1070 '%s_delphes_events.lhco' % self.run_tag) 1071 if os.path.exists(filename+'.gz'): 1072 misc.gunzip(filename) 1073 if os.path.exists(filename): 1074 #shutil.move(filename, pjoin(self.me_dir, 
'Events','delphes_events.lhco')) 1075 self.create_plot('Delphes') 1076 #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename) 1077 misc.gzip(filename) 1078 else: 1079 logger.info('No valid files for delphes plot')
1080 1081 1082 ############################################################################
1083 - def do_calculate_xsect(self, line):
1084 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO 1085 this function wraps the do_launch one""" 1086 1087 self.start_time = time.time() 1088 argss = self.split_arg(line) 1089 # check argument validity and normalise argument 1090 (options, argss) = _calculate_xsect_parser.parse_args(argss) 1091 options = options.__dict__ 1092 options['reweightonly'] = False 1093 options['parton'] = True 1094 self.check_calculate_xsect(argss, options) 1095 self.do_launch(line, options, argss)
1096 1097 ############################################################################
1098 - def do_banner_run(self, line):
1099 """Make a run from the banner file""" 1100 1101 args = self.split_arg(line) 1102 #check the validity of the arguments 1103 self.check_banner_run(args) 1104 1105 # Remove previous cards 1106 for name in ['shower_card.dat', 'madspin_card.dat']: 1107 try: 1108 os.remove(pjoin(self.me_dir, 'Cards', name)) 1109 except Exception: 1110 pass 1111 1112 banner_mod.split_banner(args[0], self.me_dir, proc_card=False) 1113 1114 # Check if we want to modify the run 1115 if not self.force: 1116 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n']) 1117 if ans == 'n': 1118 self.force = True 1119 1120 # Compute run mode: 1121 if self.force: 1122 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True} 1123 banner = banner_mod.Banner(args[0]) 1124 for line in banner['run_settings']: 1125 if '=' in line: 1126 mode, value = [t.strip() for t in line.split('=')] 1127 mode_status[mode] = value 1128 else: 1129 mode_status = {} 1130 1131 # Call Generate events 1132 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''), 1133 switch=mode_status)
1134 1135 ############################################################################
    def do_generate_events(self, line):
        """Main commands: generate events.

        Thin wrapper: all the work (argument parsing, compilation,
        running, showering) is delegated to do_launch.
        """
        self.do_launch(line)
1140 1141 1142 ############################################################################
    def do_treatcards(self, line, amcatnlo=True):
        """Advanced commands: this is for creating the correct run_card.inc
        from the nlo format.

        First verifies that the param_card contains no unresolved 'Auto'
        entries, then delegates to the CommonRunCmd implementation with the
        aMC@NLO flag set.
        """
        #check if no 'Auto' are present in the file
        self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
        return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1148 1149 ############################################################################
    def set_configuration(self, amcatnlo=True, **opt):
        """assign all configuration variable from file 
        loop over the different config file if config_file not define

        Simply forwards to the parent implementation with the aMC@NLO
        flag switched on by default.
        """
        return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1154 1155 ############################################################################
    def do_launch(self, line, options={}, argss=[], switch={}):
        """Main commands: launch the full chain 
        options and args are relevant if the function is called from other 
        functions, such as generate_events or calculate_xsect 
        mode gives the list of switch needed for the computation (usefull for banner_run)

        NOTE(review): 'options', 'argss' and 'switch' are mutable default
        arguments; the scan loop below does mutate 'options' and 'argss',
        which is only safe because callers always pass their own objects
        when these matter -- confirm before refactoring.
        """

        # only parse the command line when not called programmatically
        if not argss and not options:
            self.start_time = time.time()
            argss = self.split_arg(line)
            # check argument validity and normalise argument
            (options, argss) = _launch_parser.parse_args(argss)
            options = options.__dict__
            self.check_launch(argss, options)


        if 'run_name' in options.keys() and options['run_name']:
            self.run_name = options['run_name']
            # if a dir with the given run_name already exists
            # remove it and warn the user
            if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
                logger.warning('Removing old run information in \n'+
                               pjoin(self.me_dir, 'Events', self.run_name))
                files.rm(pjoin(self.me_dir, 'Events', self.run_name))
                self.results.delete_run(self.run_name)
        else:
            self.run_name = '' # will be set later

        if options['multicore']:
            self.cluster_mode = 2
        elif options['cluster']:
            self.cluster_mode = 1

        # determine the run mode, either from the command line or from the
        # 'switch' dictionary (banner_run)
        if not switch:
            mode = argss[0]

            if mode in ['LO', 'NLO']:
                options['parton'] = True
            mode = self.ask_run_configuration(mode, options)
        else:
            mode = self.ask_run_configuration('auto', options, switch)

        self.results.add_detail('run_mode', mode)

        self.update_status('Starting run', level=None, update_results=True)

        if self.options['automatic_html_opening']:
            misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
            self.options['automatic_html_opening'] = False

        # strip a possible '+madspin/+reweight'-style suffix from the mode
        if '+' in mode:
            mode = mode.split('+')[0]
        self.compile(mode, options)
        evt_file = self.run(mode, options)

        if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
            logger.info('No event file generated: grids have been set-up with a '\
                            'relative precision of %s' % self.run_card['req_acc'])
            return

        if not mode in ['LO', 'NLO']:
            assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))

            if self.run_card['systematics_program'] == 'systematics':
                self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments'])))

            self.exec_cmd('reweight -from_cards', postcmd=False)
            self.exec_cmd('decay_events -from_cards', postcmd=False)
            evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')

        if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
                                                  and not options['parton']:
            self.run_mcatnlo(evt_file, options)
            self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False)

        elif mode == 'noshower':
            logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please, shower the Les Houches events before using them for physics analyses.""")


        self.update_status('', level='all', update_results=True)
        if self.run_card['ickkw'] == 3 and \
           (mode in ['noshower'] or \
            (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
            logger.warning("""You are running with FxFx merging enabled.
To be able to merge samples of various multiplicities without double counting,
you have to remove some events after showering 'by hand'.
Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")

        self.store_result()
        #check if the param_card defines a scan.
        if self.param_card_iterator:
            param_card_iterator = self.param_card_iterator
            self.param_card_iterator = [] #avoid to next generate go trough here
            param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
            orig_name = self.run_name
            #go trough the scal
            with misc.TMP_variable(self, 'allow_notification_center', False):
                for i,card in enumerate(param_card_iterator):
                    card.write(pjoin(self.me_dir,'Cards','param_card.dat'))
                    self.check_param_card(pjoin(self.me_dir,'Cards','param_card.dat'), dependent=True)
                    if not options['force']:
                        options['force'] = True
                    if options['run_name']:
                        options['run_name'] = '%s_%s' % (orig_name, i+1)
                    if not argss:
                        argss = [mode, "-f"]
                    elif argss[0] == "auto":
                        argss[0] = mode
                    # recurse for every point of the scan
                    self.do_launch("", options=options, argss=argss, switch=switch)
                    #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False)
                    param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
            #restore original param_card
            param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
            name = misc.get_scan_name(orig_name, self.run_name)
            path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
            logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK')
            param_card_iterator.write_summary(path)

        if self.allow_notification_center:
            misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
                              '%s: %s +- %s ' % (self.results.current['run_name'],
                                                 self.results.current['cross'],
                                                 self.results.current['error']))
1280 1281 1282 ############################################################################
1283 - def do_compile(self, line):
1284 """Advanced commands: just compile the executables """ 1285 argss = self.split_arg(line) 1286 # check argument validity and normalise argument 1287 (options, argss) = _compile_parser.parse_args(argss) 1288 options = options.__dict__ 1289 options['reweightonly'] = False 1290 options['nocompile'] = False 1291 self.check_compile(argss, options) 1292 1293 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]] 1294 self.ask_run_configuration(mode, options) 1295 self.compile(mode, options) 1296 1297 1298 self.update_status('', level='all', update_results=True)
1299 1300
1301 - def update_random_seed(self):
1302 """Update random number seed with the value from the run_card. 1303 If this is 0, update the number according to a fresh one""" 1304 iseed = self.run_card['iseed'] 1305 if iseed == 0: 1306 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit')) 1307 iseed = int(randinit.read()[2:]) + 1 1308 randinit.close() 1309 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w') 1310 randinit.write('r=%d' % iseed) 1311 randinit.close()
1312 1313
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created.

        mode: one of 'LO', 'NLO' (fixed order) or 'aMC@NLO', 'aMC@LO',
        'noshower', 'noshowerLO' (event generation).
        For fixed order the return value is None (results are collected
        by finalise_run_FO); otherwise the path returned by
        reweight_and_collect_events.
        """
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # for second step in applgrid mode, do only the event generation step
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()
        #find and keep track of all the jobs
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                        'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        p_dirs = [d for d in \
                  open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        #Clean previous results
        self.clean_previous_results(options,p_dirs,folder_names[mode])

        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']


        if options['reweightonly']:
            event_norm=self.run_card['event_norm']
            nevents=self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        if mode in ['LO', 'NLO']:
            # this is for fixed order runs
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # Re-distribute the grids for the 2nd step of the applgrid
            # running
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options,mode_dict[mode],p_dirs)

            # create a list of dictionaries "jobs_to_run" with all the
            # jobs that need to be run
            integration_step=-1
            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
            self.prepare_directories(jobs_to_run,mode)

            # loop over the integration steps. After every step, check
            # if we have the required accuracy. If this is the case,
            # stop running, else do another step.
            while True:
                integration_step=integration_step+1
                self.run_all_jobs(jobs_to_run,integration_step)
                self.collect_log_files(jobs_to_run,integration_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,integration_step,mode,mode_dict[mode])
                if not jobs_to_run:
                    # there are no more jobs to run (jobs_to_run is empty)
                    break
            # We are done.
            self.finalise_run_FO(folder_names[mode],jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            # validate the requested accuracy / number of events
            if nevents == 0 and req_acc < 0 :
                raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                        'of events, because 0 events requested. Please set '\
                                        'the "req_acc" parameter in the run_card to a value '\
                                        'between 0 and 1')
            elif req_acc >1 or req_acc == 0 :
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                        'be between larger than 0 and smaller than 1, '\
                                        'or set to -1 for automatic determination. Current '\
                                        'value is %f' % req_acc)
            # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card)
            elif req_acc < 0 and nevents > 1000000 :
                req_acc=0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. '\
                                    'Please use one of the following: %s' \
                                    % (shower, ', '.join(shower_list)))

            # check that PYTHIA6PT is not used for processes with FSR
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower','noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                req_acc,mode_dict[mode],1,mode,fixed_order=False)

            # Make sure to update all the jobs to be ready for the event generation step
            if options['only_generation']:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                 jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run,mode,fixed_order=False)


            # Main loop over the three MINT generation steps:
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
                self.collect_log_files(jobs_to_run,mint_step)
                if mint_step+1==2 and nevents==0:
                    self.print_summary(options,2,mode)
                    return
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)
            # Sanity check on the event files. If error the jobs are resubmitted
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                #if cluster run, wait 10 sec so that event files are transferred back
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm=self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1455
    def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
                           integration_step,mode,fixed_order=True):
        """Creates a list of dictionaries with all the jobs to be run.

        Returns (jobs_to_run, jobs_to_collect, integration_step).
        For a fresh run the jobs are built from the P*/channels.txt files
        written by 'gensym'; for 'only_generation' runs they are rebuilt
        from the existing G* directories on disk.
        """
        jobs_to_run=[]
        if not options['only_generation']:
            # Fresh, new run. Check all the P*/channels.txt files
            # (created by the 'gensym' executable) to set-up all the
            # jobs using the default inputs.
            npoints = self.run_card['npoints_FO_grid']
            niters = self.run_card['niters_FO_grid']
            for p_dir in p_dirs:
                try:
                    with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
                        channels=chan_file.readline().split()
                except IOError:
                    logger.warning('No integration channels found for contribution %s' % p_dir)
                    continue
                for channel in channels:
                    job={}
                    job['p_dir']=p_dir
                    job['channel']=channel
                    job['split']=0
                    # default number of iterations/points depends on the
                    # run type and on whether an accuracy was requested
                    if fixed_order and req_acc == -1:
                        job['accuracy']=0
                        job['niters']=niters
                        job['npoints']=npoints
                    elif fixed_order and req_acc > 0:
                        job['accuracy']=0.10
                        job['niters']=6
                        job['npoints']=-1
                    elif not fixed_order:
                        job['accuracy']=0.03
                        job['niters']=12
                        job['npoints']=-1
                    else:
                        raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
                                            'between 0 and 1 or set it equal to -1.')
                    job['mint_mode']=0
                    job['run_mode']=run_mode
                    job['wgt_frac']=1.0
                    jobs_to_run.append(job)
            jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs
        else:
            # if options['only_generation'] is true, we need to loop
            # over all the existing G* directories and create the jobs
            # from there.
            name_suffix={'born' :'B', 'all':'F'}
            for p_dir in p_dirs:
                for chan_dir in os.listdir(pjoin(self.me_dir,'SubProcesses',p_dir)):
                    # directory naming: '<run_mode>_G<channel>[_<split>]' for
                    # fixed order, 'G<B|F><channel>[_<split>]' otherwise
                    if ((chan_dir.startswith(run_mode+'_G') and fixed_order) or\
                        (chan_dir.startswith('G'+name_suffix[run_mode]) and (not fixed_order))) and \
                       (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)) or \
                        os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir))):
                        job={}
                        job['p_dir']=p_dir
                        if fixed_order:
                            channel=chan_dir.split('_')[1]
                            job['channel']=channel[1:] # remove the 'G'
                            if len(chan_dir.split('_')) == 3:
                                split=int(chan_dir.split('_')[2])
                            else:
                                split=0
                        else:
                            if len(chan_dir.split('_')) == 2:
                                split=int(chan_dir.split('_')[1])
                                channel=chan_dir.split('_')[0]
                                job['channel']=channel[2:] # remove the 'G'
                            else:
                                job['channel']=chan_dir[2:] # remove the 'G'
                                split=0
                        job['split']=split
                        job['run_mode']=run_mode
                        job['dirname']=pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)
                        job['wgt_frac']=1.0
                        if not fixed_order: job['mint_mode']=1
                        jobs_to_run.append(job)
            jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs
            if fixed_order:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
                                             jobs_to_collect,integration_step,mode,run_mode)
                # Update the integration_step to make sure that nothing will be overwritten
                integration_step=1
                for job in jobs_to_run:
                    while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
                        integration_step=integration_step+1
                integration_step=integration_step-1
            else:
                self.append_the_results(jobs_to_collect,integration_step)
        return jobs_to_run,jobs_to_collect,integration_step
1545
1546 - def prepare_directories(self,jobs_to_run,mode,fixed_order=True):
1547 """Set-up the G* directories for running""" 1548 name_suffix={'born' :'B' , 'all':'F'} 1549 for job in jobs_to_run: 1550 if job['split'] == 0: 1551 if fixed_order : 1552 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1553 job['run_mode']+'_G'+job['channel']) 1554 else: 1555 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1556 'G'+name_suffix[job['run_mode']]+job['channel']) 1557 else: 1558 if fixed_order : 1559 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1560 job['run_mode']+'_G'+job['channel']+'_'+str(job['split'])) 1561 else: 1562 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1563 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split'])) 1564 job['dirname']=dirname 1565 if not os.path.isdir(dirname): 1566 os.makedirs(dirname) 1567 self.write_input_file(job,fixed_order) 1568 if not fixed_order: 1569 # copy the grids from the base directory to the split directory: 1570 if job['split'] != 0: 1571 for f in ['grid.MC_integer','mint_grids','res_1']: 1572 if not os.path.isfile(pjoin(job['dirname'],f)): 1573 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
1574 1575
    def write_input_file(self,job,fixed_order):
        """write the input file for the madevent_mint* executable in the
        appropriate directory.

        The file 'input_app.txt' is written in job['dirname'] and filled
        with the job's channel, split, run_mode and MINT-mode settings.
        The two templates below are read by the fortran executables, so
        their exact layout must not be changed.
        """
        if fixed_order:
            # keyword = value format used by madevent_mintFO
            content= \
"""NPOINTS = %(npoints)s
NITERATIONS = %(niters)s
ACCURACY = %(accuracy)s
ADAPT_GRID = 2
MULTICHANNEL = 1
SUM_HELICITY = 1
CHANNEL = %(channel)s
SPLIT = %(split)s
RUN_MODE = %(run_mode)s
RESTART = %(mint_mode)s
""" \
                % job
        else:
            # positional format used by madevent_mintMC
            content = \
"""-1 12 ! points, iterations
%(accuracy)s ! desired fractional accuracy
1 -0.1 ! alpha, beta for Gsoft
1 -0.1 ! alpha, beta for Gazi
1 ! Suppress amplitude (0 no, 1 yes)?
1 ! Exact helicity sum (0 yes, n = number/event)?
%(channel)s ! Enter Configuration Number:
%(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events
1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij
%(run_mode)s ! all, born, real, virt
""" \
                % job
        with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file:
            input_file.write(content)
1608 1609
1610 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
1611 """Loops over the jobs_to_run and executes them using the function 'run_exe'""" 1612 if fixed_order: 1613 if integration_step == 0: 1614 self.update_status('Setting up grids', level=None) 1615 else: 1616 self.update_status('Refining results, step %i' % integration_step, level=None) 1617 self.ijob = 0 1618 name_suffix={'born' :'B', 'all':'F'} 1619 if fixed_order: 1620 run_type="Fixed order integration step %s" % integration_step 1621 else: 1622 run_type="MINT step %s" % integration_step 1623 self.njobs=len(jobs_to_run) 1624 for job in jobs_to_run: 1625 executable='ajob1' 1626 if fixed_order: 1627 arguments=[job['channel'],job['run_mode'], \ 1628 str(job['split']),str(integration_step)] 1629 else: 1630 arguments=[job['channel'],name_suffix[job['run_mode']], \ 1631 str(job['split']),str(integration_step)] 1632 self.run_exe(executable,arguments,run_type, 1633 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir'])) 1634 1635 if self.cluster_mode == 2: 1636 time.sleep(1) # security to allow all jobs to be launched 1637 self.wait_for_complete(run_type)
1638 1639
    def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
                            integration_step,mode,run_mode,fixed_order=True):
        """Collect the results, make HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list
        of the jobs that still need to be run, as well as the
        complete list of jobs that need to be collected to get the
        final answer.
        """
        # Get the results of the current integration/MINT step
        self.append_the_results(jobs_to_run,integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)
        # Update HTML pages
        if fixed_order:
            cross, error = sum_html.make_all_html_results(self, ['%s*' % run_mode])
        else:
            name_suffix={'born' :'B' , 'all':'F'}
            cross, error = sum_html.make_all_html_results(self, ['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)
        # Set-up jobs for the next iteration/MINT step
        jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)
        # if there are no more jobs, we are done!
        # Print summary
        if (not jobs_to_run_new) and fixed_order:
            # print final summary of results (for fixed order)
            scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
            return jobs_to_run_new,jobs_to_collect
        elif jobs_to_run_new:
            # print intermediate summary of results
            scale_pdf_info=[]
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
        else:
            # When we are done for (N)LO+PS runs, do not print
            # anything yet. This will be done after the reweighting
            # and collection of the events
            scale_pdf_info=[]
        # Prepare for the next integration/MINT step
        if (not fixed_order) and integration_step+1 == 2 :
            # next step is event generation (mint_step 2): possibly split
            # the jobs and write the event bookkeeping files
            jobs_to_run_new,jobs_to_collect_new= \
                self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            jobs_to_collect_new=jobs_to_collect
        return jobs_to_run_new,jobs_to_collect_new
1689 1690
1691 - def write_nevents_unweighted_file(self,jobs,jobs0events):
1692 """writes the nevents_unweighted file in the SubProcesses directory. 1693 We also need to write the jobs that will generate 0 events, 1694 because that makes sure that the cross section from those channels 1695 is taken into account in the event weights (by collect_events.f). 1696 """ 1697 content=[] 1698 for job in jobs: 1699 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 1700 lhefile=pjoin(path,'events.lhe') 1701 content.append(' %s %d %9e %9e' % \ 1702 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac'])) 1703 for job in jobs0events: 1704 if job['nevents']==0: 1705 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 1706 lhefile=pjoin(path,'events.lhe') 1707 content.append(' %s %d %9e %9e' % \ 1708 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.)) 1709 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f: 1710 f.write('\n'.join(content)+'\n')
1711
def write_nevts_files(self,jobs):
    """Write a 'nevts' file (number of events to generate) inside each
    of the SubProcesses/P*/G*/ directories listed in the jobs."""
    for current in jobs:
        nevts_path = pjoin(current['dirname'], 'nevts')
        with open(nevts_path, 'w') as handle:
            handle.write('%i\n' % current['nevents'])
1717
def check_the_need_to_split(self,jobs_to_run,jobs_to_collect):
    """Check whether the event-generation step must be split up.

    When the run card requests at most 'nevt_job' events per job, every
    job that would generate more than that is replaced by several split
    jobs (suffix '_<i>' on the directory name).  Jobs that need no
    events are dropped.  Returns the new (jobs_to_run, jobs_to_collect).
    """
    max_evts = self.run_card['nevt_job']
    if max_evts > 0:
        collect_new = list(jobs_to_collect)
        for job in jobs_to_run:
            nevents = job['nevents']
            if nevents == 0:
                collect_new.remove(job)
            elif nevents > max_evts:
                collect_new.remove(job)
                # number of splits: ceiling of nevents / max_evts
                nsplit = nevents // max_evts
                if nevents % max_evts:
                    nsplit += 1
                base = nevents // nsplit
                remainder = nevents % nsplit
                for part in range(1, nsplit + 1):
                    piece = copy.copy(job)
                    # spread the remainder over the first 'remainder' splits
                    piece['nevents'] = base + 1 if part <= remainder else base
                    piece['wgt_frac'] = float(piece['nevents']) / float(nevents)
                    piece['split'] = part
                    piece['dirname'] = job['dirname'] + '_%i' % part
                    collect_new.append(piece)
        run_new = list(collect_new)
    else:
        # no splitting requested: only drop the zero-event jobs
        run_new = [job for job in jobs_to_collect if job['nevents'] != 0]
        collect_new = list(run_new)
    return run_new, collect_new
1758 1759
def update_jobs_to_run(self,req_acc,step,jobs,fixed_order=True):
    """
    For (N)LO+PS: determines the number of events and/or the required
    accuracy per job.
    For fixed order: determines which jobs need higher precision and
    returns those with the newly requested precision.

    req_acc: requested accuracy (-1 means "use run-card defaults"),
    step:    current integration/MINT step (the NEXT step is step+1),
    jobs:    list of job dictionaries, mutated in place,
    fixed_order: True for FO runs, False for (N)LO+PS.
    Returns the list of jobs still to be run (possibly empty).
    """
    err=self.cross_sect_dict['errt']
    tot=self.cross_sect_dict['xsect']
    errABS=self.cross_sect_dict['erra']
    totABS=self.cross_sect_dict['xseca']
    jobs_new=[]
    if fixed_order:
        if req_acc == -1:
            # No accuracy requested: use the fixed numbers of points and
            # iterations from the run card for the very first step only.
            if step+1 == 1:
                npoints = self.run_card['npoints_FO']
                niters = self.run_card['niters_FO']
                for job in jobs:
                    job['mint_mode']=-1
                    job['niters']=niters
                    job['npoints']=npoints
                    jobs_new.append(job)
            elif step+1 == 2:
                pass
            elif step+1 > 2:
                raise aMCatNLOError('Cannot determine number of iterations and PS points '+
                        'for integration step %i' % step )
        elif ( req_acc > 0 and err/tot > req_acc*1.2 ) or step <= 0:
            req_accABS=req_acc*abs(tot)/totABS # overal relative required accuracy on ABS Xsec.
            for job in jobs:
                job['mint_mode']=-1
                # Determine relative required accuracy on the ABS for this job
                job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
                # If already accurate enough, skip the job (except when doing the first
                # step for the iappl=2 run: we need to fill all the applgrid grids!)
                if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
                        and not (step==-1 and self.run_card['iappl'] == 2):
                    continue
                # Update the number of PS points based on errorABS, ncall and accuracy
                itmax_fl=job['niters_done']*math.pow(job['errorABS']/
                        (job['accuracy']*job['resultABS']),2)
                # Trade iterations against points per iteration depending on
                # how much more statistics is needed.
                if itmax_fl <= 4.0 :
                    job['niters']=max(int(round(itmax_fl)),2)
                    job['npoints']=job['npoints_done']*2
                elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
                    job['niters']=4
                    job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
                else:
                    # cap the growth of the estimated number of iterations
                    if itmax_fl > 100.0 : itmax_fl=50.0
                    job['niters']=int(round(math.sqrt(itmax_fl)))
                    job['npoints']=int(round(job['npoints_done']*itmax_fl/
                            round(math.sqrt(itmax_fl))))*2
                # Add the job to the list of jobs that need to be run
                jobs_new.append(job)
        return jobs_new
    elif step+1 <= 2:
        nevents=self.run_card['nevents']
        # Total required accuracy for the upper bounding envelope
        if req_acc<0:
            req_acc2_inv=nevents
        else:
            req_acc2_inv=1/(req_acc*req_acc)
        if step+1 == 1 or step+1 == 2 :
            # determine the req. accuracy for each of the jobs for Mint-step = 1
            for job in jobs:
                accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
                job['accuracy']=accuracy
        if step+1 == 2:
            # Randomly (based on the relative ABS Xsec of the job) determine the
            # number of events each job needs to generate for MINT-step = 2.
            r=self.get_randinit_seed()
            random.seed(r)
            totevts=nevents
            for job in jobs:
                job['nevents'] = 0
            # Multinomial sampling: each of the 'nevents' events is assigned
            # to a channel with probability proportional to its ABS Xsec.
            while totevts :
                target = random.random() * totABS
                crosssum = 0.
                i = 0
                while i<len(jobs) and crosssum < target:
                    job = jobs[i]
                    crosssum += job['resultABS']
                    i += 1
                totevts -= 1
                i -= 1
                jobs[i]['nevents'] += 1
        for job in jobs:
            job['mint_mode']=step+1 # next step
        return jobs
    else:
        return []
1851 1852
def get_randinit_seed(self):
    """Return the random-number seed stored in SubProcesses/randinit
    (format of the file is "r=%d")."""
    randinit_path = pjoin(self.me_dir, "SubProcesses", "randinit")
    with open(randinit_path) as handle:
        # skip the leading "r=" and parse the rest as an integer
        return int(handle.read()[2:])
1859 1860
def append_the_results(self,jobs,integration_step):
    """Read the res*.dat file of every job and store the integration
    results (cross sections, errors, iteration counts, timing) in the
    job dictionaries.  Raises aMCatNLOError at the end when one or more
    result files could not be read."""
    failed_logs = []
    for job in jobs:
        if integration_step >= 0 :
            res_path = pjoin(job['dirname'], 'res_%s.dat' % integration_step)
        else:
            # should only be here when doing fixed order with the
            # 'only_generation' option equal to True: take the results
            # from the final run done.
            res_path = pjoin(job['dirname'], 'res.dat')
        try:
            with open(res_path) as res_file:
                fields = res_file.readline().split()
        except IOError:
            # remember the failing channel, keep collecting the others
            failed_logs.append(pjoin(job['dirname'], 'log.txt'))
            continue
        job['resultABS'] = float(fields[0])
        job['errorABS'] = float(fields[1])
        job['result'] = float(fields[2])
        job['error'] = float(fields[3])
        job['niters_done'] = int(fields[4])
        job['npoints_done'] = int(fields[5])
        job['time_spend'] = float(fields[6])
        job['err_percABS'] = job['errorABS']/job['resultABS']*100.
        job['err_perc'] = job['error']/job['result']*100.
    if failed_logs:
        raise aMCatNLOError('An error occurred during the collection of results.\n' +
                'Please check the .log files inside the directories which failed:\n' +
                '\n'.join(failed_logs)+'\n')
1893 1894 1895
def write_res_txt_file(self,jobs,integration_step):
    """Write the res_<step>.txt summary file in the SubProcesses
    directory and return a dict with the total (ABS) cross sections,
    their errors and the random seed.  Sorts 'jobs' in place by
    decreasing absolute error."""
    jobs.sort(key=lambda j: -j['errorABS'])
    lines = ['\n\nCross section per integration channel:']
    for job in jobs:
        lines.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
    lines.append('\n\nABS cross section per integration channel:')
    for job in jobs:
        lines.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
    # weighted totals; errors are added in quadrature
    totABS = 0
    err2ABS = 0
    tot = 0
    err2 = 0
    for job in jobs:
        totABS += job['resultABS']*job['wgt_frac']
        err2ABS += math.pow(job['errorABS'],2)*job['wgt_frac']
        tot += job['result']*job['wgt_frac']
        err2 += math.pow(job['error'],2)*job['wgt_frac']
    if jobs:
        lines.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %
                (totABS, math.sqrt(err2ABS), math.sqrt(err2ABS)/totABS*100.,
                 tot, math.sqrt(err2), math.sqrt(err2)/tot*100.))
    res_path = pjoin(self.me_dir, 'SubProcesses', 'res_%s.txt' % integration_step)
    with open(res_path, 'w') as res_file:
        res_file.write('\n'.join(lines))
    return {'xsect': tot, 'xseca': totABS, 'errt': math.sqrt(err2),
            'erra': math.sqrt(err2ABS), 'randinit': self.get_randinit_seed()}
1924 1925
def collect_scale_pdf_info(self,options,jobs):
    """Read the scale_pdf_dependence.dat files of the jobs and collect
    the scale/PDF-uncertainty information.  Returns an empty list when
    no reweighting information was requested in the run card."""
    reweighting_requested = (
        any(self.run_card['reweight_scale']) or
        any(self.run_card['reweight_PDF']) or
        len(self.run_card['dynamical_scale_choice']) > 1 or
        len(self.run_card['lhaid']) > 1)
    if not reweighting_requested:
        return []
    data_files = [pjoin(job['dirname'], 'scale_pdf_dependence.dat')
                  for job in jobs]
    weights = [job['wgt_frac'] for job in jobs]
    return self.pdf_scale_from_reweighting(data_files, weights)
1938 1939
def combine_plots_FO(self,folder_name,jobs):
    """Combine the fixed-order plots of all jobs and put them in the
    Events/run* directory.  The output format (TopDrawer, HwU, ROOT or
    none) is read from the FO_analyse card.

    FIX: the devnull handle is now opened in a with-block so it is
    always closed (the previous version leaked it)."""
    with open(os.devnull, 'w') as devnull:
        fmt = self.analyse_card['fo_analysis_format'].lower()
        if fmt == 'topdrawer':
            misc.call(['./combine_plots_FO.sh'] + folder_name, \
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the TopDrawer file with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif fmt == 'hwu':
            out = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO')
            self.combine_plots_HwU(jobs, out)
            # gnuplot is optional: ignore a missing executable
            try:
                misc.call(['gnuplot', 'MADatNLO.gnuplot'], \
                          stdout=devnull, stderr=devnull, \
                          cwd=pjoin(self.me_dir, 'Events', self.run_name))
            except Exception:
                pass
            logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif fmt == 'root':
            misc.call(['./combine_root.sh'] + folder_name, \
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the ROOT file with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        else:
            # no plotting requested: only the results themselves are kept
            logger.info('The results of this run' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1974
def combine_plots_HwU(self,jobs,out,normalisation=None):
    """Sums all the plots in the HwU format.

    jobs:          list of job dicts; 'dirname' either points directly at a
                   .HwU file or at a directory containing MADatNLO.HwU,
    out:           output path prefix passed to histograms.py via --out,
    normalisation: optional per-file multiplicative factors (--multiply).
    """
    logger.debug('Combining HwU plots.')

    # build the histograms.py command line
    command =  []
    command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py'))
    for job in jobs:
        if job['dirname'].endswith('.HwU'):
            command.append(job['dirname'])
        else:
            command.append(pjoin(job['dirname'],'MADatNLO.HwU'))
    command.append("--out="+out)
    command.append("--gnuplot")
    command.append("--band=[]")
    command.append("--lhapdf-config="+self.options['lhapdf'])
    if normalisation:
        command.append("--multiply="+(','.join([str(n) for n in normalisation])))
    command.append("--sum")
    command.append("--keep_all_weights")
    command.append("--no_open")

    p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir)

    # forward the child's log lines: important ones to stdout,
    # everything else to the debug logger
    while p.poll() is None:
        line = p.stdout.readline()
        if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']):
            print line[:-1]
        elif __debug__ and line:
            logger.debug(line[:-1])
2004 2005
def applgrid_combine(self,cross,error,jobs):
    """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories

    cross, error: total cross section and its error, used to weight the
                  grids in the iappl=2 case.
    jobs:         job dicts whose 'dirname' directories hold the grids.
    """
    logger.debug('Combining APPLgrids \n')
    # the applgrid-combine executable lives next to applgrid-config
    applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),
            'applgrid-combine')
    all_jobs=[]
    for job in jobs:
        all_jobs.append(job['dirname'])
    ngrids=len(all_jobs)
    # one *_out.root grid file per observable in the first job directory
    nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
    for obs in range(0,nobs):
        gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
        # combine APPLgrids from different channels for observable 'obs'
        if self.run_card["iappl"] == 1:
            misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,
                "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
        elif self.run_card["iappl"] == 2:
            unc2_inv=pow(cross/error,2)
            # NOTE(review): unc2_inv_ngrids is computed but never used;
            # '--weight' is passed unc2_inv instead -- confirm intended.
            unc2_inv_ngrids=pow(cross/error,2)*ngrids
            misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",
                self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',
                str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
            for job in all_jobs:
                os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root"))
        else:
            raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
        # after combining, delete the original grids
        for ggdir in gdir:
            os.remove(ggdir)
2035 2036
def applgrid_distribute(self,options,mode,p_dirs):
    """Distributes the APPLgrids ready to be filled by a second run of the code

    options: run options; 'appl_start_grid' names the run whose starting
             grids should be used (guessed from mtime when absent),
    mode:    G-directory prefix ('all' or 'born'),
    p_dirs:  list of SubProcesses/P* directory names.
    """
    # if no appl_start_grid argument given, guess it from the time stamps
    # of the starting grid files
    if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
        gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                           pjoin(self.me_dir,'Events'))

        time_stamps={}
        for root_file in gfiles:
            time_stamps[root_file]=os.path.getmtime(root_file)
        # most recent starting grid wins; its parent dir is the run name
        options['appl_start_grid']= \
            max(time_stamps.iterkeys(), key=(lambda key:
                time_stamps[key])).split('/')[-2]
        logger.info('No --appl_start_grid option given. '+\
                'Guessing that start grid from run "%s" should be used.' \
                % options['appl_start_grid'])

    if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
        self.appl_start_grid = options['appl_start_grid']
        start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
        # check that this dir exists and at least one grid file is there
        if not os.path.exists(pjoin(start_grid_dir,
                'aMCfast_obs_0_starting_grid.root')):
            raise self.InvalidCmd('APPLgrid file not found: %s' % \
                    pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
        else:
            all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
                    start_grid_dir) if name.endswith("_starting_grid.root")]
            nobs =len(all_grids)
            gstring=" ".join(all_grids)
    if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
        raise self.InvalidCmd('No APPLgrid name currently defined.'+
                'Please provide this information.')
    #copy the grid to all relevant directories
    for pdir in p_dirs:
        g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                "SubProcesses",pdir)) if file.startswith(mode+'_G') and
                os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
        for g_dir in g_dirs:
            for grid in all_grids:
                # observable index is the 3rd-to-last '_'-separated token
                obs=grid.split('_')[-3]
                files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
                        'grid_obs_'+obs+'_in.root'))
2081 2082 2083 2084
def collect_log_files(self, jobs, integration_step):
    """Collect the log files of the given jobs and put them in a single,
    html-friendly file inside the Events/run_.../ directory.

    FIX: the output file and every per-job log file are now opened in
    with-blocks so the handles are closed deterministically (the
    previous version relied on garbage collection)."""
    log_file = pjoin(self.me_dir, 'Events', self.run_name,
                     'alllogs_%d.html' % integration_step)
    with open(log_file, 'w') as outfile:
        outfile.write('<HTML><BODY>\n<font face="courier" size=2>')
        for job in jobs:
            log = pjoin(job['dirname'], 'log_MINT%s.txt' % integration_step)
            # channel label: job directory relative to SubProcesses
            channel = os.path.dirname(log).replace(
                pjoin(self.me_dir, 'SubProcesses'), '')
            # put an anchor
            content = '<a name=%s></a>\n' % channel
            # and put some nice header
            content += '<font color="red">\n'
            content += '<br>LOG file for integration channel %s, %s <br>' % \
                       (channel, integration_step)
            content += '</font>\n'
            # then just flush the content of the small log inside the big log;
            # the PRE tag prints everything verbatim
            with open(log) as logfile:
                content += '<PRE>\n' + logfile.read() + '\n</PRE>'
            content += '<br>\n'
            outfile.write(content)
        outfile.write('</font>\n</BODY></HTML>\n')
2115 2116
def finalise_run_FO(self,folder_name,jobs):
    """Combine the plots and put the res*.txt files in the Events/run.../ folder."""
    run_dir = pjoin(self.me_dir, 'Events', self.run_name)
    # move the res_*.txt summary files from SubProcesses to Events/run*
    for res_file in misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses')):
        files.mv(res_file, run_dir)
    # collect the plots and put them in the Events/run* folder
    self.combine_plots_FO(folder_name, jobs)
    # when running with APPLgrid, also combine the grids and put them
    # in the Events/run* folder
    if self.run_card['iappl'] != 0:
        self.applgrid_combine(self.cross_sect_dict['xsect'],
                              self.cross_sect_dict['errt'], jobs)
2131 2132
def setup_cluster_or_multicore(self):
    """setup the number of cores for multicore, and the cluster-type for cluster runs"""
    # cluster_mode 1: submit to a batch cluster of the configured type
    if self.cluster_mode == 1:
        cluster_name = self.options['cluster_type']
        try:
            self.cluster = cluster.from_name[cluster_name](**self.options)
        except KeyError:
            # not a native cluster type: look for a PLUGIN that provides it
            if aMCatNLO and ('mg5_path' not in self.options or not self.options['mg5_path']):
                if not self.plugin_path:
                    raise self.InvalidCmd('%s not native cluster type and no plugin directory available.' % cluster_name)
            elif aMCatNLO:
                mg5dir = self.options['mg5_path']
                if mg5dir not in sys.path:
                    sys.path.append(mg5dir)
                if pjoin(mg5dir, 'PLUGIN') not in self.plugin_path:
                    self.plugin_path.append(pjoin(mg5dir))
            else:
                mg5dir = MG5DIR
            # Check if a plugin define this type of cluster
            # check for PLUGIN format
            for plugpath in self.plugin_path:
                plugindirname = os.path.basename(plugpath)
                for plug in os.listdir(plugpath):
                    if os.path.exists(pjoin(plugpath, plug, '__init__.py')):
                        try:
                            __import__('%s.%s' % (plugindirname, plug))
                        except Exception, error:
                            logger.critical('plugin directory %s/%s fail to be loaded. Please check it',plugindirname, plug)
                            continue
                        plugin = sys.modules['%s.%s' % (plugindirname,plug)]
                        if not hasattr(plugin, 'new_cluster'):
                            continue
                        if not misc.is_plugin_supported(plugin):
                            continue
                        if cluster_name in plugin.new_cluster:
                            logger.info("cluster handling will be done with PLUGIN: %s" % plug,'$MG:color:BLACK')
                            self.cluster = plugin.new_cluster[cluster_name](**self.options)
                            # NOTE(review): this break only leaves the inner
                            # loop; remaining plugin paths are still scanned.
                            break

    # cluster_mode 2: run locally on several cores
    if self.cluster_mode == 2:
        try:
            import multiprocessing
            if not self.nb_core:
                try:
                    self.nb_core = int(self.options['nb_core'])
                except TypeError:
                    # nb_core option unset (None): use all available cores
                    self.nb_core = multiprocessing.cpu_count()
            logger.info('Using %d cores' % self.nb_core)
        except ImportError:
            self.nb_core = 1
            logger.warning('Impossible to detect the number of cores => Using One.\n'+
                    'Use set nb_core X in order to set this number and be able to'+
                    'run in multicore.')

        self.cluster = cluster.MultiCore(**self.options)
2188 2189
def clean_previous_results(self,options,p_dirs,folder_name):
    """Clean previous results.
    o. If doing only the reweighting step, do not delete anything and return directlty.
    o. Always remove all the G*_* files (from split event generation).
    o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only."""
    if options['reweightonly']:
        return
    if not options['only_generation']:
        self.update_status('Cleaning previous results', level=None)
    for dir in p_dirs:
        #find old folders to be removed
        for obj in folder_name:
            # list all the G* (or all_G* or born_G*) directories
            # (obj is a glob pattern like 'G*'; obj[:-1] drops the '*')
            to_rm = [file for file in \
                    os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                    if file.startswith(obj[:-1]) and \
                    (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                     os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
            # list all the G*_* directories (from split event generation)
            to_always_rm = [file for file in \
                    os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                    if file.startswith(obj[:-1]) and
                    '_' in file and not '_G' in file and \
                    (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                     os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

            # when not doing only_generation, also remove the main G*
            # directories and any leftover MadLoop resource tarball
            if not options['only_generation']:
                to_always_rm.extend(to_rm)
                if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
                    to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
            files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
    return
2222 2223
def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
    """print a summary of the results contained in self.cross_sect_dict.
    step corresponds to the mintMC step, if =2 (i.e. after event generation)
    some additional infos are printed

    NOTE(review): the mutable default for scale_pdf_info is only read,
    never mutated, so the shared-default pitfall does not apply here.
    """
    # find process name
    proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
    process = ''
    for line in proc_card_lines:
        if line.startswith('generate') or line.startswith('add process'):
            process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
    # map the lpp run-card value onto a beam label
    lpp = {0:'l', 1:'p', -1:'pbar'}
    if self.ninitial == 1:
        proc_info = '\n Process %s' % process[:-3]
    else:
        proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
                (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
                 self.run_card['ebeam1'], self.run_card['ebeam2'])

    # decay widths (one initial particle) are in GeV, cross sections in pb
    if self.ninitial == 1:
        self.cross_sect_dict['unit']='GeV'
        self.cross_sect_dict['xsec_string']='(Partial) decay width'
        self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
    else:
        self.cross_sect_dict['unit']='pb'
        self.cross_sect_dict['xsec_string']='Total cross section'
        self.cross_sect_dict['axsec_string']='Total abs(cross section)'

    if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
        status = ['Determining the number of unweighted events per channel',
                  'Updating the number of unweighted events per channel',
                  'Summary:']
        computed='(computed from LHE events)'
    elif mode in ['NLO', 'LO']:
        status = ['Results after grid setup:','Current results:',
                  'Final results and run summary:']
        computed='(computed from histogram information)'

    if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
        # intermediate (N)LO+PS summary
        message = status[step] + '\n\n Intermediate results:' + \
            ('\n Random seed: %(randinit)d' + \
             '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
             '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
             % self.cross_sect_dict
    elif mode in ['NLO','LO'] and not done:
        # intermediate fixed-order summary
        if step == 0:
            message = '\n ' + status[0] + \
                '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                self.cross_sect_dict
        else:
            message = '\n ' + status[1] + \
                '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                self.cross_sect_dict

    else:
        # final summary, including scale/PDF uncertainties when available
        message = '\n --------------------------------------------------------------'
        message = message + \
            '\n ' + status[2] + proc_info
        if mode not in ['LO', 'NLO']:
            message = message + \
                '\n Number of events generated: %s' % self.run_card['nevents']
        message = message + \
            '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
            self.cross_sect_dict
        message = message + \
            '\n --------------------------------------------------------------'
        if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
            if scale_pdf_info[0]:
                # scale uncertainties
                message = message + '\n Scale variation %s:' % computed
                for s in scale_pdf_info[0]:
                    if s['unc']:
                        if self.run_card['ickkw'] != -1:
                            message = message + \
                                ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
                                 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
                        else:
                            message = message + \
                                ('\n Soft and hard scale dependence (added in quadrature): '\
                                 '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s

                    else:
                        message = message + \
                            ('\n Dynamical_scale_choice %(label)i: '\
                             '\n %(cen)8.3e pb') % s

            if scale_pdf_info[1]:
                message = message + '\n PDF variation %s:' % computed
                for p in scale_pdf_info[1]:
                    if p['unc']=='none':
                        message = message + \
                            ('\n %(name)s (central value only): '\
                             '\n %(cen)8.3e pb') % p

                    elif p['unc']=='unknown':
                        message = message + \
                            ('\n %(name)s (%(size)s members; combination method unknown): '\
                             '\n %(cen)8.3e pb') % p
                    else:
                        message = message + \
                            ('\n %(name)s (%(size)s members; using %(unc)s method): '\
                             '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
            # pdf uncertainties
            message = message + \
                '\n --------------------------------------------------------------'


    if (mode in ['NLO', 'LO'] and not done) or \
       (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
        logger.info(message+'\n')
        return

    # Some advanced general statistics are shown in the debug message at the
    # end of the run
    # Make sure it never stops a run
    # Gather some basic statistics for the run and extracted from the log files.
    if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
        log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
                        pjoin(self.me_dir, 'SubProcesses'))
        all_log_files = log_GV_files
    elif mode == 'NLO':
        log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
                        pjoin(self.me_dir, 'SubProcesses'))
        all_log_files = log_GV_files

    elif mode == 'LO':
        log_GV_files = ''
        all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
                        pjoin(self.me_dir, 'SubProcesses'))
    else:
        raise aMCatNLOError, 'Running mode %s not supported.'%mode

    try:
        message, debug_msg = \
            self.compile_advanced_stats(log_GV_files, all_log_files, message)
    except Exception as e:
        # never let statistics collection stop the run
        debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
        err_string = StringIO.StringIO()
        traceback.print_exc(limit=4, file=err_string)
        debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
            %err_string.getvalue()

    logger.debug(debug_msg+'\n')
    logger.info(message+'\n')

    # Now copy relevant information in the Events/Run_<xxx> directory
    evt_path = pjoin(self.me_dir, 'Events', self.run_name)
    open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
    open(pjoin(evt_path, '.full_summary.txt'),
         'w').write(message+'\n\n'+debug_msg+'\n')

    self.archive_files(evt_path,mode)
2375
def archive_files(self, evt_path, mode):
    """ Copies in the Events/Run_<xxx> directory relevant files characterizing
    the run, and packs them into RunMaterial.tar.gz.

    FIX: the setscales.f/cuts.f entries used 'Subprocesses' (wrong
    case); the directory is named 'SubProcesses' everywhere else in this
    module, so those files were silently skipped on case-sensitive
    file systems (the os.path.isfile guard hid the problem)."""
    files_to_arxiv = [pjoin('Cards','param_card.dat'),
                      pjoin('Cards','MadLoopParams.dat'),
                      pjoin('Cards','FKS_params.dat'),
                      pjoin('Cards','run_card.dat'),
                      pjoin('SubProcesses','setscales.f'),
                      pjoin('SubProcesses','cuts.f')]

    # fixed-order runs also archive the analysis card
    if mode in ['NLO', 'LO']:
        files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))

    if not os.path.exists(pjoin(evt_path,'RunMaterial')):
        os.mkdir(pjoin(evt_path,'RunMaterial'))

    # copy whichever of these files exist, then pack and clean up
    for path in files_to_arxiv:
        if os.path.isfile(pjoin(self.me_dir,path)):
            files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial'))
    misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path)
    shutil.rmtree(pjoin(evt_path,'RunMaterial'))
2398
2399 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
2400 """ This functions goes through the log files given in arguments and 2401 compiles statistics about MadLoop stability, virtual integration 2402 optimization and detection of potential error messages into a nice 2403 debug message to printed at the end of the run """ 2404 2405 def safe_float(str_float): 2406 try: 2407 return float(str_float) 2408 except ValueError: 2409 logger.debug('Could not convert the following float during'+ 2410 ' advanced statistics printout: %s'%str(str_float)) 2411 return -1.0
2412 2413 2414 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 2415 # > Errors is a list of tuples with this format (log_file,nErrors) 2416 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 2417 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 2418 2419 # ================================== 2420 # == MadLoop stability statistics == 2421 # ================================== 2422 2423 # Recuperate the fraction of unstable PS points found in the runs for 2424 # the virtuals 2425 UPS_stat_finder = re.compile( 2426 r"Satistics from MadLoop:.*"+\ 2427 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 2428 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 2429 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 2430 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 2431 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 2432 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 2433 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 2434 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 2435 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 2436 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 2437 2438 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 2439 1 : 'CutTools (double precision)', 2440 2 : 'PJFry++', 2441 3 : 'IREGI', 2442 4 : 'Golem95', 2443 5 : 'Samurai', 2444 6 : 'Ninja (double precision)', 2445 7 : 'COLLIER', 2446 8 : 'Ninja (quadruple precision)', 2447 9 : 'CutTools (quadruple precision)'} 2448 RetUnit_finder =re.compile( 2449 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 2450 #Unit 2451 2452 for gv_log in log_GV_files: 2453 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 2454 log=open(gv_log,'r').read() 2455 UPS_stats = re.search(UPS_stat_finder,log) 2456 for retunit_stats in re.finditer(RetUnit_finder, log): 2457 if channel_name not in stats['UPS'].keys(): 2458 stats['UPS'][channel_name] = [0]*10+[[0]*10] 2459 
stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 2460 += int(retunit_stats.group('n_occurences')) 2461 if not UPS_stats is None: 2462 try: 2463 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 2464 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 2465 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 2466 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 2467 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 2468 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 2469 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 2470 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 2471 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 2472 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 2473 except KeyError: 2474 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 2475 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 2476 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 2477 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 2478 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 2479 int(UPS_stats.group('n10')),[0]*10] 2480 debug_msg = "" 2481 if len(stats['UPS'].keys())>0: 2482 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 2483 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 2484 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 2485 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 2486 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 2487 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0) 2488 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 2489 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 2490 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 2491 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 2492 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 2493 
for i in range(10)] 2494 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 2495 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 2496 maxUPS = max(UPSfracs, key = lambda w: w[1]) 2497 2498 tmpStr = "" 2499 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 2500 tmpStr += '\n Stability unknown: %d'%nTotsun 2501 tmpStr += '\n Stable PS point: %d'%nTotsps 2502 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 2503 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 2504 tmpStr += '\n Only double precision used: %d'%nTotddp 2505 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 2506 tmpStr += '\n Initialization phase-space points: %d'%nTotini 2507 tmpStr += '\n Reduction methods used:' 2508 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 2509 unit_code_meaning.keys() if nTot1[i]>0] 2510 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 2511 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 2512 if nTot100 != 0: 2513 debug_msg += '\n Unknown return code (100): %d'%nTot100 2514 if nTot10 != 0: 2515 debug_msg += '\n Unknown return code (10): %d'%nTot10 2516 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 2517 not in unit_code_meaning.keys()) 2518 if nUnknownUnit != 0: 2519 debug_msg += '\n Unknown return code (1): %d'\ 2520 %nUnknownUnit 2521 2522 if maxUPS[1]>0.001: 2523 message += tmpStr 2524 message += '\n Total number of unstable PS point detected:'+\ 2525 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS) 2526 message += '\n Maximum fraction of UPS points in '+\ 2527 'channel %s (%4.2f%%)'%maxUPS 2528 message += '\n Please report this to the authors while '+\ 2529 'providing the file' 2530 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 2531 maxUPS[0],'UPS.log')) 2532 else: 2533 debug_msg += tmpStr 2534 2535 2536 # ==================================================== 2537 # == aMC@NLO virtual integration optimization stats == 2538 # 
==================================================== 2539 2540 virt_tricks_finder = re.compile( 2541 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 2542 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 2543 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 2544 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 2545 2546 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 2547 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 2548 2549 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 2550 2551 channel_contr_list = {} 2552 for gv_log in log_GV_files: 2553 logfile=open(gv_log,'r') 2554 log = logfile.read() 2555 logfile.close() 2556 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2557 vf_stats = None 2558 for vf_stats in re.finditer(virt_frac_finder, log): 2559 pass 2560 if not vf_stats is None: 2561 v_frac = safe_float(vf_stats.group('v_frac')) 2562 v_average = safe_float(vf_stats.group('v_average')) 2563 try: 2564 if v_frac < stats['virt_stats']['v_frac_min'][0]: 2565 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 2566 if v_frac > stats['virt_stats']['v_frac_max'][0]: 2567 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 2568 stats['virt_stats']['v_frac_avg'][0] += v_frac 2569 stats['virt_stats']['v_frac_avg'][1] += 1 2570 except KeyError: 2571 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 2572 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 2573 stats['virt_stats']['v_frac_avg']=[v_frac,1] 2574 2575 2576 ccontr_stats = None 2577 for ccontr_stats in re.finditer(channel_contr_finder, log): 2578 pass 2579 if not ccontr_stats is None: 2580 contrib = safe_float(ccontr_stats.group('v_contr')) 2581 try: 2582 if contrib>channel_contr_list[channel_name]: 2583 channel_contr_list[channel_name]=contrib 2584 except KeyError: 2585 
channel_contr_list[channel_name]=contrib 2586 2587 2588 # Now build the list of relevant virt log files to look for the maxima 2589 # of virt fractions and such. 2590 average_contrib = 0.0 2591 for value in channel_contr_list.values(): 2592 average_contrib += value 2593 if len(channel_contr_list.values()) !=0: 2594 average_contrib = average_contrib / len(channel_contr_list.values()) 2595 2596 relevant_log_GV_files = [] 2597 excluded_channels = set([]) 2598 all_channels = set([]) 2599 for log_file in log_GV_files: 2600 channel_name = '/'.join(log_file.split('/')[-3:-1]) 2601 all_channels.add(channel_name) 2602 try: 2603 if channel_contr_list[channel_name] > (0.1*average_contrib): 2604 relevant_log_GV_files.append(log_file) 2605 else: 2606 excluded_channels.add(channel_name) 2607 except KeyError: 2608 relevant_log_GV_files.append(log_file) 2609 2610 # Now we want to use the latest occurence of accumulated result in the log file 2611 for gv_log in relevant_log_GV_files: 2612 logfile=open(gv_log,'r') 2613 log = logfile.read() 2614 logfile.close() 2615 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2616 2617 vt_stats = None 2618 for vt_stats in re.finditer(virt_tricks_finder, log): 2619 pass 2620 if not vt_stats is None: 2621 vt_stats_group = vt_stats.groupdict() 2622 v_ratio = safe_float(vt_stats.group('v_ratio')) 2623 v_ratio_err = safe_float(vt_stats.group('v_ratio_err')) 2624 v_contr = safe_float(vt_stats.group('v_abs_contr')) 2625 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err')) 2626 try: 2627 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 2628 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 2629 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 2630 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 2631 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 2632 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 2633 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 2634 
stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 2635 if v_contr < stats['virt_stats']['v_contr_min'][0]: 2636 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 2637 if v_contr > stats['virt_stats']['v_contr_max'][0]: 2638 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 2639 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 2640 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 2641 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 2642 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 2643 except KeyError: 2644 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 2645 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 2646 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 2647 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 2648 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 2649 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 2650 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 2651 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 2652 2653 vf_stats = None 2654 for vf_stats in re.finditer(virt_frac_finder, log): 2655 pass 2656 if not vf_stats is None: 2657 v_frac = safe_float(vf_stats.group('v_frac')) 2658 v_average = safe_float(vf_stats.group('v_average')) 2659 try: 2660 if v_average < stats['virt_stats']['v_average_min'][0]: 2661 stats['virt_stats']['v_average_min']=(v_average,channel_name) 2662 if v_average > stats['virt_stats']['v_average_max'][0]: 2663 stats['virt_stats']['v_average_max']=(v_average,channel_name) 2664 stats['virt_stats']['v_average_avg'][0] += v_average 2665 stats['virt_stats']['v_average_avg'][1] += 1 2666 except KeyError: 2667 stats['virt_stats']['v_average_min']=[v_average,channel_name] 2668 stats['virt_stats']['v_average_max']=[v_average,channel_name] 2669 stats['virt_stats']['v_average_avg']=[v_average,1] 2670 2671 try: 2672 debug_msg += '\n\n 
Statistics on virtual integration optimization : ' 2673 2674 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 2675 %tuple(stats['virt_stats']['v_frac_max']) 2676 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 2677 %tuple(stats['virt_stats']['v_frac_min']) 2678 debug_msg += '\n Average virt fraction computed %.3f'\ 2679 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1])) 2680 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 2681 (len(excluded_channels),len(all_channels)) 2682 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 2683 %tuple(stats['virt_stats']['v_average_max']) 2684 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 2685 %tuple(stats['virt_stats']['v_ratio_max']) 2686 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 2687 %tuple(stats['virt_stats']['v_ratio_err_max']) 2688 debug_msg += tmpStr 2689 # After all it was decided that it is better not to alarm the user unecessarily 2690 # with such printout of the statistics. 2691 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 2692 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 2693 # message += "\n Suspiciously large MC error in :" 2694 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 2695 # message += tmpStr 2696 2697 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 2698 %tuple(stats['virt_stats']['v_contr_err_max']) 2699 debug_msg += tmpStr 2700 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 2701 # message += tmpStr 2702 2703 2704 except KeyError: 2705 debug_msg += '\n Could not find statistics on the integration optimization. 
' 2706 2707 # ======================================= 2708 # == aMC@NLO timing profile statistics == 2709 # ======================================= 2710 2711 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 2712 "(?P<time>[\d\+-Eed\.]*)\s*") 2713 2714 for logf in log_GV_files: 2715 logfile=open(logf,'r') 2716 log = logfile.read() 2717 logfile.close() 2718 channel_name = '/'.join(logf.split('/')[-3:-1]) 2719 mint = re.search(mint_search,logf) 2720 if not mint is None: 2721 channel_name = channel_name+' [step %s]'%mint.group('ID') 2722 2723 for time_stats in re.finditer(timing_stat_finder, log): 2724 try: 2725 stats['timings'][time_stats.group('name')][channel_name]+=\ 2726 safe_float(time_stats.group('time')) 2727 except KeyError: 2728 if time_stats.group('name') not in stats['timings'].keys(): 2729 stats['timings'][time_stats.group('name')] = {} 2730 stats['timings'][time_stats.group('name')][channel_name]=\ 2731 safe_float(time_stats.group('time')) 2732 2733 # useful inline function 2734 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 2735 try: 2736 totTimeList = [(time, chan) for chan, time in \ 2737 stats['timings']['Total'].items()] 2738 except KeyError: 2739 totTimeList = [] 2740 2741 totTimeList.sort() 2742 if len(totTimeList)>0: 2743 debug_msg += '\n\n Inclusive timing profile :' 2744 debug_msg += '\n Overall slowest channel %s (%s)'%\ 2745 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 2746 debug_msg += '\n Average channel running time %s'%\ 2747 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 2748 debug_msg += '\n Aggregated total running time %s'%\ 2749 Tstr(sum([el[0] for el in totTimeList])) 2750 else: 2751 debug_msg += '\n\n Inclusive timing profile non available.' 
2752 2753 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 2754 sum(stats['timings'][stat].values()), reverse=True) 2755 for name in sorted_keys: 2756 if name=='Total': 2757 continue 2758 if sum(stats['timings'][name].values())<=0.0: 2759 debug_msg += '\n Zero time record for %s.'%name 2760 continue 2761 try: 2762 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 2763 chan) for chan, time in stats['timings'][name].items()] 2764 except KeyError, ZeroDivisionError: 2765 debug_msg += '\n\n Timing profile for %s unavailable.'%name 2766 continue 2767 TimeList.sort() 2768 debug_msg += '\n Timing profile for <%s> :'%name 2769 try: 2770 debug_msg += '\n Overall fraction of time %.3f %%'%\ 2771 safe_float((100.0*(sum(stats['timings'][name].values())/ 2772 sum(stats['timings']['Total'].values())))) 2773 except KeyError, ZeroDivisionError: 2774 debug_msg += '\n Overall fraction of time unavailable.' 2775 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 2776 (TimeList[-1][0],TimeList[-1][1]) 2777 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 2778 (TimeList[0][0],TimeList[0][1]) 2779 2780 # ============================= 2781 # == log file eror detection == 2782 # ============================= 2783 2784 # Find the number of potential errors found in all log files 2785 # This re is a simple match on a case-insensitve 'error' but there is 2786 # also some veto added for excluding the sentence 2787 # "See Section 6 of paper for error calculation." 2788 # which appear in the header of lhapdf in the logs. 
2789 err_finder = re.compile(\ 2790 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 2791 for log in all_log_files: 2792 logfile=open(log,'r') 2793 nErrors = len(re.findall(err_finder, logfile.read())) 2794 logfile.close() 2795 if nErrors != 0: 2796 stats['Errors'].append((str(log),nErrors)) 2797 2798 nErrors = sum([err[1] for err in stats['Errors']],0) 2799 if nErrors != 0: 2800 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 2801 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 2802 'found in the following log file%s:'%('s' if \ 2803 len(stats['Errors'])>1 else '') 2804 for error in stats['Errors'][:3]: 2805 log_name = '/'.join(error[0].split('/')[-5:]) 2806 debug_msg += '\n > %d error%s in %s'%\ 2807 (error[1],'s' if error[1]>1 else '',log_name) 2808 if len(stats['Errors'])>3: 2809 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 2810 nRemainingLogs = len(stats['Errors'])-3 2811 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 2812 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 2813 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 2814 2815 return message, debug_msg 2816 2817
2818 - def reweight_and_collect_events(self, options, mode, nevents, event_norm):
2819 """this function calls the reweighting routines and creates the event file in the 2820 Event dir. Return the name of the event file created 2821 """ 2822 scale_pdf_info=[] 2823 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 2824 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 2825 scale_pdf_info = self.run_reweight(options['reweightonly']) 2826 self.update_status('Collecting events', level='parton', update_results=True) 2827 misc.compile(['collect_events'], 2828 cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile']) 2829 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'), 2830 stdin=subprocess.PIPE, 2831 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w')) 2832 if event_norm.lower() == 'sum': 2833 p.communicate(input = '1\n') 2834 elif event_norm.lower() == 'unity': 2835 p.communicate(input = '3\n') 2836 else: 2837 p.communicate(input = '2\n') 2838 2839 #get filename from collect events 2840 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1] 2841 2842 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)): 2843 raise aMCatNLOError('An error occurred during event generation. ' + \ 2844 'The event file has not been created. Check collect_events.log') 2845 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 2846 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file) 2847 if not options['reweightonly']: 2848 self.print_summary(options, 2, mode, scale_pdf_info) 2849 res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses')) 2850 for res_file in res_files: 2851 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2852 2853 logger.info('The %s file has been generated.\n' % (evt_file)) 2854 self.results.add_detail('nb_event', nevents) 2855 self.update_status('Events generated', level='parton', update_results=True) 2856 return evt_file[:-3]
2857 2858
    def run_mcatnlo(self, evt_file, options):
        """runs mcatnlo on the generated event file, to produce showered-events

        Steps performed:
          1. gunzip the LHE file and read its banner to find the requested shower;
          2. sanitize nsplit_jobs (must divide nevents, forced to 1 when only
             part of the events is showered);
          3. configure FastJet (full installation or bundled fjcore) and the
             extra library/include paths for HERWIGPP / PYTHIA8;
          4. compile the MCatNLO driver, create a fresh RUN_<shower>_<n> dir,
             split/link the event files and run shower.sh through run_all;
          5. collect the output (HEP/HEPMC events or TOP/HwU histograms),
             archive the shower card in RunMaterial and re-gzip the inputs.
        """
        logger.info('Preparing MCatNLO run')
        try:
            misc.gunzip(evt_file)
        except Exception:
            # best effort: the file may already be uncompressed
            pass

        self.banner = banner_mod.Banner(evt_file)
        shower = self.banner.get_detail('run_card', 'parton_shower').upper()

        #check that the number of split event files divides the number of
        # events, otherwise set it to 1
        if int(self.banner.get_detail('run_card', 'nevents') / \
                self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
                != self.banner.get_detail('run_card', 'nevents'):
            logger.warning(\
                'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
                'Setting it to 1.')
            self.shower_card['nsplit_jobs'] = 1

        # don't split jobs if the user asks to shower only a part of the events
        if self.shower_card['nevents'] > 0 and \
           self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
           self.shower_card['nsplit_jobs'] != 1:
            logger.warning(\
                'Only a part of the events will be showered.\n' + \
                'Setting nsplit_jobs in the shower_card to 1.')
            self.shower_card['nsplit_jobs'] = 1

        self.banner_to_mcatnlo(evt_file)

        # if fastjet has to be linked (in extralibs) then
        # add lib /include dirs for fastjet if fastjet-config is present on the
        # system, otherwise add fjcore to the files to combine
        if 'fastjet' in self.shower_card['extralibs']:
            #first, check that stdc++ is also linked
            if not 'stdc++' in self.shower_card['extralibs']:
                logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
                self.shower_card['extralibs'] += ' stdc++'
            # then check if options[fastjet] corresponds to a valid fj installation
            try:
                #this is for a complete fj installation
                p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                output, error = p.communicate()
                #remove the line break from output (last character)
                output = output[:-1]
                # add lib/include paths
                if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
                    logger.warning('Linking FastJet: updating EXTRAPATHS')
                    self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
                if not pjoin(output, 'include') in self.shower_card['includepaths']:
                    logger.warning('Linking FastJet: updating INCLUDEPATHS')
                    self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
                # to be changed in the fortran wrapper
                include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
                namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
            except Exception:
                # any failure running fastjet-config falls back to fjcore
                logger.warning('Linking FastJet: using fjcore')
                # this is for FJcore, so no FJ library has to be linked
                self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
                if not 'fjcore.o' in self.shower_card['analyse']:
                    self.shower_card['analyse'] += ' fjcore.o'
                # to be changed in the fortran wrapper
                include_line = '#include "fjcore.hh"//INCLUDE_FJ'
                namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
            # change the fortran wrapper with the correct namespaces/include
            # NOTE(review): the open() below is never closed explicitly;
            # relies on CPython refcounting to release the handle.
            fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
            for line in fjwrapper_lines:
                if '//INCLUDE_FJ' in line:
                    fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
                if '//NAMESPACE_FJ' in line:
                    fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
            with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock:
                fsock.write('\n'.join(fjwrapper_lines) + '\n')

        extrapaths = self.shower_card['extrapaths'].split()

        # check that the path needed by HW++ and PY8 are set if one uses these shower
        if shower in ['HERWIGPP', 'PYTHIA8']:
            path_dict = {'HERWIGPP': ['hepmc_path',
                                      'thepeg_path',
                                      'hwpp_path'],
                         'PYTHIA8': ['pythia8_path']}

            if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]):
                raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \
                        ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))

        if shower == 'HERWIGPP':
            extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
            self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib')

        # the presence of 'xmldoc' distinguishes PY8.1xxx from PY8.2xxx
        if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
            extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))

        # extend LD_LIBRARY_PATH so the shower executables find the extra libs
        if 'LD_LIBRARY_PATH' in os.environ.keys():
            ldlibrarypath = os.environ['LD_LIBRARY_PATH']
        else:
            ldlibrarypath = ''
        ldlibrarypath += ':' + ':'.join(extrapaths)
        os.putenv('LD_LIBRARY_PATH', ldlibrarypath)

        shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
        self.shower_card.write_card(shower, shower_card_path)

        # overwrite if shower_card_set.dat exists in MCatNLO
        if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
            files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
                     pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))

        mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
        self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')

        # libdl may be needded for pythia 82xx
        #if shower == 'PYTHIA8' and not \
        #    os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \
        #    'dl' not in self.shower_card['extralibs'].split():
        #    # 'dl' has to be linked with the extralibs
        #    self.shower_card['extralibs'] += ' dl'
        #    logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \
        #            "It is needed for the correct running of PY8.2xx.\n" + \
        #            "If this library cannot be found on your system, a crash will occur.")

        # NOTE(review): the two open() calls used as stdout/stderr are never
        # closed explicitly (handle cleanup left to the garbage collector).
        misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
                  stderr=open(mcatnlo_log, 'w'),
                  cwd=pjoin(self.me_dir, 'MCatNLO'),
                  close_fds=True)

        exe = 'MCATNLO_%s_EXE' % shower
        if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
           not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
            print open(mcatnlo_log).read()
            raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
        logger.info(' ... done')

        # create an empty dir where to run
        count = 1
        while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
                        (shower, count))):
            count += 1
        rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
                        (shower, count))
        os.mkdir(rundir)
        files.cp(shower_card_path, rundir)

        #look for the event files (don't resplit if one asks for the
        # same number of event files as in the previous run)
        event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
        if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
            logger.info('Cleaning old files and splitting the event file...')
            #clean the old files
            files.rm([f for f in event_files if 'events.lhe' not in f])
            if self.shower_card['nsplit_jobs'] > 1:
                misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile'])
                p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
                                stdin=subprocess.PIPE,
                                stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
                                cwd=pjoin(self.me_dir, 'Events', self.run_name))
                # split_events reads the input file name and the number of splits
                p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
                logger.info('Splitting done.')
            event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))

        event_files.sort()

        self.update_status('Showering events...', level='shower')
        logger.info('(Running in %s)' % rundir)
        if shower != 'PYTHIA8':
            files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
            files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
        else:
            # special treatment for pythia8
            files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
            files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
            if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx
                files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
                files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
            else: # this is PY8.2xxx
                files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
        #link the hwpp exe in the rundir
        if shower == 'HERWIGPP':
            try:
                # Herwig++ (2.x) and Herwig (7.x) use different binary names
                if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
                    files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
                if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
                    files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir)
            except Exception:
                raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')

            if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
                files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)

        files.ln(evt_file, rundir, 'events.lhe')
        for i, f in enumerate(event_files):
            files.ln(f, rundir, 'events_%d.lhe' % (i + 1))

        if not self.shower_card['analyse']:
            # an hep/hepmc file as output
            out_id = 'HEP'
        else:
            # one or more .top file(s) as output
            if "HwU" in self.shower_card['analyse']:
                out_id = 'HWU'
            else:
                out_id = 'TOP'

        # write the executable
        with open(pjoin(rundir, 'shower.sh'), 'w') as fsock:
            # fill the template with the colon-separated library paths
            fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
                        % {'extralibs': ':'.join(extrapaths)})
        subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])

        # one job per split event file; a single job when there is no splitting
        if event_files:
            arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
                        for i in range(len(event_files))]
        else:
            arg_list = [[shower, out_id, self.run_name]]

        self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
        self.njobs = 1
        self.wait_for_complete('shower')

        # now collect the results
        message = ''
        warning = ''
        to_gzip = [evt_file]
        if out_id == 'HEP':
            #copy the showered stdhep/hepmc file back in events
            if shower in ['PYTHIA8', 'HERWIGPP']:
                hep_format = 'HEPMC'
                ext = 'hepmc'
            else:
                hep_format = 'StdHEP'
                ext = 'hep'

            hep_file = '%s_%s_0.%s.gz' % \
                (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
            count = 0

            # find the first available name for the output:
            # check existing results with or without event splitting
            while os.path.exists(hep_file) or \
                  os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) :
                count += 1
                hep_file = '%s_%s_%d.%s.gz' % \
                    (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)

            try:
                if self.shower_card['nsplit_jobs'] == 1:
                    files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
                    message = ('The file %s has been generated. \nIt contains showered' + \
                               ' and hadronized events in the %s format obtained' + \
                               ' showering the parton-level event file %s.gz with %s') % \
                               (hep_file, hep_format, evt_file, shower)
                else:
                    hep_list = []
                    for i in range(self.shower_card['nsplit_jobs']):
                        hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
                        files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
                    message = ('The following files have been generated:\n %s\nThey contain showered' + \
                               ' and hadronized events in the %s format obtained' + \
                               ' showering the (split) parton-level event file %s.gz with %s') % \
                               ('\n '.join(hep_list), hep_format, evt_file, shower)

            # NOTE(review): Python 2 pitfall — this clause catches only
            # OSError and binds it to the name IOError; IOError itself is
            # NOT caught. Kept as-is (doc-only change).
            except OSError, IOError:
                raise aMCatNLOError('No file has been generated, an error occurred.'+\
                     ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))

            # run the plot creation in a secure way
            if hep_format == 'StdHEP':
                try:
                    self.do_plot('%s -f' % self.run_name)
                except Exception, error:
                    logger.info("Fail to make the plot. Continue...")
                    pass

        elif out_id == 'TOP' or out_id == 'HWU':
            #copy the topdrawer or HwU file(s) back in events
            if out_id=='TOP':
                ext='top'
            elif out_id=='HWU':
                ext='HwU'
            topfiles = []
            top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)]
            for top_tar in top_tars:
                topfiles.extend(top_tar.getnames())

            # safety check
            if len(top_tars) != self.shower_card['nsplit_jobs']:
                raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
                                     (self.shower_card['nsplit_jobs'], len(top_tars)))

            # find the first available name for the output:
            # check existing results with or without event splitting
            filename = 'plot_%s_%d_' % (shower, 1)
            count = 1
            while os.path.exists(pjoin(self.me_dir, 'Events',
                      self.run_name, '%s0.%s' % (filename,ext))) or \
                  os.path.exists(pjoin(self.me_dir, 'Events',
                      self.run_name, '%s0__1.%s' % (filename,ext))):
                count += 1
                filename = 'plot_%s_%d_' % (shower, count)

            if out_id=='TOP':
                hist_format='TopDrawer format'
            elif out_id=='HWU':
                hist_format='HwU and GnuPlot formats'

            if not topfiles:
                # if no topfiles are found just warn the user
                warning = 'No .top file has been generated. For the results of your ' +\
                          'run, please check inside %s' % rundir
            elif self.shower_card['nsplit_jobs'] == 1:
                # only one job for the shower
                top_tars[0].extractall(path = rundir)
                plotfiles = []
                for i, file in enumerate(topfiles):
                    if out_id=='TOP':
                        plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                                         '%s%d.top' % (filename, i))
                        files.mv(pjoin(rundir, file), plotfile)
                    elif out_id=='HWU':
                        # combine the HwU histograms and try to render them
                        # with gnuplot (failure to plot is non-fatal)
                        out=pjoin(self.me_dir,'Events',
                                  self.run_name,'%s%d'% (filename,i))
                        histos=[{'dirname':pjoin(rundir,file)}]
                        self.combine_plots_HwU(histos,out)
                        try:
                            misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\
                                      stdout=os.open(os.devnull, os.O_RDWR),\
                                      stderr=os.open(os.devnull, os.O_RDWR),\
                                      cwd=pjoin(self.me_dir, 'Events', self.run_name))
                        except Exception:
                            pass
                        plotfile=pjoin(self.me_dir,'Events',self.run_name,
                                       '%s%d.HwU'% (filename,i))
                    plotfiles.append(plotfile)

                # singular/plural wording for the final message
                ffiles = 'files'
                have = 'have'
                if len(plotfiles) == 1:
                    ffiles = 'file'
                    have = 'has'

                message = ('The %s %s %s been generated, with histograms in the' + \
                           ' %s, obtained by showering the parton-level' + \
                           ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
                           hist_format, evt_file, shower)
            else:
                # many jobs for the shower have been run
                topfiles_set = set(topfiles)
                plotfiles = []
                for j, top_tar in enumerate(top_tars):
                    top_tar.extractall(path = rundir)
                    for i, file in enumerate(topfiles_set):
                        plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                                         '%s%d__%d.%s' % (filename, i, j + 1,ext))
                        files.mv(pjoin(rundir, file), plotfile)
                        plotfiles.append(plotfile)

                # check if the user asked to combine the .top into a single file
                if self.shower_card['combine_td']:
                    misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities'))

                    # normalisation factor for the combination
                    # NOTE(review): 'norm' is only assigned for event_norm
                    # 'sum' or 'average'; other values would raise a
                    # NameError below — presumably excluded upstream, verify.
                    if self.banner.get('run_card', 'event_norm').lower() == 'sum':
                        norm = 1.
                    elif self.banner.get('run_card', 'event_norm').lower() == 'average':
                        norm = 1./float(self.shower_card['nsplit_jobs'])

                    plotfiles2 = []
                    for i, file in enumerate(topfiles_set):
                        filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \
                                    for j in range(self.shower_card['nsplit_jobs'])]
                        if out_id=='TOP':
                            # sum_plots reads: number of files, the file
                            # names, then one normalisation per file
                            infile="%d\n%s\n%s\n" % \
                                (self.shower_card['nsplit_jobs'],
                                 '\n'.join(filelist),
                                 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
                            p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
                                           stdin=subprocess.PIPE,
                                           stdout=os.open(os.devnull, os.O_RDWR),
                                           cwd=pjoin(self.me_dir, 'Events', self.run_name))
                            p.communicate(input = infile)
                            files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
                                     pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
                        elif out_id=='HWU':
                            out=pjoin(self.me_dir,'Events',
                                      self.run_name,'%s%d'% (filename,i))
                            histos=[]
                            norms=[]
                            for plotfile in plotfiles:
                                histos.append({'dirname':plotfile})
                                norms.append(norm)
                            self.combine_plots_HwU(histos,out,normalisation=norms)
                            try:
                                misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\
                                          stdout=os.open(os.devnull, os.O_RDWR),\
                                          stderr=os.open(os.devnull, os.O_RDWR),\
                                          cwd=pjoin(self.me_dir, 'Events',self.run_name))
                            except Exception:
                                pass

                        plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext)))
                        # archive the per-job files and remove the originals
                        tar = tarfile.open(
                                pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
                        for f in filelist:
                            tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
                        files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])

                        tar.close()

                    # singular/plural wording for the final message
                    ffiles = 'files'
                    have = 'have'
                    if len(plotfiles2) == 1:
                        ffiles = 'file'
                        have = 'has'

                    message = ('The %s %s %s been generated, with histograms in the' + \
                               ' %s, obtained by showering the parton-level' + \
                               ' file %s.gz with %s.\n' + \
                               'The files from the different shower ' + \
                               'jobs (before combining them) can be found inside %s.') % \
                               (ffiles, ', '.join(plotfiles2), have, hist_format,\
                                evt_file, shower,
                                ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2]))

                else:
                    message = ('The following files have been generated:\n %s\n' + \
                               'They contain histograms in the' + \
                               ' %s, obtained by showering the parton-level' + \
                               ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
                               hist_format, evt_file, shower)

        # Now arxiv the shower card used if RunMaterial is present
        run_dir_path = pjoin(rundir, self.run_name)
        if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')):
            misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path)
            files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'),
                     pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\
                           %(shower, count)))
            misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],
                      cwd=run_dir_path)
            shutil.rmtree(pjoin(run_dir_path,'RunMaterial'))
        # end of the run, gzip files and print out the message/warning
        for f in to_gzip:
            misc.gzip(f)
        if message:
            logger.info(message)
        if warning:
            logger.warning(warning)

        self.update_status('Run complete', level='shower', update_results=True)
3313 3314 ############################################################################
    def set_run_name(self, name, tag=None, level='parton', reload_card=False):
        """Define the run name, the run_tag, the banner and the results.

        name: run name to switch to (may be the current one).
        tag: explicit run tag to use; if None a tag is picked automatically.
        level: which stage is about to run ('parton', 'shower', 'delphes',
            'madanalysis5_hadron', 'plot', 'pythia'); used to decide whether
            the previous tag already holds data for this stage.
        reload_card: if True and the run name is unchanged, re-read
            Cards/run_card.dat from disk.

        Returns None for 'parton' level; otherwise returns the tag of a
        previous run that provides the data required by this level
        (first tag for 'pythia', first tag with pythia results otherwise).
        """

        # Map: when running stage <key>, data of the listed stages in the last
        # tag forces a fresh tag (re-running would overwrite their results).
        upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'],
                       'shower': ['shower','delphes','madanalysis5_hadron'],
                       'delphes':['delphes'],
                       'madanalysis5_hadron':['madanalysis5_hadron'],
                       'plot':[]}

        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            #check if we need to change the tag
            if tag:
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                # no explicit tag: pick a fresh one only if the last tag
                # already contains results for one of the conflicting stages
                for tag in upgrade_tag[level]:
                    if getattr(self.results[self.run_name][-1], tag):
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return # Nothing to do anymore

        # save/clean previous run
        if self.run_name:
            self.store_result()
        # store new name
        self.run_name = name

        # Read run_card
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # First call for this run -> set the banner
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if 'mgruncard' in self.banner:
            # a run card stored in the recovered banner takes precedence
            # over the one read from disk above
            self.run_card = self.banner.charge_card('run_card')
        if tag:
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level =='parton':
            pass # No results yet, so current tag is fine
        elif not self.run_name in self.results:
            #This is only for case when you want to trick the interface
            logger.warning('Trying to run data on unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            for tag in upgrade_tag[level]:

                if getattr(self.results[self.run_name][-1], tag):
                    # LEVEL is already define in the last tag -> need to switch tag
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # We can add the results to the current run
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag # ensure that run_tag is correct


        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # Return the tag of the previous run having the required data for this
        # tag/run to working wel.
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            # walk the tags of this run from most recent to oldest and return
            # the first one that has pythia-level results
            for i in range(-1,-len(self.results[self.run_name])-1,-1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']
3403 3404
3405 - def store_result(self):
3406 """ tar the pythia results. This is done when we are quite sure that 3407 the pythia output will not be use anymore """ 3408 3409 if not self.run_name: 3410 return 3411 3412 self.results.save() 3413 3414 if not self.to_store: 3415 return 3416 3417 if 'event' in self.to_store: 3418 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')): 3419 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')): 3420 self.update_status('gzipping output file: events.lhe', level='parton', error=True) 3421 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 3422 else: 3423 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 3424 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')): 3425 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe')) 3426 3427 3428 tag = self.run_card['run_tag'] 3429 3430 self.to_store = []
3431 3432
3433 - def get_init_dict(self, evt_file):
3434 """reads the info in the init block and returns them in a dictionary""" 3435 ev_file = open(evt_file) 3436 init = "" 3437 found = False 3438 while True: 3439 line = ev_file.readline() 3440 if "<init>" in line: 3441 found = True 3442 elif found and not line.startswith('#'): 3443 init += line 3444 if "</init>" in line or "<event>" in line: 3445 break 3446 ev_file.close() 3447 3448 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 3449 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 3450 # these are not included (so far) in the init_dict 3451 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 3452 3453 init_dict = {} 3454 init_dict['idbmup1'] = int(init.split()[0]) 3455 init_dict['idbmup2'] = int(init.split()[1]) 3456 init_dict['ebmup1'] = float(init.split()[2]) 3457 init_dict['ebmup2'] = float(init.split()[3]) 3458 init_dict['pdfgup1'] = int(init.split()[4]) 3459 init_dict['pdfgup2'] = int(init.split()[5]) 3460 init_dict['pdfsup1'] = int(init.split()[6]) 3461 init_dict['pdfsup2'] = int(init.split()[7]) 3462 init_dict['idwtup'] = int(init.split()[8]) 3463 init_dict['nprup'] = int(init.split()[9]) 3464 3465 return init_dict
3466 3467
3468 - def banner_to_mcatnlo(self, evt_file):
3469 """creates the mcatnlo input script using the values set in the header of the event_file. 3470 It also checks if the lhapdf library is used""" 3471 shower = self.banner.get('run_card', 'parton_shower').upper() 3472 pdlabel = self.banner.get('run_card', 'pdlabel') 3473 itry = 0 3474 nevents = self.shower_card['nevents'] 3475 init_dict = self.get_init_dict(evt_file) 3476 3477 if nevents < 0 or \ 3478 nevents > self.banner.get_detail('run_card', 'nevents'): 3479 nevents = self.banner.get_detail('run_card', 'nevents') 3480 3481 nevents = nevents / self.shower_card['nsplit_jobs'] 3482 3483 mcmass_dict = {} 3484 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]: 3485 pdg = int(line.split()[0]) 3486 mass = float(line.split()[1]) 3487 mcmass_dict[pdg] = mass 3488 3489 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1]) 3490 content += 'NEVENTS=%d\n' % nevents 3491 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\ 3492 self.shower_card['nsplit_jobs']) 3493 content += 'MCMODE=%s\n' % shower 3494 content += 'PDLABEL=%s\n' % pdlabel 3495 content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value 3496 #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid') 3497 #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3498 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value 3499 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value 3500 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value 3501 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value 3502 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value 3503 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value 3504 try: 3505 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value 3506 content += 
'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value 3507 except KeyError: 3508 content += 'HGGMASS=120.\n' 3509 content += 'HGGWIDTH=0.00575308848\n' 3510 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1') 3511 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2') 3512 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1') 3513 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2') 3514 content += 'DMASS=%s\n' % mcmass_dict[1] 3515 content += 'UMASS=%s\n' % mcmass_dict[2] 3516 content += 'SMASS=%s\n' % mcmass_dict[3] 3517 content += 'CMASS=%s\n' % mcmass_dict[4] 3518 content += 'BMASS=%s\n' % mcmass_dict[5] 3519 try: 3520 content += 'EMASS=%s\n' % mcmass_dict[11] 3521 content += 'MUMASS=%s\n' % mcmass_dict[13] 3522 content += 'TAUMASS=%s\n' % mcmass_dict[15] 3523 except KeyError: 3524 # this is for backward compatibility 3525 mcmass_lines = [l for l in \ 3526 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper()) 3527 ).read().split('\n') if l] 3528 new_mcmass_dict = {} 3529 for l in mcmass_lines: 3530 key, val = l.split('=') 3531 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip() 3532 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)'] 3533 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)'] 3534 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)'] 3535 3536 content += 'GMASS=%s\n' % mcmass_dict[21] 3537 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower() 3538 # check if need to link lhapdf 3539 if int(self.shower_card['pdfcode']) > 1 or \ 3540 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \ 3541 shower=='HERWIGPP' : 3542 # Use LHAPDF (should be correctly installed, because 3543 # either events were already generated with them, or the 3544 # user explicitly gives an LHAPDF number in the 3545 # shower_card). 
3546 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 3547 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 3548 stdout = subprocess.PIPE).stdout.read().strip() 3549 content += 'LHAPDFPATH=%s\n' % lhapdfpath 3550 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 3551 if self.shower_card['pdfcode']==0: 3552 lhaid_list = '' 3553 content += '' 3554 elif self.shower_card['pdfcode']==1: 3555 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 3556 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3557 else: 3558 lhaid_list = [abs(int(self.shower_card['pdfcode']))] 3559 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode'] 3560 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 3561 elif int(self.shower_card['pdfcode'])==1: 3562 # Try to use LHAPDF because user wants to use the same PDF 3563 # as was used for the event generation. However, for the 3564 # event generation, LHAPDF was not used, so non-trivial to 3565 # see if if LHAPDF is available with the corresponding PDF 3566 # set. If not found, give a warning and use build-in PDF 3567 # set instead. 3568 try: 3569 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 3570 stdout = subprocess.PIPE).stdout.read().strip() 3571 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 3572 content += 'LHAPDFPATH=%s\n' % lhapdfpath 3573 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 3574 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 3575 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3576 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 3577 except Exception: 3578 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\ 3579 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\ 3580 ' needed PDF set. Will use default internal PDF for the shower instead. 
To use the'+\ 3581 ' same set as was used in the event generation install LHAPDF and set the path using'+\ 3582 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell') 3583 content += 'LHAPDFPATH=\n' 3584 content += 'PDFCODE=0\n' 3585 else: 3586 content += 'LHAPDFPATH=\n' 3587 content += 'PDFCODE=0\n' 3588 3589 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw') 3590 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj') 3591 # add the pythia8/hwpp path(s) 3592 if self.options['pythia8_path']: 3593 content+='PY8PATH=%s\n' % self.options['pythia8_path'] 3594 if self.options['hwpp_path']: 3595 content+='HWPPPATH=%s\n' % self.options['hwpp_path'] 3596 if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']: 3597 content+='THEPEGPATH=%s\n' % self.options['thepeg_path'] 3598 if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']: 3599 content+='HEPMCPATH=%s\n' % self.options['hepmc_path'] 3600 3601 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w') 3602 output.write(content) 3603 output.close() 3604 return shower
3605 3606
3607 - def run_reweight(self, only):
3608 """runs the reweight_xsec_events executables on each sub-event file generated 3609 to compute on the fly scale and/or PDF uncertainities""" 3610 logger.info(' Doing reweight') 3611 3612 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted') 3613 # if only doing reweight, copy back the nevents_unweighted file 3614 if only: 3615 if os.path.exists(nev_unw + '.orig'): 3616 files.cp(nev_unw + '.orig', nev_unw) 3617 else: 3618 raise aMCatNLOError('Cannot find event file information') 3619 3620 #read the nevents_unweighted file to get the list of event files 3621 file = open(nev_unw) 3622 lines = file.read().split('\n') 3623 file.close() 3624 # make copy of the original nevent_unweighted file 3625 files.cp(nev_unw, nev_unw + '.orig') 3626 # loop over lines (all but the last one whith is empty) and check that the 3627 # number of events is not 0 3628 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0'] 3629 evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0'] 3630 #prepare the job_dict 3631 job_dict = {} 3632 exe = 'reweight_xsec_events.local' 3633 for i, evt_file in enumerate(evt_files): 3634 path, evt = os.path.split(evt_file) 3635 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \ 3636 pjoin(self.me_dir, 'SubProcesses', path)) 3637 job_dict[path] = [exe] 3638 3639 self.run_all(job_dict, [[evt, '1']], 'Running reweight') 3640 3641 #check that the new event files are complete 3642 for evt_file in evt_files: 3643 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \ 3644 pjoin(self.me_dir, 'SubProcesses', evt_file)], \ 3645 stdout = subprocess.PIPE).stdout.read().strip() 3646 if last_line != "</LesHouchesEvents>": 3647 raise aMCatNLOError('An error occurred during reweight. 
Check the' + \ 3648 '\'reweight_xsec_events.output\' files inside the ' + \ 3649 '\'SubProcesses/P*/G*/ directories for details') 3650 3651 #update file name in nevents_unweighted 3652 newfile = open(nev_unw, 'w') 3653 for line in lines: 3654 if line: 3655 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n') 3656 newfile.close() 3657 3658 return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
3659
    def pdf_scale_from_reweighting(self, evt_files,evt_wghts):
        """This function takes the files with the scale and pdf values
        written by the reweight_xsec_events.f code
        (P*/G*/pdf_scale_dependence.dat) and computes the overall
        scale and PDF uncertainty (the latter is computed using the
        Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
        and returns it in percents.  The expected format of the file
        is: n_scales xsec_scale_central xsec_scale1 ...  n_pdf
        xsec_pdf0 xsec_pdf1 ....

        evt_files: list of per-channel event file paths (relative to
            SubProcesses); the matching scale_pdf_dependence.dat in each
            channel directory is read.
        evt_wghts: per-file weights used when summing the cross sections.
        Returns [scale_info, pdf_info]: two lists of dicts, one entry per
        dynamical scale choice / per PDF set respectively.
        """

        # accumulators: one list of weighted cross sections per scale choice
        # and per PDF set, summed over all channels
        scales=[]
        pdfs=[]
        for i,evt_file in enumerate(evt_files):
            path, evt=os.path.split(evt_file)
            with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
                data_line=f.readline()
                if "scale variations:" in data_line:
                    for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
                        # first line: header/counts; second: the values
                        # ('D' fortran exponents converted to 'E')
                        data_line = f.readline().split()
                        scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            # accumulate element-wise onto earlier channels
                            scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
                        except IndexError:
                            # first channel seen for this scale choice
                            scales+=[scales_this]
                data_line=f.readline()
                if "pdf variations:" in data_line:
                    for j,pdf in enumerate(self.run_card['lhaid']):
                        data_line = f.readline().split()
                        pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
                        except IndexError:
                            pdfs+=[pdfs_this]

        # get the scale uncertainty in percent
        scale_info=[]
        for j,scale in enumerate(scales):
            # scale[0] is the central cross section for this scale choice
            s_cen=scale[0]
            if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
                # max and min of the full envelope
                s_max=(max(scale)/s_cen-1)*100
                s_min=(1-min(scale)/s_cen)*100
                # ren and fac scale dependence added in quadrature
                # NOTE(review): this assumes the values are ordered with the
                # renormalisation scale running fastest — confirm against
                # reweight_xsec_events.f
                ren_var=[]
                fac_var=[]
                for i in range(len(self.run_card['rw_rscale'])):
                    ren_var.append(scale[i]-s_cen) # central fac scale
                for i in range(len(self.run_card['rw_fscale'])):
                    fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen) # central ren scale
                s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
                s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
                s_size=len(scale)
            else:
                # zero central value or reweighting disabled: report zeros
                s_max=0.0
                s_min=0.0
                s_max_q=0.0
                s_min_q=0.0
                s_size=len(scale)
            scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
                               'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
                               'label':self.run_card['dynamical_scale_choice'][j], \
                               'unc':self.run_card['reweight_scale'][j]})

        # check if we can use LHAPDF to compute the PDF uncertainty
        if any(self.run_card['reweight_pdf']):
            use_lhapdf=False
            lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\
                                           stdout=subprocess.PIPE).stdout.read().strip()

            # look for the python bindings below the LHAPDF lib directory
            try:
                candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \
                            if os.path.isdir(pjoin(lhapdf_libdir,dirname))]
            except OSError:
                candidates=[]
            for candidate in candidates:
                if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')):
                    sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages'))
                    try:
                        import lhapdf
                        use_lhapdf=True
                        break
                    except ImportError:
                        # wrong candidate: undo the sys.path change
                        sys.path.pop(0)
                        continue

            if not use_lhapdf:
                # same search in the lib64 variant of the directory
                try:
                    candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \
                                if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))]
                except OSError:
                    candidates=[]
                for candidate in candidates:
                    if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')):
                        sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages'))
                        try:
                            import lhapdf
                            use_lhapdf=True
                            break
                        except ImportError:
                            sys.path.pop(0)
                            continue

            if not use_lhapdf:
                # last resort: maybe the module is already importable
                try:
                    import lhapdf
                    use_lhapdf=True
                except ImportError:
                    logger.warning("Failed to access python version of LHAPDF: "\
                                   "cannot compute PDF uncertainty from the "\
                                   "weights in the events. The weights in the LHE " \
                                   "event files will still cover all PDF set members, "\
                                   "but there will be no PDF uncertainty printed in the run summary. \n "\
                                   "If the python interface to LHAPDF is available on your system, try "\
                                   "adding its location to the PYTHONPATH environment variable and the"\
                                   "LHAPDFLIBDIR to the LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
                    use_lhapdf=False

        # turn off lhapdf printing any messages
        if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)

        pdf_info=[]
        for j,pdfset in enumerate(pdfs):
            # pdfset[0] is the central-member cross section
            p_cen=pdfset[0]
            if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
                if use_lhapdf:
                    pdfsetname=self.run_card['lhapdfsetname'][j]
                    try:
                        # let LHAPDF combine the member cross sections into an
                        # uncertainty (Hessian or replica, per set metadata)
                        p=lhapdf.getPDFSet(pdfsetname)
                        ep=p.uncertainty(pdfset,-1)
                        p_cen=ep.central
                        p_min=abs(ep.errminus/p_cen)*100
                        p_max=abs(ep.errplus/p_cen)*100
                        p_type=p.errorType
                        p_size=p.size
                        p_conf=p.errorConfLevel
                    except:
                        logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                        p_min=0.0
                        p_max=0.0
                        p_type='unknown'
                        p_conf='unknown'
                        p_size=len(pdfset)
                else:
                    p_min=0.0
                    p_max=0.0
                    p_type='unknown'
                    p_conf='unknown'
                    p_size=len(pdfset)
                    pdfsetname=self.run_card['lhaid'][j]
            else:
                p_min=0.0
                p_max=0.0
                p_type='none'
                p_conf='unknown'
                p_size=len(pdfset)
                pdfsetname=self.run_card['lhaid'][j]
            pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
                             'unc':p_type, 'name':pdfsetname, 'size':p_size, \
                             'label':self.run_card['lhaid'][j], 'conf':p_conf})

        scale_pdf_info=[scale_info,pdf_info]
        return scale_pdf_info
3822 3823
3824 - def wait_for_complete(self, run_type):
3825 """this function waits for jobs on cluster to complete their run.""" 3826 starttime = time.time() 3827 #logger.info(' Waiting for submitted jobs to complete') 3828 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 3829 starttime=starttime, level='parton', update_results=True) 3830 try: 3831 self.cluster.wait(self.me_dir, update_status) 3832 except: 3833 self.cluster.remove() 3834 raise
3835
3836 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
3837 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 3838 self.ijob = 0 3839 if run_type != 'shower': 3840 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 3841 for args in arg_list: 3842 for Pdir, jobs in job_dict.items(): 3843 for job in jobs: 3844 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 3845 if self.cluster_mode == 2: 3846 time.sleep(1) # security to allow all jobs to be launched 3847 else: 3848 self.njobs = len(arg_list) 3849 for args in arg_list: 3850 [(cwd, exe)] = job_dict.items() 3851 self.run_exe(exe, args, run_type, cwd) 3852 3853 self.wait_for_complete(run_type)
3854 3855 3856
3857 - def check_event_files(self,jobs):
3858 """check the integrity of the event files after splitting, and resubmit 3859 those which are not nicely terminated""" 3860 jobs_to_resubmit = [] 3861 for job in jobs: 3862 last_line = '' 3863 try: 3864 last_line = subprocess.Popen( 3865 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \ 3866 stdout = subprocess.PIPE).stdout.read().strip() 3867 except IOError: 3868 pass 3869 if last_line != "</LesHouchesEvents>": 3870 jobs_to_resubmit.append(job) 3871 self.njobs = 0 3872 if jobs_to_resubmit: 3873 run_type = 'Resubmitting broken jobs' 3874 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 3875 for job in jobs_to_resubmit: 3876 logger.debug('Resubmitting ' + job['dirname'] + '\n') 3877 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
3878 3879
3880 - def find_jobs_to_split(self, pdir, job, arg):
3881 """looks into the nevents_unweighed_splitted file to check how many 3882 split jobs are needed for this (pdir, job). arg is F, B or V""" 3883 # find the number of the integration channel 3884 splittings = [] 3885 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 3886 pattern = re.compile('for i in (\d+) ; do') 3887 match = re.search(pattern, ajob) 3888 channel = match.groups()[0] 3889 # then open the nevents_unweighted_splitted file and look for the 3890 # number of splittings to be done 3891 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 3892 # This skips the channels with zero events, because they are 3893 # not of the form GFXX_YY, but simply GFXX 3894 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 3895 pjoin(pdir, 'G%s%s' % (arg,channel))) 3896 matches = re.findall(pattern, nevents_file) 3897 for m in matches: 3898 splittings.append(m) 3899 return splittings
3900 3901
    def run_exe(self, exe, args, run_type, cwd=None):
        """this basic function launch locally/on cluster exe with args as argument.

        exe: executable name (looked up in cwd, or in the current directory
            when cwd is None).
        args: list of string arguments passed to the executable; for shower
            jobs args is [shower, output_id(HEP/TOP/HWU), run_name(, split#)].
        run_type: label used for the status display / dispatch.
        cwd: working directory in which the executable lives and runs.
        Raises aMCatNLOError if the executable cannot be found.
        """

        # first test that exe exists:
        execpath = None
        if cwd and os.path.exists(pjoin(cwd, exe)):
            execpath = pjoin(cwd, exe)
        elif not cwd and os.path.exists(exe):
            execpath = exe
        else:
            raise aMCatNLOError('Cannot find executable %s in %s' \
                % (exe, os.getcwd()))
        # check that the executable has exec permissions
        if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
            subprocess.call(['chmod', '+x', exe], cwd=cwd)
        # finally run it
        if self.cluster_mode == 0:
            #this is for the serial run
            misc.call(['./'+exe] + args, cwd=cwd)
            self.ijob += 1
            # progress display: (remaining, running, done, label)
            self.update_status((max([self.njobs - self.ijob - 1, 0]),
                                min([1, self.njobs - self.ijob]),
                                self.ijob, run_type), level='parton')

        #this is for the cluster/multicore run
        elif 'reweight' in exe:
            # a reweight run
            # Find the correct PDF input file
            input_files, output_files = [], []
            pdfinput = self.get_pdf_input_filename()
            if os.path.exists(pdfinput):
                input_files.append(pdfinput)
            input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
            input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
            input_files.append(args[0])
            output_files.append('%s.rwgt' % os.path.basename(args[0]))
            output_files.append('reweight_xsec_events.output')
            output_files.append('scale_pdf_dependence.dat')

            return self.cluster.submit2(exe, args, cwd=cwd,
                             input_files=input_files, output_files=output_files,
                             required_output=output_files)

        elif 'ajob' in exe:
            # the 'standard' amcatnlo job
            # check if args is a list of string
            if type(args[0]) == str:
                input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
                #submitting
                self.cluster.submit2(exe, args, cwd=cwd,
                             input_files=input_files, output_files=output_files,
                             required_output=required_output)

#                # keep track of folders and arguments for splitted evt gen
#                subfolder=output_files[-1].split('/')[0]
#                if len(args) == 4 and '_' in subfolder:
#                    self.split_folders[pjoin(cwd,subfolder)] = [exe] + args

        elif 'shower' in exe:
            # a shower job
            # args are [shower, output(HEP or TOP), run_name]
            # cwd is the shower rundir, where the executable are found
            input_files, output_files = [], []
            shower = args[0]
            # the input files
            if shower == 'PYTHIA8':
                input_files.append(pjoin(cwd, 'Pythia8.exe'))
                input_files.append(pjoin(cwd, 'Pythia8.cmd'))
                # xmldoc location differs between old and new PY8 layouts
                if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
                    input_files.append(pjoin(cwd, 'config.sh'))
                    input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
                else:
                    input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
            else:
                input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
                input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
            if shower == 'HERWIGPP':
                # Herwig++ vs Herwig 7 executable naming
                if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
                    input_files.append(pjoin(cwd, 'Herwig++'))
                if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
                    input_files.append(pjoin(cwd, 'Herwig'))
                input_files.append(pjoin(cwd, 'HepMCFortran.so'))
            # len(args) == 3 means a single (unsplit) shower job; with event
            # splitting args[3] carries the split index
            if len(args) == 3:
                if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
                elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
                else:
                    raise aMCatNLOError, 'Event file not present in %s' % \
                            pjoin(self.me_dir, 'Events', self.run_name)
            else:
                input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
            # the output files
            if len(args) == 3:
                output_files.append('mcatnlo_run.log')
            else:
                output_files.append('mcatnlo_run_%s.log' % args[3])
            if args[1] == 'HEP':
                if len(args) == 3:
                    fname = 'events'
                else:
                    fname = 'events_%s' % args[3]
                # HepMC output for the C++ showers, StdHEP otherwise
                if shower in ['PYTHIA8', 'HERWIGPP']:
                    output_files.append(fname + '.hepmc.gz')
                else:
                    output_files.append(fname + '.hep.gz')
            elif args[1] == 'TOP' or args[1] == 'HWU':
                if len(args) == 3:
                    fname = 'histfile'
                else:
                    fname = 'histfile_%s' % args[3]
                output_files.append(fname + '.tar')
            else:
                # NOTE(review): '%d' with a string args[1] would raise
                # TypeError if this branch is ever hit — confirm intended
                raise aMCatNLOError, 'Not a valid output argument for shower job :  %d' % args[1]
            #submitting
            self.cluster.submit2(exe, args, cwd=cwd,
                             input_files=input_files, output_files=output_files)

        else:
            # fallback: plain cluster submission without file staging
            return self.cluster.submit(exe, args, cwd=cwd)
4023
4024 - def getIO_ajob(self,exe,cwd, args):
4025 # use local disk if possible => need to stands what are the 4026 # input/output files 4027 4028 output_files = [] 4029 required_output = [] 4030 input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'), 4031 pjoin(cwd, 'symfact.dat'), 4032 pjoin(cwd, 'iproc.dat'), 4033 pjoin(cwd, 'initial_states_map.dat'), 4034 pjoin(cwd, 'configs_and_props_info.dat'), 4035 pjoin(cwd, 'leshouche_info.dat'), 4036 pjoin(cwd, 'FKS_params.dat')] 4037 4038 # For GoSam interface, we must copy the SLHA card as well 4039 if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')): 4040 input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat')) 4041 4042 if os.path.exists(pjoin(cwd,'nevents.tar')): 4043 input_files.append(pjoin(cwd,'nevents.tar')) 4044 4045 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')): 4046 input_files.append(pjoin(cwd, 'OLE_order.olc')) 4047 4048 # File for the loop (might not be present if MadLoop is not used) 4049 if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \ 4050 cluster.need_transfer(self.options): 4051 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 4052 elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \ 4053 cluster.need_transfer(self.options): 4054 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz', 4055 dereference=True) 4056 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources') 4057 tf.close() 4058 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 4059 4060 if args[1] == 'born' or args[1] == 'all': 4061 # MADEVENT MINT FO MODE 4062 input_files.append(pjoin(cwd, 'madevent_mintFO')) 4063 if args[2] == '0': 4064 current = '%s_G%s' % (args[1],args[0]) 4065 else: 4066 current = '%s_G%s_%s' % (args[1],args[0],args[2]) 4067 if os.path.exists(pjoin(cwd,current)): 4068 input_files.append(pjoin(cwd, current)) 4069 output_files.append(current) 4070 4071 required_output.append('%s/results.dat' % current) 4072 required_output.append('%s/res_%s.dat' % 
(current,args[3])) 4073 required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 4074 required_output.append('%s/mint_grids' % current) 4075 required_output.append('%s/grid.MC_integer' % current) 4076 if args[3] != '0': 4077 required_output.append('%s/scale_pdf_dependence.dat' % current) 4078 4079 elif args[1] == 'F' or args[1] == 'B': 4080 # MINTMC MODE 4081 input_files.append(pjoin(cwd, 'madevent_mintMC')) 4082 4083 if args[2] == '0': 4084 current = 'G%s%s' % (args[1],args[0]) 4085 else: 4086 current = 'G%s%s_%s' % (args[1],args[0],args[2]) 4087 if os.path.exists(pjoin(cwd,current)): 4088 input_files.append(pjoin(cwd, current)) 4089 output_files.append(current) 4090 if args[2] > '0': 4091 # this is for the split event generation 4092 output_files.append('G%s%s_%s' % (args[1], args[0], args[2])) 4093 required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3])) 4094 4095 else: 4096 required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 4097 if args[3] in ['0','1']: 4098 required_output.append('%s/results.dat' % current) 4099 if args[3] == '1': 4100 output_files.append('%s/results.dat' % current) 4101 4102 else: 4103 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args)) 4104 4105 #Find the correct PDF input file 4106 pdfinput = self.get_pdf_input_filename() 4107 if os.path.exists(pdfinput): 4108 input_files.append(pdfinput) 4109 return input_files, output_files, required_output, args
4110 4111
4112 - def compile(self, mode, options):
4113 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 4114 specified in mode""" 4115 4116 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 4117 4118 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 4119 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 4120 4121 self.get_characteristics(pjoin(self.me_dir, 4122 'SubProcesses', 'proc_characteristics')) 4123 4124 #define a bunch of log files 4125 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 4126 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 4127 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 4128 test_log = pjoin(self.me_dir, 'test.log') 4129 4130 # environmental variables to be included in make_opts 4131 self.make_opts_var = {} 4132 if self.proc_characteristics['has_loops'] and \ 4133 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4134 self.make_opts_var['madloop'] = 'true' 4135 4136 self.update_status('Compiling the code', level=None, update_results=True) 4137 4138 libdir = pjoin(self.me_dir, 'lib') 4139 sourcedir = pjoin(self.me_dir, 'Source') 4140 4141 #clean files 4142 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 4143 #define which executable/tests to compile 4144 if '+' in mode: 4145 mode = mode.split('+')[0] 4146 if mode in ['NLO', 'LO']: 4147 exe = 'madevent_mintFO' 4148 tests = ['test_ME'] 4149 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 4150 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 4151 exe = 'madevent_mintMC' 4152 tests = ['test_ME', 'test_MC'] 4153 # write an analyse_opts with a dummy analysis so that compilation goes through 4154 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock: 4155 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n') 4156 4157 #directory where to compile exe 4158 p_dirs = [d for d in \ 4159 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 
4160 # create param_card.inc and run_card.inc 4161 self.do_treatcards('', amcatnlo=True) 4162 # if --nocompile option is specified, check here that all exes exists. 4163 # If they exists, return 4164 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \ 4165 for p_dir in p_dirs]) and options['nocompile']: 4166 return 4167 4168 # rm links to lhapdflib/ PDFsets if exist 4169 if os.path.exists(pjoin(libdir, 'PDFsets')): 4170 files.rm(pjoin(libdir, 'PDFsets')) 4171 4172 # read the run_card to find if lhapdf is used or not 4173 if self.run_card['pdlabel'] == 'lhapdf' and \ 4174 (self.banner.get_detail('run_card', 'lpp1') != 0 or \ 4175 self.banner.get_detail('run_card', 'lpp2') != 0): 4176 4177 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs]) 4178 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4179 lhaid_list = self.run_card['lhaid'] 4180 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4181 4182 else: 4183 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']: 4184 logger.info('Using built-in libraries for PDFs') 4185 if self.run_card['lpp1'] == 0 == self.run_card['lpp2']: 4186 logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.') 4187 self.make_opts_var['lhapdf'] = "" 4188 4189 # read the run_card to find if applgrid is used or not 4190 if self.run_card['iappl'] != 0: 4191 self.make_opts_var['applgrid'] = 'True' 4192 # check versions of applgrid and amcfast 4193 for code in ['applgrid','amcfast']: 4194 try: 4195 p = subprocess.Popen([self.options[code], '--version'], \ 4196 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 4197 except OSError: 4198 raise aMCatNLOError(('No valid %s installation found. \n' + \ 4199 'Please set the path to %s-config by using \n' + \ 4200 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code)) 4201 else: 4202 output, _ = p.communicate() 4203 if code is 'applgrid' and output < '1.4.63': 4204 raise aMCatNLOError('Version of APPLgrid is too old. 
Use 1.4.69 or later.'\ 4205 +' You are using %s',output) 4206 if code is 'amcfast' and output < '1.1.1': 4207 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\ 4208 +' You are using %s',output) 4209 4210 # set-up the Source/make_opts with the correct applgrid-config file 4211 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 4212 % (self.options['amcfast'],self.options['applgrid']) 4213 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 4214 text_out=[] 4215 for line in text: 4216 if line.strip().startswith('APPLLIBS=$'): 4217 line=appllibs 4218 text_out.append(line) 4219 with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock: 4220 fsock.writelines(text_out) 4221 else: 4222 self.make_opts_var['applgrid'] = "" 4223 4224 if 'fastjet' in self.options.keys() and self.options['fastjet']: 4225 self.make_opts_var['fastjet_config'] = self.options['fastjet'] 4226 4227 # add the make_opts_var to make_opts 4228 self.update_make_opts() 4229 4230 # make Source 4231 self.update_status('Compiling source...', level=None) 4232 misc.compile(['clean4pdf'], cwd = sourcedir) 4233 misc.compile(cwd = sourcedir) 4234 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 4235 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 4236 and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 4237 and os.path.exists(pjoin(libdir, 'libpdf.a')): 4238 logger.info(' ...done, continuing with P* directories') 4239 else: 4240 raise aMCatNLOError('Compilation failed') 4241 4242 # make StdHep (only necessary with MG option output_dependencies='internal') 4243 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 4244 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 4245 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 4246 if os.path.exists(pjoin(sourcedir,'StdHEP')): 4247 logger.info('Compiling StdHEP (can take a couple of minutes) ...') 4248 misc.compile(['StdHEP'], cwd = sourcedir) 
4249 logger.info(' ...done.') 4250 else: 4251 raise aMCatNLOError('Could not compile StdHEP because its'+\ 4252 ' source directory could not be found in the SOURCE folder.\n'+\ 4253 " Check the MG5_aMC option 'output_dependencies.'") 4254 4255 # make CutTools (only necessary with MG option output_dependencies='internal') 4256 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 4257 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 4258 if os.path.exists(pjoin(sourcedir,'CutTools')): 4259 logger.info('Compiling CutTools (can take a couple of minutes) ...') 4260 misc.compile(['CutTools'], cwd = sourcedir) 4261 logger.info(' ...done.') 4262 else: 4263 raise aMCatNLOError('Could not compile CutTools because its'+\ 4264 ' source directory could not be found in the SOURCE folder.\n'+\ 4265 " Check the MG5_aMC option 'output_dependencies.'") 4266 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 4267 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 4268 raise aMCatNLOError('CutTools compilation failed.') 4269 4270 # Verify compatibility between current compiler and the one which was 4271 # used when last compiling CutTools (if specified). 4272 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 4273 libdir, 'libcts.a')))),'compiler_version.log') 4274 if os.path.exists(compiler_log_path): 4275 compiler_version_used = open(compiler_log_path,'r').read() 4276 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 4277 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 4278 if os.path.exists(pjoin(sourcedir,'CutTools')): 4279 logger.info('CutTools was compiled with a different fortran'+\ 4280 ' compiler. 
Re-compiling it now...') 4281 misc.compile(['cleanCT'], cwd = sourcedir) 4282 misc.compile(['CutTools'], cwd = sourcedir) 4283 logger.info(' ...done.') 4284 else: 4285 raise aMCatNLOError("CutTools installation in %s"\ 4286 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 4287 " seems to have been compiled with a different compiler than"+\ 4288 " the one specified in MG5_aMC. Please recompile CutTools.") 4289 4290 # make IREGI (only necessary with MG option output_dependencies='internal') 4291 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 4292 and os.path.exists(pjoin(sourcedir,'IREGI')): 4293 logger.info('Compiling IREGI (can take a couple of minutes) ...') 4294 misc.compile(['IREGI'], cwd = sourcedir) 4295 logger.info(' ...done.') 4296 4297 if os.path.exists(pjoin(libdir, 'libiregi.a')): 4298 # Verify compatibility between current compiler and the one which was 4299 # used when last compiling IREGI (if specified). 4300 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 4301 libdir, 'libiregi.a')))),'compiler_version.log') 4302 if os.path.exists(compiler_log_path): 4303 compiler_version_used = open(compiler_log_path,'r').read() 4304 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 4305 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 4306 if os.path.exists(pjoin(sourcedir,'IREGI')): 4307 logger.info('IREGI was compiled with a different fortran'+\ 4308 ' compiler. Re-compiling it now...') 4309 misc.compile(['cleanIR'], cwd = sourcedir) 4310 misc.compile(['IREGI'], cwd = sourcedir) 4311 logger.info(' ...done.') 4312 else: 4313 raise aMCatNLOError("IREGI installation in %s"\ 4314 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 4315 " seems to have been compiled with a different compiler than"+\ 4316 " the one specified in MG5_aMC. 
Please recompile IREGI.") 4317 4318 # check if MadLoop virtuals have been generated 4319 if self.proc_characteristics['has_loops'] and \ 4320 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4321 if mode in ['NLO', 'aMC@NLO', 'noshower']: 4322 tests.append('check_poles') 4323 4324 # make and run tests (if asked for), gensym and make madevent in each dir 4325 self.update_status('Compiling directories...', level=None) 4326 4327 for test in tests: 4328 self.write_test_input(test) 4329 4330 try: 4331 import multiprocessing 4332 if not self.nb_core: 4333 try: 4334 self.nb_core = int(self.options['nb_core']) 4335 except TypeError: 4336 self.nb_core = multiprocessing.cpu_count() 4337 except ImportError: 4338 self.nb_core = 1 4339 4340 compile_options = copy.copy(self.options) 4341 compile_options['nb_core'] = self.nb_core 4342 compile_cluster = cluster.MultiCore(**compile_options) 4343 logger.info('Compiling on %d cores' % self.nb_core) 4344 4345 update_status = lambda i, r, f: self.donothing(i,r,f) 4346 for p_dir in p_dirs: 4347 compile_cluster.submit(prog = compile_dir, 4348 argument = [self.me_dir, p_dir, mode, options, 4349 tests, exe, self.options['run_mode']]) 4350 try: 4351 compile_cluster.wait(self.me_dir, update_status) 4352 except Exception, error: 4353 logger.warning("Fail to compile the Subprocesses") 4354 if __debug__: 4355 raise 4356 compile_cluster.remove() 4357 self.do_quit('') 4358 4359 logger.info('Checking test output:') 4360 for p_dir in p_dirs: 4361 logger.info(p_dir) 4362 for test in tests: 4363 logger.info(' Result for %s:' % test) 4364 4365 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir) 4366 #check that none of the tests failed 4367 self.check_tests(test, this_dir)
4368 4369
4370 - def donothing(*args):
4371 pass
4372 4373
4374 - def check_tests(self, test, dir):
4375 """just call the correct parser for the test log. 4376 Skip check_poles for LOonly folders""" 4377 if test in ['test_ME', 'test_MC']: 4378 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test)) 4379 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')): 4380 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
4381 4382
4383 - def parse_test_mx_log(self, log):
4384 """read and parse the test_ME/MC.log file""" 4385 content = open(log).read() 4386 if 'FAILED' in content: 4387 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK') 4388 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \ 4389 'Please check that widths of final state particles (e.g. top) have been' + \ 4390 ' set to 0 in the param_card.dat.') 4391 else: 4392 lines = [l for l in content.split('\n') if 'PASSED' in l] 4393 logger.info(' Passed.') 4394 logger.debug('\n'+'\n'.join(lines))
4395 4396
4397 - def parse_check_poles_log(self, log):
4398 """reads and parse the check_poles.log file""" 4399 content = open(log).read() 4400 npass = 0 4401 nfail = 0 4402 for line in content.split('\n'): 4403 if 'PASSED' in line: 4404 npass +=1 4405 tolerance = float(line.split()[1]) 4406 if 'FAILED' in line: 4407 nfail +=1 4408 tolerance = float(line.split()[1]) 4409 4410 if nfail + npass == 0: 4411 logger.warning('0 points have been tried') 4412 return 4413 4414 if float(nfail)/float(nfail+npass) > 0.1: 4415 raise aMCatNLOError('Poles do not cancel, run cannot continue') 4416 else: 4417 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \ 4418 %(npass, nfail+npass, tolerance))
4419 4420
4421 - def write_test_input(self, test):
4422 """write the input files to run test_ME/MC or check_poles""" 4423 if test in ['test_ME', 'test_MC']: 4424 content = "-2 -2\n" #generate randomly energy/angle 4425 content+= "100 100\n" #run 100 points for soft and collinear tests 4426 content+= "0\n" #sum over helicities 4427 content+= "0\n" #all FKS configs 4428 content+= '\n'.join(["-1"] * 50) #random diagram 4429 elif test == 'check_poles': 4430 content = '20 \n -1\n' 4431 4432 file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w') 4433 if test == 'test_MC': 4434 shower = self.run_card['parton_shower'] 4435 MC_header = "%s\n " % shower + \ 4436 "1 \n1 -0.1\n-1 -0.1\n" 4437 file.write(MC_header + content) 4438 else: 4439 file.write(content) 4440 file.close()
4441 4442 4443 4444 ############################################################################
4445 - def find_model_name(self):
4446 """ return the model name """ 4447 if hasattr(self, 'model_name'): 4448 return self.model_name 4449 4450 model = 'sm' 4451 proc = [] 4452 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')): 4453 line = line.split('#')[0] 4454 #line = line.split('=')[0] 4455 if line.startswith('import') and 'model' in line: 4456 model = line.split()[2] 4457 proc = [] 4458 elif line.startswith('generate'): 4459 proc.append(line.split(None,1)[1]) 4460 elif line.startswith('add process'): 4461 proc.append(line.split(None,2)[2]) 4462 4463 self.model = model 4464 self.process = proc 4465 return model
4466 4467 4468 4469 ############################################################################
4470 - def ask_run_configuration(self, mode, options, switch={}):
4471 """Ask the question when launching generate_events/multi_run""" 4472 4473 if 'parton' not in options: 4474 options['parton'] = False 4475 if 'reweightonly' not in options: 4476 options['reweightonly'] = False 4477 4478 4479 void = 'Not installed' 4480 switch_order = ['order', 'fixed_order', 'shower','madspin', 'reweight','madanalysis5'] 4481 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void, 4482 'madspin': void,'reweight':'OFF','madanalysis5':void} 4483 if not switch: 4484 switch = switch_default 4485 else: 4486 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch)) 4487 default_switch = ['ON', 'OFF'] 4488 4489 4490 allowed_switch_value = {'order': ['LO', 'NLO'], 4491 'fixed_order': default_switch, 4492 'shower': default_switch, 4493 'madspin': default_switch, 4494 'reweight': default_switch, 4495 'madanalysis5':['OFF','HADRON']} 4496 4497 if not os.path.exists(pjoin(self.me_dir, 'Cards', 4498 'madanalysis5_hadron_card_default.dat')): 4499 allowed_switch_value['madanalysis5']=[] 4500 4501 description = {'order': 'Perturbative order of the calculation:', 4502 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):', 4503 'shower': 'Shower the generated events:', 4504 'madspin': 'Decay particles with the MadSpin module:', 4505 'reweight': 'Add weights to the events based on changing model parameters:', 4506 'madanalysis5':'Run MadAnalysis5 on the events generated:'} 4507 4508 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'}, 4509 ('madspin', 'ON'): {'fixed_order':'OFF'}, 4510 ('reweight', 'ON'): {'fixed_order':'OFF'}, 4511 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF', 'reweight':'OFF','madanalysis5':'OFF'}, 4512 ('madanalysis5','HADRON'): {'shower': 'ON','fixed_order':'OFF'}, 4513 ('shower','OFF'): {'madanalysis5': 'OFF'}, 4514 } 4515 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] 4516 4517 assign_switch = lambda key, value: 
switch.__setitem__(key, value if switch[key] != void else void ) 4518 4519 if self.proc_characteristics['ninitial'] == 1: 4520 switch['fixed_order'] = 'ON' 4521 switch['shower'] = 'Not available for decay' 4522 switch['madspin'] = 'Not available for decay' 4523 switch['reweight'] = 'Not available for decay' 4524 switch['madanalysis5'] = 'Not available for decay' 4525 allowed_switch_value['fixed_order'] = ['ON'] 4526 allowed_switch_value['shower'] = ['OFF'] 4527 allowed_switch_value['madspin'] = ['OFF'] 4528 allowed_switch_value['reweight'] = ['OFF'] 4529 allowed_switch_value['madanalysis5'] = ['OFF'] 4530 available_mode = ['0','1'] 4531 special_values = ['LO', 'NLO'] 4532 else: 4533 # Init the switch value according to the current status 4534 available_mode = ['0', '1', '2','3'] 4535 4536 if mode == 'auto': 4537 mode = None 4538 if not mode and (options['parton'] or options['reweightonly']): 4539 mode = 'noshower' 4540 4541 4542 if '3' in available_mode: 4543 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')): 4544 switch['shower'] = 'ON' 4545 else: 4546 switch['shower'] = 'OFF' 4547 if os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card_default.dat')): 4548 available_mode.append('6') 4549 if os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card.dat')): 4550 switch['madanalysis5'] = 'HADRON' 4551 else: 4552 switch['madanalysis5'] = 'OFF' 4553 4554 if (not aMCatNLO or self.options['mg5_path']) and '3' in available_mode: 4555 available_mode.append('4') 4556 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 4557 switch['madspin'] = 'ON' 4558 else: 4559 switch['madspin'] = 'OFF' 4560 if misc.has_f2py() or self.options['f2py_compiler']: 4561 available_mode.append('5') 4562 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')): 4563 switch['reweight'] = 'ON' 4564 else: 4565 switch['reweight'] = 'OFF' 4566 else: 4567 switch['reweight'] = 'Not available (requires NumPy)' 4568 4569 if 'do_reweight' 
in options and options['do_reweight'] and '3' in available_mode: 4570 if switch['reweight'] == "OFF": 4571 switch['reweight'] = "ON" 4572 elif switch['reweight'] != "ON": 4573 logger.critical("Cannot run REWEIGHT: %s" % switch['reweight']) 4574 if 'do_madspin' in options and options['do_madspin']: 4575 if switch['madspin'] == "OFF": 4576 switch['madspin'] = 'ON' 4577 elif switch['madspin'] != "ON": 4578 logger.critical("Cannot run MadSpin module: %s" % switch['reweight']) 4579 4580 answers = list(available_mode) + ['auto', 'done'] 4581 alias = {} 4582 for id, key in enumerate(switch_order): 4583 if switch[key] != void and switch[key] in allowed_switch_value[key] and \ 4584 len(allowed_switch_value[key])>1: 4585 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]] 4586 #allow lower case for on/off 4587 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s)) 4588 for s in allowed_switch_value[key])) 4589 answers += special_values 4590 4591 def create_question(switch): 4592 switch_format = " %i %-61s %12s=%s\n" 4593 question = "The following switches determine which operations are executed:\n" 4594 for id, key in enumerate(switch_order): 4595 question += switch_format % (id+1, description[key], key, switch[key]) 4596 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1) 4597 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n' 4598 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n' 4599 return question
4600 4601 4602 def modify_switch(mode, answer, switch): 4603 if '=' in answer: 4604 key, status = answer.split('=') 4605 switch[key] = status 4606 if (key, status) in force_switch: 4607 for key2, status2 in force_switch[(key, status)].items(): 4608 if switch[key2] not in [status2, void]: 4609 logger.info('For coherence \'%s\' is set to \'%s\'' 4610 % (key2, status2), '$MG:color:BLACK') 4611 switch[key2] = status2 4612 elif answer in ['0', 'auto', 'done']: 4613 return 4614 elif answer in special_values: 4615 logger.info('Enter mode value: %s. Go to the related mode' % answer, '$MG:color:BLACK') 4616 #assign_switch('reweight', 'OFF') 4617 #assign_switch('madspin', 'OFF') 4618 if answer == 'LO': 4619 switch['order'] = 'LO' 4620 switch['fixed_order'] = 'ON' 4621 assign_switch('shower', 'OFF') 4622 elif answer == 'NLO': 4623 switch['order'] = 'NLO' 4624 switch['fixed_order'] = 'ON' 4625 assign_switch('shower', 'OFF') 4626 elif answer == 'aMC@NLO': 4627 switch['order'] = 'NLO' 4628 switch['fixed_order'] = 'OFF' 4629 assign_switch('shower', 'ON') 4630 elif answer == 'aMC@LO': 4631 switch['order'] = 'LO' 4632 switch['fixed_order'] = 'OFF' 4633 assign_switch('shower', 'ON') 4634 elif answer == 'noshower': 4635 switch['order'] = 'NLO' 4636 switch['fixed_order'] = 'OFF' 4637 assign_switch('shower', 'OFF') 4638 elif answer == 'noshowerLO': 4639 switch['order'] = 'LO' 4640 switch['fixed_order'] = 'OFF' 4641 assign_switch('shower', 'OFF') 4642 if mode: 4643 return 4644 return switch 4645 4646 modify_switch(mode, self.last_mode, switch) 4647 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 4648 assign_switch('madspin', 'ON') 4649 4650 if not self.force: 4651 answer = '' 4652 while answer not in ['0', 'done', 'auto', 'onlyshower']: 4653 question = create_question(switch) 4654 if mode: 4655 answer = mode 4656 else: 4657 answer = self.ask(question, '0', answers, alias=alias) 4658 if answer.isdigit() and answer != '0': 4659 key = 
switch_order[int(answer) - 1] 4660 opt1 = allowed_switch_value[key][0] 4661 opt2 = allowed_switch_value[key][1] 4662 answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2) 4663 4664 if not modify_switch(mode, answer, switch): 4665 break 4666 4667 #assign the mode depending of the switch 4668 if not mode or mode == 'auto': 4669 if switch['order'] == 'LO': 4670 if switch['shower'] == 'ON': 4671 mode = 'aMC@LO' 4672 elif switch['fixed_order'] == 'ON': 4673 mode = 'LO' 4674 else: 4675 mode = 'noshowerLO' 4676 elif switch['order'] == 'NLO': 4677 if switch['shower'] == 'ON': 4678 mode = 'aMC@NLO' 4679 elif switch['fixed_order'] == 'ON': 4680 mode = 'NLO' 4681 else: 4682 mode = 'noshower' 4683 logger.info('will run in mode: %s' % mode) 4684 4685 if mode == 'noshower': 4686 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical. 4687 Please, shower the Les Houches events before using them for physics analyses.""") 4688 4689 4690 # specify the cards which are needed for this run. 
4691 cards = ['param_card.dat', 'run_card.dat'] 4692 ignore = [] 4693 if mode in ['LO', 'NLO']: 4694 options['parton'] = True 4695 ignore = ['shower_card.dat', 'madspin_card.dat'] 4696 cards.append('FO_analyse_card.dat') 4697 else: 4698 if switch['madspin'] == 'ON': 4699 cards.append('madspin_card.dat') 4700 if switch['reweight'] == 'ON': 4701 cards.append('reweight_card.dat') 4702 if switch['madanalysis5'] == 'HADRON': 4703 cards.append('madanalysis5_hadron_card.dat') 4704 if 'aMC@' in mode: 4705 cards.append('shower_card.dat') 4706 if mode == 'onlyshower': 4707 cards = ['shower_card.dat'] 4708 if options['reweightonly']: 4709 cards = ['run_card.dat'] 4710 4711 self.keep_cards(cards, ignore) 4712 4713 if mode =='onlyshower': 4714 cards = ['shower_card.dat'] 4715 4716 4717 # automatically switch to keep_wgt option 4718 first_cmd = [] # force to change some switch 4719 4720 if not options['force'] and not self.force: 4721 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd) 4722 4723 4724 self.banner = banner_mod.Banner() 4725 4726 # store the cards in the banner 4727 for card in cards: 4728 self.banner.add(pjoin(self.me_dir, 'Cards', card)) 4729 # and the run settings 4730 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()]) 4731 self.banner.add_text('run_settings', run_settings) 4732 4733 if not mode =='onlyshower': 4734 self.run_card = self.banner.charge_card('run_card') 4735 self.run_tag = self.run_card['run_tag'] 4736 #this is if the user did not provide a name for the current run 4737 if not hasattr(self, 'run_name') or not self.run_name: 4738 self.run_name = self.find_available_run_name(self.me_dir) 4739 #add a tag in the run_name for distinguish run_type 4740 if self.run_name.startswith('run_'): 4741 if mode in ['LO','aMC@LO','noshowerLO']: 4742 self.run_name += '_LO' 4743 self.set_run_name(self.run_name, self.run_tag, 'parton') 4744 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']: 4745 raise 
self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""") 4746 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']: 4747 logger.warning("""You are running with FxFx merging enabled. To be able to merge 4748 samples of various multiplicities without double counting, you 4749 have to remove some events after showering 'by hand'. Please 4750 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 4751 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q': 4752 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""") 4753 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8': 4754 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \ 4755 "Type \'n\' to stop or \'y\' to continue" 4756 answers = ['n','y'] 4757 answer = self.ask(question, 'n', answers, alias=alias) 4758 if answer == 'n': 4759 error = '''Stop opertation''' 4760 self.ask_run_configuration(mode, options) 4761 # raise aMCatNLOError(error) 4762 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']: 4763 # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs. 4764 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""") 4765 if 'aMC@' in mode or mode == 'onlyshower': 4766 self.shower_card = self.banner.charge_card('shower_card') 4767 4768 elif mode in ['LO', 'NLO']: 4769 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat') 4770 self.analyse_card = self.banner.charge_card('FO_analyse_card') 4771 4772 return mode 4773 4774 4775 #=============================================================================== 4776 # aMCatNLOCmd 4777 #===============================================================================
# Shell variant of the aMC@NLO command interface: combines aMCatNLOCmd with
# cmd.CmdShell (presumably adding interactive line-editing/completion —
# both base classes are defined outside this view; confirm there).
class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
    """The command line processor of MadGraph"""
4780 4781 _compile_usage = "compile [MODE] [options]\n" + \ 4782 "-- compiles aMC@NLO \n" + \ 4783 " MODE can be either FO, for fixed-order computations, \n" + \ 4784 " or MC for matching with parton-shower monte-carlos. \n" + \ 4785 " (if omitted, it is set to MC)\n" 4786 _compile_parser = misc.OptionParser(usage=_compile_usage) 4787 _compile_parser.add_option("-f", "--force", default=False, action='store_true', 4788 help="Use the card present in the directory for the launch, without editing them") 4789 4790 _launch_usage = "launch [MODE] [options]\n" + \ 4791 "-- execute aMC@NLO \n" + \ 4792 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \ 4793 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \ 4794 " computation of the total cross section and the filling of parton-level histograms \n" + \ 4795 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \ 4796 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \ 4797 " event file is generated which will be showered with the MonteCarlo specified \n" + \ 4798 " in the run_card.dat\n" 4799 4800 _launch_parser = misc.OptionParser(usage=_launch_usage) 4801 _launch_parser.add_option("-f", "--force", default=False, action='store_true', 4802 help="Use the card present in the directory for the launch, without editing them") 4803 _launch_parser.add_option("-c", "--cluster", default=False, action='store_true', 4804 help="Submit the jobs on the cluster") 4805 _launch_parser.add_option("-m", "--multicore", default=False, action='store_true', 4806 help="Submit the jobs on multicore mode") 4807 _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true', 4808 help="Skip compilation. 
Ignored if no executable is found") 4809 _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true', 4810 help="Skip integration and event generation, just run reweight on the" + \ 4811 " latest generated event files (see list in SubProcesses/nevents_unweighted)") 4812 _launch_parser.add_option("-p", "--parton", default=False, action='store_true', 4813 help="Stop the run after the parton level file generation (you need " + \ 4814 "to shower the file in order to get physical results)") 4815 _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true', 4816 help="Skip grid set up, just generate events starting from " + \ 4817 "the last available results") 4818 _launch_parser.add_option("-n", "--name", default=False, dest='run_name', 4819 help="Provide a name to the run") 4820 _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid', 4821 help="For use with APPLgrid only: start from existing grids") 4822 _launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true', 4823 help="Run the reweight module (reweighting by different model parameters)") 4824 _launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true', 4825 help="Run the madspin package") 4826 4827 4828 4829 _generate_events_usage = "generate_events [MODE] [options]\n" + \ 4830 "-- execute aMC@NLO \n" + \ 4831 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \ 4832 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \ 4833 " computation of the total cross section and the filling of parton-level histograms \n" + \ 4834 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \ 4835 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \ 4836 " event file is generated which will be showered with the MonteCarlo specified \n" + 
\ 4837 " in the run_card.dat\n" 4838 4839 _generate_events_parser = misc.OptionParser(usage=_generate_events_usage) 4840 _generate_events_parser.add_option("-f", "--force", default=False, action='store_true', 4841 help="Use the card present in the directory for the generate_events, without editing them") 4842 _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true', 4843 help="Submit the jobs on the cluster") 4844 _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true', 4845 help="Submit the jobs on multicore mode") 4846 _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true', 4847 help="Skip compilation. Ignored if no executable is found") 4848 _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true', 4849 help="Skip integration and event generation, just run reweight on the" + \ 4850 " latest generated event files (see list in SubProcesses/nevents_unweighted)") 4851 _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true', 4852 help="Stop the run after the parton level file generation (you need " + \ 4853 "to shower the file in order to get physical results)") 4854 _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true', 4855 help="Skip grid set up, just generate events starting from " + \ 4856 "the last available results") 4857 _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name', 4858 help="Provide a name to the run") 4859 4860 4861 4862 _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \ 4863 "-- calculate cross section up to ORDER.\n" + \ 4864 " ORDER can be either LO or NLO (if omitted, it is set to NLO). 
\n" 4865 4866 _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage) 4867 _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true', 4868 help="Use the card present in the directory for the launch, without editing them") 4869 _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true', 4870 help="Submit the jobs on the cluster") 4871 _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true', 4872 help="Submit the jobs on multicore mode") 4873 _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true', 4874 help="Skip compilation. Ignored if no executable is found") 4875 _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name', 4876 help="Provide a name to the run") 4877 _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid', 4878 help="For use with APPLgrid only: start from existing grids") 4879 _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true', 4880 help="Skip grid set up, just generate events starting from " + \ 4881 "the last available results") 4882 4883 _shower_usage = 'shower run_name [options]\n' + \ 4884 '-- do shower/hadronization on parton-level file generated for run run_name\n' + \ 4885 ' all the information (e.g. number of events, MonteCarlo, ...\n' + \ 4886 ' are directly read from the header of the event file\n' 4887 _shower_parser = misc.OptionParser(usage=_shower_usage) 4888 _shower_parser.add_option("-f", "--force", default=False, action='store_true', 4889 help="Use the shower_card present in the directory for the launch, without editing") 4890 4891 if '__main__' == __name__: 4892 # Launch the interface without any check if one code is already running. 4893 # This can ONLY run a single command !! 
4894 import sys 4895 if not sys.version_info[0] == 2 or sys.version_info[1] < 6: 4896 sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\ 4897 'Please upgrate your version of python.') 4898 4899 import os 4900 import optparse 4901 # Get the directory of the script real path (bin) 4902 # and add it to the current PYTHONPATH 4903 root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ ))) 4904 sys.path.insert(0, root_path) 4905
4906 - class MyOptParser(optparse.OptionParser):
4907 - class InvalidOption(Exception): pass
4908 - def error(self, msg=''):
4909 raise MyOptParser.InvalidOption(msg)
4910 # Write out nice usage message if called with -h or --help 4911 usage = "usage: %prog [options] [FILE] " 4912 parser = MyOptParser(usage=usage) 4913 parser.add_option("-l", "--logging", default='INFO', 4914 help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]") 4915 parser.add_option("","--web", action="store_true", default=False, dest='web', \ 4916 help='force toce to be in secure mode') 4917 parser.add_option("","--debug", action="store_true", default=False, dest='debug', \ 4918 help='force to launch debug mode') 4919 parser_error = '' 4920 done = False 4921 4922 for i in range(len(sys.argv)-1): 4923 try: 4924 (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i]) 4925 done = True 4926 except MyOptParser.InvalidOption, error: 4927 pass 4928 else: 4929 args += sys.argv[len(sys.argv)-i:] 4930 if not done: 4931 # raise correct error: 4932 try: 4933 (options, args) = parser.parse_args() 4934 except MyOptParser.InvalidOption, error: 4935 print error 4936 sys.exit(2) 4937 4938 if len(args) == 0: 4939 args = '' 4940 4941 import subprocess 4942 import logging 4943 import logging.config 4944 # Set logging level according to the logging level given by options 4945 #logging.basicConfig(level=vars(logging)[options.logging]) 4946 import internal.coloring_logging 4947 try: 4948 if __debug__ and options.logging == 'INFO': 4949 options.logging = 'DEBUG' 4950 if options.logging.isdigit(): 4951 level = int(options.logging) 4952 else: 4953 level = eval('logging.' 
+ options.logging) 4954 print os.path.join(root_path, 'internal', 'me5_logging.conf') 4955 logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf')) 4956 logging.root.setLevel(level) 4957 logging.getLogger('madgraph').setLevel(level) 4958 except: 4959 raise 4960 pass 4961 4962 # Call the cmd interface main loop 4963 try: 4964 if args: 4965 # a single command is provided 4966 if '--web' in args: 4967 i = args.index('--web') 4968 args.pop(i) 4969 cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path),force_run=True) 4970 else: 4971 cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path),force_run=True) 4972 4973 if not hasattr(cmd_line, 'do_%s' % args[0]): 4974 if parser_error: 4975 print parser_error 4976 print 'and %s can not be interpreted as a valid command.' % args[0] 4977 else: 4978 print 'ERROR: %s not a valid command. Please retry' % args[0] 4979 else: 4980 cmd_line.use_rawinput = False 4981 cmd_line.run_cmd(' '.join(args)) 4982 cmd_line.run_cmd('quit') 4983 4984 except KeyboardInterrupt: 4985 print 'quit on KeyboardInterrupt' 4986 pass 4987