1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """A user friendly command line interface to access MadGraph5_aMC@NLO features.
16 Uses the cmd package for command interpretation and tab completion.
17 """
18 from __future__ import division
19
20 import atexit
21 import glob
22 import logging
23 import math
24 import optparse
25 import os
26 import pydoc
27 import random
28 import re
29 import shutil
30 import subprocess
31 import sys
32 import traceback
33 import time
34 import signal
35 import tarfile
36 import copy
37 import datetime
38 import tarfile
39 import traceback
40 import StringIO
41 try:
42 import cpickle as pickle
43 except:
44 import pickle
45
46 try:
47 import readline
48 GNU_SPLITTING = ('GNU' in readline.__doc__)
49 except:
50 GNU_SPLITTING = True
51
52 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
53 root_path = os.path.split(root_path)[0]
54 sys.path.insert(0, os.path.join(root_path,'bin'))
55
56
57 pjoin = os.path.join
58
59 logger = logging.getLogger('madgraph.stdout')
60 logger_stderr = logging.getLogger('madgraph.stderr')
61
62 try:
63 import madgraph
64 except ImportError:
65 aMCatNLO = True
66 import internal.extended_cmd as cmd
67 import internal.common_run_interface as common_run
68 import internal.banner as banner_mod
69 import internal.misc as misc
70 from internal import InvalidCmd, MadGraph5Error
71 import internal.files as files
72 import internal.cluster as cluster
73 import internal.save_load_object as save_load_object
74 import internal.gen_crossxhtml as gen_crossxhtml
75 import internal.sum_html as sum_html
76 import internal.shower_card as shower_card
77 import internal.FO_analyse_card as analyse_card
78 import internal.lhe_parser as lhe_parser
79 else:
80
81 aMCatNLO = False
82 import madgraph.interface.extended_cmd as cmd
83 import madgraph.interface.common_run_interface as common_run
84 import madgraph.iolibs.files as files
85 import madgraph.iolibs.save_load_object as save_load_object
86 import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
87 import madgraph.madevent.sum_html as sum_html
88 import madgraph.various.banner as banner_mod
89 import madgraph.various.cluster as cluster
90 import madgraph.various.misc as misc
91 import madgraph.various.shower_card as shower_card
92 import madgraph.various.FO_analyse_card as analyse_card
93 import madgraph.various.lhe_parser as lhe_parser
94 from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR
95
98
99
101 """compile the direcory p_dir
102 arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
103 this function needs not to be a class method in order to do
104 the compilation on multicore"""
105
106 if len(arguments) == 1:
107 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
108 elif len(arguments)==7:
109 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
110 else:
111 raise aMCatNLOError, 'not correct number of argument'
112 logger.info(' Compiling %s...' % p_dir)
113
114 this_dir = pjoin(me_dir, 'SubProcesses', p_dir)
115
116 try:
117
118
119 for test in tests:
120
121 if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
122 continue
123 if test == 'test_ME' or test == 'test_MC':
124 test_exe='test_soft_col_limits'
125 else:
126 test_exe=test
127 misc.compile([test_exe], cwd = this_dir, job_specs = False)
128 input = pjoin(me_dir, '%s_input.txt' % test)
129
130 misc.call(['./%s' % (test_exe)], cwd=this_dir,
131 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'),
132 close_fds=True)
133 if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')) :
134 tf=tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz',
135 dereference=True)
136 tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources')
137 tf.close()
138
139 if not options['reweightonly']:
140 misc.compile(['gensym'], cwd=this_dir, job_specs = False)
141 misc.call(['./gensym'],cwd= this_dir,
142 stdout=open(pjoin(this_dir, 'gensym.log'), 'w'),
143 close_fds=True)
144
145 misc.compile([exe], cwd=this_dir, job_specs = False)
146 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
147 misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False)
148
149 logger.info(' %s done.' % p_dir)
150 return 0
151 except MadGraph5Error, msg:
152 return msg
153
154
156 """check that the current fortran compiler is gfortran 4.6 or later.
157 If block, stops the execution, otherwise just print a warning"""
158
159 msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
160 'gfortran 4.6 or later installed.\n%s has been detected\n'+\
161 'Note that You can still run all MadEvent run without any problem!'
162
163 if options['fortran_compiler']:
164 compiler = options['fortran_compiler']
165 elif misc.which('gfortran'):
166 compiler = 'gfortran'
167 else:
168 compiler = ''
169
170 if 'gfortran' not in compiler:
171 if block:
172 raise aMCatNLOError(msg % compiler)
173 else:
174 logger.warning(msg % compiler)
175 else:
176 curr_version = misc.get_gfortran_version(compiler)
177 if not ''.join(curr_version.split('.')) >= '46':
178 if block:
179 raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
180 else:
181 logger.warning(msg % (compiler + ' ' + curr_version))
182
183
184
185
186
187
189 """Particularisation of the cmd command for aMCatNLO"""
190
191
    # suggested follow-up commands for the interactive help system
    next_possibility = {
        'start': [],
    }

    # file written when an unexpected exception is caught, referenced
    # by the error_debug message below
    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    # message shown for configuration (non-bug) problems
    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'

    # message printed when the user hits Ctrl-C
    keyboard_stop_msg = """stopping all operation
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # local aliases so code can raise self.InvalidCmd / self.ConfigurationError
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError
210
    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # when True, interactive questions are answered with their default
        self.force = False

        # Build a line with the current version number and date, used both
        # in the welcome banner and in the saved-history header; fall back
        # to MGMEVersion.txt when the package info is unavailable.
        info = misc.get_pkg_info()
        info_line = ""
        if info and info.has_key('version') and info.has_key('date'):
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#* VERSION %s %s %s *\n" % \
                    (info['version'],
                    (30 - len_version - len_date) * ' ',
                    info['date'])
        else:
            version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
            info_line = "#* VERSION %s %s *\n" % \
                (version, (24 - len(version)) * ' ')

        # header written at the top of every saved command-history file
        # (the trailing %s placeholders are filled by get_time_info later)
        self.history_header = \
            '#************************************************************\n' + \
            '#* MadGraph5_aMC@NLO *\n' + \
            '#* *\n' + \
            "#* * * *\n" + \
            "#* * * * * *\n" + \
            "#* * * * * 5 * * * * *\n" + \
            "#* * * * * *\n" + \
            "#* * * *\n" + \
            "#* *\n" + \
            "#* *\n" + \
            info_line + \
            "#* *\n" + \
            "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
            "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
            "#* and *\n" + \
            "#* http://amcatnlo.cern.ch *\n" + \
            '#* *\n' + \
            '#************************************************************\n' + \
            '#* *\n' + \
            '#* Command File for aMCatNLO *\n' + \
            '#* *\n' + \
            '#* run as ./bin/aMCatNLO.py filename *\n' + \
            '#* *\n' + \
            '#************************************************************\n'

        # strip the leading '#' so the same line fits the welcome banner
        if info_line:
            info_line = info_line[1:]

        # print the welcome banner on startup
        logger.info(\
            "************************************************************\n" + \
            "* *\n" + \
            "* W E L C O M E to M A D G R A P H 5 *\n" + \
            "* a M C @ N L O *\n" + \
            "* *\n" + \
            "* * * *\n" + \
            "* * * * * *\n" + \
            "* * * * * 5 * * * * *\n" + \
            "* * * * * *\n" + \
            "* * * *\n" + \
            "* *\n" + \
            info_line + \
            "* *\n" + \
            "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
            "* http://amcatnlo.cern.ch *\n" + \
            "* *\n" + \
            "* Type 'help' for in-line help. *\n" + \
            "* *\n" + \
            "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
287
289 """return the history header"""
290 return self.history_header % misc.get_time_info()
291
293 """action to perform to close nicely on a keyboard interupt"""
294 try:
295 if hasattr(self, 'cluster'):
296 logger.info('rm jobs on queue')
297 self.cluster.remove()
298 if hasattr(self, 'results'):
299 self.update_status('Stop by the user', level=None, makehtml=True, error=True)
300 self.add_error_log_in_html(KeyboardInterrupt)
301 except:
302 pass
303
    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # 'force' only applies to the command that just ran; reset it
        self.force = False

        # nothing to refresh when commands come from a script/pipe
        if not self.use_rawinput:
            return stop

        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit','quit','EOF']:
            return stop

        try:
            # refresh the HTML status page; purely cosmetic, best effort
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                level=None, error=True)
        except Exception:
            # update_status can fail (e.g. before any results db exists);
            # log it for debugging but never break the command loop
            misc.sprint('self.update_status fails', log=logger)
            pass
326
332
338
344
345
346
347
348
349
351 """ The Series of help routine for the aMCatNLOCmd"""
352
356
358 logger.info("syntax: banner_run Path|RUN [--run_options]")
359 logger.info("-- Reproduce a run following a given banner")
360 logger.info(" One of the following argument is require:")
361 logger.info(" Path should be the path of a valid banner.")
362 logger.info(" RUN should be the name of a run of the current directory")
363 self.run_options_help([('-f','answer all question by default'),
364 ('--name=X', 'Define the name associated with the new run')])
365
366
370
375
376
380
384
385
387 logger.info("syntax: open FILE ")
388 logger.info("-- open a file with the appropriate editor.")
389 logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
390 logger.info(' the path to the last created/used directory is used')
391
393 if data:
394 logger.info('-- local options:')
395 for name, info in data:
396 logger.info(' %s : %s' % (name, info))
397
398 logger.info("-- session options:")
399 logger.info(" Note that those options will be kept for the current session")
400 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
401 logger.info(" --multicore : Run in multi-core configuration")
402 logger.info(" --nb_core=X : limit the number of core to use to X.")
403
404
405
406
407
408
409
411 """ The Series of check routine for the aMCatNLOCmd"""
412
414 """Check the validity of the line. args[0] is the run_directory"""
415
416 if options['force']:
417 self.force = True
418
419 if len(args) == 0:
420 self.help_shower()
421 raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
422 if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
423 raise self.InvalidCmd, 'Directory %s does not exists' % \
424 pjoin(os.getcwd(), 'Events', args[0])
425
426 self.set_run_name(args[0], level= 'shower')
427 args[0] = pjoin(self.me_dir, 'Events', args[0])
428
430 """Check the argument for the plot command
431 plot run_name modes"""
432
433
434 madir = self.options['madanalysis_path']
435 td = self.options['td_path']
436
437 if not madir or not td:
438 logger.info('Retry to read configuration file to find madanalysis/td')
439 self.set_configuration()
440
441 madir = self.options['madanalysis_path']
442 td = self.options['td_path']
443
444 if not madir:
445 error_msg = 'No Madanalysis path correctly set.'
446 error_msg += 'Please use the set command to define the path and retry.'
447 error_msg += 'You can also define it in the configuration file.'
448 raise self.InvalidCmd(error_msg)
449 if not td:
450 error_msg = 'No path to td directory correctly set.'
451 error_msg += 'Please use the set command to define the path and retry.'
452 error_msg += 'You can also define it in the configuration file.'
453 raise self.InvalidCmd(error_msg)
454
455 if len(args) == 0:
456 if not hasattr(self, 'run_name') or not self.run_name:
457 self.help_plot()
458 raise self.InvalidCmd('No run name currently define. Please add this information.')
459 args.append('all')
460 return
461
462
463 if args[0] not in self._plot_mode:
464 self.set_run_name(args[0], level='plot')
465 del args[0]
466 if len(args) == 0:
467 args.append('all')
468 elif not self.run_name:
469 self.help_plot()
470 raise self.InvalidCmd('No run name currently define. Please add this information.')
471
472 for arg in args:
473 if arg not in self._plot_mode and arg != self.run_name:
474 self.help_plot()
475 raise self.InvalidCmd('unknown options %s' % arg)
476
478 """Check the argument for pythia command
479 syntax: pgs [NAME]
480 Note that other option are already remove at this point
481 """
482
483
484 if not self.options['pythia-pgs_path']:
485 logger.info('Retry to read configuration file to find pythia-pgs path')
486 self.set_configuration()
487
488 if not self.options['pythia-pgs_path'] or not \
489 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
490 error_msg = 'No pythia-pgs path correctly set.'
491 error_msg += 'Please use the set command to define the path and retry.'
492 error_msg += 'You can also define it in the configuration file.'
493 raise self.InvalidCmd(error_msg)
494
495 tag = [a for a in arg if a.startswith('--tag=')]
496 if tag:
497 arg.remove(tag[0])
498 tag = tag[0][6:]
499
500
501 if len(arg) == 0 and not self.run_name:
502 if self.results.lastrun:
503 arg.insert(0, self.results.lastrun)
504 else:
505 raise self.InvalidCmd('No run name currently define. Please add this information.')
506
507 if len(arg) == 1 and self.run_name == arg[0]:
508 arg.pop(0)
509
510 if not len(arg) and \
511 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
512 self.help_pgs()
513 raise self.InvalidCmd('''No file file pythia_events.hep currently available
514 Please specify a valid run_name''')
515
516 lock = None
517 if len(arg) == 1:
518 prev_tag = self.set_run_name(arg[0], tag, 'pgs')
519 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
520
521 if not filenames:
522 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
523 else:
524 input_file = filenames[0]
525 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
526 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
527 argument=['-c', input_file],
528 close_fds=True)
529 else:
530 if tag:
531 self.run_card['run_tag'] = tag
532 self.set_run_name(self.run_name, tag, 'pgs')
533
534 return lock
535
536
538 """Check the argument for pythia command
539 syntax: delphes [NAME]
540 Note that other option are already remove at this point
541 """
542
543
544 if not self.options['delphes_path']:
545 logger.info('Retry to read configuration file to find delphes path')
546 self.set_configuration()
547
548 if not self.options['delphes_path']:
549 error_msg = 'No delphes path correctly set.'
550 error_msg += 'Please use the set command to define the path and retry.'
551 error_msg += 'You can also define it in the configuration file.'
552 raise self.InvalidCmd(error_msg)
553
554 tag = [a for a in arg if a.startswith('--tag=')]
555 if tag:
556 arg.remove(tag[0])
557 tag = tag[0][6:]
558
559
560 if len(arg) == 0 and not self.run_name:
561 if self.results.lastrun:
562 arg.insert(0, self.results.lastrun)
563 else:
564 raise self.InvalidCmd('No run name currently define. Please add this information.')
565
566 if len(arg) == 1 and self.run_name == arg[0]:
567 arg.pop(0)
568
569 if not len(arg) and \
570 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
571 self.help_pgs()
572 raise self.InvalidCmd('''No file file pythia_events.hep currently available
573 Please specify a valid run_name''')
574
575 if len(arg) == 1:
576 prev_tag = self.set_run_name(arg[0], tag, 'delphes')
577 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))
578
579
580 if not filenames:
581 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
582 % (self.run_name, prev_tag,
583 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
584 else:
585 input_file = filenames[0]
586 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
587 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
588 argument=['-c', input_file],
589 close_fds=True)
590 else:
591 if tag:
592 self.run_card['run_tag'] = tag
593 self.set_run_name(self.run_name, tag, 'delphes')
594
596 """check the validity of the line. args is ORDER,
597 ORDER being LO or NLO. If no mode is passed, NLO is used"""
598
599
600
601 if options['force']:
602 self.force = True
603
604 if not args:
605 args.append('NLO')
606 return
607
608 if len(args) > 1:
609 self.help_calculate_xsect()
610 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
611
612 elif len(args) == 1:
613 if not args[0] in ['NLO', 'LO']:
614 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
615 mode = args[0]
616
617
618 if options['multicore'] and options['cluster']:
619 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
620 ' are not compatible. Please choose one.'
621
622
624 """check the validity of the line. args is ORDER,
625 ORDER being LO or NLO. If no mode is passed, NLO is used"""
626
627
628
629 if not args:
630 args.append('NLO')
631 return
632
633 if len(args) > 1:
634 self.help_generate_events()
635 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
636
637 elif len(args) == 1:
638 if not args[0] in ['NLO', 'LO']:
639 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
640 mode = args[0]
641
642
643 if options['multicore'] and options['cluster']:
644 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
645 ' are not compatible. Please choose one.'
646
648 """check the validity of line"""
649
650 if len(args) == 0:
651 self.help_banner_run()
652 raise self.InvalidCmd('banner_run requires at least one argument.')
653
654 tag = [a[6:] for a in args if a.startswith('--tag=')]
655
656
657 if os.path.exists(args[0]):
658 type ='banner'
659 format = self.detect_card_type(args[0])
660 if format != 'banner':
661 raise self.InvalidCmd('The file is not a valid banner.')
662 elif tag:
663 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
664 (args[0], tag))
665 if not os.path.exists(args[0]):
666 raise self.InvalidCmd('No banner associates to this name and tag.')
667 else:
668 name = args[0]
669 type = 'run'
670 banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0]))
671 if not banners:
672 raise self.InvalidCmd('No banner associates to this name.')
673 elif len(banners) == 1:
674 args[0] = banners[0]
675 else:
676
677 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
678 tag = self.ask('which tag do you want to use?', tags[0], tags)
679 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
680 (args[0], tag))
681
682 run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
683 if run_name:
684 try:
685 self.exec_cmd('remove %s all banner -f' % run_name)
686 except Exception:
687 pass
688 self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
689 elif type == 'banner':
690 self.set_run_name(self.find_available_run_name(self.me_dir))
691 elif type == 'run':
692 if not self.results[name].is_empty():
693 run_name = self.find_available_run_name(self.me_dir)
694 logger.info('Run %s is not empty so will use run_name: %s' % \
695 (name, run_name))
696 self.set_run_name(run_name)
697 else:
698 try:
699 self.exec_cmd('remove %s all banner -f' % run_name)
700 except Exception:
701 pass
702 self.set_run_name(name)
703
704
705
707 """check the validity of the line. args is MODE
708 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
709
710
711
712 if options['force']:
713 self.force = True
714
715
716 if not args:
717 args.append('auto')
718 return
719
720 if len(args) > 1:
721 self.help_launch()
722 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
723
724 elif len(args) == 1:
725 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
726 raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0]
727 mode = args[0]
728
729
730 if options['multicore'] and options['cluster']:
731 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
732 ' are not compatible. Please choose one.'
733 if mode == 'NLO' and options['reweightonly']:
734 raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'
735
736
738 """check the validity of the line. args is MODE
739 MODE being FO or MC. If no mode is passed, MC is used"""
740
741
742
743 if options['force']:
744 self.force = True
745
746 if not args:
747 args.append('MC')
748 return
749
750 if len(args) > 1:
751 self.help_compile()
752 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
753
754 elif len(args) == 1:
755 if not args[0] in ['MC', 'FO']:
756 raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
757 mode = args[0]
758
759
760
761
762
763
764
766 """ The Series of help routine for the MadGraphCmd"""
767
769 """auto-completion for launch command"""
770
771 args = self.split_arg(line[0:begidx])
772 if len(args) == 1:
773
774 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
775 elif len(args) == 2 and line[begidx-1] == '@':
776 return self.list_completion(text,['LO','NLO'],line)
777 else:
778 opts = []
779 for opt in _launch_parser.option_list:
780 opts += opt._long_opts + opt._short_opts
781 return self.list_completion(text, opts, line)
782
784 "Complete the banner run command"
785 try:
786
787
788 args = self.split_arg(line[0:begidx], error=False)
789
790 if args[-1].endswith(os.path.sep):
791 return self.path_completion(text,
792 os.path.join('.',*[a for a in args \
793 if a.endswith(os.path.sep)]))
794
795
796 if len(args) > 1:
797
798 tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1]))
799 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]
800
801 if args[-1] != '--tag=':
802 tags = ['--tag=%s' % t for t in tags]
803 else:
804 return self.list_completion(text, tags)
805 return self.list_completion(text, tags +['--name=','-f'], line)
806
807
808 possibilites = {}
809
810 comp = self.path_completion(text, os.path.join('.',*[a for a in args \
811 if a.endswith(os.path.sep)]))
812 if os.path.sep in line:
813 return comp
814 else:
815 possibilites['Path from ./'] = comp
816
817 run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events'))
818 run_list = [n.rsplit('/',2)[1] for n in run_list]
819 possibilites['RUN Name'] = self.list_completion(text, run_list)
820
821 return self.deal_multiple_categories(possibilites, formatting)
822
823
824 except Exception, error:
825 print error
826
827
840
853
855 """auto-completion for generate_events command
856 call the compeltion for launch"""
857 self.complete_launch(text, line, begidx, endidx)
858
859
869
885
887 "Complete the pgs command"
888 args = self.split_arg(line[0:begidx], error=False)
889 if len(args) == 1:
890
891 data = misc.glob(pjoin('*', 'events_*.hep.gz'),
892 pjoin(self.me_dir, 'Events'))
893 data = [n.rsplit('/',2)[1] for n in data]
894 tmp1 = self.list_completion(text, data)
895 if not self.run_name:
896 return tmp1
897 else:
898 tmp2 = self.list_completion(text, self._run_options + ['-f',
899 '--tag=' ,'--no_default'], line)
900 return tmp1 + tmp2
901 else:
902 return self.list_completion(text, self._run_options + ['-f',
903 '--tag=','--no_default'], line)
904
905 complete_delphes = complete_pgs
906
909
910
911
912
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # values accepted as a fortran/python 'true' when reading cards
    true = ['T','.true.',True,'true']
    # run-mode options shared by the run commands
    _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
    # valid arguments of the 'plot' command
    _plot_mode = ['all', 'parton','shower','pgs','delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']

    # default execution state
    web = False            # True when driven by the web interface
    cluster_mode = 0       # 0: single core, 1: cluster, 2: multicore
    queue = 'madgraph'
    nb_core = None
    make_opts_var = {}

    # suggested follow-up commands shown by the interactive help
    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower' : ['generate_events [OPTIONS]']
    }
941
942
943
    def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
        """ add information to the cmd """

        # NOTE(review): mutable default argument for 'options' — safe only
        # as long as neither this chain nor CmdExtended mutates the shared
        # dict; confirm before refactoring to options=None.
        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)

        # interface state: prompt shows the process-directory name
        self.mode = 'aMCatNLO'
        self.nb_core = 0
        self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))

        # load the results database of previous runs for this directory
        self.load_results_db()
        self.results.def_web_mode(self.web)

        # enforce the gfortran version check unless the process card marks
        # a real-emission-only ([real=QCD]) process
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)
963
964
965
967 """ run the shower on a given parton level file """
968 argss = self.split_arg(line)
969 (options, argss) = _launch_parser.parse_args(argss)
970
971 options = options.__dict__
972 options['reweightonly'] = False
973 self.check_shower(argss, options)
974 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
975 self.ask_run_configuration('onlyshower', options)
976 self.run_mcatnlo(evt_file, options)
977
978 self.update_status('', level='all', update_results=True)
979
980
982 """Create the plot for a given run"""
983
984
985 args = self.split_arg(line)
986
987 self.check_plot(args)
988 logger.info('plot for run %s' % self.run_name)
989
990 if not self.force:
991 self.ask_edit_cards([], args, plot=True)
992
993 if any([arg in ['parton'] for arg in args]):
994 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
995 if os.path.exists(filename+'.gz'):
996 misc.gunzip(filename)
997 if os.path.exists(filename):
998 logger.info('Found events.lhe file for run %s' % self.run_name)
999 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
1000 self.create_plot('parton')
1001 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
1002 misc.gzip(filename)
1003
1004 if any([arg in ['all','parton'] for arg in args]):
1005 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
1006 if os.path.exists(filename):
1007 logger.info('Found MADatNLO.top file for run %s' % \
1008 self.run_name)
1009 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html')
1010 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')
1011
1012 if not os.path.isdir(plot_dir):
1013 os.makedirs(plot_dir)
1014 top_file = pjoin(plot_dir, 'plots.top')
1015 files.cp(filename, top_file)
1016 madir = self.options['madanalysis_path']
1017 tag = self.run_card['run_tag']
1018 td = self.options['td_path']
1019 misc.call(['%s/plot' % self.dirbin, madir, td],
1020 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1021 stderr = subprocess.STDOUT,
1022 cwd=plot_dir)
1023
1024 misc.call(['%s/plot_page-pl' % self.dirbin,
1025 os.path.basename(plot_dir),
1026 'parton'],
1027 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1028 stderr = subprocess.STDOUT,
1029 cwd=pjoin(self.me_dir, 'HTML', self.run_name))
1030 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'),
1031 output)
1032
1033 os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))
1034
1035 if any([arg in ['all','shower'] for arg in args]):
1036 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name))
1037 if len(filenames) != 1:
1038 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
1039 if len(filenames) != 1:
1040 logger.info('No shower level file found for run %s' % \
1041 self.run_name)
1042 return
1043 filename = filenames[0]
1044 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1045
1046 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
1047 if aMCatNLO and not self.options['mg5_path']:
1048 raise "plotting NLO HEP file needs MG5 utilities"
1049
1050 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'),
1051 pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
1052 self.run_hep2lhe()
1053 else:
1054 filename = filenames[0]
1055 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1056
1057 self.create_plot('shower')
1058 lhe_file_name = filename.replace('.hep.gz', '.lhe')
1059 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'),
1060 lhe_file_name)
1061 misc.gzip(lhe_file_name)
1062
1063 if any([arg in ['all','pgs'] for arg in args]):
1064 filename = pjoin(self.me_dir, 'Events', self.run_name,
1065 '%s_pgs_events.lhco' % self.run_tag)
1066 if os.path.exists(filename+'.gz'):
1067 misc.gunzip(filename)
1068 if os.path.exists(filename):
1069 self.create_plot('PGS')
1070 misc.gzip(filename)
1071 else:
1072 logger.info('No valid files for pgs plot')
1073
1074 if any([arg in ['all','delphes'] for arg in args]):
1075 filename = pjoin(self.me_dir, 'Events', self.run_name,
1076 '%s_delphes_events.lhco' % self.run_tag)
1077 if os.path.exists(filename+'.gz'):
1078 misc.gunzip(filename)
1079 if os.path.exists(filename):
1080
1081 self.create_plot('Delphes')
1082
1083 misc.gzip(filename)
1084 else:
1085 logger.info('No valid files for delphes plot')
1086
1087
1088
1090 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
1091 this function wraps the do_launch one"""
1092
1093 self.start_time = time.time()
1094 argss = self.split_arg(line)
1095
1096 (options, argss) = _calculate_xsect_parser.parse_args(argss)
1097 options = options.__dict__
1098 options['reweightonly'] = False
1099 options['parton'] = True
1100 self.check_calculate_xsect(argss, options)
1101 self.do_launch(line, options, argss)
1102
1103
1105 """Make a run from the banner file"""
1106
1107 args = self.split_arg(line)
1108
1109 self.check_banner_run(args)
1110
1111
1112 for name in ['shower_card.dat', 'madspin_card.dat']:
1113 try:
1114 os.remove(pjoin(self.me_dir, 'Cards', name))
1115 except Exception:
1116 pass
1117
1118 banner_mod.split_banner(args[0], self.me_dir, proc_card=False)
1119
1120
1121 if not self.force:
1122 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
1123 if ans == 'n':
1124 self.force = True
1125
1126
1127 if self.force:
1128 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
1129 banner = banner_mod.Banner(args[0])
1130 for line in banner['run_settings']:
1131 if '=' in line:
1132 mode, value = [t.strip() for t in line.split('=')]
1133 mode_status[mode] = value
1134 else:
1135 mode_status = {}
1136
1137
1138 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
1139 switch=mode_status)
1140
1141
1143 """Main commands: generate events
1144 this function just wraps the do_launch one"""
1145 self.do_launch(line)
1146
1147
1148
1150 """Advanced commands: this is for creating the correct run_card.inc from the nlo format"""
1151
1152 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
1153
1154
1155
1156
1157
1158 if mode in ['LO', 'NLO']:
1159 name = 'fo_lhe_weight_ratio'
1160 FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir,'Cards', 'FO_analyse_card.dat'))
1161 if name in FO_card:
1162 self.run_card.set(name, FO_card[name], user=False)
1163 name = 'fo_lhe_postprocessing'
1164 if name in FO_card:
1165 self.run_card.set(name, FO_card[name], user=False)
1166
1167 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1168
1169
1171 """assign all configuration variable from file
1172 loop over the different config file if config_file not define """
1173 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1174
1175
1176 - def do_launch(self, line, options={}, argss=[], switch={}):
1177 """Main commands: launch the full chain
1178 options and args are relevant if the function is called from other
1179 functions, such as generate_events or calculate_xsect
1180 mode gives the list of switch needed for the computation (usefull for banner_run)
1181 """
1182
1183 if not argss and not options:
1184 self.start_time = time.time()
1185 argss = self.split_arg(line)
1186
1187 (options, argss) = _launch_parser.parse_args(argss)
1188 options = options.__dict__
1189 self.check_launch(argss, options)
1190
1191
1192 if 'run_name' in options.keys() and options['run_name']:
1193 self.run_name = options['run_name']
1194
1195
1196 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
1197 logger.warning('Removing old run information in \n'+
1198 pjoin(self.me_dir, 'Events', self.run_name))
1199 files.rm(pjoin(self.me_dir, 'Events', self.run_name))
1200 self.results.delete_run(self.run_name)
1201 else:
1202 self.run_name = ''
1203
1204 if options['multicore']:
1205 self.cluster_mode = 2
1206 elif options['cluster']:
1207 self.cluster_mode = 1
1208
1209 if not switch:
1210 mode = argss[0]
1211
1212 if mode in ['LO', 'NLO']:
1213 options['parton'] = True
1214 mode = self.ask_run_configuration(mode, options)
1215 else:
1216 mode = self.ask_run_configuration('auto', options, switch)
1217
1218 self.results.add_detail('run_mode', mode)
1219
1220 self.update_status('Starting run', level=None, update_results=True)
1221
1222 if self.options['automatic_html_opening']:
1223 misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
1224 self.options['automatic_html_opening'] = False
1225
1226 if '+' in mode:
1227 mode = mode.split('+')[0]
1228 self.compile(mode, options)
1229 evt_file = self.run(mode, options)
1230
1231 if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
1232 logger.info('No event file generated: grids have been set-up with a '\
1233 'relative precision of %s' % self.run_card['req_acc'])
1234 return
1235
1236 if not mode in ['LO', 'NLO']:
1237 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))
1238
1239 if self.run_card['systematics_program'] == 'systematics':
1240 self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments'])))
1241
1242 self.exec_cmd('reweight -from_cards', postcmd=False)
1243 self.exec_cmd('decay_events -from_cards', postcmd=False)
1244 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')
1245
1246 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
1247 and not options['parton']:
1248 self.run_mcatnlo(evt_file, options)
1249 self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False)
1250
1251 elif mode == 'noshower':
1252 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
1253 Please, shower the Les Houches events before using them for physics analyses.""")
1254
1255
1256 self.update_status('', level='all', update_results=True)
1257 if self.run_card['ickkw'] == 3 and \
1258 (mode in ['noshower'] or \
1259 (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
1260 logger.warning("""You are running with FxFx merging enabled.
1261 To be able to merge samples of various multiplicities without double counting,
1262 you have to remove some events after showering 'by hand'.
1263 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
1264
1265 self.store_result()
1266
1267 if self.param_card_iterator:
1268 param_card_iterator = self.param_card_iterator
1269 self.param_card_iterator = []
1270 param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
1271 error=self.results.current['error'])
1272 orig_name = self.run_name
1273
1274 with misc.TMP_variable(self, 'allow_notification_center', False):
1275 for i,card in enumerate(param_card_iterator):
1276 card.write(pjoin(self.me_dir,'Cards','param_card.dat'))
1277 self.check_param_card(pjoin(self.me_dir,'Cards','param_card.dat'), dependent=True)
1278 if not options['force']:
1279 options['force'] = True
1280 if options['run_name']:
1281 options['run_name'] = '%s_%s' % (orig_name, i+1)
1282 if not argss:
1283 argss = [mode, "-f"]
1284 elif argss[0] == "auto":
1285 argss[0] = mode
1286 self.do_launch("", options=options, argss=argss, switch=switch)
1287
1288 param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
1289 error=self.results.current['error'])
1290
1291 param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
1292 name = misc.get_scan_name(orig_name, self.run_name)
1293 path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
1294 logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK')
1295 param_card_iterator.write_summary(path)
1296
1297 if self.allow_notification_center:
1298 misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
1299 '%s: %s +- %s ' % (self.results.current['run_name'],
1300 self.results.current['cross'],
1301 self.results.current['error']))
1302
1303
1304
1306 """Advanced commands: just compile the executables """
1307 argss = self.split_arg(line)
1308
1309 (options, argss) = _compile_parser.parse_args(argss)
1310 options = options.__dict__
1311 options['reweightonly'] = False
1312 options['nocompile'] = False
1313 self.check_compile(argss, options)
1314
1315 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
1316 self.ask_run_configuration(mode, options)
1317 self.compile(mode, options)
1318
1319
1320 self.update_status('', level='all', update_results=True)
1321
1322
1324 """Update random number seed with the value from the run_card.
1325 If this is 0, update the number according to a fresh one"""
1326 iseed = self.run_card['iseed']
1327 if iseed == 0:
1328 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
1329 iseed = int(randinit.read()[2:]) + 1
1330 randinit.close()
1331 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
1332 randinit.write('r=%d' % iseed)
1333 randinit.close()
1334
1335
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created.

        mode is one of 'LO', 'NLO' (fixed order) or 'aMC@NLO', 'aMC@LO',
        'noshower', 'noshowerLO' (event generation); options is the parsed
        launch-option dictionary.  For fixed-order runs nothing is returned
        (results are finalised in place); for event-generation runs the
        path returned by reweight_and_collect_events is passed through.
        """
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # applgrid mode 2 re-uses existing grids, so force the
        # only-generation path for fixed-order runs
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()

        # glob patterns of the channel directories for each run mode
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                        'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        # one P* subprocess dir per non-empty line of subproc.mg
        p_dirs = [d for d in \
                  open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # remove results of a previous run with the same name
        self.clean_previous_results(options,p_dirs,folder_names[mode])

        # status labels for the three MINT steps (0, 1, 2)
        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']

        if options['reweightonly']:
            event_norm=self.run_card['event_norm']
            nevents=self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        if mode in ['LO', 'NLO']:
            # fixed-order run: iterate integration steps until the
            # requested accuracy is reached
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # distribute the applgrid grids to the channel directories
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options,mode_dict[mode],p_dirs)

            # create the list of job dictionaries and set up their directories
            integration_step=-1
            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                    req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
            self.prepare_directories(jobs_to_run,mode)

            # loop over integration steps; collect_the_results returns an
            # empty jobs_to_run once the required accuracy is reached
            while True:
                integration_step=integration_step+1
                self.run_all_jobs(jobs_to_run,integration_step)
                self.collect_log_files(jobs_to_run,integration_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                    jobs_to_collect,integration_step,mode,mode_dict[mode])
                if not jobs_to_run:
                    # no more jobs to run: accuracy reached
                    break

            self.finalise_run_FO(folder_names[mode],jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            # validate the nevents/req_acc combination from the run_card
            if nevents == 0 and req_acc < 0 :
                raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                        'of events, because 0 events requested. Please set '\
                                        'the "req_acc" parameter in the run_card to a value '\
                                        'between 0 and 1')
            elif req_acc >1 or req_acc == 0 :
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                        'be between larger than 0 and smaller than 1, '\
                                        'or set to -1 for automatic determination. Current '\
                                        'value is %f' % req_acc)
            # cap automatic accuracy determination for very large samples
            elif req_acc < 0 and nevents > 1000000 :
                req_acc=0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. '\
                                    'Please use one of the following: %s' \
                                    % (shower, ', '.join(shower_list)))

            # PYTHIA6PT cannot handle final-state radiation
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower','noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                    req_acc,mode_dict[mode],1,mode,fixed_order=False)

            # when restarting, refresh the jobs from the saved results;
            # otherwise create the working directories from scratch
            if options['only_generation']:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                    jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run,mode,fixed_order=False)

            # main loop over the three MINT steps: grid setup, upper
            # envelope, event generation
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
                self.collect_log_files(jobs_to_run,mint_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                    jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)
                if mint_step+1==2 and nevents==0:
                    # no events requested: stop after the upper envelope step
                    self.print_summary(options,2,mode)
                    return

            # sanity check on the generated event files
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                # give cluster file systems time to transfer the event files back
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm=self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1477
1478 - def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
1479 integration_step,mode,fixed_order=True):
1480 """Creates a list of dictionaries with all the jobs to be run"""
1481 jobs_to_run=[]
1482 if not options['only_generation']:
1483
1484
1485
1486 npoints = self.run_card['npoints_FO_grid']
1487 niters = self.run_card['niters_FO_grid']
1488 for p_dir in p_dirs:
1489 try:
1490 with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
1491 channels=chan_file.readline().split()
1492 except IOError:
1493 logger.warning('No integration channels found for contribution %s' % p_dir)
1494 continue
1495 if fixed_order:
1496 lch=len(channels)
1497 maxchannels=20
1498 if self.run_card['iappl'] != 0: maxchannels=1
1499 njobs=(int(lch/maxchannels)+1 if lch%maxchannels!= 0 \
1500 else int(lch/maxchannels))
1501 for nj in range(1,njobs+1):
1502 job={}
1503 job['p_dir']=p_dir
1504 job['channel']=str(nj)
1505 job['nchans']=(int(lch/njobs)+1 if nj <= lch%njobs else int(lch/njobs))
1506 job['configs']=' '.join(channels[:job['nchans']])
1507 del channels[:job['nchans']]
1508 job['split']=0
1509 if req_acc == -1:
1510 job['accuracy']=0
1511 job['niters']=niters
1512 job['npoints']=npoints
1513 elif req_acc > 0:
1514 job['accuracy']=0.05
1515 job['niters']=6
1516 job['npoints']=-1
1517 else:
1518 raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
1519 'between 0 and 1 or set it equal to -1.')
1520 job['mint_mode']=0
1521 job['run_mode']=run_mode
1522 job['wgt_frac']=1.0
1523 job['wgt_mult']=1.0
1524 jobs_to_run.append(job)
1525 if channels:
1526 raise aMCatNLOError('channels is not empty %s' % channels)
1527 else:
1528 for channel in channels:
1529 job={}
1530 job['p_dir']=p_dir
1531 job['channel']=channel
1532 job['split']=0
1533 job['accuracy']=0.03
1534 job['niters']=12
1535 job['npoints']=-1
1536 job['mint_mode']=0
1537 job['run_mode']=run_mode
1538 job['wgt_frac']=1.0
1539 jobs_to_run.append(job)
1540 jobs_to_collect=copy.copy(jobs_to_run)
1541 else:
1542
1543 try:
1544 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'rb') as f:
1545 jobs_to_collect=pickle.load(f)
1546 for job in jobs_to_collect:
1547 job['dirname']=pjoin(self.me_dir,'SubProcesses',job['dirname'].rsplit('/SubProcesses/',1)[1])
1548 jobs_to_run=copy.copy(jobs_to_collect)
1549 except:
1550 raise aMCatNLOError('Cannot reconstruct saved job status in %s' % \
1551 pjoin(self.me_dir,'SubProcesses','job_status.pkl'))
1552
1553 if fixed_order:
1554 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
1555 jobs_to_collect,integration_step,mode,run_mode)
1556
1557 integration_step=1
1558 for job in jobs_to_run:
1559 while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
1560 integration_step=integration_step+1
1561 integration_step=integration_step-1
1562 else:
1563 self.append_the_results(jobs_to_collect,integration_step)
1564 return jobs_to_run,jobs_to_collect,integration_step
1565
1567 """Set-up the G* directories for running"""
1568 name_suffix={'born' :'B' , 'all':'F'}
1569 for job in jobs_to_run:
1570 if job['split'] == 0:
1571 if fixed_order :
1572 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1573 job['run_mode']+'_G'+job['channel'])
1574 else:
1575 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1576 'G'+name_suffix[job['run_mode']]+job['channel'])
1577 else:
1578 if fixed_order :
1579 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1580 job['run_mode']+'_G'+job['channel']+'_'+str(job['split']))
1581 else:
1582 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1583 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split']))
1584 job['dirname']=dirname
1585 if not os.path.isdir(dirname):
1586 os.makedirs(dirname)
1587 self.write_input_file(job,fixed_order)
1588
1589 if not fixed_order:
1590 if job['split'] != 0:
1591 for f in ['grid.MC_integer','mint_grids','res_1']:
1592 if not os.path.isfile(pjoin(job['dirname'],f)):
1593 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
1594 else:
1595 if job['split'] != 0:
1596 for f in ['grid.MC_integer','mint_grids']:
1597 files.cp(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
1598
1599
1634
1635
1636 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
1637 """Loops over the jobs_to_run and executes them using the function 'run_exe'"""
1638 if fixed_order:
1639 if integration_step == 0:
1640 self.update_status('Setting up grids', level=None)
1641 else:
1642 self.update_status('Refining results, step %i' % integration_step, level=None)
1643 self.ijob = 0
1644 name_suffix={'born' :'B', 'all':'F'}
1645 if fixed_order:
1646 run_type="Fixed order integration step %s" % integration_step
1647 else:
1648 run_type="MINT step %s" % integration_step
1649 self.njobs=len(jobs_to_run)
1650 for job in jobs_to_run:
1651 executable='ajob1'
1652 if fixed_order:
1653 arguments=[job['channel'],job['run_mode'], \
1654 str(job['split']),str(integration_step)]
1655 else:
1656 arguments=[job['channel'],name_suffix[job['run_mode']], \
1657 str(job['split']),str(integration_step)]
1658 self.run_exe(executable,arguments,run_type,
1659 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir']))
1660
1661 if self.cluster_mode == 2:
1662 time.sleep(1)
1663 self.wait_for_complete(run_type)
1664
1665
    def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
                            integration_step,mode,run_mode,fixed_order=True):
        """Collect the results, make HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list
        of the jobs that still need to be run, as well as the
        complete list of jobs that need to be collected to get the
        final answer.
        """
        # read back the res_*.dat results and write the res.txt summary;
        # cross_sect_dict holds the running totals used elsewhere
        self.append_the_results(jobs_to_run,integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)

        # update the HTML results pages and the global cross/error numbers
        if fixed_order:
            cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode],
                                                           jobs=jobs_to_collect)
        else:
            name_suffix={'born' :'B' , 'all':'F'}
            cross, error = self.make_make_all_html_results(folder_names=['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)

        # merge back split fixed-order jobs before deciding the next step
        if fixed_order:
            jobs_to_run=self.combine_split_order_run(jobs_to_run)

        # decide which jobs need another iteration (may be empty when done)
        jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)

        if fixed_order:
            # checkpoint the job list so a later run with
            # 'only_generation' can restart from here
            with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
                pickle.dump(jobs_to_collect,f)

        if (not jobs_to_run_new) and fixed_order:
            # fixed-order run finished: print the final summary
            scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
            return jobs_to_run_new,jobs_to_collect
        elif jobs_to_run_new:
            # intermediate summary: more iterations still to come
            scale_pdf_info=[]
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
        else:
            # (N)LO+PS run is done, but the summary is printed only after
            # reweighting and event collection, so stay silent here
            scale_pdf_info=[]

        # prepare the next integration/MINT step
        if (not fixed_order) and integration_step+1 == 2 :
            # next MINT step is event generation: checkpoint, split jobs by
            # requested events, and write the bookkeeping files
            with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
                pickle.dump(jobs_to_collect,f)
            jobs_to_run_new,jobs_to_collect_new= \
                self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            # fixed order with adaptive accuracy: split long-running jobs
            # over more cores before the next iteration
            if fixed_order and self.run_card['iappl'] == 0 \
               and self.run_card['req_acc_FO'] > 0:
                jobs_to_run_new,jobs_to_collect= \
                    self.split_jobs_fixed_order(jobs_to_run_new,jobs_to_collect)
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            jobs_to_collect_new=jobs_to_collect
        return jobs_to_run_new,jobs_to_collect_new
1732
1733
1735 """writes the nevents_unweighted file in the SubProcesses directory.
1736 We also need to write the jobs that will generate 0 events,
1737 because that makes sure that the cross section from those channels
1738 is taken into account in the event weights (by collect_events.f).
1739 """
1740 content=[]
1741 for job in jobs:
1742 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
1743 lhefile=pjoin(path,'events.lhe')
1744 content.append(' %s %d %9e %9e' % \
1745 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac']))
1746 for job in jobs0events:
1747 if job['nevents']==0:
1748 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
1749 lhefile=pjoin(path,'events.lhe')
1750 content.append(' %s %d %9e %9e' % \
1751 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.))
1752 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f:
1753 f.write('\n'.join(content)+'\n')
1754
1756 """write the nevts files in the SubProcesses/P*/G*/ directories"""
1757 for job in jobs:
1758 with open(pjoin(job['dirname'],'nevts'),'w') as f:
1759 if self.run_card['event_norm'].lower()=='bias':
1760 f.write('%i %f\n' % (job['nevents'],self.cross_sect_dict['xseca']))
1761 else:
1762 f.write('%i\n' % job['nevents'])
1763
1765 """Combines jobs and grids from split jobs that have been run"""
1766
1767
1768
1769 jobgroups_to_combine=[]
1770 jobs_to_run_new=[]
1771 for job in jobs_to_run:
1772 if job['split'] == 0:
1773 job['combined']=1
1774 jobs_to_run_new.append(job)
1775 elif job['split'] == 1:
1776 jobgroups_to_combine.append(filter(lambda j: j['p_dir'] == job['p_dir'] and \
1777 j['channel'] == job['channel'], jobs_to_run))
1778 else:
1779 continue
1780 for job_group in jobgroups_to_combine:
1781
1782 self.combine_split_order_grids(job_group)
1783 jobs_to_run_new.append(self.combine_split_order_jobs(job_group))
1784 return jobs_to_run_new
1785
1787 """combine the jobs in job_group and return a single summed job"""
1788
1789 sum_job=copy.copy(job_group[0])
1790
1791 sum_job['dirname']=pjoin(sum_job['dirname'].rsplit('_',1)[0])
1792 sum_job['split']=0
1793 sum_job['wgt_mult']=1.0
1794 sum_job['combined']=len(job_group)
1795
1796 keys=['niters_done','npoints_done','niters','npoints',\
1797 'result','resultABS','time_spend']
1798 keys2=['error','errorABS']
1799
1800 for key in keys2:
1801 sum_job[key]=math.pow(sum_job[key],2)
1802
1803 for i,job in enumerate(job_group):
1804 if i==0 : continue
1805 for key in keys:
1806 sum_job[key]+=job[key]
1807 for key in keys2:
1808 sum_job[key]+=math.pow(job[key],2)
1809 for key in keys2:
1810 sum_job[key]=math.sqrt(sum_job[key])
1811 sum_job['err_percABS'] = sum_job['errorABS']/sum_job['resultABS']*100.
1812 sum_job['err_perc'] = sum_job['error']/sum_job['result']*100.
1813 sum_job['niters']=int(sum_job['niters_done']/len(job_group))
1814 sum_job['niters_done']=int(sum_job['niters_done']/len(job_group))
1815 return sum_job
1816
1817
1819 """Combines the mint_grids and MC-integer grids from the split order
1820 jobs (fixed order only).
1821 """
1822 files_mint_grids=[]
1823 files_MC_integer=[]
1824 location=None
1825 for job in job_group:
1826 files_mint_grids.append(open(pjoin(job['dirname'],'mint_grids'),'r+'))
1827 files_MC_integer.append(open(pjoin(job['dirname'],'grid.MC_integer'),'r+'))
1828 if not location:
1829 location=pjoin(job['dirname'].rsplit('_',1)[0])
1830 else:
1831 if location != pjoin(job['dirname'].rsplit('_',1)[0]) :
1832 raise aMCatNLOError('Not all jobs have the same location. '\
1833 +'Cannot combine them.')
1834
1835
1836
1837 for j,fs in enumerate([files_mint_grids,files_MC_integer]):
1838 linesoffiles=[f.readlines() for f in fs]
1839 to_write=[]
1840 for rowgrp in zip(*linesoffiles):
1841 try:
1842
1843
1844
1845
1846
1847 is_integer = [[int(row.strip().split()[-1])] for row in rowgrp]
1848 floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
1849 floatgrps = zip(*floatsbyfile)
1850 special=[]
1851 for i,floatgrp in enumerate(floatgrps):
1852 if i==0:
1853 special.append(sum(floatgrp))
1854 elif i==1:
1855 special.append(math.sqrt(sum([err**2 for err in floatgrp])))
1856 elif i==2:
1857 special.append(int(sum(floatgrp)/len(floatgrp)))
1858 elif i==3:
1859 special.append(int(sum(floatgrp)))
1860 elif i==4:
1861 special.append(int(sum(floatgrp)/len(floatgrp)))
1862 else:
1863 raise aMCatNLOError('"mint_grids" files not in correct format. '+\
1864 'Cannot combine them.')
1865 to_write.append(" ".join(str(s) for s in special) + "\n")
1866 except ValueError:
1867
1868 floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
1869 floatgrps = zip(*floatsbyfile)
1870 averages = [sum(floatgrp)/len(floatgrp) for floatgrp in floatgrps]
1871 to_write.append(" ".join(str(a) for a in averages) + "\n")
1872
1873 for f in fs:
1874 f.close
1875
1876 if j==0:
1877 with open(pjoin(location,'mint_grids'),'w') as f:
1878 f.writelines(to_write)
1879 elif j==1:
1880 with open(pjoin(location,'grid.MC_integer'),'w') as f:
1881 f.writelines(to_write)
1882
1883
1885 """Looks in the jobs_to_run to see if there is the need to split the
1886 jobs, depending on the expected time they take. Updates
1887 jobs_to_run and jobs_to_collect to replace the split-job by
1888 its splits.
1889 """
1890
1891 if self.options['run_mode'] ==2:
1892 nb_submit = int(self.options['nb_core'])
1893 elif self.options['run_mode'] ==1:
1894 nb_submit = int(self.options['cluster_size'])
1895 else:
1896 nb_submit =1
1897
1898 time_expected=0
1899 for job in jobs_to_run:
1900 time_expected+=job['time_spend']*(job['niters']*job['npoints'])/ \
1901 (job['niters_done']*job['npoints_done'])
1902
1903
1904 time_per_job=time_expected/(nb_submit*(1+len(jobs_to_run)/2))
1905 jobs_to_run_new=[]
1906 jobs_to_collect_new=copy.copy(jobs_to_collect)
1907 for job in jobs_to_run:
1908
1909
1910
1911 for j in filter(lambda j: j['p_dir'] == job['p_dir'] and \
1912 j['channel'] == job['channel'], jobs_to_collect_new):
1913 jobs_to_collect_new.remove(j)
1914 time_expected=job['time_spend']*(job['niters']*job['npoints'])/ \
1915 (job['niters_done']*job['npoints_done'])
1916
1917
1918
1919 if time_expected > max(2*job['time_spend']/job['combined'],time_per_job):
1920
1921 nsplit=min(max(int(time_expected/max(2*job['time_spend']/job['combined'],time_per_job)),2),nb_submit)
1922 for i in range(1,nsplit+1):
1923 job_new=copy.copy(job)
1924 job_new['split']=i
1925 job_new['wgt_mult']=1./float(nsplit)
1926 job_new['dirname']=job['dirname']+'_%i' % job_new['split']
1927 job_new['accuracy']=min(job['accuracy']*math.sqrt(float(nsplit)),0.1)
1928 if nsplit >= job['niters']:
1929 job_new['npoints']=int(job['npoints']*job['niters']/nsplit)
1930 job_new['niters']=1
1931 else:
1932 job_new['npoints']=int(job['npoints']/nsplit)
1933 jobs_to_collect_new.append(job_new)
1934 jobs_to_run_new.append(job_new)
1935 else:
1936 jobs_to_collect_new.append(job)
1937 jobs_to_run_new.append(job)
1938 return jobs_to_run_new,jobs_to_collect_new
1939
1940
1942 """Looks in the jobs_to_run to see if there is the need to split the
1943 event generation step. Updates jobs_to_run and
1944 jobs_to_collect to replace the split-job by its
1945 splits. Also removes jobs that do not need any events.
1946 """
1947 nevt_job=self.run_card['nevt_job']
1948 if nevt_job > 0:
1949 jobs_to_collect_new=copy.copy(jobs_to_collect)
1950 for job in jobs_to_run:
1951 nevents=job['nevents']
1952 if nevents == 0:
1953 jobs_to_collect_new.remove(job)
1954 elif nevents > nevt_job:
1955 jobs_to_collect_new.remove(job)
1956 if nevents % nevt_job != 0 :
1957 nsplit=int(nevents/nevt_job)+1
1958 else:
1959 nsplit=int(nevents/nevt_job)
1960 for i in range(1,nsplit+1):
1961 job_new=copy.copy(job)
1962 left_over=nevents % nsplit
1963 if i <= left_over:
1964 job_new['nevents']=int(nevents/nsplit)+1
1965 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
1966 else:
1967 job_new['nevents']=int(nevents/nsplit)
1968 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
1969 job_new['split']=i
1970 job_new['dirname']=job['dirname']+'_%i' % job_new['split']
1971 jobs_to_collect_new.append(job_new)
1972 jobs_to_run_new=copy.copy(jobs_to_collect_new)
1973 else:
1974 jobs_to_run_new=copy.copy(jobs_to_collect)
1975 for job in jobs_to_collect:
1976 if job['nevents'] == 0:
1977 jobs_to_run_new.remove(job)
1978 jobs_to_collect_new=copy.copy(jobs_to_run_new)
1979
1980 return jobs_to_run_new,jobs_to_collect_new
1981
1982
1984 """
1985 For (N)LO+PS: determines the number of events and/or the required
1986 accuracy per job.
1987 For fixed order: determines which jobs need higher precision and
1988 returns those with the newly requested precision.
1989 """
1990 err=self.cross_sect_dict['errt']
1991 tot=self.cross_sect_dict['xsect']
1992 errABS=self.cross_sect_dict['erra']
1993 totABS=self.cross_sect_dict['xseca']
1994 jobs_new=[]
1995 if fixed_order:
1996 if req_acc == -1:
1997 if step+1 == 1:
1998 npoints = self.run_card['npoints_FO']
1999 niters = self.run_card['niters_FO']
2000 for job in jobs:
2001 job['mint_mode']=-1
2002 job['niters']=niters
2003 job['npoints']=npoints
2004 jobs_new.append(job)
2005 elif step+1 == 2:
2006 pass
2007 elif step+1 > 2:
2008 raise aMCatNLOError('Cannot determine number of iterations and PS points '+
2009 'for integration step %i' % step )
2010 elif ( req_acc > 0 and err/abs(tot) > req_acc*1.2 ) or step <= 0:
2011 req_accABS=req_acc*abs(tot)/totABS
2012 for job in jobs:
2013 job['mint_mode']=-1
2014
2015 job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
2016
2017
2018 if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
2019 and not (step==-1 and self.run_card['iappl'] == 2):
2020 continue
2021
2022 itmax_fl=job['niters_done']*math.pow(job['errorABS']/
2023 (job['accuracy']*job['resultABS']),2)
2024 if itmax_fl <= 4.0 :
2025 job['niters']=max(int(round(itmax_fl)),2)
2026 job['npoints']=job['npoints_done']*2
2027 elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
2028 job['niters']=4
2029 job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
2030 else:
2031 if itmax_fl > 100.0 : itmax_fl=50.0
2032 job['niters']=int(round(math.sqrt(itmax_fl)))
2033 job['npoints']=int(round(job['npoints_done']*itmax_fl/
2034 round(math.sqrt(itmax_fl))))*2
2035
2036 jobs_new.append(job)
2037 return jobs_new
2038 elif step+1 <= 2:
2039 nevents=self.run_card['nevents']
2040
2041 if req_acc<0:
2042 req_acc2_inv=nevents
2043 else:
2044 req_acc2_inv=1/(req_acc*req_acc)
2045 if step+1 == 1 or step+1 == 2 :
2046
2047 for job in jobs:
2048 accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
2049 job['accuracy']=accuracy
2050 if step+1 == 2:
2051
2052
2053 r=self.get_randinit_seed()
2054 random.seed(r)
2055 totevts=nevents
2056 for job in jobs:
2057 job['nevents'] = 0
2058 while totevts :
2059 target = random.random() * totABS
2060 crosssum = 0.
2061 i = 0
2062 while i<len(jobs) and crosssum < target:
2063 job = jobs[i]
2064 crosssum += job['resultABS']
2065 i += 1
2066 totevts -= 1
2067 i -= 1
2068 jobs[i]['nevents'] += 1
2069 for job in jobs:
2070 job['mint_mode']=step+1
2071 return jobs
2072 else:
2073 return []
2074
2075
2077 """ Get the random number seed from the randinit file """
2078 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit:
2079
2080 iseed = int(randinit.read()[2:])
2081 return iseed
2082
2083
2085 """Appends the results for each of the jobs in the job list"""
2086 error_found=False
2087 for job in jobs:
2088 try:
2089 if integration_step >= 0 :
2090 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file:
2091 results=res_file.readline().split()
2092 else:
2093
2094
2095 with open(pjoin(job['dirname'],'res.dat')) as res_file:
2096 results=res_file.readline().split()
2097 except IOError:
2098 if not error_found:
2099 error_found=True
2100 error_log=[]
2101 error_log.append(pjoin(job['dirname'],'log.txt'))
2102 continue
2103 job['resultABS']=float(results[0])
2104 job['errorABS']=float(results[1])
2105 job['result']=float(results[2])
2106 job['error']=float(results[3])
2107 job['niters_done']=int(results[4])
2108 job['npoints_done']=int(results[5])
2109 job['time_spend']=float(results[6])
2110 job['err_percABS'] = job['errorABS']/job['resultABS']*100.
2111 job['err_perc'] = job['error']/job['result']*100.
2112 if error_found:
2113 raise aMCatNLOError('An error occurred during the collection of results.\n' +
2114 'Please check the .log files inside the directories which failed:\n' +
2115 '\n'.join(error_log)+'\n')
2116
2117
2118
2120 """writes the res.txt files in the SubProcess dir"""
2121 jobs.sort(key = lambda job: -job['errorABS'])
2122 content=[]
2123 content.append('\n\nCross section per integration channel:')
2124 for job in jobs:
2125 content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
2126 content.append('\n\nABS cross section per integration channel:')
2127 for job in jobs:
2128 content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
2129 totABS=0
2130 errABS=0
2131 tot=0
2132 err=0
2133 for job in jobs:
2134 totABS+= job['resultABS']*job['wgt_frac']
2135 errABS+= math.pow(job['errorABS'],2)*job['wgt_frac']
2136 tot+= job['result']*job['wgt_frac']
2137 err+= math.pow(job['error'],2)*job['wgt_frac']
2138 if jobs:
2139 content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
2140 (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
2141 tot, math.sqrt(err), math.sqrt(err)/tot *100.))
2142 with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file:
2143 res_file.write('\n'.join(content))
2144 randinit=self.get_randinit_seed()
2145 return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\
2146 'erra':math.sqrt(errABS),'randinit':randinit}
2147
2148
2150 """read the scale_pdf_dependence.dat files and collects there results"""
2151 scale_pdf_info=[]
2152 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
2153 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
2154 evt_files=[]
2155 evt_wghts=[]
2156 for job in jobs:
2157 evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat'))
2158 evt_wghts.append(job['wgt_frac'])
2159 scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts)
2160 return scale_pdf_info
2161
2162
2164 """combines the plots and puts then in the Events/run* directory"""
2165 devnull = open(os.devnull, 'w')
2166
2167 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
2168 misc.call(['./combine_plots_FO.sh'] + folder_name, \
2169 stdout=devnull,
2170 cwd=pjoin(self.me_dir, 'SubProcesses'))
2171 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
2172 pjoin(self.me_dir, 'Events', self.run_name))
2173 logger.info('The results of this run and the TopDrawer file with the plots' + \
2174 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2175 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
2176 out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO')
2177 self.combine_plots_HwU(jobs,out)
2178 try:
2179 misc.call(['gnuplot','MADatNLO.gnuplot'],\
2180 stdout=devnull,stderr=devnull,\
2181 cwd=pjoin(self.me_dir, 'Events', self.run_name))
2182 except Exception:
2183 pass
2184 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
2185 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2186 elif self.analyse_card['fo_analysis_format'].lower() == 'root':
2187 misc.call(['./combine_root.sh'] + folder_name, \
2188 stdout=devnull,
2189 cwd=pjoin(self.me_dir, 'SubProcesses'))
2190 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
2191 pjoin(self.me_dir, 'Events', self.run_name))
2192 logger.info('The results of this run and the ROOT file with the plots' + \
2193 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2194 elif self.analyse_card['fo_analysis_format'].lower() == 'lhe':
2195 self.combine_FO_lhe(jobs)
2196 logger.info('The results of this run and the LHE File (to be used for plotting only)' + \
2197 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2198 else:
2199 logger.info('The results of this run' + \
2200 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2201
2203 """combine the various lhe file generated in each directory.
2204 They are two steps:
2205 1) banner
2206 2) reweight each sample by the factor written at the end of each file
2207 3) concatenate each of the new files (gzip those).
2208 """
2209
2210 logger.info('Combining lhe events for plotting analysis')
2211 start = time.time()
2212 self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']]
2213 output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
2214 if os.path.exists(output):
2215 os.remove(output)
2216
2217
2218
2219
2220
2221 text = open(pjoin(jobs[0]['dirname'],'header.txt'),'r').read()
2222 i1, i2 = text.find('<initrwgt>'),text.find('</initrwgt>')
2223 self.banner['initrwgt'] = text[10+i1:i2]
2224
2225
2226
2227
2228
2229
2230
2231 cross = sum(j['result'] for j in jobs)
2232 error = math.sqrt(sum(j['error'] for j in jobs))
2233 self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross)
2234 self.banner.write(output[:-3], close_tag=False)
2235 misc.gzip(output[:-3])
2236
2237
2238
2239 fsock = lhe_parser.EventFile(output,'a')
2240 if 'nogrouping' in self.run_card['fo_lhe_postprocessing']:
2241 fsock.eventgroup = False
2242 else:
2243 fsock.eventgroup = True
2244
2245 if 'norandom' in self.run_card['fo_lhe_postprocessing']:
2246 for job in jobs:
2247 dirname = job['dirname']
2248
2249 lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
2250 nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
2251
2252 ratio = cross/sumwgt
2253 lhe = lhe_parser.EventFile(pjoin(dirname,'events.lhe'))
2254 lhe.eventgroup = True
2255 for eventsgroup in lhe:
2256 neweventsgroup = []
2257 for i,event in enumerate(eventsgroup):
2258 event.rescale_weights(ratio)
2259 if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
2260 and event == neweventsgroup[-1]:
2261 neweventsgroup[-1].wgt += event.wgt
2262 for key in event.reweight_data:
2263 neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
2264 else:
2265 neweventsgroup.append(event)
2266 fsock.write_events(neweventsgroup)
2267 lhe.close()
2268 os.remove(pjoin(dirname,'events.lhe'))
2269 else:
2270 lhe = []
2271 lenlhe = []
2272 misc.sprint('need to combine %s event file' % len(jobs))
2273 globallhe = lhe_parser.MultiEventFile()
2274 globallhe.eventgroup = True
2275 for job in jobs:
2276 dirname = job['dirname']
2277 lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
2278 nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
2279 lastlhe = globallhe.add(pjoin(dirname,'events.lhe'),cross, 0, cross,
2280 nb_event=int(nb_event), scale=cross/sumwgt)
2281 for eventsgroup in globallhe:
2282 neweventsgroup = []
2283 for i,event in enumerate(eventsgroup):
2284 event.rescale_weights(event.sample_scale)
2285 if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
2286 and event == neweventsgroup[-1]:
2287 neweventsgroup[-1].wgt += event.wgt
2288 for key in event.reweight_data:
2289 neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
2290 else:
2291 neweventsgroup.append(event)
2292 fsock.write_events(neweventsgroup)
2293 globallhe.close()
2294 fsock.write('</LesHouchesEvents>\n')
2295 fsock.close()
2296 misc.sprint('combining lhe file done in ', time.time()-start)
2297 for job in jobs:
2298 dirname = job['dirname']
2299 os.remove(pjoin(dirname,'events.lhe'))
2300
2301
2302
2303 misc.sprint('combining lhe file done in ', time.time()-start)
2304
2305
2306
2307
2308
2309
2311 """Sums all the plots in the HwU format."""
2312 logger.debug('Combining HwU plots.')
2313
2314 command = []
2315 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py'))
2316 for job in jobs:
2317 if job['dirname'].endswith('.HwU'):
2318 command.append(job['dirname'])
2319 else:
2320 command.append(pjoin(job['dirname'],'MADatNLO.HwU'))
2321 command.append("--out="+out)
2322 command.append("--gnuplot")
2323 command.append("--band=[]")
2324 command.append("--lhapdf-config="+self.options['lhapdf'])
2325 if normalisation:
2326 command.append("--multiply="+(','.join([str(n) for n in normalisation])))
2327 command.append("--sum")
2328 command.append("--keep_all_weights")
2329 command.append("--no_open")
2330
2331 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir)
2332
2333 while p.poll() is None:
2334 line = p.stdout.readline()
2335 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']):
2336 print line[:-1]
2337 elif __debug__ and line:
2338 logger.debug(line[:-1])
2339
2340
2342 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories"""
2343 logger.debug('Combining APPLgrids \n')
2344 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),
2345 'applgrid-combine')
2346 all_jobs=[]
2347 for job in jobs:
2348 all_jobs.append(job['dirname'])
2349 ngrids=len(all_jobs)
2350 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
2351 for obs in range(0,nobs):
2352 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
2353
2354 if self.run_card["iappl"] == 1:
2355 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,
2356 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
2357 elif self.run_card["iappl"] == 2:
2358 unc2_inv=pow(cross/error,2)
2359 unc2_inv_ngrids=pow(cross/error,2)*ngrids
2360 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",
2361 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',
2362 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
2363 for job in all_jobs:
2364 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root"))
2365 else:
2366 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
2367
2368 for ggdir in gdir:
2369 os.remove(ggdir)
2370
2371
2373 """Distributes the APPLgrids ready to be filled by a second run of the code"""
2374
2375
2376 if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
2377 gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
2378 pjoin(self.me_dir,'Events'))
2379
2380 time_stamps={}
2381 for root_file in gfiles:
2382 time_stamps[root_file]=os.path.getmtime(root_file)
2383 options['appl_start_grid']= \
2384 max(time_stamps.iterkeys(), key=(lambda key:
2385 time_stamps[key])).split('/')[-2]
2386 logger.info('No --appl_start_grid option given. '+\
2387 'Guessing that start grid from run "%s" should be used.' \
2388 % options['appl_start_grid'])
2389
2390 if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
2391 self.appl_start_grid = options['appl_start_grid']
2392 start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
2393
2394 if not os.path.exists(pjoin(start_grid_dir,
2395 'aMCfast_obs_0_starting_grid.root')):
2396 raise self.InvalidCmd('APPLgrid file not found: %s' % \
2397 pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
2398 else:
2399 all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
2400 start_grid_dir) if name.endswith("_starting_grid.root")]
2401 nobs =len(all_grids)
2402 gstring=" ".join(all_grids)
2403 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
2404 raise self.InvalidCmd('No APPLgrid name currently defined.'+
2405 'Please provide this information.')
2406
2407 for pdir in p_dirs:
2408 g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
2409 "SubProcesses",pdir)) if file.startswith(mode+'_G') and
2410 os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
2411 for g_dir in g_dirs:
2412 for grid in all_grids:
2413 obs=grid.split('_')[-3]
2414 files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
2415 'grid_obs_'+obs+'_in.root'))
2416
2417
2418
2419
2421 """collect the log files and put them in a single, html-friendly file
2422 inside the Events/run_.../ directory"""
2423 log_file = pjoin(self.me_dir, 'Events', self.run_name,
2424 'alllogs_%d.html' % integration_step)
2425 outfile = open(log_file, 'w')
2426
2427 content = ''
2428 content += '<HTML><BODY>\n<font face="courier" size=2>'
2429 for job in jobs:
2430
2431 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step)
2432 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(
2433 pjoin(self.me_dir,'SubProcesses'),''))
2434
2435 content += '<font color="red">\n'
2436 content += '<br>LOG file for integration channel %s, %s <br>' % \
2437 (os.path.dirname(log).replace(pjoin(self.me_dir,
2438 'SubProcesses'), ''),
2439 integration_step)
2440 content += '</font>\n'
2441
2442
2443 with open(log) as l:
2444 content += '<PRE>\n' + l.read() + '\n</PRE>'
2445 content +='<br>\n'
2446 outfile.write(content)
2447 content=''
2448
2449 outfile.write('</font>\n</BODY></HTML>\n')
2450 outfile.close()
2451
2452
2454 """Combine the plots and put the res*.txt files in the Events/run.../ folder."""
2455
2456 res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses'))
2457 for res_file in res_files:
2458 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))
2459
2460 self.combine_plots_FO(folder_name,jobs)
2461
2462
2463 if self.run_card['iappl'] != 0:
2464 cross=self.cross_sect_dict['xsect']
2465 error=self.cross_sect_dict['errt']
2466 self.applgrid_combine(cross,error,jobs)
2467
2468
2470 """setup the number of cores for multicore, and the cluster-type for cluster runs"""
2471 if self.cluster_mode == 1:
2472 cluster_name = self.options['cluster_type']
2473 try:
2474 self.cluster = cluster.from_name[cluster_name](**self.options)
2475 except KeyError:
2476 if aMCatNLO and ('mg5_path' not in self.options or not self.options['mg5_path']):
2477 if not self.plugin_path:
2478 raise self.InvalidCmd('%s not native cluster type and no plugin directory available.' % cluster_name)
2479 elif aMCatNLO:
2480 mg5dir = self.options['mg5_path']
2481 if mg5dir not in sys.path:
2482 sys.path.append(mg5dir)
2483 if pjoin(mg5dir, 'PLUGIN') not in self.plugin_path:
2484 self.plugin_path.append(pjoin(mg5dir))
2485 else:
2486 mg5dir = MG5DIR
2487
2488
2489 for plugpath in self.plugin_path:
2490 plugindirname = os.path.basename(plugpath)
2491 for plug in os.listdir(plugpath):
2492 if os.path.exists(pjoin(plugpath, plug, '__init__.py')):
2493 try:
2494 __import__('%s.%s' % (plugindirname, plug))
2495 except Exception, error:
2496 logger.critical('plugin directory %s/%s fail to be loaded. Please check it',plugindirname, plug)
2497 continue
2498 plugin = sys.modules['%s.%s' % (plugindirname,plug)]
2499 if not hasattr(plugin, 'new_cluster'):
2500 continue
2501 if not misc.is_plugin_supported(plugin):
2502 continue
2503 if cluster_name in plugin.new_cluster:
2504 logger.info("cluster handling will be done with PLUGIN: %s" % plug,'$MG:color:BLACK')
2505 self.cluster = plugin.new_cluster[cluster_name](**self.options)
2506 break
2507
2508 if self.cluster_mode == 2:
2509 try:
2510 import multiprocessing
2511 if not self.nb_core:
2512 try:
2513 self.nb_core = int(self.options['nb_core'])
2514 except TypeError:
2515 self.nb_core = multiprocessing.cpu_count()
2516 logger.info('Using %d cores' % self.nb_core)
2517 except ImportError:
2518 self.nb_core = 1
2519 logger.warning('Impossible to detect the number of cores => Using One.\n'+
2520 'Use set nb_core X in order to set this number and be able to'+
2521 'run in multicore.')
2522
2523 self.cluster = cluster.MultiCore(**self.options)
2524
2525
2527 """Clean previous results.
2528 o. If doing only the reweighting step, do not delete anything and return directlty.
2529 o. Always remove all the G*_* files (from split event generation).
2530 o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only."""
2531 if options['reweightonly']:
2532 return
2533 if not options['only_generation']:
2534 self.update_status('Cleaning previous results', level=None)
2535 for dir in p_dirs:
2536
2537 for obj in folder_name:
2538
2539 to_rm = [file for file in \
2540 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
2541 if file.startswith(obj[:-1]) and \
2542 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
2543 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
2544
2545 to_always_rm = [file for file in \
2546 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
2547 if file.startswith(obj[:-1]) and
2548 '_' in file and not '_G' in file and \
2549 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
2550 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
2551
2552 if not options['only_generation']:
2553 to_always_rm.extend(to_rm)
2554 if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
2555 to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
2556 files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
2557 return
2558
2559
2560 - def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
2561 """print a summary of the results contained in self.cross_sect_dict.
2562 step corresponds to the mintMC step, if =2 (i.e. after event generation)
2563 some additional infos are printed"""
2564
2565 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
2566 process = ''
2567 for line in proc_card_lines:
2568 if line.startswith('generate') or line.startswith('add process'):
2569 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
2570 lpp = {0:'l', 1:'p', -1:'pbar'}
2571 if self.ninitial == 1:
2572 proc_info = '\n Process %s' % process[:-3]
2573 else:
2574 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
2575 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
2576 self.run_card['ebeam1'], self.run_card['ebeam2'])
2577
2578 if self.ninitial == 1:
2579 self.cross_sect_dict['unit']='GeV'
2580 self.cross_sect_dict['xsec_string']='(Partial) decay width'
2581 self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
2582 else:
2583 self.cross_sect_dict['unit']='pb'
2584 self.cross_sect_dict['xsec_string']='Total cross section'
2585 self.cross_sect_dict['axsec_string']='Total abs(cross section)'
2586 if self.run_card['event_norm'].lower()=='bias':
2587 self.cross_sect_dict['xsec_string']+=', incl. bias (DO NOT USE)'
2588
2589 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2590 status = ['Determining the number of unweighted events per channel',
2591 'Updating the number of unweighted events per channel',
2592 'Summary:']
2593 computed='(computed from LHE events)'
2594 elif mode in ['NLO', 'LO']:
2595 status = ['Results after grid setup:','Current results:',
2596 'Final results and run summary:']
2597 computed='(computed from histogram information)'
2598
2599 if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2600 message = status[step] + '\n\n Intermediate results:' + \
2601 ('\n Random seed: %(randinit)d' + \
2602 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
2603 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
2604 % self.cross_sect_dict
2605 elif mode in ['NLO','LO'] and not done:
2606 if step == 0:
2607 message = '\n ' + status[0] + \
2608 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2609 self.cross_sect_dict
2610 else:
2611 message = '\n ' + status[1] + \
2612 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2613 self.cross_sect_dict
2614
2615 else:
2616 message = '\n --------------------------------------------------------------'
2617 message = message + \
2618 '\n ' + status[2] + proc_info
2619 if mode not in ['LO', 'NLO']:
2620 message = message + \
2621 '\n Number of events generated: %s' % self.run_card['nevents']
2622 message = message + \
2623 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2624 self.cross_sect_dict
2625 message = message + \
2626 '\n --------------------------------------------------------------'
2627 if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
2628 if scale_pdf_info[0]:
2629
2630 message = message + '\n Scale variation %s:' % computed
2631 for s in scale_pdf_info[0]:
2632 if s['unc']:
2633 if self.run_card['ickkw'] != -1:
2634 message = message + \
2635 ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
2636 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
2637 else:
2638 message = message + \
2639 ('\n Soft and hard scale dependence (added in quadrature): '\
2640 '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s
2641
2642 else:
2643 message = message + \
2644 ('\n Dynamical_scale_choice %(label)i: '\
2645 '\n %(cen)8.3e pb') % s
2646
2647 if scale_pdf_info[1]:
2648 message = message + '\n PDF variation %s:' % computed
2649 for p in scale_pdf_info[1]:
2650 if p['unc']=='none':
2651 message = message + \
2652 ('\n %(name)s (central value only): '\
2653 '\n %(cen)8.3e pb') % p
2654
2655 elif p['unc']=='unknown':
2656 message = message + \
2657 ('\n %(name)s (%(size)s members; combination method unknown): '\
2658 '\n %(cen)8.3e pb') % p
2659 else:
2660 message = message + \
2661 ('\n %(name)s (%(size)s members; using %(unc)s method): '\
2662 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
2663
2664 message = message + \
2665 '\n --------------------------------------------------------------'
2666
2667
2668 if (mode in ['NLO', 'LO'] and not done) or \
2669 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
2670 logger.info(message+'\n')
2671 return
2672
2673
2674
2675
2676
2677 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2678 log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
2679 pjoin(self.me_dir, 'SubProcesses'))
2680 all_log_files = log_GV_files
2681 elif mode == 'NLO':
2682 log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
2683 pjoin(self.me_dir, 'SubProcesses'))
2684 all_log_files = log_GV_files
2685
2686 elif mode == 'LO':
2687 log_GV_files = ''
2688 all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
2689 pjoin(self.me_dir, 'SubProcesses'))
2690 else:
2691 raise aMCatNLOError, 'Running mode %s not supported.'%mode
2692
2693 try:
2694 message, debug_msg = \
2695 self.compile_advanced_stats(log_GV_files, all_log_files, message)
2696 except Exception as e:
2697 debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
2698 err_string = StringIO.StringIO()
2699 traceback.print_exc(limit=4, file=err_string)
2700 debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
2701 %err_string.getvalue()
2702
2703 logger.debug(debug_msg+'\n')
2704 logger.info(message+'\n')
2705
2706
2707 evt_path = pjoin(self.me_dir, 'Events', self.run_name)
2708 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
2709 open(pjoin(evt_path, '.full_summary.txt'),
2710 'w').write(message+'\n\n'+debug_msg+'\n')
2711
2712 self.archive_files(evt_path,mode)
2713
2715 """ Copies in the Events/Run_<xxx> directory relevant files characterizing
2716 the run."""
2717
2718 files_to_arxiv = [pjoin('Cards','param_card.dat'),
2719 pjoin('Cards','MadLoopParams.dat'),
2720 pjoin('Cards','FKS_params.dat'),
2721 pjoin('Cards','run_card.dat'),
2722 pjoin('Subprocesses','setscales.f'),
2723 pjoin('Subprocesses','cuts.f')]
2724
2725 if mode in ['NLO', 'LO']:
2726 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))
2727
2728 if not os.path.exists(pjoin(evt_path,'RunMaterial')):
2729 os.mkdir(pjoin(evt_path,'RunMaterial'))
2730
2731 for path in files_to_arxiv:
2732 if os.path.isfile(pjoin(self.me_dir,path)):
2733 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial'))
2734 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path)
2735 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
2736
2738 """ This functions goes through the log files given in arguments and
2739 compiles statistics about MadLoop stability, virtual integration
2740 optimization and detection of potential error messages into a nice
2741 debug message to printed at the end of the run """
2742
2743 def safe_float(str_float):
2744 try:
2745 return float(str_float)
2746 except ValueError:
2747 logger.debug('Could not convert the following float during'+
2748 ' advanced statistics printout: %s'%str(str_float))
2749 return -1.0
2750
2751
2752
2753
2754 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}}
2755 mint_search = re.compile(r"MINT(?P<ID>\d*).txt")
2756
2757
2758
2759
2760
2761
2762
2763 UPS_stat_finder = re.compile(
2764 r"Satistics from MadLoop:.*"+\
2765 r"Total points tried\:\s+(?P<ntot>\d+).*"+\
2766 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\
2767 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\
2768 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\
2769 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\
2770 r"Double precision used\:\s+(?P<nddp>\d+).*"+\
2771 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\
2772 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\
2773 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\
2774 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL)
2775
2776 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)',
2777 1 : 'CutTools (double precision)',
2778 2 : 'PJFry++',
2779 3 : 'IREGI',
2780 4 : 'Golem95',
2781 5 : 'Samurai',
2782 6 : 'Ninja (double precision)',
2783 7 : 'COLLIER',
2784 8 : 'Ninja (quadruple precision)',
2785 9 : 'CutTools (quadruple precision)'}
2786 RetUnit_finder =re.compile(
2787 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)")
2788
2789
2790 for gv_log in log_GV_files:
2791 channel_name = '/'.join(gv_log.split('/')[-5:-1])
2792 log=open(gv_log,'r').read()
2793 UPS_stats = re.search(UPS_stat_finder,log)
2794 for retunit_stats in re.finditer(RetUnit_finder, log):
2795 if channel_name not in stats['UPS'].keys():
2796 stats['UPS'][channel_name] = [0]*10+[[0]*10]
2797 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \
2798 += int(retunit_stats.group('n_occurences'))
2799 if not UPS_stats is None:
2800 try:
2801 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot'))
2802 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun'))
2803 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps'))
2804 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups'))
2805 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps'))
2806 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp'))
2807 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp'))
2808 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini'))
2809 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100'))
2810 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10'))
2811 except KeyError:
2812 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')),
2813 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')),
2814 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')),
2815 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')),
2816 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')),
2817 int(UPS_stats.group('n10')),[0]*10]
2818 debug_msg = ""
2819 if len(stats['UPS'].keys())>0:
2820 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0)
2821 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0)
2822 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0)
2823 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0)
2824 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0)
2825 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0)
2826 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0)
2827 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0)
2828 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0)
2829 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0)
2830 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \
2831 for i in range(10)]
2832 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \
2833 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()]
2834 maxUPS = max(UPSfracs, key = lambda w: w[1])
2835
2836 tmpStr = ""
2837 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS
2838 tmpStr += '\n Stability unknown: %d'%nTotsun
2839 tmpStr += '\n Stable PS point: %d'%nTotsps
2840 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups
2841 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps
2842 tmpStr += '\n Only double precision used: %d'%nTotddp
2843 tmpStr += '\n Quadruple precision used: %d'%nTotqdp
2844 tmpStr += '\n Initialization phase-space points: %d'%nTotini
2845 tmpStr += '\n Reduction methods used:'
2846 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \
2847 unit_code_meaning.keys() if nTot1[i]>0]
2848 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True):
2849 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n)
2850 if nTot100 != 0:
2851 debug_msg += '\n Unknown return code (100): %d'%nTot100
2852 if nTot10 != 0:
2853 debug_msg += '\n Unknown return code (10): %d'%nTot10
2854 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \
2855 not in unit_code_meaning.keys())
2856 if nUnknownUnit != 0:
2857 debug_msg += '\n Unknown return code (1): %d'\
2858 %nUnknownUnit
2859
2860 if maxUPS[1]>0.001:
2861 message += tmpStr
2862 message += '\n Total number of unstable PS point detected:'+\
2863 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS)
2864 message += '\n Maximum fraction of UPS points in '+\
2865 'channel %s (%4.2f%%)'%maxUPS
2866 message += '\n Please report this to the authors while '+\
2867 'providing the file'
2868 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir),
2869 maxUPS[0],'UPS.log'))
2870 else:
2871 debug_msg += tmpStr
2872
2873
2874
2875
2876
2877
2878 virt_tricks_finder = re.compile(
2879 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\
2880 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\
2881 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\
2882 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)")
2883
2884 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\
2885 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)")
2886
2887 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)")
2888
2889 channel_contr_list = {}
2890 for gv_log in log_GV_files:
2891 logfile=open(gv_log,'r')
2892 log = logfile.read()
2893 logfile.close()
2894 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2895 vf_stats = None
2896 for vf_stats in re.finditer(virt_frac_finder, log):
2897 pass
2898 if not vf_stats is None:
2899 v_frac = safe_float(vf_stats.group('v_frac'))
2900 v_average = safe_float(vf_stats.group('v_average'))
2901 try:
2902 if v_frac < stats['virt_stats']['v_frac_min'][0]:
2903 stats['virt_stats']['v_frac_min']=(v_frac,channel_name)
2904 if v_frac > stats['virt_stats']['v_frac_max'][0]:
2905 stats['virt_stats']['v_frac_max']=(v_frac,channel_name)
2906 stats['virt_stats']['v_frac_avg'][0] += v_frac
2907 stats['virt_stats']['v_frac_avg'][1] += 1
2908 except KeyError:
2909 stats['virt_stats']['v_frac_min']=[v_frac,channel_name]
2910 stats['virt_stats']['v_frac_max']=[v_frac,channel_name]
2911 stats['virt_stats']['v_frac_avg']=[v_frac,1]
2912
2913
2914 ccontr_stats = None
2915 for ccontr_stats in re.finditer(channel_contr_finder, log):
2916 pass
2917 if not ccontr_stats is None:
2918 contrib = safe_float(ccontr_stats.group('v_contr'))
2919 try:
2920 if contrib>channel_contr_list[channel_name]:
2921 channel_contr_list[channel_name]=contrib
2922 except KeyError:
2923 channel_contr_list[channel_name]=contrib
2924
2925
2926
2927
2928 average_contrib = 0.0
2929 for value in channel_contr_list.values():
2930 average_contrib += value
2931 if len(channel_contr_list.values()) !=0:
2932 average_contrib = average_contrib / len(channel_contr_list.values())
2933
2934 relevant_log_GV_files = []
2935 excluded_channels = set([])
2936 all_channels = set([])
2937 for log_file in log_GV_files:
2938 channel_name = '/'.join(log_file.split('/')[-3:-1])
2939 all_channels.add(channel_name)
2940 try:
2941 if channel_contr_list[channel_name] > (0.1*average_contrib):
2942 relevant_log_GV_files.append(log_file)
2943 else:
2944 excluded_channels.add(channel_name)
2945 except KeyError:
2946 relevant_log_GV_files.append(log_file)
2947
2948
2949 for gv_log in relevant_log_GV_files:
2950 logfile=open(gv_log,'r')
2951 log = logfile.read()
2952 logfile.close()
2953 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2954
2955 vt_stats = None
2956 for vt_stats in re.finditer(virt_tricks_finder, log):
2957 pass
2958 if not vt_stats is None:
2959 vt_stats_group = vt_stats.groupdict()
2960 v_ratio = safe_float(vt_stats.group('v_ratio'))
2961 v_ratio_err = safe_float(vt_stats.group('v_ratio_err'))
2962 v_contr = safe_float(vt_stats.group('v_abs_contr'))
2963 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err'))
2964 try:
2965 if v_ratio < stats['virt_stats']['v_ratio_min'][0]:
2966 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name)
2967 if v_ratio > stats['virt_stats']['v_ratio_max'][0]:
2968 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name)
2969 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]:
2970 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name)
2971 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]:
2972 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name)
2973 if v_contr < stats['virt_stats']['v_contr_min'][0]:
2974 stats['virt_stats']['v_contr_min']=(v_contr,channel_name)
2975 if v_contr > stats['virt_stats']['v_contr_max'][0]:
2976 stats['virt_stats']['v_contr_max']=(v_contr,channel_name)
2977 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]:
2978 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name)
2979 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]:
2980 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name)
2981 except KeyError:
2982 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name]
2983 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name]
2984 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name]
2985 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name]
2986 stats['virt_stats']['v_contr_min']=[v_contr,channel_name]
2987 stats['virt_stats']['v_contr_max']=[v_contr,channel_name]
2988 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name]
2989 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name]
2990
2991 vf_stats = None
2992 for vf_stats in re.finditer(virt_frac_finder, log):
2993 pass
2994 if not vf_stats is None:
2995 v_frac = safe_float(vf_stats.group('v_frac'))
2996 v_average = safe_float(vf_stats.group('v_average'))
2997 try:
2998 if v_average < stats['virt_stats']['v_average_min'][0]:
2999 stats['virt_stats']['v_average_min']=(v_average,channel_name)
3000 if v_average > stats['virt_stats']['v_average_max'][0]:
3001 stats['virt_stats']['v_average_max']=(v_average,channel_name)
3002 stats['virt_stats']['v_average_avg'][0] += v_average
3003 stats['virt_stats']['v_average_avg'][1] += 1
3004 except KeyError:
3005 stats['virt_stats']['v_average_min']=[v_average,channel_name]
3006 stats['virt_stats']['v_average_max']=[v_average,channel_name]
3007 stats['virt_stats']['v_average_avg']=[v_average,1]
3008
3009 try:
3010 debug_msg += '\n\n Statistics on virtual integration optimization : '
3011
3012 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\
3013 %tuple(stats['virt_stats']['v_frac_max'])
3014 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\
3015 %tuple(stats['virt_stats']['v_frac_min'])
3016 debug_msg += '\n Average virt fraction computed %.3f'\
3017 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1]))
3018 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\
3019 (len(excluded_channels),len(all_channels))
3020 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\
3021 %tuple(stats['virt_stats']['v_average_max'])
3022 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\
3023 %tuple(stats['virt_stats']['v_ratio_max'])
3024 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\
3025 %tuple(stats['virt_stats']['v_ratio_err_max'])
3026 debug_msg += tmpStr
3027
3028
3029
3030
3031
3032
3033
3034
3035 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\
3036 %tuple(stats['virt_stats']['v_contr_err_max'])
3037 debug_msg += tmpStr
3038
3039
3040
3041
3042 except KeyError:
3043 debug_msg += '\n Could not find statistics on the integration optimization. '
3044
3045
3046
3047
3048
3049 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\
3050 "(?P<time>[\d\+-Eed\.]*)\s*")
3051
3052 for logf in log_GV_files:
3053 logfile=open(logf,'r')
3054 log = logfile.read()
3055 logfile.close()
3056 channel_name = '/'.join(logf.split('/')[-3:-1])
3057 mint = re.search(mint_search,logf)
3058 if not mint is None:
3059 channel_name = channel_name+' [step %s]'%mint.group('ID')
3060
3061 for time_stats in re.finditer(timing_stat_finder, log):
3062 try:
3063 stats['timings'][time_stats.group('name')][channel_name]+=\
3064 safe_float(time_stats.group('time'))
3065 except KeyError:
3066 if time_stats.group('name') not in stats['timings'].keys():
3067 stats['timings'][time_stats.group('name')] = {}
3068 stats['timings'][time_stats.group('name')][channel_name]=\
3069 safe_float(time_stats.group('time'))
3070
3071
3072 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs)))
3073 try:
3074 totTimeList = [(time, chan) for chan, time in \
3075 stats['timings']['Total'].items()]
3076 except KeyError:
3077 totTimeList = []
3078
3079 totTimeList.sort()
3080 if len(totTimeList)>0:
3081 debug_msg += '\n\n Inclusive timing profile :'
3082 debug_msg += '\n Overall slowest channel %s (%s)'%\
3083 (Tstr(totTimeList[-1][0]),totTimeList[-1][1])
3084 debug_msg += '\n Average channel running time %s'%\
3085 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList))
3086 debug_msg += '\n Aggregated total running time %s'%\
3087 Tstr(sum([el[0] for el in totTimeList]))
3088 else:
3089 debug_msg += '\n\n Inclusive timing profile non available.'
3090
3091 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \
3092 sum(stats['timings'][stat].values()), reverse=True)
3093 for name in sorted_keys:
3094 if name=='Total':
3095 continue
3096 if sum(stats['timings'][name].values())<=0.0:
3097 debug_msg += '\n Zero time record for %s.'%name
3098 continue
3099 try:
3100 TimeList = [((100.0*time/stats['timings']['Total'][chan]),
3101 chan) for chan, time in stats['timings'][name].items()]
3102 except KeyError, ZeroDivisionError:
3103 debug_msg += '\n\n Timing profile for %s unavailable.'%name
3104 continue
3105 TimeList.sort()
3106 debug_msg += '\n Timing profile for <%s> :'%name
3107 try:
3108 debug_msg += '\n Overall fraction of time %.3f %%'%\
3109 safe_float((100.0*(sum(stats['timings'][name].values())/
3110 sum(stats['timings']['Total'].values()))))
3111 except KeyError, ZeroDivisionError:
3112 debug_msg += '\n Overall fraction of time unavailable.'
3113 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\
3114 (TimeList[-1][0],TimeList[-1][1])
3115 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\
3116 (TimeList[0][0],TimeList[0][1])
3117
3118
3119
3120
3121
3122
3123
3124
3125
3126
3127 err_finder = re.compile(\
3128 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE)
3129 for log in all_log_files:
3130 logfile=open(log,'r')
3131 nErrors = len(re.findall(err_finder, logfile.read()))
3132 logfile.close()
3133 if nErrors != 0:
3134 stats['Errors'].append((str(log),nErrors))
3135
3136 nErrors = sum([err[1] for err in stats['Errors']],0)
3137 if nErrors != 0:
3138 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\
3139 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\
3140 'found in the following log file%s:'%('s' if \
3141 len(stats['Errors'])>1 else '')
3142 for error in stats['Errors'][:3]:
3143 log_name = '/'.join(error[0].split('/')[-5:])
3144 debug_msg += '\n > %d error%s in %s'%\
3145 (error[1],'s' if error[1]>1 else '',log_name)
3146 if len(stats['Errors'])>3:
3147 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0)
3148 nRemainingLogs = len(stats['Errors'])-3
3149 debug_msg += '\n And another %d error%s in %d other log file%s'%\
3150 (nRemainingErrors, 's' if nRemainingErrors>1 else '',
3151 nRemainingLogs, 's ' if nRemainingLogs>1 else '')
3152
3153 return message, debug_msg
3154
3155
"""this function calls the reweighting routines and creates the event file in the
Event dir. Return the name of the event file created
"""
# NOTE(review): the enclosing `def` line is not visible in this chunk;
# `options`, `event_norm`, `mode` and `nevents` are presumably parameters of
# the enclosing method -- confirm against the full file.
scale_pdf_info=[]
# Run the a-posteriori scale/PDF reweighting only when the run card asks for
# it (any reweight flag set, or more than one scale choice / PDF set).
if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
   len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
    scale_pdf_info = self.run_reweight(options['reweightonly'])
self.update_status('Collecting events', level='parton', update_results=True)
misc.compile(['collect_events'],
            cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile'])
p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
        stdin=subprocess.PIPE,
        stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
# The collect_events executable reads the normalisation mode interactively on
# stdin; feed it the answer matching the requested event normalisation.
if event_norm.lower() == 'sum':
    p.communicate(input = '1\n')
elif event_norm.lower() == 'unity':
    p.communicate(input = '3\n')
elif event_norm.lower() == 'bias':
    p.communicate(input = '0\n')
else:
    # any other value falls through to mode '2' (presumably 'average';
    # confirm against collect_events itself)
    p.communicate(input = '2\n')

# The last whitespace-separated token of the log is the produced file name.
filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]

if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
    raise aMCatNLOError('An error occurred during event generation. ' + \
            'The event file has not been created. Check collect_events.log')
evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
# Compress the collected events into Events/<run_name>/events.lhe.gz
misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
if not options['reweightonly']:
    self.print_summary(options, 2, mode, scale_pdf_info)
res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses'))
for res_file in res_files:
    files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))

logger.info('The %s file has been generated.\n' % (evt_file))
self.results.add_detail('nb_event', nevents)
self.update_status('Events generated', level='parton', update_results=True)
# Return the event file path without the trailing '.gz'
return evt_file[:-3]
3197
3198
"""runs mcatnlo on the generated event file, to produce showered-events
"""
# NOTE(review): the enclosing `def` line is not visible in this chunk;
# `evt_file` and `options` are presumably parameters of the enclosing
# method -- confirm against the full file.
logger.info('Preparing MCatNLO run')
# The event file may or may not still be gzipped; best-effort decompress.
try:
    misc.gunzip(evt_file)
except Exception:
    pass

self.banner = banner_mod.Banner(evt_file)
shower = self.banner.get_detail('run_card', 'parton_shower').upper()

# nsplit_jobs must divide the number of events evenly; otherwise fall back
# to a single shower job.
if int(self.banner.get_detail('run_card', 'nevents') / \
        self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
        != self.banner.get_detail('run_card', 'nevents'):
    logger.warning(\
        'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
        'Setting it to 1.')
    self.shower_card['nsplit_jobs'] = 1

# Showering only a subset of the events is incompatible with splitting.
if self.shower_card['nevents'] > 0 and \
   self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
   self.shower_card['nsplit_jobs'] != 1:
    logger.warning(\
        'Only a part of the events will be showered.\n' + \
        'Setting nsplit_jobs in the shower_card to 1.')
    self.shower_card['nsplit_jobs'] = 1

self.banner_to_mcatnlo(evt_file)

# --- FastJet linkage -----------------------------------------------------
# If the shower analysis links against FastJet, make sure the C++ runtime
# and the FastJet lib/include paths are in the shower card; if fastjet-config
# cannot be run, fall back to the bundled fjcore.
if 'fastjet' in self.shower_card['extralibs']:
    # FastJet is C++: the C++ standard library must be linked in too.
    if not 'stdc++' in self.shower_card['extralibs']:
        logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
        self.shower_card['extralibs'] += ' stdc++'

    try:
        # Ask fastjet-config for the installation prefix.
        p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output, error = p.communicate()
        # strip the trailing newline
        output = output[:-1]

        if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
            logger.warning('Linking FastJet: updating EXTRAPATHS')
            self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
        if not pjoin(output, 'include') in self.shower_card['includepaths']:
            logger.warning('Linking FastJet: updating INCLUDEPATHS')
            self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')

        include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
        namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
    except Exception:
        # No usable fastjet-config: use the bundled fjcore sources instead.
        logger.warning('Linking FastJet: using fjcore')

        self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
        if not 'fjcore.o' in self.shower_card['analyse']:
            self.shower_card['analyse'] += ' fjcore.o'

        include_line = '#include "fjcore.hh"//INCLUDE_FJ'
        namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'

    # Patch the fortran/fastjet wrapper: the //INCLUDE_FJ and //NAMESPACE_FJ
    # marker lines are replaced by the variant chosen above.
    fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
    for line in fjwrapper_lines:
        if '//INCLUDE_FJ' in line:
            fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
        if '//NAMESPACE_FJ' in line:
            fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
    with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock:
        fsock.write('\n'.join(fjwrapper_lines) + '\n')

extrapaths = self.shower_card['extrapaths'].split()

# --- Shower-specific path checks -----------------------------------------
if shower in ['HERWIGPP', 'PYTHIA8']:
    path_dict = {'HERWIGPP': ['hepmc_path',
                              'thepeg_path',
                              'hwpp_path'],
                 'PYTHIA8': ['pythia8_path']}

    if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]):
        raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \
                ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))

if shower == 'HERWIGPP':
    extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
    self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib')

# Ask pythia8-config where its HepMC2 libraries live and add that to the
# library search path.
if shower == 'PYTHIA8':
    hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'],
            stdout = subprocess.PIPE).stdout.read().strip()
    # hepmc is of the form "-I<incdir> -L<libdir>"; take the -L entry.
    extrapaths.append(hepmc.split()[1].replace('-L', ''))

if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
    extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))

# Export the collected library paths through the platform's dynamic-linker
# search variable.
if sys.platform == 'darwin':
    ld_library_path = 'DYLD_LIBRARY_PATH'
else:
    ld_library_path = 'LD_LIBRARY_PATH'
if ld_library_path in os.environ.keys():
    paths = os.environ[ld_library_path]
else:
    paths = ''
paths += ':' + ':'.join(extrapaths)
os.putenv(ld_library_path, paths)

shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
self.shower_card.write_card(shower, shower_card_path)

# If write_card produced a 'set' version of the card, promote it.
if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
    files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
             pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))

mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')

# --- Compile the MCatNLO driver ------------------------------------------
misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
        stderr=open(mcatnlo_log, 'w'),
        cwd=pjoin(self.me_dir, 'MCatNLO'),
        close_fds=True)

exe = 'MCATNLO_%s_EXE' % shower
if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
   not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
    print open(mcatnlo_log).read()
    raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
logger.info(' ... done')

# Pick the first free RUN_<shower>_<n> directory.
count = 1
while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
                (shower, count))):
    count += 1
rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
                (shower, count))
os.mkdir(rundir)
files.cp(shower_card_path, rundir)

# --- Split the event file if needed --------------------------------------
event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
    logger.info('Cleaning old files and splitting the event file...')
    # remove stale split files (but never the master events.lhe)
    files.rm([f for f in event_files if 'events.lhe' not in f])
    if self.shower_card['nsplit_jobs'] > 1:
        misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile'])
        p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
                stdin=subprocess.PIPE,
                stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
                cwd=pjoin(self.me_dir, 'Events', self.run_name))
        p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
        logger.info('Splitting done.')
    event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))

event_files.sort()

self.update_status('Showering events...', level='shower')
logger.info('(Running in %s)' % rundir)
# Move the shower executable and its input card into the run directory.
if shower != 'PYTHIA8':
    files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
    files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
else:
    # Pythia8 uses its own executable/command-file names.
    files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
    files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
    if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
        files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
        files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
    else:
        files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)

if shower == 'HERWIGPP':
    try:
        # Newer Herwig installations name the binary 'Herwig', older ones
        # 'Herwig++'; link whichever exists.
        if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
            files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
        if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
            files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir)
    except Exception:
        raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')

    if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
        files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)

# Link the (possibly split) event files into the run directory.
files.ln(evt_file, rundir, 'events.lhe')
for i, f in enumerate(event_files):
    files.ln(f, rundir,'events_%d.lhe' % (i + 1))

# Decide the output format: plain (Std)HEP events when no analysis is
# requested, otherwise HwU or TopDrawer histograms.
if not self.shower_card['analyse']:
    out_id = 'HEP'
else:
    if "HwU" in self.shower_card['analyse']:
        out_id = 'HWU'
    else:
        out_id = 'TOP'

# Write the shower driver script from its template.
with open(pjoin(rundir, 'shower.sh'), 'w') as fsock:
    if sys.platform == 'darwin':
        ld_library_path = 'DYLD_LIBRARY_PATH'
    else:
        ld_library_path = 'LD_LIBRARY_PATH'
    fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
            % {'ld_library_path': ld_library_path,
               'extralibs': ':'.join(extrapaths)})
subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])

# One job per split event file (argument 4 is the split index), or a single
# job when the file was not split.
if event_files:
    arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
            for i in range(len(event_files))]
else:
    arg_list = [[shower, out_id, self.run_name]]

self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
self.njobs = 1
self.wait_for_complete('shower')

# --- Collect the shower output -------------------------------------------
message = ''
warning = ''
to_gzip = [evt_file]
if out_id == 'HEP':
    # event output: HepMC for the C++ showers, StdHEP otherwise
    if shower in ['PYTHIA8', 'HERWIGPP']:
        hep_format = 'HEPMC'
        ext = 'hepmc'
    else:
        hep_format = 'StdHEP'
        ext = 'hep'

    hep_file = '%s_%s_0.%s.gz' % \
        (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
    count = 0

    # Find a non-clashing output name (also checking the split-job '__1'
    # variant).
    while os.path.exists(hep_file) or \
          os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) :
        count +=1
        hep_file = '%s_%s_%d.%s.gz' % \
            (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)

    try:
        if self.shower_card['nsplit_jobs'] == 1:
            files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
            message = ('The file %s has been generated. \nIt contains showered' + \
                       ' and hadronized events in the %s format obtained' + \
                       ' showering the parton-level event file %s.gz with %s') % \
                       (hep_file, hep_format, evt_file, shower)
        else:
            hep_list = []
            for i in range(self.shower_card['nsplit_jobs']):
                hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
                files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
            message = ('The following files have been generated:\n %s\nThey contain showered' + \
                       ' and hadronized events in the %s format obtained' + \
                       ' showering the (split) parton-level event file %s.gz with %s') % \
                       ('\n '.join(hep_list), hep_format, evt_file, shower)

    # NOTE(review): Python 2 `except OSError, IOError:` binds the caught
    # OSError to the name IOError -- IOError itself is NOT caught here.
    # The intended form is `except (OSError, IOError):` -- confirm and fix.
    except OSError, IOError:
        raise aMCatNLOError('No file has been generated, an error occurred.'+\
             ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))

    # Plots can only be produced from StdHEP output; best-effort.
    if hep_format == 'StdHEP':
        try:
            self.do_plot('%s -f' % self.run_name)
        except Exception, error:
            logger.info("Fail to make the plot. Continue...")
            pass

elif out_id == 'TOP' or out_id == 'HWU':
    # histogram output: TopDrawer or HwU files shipped in histfile*.tar
    if out_id=='TOP':
        ext='top'
    elif out_id=='HWU':
        ext='HwU'
    topfiles = []
    top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)]
    for top_tar in top_tars:
        topfiles.extend(top_tar.getnames())

    # one histogram tarball per shower job is expected
    if len(top_tars) != self.shower_card['nsplit_jobs']:
        raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
                (self.shower_card['nsplit_jobs'], len(top_tars)))

    # Find a non-clashing plot_<shower>_<n>_ prefix for the output files.
    filename = 'plot_%s_%d_' % (shower, 1)
    count = 1
    while os.path.exists(pjoin(self.me_dir, 'Events',
            self.run_name, '%s0.%s' % (filename,ext))) or \
          os.path.exists(pjoin(self.me_dir, 'Events',
            self.run_name, '%s0__1.%s' % (filename,ext))):
        count += 1
        filename = 'plot_%s_%d_' % (shower, count)

    if out_id=='TOP':
        hist_format='TopDrawer format'
    elif out_id=='HWU':
        hist_format='HwU and GnuPlot formats'

    if not topfiles:
        warning = 'No .top file has been generated. For the results of your ' +\
                  'run, please check inside %s' % rundir
    elif self.shower_card['nsplit_jobs'] == 1:
        # single job: extract and move/convert each histogram file
        top_tars[0].extractall(path = rundir)
        plotfiles = []
        for i, file in enumerate(topfiles):
            if out_id=='TOP':
                plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                        '%s%d.top' % (filename, i))
                files.mv(pjoin(rundir, file), plotfile)
            elif out_id=='HWU':
                out=pjoin(self.me_dir,'Events',
                        self.run_name,'%s%d'% (filename,i))
                histos=[{'dirname':pjoin(rundir,file)}]
                self.combine_plots_HwU(histos,out)
                # gnuplot rendering is optional; ignore failures
                try:
                    misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\
                            stdout=os.open(os.devnull, os.O_RDWR),\
                            stderr=os.open(os.devnull, os.O_RDWR),\
                            cwd=pjoin(self.me_dir, 'Events', self.run_name))
                except Exception:
                    pass
                plotfile=pjoin(self.me_dir,'Events',self.run_name,
                        '%s%d.HwU'% (filename,i))
            plotfiles.append(plotfile)

        ffiles = 'files'
        have = 'have'
        if len(plotfiles) == 1:
            ffiles = 'file'
            have = 'has'

        message = ('The %s %s %s been generated, with histograms in the' + \
                   ' %s, obtained by showering the parton-level' + \
                   ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
                   hist_format, evt_file, shower)
    else:
        # several jobs: extract each tarball, tagging files with the job index
        topfiles_set = set(topfiles)
        plotfiles = []
        for j, top_tar in enumerate(top_tars):
            top_tar.extractall(path = rundir)
            for i, file in enumerate(topfiles_set):
                plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                        '%s%d__%d.%s' % (filename, i, j + 1,ext))
                files.mv(pjoin(rundir, file), plotfile)
                plotfiles.append(plotfile)

        # combine the per-job histograms if requested in the shower card
        if self.shower_card['combine_td']:
            misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities'))

            # with 'sum' normalisation each job already carries its share;
            # otherwise divide by the number of jobs
            if self.banner.get('run_card', 'event_norm').lower() == 'sum':
                norm = 1.
            else:
                norm = 1./float(self.shower_card['nsplit_jobs'])

            plotfiles2 = []
            for i, file in enumerate(topfiles_set):
                filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \
                        for j in range(self.shower_card['nsplit_jobs'])]
                if out_id=='TOP':
                    # sum_plots reads: n files, the file names, the norms
                    infile="%d\n%s\n%s\n" % \
                        (self.shower_card['nsplit_jobs'],
                         '\n'.join(filelist),
                         '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
                    p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
                            stdin=subprocess.PIPE,
                            stdout=os.open(os.devnull, os.O_RDWR),
                            cwd=pjoin(self.me_dir, 'Events', self.run_name))
                    p.communicate(input = infile)
                    files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
                            pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
                elif out_id=='HWU':
                    out=pjoin(self.me_dir,'Events',
                            self.run_name,'%s%d'% (filename,i))
                    histos=[]
                    norms=[]
                    for plotfile in plotfiles:
                        histos.append({'dirname':plotfile})
                        norms.append(norm)
                    self.combine_plots_HwU(histos,out,normalisation=norms)
                    try:
                        misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\
                                stdout=os.open(os.devnull, os.O_RDWR),\
                                stderr=os.open(os.devnull, os.O_RDWR),\
                                cwd=pjoin(self.me_dir, 'Events',self.run_name))
                    except Exception:
                        pass

                plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext)))
                # archive the per-job files and delete the originals
                tar = tarfile.open(
                        pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
                for f in filelist:
                    tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
                files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])

                tar.close()

            ffiles = 'files'
            have = 'have'
            if len(plotfiles2) == 1:
                ffiles = 'file'
                have = 'has'

            message = ('The %s %s %s been generated, with histograms in the' + \
                       ' %s, obtained by showering the parton-level' + \
                       ' file %s.gz with %s.\n' + \
                       'The files from the different shower ' + \
                       'jobs (before combining them) can be found inside %s.') % \
                       (ffiles, ', '.join(plotfiles2), have, hist_format,\
                        evt_file, shower,
                        ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2]))

        else:
            message = ('The following files have been generated:\n %s\n' + \
                       'They contain histograms in the' + \
                       ' %s, obtained by showering the parton-level' + \
                       ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
                       hist_format, evt_file, shower)

# If the shower produced a RunMaterial archive, add the shower card used
# for this run to it.
run_dir_path = pjoin(rundir, self.run_name)
if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')):
    misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path)
    files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'),
            pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\
            %(shower, count)))
    misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],
            cwd=run_dir_path)
    shutil.rmtree(pjoin(run_dir_path,'RunMaterial'))

# Re-compress the parton-level event file and report.
for f in to_gzip:
    misc.gzip(f)
if message:
    logger.info(message)
if warning:
    logger.warning(warning)

self.update_status('Run complete', level='shower', update_results=True)
3673
3674
def set_run_name(self, name, tag=None, level='parton', reload_card=False):
    """define the run name, the run_tag, the banner and the results."""

    # For each run level, the list of result tags that a re-run at that
    # level would overwrite (and hence force a fresh tag for).
    upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'],
                   'shower': ['shower','delphes','madanalysis5_hadron'],
                   'delphes':['delphes'],
                   'madanalysis5_hadron':['madanalysis5_hadron'],
                   'plot':[]}

    # Same run as the current one: only refresh the card/tag bookkeeping.
    if name == self.run_name:
        if reload_card:
            run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
            self.run_card = banner_mod.RunCardNLO(run_card)

        # check if we need to change the tag
        if tag:
            self.run_card['run_tag'] = tag
            self.run_tag = tag
            self.results.add_run(self.run_name, self.run_card)
        else:
            # pick a fresh tag only if the latest results already contain
            # output at one of the levels this run would overwrite
            for tag in upgrade_tag[level]:
                if getattr(self.results[self.run_name][-1], tag):
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    self.run_tag = tag
                    self.results.add_run(self.run_name, self.run_card)
                    break
        return

    # Switching run: save the results of the previous one first.
    if self.run_name:
        self.store_result()

    self.run_name = name

    # (re)read the run card for the new run
    run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
    self.run_card = banner_mod.RunCardNLO(run_card)

    new_tag = False
    # Try to recover the banner (and its stored run card) of the new run.
    self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
    if 'mgruncard' in self.banner:
        self.run_card = self.banner.charge_card('run_card')
    if tag:
        self.run_card['run_tag'] = tag
        new_tag = True
    elif not self.run_name in self.results and level =='parton':
        pass # No results yet, so current tag is fine
    elif not self.run_name in self.results:
        # Non-parton level requested on a run we know nothing about.
        logger.warning('Trying to run data on unknown run.')
        self.results.add_run(name, self.run_card)
        self.results.update('add run %s' % name, 'all', makehtml=True)
    else:
        for tag in upgrade_tag[level]:
            if getattr(self.results[self.run_name][-1], tag):
                # LEVEL is already defined in the last tag -> need to switch tag
                tag = self.get_available_tag()
                self.run_card['run_tag'] = tag
                new_tag = True
                break
        if not new_tag:
            # We can keep the run_tag of the previous results.
            tag = self.results[self.run_name][-1]['tag']
            self.run_card['run_tag'] = tag

    if name in self.results and not new_tag:
        self.results.def_current(self.run_name)
    else:
        self.results.add_run(self.run_name, self.run_card)

    self.run_tag = self.run_card['run_tag']

    # Return the tag of the previous run having the required data for this
    # run (None if not found).
    if level == 'parton':
        return
    elif level == 'pythia':
        return self.results[self.run_name][0]['tag']
    else:
        # walk the tags from newest to oldest looking for pythia output
        for i in range(-1,-len(self.results[self.run_name])-1,-1):
            tagRun = self.results[self.run_name][i]
            if tagRun.pythia:
                return tagRun['tag']
3763
3764
""" tar the pythia results. This is done when we are quite sure that
the pythia output will not be use anymore """
# NOTE(review): the enclosing `def` line (presumably `def store_result(self):`,
# called from set_run_name) is not visible in this chunk -- confirm.

if not self.run_name:
    return

# persist the results database before any cleanup
self.results.save()

if not self.to_store:
    return

if 'event' in self.to_store:
    if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')):
        if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')):
            # no compressed copy yet: gzip the plain event file
            self.update_status('gzipping output file: events.lhe', level='parton', error=True)
            misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
        else:
            # a compressed copy already exists: drop the redundant plain file
            os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
    if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')):
        os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe'))

tag = self.run_card['run_tag']

# everything handled: reset the pending-store list
self.to_store = []
3791
3792
3794 """reads the info in the init block and returns them in a dictionary"""
3795 ev_file = open(evt_file)
3796 init = ""
3797 found = False
3798 while True:
3799 line = ev_file.readline()
3800 if "<init>" in line:
3801 found = True
3802 elif found and not line.startswith('#'):
3803 init += line
3804 if "</init>" in line or "<event>" in line:
3805 break
3806 ev_file.close()
3807
3808
3809
3810
3811
3812
3813 init_dict = {}
3814 init_dict['idbmup1'] = int(init.split()[0])
3815 init_dict['idbmup2'] = int(init.split()[1])
3816 init_dict['ebmup1'] = float(init.split()[2])
3817 init_dict['ebmup2'] = float(init.split()[3])
3818 init_dict['pdfgup1'] = int(init.split()[4])
3819 init_dict['pdfgup2'] = int(init.split()[5])
3820 init_dict['pdfsup1'] = int(init.split()[6])
3821 init_dict['pdfsup2'] = int(init.split()[7])
3822 init_dict['idwtup'] = int(init.split()[8])
3823 init_dict['nprup'] = int(init.split()[9])
3824
3825 return init_dict
3826
3827
3829 """creates the mcatnlo input script using the values set in the header of the event_file.
3830 It also checks if the lhapdf library is used"""
3831
3832 shower = self.banner.get('run_card', 'parton_shower').upper()
3833 pdlabel = self.banner.get('run_card', 'pdlabel')
3834 itry = 0
3835 nevents = self.shower_card['nevents']
3836 init_dict = self.get_init_dict(evt_file)
3837
3838 if nevents < 0 or \
3839 nevents > self.banner.get_detail('run_card', 'nevents'):
3840 nevents = self.banner.get_detail('run_card', 'nevents')
3841
3842 nevents = nevents / self.shower_card['nsplit_jobs']
3843
3844 mcmass_dict = {}
3845 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
3846 pdg = int(line.split()[0])
3847 mass = float(line.split()[1])
3848 mcmass_dict[pdg] = mass
3849
3850 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
3851 content += 'NEVENTS=%d\n' % nevents
3852 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
3853 self.shower_card['nsplit_jobs'])
3854 content += 'MCMODE=%s\n' % shower
3855 content += 'PDLABEL=%s\n' % pdlabel
3856 content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value
3857
3858
3859 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
3860 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
3861 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
3862 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
3863 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
3864 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
3865 try:
3866 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
3867 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
3868 except KeyError:
3869 content += 'HGGMASS=120.\n'
3870 content += 'HGGWIDTH=0.00575308848\n'
3871 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
3872 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
3873 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
3874 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
3875 content += 'DMASS=%s\n' % mcmass_dict[1]
3876 content += 'UMASS=%s\n' % mcmass_dict[2]
3877 content += 'SMASS=%s\n' % mcmass_dict[3]
3878 content += 'CMASS=%s\n' % mcmass_dict[4]
3879 content += 'BMASS=%s\n' % mcmass_dict[5]
3880 try:
3881 content += 'EMASS=%s\n' % mcmass_dict[11]
3882 content += 'MUMASS=%s\n' % mcmass_dict[13]
3883 content += 'TAUMASS=%s\n' % mcmass_dict[15]
3884 except KeyError:
3885
3886 mcmass_lines = [l for l in \
3887 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
3888 ).read().split('\n') if l]
3889 new_mcmass_dict = {}
3890 for l in mcmass_lines:
3891 key, val = l.split('=')
3892 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
3893 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
3894 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
3895 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']
3896
3897 content += 'GMASS=%s\n' % mcmass_dict[21]
3898 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
3899
3900 if int(self.shower_card['pdfcode']) > 1 or \
3901 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \
3902 shower=='HERWIGPP' :
3903
3904
3905
3906
3907 self.link_lhapdf(pjoin(self.me_dir, 'lib'))
3908 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
3909 stdout = subprocess.PIPE).stdout.read().strip()
3910 content += 'LHAPDFPATH=%s\n' % lhapdfpath
3911 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
3912 if self.shower_card['pdfcode']==0:
3913 lhaid_list = ''
3914 content += ''
3915 elif self.shower_card['pdfcode']==1:
3916 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
3917 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
3918 else:
3919 lhaid_list = [abs(int(self.shower_card['pdfcode']))]
3920 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
3921 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
3922 elif int(self.shower_card['pdfcode'])==1 or \
3923 int(self.shower_card['pdfcode'])==-1 and True:
3924
3925
3926
3927
3928
3929
3930 try:
3931 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
3932 stdout = subprocess.PIPE).stdout.read().strip()
3933 self.link_lhapdf(pjoin(self.me_dir, 'lib'))
3934 content += 'LHAPDFPATH=%s\n' % lhapdfpath
3935 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
3936 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
3937 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
3938 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
3939 except Exception:
3940 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
3941 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
3942 ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
3943 ' same set as was used in the event generation install LHAPDF and set the path using'+\
3944 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
3945 content += 'LHAPDFPATH=\n'
3946 content += 'PDFCODE=0\n'
3947 else:
3948 content += 'LHAPDFPATH=\n'
3949 content += 'PDFCODE=0\n'
3950
3951 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
3952 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
3953
3954 if self.options['pythia8_path']:
3955 content+='PY8PATH=%s\n' % self.options['pythia8_path']
3956 if self.options['hwpp_path']:
3957 content+='HWPPPATH=%s\n' % self.options['hwpp_path']
3958 if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']:
3959 content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
3960 if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']:
3961 content+='HEPMCPATH=%s\n' % self.options['hepmc_path']
3962
3963 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
3964 output.write(content)
3965 output.close()
3966 return shower
3967
3968
3970 """runs the reweight_xsec_events executables on each sub-event file generated
3971 to compute on the fly scale and/or PDF uncertainities"""
3972 logger.info(' Doing reweight')
3973
3974 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
3975
3976 if only:
3977 if os.path.exists(nev_unw + '.orig'):
3978 files.cp(nev_unw + '.orig', nev_unw)
3979 else:
3980 raise aMCatNLOError('Cannot find event file information')
3981
3982
3983 file = open(nev_unw)
3984 lines = file.read().split('\n')
3985 file.close()
3986
3987 files.cp(nev_unw, nev_unw + '.orig')
3988
3989
3990 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
3991 evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0']
3992 if self.run_card['event_norm'].lower()=='bias' and self.run_card['nevents'] != 0:
3993 evt_wghts[:]=[1./float(self.run_card['nevents']) for wgt in evt_wghts]
3994
3995 job_dict = {}
3996 exe = 'reweight_xsec_events.local'
3997 for i, evt_file in enumerate(evt_files):
3998 path, evt = os.path.split(evt_file)
3999 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
4000 pjoin(self.me_dir, 'SubProcesses', path))
4001 job_dict[path] = [exe]
4002
4003 self.run_all(job_dict, [[evt, '1']], 'Running reweight')
4004
4005
4006 for evt_file in evt_files:
4007 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
4008 pjoin(self.me_dir, 'SubProcesses', evt_file)], \
4009 stdout = subprocess.PIPE).stdout.read().strip()
4010 if last_line != "</LesHouchesEvents>":
4011 raise aMCatNLOError('An error occurred during reweight. Check the' + \
4012 '\'reweight_xsec_events.output\' files inside the ' + \
4013 '\'SubProcesses/P*/G*/ directories for details')
4014
4015
4016 newfile = open(nev_unw, 'w')
4017 for line in lines:
4018 if line:
4019 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
4020 newfile.close()
4021
4022 return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
4023
4025 """This function takes the files with the scale and pdf values
4026 written by the reweight_xsec_events.f code
4027 (P*/G*/pdf_scale_dependence.dat) and computes the overall
4028 scale and PDF uncertainty (the latter is computed using the
4029 Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
4030 and returns it in percents. The expected format of the file
4031 is: n_scales xsec_scale_central xsec_scale1 ... n_pdf
4032 xsec_pdf0 xsec_pdf1 ...."""
4033
4034 scales=[]
4035 pdfs=[]
4036 for i,evt_file in enumerate(evt_files):
4037 path, evt=os.path.split(evt_file)
4038 with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
4039 data_line=f.readline()
4040 if "scale variations:" in data_line:
4041 for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
4042 data_line = f.readline().split()
4043 scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
4044 try:
4045 scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
4046 except IndexError:
4047 scales+=[scales_this]
4048 data_line=f.readline()
4049 if "pdf variations:" in data_line:
4050 for j,pdf in enumerate(self.run_card['lhaid']):
4051 data_line = f.readline().split()
4052 pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
4053 try:
4054 pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
4055 except IndexError:
4056 pdfs+=[pdfs_this]
4057
4058
4059 scale_info=[]
4060 for j,scale in enumerate(scales):
4061 s_cen=scale[0]
4062 if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
4063
4064 s_max=(max(scale)/s_cen-1)*100
4065 s_min=(1-min(scale)/s_cen)*100
4066
4067 ren_var=[]
4068 fac_var=[]
4069 for i in range(len(self.run_card['rw_rscale'])):
4070 ren_var.append(scale[i]-s_cen)
4071 for i in range(len(self.run_card['rw_fscale'])):
4072 fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen)
4073 s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
4074 s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
4075 s_size=len(scale)
4076 else:
4077 s_max=0.0
4078 s_min=0.0
4079 s_max_q=0.0
4080 s_min_q=0.0
4081 s_size=len(scale)
4082 scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
4083 'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
4084 'label':self.run_card['dynamical_scale_choice'][j], \
4085 'unc':self.run_card['reweight_scale'][j]})
4086
4087
4088 if any(self.run_card['reweight_pdf']):
4089 use_lhapdf=False
4090 lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\
4091 stdout=subprocess.PIPE).stdout.read().strip()
4092
4093 try:
4094 candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \
4095 if os.path.isdir(pjoin(lhapdf_libdir,dirname))]
4096 except OSError:
4097 candidates=[]
4098 for candidate in candidates:
4099 if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')):
4100 sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages'))
4101 try:
4102 import lhapdf
4103 use_lhapdf=True
4104 break
4105 except ImportError:
4106 sys.path.pop(0)
4107 continue
4108
4109 if not use_lhapdf:
4110 try:
4111 candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \
4112 if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))]
4113 except OSError:
4114 candidates=[]
4115 for candidate in candidates:
4116 if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')):
4117 sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages'))
4118 try:
4119 import lhapdf
4120 use_lhapdf=True
4121 break
4122 except ImportError:
4123 sys.path.pop(0)
4124 continue
4125
4126 if not use_lhapdf:
4127 try:
4128 import lhapdf
4129 use_lhapdf=True
4130 except ImportError:
4131 logger.warning("Failed to access python version of LHAPDF: "\
4132 "cannot compute PDF uncertainty from the "\
4133 "weights in the events. The weights in the LHE " \
4134 "event files will still cover all PDF set members, "\
4135 "but there will be no PDF uncertainty printed in the run summary. \n "\
4136 "If the python interface to LHAPDF is available on your system, try "\
4137 "adding its location to the PYTHONPATH environment variable and the"\
4138 "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
4139 use_lhapdf=False
4140
4141
4142 if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)
4143
4144 pdf_info=[]
4145 for j,pdfset in enumerate(pdfs):
4146 p_cen=pdfset[0]
4147 if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
4148 if use_lhapdf:
4149 pdfsetname=self.run_card['lhapdfsetname'][j]
4150 try:
4151 p=lhapdf.getPDFSet(pdfsetname)
4152 ep=p.uncertainty(pdfset,-1)
4153 p_cen=ep.central
4154 p_min=abs(ep.errminus/p_cen)*100
4155 p_max=abs(ep.errplus/p_cen)*100
4156 p_type=p.errorType
4157 p_size=p.size
4158 p_conf=p.errorConfLevel
4159 except:
4160 logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
4161 p_min=0.0
4162 p_max=0.0
4163 p_type='unknown'
4164 p_conf='unknown'
4165 p_size=len(pdfset)
4166 else:
4167 p_min=0.0
4168 p_max=0.0
4169 p_type='unknown'
4170 p_conf='unknown'
4171 p_size=len(pdfset)
4172 pdfsetname=self.run_card['lhaid'][j]
4173 else:
4174 p_min=0.0
4175 p_max=0.0
4176 p_type='none'
4177 p_conf='unknown'
4178 p_size=len(pdfset)
4179 pdfsetname=self.run_card['lhaid'][j]
4180 pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
4181 'unc':p_type, 'name':pdfsetname, 'size':p_size, \
4182 'label':self.run_card['lhaid'][j], 'conf':p_conf})
4183
4184 scale_pdf_info=[scale_info,pdf_info]
4185 return scale_pdf_info
4186
4187
4199
4200 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4201 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args"""
4202 self.ijob = 0
4203 if run_type != 'shower':
4204 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
4205 for args in arg_list:
4206 for Pdir, jobs in job_dict.items():
4207 for job in jobs:
4208 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
4209 if self.cluster_mode == 2:
4210 time.sleep(1)
4211 else:
4212 self.njobs = len(arg_list)
4213 for args in arg_list:
4214 [(cwd, exe)] = job_dict.items()
4215 self.run_exe(exe, args, run_type, cwd)
4216
4217 self.wait_for_complete(run_type)
4218
4219
4220
4222 """check the integrity of the event files after splitting, and resubmit
4223 those which are not nicely terminated"""
4224 jobs_to_resubmit = []
4225 for job in jobs:
4226 last_line = ''
4227 try:
4228 last_line = subprocess.Popen(
4229 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \
4230 stdout = subprocess.PIPE).stdout.read().strip()
4231 except IOError:
4232 pass
4233 if last_line != "</LesHouchesEvents>":
4234 jobs_to_resubmit.append(job)
4235 self.njobs = 0
4236 if jobs_to_resubmit:
4237 run_type = 'Resubmitting broken jobs'
4238 logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
4239 for job in jobs_to_resubmit:
4240 logger.debug('Resubmitting ' + job['dirname'] + '\n')
4241 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4242
4243
4245 """looks into the nevents_unweighed_splitted file to check how many
4246 split jobs are needed for this (pdir, job). arg is F, B or V"""
4247
4248 splittings = []
4249 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
4250 pattern = re.compile('for i in (\d+) ; do')
4251 match = re.search(pattern, ajob)
4252 channel = match.groups()[0]
4253
4254
4255 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()
4256
4257
4258 pattern = re.compile(r"%s_(\d+)/events.lhe" % \
4259 pjoin(pdir, 'G%s%s' % (arg,channel)))
4260 matches = re.findall(pattern, nevents_file)
4261 for m in matches:
4262 splittings.append(m)
4263 return splittings
4264
4265
4266 - def run_exe(self, exe, args, run_type, cwd=None):
4267 """this basic function launch locally/on cluster exe with args as argument.
4268 """
4269
4270 execpath = None
4271 if cwd and os.path.exists(pjoin(cwd, exe)):
4272 execpath = pjoin(cwd, exe)
4273 elif not cwd and os.path.exists(exe):
4274 execpath = exe
4275 else:
4276 raise aMCatNLOError('Cannot find executable %s in %s' \
4277 % (exe, os.getcwd()))
4278
4279 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
4280 subprocess.call(['chmod', '+x', exe], cwd=cwd)
4281
4282 if self.cluster_mode == 0:
4283
4284 misc.call(['./'+exe] + args, cwd=cwd)
4285 self.ijob += 1
4286 self.update_status((max([self.njobs - self.ijob - 1, 0]),
4287 min([1, self.njobs - self.ijob]),
4288 self.ijob, run_type), level='parton')
4289
4290
4291 elif 'reweight' in exe:
4292
4293
4294 input_files, output_files = [], []
4295 pdfinput = self.get_pdf_input_filename()
4296 if os.path.exists(pdfinput):
4297 input_files.append(pdfinput)
4298 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
4299 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
4300 input_files.append(args[0])
4301 output_files.append('%s.rwgt' % os.path.basename(args[0]))
4302 output_files.append('reweight_xsec_events.output')
4303 output_files.append('scale_pdf_dependence.dat')
4304
4305 return self.cluster.submit2(exe, args, cwd=cwd,
4306 input_files=input_files, output_files=output_files,
4307 required_output=output_files)
4308
4309 elif 'ajob' in exe:
4310
4311
4312 if type(args[0]) == str:
4313 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
4314
4315 self.cluster.submit2(exe, args, cwd=cwd,
4316 input_files=input_files, output_files=output_files,
4317 required_output=required_output)
4318
4319
4320
4321
4322
4323
4324 elif 'shower' in exe:
4325
4326
4327
4328 input_files, output_files = [], []
4329 shower = args[0]
4330
4331 if shower == 'PYTHIA8':
4332 input_files.append(pjoin(cwd, 'Pythia8.exe'))
4333 input_files.append(pjoin(cwd, 'Pythia8.cmd'))
4334 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
4335 input_files.append(pjoin(cwd, 'config.sh'))
4336 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
4337 else:
4338 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
4339 else:
4340 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
4341 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
4342 if shower == 'HERWIGPP':
4343 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
4344 input_files.append(pjoin(cwd, 'Herwig++'))
4345 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
4346 input_files.append(pjoin(cwd, 'Herwig'))
4347 input_files.append(pjoin(cwd, 'HepMCFortran.so'))
4348 if len(args) == 3:
4349 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
4350 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
4351 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
4352 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
4353 else:
4354 raise aMCatNLOError, 'Event file not present in %s' % \
4355 pjoin(self.me_dir, 'Events', self.run_name)
4356 else:
4357 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
4358
4359 if len(args) == 3:
4360 output_files.append('mcatnlo_run.log')
4361 else:
4362 output_files.append('mcatnlo_run_%s.log' % args[3])
4363 if args[1] == 'HEP':
4364 if len(args) == 3:
4365 fname = 'events'
4366 else:
4367 fname = 'events_%s' % args[3]
4368 if shower in ['PYTHIA8', 'HERWIGPP']:
4369 output_files.append(fname + '.hepmc.gz')
4370 else:
4371 output_files.append(fname + '.hep.gz')
4372 elif args[1] == 'TOP' or args[1] == 'HWU':
4373 if len(args) == 3:
4374 fname = 'histfile'
4375 else:
4376 fname = 'histfile_%s' % args[3]
4377 output_files.append(fname + '.tar')
4378 else:
4379 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1]
4380
4381 self.cluster.submit2(exe, args, cwd=cwd,
4382 input_files=input_files, output_files=output_files)
4383
4384 else:
4385 return self.cluster.submit(exe, args, cwd=cwd)
4386
4388
4389
4390
        # collect the input/output files to transfer for an 'ajob' submission,
        # so the job can run on local (scratch) disk
        output_files = []
        required_output = []
        input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
                       pjoin(cwd, 'symfact.dat'),
                       pjoin(cwd, 'iproc.dat'),
                       pjoin(cwd, 'initial_states_map.dat'),
                       pjoin(cwd, 'configs_and_props_info.dat'),
                       pjoin(cwd, 'leshouche_info.dat'),
                       pjoin(cwd, 'FKS_params.dat')]

        # the param_card is needed when GoSam provides the virtuals
        if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
            input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        if os.path.exists(pjoin(cwd,'nevents.tar')):
            input_files.append(pjoin(cwd,'nevents.tar'))

        if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
            input_files.append(pjoin(cwd, 'OLE_order.olc'))

        # transfer the MadLoop resources (packed into a tarball if only the
        # directory exists) when the cluster requires file transfer
        if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
                            cluster.need_transfer(self.options):
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
        elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
                            cluster.need_transfer(self.options):
            tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
                            dereference=True)
            tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
            tf.close()
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))

        if args[1] == 'born' or args[1] == 'all':
            # fixed-order run (madevent_mintFO)
            input_files.append(pjoin(cwd, 'madevent_mintFO'))
            if args[2] == '0':
                current = '%s_G%s' % (args[1],args[0])
            else:
                current = '%s_G%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)

            required_output.append('%s/results.dat' % current)
            required_output.append('%s/res_%s.dat' % (current,args[3]))
            required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
            required_output.append('%s/mint_grids' % current)
            required_output.append('%s/grid.MC_integer' % current)
            if args[3] != '0':
                required_output.append('%s/scale_pdf_dependence.dat' % current)

        elif args[1] == 'F' or args[1] == 'B':
            # NLO+PS run (madevent_mintMC)
            input_files.append(pjoin(cwd, 'madevent_mintMC'))

            if args[2] == '0':
                current = 'G%s%s' % (args[1],args[0])
            else:
                current = 'G%s%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)
            if args[2] > '0':
                # split event generation: NOTE(review) string comparison of
                # args[2] with '0' — presumably args[2] is a numeric string
                output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
                required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))

            else:
                required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
                if args[3] in ['0','1']:
                    required_output.append('%s/results.dat' % current)
                if args[3] == '1':
                    output_files.append('%s/results.dat' % current)

        else:
            raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))

        # set the PDF input file, if any
        pdfinput = self.get_pdf_input_filename()
        if os.path.exists(pdfinput):
            input_files.append(pdfinput)
        return input_files, output_files, required_output, args
4473
4474
4475 - def compile(self, mode, options):
4476 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
4477 specified in mode"""
4478
4479 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))
4480
4481 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
4482 '%s_%s_banner.txt' % (self.run_name, self.run_tag)))
4483
4484 self.get_characteristics(pjoin(self.me_dir,
4485 'SubProcesses', 'proc_characteristics'))
4486
4487
4488 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
4489 madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
4490 reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
4491 test_log = pjoin(self.me_dir, 'test.log')
4492
4493
4494 self.make_opts_var = {}
4495 if self.proc_characteristics['has_loops'] and \
4496 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
4497 self.make_opts_var['madloop'] = 'true'
4498
4499 self.update_status('Compiling the code', level=None, update_results=True)
4500
4501 libdir = pjoin(self.me_dir, 'lib')
4502 sourcedir = pjoin(self.me_dir, 'Source')
4503
4504
4505 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])
4506
4507 if '+' in mode:
4508 mode = mode.split('+')[0]
4509 if mode in ['NLO', 'LO']:
4510 exe = 'madevent_mintFO'
4511 tests = ['test_ME']
4512 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
4513 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']:
4514 exe = 'madevent_mintMC'
4515 tests = ['test_ME', 'test_MC']
4516
4517 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock:
4518 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n')
4519
4520
4521 p_dirs = [d for d in \
4522 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
4523
4524 self.do_treatcards('', amcatnlo=True, mode=mode)
4525
4526
4527 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
4528 for p_dir in p_dirs]) and options['nocompile']:
4529 return
4530
4531
4532 if os.path.exists(pjoin(libdir, 'PDFsets')):
4533 files.rm(pjoin(libdir, 'PDFsets'))
4534
4535
4536 if self.run_card['pdlabel'] == 'lhapdf' and \
4537 (self.banner.get_detail('run_card', 'lpp1') != 0 or \
4538 self.banner.get_detail('run_card', 'lpp2') != 0):
4539
4540 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
4541 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
4542 lhaid_list = self.run_card['lhaid']
4543 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
4544
4545 else:
4546 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']:
4547 logger.info('Using built-in libraries for PDFs')
4548
4549 self.make_opts_var['lhapdf'] = ""
4550
4551
4552 if self.run_card['iappl'] != 0:
4553 self.make_opts_var['applgrid'] = 'True'
4554
4555 for code in ['applgrid','amcfast']:
4556 try:
4557 p = subprocess.Popen([self.options[code], '--version'], \
4558 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
4559 except OSError:
4560 raise aMCatNLOError(('No valid %s installation found. \n' + \
4561 'Please set the path to %s-config by using \n' + \
4562 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
4563 else:
4564 output, _ = p.communicate()
4565 if code is 'applgrid' and output < '1.4.63':
4566 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\
4567 +' You are using %s',output)
4568 if code is 'amcfast' and output < '1.1.1':
4569 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\
4570 +' You are using %s',output)
4571
4572
4573 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \
4574 % (self.options['amcfast'],self.options['applgrid'])
4575 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines()
4576 text_out=[]
4577 for line in text:
4578 if line.strip().startswith('APPLLIBS=$'):
4579 line=appllibs
4580 text_out.append(line)
4581 with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock:
4582 fsock.writelines(text_out)
4583 else:
4584 self.make_opts_var['applgrid'] = ""
4585
4586 if 'fastjet' in self.options.keys() and self.options['fastjet']:
4587 self.make_opts_var['fastjet_config'] = self.options['fastjet']
4588
4589
4590 self.update_make_opts()
4591
4592
4593 self.update_status('Compiling source...', level=None)
4594 misc.compile(['clean4pdf'], cwd = sourcedir)
4595 misc.compile(cwd = sourcedir)
4596 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
4597 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
4598 and os.path.exists(pjoin(libdir, 'libmodel.a')) \
4599 and os.path.exists(pjoin(libdir, 'libpdf.a')):
4600 logger.info(' ...done, continuing with P* directories')
4601 else:
4602 raise aMCatNLOError('Compilation failed')
4603
4604
4605 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
4606 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
4607 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
4608 if os.path.exists(pjoin(sourcedir,'StdHEP')):
4609 logger.info('Compiling StdHEP (can take a couple of minutes) ...')
4610 misc.compile(['StdHEP'], cwd = sourcedir)
4611 logger.info(' ...done.')
4612 else:
4613 raise aMCatNLOError('Could not compile StdHEP because its'+\
4614 ' source directory could not be found in the SOURCE folder.\n'+\
4615 " Check the MG5_aMC option 'output_dependencies.'")
4616
4617
4618 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
4619 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
4620 if os.path.exists(pjoin(sourcedir,'CutTools')):
4621 logger.info('Compiling CutTools (can take a couple of minutes) ...')
4622 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1)
4623 logger.info(' ...done.')
4624 else:
4625 raise aMCatNLOError('Could not compile CutTools because its'+\
4626 ' source directory could not be found in the SOURCE folder.\n'+\
4627 " Check the MG5_aMC option 'output_dependencies.'")
4628 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
4629 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
4630 raise aMCatNLOError('CutTools compilation failed.')
4631
4632
4633
4634 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
4635 libdir, 'libcts.a')))),'compiler_version.log')
4636 if os.path.exists(compiler_log_path):
4637 compiler_version_used = open(compiler_log_path,'r').read()
4638 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
4639 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
4640 if os.path.exists(pjoin(sourcedir,'CutTools')):
4641 logger.info('CutTools was compiled with a different fortran'+\
4642 ' compiler. Re-compiling it now...')
4643 misc.compile(['cleanCT'], cwd = sourcedir)
4644 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1)
4645 logger.info(' ...done.')
4646 else:
4647 raise aMCatNLOError("CutTools installation in %s"\
4648 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\
4649 " seems to have been compiled with a different compiler than"+\
4650 " the one specified in MG5_aMC. Please recompile CutTools.")
4651
4652
4653 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
4654 and os.path.exists(pjoin(sourcedir,'IREGI')):
4655 logger.info('Compiling IREGI (can take a couple of minutes) ...')
4656 misc.compile(['IREGI'], cwd = sourcedir)
4657 logger.info(' ...done.')
4658
4659 if os.path.exists(pjoin(libdir, 'libiregi.a')):
4660
4661
4662 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
4663 libdir, 'libiregi.a')))),'compiler_version.log')
4664 if os.path.exists(compiler_log_path):
4665 compiler_version_used = open(compiler_log_path,'r').read()
4666 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
4667 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
4668 if os.path.exists(pjoin(sourcedir,'IREGI')):
4669 logger.info('IREGI was compiled with a different fortran'+\
4670 ' compiler. Re-compiling it now...')
4671 misc.compile(['cleanIR'], cwd = sourcedir)
4672 misc.compile(['IREGI'], cwd = sourcedir)
4673 logger.info(' ...done.')
4674 else:
4675 raise aMCatNLOError("IREGI installation in %s"\
4676 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\
4677 " seems to have been compiled with a different compiler than"+\
4678 " the one specified in MG5_aMC. Please recompile IREGI.")
4679
4680
4681 if self.proc_characteristics['has_loops'] and \
4682 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
4683 if mode in ['NLO', 'aMC@NLO', 'noshower']:
4684 tests.append('check_poles')
4685
4686
4687 self.update_status('Compiling directories...', level=None)
4688
4689 for test in tests:
4690 self.write_test_input(test)
4691
4692 try:
4693 import multiprocessing
4694 if not self.nb_core:
4695 try:
4696 self.nb_core = int(self.options['nb_core'])
4697 except TypeError:
4698 self.nb_core = multiprocessing.cpu_count()
4699 except ImportError:
4700 self.nb_core = 1
4701
4702 compile_options = copy.copy(self.options)
4703 compile_options['nb_core'] = self.nb_core
4704 compile_cluster = cluster.MultiCore(**compile_options)
4705 logger.info('Compiling on %d cores' % self.nb_core)
4706
4707 update_status = lambda i, r, f: self.donothing(i,r,f)
4708 for p_dir in p_dirs:
4709 compile_cluster.submit(prog = compile_dir,
4710 argument = [self.me_dir, p_dir, mode, options,
4711 tests, exe, self.options['run_mode']])
4712 try:
4713 compile_cluster.wait(self.me_dir, update_status)
4714 except Exception, error:
4715 logger.warning("Fail to compile the Subprocesses")
4716 if __debug__:
4717 raise
4718 compile_cluster.remove()
4719 self.do_quit('')
4720
4721 logger.info('Checking test output:')
4722 for p_dir in p_dirs:
4723 logger.info(p_dir)
4724 for test in tests:
4725 logger.info(' Result for %s:' % test)
4726
4727 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
4728
4729 self.check_tests(test, this_dir)
4730
4731
4734
4735
4737 """just call the correct parser for the test log.
4738 Skip check_poles for LOonly folders"""
4739 if test in ['test_ME', 'test_MC']:
4740 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
4741 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')):
4742 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
4743
4744
4746 """read and parse the test_ME/MC.log file"""
4747 content = open(log).read()
4748 if 'FAILED' in content:
4749 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK')
4750 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
4751 'Please check that widths of final state particles (e.g. top) have been' + \
4752 ' set to 0 in the param_card.dat.')
4753 else:
4754 lines = [l for l in content.split('\n') if 'PASSED' in l]
4755 logger.info(' Passed.')
4756 logger.debug('\n'+'\n'.join(lines))
4757
4758
4760 """reads and parse the check_poles.log file"""
4761 content = open(log).read()
4762 npass = 0
4763 nfail = 0
4764 for line in content.split('\n'):
4765 if 'PASSED' in line:
4766 npass +=1
4767 tolerance = float(line.split()[1])
4768 if 'FAILED' in line:
4769 nfail +=1
4770 tolerance = float(line.split()[1])
4771
4772 if nfail + npass == 0:
4773 logger.warning('0 points have been tried')
4774 return
4775
4776 if float(nfail)/float(nfail+npass) > 0.1:
4777 raise aMCatNLOError('Poles do not cancel, run cannot continue')
4778 else:
4779 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \
4780 %(npass, nfail+npass, tolerance))
4781
4782
4804
4805
4807 """ return the model name """
4808 if hasattr(self, 'model_name'):
4809 return self.model_name
4810
4811 model = 'sm'
4812 proc = []
4813 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')):
4814 line = line.split('#')[0]
4815
4816 if line.startswith('import') and 'model' in line:
4817 model = line.split()[2]
4818 proc = []
4819 elif line.startswith('generate'):
4820 proc.append(line.split(None,1)[1])
4821 elif line.startswith('add process'):
4822 proc.append(line.split(None,2)[2])
4823
4824 self.model = model
4825 self.process = proc
4826 return model
4827
4828
4829
4830
4832 """Ask the question when launching generate_events/multi_run"""
4833
4834 if 'parton' not in options:
4835 options['parton'] = False
4836 if 'reweightonly' not in options:
4837 options['reweightonly'] = False
4838
4839
4840 void = 'Not installed'
4841 switch_order = ['order', 'fixed_order', 'shower','madspin', 'reweight','madanalysis5']
4842 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void,
4843 'madspin': void,'reweight':'OFF','madanalysis5':void}
4844 if not switch:
4845 switch = switch_default
4846 else:
4847 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch))
4848 default_switch = ['ON', 'OFF']
4849
4850
4851 allowed_switch_value = {'order': ['LO', 'NLO'],
4852 'fixed_order': default_switch,
4853 'shower': default_switch,
4854 'madspin': default_switch,
4855 'reweight': default_switch,
4856 'madanalysis5':['OFF','HADRON']}
4857
4858 if not os.path.exists(pjoin(self.me_dir, 'Cards',
4859 'madanalysis5_hadron_card_default.dat')):
4860 allowed_switch_value['madanalysis5']=[]
4861
4862 description = {'order': 'Perturbative order of the calculation:',
4863 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):',
4864 'shower': 'Shower the generated events:',
4865 'madspin': 'Decay particles with the MadSpin module:',
4866 'reweight': 'Add weights to the events based on changing model parameters:',
4867 'madanalysis5':'Run MadAnalysis5 on the events generated:'}
4868
4869 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'},
4870 ('madspin', 'ON'): {'fixed_order':'OFF'},
4871 ('reweight', 'ON'): {'fixed_order':'OFF'},
4872 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF', 'reweight':'OFF','madanalysis5':'OFF'},
4873 ('madanalysis5','HADRON'): {'shower': 'ON','fixed_order':'OFF'},
4874 ('shower','OFF'): {'madanalysis5': 'OFF'},
4875 }
4876 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']
4877
4878 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void )
4879
4880 if self.proc_characteristics['ninitial'] == 1:
4881 switch['fixed_order'] = 'ON'
4882 switch['shower'] = 'Not available for decay'
4883 switch['madspin'] = 'Not available for decay'
4884 switch['reweight'] = 'Not available for decay'
4885 switch['madanalysis5'] = 'Not available for decay'
4886 allowed_switch_value['fixed_order'] = ['ON']
4887 allowed_switch_value['shower'] = ['OFF']
4888 allowed_switch_value['madspin'] = ['OFF']
4889 allowed_switch_value['reweight'] = ['OFF']
4890 allowed_switch_value['madanalysis5'] = ['OFF']
4891 available_mode = ['0','1']
4892 special_values = ['LO', 'NLO']
4893 else:
4894
4895 available_mode = ['0', '1', '2','3']
4896
4897 if mode == 'auto':
4898 mode = None
4899 if not mode and (options['parton'] or options['reweightonly']):
4900 mode = 'noshower'
4901
4902
4903 if '3' in available_mode:
4904 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
4905 switch['shower'] = 'ON'
4906 else:
4907 switch['shower'] = 'OFF'
4908 if os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card_default.dat')):
4909 available_mode.append('6')
4910 if os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card.dat')):
4911 switch['madanalysis5'] = 'HADRON'
4912 else:
4913 switch['madanalysis5'] = 'OFF'
4914
4915 if (not aMCatNLO or self.options['mg5_path']) and '3' in available_mode:
4916 available_mode.append('4')
4917 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
4918 switch['madspin'] = 'ON'
4919 else:
4920 switch['madspin'] = 'OFF'
4921 if misc.has_f2py() or self.options['f2py_compiler']:
4922 available_mode.append('5')
4923 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')):
4924 switch['reweight'] = 'ON'
4925 else:
4926 switch['reweight'] = 'OFF'
4927 else:
4928 switch['reweight'] = 'Not available (requires NumPy)'
4929
4930 if 'do_reweight' in options and options['do_reweight'] and '3' in available_mode:
4931 if switch['reweight'] == "OFF":
4932 switch['reweight'] = "ON"
4933 elif switch['reweight'] != "ON":
4934 logger.critical("Cannot run REWEIGHT: %s" % switch['reweight'])
4935 if 'do_madspin' in options and options['do_madspin']:
4936 if switch['madspin'] == "OFF":
4937 switch['madspin'] = 'ON'
4938 elif switch['madspin'] != "ON":
4939 logger.critical("Cannot run MadSpin module: %s" % switch['reweight'])
4940
4941 answers = list(available_mode) + ['auto', 'done']
4942 alias = {}
4943 for id, key in enumerate(switch_order):
4944 if switch[key] != void and switch[key] in allowed_switch_value[key] and \
4945 len(allowed_switch_value[key])>1:
4946 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]]
4947
4948 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s))
4949 for s in allowed_switch_value[key]))
4950 answers += special_values
4951
def create_question(switch):
    """Render the interactive switch menu as a single string."""
    row_fmt = " %i %-61s %12s=%s\n"
    parts = ["The following switches determine which operations are executed:\n"]
    # One numbered row per switch, in the canonical display order.
    for idx, name in enumerate(switch_order, 1):
        parts.append(row_fmt % (idx, description[name], name, switch[name]))
    # Footer: how to toggle a switch or leave the menu.
    parts.append(' Either type the switch number (1 to %s) to change its default setting,\n' % idx)
    parts.append(' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n')
    parts.append(' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n')
    return ''.join(parts)
4961
4962
def modify_switch(mode, answer, switch):
    """Apply one user answer to the switch dictionary.

    Returns None when the question loop should stop (answer was
    '0'/'auto'/'done', or an explicit mode was given), otherwise the
    updated switch dictionary.
    """
    if '=' in answer:
        # Explicit assignment, e.g. 'shower=ON'.
        key, status = answer.split('=')
        switch[key] = status
        if (key, status) in force_switch:
            # Propagate any settings implied by this choice.
            for linked_key, linked_status in force_switch[(key, status)].items():
                if switch[linked_key] not in [linked_status, void]:
                    logger.info('For coherence \'%s\' is set to \'%s\''
                                % (linked_key, linked_status), '$MG:color:BLACK')
                    switch[linked_key] = linked_status
    elif answer in ['0', 'auto', 'done']:
        return
    elif answer in special_values:
        logger.info('Enter mode value: %s. Go to the related mode' % answer, '$MG:color:BLACK')
        # Shortcut modes translate into fixed (order, fixed_order, shower)
        # settings; 'shower' goes through assign_switch so that an
        # uninstalled shower keeps its 'Not installed' marker.
        presets = {'LO':         ('LO',  'ON',  'OFF'),
                   'NLO':        ('NLO', 'ON',  'OFF'),
                   'aMC@NLO':    ('NLO', 'OFF', 'ON'),
                   'aMC@LO':     ('LO',  'OFF', 'ON'),
                   'noshower':   ('NLO', 'OFF', 'OFF'),
                   'noshowerLO': ('LO',  'OFF', 'OFF')}
        order_val, fixed_val, shower_val = presets[answer]
        switch['order'] = order_val
        switch['fixed_order'] = fixed_val
        assign_switch('shower', shower_val)
    if mode:
        return
    return switch
5006
5007 modify_switch(mode, self.last_mode, switch)
5008 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
5009 assign_switch('madspin', 'ON')
5010
5011 if not self.force:
5012 answer = ''
5013 while answer not in ['0', 'done', 'auto', 'onlyshower']:
5014 question = create_question(switch)
5015 if mode:
5016 answer = mode
5017 else:
5018 answer = self.ask(question, '0', answers, alias=alias)
5019 if answer.isdigit() and answer != '0':
5020 key = switch_order[int(answer) - 1]
5021 opt1 = allowed_switch_value[key][0]
5022 opt2 = allowed_switch_value[key][1]
5023 answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2)
5024
5025 if not modify_switch(mode, answer, switch):
5026 break
5027
5028
5029 if not mode or mode == 'auto':
5030 if switch['order'] == 'LO':
5031 if switch['shower'] == 'ON':
5032 mode = 'aMC@LO'
5033 elif switch['fixed_order'] == 'ON':
5034 mode = 'LO'
5035 else:
5036 mode = 'noshowerLO'
5037 elif switch['order'] == 'NLO':
5038 if switch['shower'] == 'ON':
5039 mode = 'aMC@NLO'
5040 elif switch['fixed_order'] == 'ON':
5041 mode = 'NLO'
5042 else:
5043 mode = 'noshower'
5044 logger.info('will run in mode: %s' % mode)
5045
5046 if mode == 'noshower':
5047 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
5048 Please, shower the Les Houches events before using them for physics analyses.""")
5049
5050
5051
5052 cards = ['param_card.dat', 'run_card.dat']
5053 ignore = []
5054 if mode in ['LO', 'NLO']:
5055 options['parton'] = True
5056 ignore = ['shower_card.dat', 'madspin_card.dat']
5057 cards.append('FO_analyse_card.dat')
5058 else:
5059 if switch['madspin'] == 'ON':
5060 cards.append('madspin_card.dat')
5061 if switch['reweight'] == 'ON':
5062 cards.append('reweight_card.dat')
5063 if switch['madanalysis5'] == 'HADRON':
5064 cards.append('madanalysis5_hadron_card.dat')
5065 if 'aMC@' in mode:
5066 cards.append('shower_card.dat')
5067 if mode == 'onlyshower':
5068 cards = ['shower_card.dat']
5069 if options['reweightonly']:
5070 cards = ['run_card.dat']
5071
5072 self.keep_cards(cards, ignore)
5073
5074 if mode =='onlyshower':
5075 cards = ['shower_card.dat']
5076
5077
5078
5079 first_cmd = []
5080
5081 if not options['force'] and not self.force:
5082 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd)
5083
5084 self.banner = banner_mod.Banner()
5085
5086
5087 for card in cards:
5088 self.banner.add(pjoin(self.me_dir, 'Cards', card))
5089
5090 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
5091 self.banner.add_text('run_settings', run_settings)
5092
5093 if not mode =='onlyshower':
5094 self.run_card = self.banner.charge_card('run_card')
5095 self.run_tag = self.run_card['run_tag']
5096
5097 if not hasattr(self, 'run_name') or not self.run_name:
5098 self.run_name = self.find_available_run_name(self.me_dir)
5099
5100 if self.run_name.startswith('run_'):
5101 if mode in ['LO','aMC@LO','noshowerLO']:
5102 self.run_name += '_LO'
5103 self.set_run_name(self.run_name, self.run_tag, 'parton')
5104 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
5105 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
5106 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']:
5107 logger.warning("""You are running with FxFx merging enabled. To be able to merge
5108 samples of various multiplicities without double counting, you
5109 have to remove some events after showering 'by hand'. Please
5110 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
5111 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
5112 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""")
5113 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8':
5114 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
5115 "Type \'n\' to stop or \'y\' to continue"
5116 answers = ['n','y']
5117 answer = self.ask(question, 'n', answers, alias=alias)
5118 if answer == 'n':
5119 error = '''Stop opertation'''
5120 self.ask_run_configuration(mode, options)
5121
5122 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
5123
5124 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""")
5125 if 'aMC@' in mode or mode == 'onlyshower':
5126 self.shower_card = self.banner.charge_card('shower_card')
5127
5128 elif mode in ['LO', 'NLO']:
5129 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat')
5130 self.analyse_card = self.banner.charge_card('FO_analyse_card')
5131
5132 return mode
5133
5134
5135
5136
5137
5139 """The command line processor of MadGraph"""
5140
# ------------------------------------------------------------------------
# Command-line option parsers for the interactive aMC@NLO commands.
# Each _<cmd>_usage string is the help text of the matching do_<cmd>
# command, and each _<cmd>_parser (misc.OptionParser) parses its flags.
# ------------------------------------------------------------------------

# 'compile': build the process directories without launching a run.
_compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                " MODE can be either FO, for fixed-order computations, \n" + \
                " or MC for matching with parton-shower monte-carlos. \n" + \
                " (if omitted, it is set to MC)\n"
_compile_parser = misc.OptionParser(usage=_compile_usage)
_compile_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")

# 'launch': full run (cross section and, in aMC@ modes, event generation).
_launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                " computation of the total cross section and the filling of parton-level histograms \n" + \
                " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                " event file is generated which will be showered with the MonteCarlo specified \n" + \
                " in the run_card.dat\n"

_launch_parser = misc.OptionParser(usage=_launch_usage)
_launch_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
_launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
_launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
_launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
_launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
_launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                            help="Run the reweight module (reweighting by different model parameters)")
_launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                            help="Run the madspin package")


# 'generate_events': same flags as 'launch' minus reweight/madspin/APPLgrid.
_generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                " computation of the total cross section and the filling of parton-level histograms \n" + \
                " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                " event file is generated which will be showered with the MonteCarlo specified \n" + \
                " in the run_card.dat\n"

_generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
_generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the generate_events, without editing them")
_generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
_generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
_generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")


# 'calculate_xsect': fixed-order cross section only (no event generation).
_calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                " ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

_calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
_calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
_calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
_calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
_calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")

# 'shower': shower/hadronize an existing parton-level run; run settings are
# read from the event-file header, so only --force is accepted.
_shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
                ' all the information (e.g. number of events, MonteCarlo, ...\n' + \
                ' are directly read from the header of the event file\n'
_shower_parser = misc.OptionParser(usage=_shower_usage)
_shower_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the shower_card present in the directory for the launch, without editing")
5250
if '__main__' == __name__:

    # Refuse to start on unsupported interpreters: this code base uses
    # python-2-only syntax ('except X, e', print statements), so it needs
    # python 2.6 or 2.7.
    import sys
    if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
        # NOTE: fixed typo in the user-facing message ('upgrate' -> 'upgrade').
        sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\
                 'Please upgrade your version of python.')

    import os
    import optparse

    # Resolve the real location of this script and put its grandparent
    # directory on sys.path (used below for 'import internal.coloring_logging').
    root_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    sys.path.insert(0, root_path)
5268 - def error(self, msg=''):
5270
    # Build the top-level option parser. MyOptParser (defined just above;
    # partially lost in this listing) raises InvalidOption instead of
    # exiting, so unknown flags can be forwarded to the command itself.
    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    parser.add_option("","--web", action="store_true", default=False, dest='web', \
                      help='force toce to be in secure mode')
    parser.add_option("","--debug", action="store_true", default=False, dest='debug', \
                      help='force to launch debug mode')
    parser_error = ''
    done = False

    # Try to parse progressively shorter prefixes of argv: the first prefix
    # that parses cleanly fixes the options, and the trailing arguments are
    # passed through untouched as the command line for the interface.
    for i in range(len(sys.argv)-1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
            done = True
        except MyOptParser.InvalidOption, error:
            pass
        else:
            args += sys.argv[len(sys.argv)-i:]
    if not done:
        # Nothing parsed: report the last parse error and abort.
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption, error:
            print error
            sys.exit(2)

    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config

    # Configure logging from internal/me5_logging.conf, then apply the
    # requested level (numeric, or a logging level name resolved via eval).
    import internal.coloring_logging
    try:
        if __debug__ and options.logging == 'INFO':
            # In debug builds default INFO up to DEBUG.
            options.logging = 'DEBUG'
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            level = eval('logging.' + options.logging)
        print os.path.join(root_path, 'internal', 'me5_logging.conf')
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        raise
        pass  # NOTE(review): unreachable after the bare re-raise above

    # Dispatch: run the requested command through the (shell or web) command
    # interface, then quit. KeyboardInterrupt exits quietly.
    try:
        if args:
            # '--web' selects the non-interactive (secure/web) interface.
            if '--web' in args:
                i = args.index('--web')
                args.pop(i)
                cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path),force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path),force_run=True)

            # Reject names that do not correspond to a do_<command> method.
            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print parser_error
                    print 'and %s can not be interpreted as a valid command.' % args[0]
                else:
                    print 'ERROR: %s not a valid command. Please retry' % args[0]
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print 'quit on KeyboardInterrupt'
        pass
5347