1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """A user friendly command line interface to access MadGraph5_aMC@NLO features.
16 Uses the cmd package for command interpretation and tab completion.
17 """
18 from __future__ import division
19
20 import atexit
21 import glob
22 import logging
23 import math
24 import optparse
25 import os
26 import pydoc
27 import random
28 import re
29 import shutil
30 import subprocess
31 import sys
32 import traceback
33 import time
34 import signal
35 import tarfile
36 import copy
37 import datetime
38 import tarfile
39 import traceback
40 import StringIO
# Prefer the C-accelerated pickler on Python 2, falling back to the
# pure-Python module elsewhere.
# BUGFIX: the module is spelled 'cPickle' (capital P); the previous
# lowercase 'cpickle' could never import, so the fast implementation was
# silently never used.  Also narrow the bare 'except:' to ImportError so
# genuine errors are not swallowed.
try:
    import cPickle as pickle
except ImportError:
    import pickle
45
# Detect whether line editing is backed by GNU readline (as opposed to
# libedit, common on macOS); tab-completion word splitting differs.
# The bare 'except:' is narrowed to the two failures that can actually
# occur here: readline missing (ImportError) or readline.__doc__ being
# None under -OO (TypeError from the 'in' test).
try:
    import readline
    GNU_SPLITTING = ('GNU' in readline.__doc__)
except (ImportError, TypeError):
    GNU_SPLITTING = True
51
# Locate the MadGraph5_aMC@NLO installation root two directory levels
# above this file, and make its 'bin' directory importable.
root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
root_path = os.path.split(root_path)[0]
sys.path.insert(0, os.path.join(root_path,'bin'))


# Shorthand used pervasively in this module.
pjoin = os.path.join

# Loggers for normal and error output of the interactive interface.
logger = logging.getLogger('madgraph.stdout')
logger_stderr = logging.getLogger('madgraph.stderr')
61
62 try:
63 import madgraph
64 except ImportError:
65 aMCatNLO = True
66 import internal.extended_cmd as cmd
67 import internal.common_run_interface as common_run
68 import internal.banner as banner_mod
69 import internal.misc as misc
70 from internal import InvalidCmd, MadGraph5Error
71 import internal.files as files
72 import internal.cluster as cluster
73 import internal.save_load_object as save_load_object
74 import internal.gen_crossxhtml as gen_crossxhtml
75 import internal.sum_html as sum_html
76 import internal.shower_card as shower_card
77 import internal.FO_analyse_card as analyse_card
78 import internal.lhe_parser as lhe_parser
79 else:
80
81 aMCatNLO = False
82 import madgraph.interface.extended_cmd as cmd
83 import madgraph.interface.common_run_interface as common_run
84 import madgraph.iolibs.files as files
85 import madgraph.iolibs.save_load_object as save_load_object
86 import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
87 import madgraph.madevent.sum_html as sum_html
88 import madgraph.various.banner as banner_mod
89 import madgraph.various.cluster as cluster
90 import madgraph.various.misc as misc
91 import madgraph.various.shower_card as shower_card
92 import madgraph.various.FO_analyse_card as analyse_card
93 import madgraph.various.lhe_parser as lhe_parser
94 from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR
98
101 """compile the direcory p_dir
102 arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
103 this function needs not to be a class method in order to do
104 the compilation on multicore"""
105
106 if len(arguments) == 1:
107 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
108 elif len(arguments)==7:
109 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
110 else:
111 raise aMCatNLOError, 'not correct number of argument'
112 logger.info(' Compiling %s...' % p_dir)
113
114 this_dir = pjoin(me_dir, 'SubProcesses', p_dir)
115
116 try:
117
118
119 for test in tests:
120
121 if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
122 continue
123 if test == 'test_ME' or test == 'test_MC':
124 test_exe='test_soft_col_limits'
125 else:
126 test_exe=test
127 misc.compile([test_exe], cwd = this_dir, job_specs = False)
128 input = pjoin(me_dir, '%s_input.txt' % test)
129
130 misc.call(['./%s' % (test_exe)], cwd=this_dir,
131 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'),
132 close_fds=True)
133 if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')) :
134 tf=tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz',
135 dereference=True)
136 tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources')
137 tf.close()
138
139 if not options['reweightonly']:
140 misc.compile(['gensym'], cwd=this_dir, job_specs = False)
141 misc.call(['./gensym'],cwd= this_dir,
142 stdout=open(pjoin(this_dir, 'gensym.log'), 'w'),
143 close_fds=True)
144
145 misc.compile([exe], cwd=this_dir, job_specs = False)
146 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
147 misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False)
148
149 logger.info(' %s done.' % p_dir)
150 return 0
151 except MadGraph5Error, msg:
152 return msg
153
156 """check that the current fortran compiler is gfortran 4.6 or later.
157 If block, stops the execution, otherwise just print a warning"""
158
159 msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
160 'gfortran 4.6 or later installed.\n%s has been detected\n'+\
161 'Note that You can still run all MadEvent run without any problem!'
162
163 if options['fortran_compiler']:
164 compiler = options['fortran_compiler']
165 elif misc.which('gfortran'):
166 compiler = 'gfortran'
167 else:
168 compiler = ''
169
170 if 'gfortran' not in compiler:
171 if block:
172 raise aMCatNLOError(msg % compiler)
173 else:
174 logger.warning(msg % compiler)
175 else:
176 curr_version = misc.get_gfortran_version(compiler)
177 if not ''.join(curr_version.split('.')) >= '46':
178 if block:
179 raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
180 else:
181 logger.warning(msg % (compiler + ' ' + curr_version))
182
183
184
185
186
187
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    # Suggested follow-up commands per command, used by the interactive
    # help machinery.
    next_possibility = {
        'start': [],
    }

    # File written when an unexpected error occurs, plus the messages
    # shown to the user asking to attach it to a bug report.
    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'

    # Message printed on Ctrl-C.
    keyboard_stop_msg = """stopping all operation
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Exception classes the underlying cmd framework should treat as,
    # respectively, user errors and configuration errors.
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError
211 - def __init__(self, me_dir, options, *arg, **opt):
212 """Init history and line continuation"""
213
214
215 self.force = False
216
217
218
219 info = misc.get_pkg_info()
220 info_line = ""
221 if info and info.has_key('version') and info.has_key('date'):
222 len_version = len(info['version'])
223 len_date = len(info['date'])
224 if len_version + len_date < 30:
225 info_line = "#* VERSION %s %s %s *\n" % \
226 (info['version'],
227 (30 - len_version - len_date) * ' ',
228 info['date'])
229 else:
230 version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
231 info_line = "#* VERSION %s %s *\n" % \
232 (version, (24 - len(version)) * ' ')
233
234
235
236 self.history_header = \
237 '#************************************************************\n' + \
238 '#* MadGraph5_aMC@NLO *\n' + \
239 '#* *\n' + \
240 "#* * * *\n" + \
241 "#* * * * * *\n" + \
242 "#* * * * * 5 * * * * *\n" + \
243 "#* * * * * *\n" + \
244 "#* * * *\n" + \
245 "#* *\n" + \
246 "#* *\n" + \
247 info_line + \
248 "#* *\n" + \
249 "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
250 "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
251 "#* and *\n" + \
252 "#* http://amcatnlo.cern.ch *\n" + \
253 '#* *\n' + \
254 '#************************************************************\n' + \
255 '#* *\n' + \
256 '#* Command File for aMCatNLO *\n' + \
257 '#* *\n' + \
258 '#* run as ./bin/aMCatNLO.py filename *\n' + \
259 '#* *\n' + \
260 '#************************************************************\n'
261
262 if info_line:
263 info_line = info_line[1:]
264
265 logger.info(\
266 "************************************************************\n" + \
267 "* *\n" + \
268 "* W E L C O M E to M A D G R A P H 5 *\n" + \
269 "* a M C @ N L O *\n" + \
270 "* *\n" + \
271 "* * * *\n" + \
272 "* * * * * *\n" + \
273 "* * * * * 5 * * * * *\n" + \
274 "* * * * * *\n" + \
275 "* * * *\n" + \
276 "* *\n" + \
277 info_line + \
278 "* *\n" + \
279 "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
280 "* http://amcatnlo.cern.ch *\n" + \
281 "* *\n" + \
282 "* Type 'help' for in-line help. *\n" + \
283 "* *\n" + \
284 "************************************************************")
285 super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
286
287
289 """return the history header"""
290 return self.history_header % misc.get_time_info()
291
293 """action to perform to close nicely on a keyboard interupt"""
294 try:
295 if hasattr(self, 'cluster'):
296 logger.info('rm jobs on queue')
297 self.cluster.remove()
298 if hasattr(self, 'results'):
299 self.update_status('Stop by the user', level=None, makehtml=True, error=True)
300 self.add_error_log_in_html(KeyboardInterrupt)
301 except:
302 pass
303
    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # Never keep the force mode active longer than a single command.
        self.force = False

        # Only relevant for interactive (raw-input) sessions.
        if not self.use_rawinput:
            return stop

        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit','quit','EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            # Status update is cosmetic only; log and never let it kill
            # the interactive session.
            misc.sprint('self.update_status fails', log=logger)
            pass
326
332
338
344
351 """ The Series of help routine for the aMCatNLOCmd"""
352
356
358 logger.info("syntax: banner_run Path|RUN [--run_options]")
359 logger.info("-- Reproduce a run following a given banner")
360 logger.info(" One of the following argument is require:")
361 logger.info(" Path should be the path of a valid banner.")
362 logger.info(" RUN should be the name of a run of the current directory")
363 self.run_options_help([('-f','answer all question by default'),
364 ('--name=X', 'Define the name associated with the new run')])
365
366
370
375
376
380
384
385
387 logger.info("syntax: open FILE ")
388 logger.info("-- open a file with the appropriate editor.")
389 logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
390 logger.info(' the path to the last created/used directory is used')
391
393 if data:
394 logger.info('-- local options:')
395 for name, info in data:
396 logger.info(' %s : %s' % (name, info))
397
398 logger.info("-- session options:")
399 logger.info(" Note that those options will be kept for the current session")
400 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
401 logger.info(" --multicore : Run in multi-core configuration")
402 logger.info(" --nb_core=X : limit the number of core to use to X.")
403
411 """ The Series of check routine for the aMCatNLOCmd"""
412
414 """Check the validity of the line. args[0] is the run_directory"""
415
416 if options['force']:
417 self.force = True
418
419 if len(args) == 0:
420 self.help_shower()
421 raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
422 if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
423 raise self.InvalidCmd, 'Directory %s does not exists' % \
424 pjoin(os.getcwd(), 'Events', args[0])
425
426 self.set_run_name(args[0], level= 'shower')
427 args[0] = pjoin(self.me_dir, 'Events', args[0])
428
430 """Check the argument for the plot command
431 plot run_name modes"""
432
433
434 madir = self.options['madanalysis_path']
435 td = self.options['td_path']
436
437 if not madir or not td:
438 logger.info('Retry to read configuration file to find madanalysis/td')
439 self.set_configuration()
440
441 madir = self.options['madanalysis_path']
442 td = self.options['td_path']
443
444 if not madir:
445 error_msg = 'No Madanalysis path correctly set.'
446 error_msg += 'Please use the set command to define the path and retry.'
447 error_msg += 'You can also define it in the configuration file.'
448 raise self.InvalidCmd(error_msg)
449 if not td:
450 error_msg = 'No path to td directory correctly set.'
451 error_msg += 'Please use the set command to define the path and retry.'
452 error_msg += 'You can also define it in the configuration file.'
453 raise self.InvalidCmd(error_msg)
454
455 if len(args) == 0:
456 if not hasattr(self, 'run_name') or not self.run_name:
457 self.help_plot()
458 raise self.InvalidCmd('No run name currently define. Please add this information.')
459 args.append('all')
460 return
461
462
463 if args[0] not in self._plot_mode:
464 self.set_run_name(args[0], level='plot')
465 del args[0]
466 if len(args) == 0:
467 args.append('all')
468 elif not self.run_name:
469 self.help_plot()
470 raise self.InvalidCmd('No run name currently define. Please add this information.')
471
472 for arg in args:
473 if arg not in self._plot_mode and arg != self.run_name:
474 self.help_plot()
475 raise self.InvalidCmd('unknown options %s' % arg)
476
478 """Check the argument for pythia command
479 syntax: pgs [NAME]
480 Note that other option are already remove at this point
481 """
482
483
484 if not self.options['pythia-pgs_path']:
485 logger.info('Retry to read configuration file to find pythia-pgs path')
486 self.set_configuration()
487
488 if not self.options['pythia-pgs_path'] or not \
489 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
490 error_msg = 'No pythia-pgs path correctly set.'
491 error_msg += 'Please use the set command to define the path and retry.'
492 error_msg += 'You can also define it in the configuration file.'
493 raise self.InvalidCmd(error_msg)
494
495 tag = [a for a in arg if a.startswith('--tag=')]
496 if tag:
497 arg.remove(tag[0])
498 tag = tag[0][6:]
499
500
501 if len(arg) == 0 and not self.run_name:
502 if self.results.lastrun:
503 arg.insert(0, self.results.lastrun)
504 else:
505 raise self.InvalidCmd('No run name currently define. Please add this information.')
506
507 if len(arg) == 1 and self.run_name == arg[0]:
508 arg.pop(0)
509
510 if not len(arg) and \
511 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
512 self.help_pgs()
513 raise self.InvalidCmd('''No file file pythia_events.hep currently available
514 Please specify a valid run_name''')
515
516 lock = None
517 if len(arg) == 1:
518 prev_tag = self.set_run_name(arg[0], tag, 'pgs')
519 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
520
521 if not filenames:
522 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
523 else:
524 input_file = filenames[0]
525 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
526 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
527 argument=['-c', input_file],
528 close_fds=True)
529 else:
530 if tag:
531 self.run_card['run_tag'] = tag
532 self.set_run_name(self.run_name, tag, 'pgs')
533
534 return lock
535
536
538 """Check the argument for pythia command
539 syntax: delphes [NAME]
540 Note that other option are already remove at this point
541 """
542
543
544 if not self.options['delphes_path']:
545 logger.info('Retry to read configuration file to find delphes path')
546 self.set_configuration()
547
548 if not self.options['delphes_path']:
549 error_msg = 'No delphes path correctly set.'
550 error_msg += 'Please use the set command to define the path and retry.'
551 error_msg += 'You can also define it in the configuration file.'
552 raise self.InvalidCmd(error_msg)
553
554 tag = [a for a in arg if a.startswith('--tag=')]
555 if tag:
556 arg.remove(tag[0])
557 tag = tag[0][6:]
558
559
560 if len(arg) == 0 and not self.run_name:
561 if self.results.lastrun:
562 arg.insert(0, self.results.lastrun)
563 else:
564 raise self.InvalidCmd('No run name currently define. Please add this information.')
565
566 if len(arg) == 1 and self.run_name == arg[0]:
567 arg.pop(0)
568
569 if not len(arg) and \
570 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
571 self.help_pgs()
572 raise self.InvalidCmd('''No file file pythia_events.hep currently available
573 Please specify a valid run_name''')
574
575 if len(arg) == 1:
576 prev_tag = self.set_run_name(arg[0], tag, 'delphes')
577 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))
578
579
580 if not filenames:
581 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
582 % (self.run_name, prev_tag,
583 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
584 else:
585 input_file = filenames[0]
586 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
587 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
588 argument=['-c', input_file],
589 close_fds=True)
590 else:
591 if tag:
592 self.run_card['run_tag'] = tag
593 self.set_run_name(self.run_name, tag, 'delphes')
594
596 """check the validity of the line. args is ORDER,
597 ORDER being LO or NLO. If no mode is passed, NLO is used"""
598
599
600
601 if options['force']:
602 self.force = True
603
604 if not args:
605 args.append('NLO')
606 return
607
608 if len(args) > 1:
609 self.help_calculate_xsect()
610 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
611
612 elif len(args) == 1:
613 if not args[0] in ['NLO', 'LO']:
614 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
615 mode = args[0]
616
617
618 if options['multicore'] and options['cluster']:
619 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
620 ' are not compatible. Please choose one.'
621
622
624 """check the validity of the line. args is ORDER,
625 ORDER being LO or NLO. If no mode is passed, NLO is used"""
626
627
628
629 if not args:
630 args.append('NLO')
631 return
632
633 if len(args) > 1:
634 self.help_generate_events()
635 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
636
637 elif len(args) == 1:
638 if not args[0] in ['NLO', 'LO']:
639 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
640 mode = args[0]
641
642
643 if options['multicore'] and options['cluster']:
644 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
645 ' are not compatible. Please choose one.'
646
648 """check the validity of line"""
649
650 if len(args) == 0:
651 self.help_banner_run()
652 raise self.InvalidCmd('banner_run requires at least one argument.')
653
654 tag = [a[6:] for a in args if a.startswith('--tag=')]
655
656
657 if os.path.exists(args[0]):
658 type ='banner'
659 format = self.detect_card_type(args[0])
660 if format != 'banner':
661 raise self.InvalidCmd('The file is not a valid banner.')
662 elif tag:
663 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
664 (args[0], tag))
665 if not os.path.exists(args[0]):
666 raise self.InvalidCmd('No banner associates to this name and tag.')
667 else:
668 name = args[0]
669 type = 'run'
670 banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0]))
671 if not banners:
672 raise self.InvalidCmd('No banner associates to this name.')
673 elif len(banners) == 1:
674 args[0] = banners[0]
675 else:
676
677 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
678 tag = self.ask('which tag do you want to use?', tags[0], tags)
679 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
680 (args[0], tag))
681
682 run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
683 if run_name:
684 try:
685 self.exec_cmd('remove %s all banner -f' % run_name)
686 except Exception:
687 pass
688 self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
689 elif type == 'banner':
690 self.set_run_name(self.find_available_run_name(self.me_dir))
691 elif type == 'run':
692 if not self.results[name].is_empty():
693 run_name = self.find_available_run_name(self.me_dir)
694 logger.info('Run %s is not empty so will use run_name: %s' % \
695 (name, run_name))
696 self.set_run_name(run_name)
697 else:
698 try:
699 self.exec_cmd('remove %s all banner -f' % run_name)
700 except Exception:
701 pass
702 self.set_run_name(name)
703
704
705
707 """check the validity of the line. args is MODE
708 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
709
710
711
712 if options['force']:
713 self.force = True
714
715
716 if not args:
717 args.append('auto')
718 return
719
720 if len(args) > 1:
721 self.help_launch()
722 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
723
724 elif len(args) == 1:
725 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
726 raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0]
727 mode = args[0]
728
729
730 if options['multicore'] and options['cluster']:
731 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
732 ' are not compatible. Please choose one.'
733 if mode == 'NLO' and options['reweightonly']:
734 raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'
735
736
738 """check the validity of the line. args is MODE
739 MODE being FO or MC. If no mode is passed, MC is used"""
740
741
742
743 if options['force']:
744 self.force = True
745
746 if not args:
747 args.append('MC')
748 return
749
750 if len(args) > 1:
751 self.help_compile()
752 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
753
754 elif len(args) == 1:
755 if not args[0] in ['MC', 'FO']:
756 raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
757 mode = args[0]
758
766 """ The Series of help routine for the MadGraphCmd"""
767
769 """auto-completion for launch command"""
770
771 args = self.split_arg(line[0:begidx])
772 if len(args) == 1:
773
774 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
775 elif len(args) == 2 and line[begidx-1] == '@':
776 return self.list_completion(text,['LO','NLO'],line)
777 else:
778 opts = []
779 for opt in _launch_parser.option_list:
780 opts += opt._long_opts + opt._short_opts
781 return self.list_completion(text, opts, line)
782
784 "Complete the banner run command"
785 try:
786
787
788 args = self.split_arg(line[0:begidx], error=False)
789
790 if args[-1].endswith(os.path.sep):
791 return self.path_completion(text,
792 os.path.join('.',*[a for a in args \
793 if a.endswith(os.path.sep)]))
794
795
796 if len(args) > 1:
797
798 tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1]))
799 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]
800
801 if args[-1] != '--tag=':
802 tags = ['--tag=%s' % t for t in tags]
803 else:
804 return self.list_completion(text, tags)
805 return self.list_completion(text, tags +['--name=','-f'], line)
806
807
808 possibilites = {}
809
810 comp = self.path_completion(text, os.path.join('.',*[a for a in args \
811 if a.endswith(os.path.sep)]))
812 if os.path.sep in line:
813 return comp
814 else:
815 possibilites['Path from ./'] = comp
816
817 run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events'))
818 run_list = [n.rsplit('/',2)[1] for n in run_list]
819 possibilites['RUN Name'] = self.list_completion(text, run_list)
820
821 return self.deal_multiple_categories(possibilites, formatting)
822
823
824 except Exception, error:
825 print error
826
827
840
853
855 """auto-completion for generate_events command
856 call the compeltion for launch"""
857 self.complete_launch(text, line, begidx, endidx)
858
859
869
885
887 "Complete the pgs command"
888 args = self.split_arg(line[0:begidx], error=False)
889 if len(args) == 1:
890
891 data = misc.glob(pjoin('*', 'events_*.hep.gz'),
892 pjoin(self.me_dir, 'Events'))
893 data = [n.rsplit('/',2)[1] for n in data]
894 tmp1 = self.list_completion(text, data)
895 if not self.run_name:
896 return tmp1
897 else:
898 tmp2 = self.list_completion(text, self._run_options + ['-f',
899 '--tag=' ,'--no_default'], line)
900 return tmp1 + tmp2
901 else:
902 return self.list_completion(text, self._run_options + ['-f',
903 '--tag=','--no_default'], line)
904
905 complete_delphes = complete_pgs
906
909
911
912 to_control = [('order', 'Type of perturbative computation'),
913 ('fixed_order', 'No MC@[N]LO matching / event generation'),
914 ('shower', 'Shower the generated events'),
915 ('madspin', 'Decay onshell particles'),
916 ('reweight', 'Add weights to events for new hypp.'),
917 ('madanalysis','Run MadAnalysis5 on the events generated')]
918
919 quit_on = cmd.ControlSwitch.quit_on + ['onlyshower']
920
921 - def __init__(self, question, line_args=[], mode=None, force=False,
922 *args, **opt):
923
924 self.me_dir = opt['mother_interface'].me_dir
925 self.check_available_module(opt['mother_interface'].options)
926 self.last_mode = opt['mother_interface'].last_mode
927 self.proc_characteristics = opt['mother_interface'].proc_characteristics
928 self.run_card = banner_mod.RunCard(pjoin(self.me_dir,'Cards', 'run_card.dat'))
929 super(AskRunNLO,self).__init__(self.to_control, opt['mother_interface'],
930 *args, **opt)
931
932 @property
934
935 out = super(AskRunNLO, self).answer
936 if out['shower'] == 'HERWIG7':
937 out['shower'] = 'HERWIGPP'
938
939 if out['shower'] not in self.get_allowed('shower') or out['shower'] =='OFF':
940 out['runshower'] = False
941 else:
942 out['runshower'] = True
943 return out
944
945
947
948 self.available_module = set()
949 if options['madanalysis5_path']:
950 self.available_module.add('MA5')
951 if not aMCatNLO or ('mg5_path' in options and options['mg5_path']):
952
953 self.available_module.add('MadSpin')
954 if misc.has_f2py() or options['f2py_compiler']:
955 self.available_module.add('reweight')
956 if options['pythia8_path']:
957 self.available_module.add('PY8')
958 if options['hwpp_path'] and options['thepeg_path'] and options['hepmc_path']:
959 self.available_module.add('HW7')
960
961 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
962 if os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))):
963 self.available_module.add('StdHEP')
964
965
966
968 """ function called if the user type lo=value. or lo (then value is None)"""
969
970 if value is None:
971 self.switch['order'] = 'LO'
972 self.switch['fixed_order'] = 'ON'
973 self.set_switch('shower', 'OFF')
974 else:
975 logger.warning('Invalid command: lo=%s' % value)
976
978 if value is None:
979 self.switch['order'] = 'NLO'
980 self.switch['fixed_order'] = 'ON'
981 self.set_switch('shower', 'OFF')
982 else:
983 logger.warning('Invalid command: nlo=%s' % value)
984
986 if value is None:
987 self.switch['order'] = 'NLO'
988 self.switch['fixed_order'] = 'OFF'
989 self.set_switch('shower', 'ON')
990 else:
991 logger.warning('Invalid command: aMC@NLO=%s' % value)
992
994 if value is None:
995 self.switch['order'] = 'LO'
996 self.switch['fixed_order'] = 'OFF'
997 self.set_switch('shower', 'ON')
998 else:
999 logger.warning('Invalid command: aMC@LO=%s' % value)
1000
1002 if value is None:
1003 self.switch['order'] = 'NLO'
1004 self.switch['fixed_order'] = 'OFF'
1005 self.set_switch('shower', 'OFF')
1006 else:
1007 logger.warning('Invalid command: noshower=%s' % value)
1008
1010 if value is None:
1011 self.switch['mode'] = 'onlyshower'
1012 self.switch['madspin'] = 'OFF'
1013 self.switch['reweight'] = 'OFF'
1014 else:
1015 logger.warning('Invalid command: onlyshower=%s' % value)
1016
1018 if value is None:
1019 self.switch['order'] = 'LO'
1020 self.switch['fixed_order'] = 'OFF'
1021 self.set_switch('shower', 'OFF')
1022 else:
1023 logger.warning('Invalid command: noshowerlo=%s' % value)
1024
1026 """ shortcut madanalysis5 -> madanalysis """
1027
1028 if value is None:
1029 return self.onecmd('madanalysis')
1030 else:
1031 self.set_switch('madanalysis', value)
1032
1033
1034
1036 return ["LO", "NLO"]
1037
1039
1040 if self.last_mode in ['LO', 'aMC@L0', 'noshowerLO']:
1041 self.switch['order'] = 'LO'
1042 self.switch['order'] = 'NLO'
1043
1046
1047
1048
1050 """ """
1051 if self.proc_characteristics['ninitial'] == 1:
1052 return ['ON']
1053 else:
1054 return ['ON', 'OFF']
1055
1057
1058 if self.last_mode in ['LO', 'NLO']:
1059 self.switch['fixed_order'] = 'ON'
1060 if self.proc_characteristics['ninitial'] == 1:
1061 self.switch['fixed_order'] = 'ON'
1062 else:
1063 self.switch['fixed_order'] = 'OFF'
1064
1066
1067 if switch_value in ['OFF']:
1068 return self.green % switch_value
1069 else:
1070 return self.red % switch_value
1071
1073
1074 if switch_value in ['ON']:
1075 return self.green % switch_value
1076 elif switch_value in self.get_allowed('shower'):
1077 return self.green % switch_value
1078 else:
1079 return self.red % switch_value
1080
1082 """ consistency_XX_YY(val_XX, val_YY)
1083 -> XX is the new key set by the user to a new value val_XX
1084 -> YY is another key set by the user.
1085 -> return value should be None or "replace_YY"
1086 """
1087
1088 if vfix == 'ON' and vshower != 'OFF' :
1089 return 'OFF'
1090 return None
1091
1092 consistency_fixed_order_madspin = consistency_fixed_order_shower
1093 consistency_fixed_order_reweight = consistency_fixed_order_shower
1094
1096
1097 if vfix == 'ON' and vma5 == 'ON' :
1098 return 'OFF'
1099 return None
1100
1101
1103 """ consistency_XX_YY(val_XX, val_YY)
1104 -> XX is the new key set by the user to a new value val_XX
1105 -> YY is another key set by the user.
1106 -> return value should be None or "replace_YY"
1107 """
1108
1109 if vshower != 'OFF' and vfix == 'ON':
1110 return 'OFF'
1111 return None
1112
1113 consistency_madspin_fixed_order = consistency_shower_fixed_order
1114 consistency_reweight_fixed_order = consistency_shower_fixed_order
1115 consistency_madanalysis_fixed_order = consistency_shower_fixed_order
1116
1117
1118
1119
1120
1122 """ """
1123
1124 if hasattr(self, 'allowed_shower'):
1125 return self.allowed_shower
1126
1127 if not misc.which('bc'):
1128 return ['OFF']
1129
1130 if self.proc_characteristics['ninitial'] == 1:
1131 self.allowed_shower = ['OFF']
1132 return ['OFF']
1133 else:
1134 if 'StdHEP' in self.available_module:
1135 allowed = ['HERWIG6','OFF', 'PYTHIA6Q', 'PYTHIA6PT', ]
1136 else:
1137 allowed = ['OFF']
1138 if 'PY8' in self.available_module:
1139 allowed.append('PYTHIA8')
1140 if 'HW7' in self.available_module:
1141 allowed.append('HERWIGPP')
1142
1143
1144 self.allowed_shower = allowed
1145
1146 return allowed
1147
1149 """ """
1150
1151 if value.upper() in self.get_allowed_shower():
1152 return True
1153 if value.upper() in ['PYTHIA8', 'HERWIGPP']:
1154 return True
1155 if value.upper() == 'ON':
1156 return self.run_card['parton_shower']
1157 if value.upper() in ['P8','PY8','PYTHIA_8']:
1158 return 'PYTHIA8'
1159 if value.upper() in ['PY6','P6','PY6PT', 'PYTHIA_6', 'PYTHIA_6PT','PYTHIA6PT','PYTHIA6_PT']:
1160 return 'PYTHIA6PT'
1161 if value.upper() in ['PY6Q', 'PYTHIA_6Q','PYTHIA6Q', 'PYTHIA6_Q']:
1162 return 'PYTHIA6Q'
1163 if value.upper() in ['HW7', 'HERWIG7']:
1164 return 'HERWIG7'
1165 if value.upper() in ['HW++', 'HWPP', 'HERWIG++']:
1166 return 'HERWIGPP'
1167 if value.upper() in ['HW6', 'HERWIG_6']:
1168 return 'HERWIG6'
1169
1171
1172 if self.last_mode in ['LO', 'NLO', 'noshower', 'noshowerLO']:
1173 self.switch['shower'] = 'OFF'
1174 return
1175
1176 if self.proc_characteristics['ninitial'] == 1:
1177 self.switch['shower'] = 'OFF'
1178 return
1179
1180 if not misc.which('bc'):
1181 logger.warning('bc command not available. Forbids to run the shower. please install it if you want to run the shower. (sudo apt-get install bc)')
1182 self.switch['shower'] = 'OFF'
1183 return
1184
1185 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
1186 self.switch['shower'] = self.run_card['parton_shower']
1187
1188 self.switch['fixed_order'] = "OFF"
1189 else:
1190 self.switch['shower'] = 'OFF'
1191
1193 """ MA5 only possible with (N)LO+PS if shower is run"""
1194
1195 if vshower == 'OFF' and vma5 == 'ON':
1196 return 'OFF'
1197 return None
1198
1200
1201 if vma5=='ON' and vshower == 'OFF':
1202 return 'ON'
1203 return None
1204
1206 """ adpat run_card according to this setup. return list of cmd to run"""
1207
1208 if value != 'OFF':
1209 return ['set parton_shower %s' % self.switch['shower']]
1210 return []
1211
1212
1213
1214
1216 """ """
1217
1218 if hasattr(self, 'allowed_madspin'):
1219 return self.allowed_madspin
1220
1221 self.allowed_madspin = []
1222
1223
1224 if 'MadSpin' not in self.available_module:
1225 return self.allowed_madspin
1226 if self.proc_characteristics['ninitial'] == 1:
1227 self.available_module.remove('MadSpin')
1228 self.allowed_madspin = ['OFF']
1229 return self.allowed_madspin
1230 else:
1231 self.allowed_madspin = ['OFF', 'ON', 'onshell']
1232 return self.allowed_madspin
1233
1235 """handle alias and valid option not present in get_allowed_madspin
1236 remember that this mode should always be OFF for 1>N. (ON not in allowed value)"""
1237
1238 if value.upper() in self.get_allowed_madspin():
1239 if value == value.upper():
1240 return True
1241 else:
1242 return value.upper()
1243 elif value.lower() in self.get_allowed_madspin():
1244 if value == value.lower():
1245 return True
1246 else:
1247 return value.lower()
1248
1249 if 'MadSpin' not in self.available_module or \
1250 'ON' not in self.get_allowed_madspin():
1251 return False
1252
1253 if value.lower() in ['madspin', 'full']:
1254 return 'full'
1255 elif value.lower() in ['none']:
1256 return 'none'
1257
1259
1260 if 'MadSpin' in self.available_module:
1261 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
1262 self.switch['madspin'] = 'ON'
1263 else:
1264 self.switch['madspin'] = 'OFF'
1265 else:
1266 self.switch['madspin'] = 'Not Avail.'
1267
1269 """set some command to run before allowing the user to modify the cards."""
1270
1271 if value == 'onshell':
1272 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode onshell"]
1273 elif value in ['full', 'madspin']:
1274 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode madspin"]
1275 elif value == 'none':
1276 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode none"]
1277 else:
1278 return []
1279
1280
1281
1282
1284 """set the valid (visible) options for reweight"""
1285
1286 if hasattr(self, 'allowed_reweight'):
1287 return getattr(self, 'allowed_reweight')
1288
1289 self.allowed_reweight = []
1290 if 'reweight' not in self.available_module:
1291 return self.allowed_reweight
1292 if self.proc_characteristics['ninitial'] == 1:
1293 self.available_module.remove('reweight')
1294 self.allowed_reweight.append('OFF')
1295 return self.allowed_reweight
1296 else:
1297 self.allowed_reweight = [ 'OFF', 'ON', 'NLO', 'NLO_TREE','LO']
1298 return self.allowed_reweight
1299
1301 """initialise the switch for reweight"""
1302
1303 if 'reweight' in self.available_module:
1304 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')):
1305 self.switch['reweight'] = 'ON'
1306 else:
1307 self.switch['reweight'] = 'OFF'
1308 else:
1309 self.switch['reweight'] = 'Not Avail.'
1310
1312 """ adpat run_card according to this setup. return list of cmd to run"""
1313
1314 if value == 'LO':
1315 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode LO"]
1316 elif value == 'NLO':
1317 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO",
1318 "set store_rwgt_info T"]
1319 elif value == 'NLO_TREE':
1320 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO_tree",
1321 "set store_rwgt_info T"]
1322 return []
1323
1324
1325
1326
1328
1329 if hasattr(self, 'allowed_madanalysis'):
1330 return self.allowed_madanalysis
1331
1332 self.allowed_madanalysis = []
1333
1334
1335 if 'MA5' not in self.available_module:
1336 return self.allowed_madanalysis
1337
1338 if self.proc_characteristics['ninitial'] == 1:
1339 self.available_module.remove('MA5')
1340 self.allowed_madanalysis = ['OFF']
1341 return self.allowed_madanalysis
1342 else:
1343 self.allowed_madanalysis = ['OFF', 'ON']
1344 return self.allowed_madanalysis
1345
1347 """initialise the switch for reweight"""
1348
1349 if 'MA5' not in self.available_module:
1350 self.switch['madanalysis'] = 'Not Avail.'
1351 elif os.path.exists(pjoin(self.me_dir,'Cards', 'madanalysis5_hadron_card.dat')):
1352 self.switch['madanalysis'] = 'ON'
1353 else:
1354 self.switch['madanalysis'] = 'OFF'
1355
1357 """check an entry is valid. return the valid entry in case of shortcut"""
1358
1359 if value.upper() in self.get_allowed('madanalysis'):
1360 return True
1361 value = value.lower()
1362 if value == 'hadron':
1363 return 'ON' if 'ON' in self.get_allowed_madanalysis5 else False
1364 else:
1365 return False
1366
1367
1368
1369
1370
1371 -class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
1372 """The command line processor of MadGraph"""
1373
1374
1375 true = ['T','.true.',True,'true']
1376
1377 _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
1378 _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
1379 _calculate_decay_options = ['-f', '--accuracy=0.']
1380 _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
1381 _plot_mode = ['all', 'parton','shower','pgs','delphes']
1382 _clean_mode = _plot_mode + ['channel', 'banner']
1383 _display_opts = ['run_name', 'options', 'variable']
1384
1385
1386 web = False
1387 cluster_mode = 0
1388 queue = 'madgraph'
1389 nb_core = None
1390 make_opts_var = {}
1391
1392 next_possibility = {
1393 'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
1394 'help generate_events'],
1395 'generate_events': ['generate_events [OPTIONS]', 'shower'],
1396 'launch': ['launch [OPTIONS]', 'shower'],
1397 'shower' : ['generate_events [OPTIONS]']
1398 }
1399
1400
1401
1402 - def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
1403 """ add information to the cmd """
1404
1405 self.start_time = 0
1406 CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
1407
1408
1409 self.mode = 'aMCatNLO'
1410 self.nb_core = 0
1411 self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))
1412
1413
1414 self.load_results_db()
1415 self.results.def_web_mode(self.web)
1416
1417 proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()
1418
1419 if not '[real=QCD]' in proc_card:
1420 check_compiler(self.options, block=True)
1421
1422
1423
1425 """ run the shower on a given parton level file """
1426 argss = self.split_arg(line)
1427 (options, argss) = _launch_parser.parse_args(argss)
1428
1429 options = options.__dict__
1430 options['reweightonly'] = False
1431 self.check_shower(argss, options)
1432 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
1433 self.ask_run_configuration('onlyshower', options)
1434 self.run_mcatnlo(evt_file, options)
1435
1436 self.update_status('', level='all', update_results=True)
1437
1438
1440 """Create the plot for a given run"""
1441
1442
1443 args = self.split_arg(line)
1444
1445 self.check_plot(args)
1446 logger.info('plot for run %s' % self.run_name)
1447
1448 if not self.force:
1449 self.ask_edit_cards([], args, plot=True)
1450
1451 if any([arg in ['parton'] for arg in args]):
1452 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
1453 if os.path.exists(filename+'.gz'):
1454 misc.gunzip(filename)
1455 if os.path.exists(filename):
1456 logger.info('Found events.lhe file for run %s' % self.run_name)
1457 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
1458 self.create_plot('parton')
1459 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
1460 misc.gzip(filename)
1461
1462 if any([arg in ['all','parton'] for arg in args]):
1463 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
1464 if os.path.exists(filename):
1465 logger.info('Found MADatNLO.top file for run %s' % \
1466 self.run_name)
1467 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html')
1468 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')
1469
1470 if not os.path.isdir(plot_dir):
1471 os.makedirs(plot_dir)
1472 top_file = pjoin(plot_dir, 'plots.top')
1473 files.cp(filename, top_file)
1474 madir = self.options['madanalysis_path']
1475 tag = self.run_card['run_tag']
1476 td = self.options['td_path']
1477 misc.call(['%s/plot' % self.dirbin, madir, td],
1478 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1479 stderr = subprocess.STDOUT,
1480 cwd=plot_dir)
1481
1482 misc.call(['%s/plot_page-pl' % self.dirbin,
1483 os.path.basename(plot_dir),
1484 'parton'],
1485 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1486 stderr = subprocess.STDOUT,
1487 cwd=pjoin(self.me_dir, 'HTML', self.run_name))
1488 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'),
1489 output)
1490
1491 os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))
1492
1493 if any([arg in ['all','shower'] for arg in args]):
1494 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name))
1495 if len(filenames) != 1:
1496 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
1497 if len(filenames) != 1:
1498 logger.info('No shower level file found for run %s' % \
1499 self.run_name)
1500 return
1501 filename = filenames[0]
1502 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1503
1504 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
1505 if aMCatNLO and not self.options['mg5_path']:
1506 raise "plotting NLO HEP file needs MG5 utilities"
1507
1508 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'),
1509 pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
1510 self.run_hep2lhe()
1511 else:
1512 filename = filenames[0]
1513 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1514
1515 self.create_plot('shower')
1516 lhe_file_name = filename.replace('.hep.gz', '.lhe')
1517 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'),
1518 lhe_file_name)
1519 misc.gzip(lhe_file_name)
1520
1521 if any([arg in ['all','pgs'] for arg in args]):
1522 filename = pjoin(self.me_dir, 'Events', self.run_name,
1523 '%s_pgs_events.lhco' % self.run_tag)
1524 if os.path.exists(filename+'.gz'):
1525 misc.gunzip(filename)
1526 if os.path.exists(filename):
1527 self.create_plot('PGS')
1528 misc.gzip(filename)
1529 else:
1530 logger.info('No valid files for pgs plot')
1531
1532 if any([arg in ['all','delphes'] for arg in args]):
1533 filename = pjoin(self.me_dir, 'Events', self.run_name,
1534 '%s_delphes_events.lhco' % self.run_tag)
1535 if os.path.exists(filename+'.gz'):
1536 misc.gunzip(filename)
1537 if os.path.exists(filename):
1538
1539 self.create_plot('Delphes')
1540
1541 misc.gzip(filename)
1542 else:
1543 logger.info('No valid files for delphes plot')
1544
1545
1546
1548 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
1549 this function wraps the do_launch one"""
1550
1551 self.start_time = time.time()
1552 argss = self.split_arg(line)
1553
1554 (options, argss) = _calculate_xsect_parser.parse_args(argss)
1555 options = options.__dict__
1556 options['reweightonly'] = False
1557 options['parton'] = True
1558 self.check_calculate_xsect(argss, options)
1559 self.do_launch(line, options, argss)
1560
1561
1563 """Make a run from the banner file"""
1564
1565 args = self.split_arg(line)
1566
1567 self.check_banner_run(args)
1568
1569
1570 for name in ['shower_card.dat', 'madspin_card.dat']:
1571 try:
1572 os.remove(pjoin(self.me_dir, 'Cards', name))
1573 except Exception:
1574 pass
1575
1576 banner_mod.split_banner(args[0], self.me_dir, proc_card=False)
1577
1578
1579 if not self.force:
1580 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
1581 if ans == 'n':
1582 self.force = True
1583
1584
1585 if self.force:
1586 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
1587 banner = banner_mod.Banner(args[0])
1588 for line in banner['run_settings']:
1589 if '=' in line:
1590 mode, value = [t.strip() for t in line.split('=')]
1591 mode_status[mode] = value
1592 else:
1593 mode_status = {}
1594
1595
1596 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
1597 switch=mode_status)
1598
1599
1601 """Main commands: generate events
1602 this function just wraps the do_launch one"""
1603 self.do_launch(line)
1604
1605
1606
1608 """Advanced commands: this is for creating the correct run_card.inc from the nlo format"""
1609
1610 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
1611
1612
1613
1614
1615
1616 if mode in ['LO', 'NLO']:
1617 name = 'fo_lhe_weight_ratio'
1618 FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir,'Cards', 'FO_analyse_card.dat'))
1619 if name in FO_card:
1620 self.run_card.set(name, FO_card[name], user=False)
1621 name = 'fo_lhe_postprocessing'
1622 if name in FO_card:
1623 self.run_card.set(name, FO_card[name], user=False)
1624
1625 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1626
1627
1629 """assign all configuration variable from file
1630 loop over the different config file if config_file not define """
1631 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1632
1633
    def do_launch(self, line, options={}, argss=[], switch={}):
        """Main commands: launch the full chain
        options and args are relevant if the function is called from other
        functions, such as generate_events or calculate_xsect
        mode gives the list of switch needed for the computation (usefull for banner_run)
        """
        # NOTE(review): mutable default arguments (options={}, argss=[],
        # switch={}) are shared across calls, and argss is mutated below
        # (argss[0] = mode) during param_card scans -- confirm intent before
        # changing.

        # called straight from the command line: parse 'line' here; otherwise
        # 'options'/'argss' arrive pre-parsed from a wrapper command
        if not argss and not options:
            self.start_time = time.time()
            argss = self.split_arg(line)
            # check argument validity and normalise the arguments
            (options, argss) = _launch_parser.parse_args(argss)
            options = options.__dict__
            self.check_launch(argss, options)

        # set the run name; wipe any pre-existing results under that name
        if 'run_name' in options.keys() and options['run_name']:
            self.run_name = options['run_name']
            # if a directory with this run_name already exists, remove it
            # and warn the user
            if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
                logger.warning('Removing old run information in \n'+
                                pjoin(self.me_dir, 'Events', self.run_name))
                files.rm(pjoin(self.me_dir, 'Events', self.run_name))
                self.results.delete_run(self.run_name)
        else:
            self.run_name = ''  # will be determined later

        # translate command-line flags into the cluster mode
        if options['multicore']:
            self.cluster_mode = 2
        elif options['cluster']:
            self.cluster_mode = 1

        # determine the run mode; ask the user unless a switch dict was
        # supplied (banner_run path)
        if not switch:
            mode = argss[0]
            # fixed-order modes imply parton-level only
            if mode in ['LO', 'NLO']:
                options['parton'] = True
            mode = self.ask_run_configuration(mode, options)
        else:
            mode = self.ask_run_configuration('auto', options, switch)

        self.results.add_detail('run_mode', mode)

        self.update_status('Starting run', level=None, update_results=True)

        # open the results web page once, then disable automatic opening
        if self.options['automatic_html_opening']:
            misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
            self.options['automatic_html_opening'] = False

        # strip any '+...' suffix from the mode before compiling/running
        if '+' in mode:
            mode = mode.split('+')[0]
        self.compile(mode, options)
        evt_file = self.run(mode, options)

        # grids-only set-up: no events were generated, nothing more to do
        if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
            logger.info('No event file generated: grids have been set-up with a '\
                        'relative precision of %s' % self.run_card['req_acc'])
            return

        # event-producing modes: run the post-processing chain on the LHE file
        if not mode in ['LO', 'NLO']:
            assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))

            if self.run_card['systematics_program'] == 'systematics':
                self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments'])))

            # reweighting / decay are no-ops unless requested in the cards
            self.exec_cmd('reweight -from_cards', postcmd=False)
            self.exec_cmd('decay_events -from_cards', postcmd=False)
            evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')

        # run the shower (plus hadron-level MadAnalysis5) unless a
        # parton-level-only mode/option was chosen
        if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
                                                  and not options['parton']:
            self.run_mcatnlo(evt_file, options)
            self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False)
        elif mode == 'noshower':
            logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please, shower the Les Houches events before using them for physics analyses.""")

        self.update_status('', level='all', update_results=True)
        # FxFx merging (ickkw==3) requires extra manual event removal unless
        # PYTHIA8 showers the aMC@NLO events
        if self.run_card['ickkw'] == 3 and \
           (mode in ['noshower'] or \
            (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
            logger.warning("""You are running with FxFx merging enabled.
To be able to merge samples of various multiplicities without double counting,
you have to remove some events after showering 'by hand'.
Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")

        self.store_result()

        # param_card scan: loop over the remaining cards, re-launching the
        # whole chain for each scan point
        if self.param_card_iterator:
            cpath = pjoin(self.me_dir,'Cards','param_card.dat')
            param_card_iterator = self.param_card_iterator
            self.param_card_iterator = []  # avoid an infinite recursion of scans
            param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                            error=self.results.current['error'],
                                            param_card_path=cpath)
            orig_name = self.run_name

            # suppress per-point desktop notifications while scanning
            with misc.TMP_variable(self, 'allow_notification_center', False):
                for i,card in enumerate(param_card_iterator):
                    card.write(cpath)
                    self.check_param_card(cpath, dependent=True)
                    if not options['force']:
                        options['force'] = True
                    if options['run_name']:
                        options['run_name'] = '%s_%s' % (orig_name, i+1)
                    if not argss:
                        argss = [mode, "-f"]
                    elif argss[0] == "auto":
                        argss[0] = mode
                    self.do_launch("", options=options, argss=argss, switch=switch)
                    # record the cross-section for this scan point
                    param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                                    error=self.results.current['error'],
                                                    param_card_path=cpath)
            # restore the original param_card and write the scan summary
            param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
            name = misc.get_scan_name(orig_name, self.run_name)
            path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
            logger.info("write all cross-section results in %s" % path, '$MG:BOLD')
            param_card_iterator.write_summary(path)

        if self.allow_notification_center:
            misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
                              '%s: %s +- %s ' % (self.results.current['run_name'],
                                                 self.results.current['cross'],
                                                 self.results.current['error']))
1763
1764
1765
1767 """Advanced commands: just compile the executables """
1768 argss = self.split_arg(line)
1769
1770 (options, argss) = _compile_parser.parse_args(argss)
1771 options = options.__dict__
1772 options['reweightonly'] = False
1773 options['nocompile'] = False
1774 self.check_compile(argss, options)
1775
1776 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
1777 self.ask_run_configuration(mode, options)
1778 self.compile(mode, options)
1779
1780
1781 self.update_status('', level='all', update_results=True)
1782
1783
1785 """Update random number seed with the value from the run_card.
1786 If this is 0, update the number according to a fresh one"""
1787 iseed = self.run_card['iseed']
1788 if iseed == 0:
1789 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
1790 iseed = int(randinit.read()[2:]) + 1
1791 randinit.close()
1792 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
1793 randinit.write('r=%d' % iseed)
1794 randinit.close()
1795
1796
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created
        for the event-generating modes; the fixed-order modes ('LO'/'NLO')
        return None after producing the distributions."""
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # for applgrid mode 2 ('fill the grids'), force the only-generation
        # path so the existing grids are reused
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()

        # directory name patterns per run mode
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                        'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        # list of P* subprocess directories from subproc.mg
        p_dirs = [d for d in \
                  open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # clean up results of previous runs for these folders
        self.clean_previous_results(options,p_dirs,folder_names[mode])

        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']

        # reweight-only: skip integration/generation entirely
        if options['reweightonly']:
            event_norm=self.run_card['event_norm']
            nevents=self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        if mode in ['LO', 'NLO']:
            # ---- fixed-order branch ----
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # distribute the applgrid grids before integrating, if requested
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options,mode_dict[mode],p_dirs)

            # build the list of job dictionaries and set-up their directories
            integration_step=-1
            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
            self.prepare_directories(jobs_to_run,mode)

            # iterate: run all jobs, collect results, refine until the
            # requested accuracy leaves nothing more to run
            while True:
                integration_step=integration_step+1
                self.run_all_jobs(jobs_to_run,integration_step)
                self.collect_log_files(jobs_to_run,integration_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,integration_step,mode,mode_dict[mode])
                if not jobs_to_run:
                    # no more jobs to run: the integration has converged
                    break
            # finalise: combine results and update the status
            self.finalise_run_FO(folder_names[mode],jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
            # ---- event-generation branch ----
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            # validate the nevents/req_acc combination from the run_card
            if nevents == 0 and req_acc < 0 :
                raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                    'of events, because 0 events requested. Please set '\
                                    'the "req_acc" parameter in the run_card to a value '\
                                    'between 0 and 1')
            elif req_acc >1 or req_acc == 0 :
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                    'be between larger than 0 and smaller than 1, '\
                                    'or set to -1 for automatic determination. Current '\
                                    'value is %f' % req_acc)
            # tighten the automatic accuracy for very large samples
            elif req_acc < 0 and nevents > 1000000 :
                req_acc=0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. '\
                                    'Please use one of the following: %s' \
                                    % (shower, ', '.join(shower_list)))

            # PYTHIA6PT cannot handle final-state radiation
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower','noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                    req_acc,mode_dict[mode],1,mode,fixed_order=False)

            # for only-generation, refresh results from the previous steps
            # instead of re-creating the directories
            if options['only_generation']:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run,mode,fixed_order=False)

            # main loop over the MINT steps: grid set-up, upper envelope,
            # event generation
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
                self.collect_log_files(jobs_to_run,mint_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                         jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)
                # if no events are requested, stop after the upper envelope
                if mint_step+1==2 and nevents==0:
                    self.print_summary(options,2,mode)
                    return

            # sanity check on the generated event files
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                # cluster run: leave some time for the event files to be
                # transferred back from the nodes
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm=self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1938
1939 - def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
1940 integration_step,mode,fixed_order=True):
1941 """Creates a list of dictionaries with all the jobs to be run"""
1942 jobs_to_run=[]
1943 if not options['only_generation']:
1944
1945
1946
1947 npoints = self.run_card['npoints_FO_grid']
1948 niters = self.run_card['niters_FO_grid']
1949 for p_dir in p_dirs:
1950 try:
1951 with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
1952 channels=chan_file.readline().split()
1953 except IOError:
1954 logger.warning('No integration channels found for contribution %s' % p_dir)
1955 continue
1956 if fixed_order:
1957 lch=len(channels)
1958 maxchannels=20
1959 if self.run_card['iappl'] != 0: maxchannels=1
1960 njobs=(int(lch/maxchannels)+1 if lch%maxchannels!= 0 \
1961 else int(lch/maxchannels))
1962 for nj in range(1,njobs+1):
1963 job={}
1964 job['p_dir']=p_dir
1965 job['channel']=str(nj)
1966 job['nchans']=(int(lch/njobs)+1 if nj <= lch%njobs else int(lch/njobs))
1967 job['configs']=' '.join(channels[:job['nchans']])
1968 del channels[:job['nchans']]
1969 job['split']=0
1970 if req_acc == -1:
1971 job['accuracy']=0
1972 job['niters']=niters
1973 job['npoints']=npoints
1974 elif req_acc > 0:
1975 job['accuracy']=0.05
1976 job['niters']=6
1977 job['npoints']=-1
1978 else:
1979 raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
1980 'between 0 and 1 or set it equal to -1.')
1981 job['mint_mode']=0
1982 job['run_mode']=run_mode
1983 job['wgt_frac']=1.0
1984 job['wgt_mult']=1.0
1985 jobs_to_run.append(job)
1986 if channels:
1987 raise aMCatNLOError('channels is not empty %s' % channels)
1988 else:
1989 for channel in channels:
1990 job={}
1991 job['p_dir']=p_dir
1992 job['channel']=channel
1993 job['split']=0
1994 job['accuracy']=0.03
1995 job['niters']=12
1996 job['npoints']=-1
1997 job['mint_mode']=0
1998 job['run_mode']=run_mode
1999 job['wgt_frac']=1.0
2000 jobs_to_run.append(job)
2001 jobs_to_collect=copy.copy(jobs_to_run)
2002 else:
2003
2004 try:
2005 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'rb') as f:
2006 jobs_to_collect=pickle.load(f)
2007 for job in jobs_to_collect:
2008 job['dirname']=pjoin(self.me_dir,'SubProcesses',job['dirname'].rsplit('/SubProcesses/',1)[1])
2009 jobs_to_run=copy.copy(jobs_to_collect)
2010 except:
2011 raise aMCatNLOError('Cannot reconstruct saved job status in %s' % \
2012 pjoin(self.me_dir,'SubProcesses','job_status.pkl'))
2013
2014 if fixed_order:
2015 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
2016 jobs_to_collect,integration_step,mode,run_mode)
2017
2018 integration_step=1
2019 for job in jobs_to_run:
2020 while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
2021 integration_step=integration_step+1
2022 integration_step=integration_step-1
2023 else:
2024 self.append_the_results(jobs_to_collect,integration_step)
2025 return jobs_to_run,jobs_to_collect,integration_step
2026
2028 """Set-up the G* directories for running"""
2029 name_suffix={'born' :'B' , 'all':'F'}
2030 for job in jobs_to_run:
2031 if job['split'] == 0:
2032 if fixed_order :
2033 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
2034 job['run_mode']+'_G'+job['channel'])
2035 else:
2036 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
2037 'G'+name_suffix[job['run_mode']]+job['channel'])
2038 else:
2039 if fixed_order :
2040 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
2041 job['run_mode']+'_G'+job['channel']+'_'+str(job['split']))
2042 else:
2043 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
2044 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split']))
2045 job['dirname']=dirname
2046 if not os.path.isdir(dirname):
2047 os.makedirs(dirname)
2048 self.write_input_file(job,fixed_order)
2049
2050 if not fixed_order:
2051 if job['split'] != 0:
2052 for f in ['grid.MC_integer','mint_grids','res_1']:
2053 if not os.path.isfile(pjoin(job['dirname'],f)):
2054 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
2055 else:
2056 if job['split'] != 0:
2057 for f in ['grid.MC_integer','mint_grids']:
2058 files.cp(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
2059
2060
2095
2096
2097 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
2098 """Loops over the jobs_to_run and executes them using the function 'run_exe'"""
2099 if fixed_order:
2100 if integration_step == 0:
2101 self.update_status('Setting up grids', level=None)
2102 else:
2103 self.update_status('Refining results, step %i' % integration_step, level=None)
2104 self.ijob = 0
2105 name_suffix={'born' :'B', 'all':'F'}
2106 if fixed_order:
2107 run_type="Fixed order integration step %s" % integration_step
2108 else:
2109 run_type="MINT step %s" % integration_step
2110 self.njobs=len(jobs_to_run)
2111 for job in jobs_to_run:
2112 executable='ajob1'
2113 if fixed_order:
2114 arguments=[job['channel'],job['run_mode'], \
2115 str(job['split']),str(integration_step)]
2116 else:
2117 arguments=[job['channel'],name_suffix[job['run_mode']], \
2118 str(job['split']),str(integration_step)]
2119 self.run_exe(executable,arguments,run_type,
2120 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir']))
2121
2122 if self.cluster_mode == 2:
2123 time.sleep(1)
2124 self.wait_for_complete(run_type)
2125
2126
    def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
                            integration_step,mode,run_mode,fixed_order=True):
        """Collect the results, make HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list
        of the jobs that still need to be run, as well as the
        complete list of jobs that need to be collected to get the
        final answer.
        """
        # gather the results of the step just finished and write res.txt
        self.append_the_results(jobs_to_run,integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)
        # update the HTML results pages and the current cross-section/error
        if fixed_order:
            cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode],
                                                           jobs=jobs_to_collect)
        else:
            name_suffix={'born' :'B' , 'all':'F'}
            cross, error = self.make_make_all_html_results(folder_names=['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)
        # fixed order: merge results of previously split jobs
        if fixed_order:
            jobs_to_run=self.combine_split_order_run(jobs_to_run)
        # determine which jobs still need refinement for the next step
        jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)

        if fixed_order:
            # checkpoint the complete job list so a later run with
            # 'only_generation' can restore it (see create_jobs_to_run)
            with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
                pickle.dump(jobs_to_collect,f)

        if (not jobs_to_run_new) and fixed_order:
            # fixed-order run complete: print the final summary with
            # scale/PDF information and return
            scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
            return jobs_to_run_new,jobs_to_collect
        elif jobs_to_run_new:
            # intermediate summary: more refinement steps follow
            scale_pdf_info=[]
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
        else:
            # (N)LO+PS run finished generating: summary is printed later,
            # after event reweighting/collection
            scale_pdf_info=[]

        # prepare the next step
        if (not fixed_order) and integration_step+1 == 2 :
            # about to enter the event-generation MINT step: checkpoint the
            # job list, split jobs if needed and write the event bookkeeping
            with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
                pickle.dump(jobs_to_collect,f)

            jobs_to_run_new,jobs_to_collect_new= \
                self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            # fixed order with adaptive accuracy: split the remaining jobs
            # (not possible when filling applgrid grids, iappl != 0)
            if fixed_order and self.run_card['iappl'] == 0 \
                           and self.run_card['req_acc_FO'] > 0:
                jobs_to_run_new,jobs_to_collect= \
                    self.split_jobs_fixed_order(jobs_to_run_new,jobs_to_collect)
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            jobs_to_collect_new=jobs_to_collect
        return jobs_to_run_new,jobs_to_collect_new
2193
2194
        """writes the nevents_unweighted file in the SubProcesses directory.
        We also need to write the jobs that will generate 0 events,
        because that makes sure that the cross section from those channels
        is taken into account in the event weights (by collect_events.f).
        """
        content=[]
        for job in jobs:
            # Keep only the last two path components (P*/G*) — collect_events.f
            # resolves them relative to the SubProcesses directory.
            path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
            lhefile=pjoin(path,'events.lhe')
            # Columns: file, #events, ABS xsec contribution, weight fraction.
            content.append(' %s %d %9e %9e' % \
                (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac']))
        for job in jobs0events:
            # Zero-event channels still contribute their ABS cross section
            # (weight fraction is trivially 1).
            if job['nevents']==0:
                path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
                lhefile=pjoin(path,'events.lhe')
                content.append(' %s %d %9e %9e' % \
                    (lhefile.ljust(40),job['nevents'],job['resultABS'],1.))
        with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f:
            f.write('\n'.join(content)+'\n')
2215
2217 """write the nevts files in the SubProcesses/P*/G*/ directories"""
2218 for job in jobs:
2219 with open(pjoin(job['dirname'],'nevts'),'w') as f:
2220 if self.run_card['event_norm'].lower()=='bias':
2221 f.write('%i %f\n' % (job['nevents'],self.cross_sect_dict['xseca']))
2222 else:
2223 f.write('%i\n' % job['nevents'])
2224
        """Combines jobs and grids from split jobs that have been run"""
        # Group the split jobs that belong together: all jobs sharing the
        # same p_dir and channel form one group, anchored on the split==1 job.
        jobgroups_to_combine=[]
        jobs_to_run_new=[]
        for job in jobs_to_run:
            if job['split'] == 0:
                # Job was never split: keep it as-is.
                job['combined']=1
                jobs_to_run_new.append(job)
            elif job['split'] == 1:
                # First member of a split group: collect all its siblings.
                # NOTE: relies on Python-2 filter() returning a list.
                jobgroups_to_combine.append(filter(lambda j: j['p_dir'] == job['p_dir'] and \
                                                   j['channel'] == job['channel'], jobs_to_run))
            else:
                # Later members (split > 1) are picked up via their split==1 sibling.
                continue
        for job_group in jobgroups_to_combine:
            # Combine the mint_grids and MC-integer grids first, then the job info.
            self.combine_split_order_grids(job_group)
            jobs_to_run_new.append(self.combine_split_order_jobs(job_group))
        return jobs_to_run_new
2246
        """combine the jobs in job_group and return a single summed job"""
        # Start from a shallow copy of the first job of the group.
        sum_job=copy.copy(job_group[0])
        # Turn it back into a 'non-split' job: strip the trailing _<i> suffix.
        sum_job['dirname']=pjoin(sum_job['dirname'].rsplit('_',1)[0])
        sum_job['split']=0
        sum_job['wgt_mult']=1.0
        sum_job['combined']=len(job_group)
        # Quantities summed linearly across the group:
        keys=['niters_done','npoints_done','niters','npoints',\
              'result','resultABS','time_spend']
        # Quantities summed in quadrature (statistical errors):
        keys2=['error','errorABS']
        # Square the seed job's errors so they can be accumulated in quadrature.
        for key in keys2:
            sum_job[key]=math.pow(sum_job[key],2)
        for i,job in enumerate(job_group):
            if i==0 : continue  # the first job already seeds sum_job
            for key in keys:
                sum_job[key]+=job[key]
            for key in keys2:
                sum_job[key]+=math.pow(job[key],2)
        # Back from variances to errors.
        for key in keys2:
            sum_job[key]=math.sqrt(sum_job[key])
        sum_job['err_percABS'] = sum_job['errorABS']/sum_job['resultABS']*100.
        sum_job['err_perc'] = sum_job['error']/sum_job['result']*100.
        # Iterations are an average over the group (points were split, not repeated).
        sum_job['niters']=int(sum_job['niters_done']/len(job_group))
        sum_job['niters_done']=int(sum_job['niters_done']/len(job_group))
        return sum_job
2277
2278
        """Combines the mint_grids and MC-integer grids from the split order
        jobs (fixed order only).
        """
        files_mint_grids=[]
        files_MC_integer=[]
        location=None
        for job in job_group:
            files_mint_grids.append(pjoin(job['dirname'],'mint_grids'))
            files_MC_integer.append(pjoin(job['dirname'],'grid.MC_integer'))
            # All members of the group must live under the same parent
            # directory (dirname minus the _<split> suffix).
            if not location:
                location=pjoin(job['dirname'].rsplit('_',1)[0])
            else:
                if location != pjoin(job['dirname'].rsplit('_',1)[0]) :
                    raise aMCatNLOError('Not all jobs have the same location. '\
                                        +'Cannot combine them.')
        # Merge the two grid files row by row across the split jobs:
        # grid rows are averaged, while the cross-section summary row
        # (the one ending in an integer) gets special per-column handling.
        for j,fs in enumerate([files_mint_grids,files_MC_integer]):
            linesoffiles=[]
            for f in fs:
                with open(f,'r+') as fi:
                    linesoffiles.append(fi.readlines())
            to_write=[]
            for rowgrp in zip(*linesoffiles):
                try:
                    # If the last token of every row parses as an int, this is
                    # the results row; otherwise int() raises ValueError and we
                    # fall through to the plain averaging branch below.
                    is_integer = [[int(row.strip().split()[-1])] for row in rowgrp]
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = zip(*floatsbyfile)
                    special=[]
                    for i,floatgrp in enumerate(floatgrps):
                        if i==0:
                            # column 0: cross sections are summed
                            special.append(sum(floatgrp))
                        elif i==1:
                            # column 1: errors added in quadrature
                            special.append(math.sqrt(sum([err**2 for err in floatgrp])))
                        elif i==2:
                            # column 2: average number of PS points per iteration
                            special.append(int(sum(floatgrp)/len(floatgrp)))
                        elif i==3:
                            # column 3: total number of iterations
                            special.append(int(sum(floatgrp)))
                        elif i==4:
                            # column 4: averaged (same treatment as column 2)
                            special.append(int(sum(floatgrp)/len(floatgrp)))
                        else:
                            raise aMCatNLOError('"mint_grids" files not in correct format. '+\
                                                'Cannot combine them.')
                    to_write.append(" ".join(str(s) for s in special) + "\n")
                except ValueError:
                    # Ordinary grid row: simply average the columns across files.
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = zip(*floatsbyfile)
                    averages = [sum(floatgrp)/len(floatgrp) for floatgrp in floatgrps]
                    to_write.append(" ".join(str(a) for a in averages) + "\n")
            # Write the combined grid into the parent (un-split) directory.
            if j==0:
                with open(pjoin(location,'mint_grids'),'w') as f:
                    f.writelines(to_write)
            elif j==1:
                with open(pjoin(location,'grid.MC_integer'),'w') as f:
                    f.writelines(to_write)
2343
2344
        """Looks in the jobs_to_run to see if there is the need to split the
        jobs, depending on the expected time they take. Updates
        jobs_to_run and jobs_to_collect to replace the split-job by
        its splits.
        """
        # Number of job slots available (multicore, cluster or serial).
        if self.options['run_mode'] ==2:
            nb_submit = int(self.options['nb_core'])
        elif self.options['run_mode'] ==1:
            nb_submit = int(self.options['cluster_size'])
        else:
            nb_submit =1
        # Total expected running time, extrapolated from what was already done.
        time_expected=0
        for job in jobs_to_run:
            time_expected+=job['time_spend']*(job['niters']*job['npoints'])/ \
                           (job['niters_done']*job['npoints_done'])
        # Target time per job slot (the (1+len/2) factor leaves head-room;
        # note: true division is in effect via __future__ import).
        time_per_job=time_expected/(nb_submit*(1+len(jobs_to_run)/2))
        jobs_to_run_new=[]
        jobs_to_collect_new=copy.copy(jobs_to_collect)
        for job in jobs_to_run:
            # Remove this job (and any of its earlier splits) from the collect
            # list; it will be re-added below, possibly as several splits.
            for j in filter(lambda j: j['p_dir'] == job['p_dir'] and \
                            j['channel'] == job['channel'], jobs_to_collect_new):
                jobs_to_collect_new.remove(j)
            time_expected=job['time_spend']*(job['niters']*job['npoints'])/ \
                          (job['niters_done']*job['npoints_done'])
            # Split only jobs expected to run much longer than average
            # (and longer than twice their previous per-split time).
            if time_expected > max(2*job['time_spend']/job['combined'],time_per_job):
                # Number of splits: at least 2, at most the number of slots.
                nsplit=min(max(int(time_expected/max(2*job['time_spend']/job['combined'],time_per_job)),2),nb_submit)
                for i in range(1,nsplit+1):
                    job_new=copy.copy(job)
                    job_new['split']=i
                    job_new['wgt_mult']=1./float(nsplit)
                    job_new['dirname']=job['dirname']+'_%i' % job_new['split']
                    # Each split may be less accurate by sqrt(nsplit) (capped at 0.1).
                    job_new['accuracy']=min(job['accuracy']*math.sqrt(float(nsplit)),0.1)
                    if nsplit >= job['niters']:
                        # More splits than iterations: single iteration per split.
                        job_new['npoints']=int(job['npoints']*job['niters']/nsplit)
                        job_new['niters']=1
                    else:
                        job_new['npoints']=int(job['npoints']/nsplit)
                    jobs_to_collect_new.append(job_new)
                    jobs_to_run_new.append(job_new)
            else:
                # Fast enough: keep the job unsplit.
                jobs_to_collect_new.append(job)
                jobs_to_run_new.append(job)
        return jobs_to_run_new,jobs_to_collect_new
2400
2401
        """Looks in the jobs_to_run to see if there is the need to split the
        event generation step. Updates jobs_to_run and
        jobs_to_collect to replace the split-job by its
        splits. Also removes jobs that do not need any events.
        """
        # nevt_job (run card): maximum number of events per job; <=0 disables splitting.
        nevt_job=self.run_card['nevt_job']
        if nevt_job > 0:
            jobs_to_collect_new=copy.copy(jobs_to_collect)
            for job in jobs_to_run:
                nevents=job['nevents']
                if nevents == 0:
                    # No events needed from this channel: drop it.
                    jobs_to_collect_new.remove(job)
                elif nevents > nevt_job:
                    # Replace the job by nsplit splits of at most nevt_job events.
                    jobs_to_collect_new.remove(job)
                    if nevents % nevt_job != 0 :
                        nsplit=int(nevents/nevt_job)+1
                    else:
                        nsplit=int(nevents/nevt_job)
                    for i in range(1,nsplit+1):
                        job_new=copy.copy(job)
                        # Distribute the remainder over the first 'left_over' splits.
                        left_over=nevents % nsplit
                        if i <= left_over:
                            job_new['nevents']=int(nevents/nsplit)+1
                            job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
                        else:
                            job_new['nevents']=int(nevents/nsplit)
                            job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
                        job_new['split']=i
                        job_new['dirname']=job['dirname']+'_%i' % job_new['split']
                        jobs_to_collect_new.append(job_new)
            jobs_to_run_new=copy.copy(jobs_to_collect_new)
        else:
            # No splitting: just drop the channels that need zero events.
            jobs_to_run_new=copy.copy(jobs_to_collect)
            for job in jobs_to_collect:
                if job['nevents'] == 0:
                    jobs_to_run_new.remove(job)
            jobs_to_collect_new=copy.copy(jobs_to_run_new)
        return jobs_to_run_new,jobs_to_collect_new
2442
2443
        """
        For (N)LO+PS: determines the number of events and/or the required
        accuracy per job.
        For fixed order: determines which jobs need higher precision and
        returns those with the newly requested precision.
        """
        err=self.cross_sect_dict['errt']
        tot=self.cross_sect_dict['xsect']
        errABS=self.cross_sect_dict['erra']
        totABS=self.cross_sect_dict['xseca']
        jobs_new=[]
        if fixed_order:
            if req_acc == -1:
                # Fixed number of points/iterations taken from the run card.
                if step+1 == 1:
                    npoints = self.run_card['npoints_FO']
                    niters = self.run_card['niters_FO']
                    for job in jobs:
                        job['mint_mode']=-1
                        job['niters']=niters
                        job['npoints']=npoints
                        jobs_new.append(job)
                elif step+1 == 2:
                    # Second step with fixed statistics: nothing more to run.
                    pass
                elif step+1 > 2:
                    raise aMCatNLOError('Cannot determine number of iterations and PS points '+
                                        'for integration step %i' % step )
            elif ( req_acc > 0 and err/abs(tot) > req_acc*1.2 ) or step <= 0:
                # Accuracy not yet reached (20% margin) or still warming up:
                # distribute the required ABS accuracy over the channels.
                req_accABS=req_acc*abs(tot)/totABS
                for job in jobs:
                    job['mint_mode']=-1
                    # Per-channel target: larger channels need smaller relative error.
                    job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
                    # Skip channels that are already accurate enough, except on
                    # the very first step, and except when filling APPLgrids
                    # (iappl=2 must run every channel once).
                    if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
                       and not (step==-1 and self.run_card['iappl'] == 2):
                        continue
                    # Estimated number of iterations (at current statistics)
                    # needed to reach the target accuracy.
                    itmax_fl=job['niters_done']*math.pow(job['errorABS']/
                                                         (job['accuracy']*job['resultABS']),2)
                    if itmax_fl <= 4.0 :
                        job['niters']=max(int(round(itmax_fl)),2)
                        job['npoints']=job['npoints_done']*2
                    elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
                        job['niters']=4
                        job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
                    else:
                        # Cap runaway estimates, then balance iterations vs points.
                        if itmax_fl > 100.0 : itmax_fl=50.0
                        job['niters']=int(round(math.sqrt(itmax_fl)))
                        job['npoints']=int(round(job['npoints_done']*itmax_fl/
                                                 round(math.sqrt(itmax_fl))))*2
                    jobs_new.append(job)
            return jobs_new
        elif step+1 <= 2:
            # (N)LO+PS mode: MINT steps 1 (accuracy) and 2 (event numbers).
            nevents=self.run_card['nevents']
            # Inverse squared accuracy; with req_acc<0 use the number of events
            # as the effective statistics target.
            if req_acc<0:
                req_acc2_inv=nevents
            else:
                req_acc2_inv=1/(req_acc*req_acc)
            if step+1 == 1 or step+1 == 2 :
                # Required accuracy per channel, capped at 20%.
                for job in jobs:
                    accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
                    job['accuracy']=accuracy
            if step+1 == 2:
                # Unweight: draw the events one at a time, each channel picked
                # with probability proportional to its ABS cross section.
                # Seed from the randinit file for reproducibility.
                r=self.get_randinit_seed()
                random.seed(r)
                totevts=nevents
                for job in jobs:
                    job['nevents'] = 0
                while totevts :
                    target = random.random() * totABS
                    crosssum = 0.
                    i = 0
                    while i<len(jobs) and crosssum < target:
                        job = jobs[i]
                        crosssum += job['resultABS']
                        i += 1
                    totevts -= 1
                    i -= 1
                    jobs[i]['nevents'] += 1
            for job in jobs:
                job['mint_mode']=step+1
            return jobs
        else:
            # (N)LO+PS beyond step 2: nothing left to run.
            return []
2535
2536
2538 """ Get the random number seed from the randinit file """
2539 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit:
2540
2541 iseed = int(randinit.read()[2:])
2542 return iseed
2543
2544
        """Appends the results for each of the jobs in the job list"""
        error_found=False
        for job in jobs:
            try:
                if integration_step >= 0 :
                    # Results of a specific integration step.
                    with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file:
                        results=res_file.readline().split()
                else:
                    # Negative step: read the final results file instead
                    # (used when restarting with 'only_generation').
                    with open(pjoin(job['dirname'],'res.dat')) as res_file:
                        results=res_file.readline().split()
            except IOError:
                # Keep scanning all jobs so the final error message lists
                # every directory that failed, not just the first one.
                if not error_found:
                    error_found=True
                    error_log=[]
                error_log.append(pjoin(job['dirname'],'log.txt'))
                continue
            # File format: ABSxsec, ABSerr, xsec, err, niters, npoints, time.
            job['resultABS']=float(results[0])
            job['errorABS']=float(results[1])
            job['result']=float(results[2])
            job['error']=float(results[3])
            job['niters_done']=int(results[4])
            job['npoints_done']=int(results[5])
            job['time_spend']=float(results[6])
            job['err_percABS'] = job['errorABS']/job['resultABS']*100.
            job['err_perc'] = job['error']/job['result']*100.
        if error_found:
            raise aMCatNLOError('An error occurred during the collection of results.\n' +
                                'Please check the .log files inside the directories which failed:\n' +
                                '\n'.join(error_log)+'\n')
2577
2578
2579
        """writes the res.txt files in the SubProcess dir"""
        # Sort channels by decreasing absolute error so the worst offenders
        # appear first in the summary.
        jobs.sort(key = lambda job: -job['errorABS'])
        content=[]
        content.append('\n\nCross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
        content.append('\n\nABS cross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
        # Totals: cross sections weighted by wgt_frac, errors in quadrature.
        totABS=0
        errABS=0
        tot=0
        err=0
        for job in jobs:
            totABS+= job['resultABS']*job['wgt_frac']
            errABS+= math.pow(job['errorABS'],2)*job['wgt_frac']
            tot+= job['result']*job['wgt_frac']
            err+= math.pow(job['error'],2)*job['wgt_frac']
        if jobs:
            content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
                           (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
                            tot, math.sqrt(err), math.sqrt(err)/tot *100.))
        with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file:
            res_file.write('\n'.join(content))
        randinit=self.get_randinit_seed()
        # Return the summary used by the rest of the run (print_summary etc.).
        return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\
                'erra':math.sqrt(errABS),'randinit':randinit}
2608
2609
2611 """read the scale_pdf_dependence.dat files and collects there results"""
2612 scale_pdf_info=[]
2613 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
2614 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
2615 evt_files=[]
2616 evt_wghts=[]
2617 for job in jobs:
2618 evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat'))
2619 evt_wghts.append(job['wgt_frac'])
2620 scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts)
2621 return scale_pdf_info
2622
2623
        """combines the plots and puts then in the Events/run* directory"""
        devnull = open(os.devnull, 'w')
        # Dispatch on the analysis format requested in the FO_analyse card.
        if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
            topfiles = []
            for job in jobs:
                # A job's dirname may already point at the .top file itself.
                if job['dirname'].endswith('.top'):
                    topfiles.append(job['dirname'])
                else:
                    topfiles.append(pjoin(job['dirname'],'MADatNLO.top'))
            misc.call(['./combine_plots_FO.sh'] + topfiles, \
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the TopDrawer file with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
            out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO')
            self.combine_plots_HwU(jobs,out)
            # gnuplot is optional: plotting failure must not kill the run.
            try:
                misc.call(['gnuplot','MADatNLO.gnuplot'],\
                          stdout=devnull,stderr=devnull,\
                          cwd=pjoin(self.me_dir, 'Events', self.run_name))
            except Exception:
                pass
            logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'root':
            rootfiles = []
            for job in jobs:
                if job['dirname'].endswith('.root'):
                    rootfiles.append(job['dirname'])
                else:
                    rootfiles.append(pjoin(job['dirname'],'MADatNLO.root'))
            misc.call(['./combine_root.sh'] + folder_name + rootfiles, \
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the ROOT file with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'lhe':
            self.combine_FO_lhe(jobs)
            logger.info('The results of this run and the LHE File (to be used for plotting only)' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        else:
            # No (known) analysis format: only the res*.txt files are kept.
            logger.info('The results of this run' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2674
        """combine the various lhe file generated in each directory.
        They are two steps:
        1) banner
        2) reweight each sample by the factor written at the end of each file
        3) concatenate each of the new files (gzip those).
        """
        logger.info('Combining lhe events for plotting analysis')
        start = time.time()
        self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']]
        output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        if os.path.exists(output):
            os.remove(output)
        # 1) Banner: take the <initrwgt> block from the header written by the
        #    first job (the offset 10 is len('<initrwgt>')).
        text = open(pjoin(jobs[0]['dirname'],'header.txt'),'r').read()
        i1, i2 = text.find('<initrwgt>'),text.find('</initrwgt>')
        self.banner['initrwgt'] = text[10+i1:i2]
        # Total cross section and error for the <init> block.
        # NOTE(review): errors are summed linearly before the sqrt
        # (sqrt(sum(err))), not in quadrature (sqrt(sum(err**2))) — confirm
        # this is intended.
        cross = sum(j['result'] for j in jobs)
        error = math.sqrt(sum(j['error'] for j in jobs))
        self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross)
        self.banner.write(output[:-3], close_tag=False)
        misc.gzip(output[:-3])
        # 2)+3) Rescale and concatenate the events, appending to the gzipped
        #       banner file. Behaviour is steered by run-card options.
        fsock = lhe_parser.EventFile(output,'a')
        if 'nogrouping' in self.run_card['fo_lhe_postprocessing']:
            fsock.eventgroup = False
        else:
            fsock.eventgroup = True
        if 'norandom' in self.run_card['fo_lhe_postprocessing']:
            # Sequential mode: process each job's file one after the other.
            for job in jobs:
                dirname = job['dirname']
                # The last line of each events.lhe holds: nb_event sumwgt cross.
                lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                # Rescale the weights so they sum to the channel cross section.
                ratio = cross/sumwgt
                lhe = lhe_parser.EventFile(pjoin(dirname,'events.lhe'))
                lhe.eventgroup = True
                for eventsgroup in lhe:
                    neweventsgroup = []
                    for i,event in enumerate(eventsgroup):
                        event.rescale_weights(ratio)
                        # Merge identical consecutive events by summing weights
                        # (unless 'noidentification' is requested).
                        if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                           and event == neweventsgroup[-1]:
                            neweventsgroup[-1].wgt += event.wgt
                            for key in event.reweight_data:
                                neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                        else:
                            neweventsgroup.append(event)
                    fsock.write_events(neweventsgroup)
                lhe.close()
                os.remove(pjoin(dirname,'events.lhe'))
        else:
            # Random-mixing mode: read all files through a MultiEventFile so
            # events from different channels are interleaved.
            lhe = []
            lenlhe = []
            # NOTE(review): misc.sprint is a debug print — looks like a
            # leftover development aid.
            misc.sprint('need to combine %s event file' % len(jobs))
            globallhe = lhe_parser.MultiEventFile()
            globallhe.eventgroup = True
            for job in jobs:
                dirname = job['dirname']
                lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                lastlhe = globallhe.add(pjoin(dirname,'events.lhe'),cross, 0, cross,
                                        nb_event=int(nb_event), scale=cross/sumwgt)
            for eventsgroup in globallhe:
                neweventsgroup = []
                for i,event in enumerate(eventsgroup):
                    # Each event carries the rescaling factor of its source file.
                    event.rescale_weights(event.sample_scale)
                    if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                       and event == neweventsgroup[-1]:
                        neweventsgroup[-1].wgt += event.wgt
                        for key in event.reweight_data:
                            neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                    else:
                        neweventsgroup.append(event)
                fsock.write_events(neweventsgroup)
            globallhe.close()
            fsock.write('</LesHouchesEvents>\n')
            fsock.close()
            misc.sprint('combining lhe file done in ', time.time()-start)
            for job in jobs:
                dirname = job['dirname']
                os.remove(pjoin(dirname,'events.lhe'))
        # NOTE(review): duplicate debug timing print (also emitted above in
        # the random-mixing branch).
        misc.sprint('combining lhe file done in ', time.time()-start)
2777
2778
2779
2780
2781
2782
        """Sums all the plots in the HwU format."""
        logger.debug('Combining HwU plots.')
        # Build the command line for the internal histograms.py combiner.
        command =  []
        command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py'))
        for job in jobs:
            # A job's dirname may already point at the .HwU file itself.
            if job['dirname'].endswith('.HwU'):
                command.append(job['dirname'])
            else:
                command.append(pjoin(job['dirname'],'MADatNLO.HwU'))
        command.append("--out="+out)
        command.append("--gnuplot")
        command.append("--band=[]")
        command.append("--lhapdf-config="+self.options['lhapdf'])
        if normalisation:
            # Per-file multiplicative normalisation factors.
            command.append("--multiply="+(','.join([str(n) for n in normalisation])))
        command.append("--sum")
        command.append("--keep_all_weights")
        command.append("--no_open")
        p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir)
        # Stream the combiner's output, echoing tagged lines to the console.
        while p.poll() is None:
            line = p.stdout.readline()
            if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']):
                print line[:-1]
            elif __debug__ and line:
                logger.debug(line[:-1])
2812
2813
        """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories"""
        logger.debug('Combining APPLgrids \n')
        # Locate the applgrid-combine executable next to applgrid-config.
        applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),
                       'applgrid-combine')
        all_jobs=[]
        for job in jobs:
            all_jobs.append(job['dirname'])
        ngrids=len(all_jobs)
        # One *_out.root file per observable in each directory.
        nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
        for obs in range(0,nobs):
            gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
            # Combine the grids from all channels for this observable.
            if self.run_card["iappl"] == 1:
                # First pass: produce the optimised starting grid.
                misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,
                           "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
            elif self.run_card["iappl"] == 2:
                # Second pass: fill the grids, scaled by the inverse
                # squared relative uncertainty.
                unc2_inv=pow(cross/error,2)
                # NOTE(review): 'unc2_inv_ngrids' is computed but never used;
                # '--weight' is passed 'unc2_inv' instead — confirm intended.
                unc2_inv_ngrids=pow(cross/error,2)*ngrids
                misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",
                           self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',
                           str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
                for job in all_jobs:
                    os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root"))
            else:
                raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
            # Remove the per-channel output grids once they are combined.
            for ggdir in gdir:
                os.remove(ggdir)
2843
2844
        """Distributes the APPLgrids ready to be filled by a second run of the code"""
        # If no --appl_start_grid was given, guess the most recent run that
        # produced starting grids (by file modification time).
        if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
            gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                               pjoin(self.me_dir,'Events'))
            time_stamps={}
            for root_file in gfiles:
                time_stamps[root_file]=os.path.getmtime(root_file)
            # Run name is the parent directory of the newest grid file.
            options['appl_start_grid']= \
                max(time_stamps.iterkeys(), key=(lambda key:
                                                 time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. '+\
                        'Guessing that start grid from run "%s" should be used.' \
                        % options['appl_start_grid'])
        if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # Sanity check: at least the observable-0 starting grid must exist.
            if not os.path.exists(pjoin(start_grid_dir,
                                        'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' % \
                                      pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
            else:
                all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
                           start_grid_dir) if name.endswith("_starting_grid.root")]
                nobs =len(all_grids)
                gstring=" ".join(all_grids)
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined.'+
                                  'Please provide this information.')
        # Copy each starting grid into every G-directory of every P-directory,
        # renamed to grid_obs_<n>_in.root so the fortran code can fill it.
        for pdir in p_dirs:
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                      "SubProcesses",pdir)) if file.startswith(mode+'_G') and
                      os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    # Observable index is the 3rd-to-last '_'-separated token.
                    obs=grid.split('_')[-3]
                    files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
                                        'grid_obs_'+obs+'_in.root'))
2889
2890
2891
2892
        """collect the log files and put them in a single, html-friendly file
        inside the Events/run_.../ directory"""
        log_file = pjoin(self.me_dir, 'Events', self.run_name,
                         'alllogs_%d.html' % integration_step)
        outfile = open(log_file, 'w')
        content = ''
        content += '<HTML><BODY>\n<font face="courier" size=2>'
        for job in jobs:
            # Anchor named after the channel directory (path relative to
            # SubProcesses) so HTML pages can deep-link into this file.
            log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step)
            content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(
                pjoin(self.me_dir,'SubProcesses'),''))
            # Red header line identifying the channel and step.
            content += '<font color="red">\n'
            content += '<br>LOG file for integration channel %s, %s <br>' % \
                       (os.path.dirname(log).replace(pjoin(self.me_dir,
                                                           'SubProcesses'), ''),
                        integration_step)
            content += '</font>\n'
            # Dump the raw log inside a <PRE> block; flush per job to keep
            # memory usage bounded for large runs.
            with open(log) as l:
                content += '<PRE>\n' + l.read() + '\n</PRE>'
            content +='<br>\n'
            outfile.write(content)
            content=''
        outfile.write('</font>\n</BODY></HTML>\n')
        outfile.close()
2924
2925
        """Combine the plots and put the res*.txt files in the Events/run.../ folder."""
        # Move the per-step result summaries next to the events.
        res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses'))
        for res_file in res_files:
            files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))
        # Combine the analysis plots (format chosen in the FO_analyse card).
        self.combine_plots_FO(folder_name,jobs)
        # If APPLgrid output was requested, combine the grids as well.
        if self.run_card['iappl'] != 0:
            cross=self.cross_sect_dict['xsect']
            error=self.cross_sect_dict['errt']
            self.applgrid_combine(cross,error,jobs)
2940
2941
        """setup the number of cores for multicore, and the cluster-type for cluster runs"""
        # cluster_mode 1: submit to a batch cluster.
        if self.cluster_mode == 1:
            cluster_name = self.options['cluster_type']
            try:
                self.cluster = cluster.from_name[cluster_name](**self.options)
            except KeyError:
                # Unknown built-in cluster type: look for a plugin that
                # provides it.
                cluster_class = misc.from_plugin_import(self.plugin_path,
                                                        'new_cluster', cluster_name,
                                                        info = 'cluster handling will be done with PLUGIN: %{plug}s' )
                if cluster_class:
                    self.cluster = cluster_class(**self.options)
        # cluster_mode 2: run on the local machine using several cores.
        if self.cluster_mode == 2:
            try:
                import multiprocessing
                if not self.nb_core:
                    try:
                        self.nb_core = int(self.options['nb_core'])
                    except TypeError:
                        # nb_core option unset (None): use all available cores.
                        self.nb_core = multiprocessing.cpu_count()
                logger.info('Using %d cores' % self.nb_core)
            except ImportError:
                # No multiprocessing module: fall back to a single core.
                self.nb_core = 1
                logger.warning('Impossible to detect the number of cores => Using One.\n'+
                               'Use set nb_core X in order to set this number and be able to'+
                               'run in multicore.')
            self.cluster = cluster.MultiCore(**self.options)
2973
2974
        """Clean previous results.
        o. If doing only the reweighting step, do not delete anything and return directlty.
        o. Always remove all the G*_* files (from split event generation).
        o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only."""
        if options['reweightonly']:
            return
        if not options['only_generation']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            # 'obj' is a glob-like pattern (e.g. 'G*'); obj[:-1] strips the
            # trailing '*' to get the literal prefix to match on.
            for obj in folder_name:
                # All G* (or born_G*/all_G*) entries — removed only on a
                # fresh run (see below).
                to_rm = [file for file in \
                         os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                         if file.startswith(obj[:-1]) and \
                         (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                          os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                # The split-generation leftovers (contain '_' but not '_G')
                # are always removed.
                to_always_rm = [file for file in \
                                os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                                if file.startswith(obj[:-1]) and
                                '_' in file and not '_G' in file and \
                                (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                                 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                if not options['only_generation']:
                    to_always_rm.extend(to_rm)
                if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
                    to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
        return
3007
3008
3009 - def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
3010 """print a summary of the results contained in self.cross_sect_dict.
3011 step corresponds to the mintMC step, if =2 (i.e. after event generation)
3012 some additional infos are printed"""
3013
3014 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
3015 process = ''
3016 for line in proc_card_lines:
3017 if line.startswith('generate') or line.startswith('add process'):
3018 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
3019 lpp = {0:'l', 1:'p', -1:'pbar'}
3020 if self.ninitial == 1:
3021 proc_info = '\n Process %s' % process[:-3]
3022 else:
3023 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
3024 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
3025 self.run_card['ebeam1'], self.run_card['ebeam2'])
3026
3027 if self.ninitial == 1:
3028 self.cross_sect_dict['unit']='GeV'
3029 self.cross_sect_dict['xsec_string']='(Partial) decay width'
3030 self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
3031 else:
3032 self.cross_sect_dict['unit']='pb'
3033 self.cross_sect_dict['xsec_string']='Total cross section'
3034 self.cross_sect_dict['axsec_string']='Total abs(cross section)'
3035 if self.run_card['event_norm'].lower()=='bias':
3036 self.cross_sect_dict['xsec_string']+=', incl. bias (DO NOT USE)'
3037
3038 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
3039 status = ['Determining the number of unweighted events per channel',
3040 'Updating the number of unweighted events per channel',
3041 'Summary:']
3042 computed='(computed from LHE events)'
3043 elif mode in ['NLO', 'LO']:
3044 status = ['Results after grid setup:','Current results:',
3045 'Final results and run summary:']
3046 computed='(computed from histogram information)'
3047
3048 if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
3049 message = status[step] + '\n\n Intermediate results:' + \
3050 ('\n Random seed: %(randinit)d' + \
3051 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
3052 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
3053 % self.cross_sect_dict
3054 elif mode in ['NLO','LO'] and not done:
3055 if step == 0:
3056 message = '\n ' + status[0] + \
3057 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
3058 self.cross_sect_dict
3059 else:
3060 message = '\n ' + status[1] + \
3061 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
3062 self.cross_sect_dict
3063
3064 else:
3065 message = '\n --------------------------------------------------------------'
3066 message = message + \
3067 '\n ' + status[2] + proc_info
3068 if mode not in ['LO', 'NLO']:
3069 message = message + \
3070 '\n Number of events generated: %s' % self.run_card['nevents']
3071 message = message + \
3072 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
3073 self.cross_sect_dict
3074 message = message + \
3075 '\n --------------------------------------------------------------'
3076 if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
3077 if scale_pdf_info[0]:
3078
3079 message = message + '\n Scale variation %s:' % computed
3080 for s in scale_pdf_info[0]:
3081 if s['unc']:
3082 if self.run_card['ickkw'] != -1:
3083 message = message + \
3084 ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
3085 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
3086 else:
3087 message = message + \
3088 ('\n Soft and hard scale dependence (added in quadrature): '\
3089 '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s
3090
3091 else:
3092 message = message + \
3093 ('\n Dynamical_scale_choice %(label)i: '\
3094 '\n %(cen)8.3e pb') % s
3095
3096 if scale_pdf_info[1]:
3097 message = message + '\n PDF variation %s:' % computed
3098 for p in scale_pdf_info[1]:
3099 if p['unc']=='none':
3100 message = message + \
3101 ('\n %(name)s (central value only): '\
3102 '\n %(cen)8.3e pb') % p
3103
3104 elif p['unc']=='unknown':
3105 message = message + \
3106 ('\n %(name)s (%(size)s members; combination method unknown): '\
3107 '\n %(cen)8.3e pb') % p
3108 else:
3109 message = message + \
3110 ('\n %(name)s (%(size)s members; using %(unc)s method): '\
3111 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
3112
3113 message = message + \
3114 '\n --------------------------------------------------------------'
3115
3116
3117 if (mode in ['NLO', 'LO'] and not done) or \
3118 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
3119 logger.info(message+'\n')
3120 return
3121
3122
3123
3124
3125
3126 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
3127 log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
3128 pjoin(self.me_dir, 'SubProcesses'))
3129 all_log_files = log_GV_files
3130 elif mode == 'NLO':
3131 log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
3132 pjoin(self.me_dir, 'SubProcesses'))
3133 all_log_files = log_GV_files
3134
3135 elif mode == 'LO':
3136 log_GV_files = ''
3137 all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
3138 pjoin(self.me_dir, 'SubProcesses'))
3139 else:
3140 raise aMCatNLOError, 'Running mode %s not supported.'%mode
3141
3142 try:
3143 message, debug_msg = \
3144 self.compile_advanced_stats(log_GV_files, all_log_files, message)
3145 except Exception as e:
3146 debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
3147 err_string = StringIO.StringIO()
3148 traceback.print_exc(limit=4, file=err_string)
3149 debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
3150 %err_string.getvalue()
3151
3152 logger.debug(debug_msg+'\n')
3153 logger.info(message+'\n')
3154
3155
3156 evt_path = pjoin(self.me_dir, 'Events', self.run_name)
3157 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
3158 open(pjoin(evt_path, '.full_summary.txt'),
3159 'w').write(message+'\n\n'+debug_msg+'\n')
3160
3161 self.archive_files(evt_path,mode)
3162
        """ Copies in the Events/Run_<xxx> directory relevant files characterizing
        the run."""

        # Cards and user-editable Fortran sources that fully characterize this
        # run; each is copied only if it actually exists (see isfile guard below).
        # NOTE(review): 'Subprocesses' here differs from the 'SubProcesses'
        # capitalization used elsewhere in this file for the same directory, so
        # setscales.f/cuts.f are probably never found and silently skipped on
        # case-sensitive filesystems -- TODO confirm against the me_dir layout.
        files_to_arxiv = [pjoin('Cards','param_card.dat'),
                          pjoin('Cards','MadLoopParams.dat'),
                          pjoin('Cards','FKS_params.dat'),
                          pjoin('Cards','run_card.dat'),
                          pjoin('Subprocesses','setscales.f'),
                          pjoin('Subprocesses','cuts.f')]

        # Fixed-order runs also archive the analysis card.
        if mode in ['NLO', 'LO']:
            files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))

        if not os.path.exists(pjoin(evt_path,'RunMaterial')):
            os.mkdir(pjoin(evt_path,'RunMaterial'))

        # Missing files are skipped silently: this is a best-effort archive.
        for path in files_to_arxiv:
            if os.path.isfile(pjoin(self.me_dir,path)):
                files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial'))
        # Bundle the collected material into a tarball and drop the staging dir.
        misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path)
        shutil.rmtree(pjoin(evt_path,'RunMaterial'))
3185
        """ This functions goes through the log files given in arguments and
        compiles statistics about MadLoop stability, virtual integration
        optimization and detection of potential error messages into a nice
        debug message to printed at the end of the run """

        def safe_float(str_float):
            # Tolerant float conversion: log files can contain malformed
            # numbers (e.g. Fortran overflow marks); return -1.0 instead of
            # raising so the statistics pass never aborts the run summary.
            try:
                return float(str_float)
            except ValueError:
                logger.debug('Could not convert the following float during'+
                    ' advanced statistics printout: %s'%str(str_float))
                return -1.0

        # Accumulators for the four statistic families gathered below.
        stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}}
        # Extracts the MINT step ID from a log file name like log_MINT<ID>.txt.
        mint_search = re.compile(r"MINT(?P<ID>\d*).txt")

        # ==== MadLoop stability statistics ====
        # Matches the multi-line "Satistics from MadLoop" summary block
        # (the typo 'Satistics' matches what MadLoop actually prints).
        UPS_stat_finder = re.compile(
             r"Satistics from MadLoop:.*"+\
             r"Total points tried\:\s+(?P<ntot>\d+).*"+\
             r"Stability unknown\:\s+(?P<nsun>\d+).*"+\
             r"Stable PS point\:\s+(?P<nsps>\d+).*"+\
             r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\
             r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\
             r"Double precision used\:\s+(?P<nddp>\d+).*"+\
             r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\
             r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\
             r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\
             r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL)

        # Human-readable names for the loop-reduction return units reported
        # by MadLoop ('#Unit <u> = <n>' lines).
        unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)',
                              1 : 'CutTools (double precision)',
                              2 : 'PJFry++',
                              3 : 'IREGI',
                              4 : 'Golem95',
                              5 : 'Samurai',
                              6 : 'Ninja (double precision)',
                              7 : 'COLLIER',
                              8 : 'Ninja (quadruple precision)',
                              9 : 'CutTools (quadruple precision)'}
        RetUnit_finder =re.compile(
                           r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)")

        # stats['UPS'][channel] is a list of 10 counters (ntot..n10) plus, at
        # index 10, a 10-slot list counting occurrences per reduction unit.
        for gv_log in log_GV_files:
            channel_name = '/'.join(gv_log.split('/')[-5:-1])
            log=open(gv_log,'r').read()
            UPS_stats = re.search(UPS_stat_finder,log)
            for retunit_stats in re.finditer(RetUnit_finder, log):
                if channel_name not in stats['UPS'].keys():
                    stats['UPS'][channel_name] = [0]*10+[[0]*10]
                stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \
                    += int(retunit_stats.group('n_occurences'))
            if not UPS_stats is None:
                # EAFP: accumulate if the channel entry exists, else create it.
                try:
                    stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot'))
                    stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun'))
                    stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps'))
                    stats['UPS'][channel_name][3] += int(UPS_stats.group('nups'))
                    stats['UPS'][channel_name][4] += int(UPS_stats.group('neps'))
                    stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp'))
                    stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp'))
                    stats['UPS'][channel_name][7] += int(UPS_stats.group('nini'))
                    stats['UPS'][channel_name][8] += int(UPS_stats.group('n100'))
                    stats['UPS'][channel_name][9] += int(UPS_stats.group('n10'))
                except KeyError:
                    stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')),
                        int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')),
                        int(UPS_stats.group('nups')),int(UPS_stats.group('neps')),
                        int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')),
                        int(UPS_stats.group('nini')),int(UPS_stats.group('n100')),
                        int(UPS_stats.group('n10')),[0]*10]
        debug_msg = ""
        if len(stats['UPS'].keys())>0:
            # Totals over all channels for each stability counter.
            nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0)
            nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0)
            nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0)
            nTotups = sum([chan[3] for chan in stats['UPS'].values()],0)
            nToteps = sum([chan[4] for chan in stats['UPS'].values()],0)
            nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0)
            nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0)
            nTotini = sum([chan[7] for chan in stats['UPS'].values()],0)
            nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0)
            nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0)
            nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \
                                                             for i in range(10)]
            # Fraction of exceptional (not rescued) points per channel;
            # chan here is an (name, counters) item tuple.
            UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \
                 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()]
            maxUPS = max(UPSfracs, key = lambda w: w[1])

            tmpStr = ""
            tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS
            tmpStr += '\n Stability unknown: %d'%nTotsun
            tmpStr += '\n Stable PS point: %d'%nTotsps
            tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups
            tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps
            tmpStr += '\n Only double precision used: %d'%nTotddp
            tmpStr += '\n Quadruple precision used: %d'%nTotqdp
            tmpStr += '\n Initialization phase-space points: %d'%nTotini
            tmpStr += '\n Reduction methods used:'
            # Only report reduction tools that were actually used, most used first.
            red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \
                           unit_code_meaning.keys() if nTot1[i]>0]
            for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True):
                tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n)
            if nTot100 != 0:
                debug_msg += '\n Unknown return code (100): %d'%nTot100
            if nTot10 != 0:
                debug_msg += '\n Unknown return code (10): %d'%nTot10
            nUnknownUnit = sum(nTot1[u] for u in range(10) if u \
                                                 not in unit_code_meaning.keys())
            if nUnknownUnit != 0:
                debug_msg += '\n Unknown return code (1): %d'\
                                                                   %nUnknownUnit

            # Promote the stability report to the user-facing message only when
            # the worst channel has more than 0.1% unstable points; otherwise
            # keep it in the debug output.
            if maxUPS[1]>0.001:
                message += tmpStr
                message += '\n Total number of unstable PS point detected:'+\
                    ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS)
                message += '\n Maximum fraction of UPS points in '+\
                  'channel %s (%4.2f%%)'%maxUPS
                message += '\n Please report this to the authors while '+\
                  'providing the file'
                message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir),
                                                         maxUPS[0],'UPS.log'))
            else:
                debug_msg += tmpStr

        # ==== Virtual integration optimization statistics ====
        # The three regexes below scrape the MINT integration logs; the
        # [\d\+-Eed\.]* character class accepts Fortran-style exponents (D/E).
        virt_tricks_finder = re.compile(
          r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\
          r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\
          r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\
          r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)")

        virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\
          "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)")

        channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)")

        channel_contr_list = {}
        for gv_log in log_GV_files:
            logfile=open(gv_log,'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(gv_log.split('/')[-3:-1])
            # Keep only the LAST match in the file (the loop body is 'pass';
            # vf_stats ends up bound to the final occurrence, if any).
            vf_stats = None
            for vf_stats in re.finditer(virt_frac_finder, log):
                pass
            if not vf_stats is None:
                v_frac = safe_float(vf_stats.group('v_frac'))
                v_average = safe_float(vf_stats.group('v_average'))
                # EAFP: first channel seen initializes the min/max/avg entries.
                try:
                    if v_frac < stats['virt_stats']['v_frac_min'][0]:
                        stats['virt_stats']['v_frac_min']=(v_frac,channel_name)
                    if v_frac > stats['virt_stats']['v_frac_max'][0]:
                        stats['virt_stats']['v_frac_max']=(v_frac,channel_name)
                    stats['virt_stats']['v_frac_avg'][0] += v_frac
                    stats['virt_stats']['v_frac_avg'][1] += 1
                except KeyError:
                    stats['virt_stats']['v_frac_min']=[v_frac,channel_name]
                    stats['virt_stats']['v_frac_max']=[v_frac,channel_name]
                    stats['virt_stats']['v_frac_avg']=[v_frac,1]

            # Record the largest 'Final result [ABS]' seen for this channel.
            ccontr_stats = None
            for ccontr_stats in re.finditer(channel_contr_finder, log):
                pass
            if not ccontr_stats is None:
                contrib = safe_float(ccontr_stats.group('v_contr'))
                try:
                    if contrib>channel_contr_list[channel_name]:
                        channel_contr_list[channel_name]=contrib
                except KeyError:
                    channel_contr_list[channel_name]=contrib

        # Average ABS contribution over all channels, used to filter out
        # negligible channels from the detailed statistics below.
        average_contrib = 0.0
        for value in channel_contr_list.values():
            average_contrib += value
        if len(channel_contr_list.values()) !=0:
            average_contrib = average_contrib / len(channel_contr_list.values())

        relevant_log_GV_files = []
        excluded_channels = set([])
        all_channels = set([])
        for log_file in log_GV_files:
            channel_name = '/'.join(log_file.split('/')[-3:-1])
            all_channels.add(channel_name)
            try:
                # Keep channels contributing more than 10% of the average;
                # channels with no recorded contribution are kept as well.
                if channel_contr_list[channel_name] > (0.1*average_contrib):
                    relevant_log_GV_files.append(log_file)
                else:
                    excluded_channels.add(channel_name)
            except KeyError:
                relevant_log_GV_files.append(log_file)

        # Second pass, restricted to the relevant channels.
        for gv_log in relevant_log_GV_files:
            logfile=open(gv_log,'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(gv_log.split('/')[-3:-1])

            vt_stats = None
            for vt_stats in re.finditer(virt_tricks_finder, log):
                pass
            if not vt_stats is None:
                vt_stats_group = vt_stats.groupdict()
                v_ratio = safe_float(vt_stats.group('v_ratio'))
                v_ratio_err = safe_float(vt_stats.group('v_ratio_err'))
                v_contr = safe_float(vt_stats.group('v_abs_contr'))
                v_contr_err = safe_float(vt_stats.group('v_abs_contr_err'))
                try:
                    if v_ratio < stats['virt_stats']['v_ratio_min'][0]:
                        stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name)
                    if v_ratio > stats['virt_stats']['v_ratio_max'][0]:
                        stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name)
                    # NOTE(review): the two comparisons below test v_ratio but
                    # store v_ratio_err, so v_ratio_err_min/max track the error
                    # of the extremal ratio rather than the extremal error --
                    # looks like a copy-paste slip; confirm intended semantics
                    # before changing (the printed label says 'Max. MC err.').
                    if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]:
                        stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name)
                    if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]:
                        stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name)
                    if v_contr < stats['virt_stats']['v_contr_min'][0]:
                        stats['virt_stats']['v_contr_min']=(v_contr,channel_name)
                    if v_contr > stats['virt_stats']['v_contr_max'][0]:
                        stats['virt_stats']['v_contr_max']=(v_contr,channel_name)
                    if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]:
                        stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name)
                    if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]:
                        stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name)
                except KeyError:
                    stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name]
                    stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name]
                    stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name]
                    stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name]
                    stats['virt_stats']['v_contr_min']=[v_contr,channel_name]
                    stats['virt_stats']['v_contr_max']=[v_contr,channel_name]
                    stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name]
                    stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name]

            vf_stats = None
            for vf_stats in re.finditer(virt_frac_finder, log):
                pass
            if not vf_stats is None:
                v_frac = safe_float(vf_stats.group('v_frac'))
                v_average = safe_float(vf_stats.group('v_average'))
                try:
                    if v_average < stats['virt_stats']['v_average_min'][0]:
                        stats['virt_stats']['v_average_min']=(v_average,channel_name)
                    if v_average > stats['virt_stats']['v_average_max'][0]:
                        stats['virt_stats']['v_average_max']=(v_average,channel_name)
                    stats['virt_stats']['v_average_avg'][0] += v_average
                    stats['virt_stats']['v_average_avg'][1] += 1
                except KeyError:
                    stats['virt_stats']['v_average_min']=[v_average,channel_name]
                    stats['virt_stats']['v_average_max']=[v_average,channel_name]
                    stats['virt_stats']['v_average_avg']=[v_average,1]

        # A single KeyError fallback covers all missing entries: if any
        # statistic was never filled the whole section is declared unavailable.
        try:
            debug_msg += '\n\n Statistics on virtual integration optimization : '

            debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\
              %tuple(stats['virt_stats']['v_frac_max'])
            debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\
              %tuple(stats['virt_stats']['v_frac_min'])
            debug_msg += '\n Average virt fraction computed %.3f'\
              %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1]))
            debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\
              (len(excluded_channels),len(all_channels))
            debug_msg += '\n Maximum virt ratio used %.2f (%s)'\
              %tuple(stats['virt_stats']['v_average_max'])
            debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\
              %tuple(stats['virt_stats']['v_ratio_max'])
            tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\
              %tuple(stats['virt_stats']['v_ratio_err_max'])
            debug_msg += tmpStr

            tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\
              %tuple(stats['virt_stats']['v_contr_err_max'])
            debug_msg += tmpStr

        except KeyError:
            debug_msg += '\n Could not find statistics on the integration optimization. '

        # ==== Timing profile statistics ====
        # Matches lines like ' Time spent in <name> : <seconds>'.
        timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\
          "(?P<time>[\d\+-Eed\.]*)\s*")

        for logf in log_GV_files:
            logfile=open(logf,'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(logf.split('/')[-3:-1])
            # Tag the channel with the MINT step number from the file name.
            mint = re.search(mint_search,logf)
            if not mint is None:
                channel_name = channel_name+' [step %s]'%mint.group('ID')

            # stats['timings'][<counter name>][<channel>] = accumulated seconds.
            for time_stats in re.finditer(timing_stat_finder, log):
                try:
                    stats['timings'][time_stats.group('name')][channel_name]+=\
                        safe_float(time_stats.group('time'))
                except KeyError:
                    if time_stats.group('name') not in stats['timings'].keys():
                        stats['timings'][time_stats.group('name')] = {}
                    stats['timings'][time_stats.group('name')][channel_name]=\
                        safe_float(time_stats.group('time'))

        # Pretty-printer for a seconds count as H:MM:SS.
        Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs)))
        try:
            totTimeList = [(time, chan) for chan, time in \
                                               stats['timings']['Total'].items()]
        except KeyError:
            totTimeList = []

        totTimeList.sort()
        if len(totTimeList)>0:
            debug_msg += '\n\n Inclusive timing profile :'
            debug_msg += '\n Overall slowest channel %s (%s)'%\
              (Tstr(totTimeList[-1][0]),totTimeList[-1][1])
            debug_msg += '\n Average channel running time %s'%\
              Tstr(sum([el[0] for el in totTimeList])/len(totTimeList))
            debug_msg += '\n Aggregated total running time %s'%\
              Tstr(sum([el[0] for el in totTimeList]))
        else:
            debug_msg += '\n\n Inclusive timing profile non available.'

        # Report each timing counter as a fraction of the per-channel total,
        # largest aggregate time first.
        sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \
                                  sum(stats['timings'][stat].values()), reverse=True)
        for name in sorted_keys:
            if name=='Total':
                continue
            if sum(stats['timings'][name].values())<=0.0:
                debug_msg += '\n Zero time record for %s.'%name
                continue
            # NOTE(review): in Python 2, 'except KeyError, ZeroDivisionError:'
            # binds the exception to the name ZeroDivisionError and only
            # catches KeyError -- a division by zero here would propagate.
            # The intended form is 'except (KeyError, ZeroDivisionError):'.
            # Same issue on the second except clause below. TODO confirm/fix
            # together with the (unseen) function header.
            try:
                TimeList = [((100.0*time/stats['timings']['Total'][chan]),
                      chan) for chan, time in stats['timings'][name].items()]
            except KeyError, ZeroDivisionError:
                debug_msg += '\n\n Timing profile for %s unavailable.'%name
                continue
            TimeList.sort()
            debug_msg += '\n Timing profile for <%s> :'%name
            try:
                debug_msg += '\n Overall fraction of time %.3f %%'%\
                   safe_float((100.0*(sum(stats['timings'][name].values())/
                                     sum(stats['timings']['Total'].values()))))
            except KeyError, ZeroDivisionError:
                debug_msg += '\n Overall fraction of time unavailable.'
            debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\
              (TimeList[-1][0],TimeList[-1][1])
            debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\
              (TimeList[0][0],TimeList[0][1])

        # ==== Error detection ====
        # Count 'ERROR' occurrences in every log, excluding the two known
        # benign phrases via lookbehind/lookahead.
        err_finder = re.compile(\
             r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE)
        for log in all_log_files:
            logfile=open(log,'r')
            nErrors = len(re.findall(err_finder, logfile.read()))
            logfile.close()
            if nErrors != 0:
                stats['Errors'].append((str(log),nErrors))

        # Report at most three offending files explicitly, then a summary line.
        nErrors = sum([err[1] for err in stats['Errors']],0)
        if nErrors != 0:
            debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\
              %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\
              'found in the following log file%s:'%('s' if \
                                                 len(stats['Errors'])>1 else '')
            for error in stats['Errors'][:3]:
                log_name = '/'.join(error[0].split('/')[-5:])
                debug_msg += '\n > %d error%s in %s'%\
                   (error[1],'s' if error[1]>1 else '',log_name)
            if len(stats['Errors'])>3:
                nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0)
                nRemainingLogs = len(stats['Errors'])-3
                debug_msg += '\n And another %d error%s in %d other log file%s'%\
                   (nRemainingErrors, 's' if nRemainingErrors>1 else '',
                    nRemainingLogs, 's ' if nRemainingLogs>1 else '')

        return message, debug_msg
3603
3604
        """this function calls the reweighting routines and creates the event file in the
        Event dir. Return the name of the event file created
        """
        # Run the reweighting step first if any scale/PDF variation (or more
        # than one dynamical scale / PDF set) is requested in the run card.
        scale_pdf_info=[]
        if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
           len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
            scale_pdf_info = self.run_reweight(options['reweightonly'])
        self.update_status('Collecting events', level='parton', update_results=True)
        misc.compile(['collect_events'],
                    cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile'])
        # Drive the Fortran collect_events program through stdin; its menu
        # choice selects the event normalization convention.
        # (event_norm presumably comes from the unseen function signature --
        # confirm against the caller.)
        p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
                stdin=subprocess.PIPE,
                stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
        if event_norm.lower() == 'sum':
            p.communicate(input = '1\n')
        elif event_norm.lower() == 'unity':
            p.communicate(input = '3\n')
        elif event_norm.lower() == 'bias':
            p.communicate(input = '0\n')
        else:
            # Default ('average') normalization.
            p.communicate(input = '2\n')

        # The last whitespace-separated token of the log is the name of the
        # event file written by collect_events.
        filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]

        if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
            raise aMCatNLOError('An error occurred during event generation. ' + \
                    'The event file has not been created. Check collect_events.log')
        # Compress the collected events into the run's Events directory.
        evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
        if not options['reweightonly']:
            self.print_summary(options, 2, mode, scale_pdf_info)
            res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses'))
            for res_file in res_files:
                files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))

        logger.info('The %s file has been generated.\n' % (evt_file))
        # nevents presumably comes from the unseen function signature -- verify.
        self.results.add_detail('nb_event', nevents)
        self.update_status('Events generated', level='parton', update_results=True)
        # Return the uncompressed path (strip the trailing '.gz').
        return evt_file[:-3]
3646
3647
3649 """runs mcatnlo on the generated event file, to produce showered-events
3650 """
3651 logger.info('Preparing MCatNLO run')
3652 try:
3653 misc.gunzip(evt_file)
3654 except Exception:
3655 pass
3656
3657 self.banner = banner_mod.Banner(evt_file)
3658 shower = self.banner.get_detail('run_card', 'parton_shower').upper()
3659
3660
3661
3662 if int(self.banner.get_detail('run_card', 'nevents') / \
3663 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
3664 != self.banner.get_detail('run_card', 'nevents'):
3665 logger.warning(\
3666 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
3667 'Setting it to 1.')
3668 self.shower_card['nsplit_jobs'] = 1
3669
3670
3671 if self.shower_card['nevents'] > 0 and \
3672 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
3673 self.shower_card['nsplit_jobs'] != 1:
3674 logger.warning(\
3675 'Only a part of the events will be showered.\n' + \
3676 'Setting nsplit_jobs in the shower_card to 1.')
3677 self.shower_card['nsplit_jobs'] = 1
3678
3679 self.banner_to_mcatnlo(evt_file)
3680
3681
3682
3683
3684 if 'fastjet' in self.shower_card['extralibs']:
3685
3686 if not 'stdc++' in self.shower_card['extralibs']:
3687 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
3688 self.shower_card['extralibs'] += ' stdc++'
3689
3690 try:
3691
3692 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
3693 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
3694 output, error = p.communicate()
3695
3696 output = output[:-1]
3697
3698 if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
3699 logger.warning('Linking FastJet: updating EXTRAPATHS')
3700 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
3701 if not pjoin(output, 'include') in self.shower_card['includepaths']:
3702 logger.warning('Linking FastJet: updating INCLUDEPATHS')
3703 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
3704
3705 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
3706 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
3707 except Exception:
3708 logger.warning('Linking FastJet: using fjcore')
3709
3710 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
3711 if not 'fjcore.o' in self.shower_card['analyse']:
3712 self.shower_card['analyse'] += ' fjcore.o'
3713
3714 include_line = '#include "fjcore.hh"//INCLUDE_FJ'
3715 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
3716
3717 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
3718 for line in fjwrapper_lines:
3719 if '//INCLUDE_FJ' in line:
3720 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
3721 if '//NAMESPACE_FJ' in line:
3722 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
3723 with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock:
3724 fsock.write('\n'.join(fjwrapper_lines) + '\n')
3725
3726 extrapaths = self.shower_card['extrapaths'].split()
3727
3728
3729 if shower in ['HERWIGPP', 'PYTHIA8']:
3730 path_dict = {'HERWIGPP': ['hepmc_path',
3731 'thepeg_path',
3732 'hwpp_path'],
3733 'PYTHIA8': ['pythia8_path']}
3734
3735 if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]):
3736 raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \
3737 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))
3738
3739 if shower == 'HERWIGPP':
3740 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
3741 self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib')
3742
3743
3744 if shower == 'PYTHIA8':
3745 hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'],
3746 stdout = subprocess.PIPE).stdout.read().strip()
3747
3748
3749
3750 extrapaths.append(hepmc.split()[1].replace('-L', ''))
3751
3752 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
3753 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))
3754
3755
3756 if sys.platform == 'darwin':
3757 ld_library_path = 'DYLD_LIBRARY_PATH'
3758 else:
3759 ld_library_path = 'LD_LIBRARY_PATH'
3760 if ld_library_path in os.environ.keys():
3761 paths = os.environ[ld_library_path]
3762 else:
3763 paths = ''
3764 paths += ':' + ':'.join(extrapaths)
3765 os.putenv(ld_library_path, paths)
3766
3767 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
3768 self.shower_card.write_card(shower, shower_card_path)
3769
3770
3771 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
3772 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
3773 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))
3774
3775 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
3776 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')
3777
3778
3779
3780
3781
3782
3783
3784
3785
3786
3787
3788
3789 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
3790 stderr=open(mcatnlo_log, 'w'),
3791 cwd=pjoin(self.me_dir, 'MCatNLO'),
3792 close_fds=True)
3793
3794 exe = 'MCATNLO_%s_EXE' % shower
3795 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
3796 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
3797 print open(mcatnlo_log).read()
3798 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
3799 logger.info(' ... done')
3800
3801
3802 count = 1
3803 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
3804 (shower, count))):
3805 count += 1
3806 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
3807 (shower, count))
3808 os.mkdir(rundir)
3809 files.cp(shower_card_path, rundir)
3810
3811
3812
3813 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
3814 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
3815 logger.info('Cleaning old files and splitting the event file...')
3816
3817 files.rm([f for f in event_files if 'events.lhe' not in f])
3818 if self.shower_card['nsplit_jobs'] > 1:
3819 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile'])
3820 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
3821 stdin=subprocess.PIPE,
3822 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
3823 cwd=pjoin(self.me_dir, 'Events', self.run_name))
3824 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
3825 logger.info('Splitting done.')
3826 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
3827
3828 event_files.sort()
3829
3830 self.update_status('Showering events...', level='shower')
3831 logger.info('(Running in %s)' % rundir)
3832 if shower != 'PYTHIA8':
3833 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
3834 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
3835 else:
3836
3837 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
3838 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
3839 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
3840 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
3841 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
3842 else:
3843 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
3844
3845 if shower == 'HERWIGPP':
3846 try:
3847 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
3848 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
3849 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
3850 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir)
3851 except Exception:
3852 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')
3853
3854 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
3855 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)
3856
3857 files.ln(evt_file, rundir, 'events.lhe')
3858 for i, f in enumerate(event_files):
3859 files.ln(f, rundir,'events_%d.lhe' % (i + 1))
3860
3861 if not self.shower_card['analyse']:
3862
3863 out_id = 'HEP'
3864 else:
3865
3866 if "HwU" in self.shower_card['analyse']:
3867 out_id = 'HWU'
3868 else:
3869 out_id = 'TOP'
3870
3871
3872 with open(pjoin(rundir, 'shower.sh'), 'w') as fsock:
3873
3874 if sys.platform == 'darwin':
3875 ld_library_path = 'DYLD_LIBRARY_PATH'
3876 else:
3877 ld_library_path = 'LD_LIBRARY_PATH'
3878 fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
3879 % {'ld_library_path': ld_library_path,
3880 'extralibs': ':'.join(extrapaths)})
3881 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])
3882
3883 if event_files:
3884 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
3885 for i in range(len(event_files))]
3886 else:
3887 arg_list = [[shower, out_id, self.run_name]]
3888
3889 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
3890 self.njobs = 1
3891 self.wait_for_complete('shower')
3892
3893
3894 message = ''
3895 warning = ''
3896 to_gzip = [evt_file]
3897 if out_id == 'HEP':
3898
3899 if shower in ['PYTHIA8', 'HERWIGPP']:
3900 hep_format = 'HEPMC'
3901 ext = 'hepmc'
3902 else:
3903 hep_format = 'StdHEP'
3904 ext = 'hep'
3905
3906 hep_file = '%s_%s_0.%s.gz' % \
3907 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
3908 count = 0
3909
3910
3911
3912 while os.path.exists(hep_file) or \
3913 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) :
3914 count +=1
3915 hep_file = '%s_%s_%d.%s.gz' % \
3916 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)
3917
3918 try:
3919 if self.shower_card['nsplit_jobs'] == 1:
3920 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
3921 message = ('The file %s has been generated. \nIt contains showered' + \
3922 ' and hadronized events in the %s format obtained' + \
3923 ' showering the parton-level event file %s.gz with %s') % \
3924 (hep_file, hep_format, evt_file, shower)
3925 else:
3926 hep_list = []
3927 for i in range(self.shower_card['nsplit_jobs']):
3928 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
3929 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
3930 message = ('The following files have been generated:\n %s\nThey contain showered' + \
3931 ' and hadronized events in the %s format obtained' + \
3932 ' showering the (split) parton-level event file %s.gz with %s') % \
3933 ('\n '.join(hep_list), hep_format, evt_file, shower)
3934
3935 except OSError, IOError:
3936 raise aMCatNLOError('No file has been generated, an error occurred.'+\
3937 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))
3938
3939
3940 if hep_format == 'StdHEP':
3941 try:
3942 self.do_plot('%s -f' % self.run_name)
3943 except Exception, error:
3944 logger.info("Fail to make the plot. Continue...")
3945 pass
3946
3947 elif out_id == 'TOP' or out_id == 'HWU':
3948
3949 if out_id=='TOP':
3950 ext='top'
3951 elif out_id=='HWU':
3952 ext='HwU'
3953 topfiles = []
3954 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)]
3955 for top_tar in top_tars:
3956 topfiles.extend(top_tar.getnames())
3957
3958
3959 if len(top_tars) != self.shower_card['nsplit_jobs']:
3960 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
3961 (self.shower_card['nsplit_jobs'], len(top_tars)))
3962
3963
3964
3965 filename = 'plot_%s_%d_' % (shower, 1)
3966 count = 1
3967 while os.path.exists(pjoin(self.me_dir, 'Events',
3968 self.run_name, '%s0.%s' % (filename,ext))) or \
3969 os.path.exists(pjoin(self.me_dir, 'Events',
3970 self.run_name, '%s0__1.%s' % (filename,ext))):
3971 count += 1
3972 filename = 'plot_%s_%d_' % (shower, count)
3973
3974 if out_id=='TOP':
3975 hist_format='TopDrawer format'
3976 elif out_id=='HWU':
3977 hist_format='HwU and GnuPlot formats'
3978
3979 if not topfiles:
3980
3981 warning = 'No .top file has been generated. For the results of your ' +\
3982 'run, please check inside %s' % rundir
3983 elif self.shower_card['nsplit_jobs'] == 1:
3984
3985 top_tars[0].extractall(path = rundir)
3986 plotfiles = []
3987 for i, file in enumerate(topfiles):
3988 if out_id=='TOP':
3989 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
3990 '%s%d.top' % (filename, i))
3991 files.mv(pjoin(rundir, file), plotfile)
3992 elif out_id=='HWU':
3993 out=pjoin(self.me_dir,'Events',
3994 self.run_name,'%s%d'% (filename,i))
3995 histos=[{'dirname':pjoin(rundir,file)}]
3996 self.combine_plots_HwU(histos,out)
3997 try:
3998 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\
3999 stdout=os.open(os.devnull, os.O_RDWR),\
4000 stderr=os.open(os.devnull, os.O_RDWR),\
4001 cwd=pjoin(self.me_dir, 'Events', self.run_name))
4002 except Exception:
4003 pass
4004 plotfile=pjoin(self.me_dir,'Events',self.run_name,
4005 '%s%d.HwU'% (filename,i))
4006 plotfiles.append(plotfile)
4007
4008 ffiles = 'files'
4009 have = 'have'
4010 if len(plotfiles) == 1:
4011 ffiles = 'file'
4012 have = 'has'
4013
4014 message = ('The %s %s %s been generated, with histograms in the' + \
4015 ' %s, obtained by showering the parton-level' + \
4016 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
4017 hist_format, evt_file, shower)
4018 else:
4019
4020 topfiles_set = set(topfiles)
4021 plotfiles = []
4022 for j, top_tar in enumerate(top_tars):
4023 top_tar.extractall(path = rundir)
4024 for i, file in enumerate(topfiles_set):
4025 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
4026 '%s%d__%d.%s' % (filename, i, j + 1,ext))
4027 files.mv(pjoin(rundir, file), plotfile)
4028 plotfiles.append(plotfile)
4029
4030
4031 if self.shower_card['combine_td']:
4032 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities'))
4033
4034 if self.banner.get('run_card', 'event_norm').lower() == 'sum':
4035 norm = 1.
4036 else:
4037 norm = 1./float(self.shower_card['nsplit_jobs'])
4038
4039 plotfiles2 = []
4040 for i, file in enumerate(topfiles_set):
4041 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \
4042 for j in range(self.shower_card['nsplit_jobs'])]
4043 if out_id=='TOP':
4044 infile="%d\n%s\n%s\n" % \
4045 (self.shower_card['nsplit_jobs'],
4046 '\n'.join(filelist),
4047 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
4048 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
4049 stdin=subprocess.PIPE,
4050 stdout=os.open(os.devnull, os.O_RDWR),
4051 cwd=pjoin(self.me_dir, 'Events', self.run_name))
4052 p.communicate(input = infile)
4053 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
4054 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
4055 elif out_id=='HWU':
4056 out=pjoin(self.me_dir,'Events',
4057 self.run_name,'%s%d'% (filename,i))
4058 histos=[]
4059 norms=[]
4060 for plotfile in plotfiles:
4061 histos.append({'dirname':plotfile})
4062 norms.append(norm)
4063 self.combine_plots_HwU(histos,out,normalisation=norms)
4064 try:
4065 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\
4066 stdout=os.open(os.devnull, os.O_RDWR),\
4067 stderr=os.open(os.devnull, os.O_RDWR),\
4068 cwd=pjoin(self.me_dir, 'Events',self.run_name))
4069 except Exception:
4070 pass
4071
4072 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext)))
4073 tar = tarfile.open(
4074 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
4075 for f in filelist:
4076 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
4077 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])
4078
4079 tar.close()
4080
4081 ffiles = 'files'
4082 have = 'have'
4083 if len(plotfiles2) == 1:
4084 ffiles = 'file'
4085 have = 'has'
4086
4087 message = ('The %s %s %s been generated, with histograms in the' + \
4088 ' %s, obtained by showering the parton-level' + \
4089 ' file %s.gz with %s.\n' + \
4090 'The files from the different shower ' + \
4091 'jobs (before combining them) can be found inside %s.') % \
4092 (ffiles, ', '.join(plotfiles2), have, hist_format,\
4093 evt_file, shower,
4094 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2]))
4095
4096 else:
4097 message = ('The following files have been generated:\n %s\n' + \
4098 'They contain histograms in the' + \
4099 ' %s, obtained by showering the parton-level' + \
4100 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
4101 hist_format, evt_file, shower)
4102
4103
4104 run_dir_path = pjoin(rundir, self.run_name)
4105 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')):
4106 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path)
4107 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'),
4108 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\
4109 %(shower, count)))
4110 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],
4111 cwd=run_dir_path)
4112 shutil.rmtree(pjoin(run_dir_path,'RunMaterial'))
4113
4114 for f in to_gzip:
4115 misc.gzip(f)
4116 if message:
4117 logger.info(message)
4118 if warning:
4119 logger.warning(warning)
4120
4121 self.update_status('Run complete', level='shower', update_results=True)
4122
4123
    def set_run_name(self, name, tag=None, level='parton', reload_card=False):
        """Define the run name, the run_tag, the banner and the results.

        name: run name to switch the interface to.
        tag: explicit run tag to use; when None a tag is recovered or a
            fresh one is generated as needed.
        level: kind of output about to be produced ('parton', 'shower',
            'delphes', 'madanalysis5_hadron', 'plot'); controls when an
            existing tag must be abandoned so prior output is not clobbered.
        reload_card: when True, re-read run_card.dat from disk even if the
            run name did not change.

        Returns None for level 'parton'; for level 'pythia' the tag of the
        first entry of this run; otherwise the tag of the most recent entry
        that has pythia data (implicitly None when no such entry exists).
        """

        # For each requested level, the list of attributes that, if already
        # set on the latest results entry, force switching to a new tag.
        upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'],
                       'shower': ['shower','delphes','madanalysis5_hadron'],
                       'delphes':['delphes'],
                       'madanalysis5_hadron':['madanalysis5_hadron'],
                       'plot':[]}

        # Same run as the current one: only refresh card/tag bookkeeping.
        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            # Check whether we need to change the tag.
            if tag:
                # Caller imposed a tag: register it as-is.
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                # If the latest entry already holds output at (or above)
                # this level, pick a fresh, unused tag instead.
                for tag in upgrade_tag[level]:
                    if getattr(self.results[self.run_name][-1], tag):
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return # Nothing else to do for an unchanged run name.

        # Switching run: persist whatever the previous run produced first.
        if self.run_name:
            self.store_result()
        # Store the new name.
        self.run_name = name

        # Read the run_card from disk for the new run.
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # First call for this run -> recover/set the banner; the banner's
        # own run_card (if present) supersedes the one read from disk.
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if 'mgruncard' in self.banner:
            self.run_card = self.banner.charge_card('run_card')
        if tag:
            # Caller imposed a tag for the new run.
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level =='parton':
            # Brand new run at parton level: the tag from the card is fine.
            pass
        elif not self.run_name in self.results:
            # Non-parton level on a run the results DB does not know about.
            logger.warning('Trying to run data on unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            # Known run: decide whether the latest tag can be reused.
            for tag in upgrade_tag[level]:
                # This level's output already exists under the latest tag,
                # so a fresh tag is required to avoid overwriting it.
                if getattr(self.results[self.run_name][-1], tag):

                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # Reuse the latest tag of this run; keep run_card in sync.
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag

        # Register the run/tag combination in the results database.
        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # Return the tag of the previous run holding the data required at
        # this level (see docstring).
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            # Walk entries from newest to oldest; return the first with
            # pythia output.
            for i in range(-1,-len(self.results[self.run_name])-1,-1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']
4212
4213
4215 """ tar the pythia results. This is done when we are quite sure that
4216 the pythia output will not be use anymore """
4217
4218 if not self.run_name:
4219 return
4220
4221 self.results.save()
4222
4223 if not self.to_store:
4224 return
4225
4226 if 'event' in self.to_store:
4227 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')):
4228 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')):
4229 self.update_status('gzipping output file: events.lhe', level='parton', error=True)
4230 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
4231 else:
4232 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
4233 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')):
4234 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe'))
4235
4236
4237 tag = self.run_card['run_tag']
4238
4239 self.to_store = []
4240
4241
4242
4244 """get the list of Gdirectory if not yet saved."""
4245
4246 if hasattr(self, "Gdirs"):
4247 if self.me_dir in self.Gdirs:
4248 if Pdir is None:
4249 return sum(self.Gdirs.values())
4250 else:
4251 return self.Gdirs[Pdir]
4252
4253 Pdirs = self.get_Pdir()
4254 Gdirs = {self.me_dir:[]}
4255 for P in Pdirs:
4256 Gdirs[P] = [pjoin(P,G) for G in os.listdir(P) if G.startswith('G') and
4257 os.path.isdir(pjoin(P,G))]
4258
4259 self.Gdirs = Gdirs
4260 return self.getGdir(Pdir)
4261
4262
4264 """reads the info in the init block and returns them in a dictionary"""
4265 ev_file = open(evt_file)
4266 init = ""
4267 found = False
4268 while True:
4269 line = ev_file.readline()
4270 if "<init>" in line:
4271 found = True
4272 elif found and not line.startswith('#'):
4273 init += line
4274 if "</init>" in line or "<event>" in line:
4275 break
4276 ev_file.close()
4277
4278
4279
4280
4281
4282
4283 init_dict = {}
4284 init_dict['idbmup1'] = int(init.split()[0])
4285 init_dict['idbmup2'] = int(init.split()[1])
4286 init_dict['ebmup1'] = float(init.split()[2])
4287 init_dict['ebmup2'] = float(init.split()[3])
4288 init_dict['pdfgup1'] = int(init.split()[4])
4289 init_dict['pdfgup2'] = int(init.split()[5])
4290 init_dict['pdfsup1'] = int(init.split()[6])
4291 init_dict['pdfsup2'] = int(init.split()[7])
4292 init_dict['idwtup'] = int(init.split()[8])
4293 init_dict['nprup'] = int(init.split()[9])
4294
4295 return init_dict
4296
4297
4299 """creates the mcatnlo input script using the values set in the header of the event_file.
4300 It also checks if the lhapdf library is used"""
4301
4302 shower = self.banner.get('run_card', 'parton_shower').upper()
4303 pdlabel = self.banner.get('run_card', 'pdlabel')
4304 itry = 0
4305 nevents = self.shower_card['nevents']
4306 init_dict = self.get_init_dict(evt_file)
4307
4308 if nevents < 0 or \
4309 nevents > self.banner.get_detail('run_card', 'nevents'):
4310 nevents = self.banner.get_detail('run_card', 'nevents')
4311
4312 nevents = nevents / self.shower_card['nsplit_jobs']
4313
4314 mcmass_dict = {}
4315 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
4316 pdg = int(line.split()[0])
4317 mass = float(line.split()[1])
4318 mcmass_dict[pdg] = mass
4319
4320 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
4321 content += 'NEVENTS=%d\n' % nevents
4322 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
4323 self.shower_card['nsplit_jobs'])
4324 content += 'MCMODE=%s\n' % shower
4325 content += 'PDLABEL=%s\n' % pdlabel
4326
4327 try:
4328 aewm1 = self.banner.get_detail('param_card', 'sminputs', 1).value
4329 raise KeyError
4330 except KeyError:
4331 mod = self.get_model()
4332 if not hasattr(mod, 'parameter_dict'):
4333 from models import model_reader
4334 mod = model_reader.ModelReader(mod)
4335 mod.set_parameters_and_couplings(self.banner.param_card)
4336 aewm1 = 0
4337 for key in ['aEWM1', 'AEWM1', 'aEWm1', 'aewm1']:
4338 if key in mod['parameter_dict']:
4339 aewm1 = mod['parameter_dict'][key]
4340 break
4341 elif 'mdl_%s' % key in mod['parameter_dict']:
4342 aewm1 = mod['parameter_dict']['mod_%s' % key]
4343 break
4344 else:
4345 for key in ['aEW', 'AEW', 'aEw', 'aew']:
4346 if key in mod['parameter_dict']:
4347 aewm1 = 1./mod['parameter_dict'][key]
4348 break
4349 elif 'mdl_%s' % key in mod['parameter_dict']:
4350 aewm1 = 1./mod['parameter_dict']['mod_%s' % key]
4351 break
4352
4353 content += 'ALPHAEW=%s\n' % aewm1
4354
4355
4356 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
4357 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
4358 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
4359 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
4360 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
4361 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
4362 try:
4363 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
4364 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
4365 except KeyError:
4366 content += 'HGGMASS=120.\n'
4367 content += 'HGGWIDTH=0.00575308848\n'
4368 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
4369 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
4370 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
4371 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
4372 content += 'DMASS=%s\n' % mcmass_dict[1]
4373 content += 'UMASS=%s\n' % mcmass_dict[2]
4374 content += 'SMASS=%s\n' % mcmass_dict[3]
4375 content += 'CMASS=%s\n' % mcmass_dict[4]
4376 content += 'BMASS=%s\n' % mcmass_dict[5]
4377 try:
4378 content += 'EMASS=%s\n' % mcmass_dict[11]
4379 content += 'MUMASS=%s\n' % mcmass_dict[13]
4380 content += 'TAUMASS=%s\n' % mcmass_dict[15]
4381 except KeyError:
4382
4383 mcmass_lines = [l for l in \
4384 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
4385 ).read().split('\n') if l]
4386 new_mcmass_dict = {}
4387 for l in mcmass_lines:
4388 key, val = l.split('=')
4389 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
4390 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
4391 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
4392 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']
4393
4394 content += 'GMASS=%s\n' % mcmass_dict[21]
4395 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
4396
4397 if int(self.shower_card['pdfcode']) > 1 or \
4398 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \
4399 shower=='HERWIGPP' :
4400
4401
4402
4403
4404 self.link_lhapdf(pjoin(self.me_dir, 'lib'))
4405 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
4406 stdout = subprocess.PIPE).stdout.read().strip()
4407 content += 'LHAPDFPATH=%s\n' % lhapdfpath
4408 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
4409 if self.shower_card['pdfcode']==0:
4410 lhaid_list = ''
4411 content += ''
4412 elif self.shower_card['pdfcode']==1:
4413 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
4414 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
4415 else:
4416 lhaid_list = [abs(int(self.shower_card['pdfcode']))]
4417 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
4418 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
4419 elif int(self.shower_card['pdfcode'])==1 or \
4420 int(self.shower_card['pdfcode'])==-1 and True:
4421
4422
4423
4424
4425
4426
4427 try:
4428 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
4429 stdout = subprocess.PIPE).stdout.read().strip()
4430 self.link_lhapdf(pjoin(self.me_dir, 'lib'))
4431 content += 'LHAPDFPATH=%s\n' % lhapdfpath
4432 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
4433 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
4434 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
4435 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
4436 except Exception:
4437 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
4438 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
4439 ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
4440 ' same set as was used in the event generation install LHAPDF and set the path using'+\
4441 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
4442 content += 'LHAPDFPATH=\n'
4443 content += 'PDFCODE=0\n'
4444 else:
4445 content += 'LHAPDFPATH=\n'
4446 content += 'PDFCODE=0\n'
4447
4448 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
4449 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
4450
4451 if self.options['pythia8_path']:
4452 content+='PY8PATH=%s\n' % self.options['pythia8_path']
4453 if self.options['hwpp_path']:
4454 content+='HWPPPATH=%s\n' % self.options['hwpp_path']
4455 if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']:
4456 content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
4457 if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']:
4458 content+='HEPMCPATH=%s\n' % self.options['hepmc_path']
4459
4460 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
4461 output.write(content)
4462 output.close()
4463 return shower
4464
4465
4467 """runs the reweight_xsec_events executables on each sub-event file generated
4468 to compute on the fly scale and/or PDF uncertainities"""
4469 logger.info(' Doing reweight')
4470
4471 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
4472
4473 if only:
4474 if os.path.exists(nev_unw + '.orig'):
4475 files.cp(nev_unw + '.orig', nev_unw)
4476 else:
4477 raise aMCatNLOError('Cannot find event file information')
4478
4479
4480 file = open(nev_unw)
4481 lines = file.read().split('\n')
4482 file.close()
4483
4484 files.cp(nev_unw, nev_unw + '.orig')
4485
4486
4487 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
4488 evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0']
4489 if self.run_card['event_norm'].lower()=='bias' and self.run_card['nevents'] != 0:
4490 evt_wghts[:]=[1./float(self.run_card['nevents']) for wgt in evt_wghts]
4491
4492 job_dict = {}
4493 exe = 'reweight_xsec_events.local'
4494 for i, evt_file in enumerate(evt_files):
4495 path, evt = os.path.split(evt_file)
4496 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
4497 pjoin(self.me_dir, 'SubProcesses', path))
4498 job_dict[path] = [exe]
4499
4500 self.run_all(job_dict, [[evt, '1']], 'Running reweight')
4501
4502
4503 for evt_file in evt_files:
4504 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
4505 pjoin(self.me_dir, 'SubProcesses', evt_file)], \
4506 stdout = subprocess.PIPE).stdout.read().strip()
4507 if last_line != "</LesHouchesEvents>":
4508 raise aMCatNLOError('An error occurred during reweight. Check the' + \
4509 '\'reweight_xsec_events.output\' files inside the ' + \
4510 '\'SubProcesses/P*/G*/ directories for details')
4511
4512
4513 newfile = open(nev_unw, 'w')
4514 for line in lines:
4515 if line:
4516 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
4517 newfile.close()
4518
4519 return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
4520
4522 """This function takes the files with the scale and pdf values
4523 written by the reweight_xsec_events.f code
4524 (P*/G*/pdf_scale_dependence.dat) and computes the overall
4525 scale and PDF uncertainty (the latter is computed using the
4526 Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
4527 and returns it in percents. The expected format of the file
4528 is: n_scales xsec_scale_central xsec_scale1 ... n_pdf
4529 xsec_pdf0 xsec_pdf1 ...."""
4530
4531 scales=[]
4532 pdfs=[]
4533 for i,evt_file in enumerate(evt_files):
4534 path, evt=os.path.split(evt_file)
4535 with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
4536 data_line=f.readline()
4537 if "scale variations:" in data_line:
4538 for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
4539 data_line = f.readline().split()
4540 scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
4541 try:
4542 scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
4543 except IndexError:
4544 scales+=[scales_this]
4545 data_line=f.readline()
4546 if "pdf variations:" in data_line:
4547 for j,pdf in enumerate(self.run_card['lhaid']):
4548 data_line = f.readline().split()
4549 pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
4550 try:
4551 pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
4552 except IndexError:
4553 pdfs+=[pdfs_this]
4554
4555
4556 scale_info=[]
4557 for j,scale in enumerate(scales):
4558 s_cen=scale[0]
4559 if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
4560
4561 s_max=(max(scale)/s_cen-1)*100
4562 s_min=(1-min(scale)/s_cen)*100
4563
4564 ren_var=[]
4565 fac_var=[]
4566 for i in range(len(self.run_card['rw_rscale'])):
4567 ren_var.append(scale[i]-s_cen)
4568 for i in range(len(self.run_card['rw_fscale'])):
4569 fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen)
4570 s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
4571 s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
4572 s_size=len(scale)
4573 else:
4574 s_max=0.0
4575 s_min=0.0
4576 s_max_q=0.0
4577 s_min_q=0.0
4578 s_size=len(scale)
4579 scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
4580 'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
4581 'label':self.run_card['dynamical_scale_choice'][j], \
4582 'unc':self.run_card['reweight_scale'][j]})
4583
4584
4585 if any(self.run_card['reweight_pdf']):
4586 use_lhapdf=False
4587 lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\
4588 stdout=subprocess.PIPE).stdout.read().strip()
4589
4590 try:
4591 candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \
4592 if os.path.isdir(pjoin(lhapdf_libdir,dirname))]
4593 except OSError:
4594 candidates=[]
4595 for candidate in candidates:
4596 if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')):
4597 sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages'))
4598 try:
4599 import lhapdf
4600 use_lhapdf=True
4601 break
4602 except ImportError:
4603 sys.path.pop(0)
4604 continue
4605
4606 if not use_lhapdf:
4607 try:
4608 candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \
4609 if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))]
4610 except OSError:
4611 candidates=[]
4612 for candidate in candidates:
4613 if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')):
4614 sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages'))
4615 try:
4616 import lhapdf
4617 use_lhapdf=True
4618 break
4619 except ImportError:
4620 sys.path.pop(0)
4621 continue
4622
4623 if not use_lhapdf:
4624 try:
4625 import lhapdf
4626 use_lhapdf=True
4627 except ImportError:
4628 logger.warning("Failed to access python version of LHAPDF: "\
4629 "cannot compute PDF uncertainty from the "\
4630 "weights in the events. The weights in the LHE " \
4631 "event files will still cover all PDF set members, "\
4632 "but there will be no PDF uncertainty printed in the run summary. \n "\
4633 "If the python interface to LHAPDF is available on your system, try "\
4634 "adding its location to the PYTHONPATH environment variable and the"\
4635 "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
4636 use_lhapdf=False
4637
4638
4639 if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)
4640
4641 pdf_info=[]
4642 for j,pdfset in enumerate(pdfs):
4643 p_cen=pdfset[0]
4644 if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
4645 if use_lhapdf:
4646 pdfsetname=self.run_card['lhapdfsetname'][j]
4647 try:
4648 p=lhapdf.getPDFSet(pdfsetname)
4649 ep=p.uncertainty(pdfset,-1)
4650 p_cen=ep.central
4651 p_min=abs(ep.errminus/p_cen)*100
4652 p_max=abs(ep.errplus/p_cen)*100
4653 p_type=p.errorType
4654 p_size=p.size
4655 p_conf=p.errorConfLevel
4656 except:
4657 logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
4658 p_min=0.0
4659 p_max=0.0
4660 p_type='unknown'
4661 p_conf='unknown'
4662 p_size=len(pdfset)
4663 else:
4664 p_min=0.0
4665 p_max=0.0
4666 p_type='unknown'
4667 p_conf='unknown'
4668 p_size=len(pdfset)
4669 pdfsetname=self.run_card['lhaid'][j]
4670 else:
4671 p_min=0.0
4672 p_max=0.0
4673 p_type='none'
4674 p_conf='unknown'
4675 p_size=len(pdfset)
4676 pdfsetname=self.run_card['lhaid'][j]
4677 pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
4678 'unc':p_type, 'name':pdfsetname, 'size':p_size, \
4679 'label':self.run_card['lhaid'][j], 'conf':p_conf})
4680
4681 scale_pdf_info=[scale_info,pdf_info]
4682 return scale_pdf_info
4683
4684
4696
4697 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4698 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args"""
4699 self.ijob = 0
4700 if run_type != 'shower':
4701 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
4702 for args in arg_list:
4703 for Pdir, jobs in job_dict.items():
4704 for job in jobs:
4705 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
4706 if self.cluster_mode == 2:
4707 time.sleep(1)
4708 else:
4709 self.njobs = len(arg_list)
4710 for args in arg_list:
4711 [(cwd, exe)] = job_dict.items()
4712 self.run_exe(exe, args, run_type, cwd)
4713
4714 self.wait_for_complete(run_type)
4715
4716
4717
4719 """check the integrity of the event files after splitting, and resubmit
4720 those which are not nicely terminated"""
4721 jobs_to_resubmit = []
4722 for job in jobs:
4723 last_line = ''
4724 try:
4725 last_line = subprocess.Popen(
4726 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \
4727 stdout = subprocess.PIPE).stdout.read().strip()
4728 except IOError:
4729 pass
4730 if last_line != "</LesHouchesEvents>":
4731 jobs_to_resubmit.append(job)
4732 self.njobs = 0
4733 if jobs_to_resubmit:
4734 run_type = 'Resubmitting broken jobs'
4735 logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
4736 for job in jobs_to_resubmit:
4737 logger.debug('Resubmitting ' + job['dirname'] + '\n')
4738 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4739
4740
4742 """looks into the nevents_unweighed_splitted file to check how many
4743 split jobs are needed for this (pdir, job). arg is F, B or V"""
4744
4745 splittings = []
4746 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
4747 pattern = re.compile('for i in (\d+) ; do')
4748 match = re.search(pattern, ajob)
4749 channel = match.groups()[0]
4750
4751
4752 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()
4753
4754
4755 pattern = re.compile(r"%s_(\d+)/events.lhe" % \
4756 pjoin(pdir, 'G%s%s' % (arg,channel)))
4757 matches = re.findall(pattern, nevents_file)
4758 for m in matches:
4759 splittings.append(m)
4760 return splittings
4761
4762
4763 - def run_exe(self, exe, args, run_type, cwd=None):
4764 """this basic function launch locally/on cluster exe with args as argument.
4765 """
4766
4767 execpath = None
4768 if cwd and os.path.exists(pjoin(cwd, exe)):
4769 execpath = pjoin(cwd, exe)
4770 elif not cwd and os.path.exists(exe):
4771 execpath = exe
4772 else:
4773 raise aMCatNLOError('Cannot find executable %s in %s' \
4774 % (exe, os.getcwd()))
4775
4776 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
4777 subprocess.call(['chmod', '+x', exe], cwd=cwd)
4778
4779 if self.cluster_mode == 0:
4780
4781 misc.call(['./'+exe] + args, cwd=cwd)
4782 self.ijob += 1
4783 self.update_status((max([self.njobs - self.ijob - 1, 0]),
4784 min([1, self.njobs - self.ijob]),
4785 self.ijob, run_type), level='parton')
4786
4787
4788 elif 'reweight' in exe:
4789
4790
4791 input_files, output_files = [], []
4792 pdfinput = self.get_pdf_input_filename()
4793 if os.path.exists(pdfinput):
4794 input_files.append(pdfinput)
4795 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
4796 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
4797 input_files.append(args[0])
4798 output_files.append('%s.rwgt' % os.path.basename(args[0]))
4799 output_files.append('reweight_xsec_events.output')
4800 output_files.append('scale_pdf_dependence.dat')
4801
4802 return self.cluster.submit2(exe, args, cwd=cwd,
4803 input_files=input_files, output_files=output_files,
4804 required_output=output_files)
4805
4806 elif 'ajob' in exe:
4807
4808
4809 if type(args[0]) == str:
4810 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
4811
4812 self.cluster.submit2(exe, args, cwd=cwd,
4813 input_files=input_files, output_files=output_files,
4814 required_output=required_output)
4815
4816
4817
4818
4819
4820
4821 elif 'shower' in exe:
4822
4823
4824
4825 input_files, output_files = [], []
4826 shower = args[0]
4827
4828 if shower == 'PYTHIA8':
4829 input_files.append(pjoin(cwd, 'Pythia8.exe'))
4830 input_files.append(pjoin(cwd, 'Pythia8.cmd'))
4831 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
4832 input_files.append(pjoin(cwd, 'config.sh'))
4833 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
4834 else:
4835 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
4836 else:
4837 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
4838 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
4839 if shower == 'HERWIGPP':
4840 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
4841 input_files.append(pjoin(cwd, 'Herwig++'))
4842 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
4843 input_files.append(pjoin(cwd, 'Herwig'))
4844 input_files.append(pjoin(cwd, 'HepMCFortran.so'))
4845 if len(args) == 3:
4846 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
4847 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
4848 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
4849 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
4850 else:
4851 raise aMCatNLOError, 'Event file not present in %s' % \
4852 pjoin(self.me_dir, 'Events', self.run_name)
4853 else:
4854 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
4855
4856 if len(args) == 3:
4857 output_files.append('mcatnlo_run.log')
4858 else:
4859 output_files.append('mcatnlo_run_%s.log' % args[3])
4860 if args[1] == 'HEP':
4861 if len(args) == 3:
4862 fname = 'events'
4863 else:
4864 fname = 'events_%s' % args[3]
4865 if shower in ['PYTHIA8', 'HERWIGPP']:
4866 output_files.append(fname + '.hepmc.gz')
4867 else:
4868 output_files.append(fname + '.hep.gz')
4869 elif args[1] == 'TOP' or args[1] == 'HWU':
4870 if len(args) == 3:
4871 fname = 'histfile'
4872 else:
4873 fname = 'histfile_%s' % args[3]
4874 output_files.append(fname + '.tar')
4875 else:
4876 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1]
4877
4878 self.cluster.submit2(exe, args, cwd=cwd,
4879 input_files=input_files, output_files=output_files)
4880
4881 else:
4882 return self.cluster.submit(exe, args, cwd=cwd)
4883
4885
4886
4887
4888 output_files = []
4889 required_output = []
4890 input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
4891 pjoin(cwd, 'symfact.dat'),
4892 pjoin(cwd, 'iproc.dat'),
4893 pjoin(cwd, 'initial_states_map.dat'),
4894 pjoin(cwd, 'configs_and_props_info.dat'),
4895 pjoin(cwd, 'leshouche_info.dat'),
4896 pjoin(cwd, 'FKS_params.dat')]
4897
4898
4899 if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
4900 input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))
4901
4902 if os.path.exists(pjoin(cwd,'nevents.tar')):
4903 input_files.append(pjoin(cwd,'nevents.tar'))
4904
4905 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
4906 input_files.append(pjoin(cwd, 'OLE_order.olc'))
4907
4908
4909 if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
4910 cluster.need_transfer(self.options):
4911 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
4912 elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
4913 cluster.need_transfer(self.options):
4914 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
4915 dereference=True)
4916 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
4917 tf.close()
4918 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
4919
4920 if args[1] == 'born' or args[1] == 'all':
4921
4922 input_files.append(pjoin(cwd, 'madevent_mintFO'))
4923 if args[2] == '0':
4924 current = '%s_G%s' % (args[1],args[0])
4925 else:
4926 current = '%s_G%s_%s' % (args[1],args[0],args[2])
4927 if os.path.exists(pjoin(cwd,current)):
4928 input_files.append(pjoin(cwd, current))
4929 output_files.append(current)
4930
4931 required_output.append('%s/results.dat' % current)
4932 required_output.append('%s/res_%s.dat' % (current,args[3]))
4933 required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
4934 required_output.append('%s/mint_grids' % current)
4935 required_output.append('%s/grid.MC_integer' % current)
4936 if args[3] != '0':
4937 required_output.append('%s/scale_pdf_dependence.dat' % current)
4938
4939 elif args[1] == 'F' or args[1] == 'B':
4940
4941 input_files.append(pjoin(cwd, 'madevent_mintMC'))
4942
4943 if args[2] == '0':
4944 current = 'G%s%s' % (args[1],args[0])
4945 else:
4946 current = 'G%s%s_%s' % (args[1],args[0],args[2])
4947 if os.path.exists(pjoin(cwd,current)):
4948 input_files.append(pjoin(cwd, current))
4949 output_files.append(current)
4950 if args[2] > '0':
4951
4952 output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
4953 required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))
4954
4955 else:
4956 required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
4957 if args[3] in ['0','1']:
4958 required_output.append('%s/results.dat' % current)
4959 if args[3] == '1':
4960 output_files.append('%s/results.dat' % current)
4961
4962 else:
4963 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))
4964
4965
4966 pdfinput = self.get_pdf_input_filename()
4967 if os.path.exists(pdfinput):
4968 input_files.append(pdfinput)
4969 return input_files, output_files, required_output, args
4970
4971
    def compile(self, mode, options):
        """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
        specified in mode"""

        # a fresh Events/<run_name> directory is created for this run
        os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))

        self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
                          '%s_%s_banner.txt' % (self.run_name, self.run_tag)))

        self.get_characteristics(pjoin(self.me_dir,
                                        'SubProcesses', 'proc_characteristics'))

        # log files filled by the various compilation steps
        amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
        madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
        reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
        test_log = pjoin(self.me_dir, 'test.log')

        # variables propagated to the make_opts file
        self.make_opts_var = {}
        if self.proc_characteristics['has_loops'] and \
                          not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
            self.make_opts_var['madloop'] = 'true'

        self.update_status('Compiling the code', level=None, update_results=True)

        libdir = pjoin(self.me_dir, 'lib')
        sourcedir = pjoin(self.me_dir, 'Source')

        # remove log files from a previous compilation
        files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])

        # strip any '+...' suffix before choosing executable and tests
        if '+' in mode:
            mode = mode.split('+')[0]
        if mode in ['NLO', 'LO']:
            exe = 'madevent_mintFO'
            tests = ['test_ME']
            self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
        elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']:
            exe = 'madevent_mintMC'
            tests = ['test_ME', 'test_MC']
            # write a dummy analyse_opts file (no FO analysis for event generation)
            with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock:
                fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n')
        # NOTE(review): for any other mode, 'exe' and 'tests' stay undefined and
        # the code below would raise NameError -- presumably unreachable; confirm.

        # the P* subprocess directories listed in subproc.mg
        p_dirs = [d for d in \
                open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]

        self.do_treatcards('', amcatnlo=True, mode=mode)

        # if --nocompile was given and all executables already exist, stop here
        if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
                for p_dir in p_dirs]) and options['nocompile']:
            return

        # remove links to PDFsets from a previous lhapdf setup
        if os.path.exists(pjoin(libdir, 'PDFsets')):
            files.rm(pjoin(libdir, 'PDFsets'))

        # link LHAPDF and copy the needed sets (only when a hadron beam is used)
        if self.run_card['pdlabel'] == 'lhapdf' and \
                (self.banner.get_detail('run_card', 'lpp1') != 0 or \
                 self.banner.get_detail('run_card', 'lpp2') != 0):

            self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            lhaid_list = self.run_card['lhaid']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)

        else:
            if self.run_card['lpp1'] == 1 == self.run_card['lpp2']:
                logger.info('Using built-in libraries for PDFs')
            # empty string disables lhapdf in make_opts
            self.make_opts_var['lhapdf'] = ""

        # APPLgrid setup (iappl != 0 requires applgrid-config and amcfast-config)
        if self.run_card['iappl'] != 0:
            self.make_opts_var['applgrid'] = 'True'
            # check that applgrid and amcfast are installed and new enough
            for code in ['applgrid','amcfast']:
                try:
                    p = subprocess.Popen([self.options[code], '--version'], \
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                except OSError:
                    raise aMCatNLOError(('No valid %s installation found. \n' + \
                       'Please set the path to %s-config by using \n' + \
                       'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
                else:
                    output, _ = p.communicate()
                    # NOTE(review): 'is' compares object identity, not equality;
                    # it only works here thanks to CPython string interning and
                    # should be '=='. The version test is also a lexicographic
                    # string comparison, which misorders e.g. '1.10' vs '1.4',
                    # and the message ('1.4.69') disagrees with the bound
                    # ('1.4.63') -- flag for an upstream fix.
                    if code is 'applgrid' and output < '1.4.63':
                        raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\
                                             +' You are using %s',output)
                    if code is 'amcfast' and output < '1.1.1':
                        raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\
                                             +' You are using %s',output)

            # replace the APPLLIBS line in Source/make_opts with the detected paths
            appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \
                             % (self.options['amcfast'],self.options['applgrid'])
            text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines()
            text_out=[]
            for line in text:
                if line.strip().startswith('APPLLIBS=$'):
                    line=appllibs
                text_out.append(line)
            with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock:
                fsock.writelines(text_out)
        else:
            self.make_opts_var['applgrid'] = ""

        if 'fastjet' in self.options.keys() and self.options['fastjet']:
            self.make_opts_var['fastjet_config'] = self.options['fastjet']

        # write out make_opts with the variables collected above
        self.update_make_opts()

        # make Source
        self.update_status('Compiling source...', level=None)
        misc.compile(['clean4pdf'], cwd = sourcedir)
        misc.compile(cwd = sourcedir)
        if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
          and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
          and os.path.exists(pjoin(libdir, 'libmodel.a')) \
          and os.path.exists(pjoin(libdir, 'libpdf.a')):
            logger.info(' ...done, continuing with P* directories')
        else:
            raise aMCatNLOError('Compilation failed')

        # make StdHEP (needed for PY6/HW6 showers); failure is non-fatal
        MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
        if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
            if os.path.exists(pjoin(sourcedir,'StdHEP')):
                logger.info('Compiling StdHEP (can take a couple of minutes) ...')
                try:
                    misc.compile(['StdHEP'], cwd = sourcedir)
                except Exception as error:
                    logger.debug(str(error))
                    logger.warning("StdHep failed to compiled. This forbids to run NLO+PS with PY6 and Herwig6")
                    logger.info("details on the compilation error are available if the code is run with --debug flag")
                else:
                    logger.info(' ...done.')
            else:
                logger.warning('Could not compile StdHEP because its'+\
                   ' source directory could not be found in the SOURCE folder.\n'+\
                   " Check the MG5_aMC option 'output_dependencies'.\n"+\
                   " This will prevent the use of HERWIG6/Pythia6 shower.")

        # make CutTools (required); compiled serially (-j1)
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
            if os.path.exists(pjoin(sourcedir,'CutTools')):
                logger.info('Compiling CutTools (can take a couple of minutes) ...')
                misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1)
                logger.info(' ...done.')
            else:
                raise aMCatNLOError('Could not compile CutTools because its'+\
                   ' source directory could not be found in the SOURCE folder.\n'+\
                   " Check the MG5_aMC option 'output_dependencies.'")
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
            raise aMCatNLOError('CutTools compilation failed.')

        # recompile CutTools if it was built with a different fortran compiler
        compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
                                  libdir, 'libcts.a')))),'compiler_version.log')
        if os.path.exists(compiler_log_path):
            compiler_version_used = open(compiler_log_path,'r').read()
            if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
                       pjoin(sourcedir,'make_opts')))) in compiler_version_used:
                if os.path.exists(pjoin(sourcedir,'CutTools')):
                    logger.info('CutTools was compiled with a different fortran'+\
                                ' compiler. Re-compiling it now...')
                    misc.compile(['cleanCT'], cwd = sourcedir)
                    misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1)
                    logger.info(' ...done.')
                else:
                    raise aMCatNLOError("CutTools installation in %s"\
                            %os.path.realpath(pjoin(libdir, 'libcts.a'))+\
                            " seems to have been compiled with a different compiler than"+\
                            " the one specified in MG5_aMC. Please recompile CutTools.")

        # make IREGI when its source ships with the process directory
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
           and os.path.exists(pjoin(sourcedir,'IREGI')):
            logger.info('Compiling IREGI (can take a couple of minutes) ...')
            misc.compile(['IREGI'], cwd = sourcedir)
            logger.info(' ...done.')

        if os.path.exists(pjoin(libdir, 'libiregi.a')):
            # same compiler-consistency check as for CutTools
            compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
                                      libdir, 'libiregi.a')))),'compiler_version.log')
            if os.path.exists(compiler_log_path):
                compiler_version_used = open(compiler_log_path,'r').read()
                if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
                           pjoin(sourcedir,'make_opts')))) in compiler_version_used:
                    if os.path.exists(pjoin(sourcedir,'IREGI')):
                        logger.info('IREGI was compiled with a different fortran'+\
                                    ' compiler. Re-compiling it now...')
                        misc.compile(['cleanIR'], cwd = sourcedir)
                        misc.compile(['IREGI'], cwd = sourcedir)
                        logger.info(' ...done.')
                    else:
                        raise aMCatNLOError("IREGI installation in %s"\
                                %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\
                                " seems to have been compiled with a different compiler than"+\
                                " the one specified in MG5_aMC. Please recompile IREGI.")

        # for processes with loops (and no external OLP), also run check_poles
        if self.proc_characteristics['has_loops'] and \
                          not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
            if mode in ['NLO', 'aMC@NLO', 'noshower']:
                tests.append('check_poles')

        # compile the P* directories in parallel and run the tests
        self.update_status('Compiling directories...', level=None)

        for test in tests:
            self.write_test_input(test)

        # determine the number of cores to compile on
        try:
            import multiprocessing
            if not self.nb_core:
                try:
                    self.nb_core = int(self.options['nb_core'])
                except TypeError:
                    self.nb_core = multiprocessing.cpu_count()
        except ImportError:
            self.nb_core = 1

        compile_options = copy.copy(self.options)
        compile_options['nb_core'] = self.nb_core
        compile_cluster = cluster.MultiCore(**compile_options)
        logger.info('Compiling on %d cores' % self.nb_core)

        # progress callbacks are not wanted during compilation
        update_status = lambda i, r, f: self.donothing(i,r,f)
        for p_dir in p_dirs:
            compile_cluster.submit(prog = compile_dir,
                               argument = [self.me_dir, p_dir, mode, options,
                                           tests, exe, self.options['run_mode']])
        try:
            compile_cluster.wait(self.me_dir, update_status)
        except Exception, error:
            logger.warning("Fail to compile the Subprocesses")
            if __debug__:
                raise
            compile_cluster.remove()
            self.do_quit('')

        logger.info('Checking test output:')
        for p_dir in p_dirs:
            logger.info(p_dir)
            for test in tests:
                logger.info(' Result for %s:' % test)

                this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
                # parse the test log; raises if the test failed
                self.check_tests(test, this_dir)
5235
5236
5239
5240
5242 """just call the correct parser for the test log.
5243 Skip check_poles for LOonly folders"""
5244 if test in ['test_ME', 'test_MC']:
5245 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
5246 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')):
5247 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
5248
5249
5251 """read and parse the test_ME/MC.log file"""
5252 content = open(log).read()
5253 if 'FAILED' in content:
5254 logger.info('Output of the failing test:\n'+content[:-1],'$MG:BOLD')
5255 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
5256 'Please check that widths of final state particles (e.g. top) have been' + \
5257 ' set to 0 in the param_card.dat.')
5258 else:
5259 lines = [l for l in content.split('\n') if 'PASSED' in l]
5260 logger.info(' Passed.')
5261 logger.debug('\n'+'\n'.join(lines))
5262
5263
5265 """reads and parse the check_poles.log file"""
5266 content = open(log).read()
5267 npass = 0
5268 nfail = 0
5269 for line in content.split('\n'):
5270 if 'PASSED' in line:
5271 npass +=1
5272 tolerance = float(line.split()[1])
5273 if 'FAILED' in line:
5274 nfail +=1
5275 tolerance = float(line.split()[1])
5276
5277 if nfail + npass == 0:
5278 logger.warning('0 points have been tried')
5279 return
5280
5281 if float(nfail)/float(nfail+npass) > 0.1:
5282 raise aMCatNLOError('Poles do not cancel, run cannot continue')
5283 else:
5284 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \
5285 %(npass, nfail+npass, tolerance))
5286
5287
5309
5310
5311 action_switcher = AskRunNLO
5312
5314 """Ask the question when launching generate_events/multi_run"""
5315
5316 if 'parton' not in options:
5317 options['parton'] = False
5318 if 'reweightonly' not in options:
5319 options['reweightonly'] = False
5320
5321 if mode == 'auto':
5322 mode = None
5323 if not mode and (options['parton'] or options['reweightonly']):
5324 mode = 'noshower'
5325
5326 passing_cmd = []
5327 for key,value in switch.keys():
5328 passing_cmd.append('%s=%s' % (key,value))
5329
5330 if 'do_reweight' in options and options['do_reweight']:
5331 passing_cmd.append('reweight=ON')
5332 if 'do_madspin' in options and options['do_madspin']:
5333 passing_cmd.append('madspin=ON')
5334
5335 force = self.force
5336 if mode == 'onlyshower':
5337 passing_cmd.append('onlyshower')
5338 force = True
5339 elif mode:
5340 passing_cmd.append(mode)
5341
5342 switch, cmd_switch = self.ask('', '0', [], ask_class = self.action_switcher,
5343 mode=mode, force=force,
5344 first_cmd=passing_cmd,
5345 return_instance=True)
5346
5347 if 'mode' in switch:
5348 mode = switch['mode']
5349
5350
5351 if not mode or mode == 'auto':
5352 if switch['order'] == 'LO':
5353 if switch['runshower']:
5354 mode = 'aMC@LO'
5355 elif switch['fixed_order'] == 'ON':
5356 mode = 'LO'
5357 else:
5358 mode = 'noshowerLO'
5359 elif switch['order'] == 'NLO':
5360 if switch['runshower']:
5361 mode = 'aMC@NLO'
5362 elif switch['fixed_order'] == 'ON':
5363 mode = 'NLO'
5364 else:
5365 mode = 'noshower'
5366 logger.info('will run in mode: %s' % mode)
5367
5368 if mode == 'noshower':
5369 if switch['shower'] == 'OFF':
5370 logger.warning("""You have chosen not to run a parton shower.
5371 NLO events without showering are NOT physical.
5372 Please, shower the LesHouches events before using them for physics analyses.
5373 You have to choose NOW which parton-shower you WILL use and specify it in the run_card.""")
5374 else:
5375 logger.info("""Your Parton-shower choice is not available for running.
5376 The events will be generated for the associated Parton-Shower.
5377 Remember that NLO events without showering are NOT physical.""", '$MG:BOLD')
5378
5379
5380
5381 cards = ['param_card.dat', 'run_card.dat']
5382 ignore = []
5383 if mode in ['LO', 'NLO']:
5384 options['parton'] = True
5385 ignore = ['shower_card.dat', 'madspin_card.dat']
5386 cards.append('FO_analyse_card.dat')
5387 else:
5388 if switch['madspin'] != 'OFF':
5389 cards.append('madspin_card.dat')
5390 if switch['reweight'] != 'OFF':
5391 cards.append('reweight_card.dat')
5392 if switch['madanalysis'] in ['HADRON', 'ON']:
5393 cards.append('madanalysis5_hadron_card.dat')
5394 if 'aMC@' in mode:
5395 cards.append('shower_card.dat')
5396 if mode == 'onlyshower':
5397 cards = ['shower_card.dat']
5398 if options['reweightonly']:
5399 cards = ['run_card.dat']
5400
5401 self.keep_cards(cards, ignore)
5402
5403 if mode =='onlyshower':
5404 cards = ['shower_card.dat']
5405
5406
5407
5408 first_cmd = cmd_switch.get_cardcmd()
5409
5410 if not options['force'] and not self.force:
5411 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd)
5412
5413 self.banner = banner_mod.Banner()
5414
5415
5416 for card in cards:
5417 self.banner.add(pjoin(self.me_dir, 'Cards', card))
5418
5419 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
5420 self.banner.add_text('run_settings', run_settings)
5421
5422 if not mode =='onlyshower':
5423 self.run_card = self.banner.charge_card('run_card')
5424 self.run_tag = self.run_card['run_tag']
5425
5426 if not hasattr(self, 'run_name') or not self.run_name:
5427 self.run_name = self.find_available_run_name(self.me_dir)
5428
5429 if self.run_name.startswith('run_'):
5430 if mode in ['LO','aMC@LO','noshowerLO']:
5431 self.run_name += '_LO'
5432 self.set_run_name(self.run_name, self.run_tag, 'parton')
5433 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
5434 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
5435 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']:
5436 logger.warning("""You are running with FxFx merging enabled. To be able to merge
5437 samples of various multiplicities without double counting, you
5438 have to remove some events after showering 'by hand'. Please
5439 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
5440 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
5441 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""")
5442 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8' and self.run_card['parton_shower'].upper() != 'HERWIGPP':
5443 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
5444 "Type \'n\' to stop or \'y\' to continue"
5445 answers = ['n','y']
5446 answer = self.ask(question, 'n', answers)
5447 if answer == 'n':
5448 error = '''Stop opertation'''
5449 self.ask_run_configuration(mode, options)
5450
5451 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
5452
5453 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""")
5454 if 'aMC@' in mode or mode == 'onlyshower':
5455 self.shower_card = self.banner.charge_card('shower_card')
5456
5457 elif mode in ['LO', 'NLO']:
5458 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat')
5459 self.analyse_card = self.banner.charge_card('FO_analyse_card')
5460
5461 return mode
5462
    """The command line processor of MadGraph"""

    # ---- option parsers for the interface commands (class attributes) ----

    # 'compile' command: compile for fixed-order (FO) or matched (MC) running
    _compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                " MODE can be either FO, for fixed-order computations, \n" + \
                " or MC for matching with parton-shower monte-carlos. \n" + \
                " (if omitted, it is set to MC)\n"
    _compile_parser = misc.OptionParser(usage=_compile_usage)
    _compile_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")

    # 'launch' command: full run (cross section and, depending on MODE, events)
    _launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                " computation of the total cross section and the filling of parton-level histograms \n" + \
                " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                " event file is generated which will be showered with the MonteCarlo specified \n" + \
                " in the run_card.dat\n"

    _launch_parser = misc.OptionParser(usage=_launch_usage)
    _launch_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
    _launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                    "to shower the file in order to get physical results)")
    _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
    _launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                            help="Run the reweight module (reweighting by different model parameters)")
    _launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                            help="Run the madspin package")

    # 'generate_events' command: same semantics as 'launch'
    _generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                " computation of the total cross section and the filling of parton-level histograms \n" + \
                " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                " event file is generated which will be showered with the MonteCarlo specified \n" + \
                " in the run_card.dat\n"

    _generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
    _generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the generate_events, without editing them")
    _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                    "to shower the file in order to get physical results)")
    _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
    _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")

    # 'calculate_xsect' command: cross section only, LO or NLO
    _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                " ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

    _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
    _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
    _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")

    # 'shower' command: shower/hadronize an existing parton-level run
    _shower_usage = 'shower run_name [options]\n' + \
            '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
            ' all the information (e.g. number of events, MonteCarlo, ...\n' + \
            ' are directly read from the header of the event file\n'
    _shower_parser = misc.OptionParser(usage=_shower_usage)
    _shower_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the shower_card present in the directory for the launch, without editing")
5579
5580 if '__main__' == __name__:
5581
5582
5583 import sys
5584 if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
5585 sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\
5586 'Please upgrate your version of python.')
5587
5588 import os
5589 import optparse
5590
5591
5592 root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ )))
5593 sys.path.insert(0, root_path)
5597 - def error(self, msg=''):
5599
5600 usage = "usage: %prog [options] [FILE] "
5601 parser = MyOptParser(usage=usage)
5602 parser.add_option("-l", "--logging", default='INFO',
5603 help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
5604 parser.add_option("","--web", action="store_true", default=False, dest='web', \
5605 help='force toce to be in secure mode')
5606 parser.add_option("","--debug", action="store_true", default=False, dest='debug', \
5607 help='force to launch debug mode')
5608 parser_error = ''
5609 done = False
5610
5611 for i in range(len(sys.argv)-1):
5612 try:
5613 (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
5614 done = True
5615 except MyOptParser.InvalidOption, error:
5616 pass
5617 else:
5618 args += sys.argv[len(sys.argv)-i:]
5619 if not done:
5620
5621 try:
5622 (options, args) = parser.parse_args()
5623 except MyOptParser.InvalidOption, error:
5624 print error
5625 sys.exit(2)
5626
5627 if len(args) == 0:
5628 args = ''
5629
5630 import subprocess
5631 import logging
5632 import logging.config
5633
5634
5635 import internal.coloring_logging
5636 try:
5637 if __debug__ and options.logging == 'INFO':
5638 options.logging = 'DEBUG'
5639 if options.logging.isdigit():
5640 level = int(options.logging)
5641 else:
5642 level = eval('logging.' + options.logging)
5643 print os.path.join(root_path, 'internal', 'me5_logging.conf')
5644 logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
5645 logging.root.setLevel(level)
5646 logging.getLogger('madgraph').setLevel(level)
5647 except:
5648 raise
5649 pass
5650
5651
5652 try:
5653 if args:
5654
5655 if '--web' in args:
5656 i = args.index('--web')
5657 args.pop(i)
5658 cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path),force_run=True)
5659 else:
5660 cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path),force_run=True)
5661
5662 if not hasattr(cmd_line, 'do_%s' % args[0]):
5663 if parser_error:
5664 print parser_error
5665 print 'and %s can not be interpreted as a valid command.' % args[0]
5666 else:
5667 print 'ERROR: %s not a valid command. Please retry' % args[0]
5668 else:
5669 cmd_line.use_rawinput = False
5670 cmd_line.run_cmd(' '.join(args))
5671 cmd_line.run_cmd('quit')
5672
5673 except KeyboardInterrupt:
5674 print 'quit on KeyboardInterrupt'
5675 pass
5676