15 """ A python file to replace the fortran script gen_ximprove.
16 This script analyses the result of the survey/ previous refine and
17 creates the jobs for the following script.
18 """
from __future__ import division

import collections
import os
import glob
import logging
import math
import re
import subprocess
import shutil

try:
    import madgraph
except ImportError:
    MADEVENT = True
    import internal.sum_html as sum_html
    import internal.banner as bannermod
    import internal.misc as misc
    import internal.files as files
    import internal.cluster as cluster
    import internal.combine_grid as combine_grid
    import internal.combine_runs as combine_runs
    import internal.lhe_parser as lhe_parser
else:
    MADEVENT = False
    import madgraph.madevent.sum_html as sum_html
    import madgraph.various.banner as bannermod
    import madgraph.various.misc as misc
    import madgraph.iolibs.files as files
    import madgraph.various.cluster as cluster
    import madgraph.madevent.combine_grid as combine_grid
    import madgraph.madevent.combine_runs as combine_runs
    import madgraph.various.lhe_parser as lhe_parser

logger = logging.getLogger('madgraph.madevent.gen_ximprove')
pjoin = os.path.join
57 """a class to call the fortran gensym executable and handle it's output
58 in order to create the various job that are needed for the survey"""
59
60
61 @ staticmethod
64
    combining_job = 2
    splitted_grid = False
    min_iterations = 3
    mode = "survey"
69
70
    def __init__(self, cmd, opt=None):

        try:
            super(gensym, self).__init__(cmd, opt)
        except TypeError:
            pass

        self.run_statistics = {}

        self.cmd = cmd
        self.run_card = cmd.run_card
        self.me_dir = cmd.me_dir
84
85
86
87 self.cross = collections.defaultdict(int)
88 self.abscross = collections.defaultdict(int)
89 self.sigma = collections.defaultdict(int)
90 self.chi2 = collections.defaultdict(int)
91
92 self.splitted_grid = False
93 if self.cmd.proc_characteristics['loop_induced']:
94 nexternal = self.cmd.proc_characteristics['nexternal']
95 self.splitted_grid = max(2, (nexternal-2)**2)
96 if hasattr(self.cmd, "opts") and self.cmd.opts['accuracy'] == 0.1:
97 self.cmd.opts['accuracy'] = 0.02
98
99 if isinstance(cmd.cluster, cluster.MultiCore) and self.splitted_grid > 1:
100 self.splitted_grid = int(cmd.cluster.nb_core**0.5)
101 if self.splitted_grid == 1 and cmd.cluster.nb_core >1:
102 self.splitted_grid = 2
103
104
105 if self.run_card['survey_splitting'] != -1:
106 self.splitted_grid = self.run_card['survey_splitting']
107 if self.run_card['survey_nchannel_per_job'] != -1:
108 self.combining_job = self.run_card['survey_nchannel_per_job']
109
110 self.splitted_Pdir = {}
111 self.splitted_for_dir = lambda x,y: self.splitted_grid
112 self.combining_job_for_Pdir = lambda x: self.combining_job
113 self.lastoffset = {}
114
    def launch(self, to_submit=True, clean=True):
        """ """
117
118 self.subproc = [l.strip() for l in open(pjoin(self.me_dir,'SubProcesses',
119 'subproc.mg'))]
120 subproc = self.subproc
121
122 P_zero_result = []
123
124 nb_tot_proc = len(subproc)
125 job_list = {}
126 for nb_proc,subdir in enumerate(subproc):
127 self.cmd.update_status('Compiling for process %s/%s. <br> (previous processes already running)' % \
128 (nb_proc+1,nb_tot_proc), level=None)
129
130 subdir = subdir.strip()
131 Pdir = pjoin(self.me_dir, 'SubProcesses',subdir)
132 logger.info(' %s ' % subdir)
133
134
135 if clean:
136 for match in misc.glob('*ajob*', Pdir):
137 if os.path.basename(match)[:4] in ['ajob', 'wait', 'run.', 'done']:
138 os.remove(match)
139 for match in misc.glob('G*', Pdir):
140 if os.path.exists(pjoin(match,'results.dat')):
141 os.remove(pjoin(match, 'results.dat'))
142 if os.path.exists(pjoin(match, 'ftn25')):
143 os.remove(pjoin(match, 'ftn25'))
144
145
146 self.cmd.compile(['gensym'], cwd=Pdir)
147 if not os.path.exists(pjoin(Pdir, 'gensym')):
raise Exception, 'Error: make gensym not successful'
149
150
151 p = misc.Popen(['./gensym'], stdout=subprocess.PIPE,
152 stderr=subprocess.STDOUT, cwd=Pdir)
153
154 (stdout, _) = p.communicate('')
155
156 if os.path.exists(pjoin(self.me_dir,'error')):
157 files.mv(pjoin(self.me_dir,'error'), pjoin(Pdir,'ajob.no_ps.log'))
158 P_zero_result.append(subdir)
159 continue
160
161 jobs = stdout.split()
162 job_list[Pdir] = jobs
163 try:
164
165 [float(s) for s in jobs]
166 except Exception:
logger.debug("unformatted string found in gensym output. Please check:\n %s" % stdout)
168 done=False
169 job_list[Pdir] = []
170 lines = stdout.split('\n')
171 for l in lines:
172 try:
173 [float(s) for s in l.split()]
174 except:
175 continue
176 else:
177 if done:
178 raise Exception, 'Parsing error in gensym: %s' % stdout
179 job_list[Pdir] = l.split()
180 done = True
181 if not done:
182 raise Exception, 'Parsing error in gensym: %s' % stdout
183
184 self.cmd.compile(['madevent'], cwd=Pdir)
185 if to_submit:
186 self.submit_to_cluster(job_list)
187 job_list = {}
188
189 return job_list, P_zero_result
190
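
# Illustrative sketch (not part of the original module): how the gensym output parsed in
# launch() above is interpreted. gensym is expected to print the list of channel
# identifiers on a single line; when extra text is mixed in, the code falls back to
# scanning line by line for the unique all-numeric line. The stand-alone helper below
# (a hypothetical name) approximates that logic on a plain string.
def _parse_gensym_output_sketch(stdout):
    """Return the list of channel identifiers found in a gensym stdout string."""
    tokens = stdout.split()
    try:
        [float(s) for s in tokens]
        return tokens                       # simple case: only numbers were printed
    except ValueError:
        pass
    channels = None
    for line in stdout.split('\n'):
        if not line.split():
            continue                        # skip empty lines
        try:
            [float(s) for s in line.split()]
        except ValueError:
            continue                        # line contains non-numeric text: skip it
        if channels is not None:
            raise Exception('Parsing error in gensym: %s' % stdout)
        channels = line.split()
    if channels is None:
        raise Exception('Parsing error in gensym: %s' % stdout)
    return channels
# Example: _parse_gensym_output_sketch("some banner\n 1 2 3 4\n") -> ['1', '2', '3', '4']
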
    def resubmit(self, min_precision=1.0, resubmit_zero=False):
        """collect the results of the current run and relaunch each channel
        that is not completed, or optionally a completed one whose precision is
        worse than a threshold (and/or the zero-result channels)"""
195
196 job_list, P_zero_result = self.launch(to_submit=False, clean=False)
197
198 for P , jobs in dict(job_list).items():
199 misc.sprint(jobs)
200 to_resub = []
201 for job in jobs:
202 if os.path.exists(pjoin(P, 'G%s' % job)) and os.path.exists(pjoin(P, 'G%s' % job, 'results.dat')):
203 one_result = sum_html.OneResult(job)
204 try:
205 one_result.read_results(pjoin(P, 'G%s' % job, 'results.dat'))
206 except:
207 to_resub.append(job)
208 if one_result.xsec == 0:
209 if resubmit_zero:
210 to_resub.append(job)
211 elif max(one_result.xerru, one_result.xerrc)/one_result.xsec > min_precision:
212 to_resub.append(job)
213 else:
214 to_resub.append(job)
215 if to_resub:
216 for G in to_resub:
217 try:
218 shutil.rmtree(pjoin(P, 'G%s' % G))
219 except Exception, error:
220 misc.sprint(error)
221 pass
222 misc.sprint(to_resub)
223 self.submit_to_cluster({P: to_resub})
224
225
226
227
228
229
230
231
232
233
234
236 """ """
237
238 if self.run_card['job_strategy'] > 0:
239 if len(job_list) >1:
240 for path, dirs in job_list.items():
241 self.submit_to_cluster({path:dirs})
242 return
243 path, value = job_list.items()[0]
244 nexternal = self.cmd.proc_characteristics['nexternal']
245 current = open(pjoin(path, "nexternal.inc")).read()
246 ext = re.search(r"PARAMETER \(NEXTERNAL=(\d+)\)", current).group(1)
247
248 if self.run_card['job_strategy'] == 2:
249 self.splitted_grid = 2
250 if nexternal == int(ext):
251 to_split = 2
252 else:
253 to_split = 0
254 if hasattr(self, 'splitted_Pdir'):
255 self.splitted_Pdir[path] = to_split
256 else:
257 self.splitted_Pdir = {path: to_split}
258 self.splitted_for_dir = lambda x,y : self.splitted_Pdir[x]
259 elif self.run_card['job_strategy'] == 1:
260 if nexternal == int(ext):
261 combine = 1
262 else:
263 combine = self.combining_job
264 if hasattr(self, 'splitted_Pdir'):
265 self.splitted_Pdir[path] = combine
266 else:
267 self.splitted_Pdir = {path: combine}
268 self.combining_job_for_Pdir = lambda x : self.splitted_Pdir[x]
269
270 if not self.splitted_grid:
271 return self.submit_to_cluster_no_splitting(job_list)
272 elif self.cmd.cluster_mode == 0:
273 return self.submit_to_cluster_no_splitting(job_list)
274 elif self.cmd.cluster_mode == 2 and self.cmd.options['nb_core'] == 1:
275 return self.submit_to_cluster_no_splitting(job_list)
276 else:
277 return self.submit_to_cluster_splitted(job_list)
278
279
281 """submit the survey without the parralelization.
282 This is the old mode which is still usefull in single core"""
283
284
285 self.write_parameter(parralelization=False, Pdirs=job_list.keys())
286
287
288
289 for Pdir, jobs in job_list.items():
290 jobs = list(jobs)
291 i=0
292 while jobs:
293 i+=1
294 to_submit = ['0']
295 for _ in range(self.combining_job_for_Pdir(Pdir)):
296 if jobs:
297 to_submit.append(jobs.pop(0))
298
299 self.cmd.launch_job(pjoin(self.me_dir, 'SubProcesses', 'survey.sh'),
300 argument=to_submit,
301 cwd=pjoin(self.me_dir,'SubProcesses' , Pdir))
302
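
# Illustrative sketch (not part of the original module): the loop above groups the channel
# list into chunks of combining_job_for_Pdir(Pdir) channels and prepends the literal '0'
# to each argument list before calling survey.sh (the meaning of that leading '0' is an
# assumption here; it simply mirrors what the code above passes). A stand-alone version
# of the chunking:
def _chunk_survey_arguments_sketch(jobs, chunk_size):
    """Yield the argument lists that would be passed to survey.sh."""
    jobs = list(jobs)
    while jobs:
        to_submit = ['0']
        for _ in range(chunk_size):
            if jobs:
                to_submit.append(jobs.pop(0))
        yield to_submit
# Example: list(_chunk_survey_arguments_sketch(['1', '2', '3'], 2))
#          -> [['0', '1', '2'], ['0', '3']]
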
303
305 """prepare the input_file for submitting the channel"""
306
307
308 if 'SubProcesses' not in Pdir:
309 Pdir = pjoin(self.me_dir, 'SubProcesses', Pdir)
310
311
312 self.splitted_Pdir[(Pdir, G)] = int(nb_job)
313
314
315
316 run_card = self.cmd.run_card
317 options = {'event' : submit_ps,
318 'maxiter': 1,
319 'miniter': 1,
320 'accuracy': self.cmd.opts['accuracy'],
321 'helicity': run_card['nhel_survey'] if 'nhel_survey' in run_card \
322 else run_card['nhel'],
323 'gridmode': -2,
324 'channel' : G
325 }
326
327 Gdir = pjoin(Pdir, 'G%s' % G)
328 self.write_parameter_file(pjoin(Gdir, 'input_app.txt'), options)
329
330
331 assert os.path.exists(pjoin(Gdir, "ftn25"))
332
333
334
335
336 packet = cluster.Packet((Pdir, G, step+1),
337 self.combine_iteration,
338 (Pdir, G, step+1))
339
340 if step ==0:
341 self.lastoffset[(Pdir, G)] = 0
342
343
344 for i in xrange(int(nb_job)):
345 name = "G%s_%s" % (G,i+1)
346 self.lastoffset[(Pdir, G)] += 1
347 offset = self.lastoffset[(Pdir, G)]
348 self.cmd.launch_job(pjoin(self.me_dir, 'SubProcesses', 'refine_splitted.sh'),
349 argument=[name, 'G%s'%G, offset],
350 cwd= Pdir,
351 packet_member=packet)
352
353
355 """ submit the version of the survey with splitted grid creation
356 """
357
358
359
360
361 for Pdir, jobs in job_list.items():
362 if self.splitted_for_dir(Pdir, jobs[0]) <= 1:
363 return self.submit_to_cluster_no_splitting({Pdir:jobs})
364
365 self.write_parameter(parralelization=True, Pdirs=[Pdir])
366
367
368 for job in jobs:
369 packet = cluster.Packet((Pdir, job, 1), self.combine_iteration, (Pdir, job, 1))
370 for i in range(self.splitted_for_dir(Pdir, job)):
371 self.cmd.launch_job(pjoin(self.me_dir, 'SubProcesses', 'survey.sh'),
372 argument=[i+1, job],
373 cwd=pjoin(self.me_dir,'SubProcesses' , Pdir),
374 packet_member=packet)
375
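
# Illustrative sketch (not part of the original module): each group of split survey jobs
# created above is attached to a cluster.Packet whose callback, combine_iteration, runs
# once the whole group has finished. The toy class below only illustrates that
# callback-after-completion idea; it is not the real cluster.Packet API.
class _PacketSketch(object):
    """Run callback(*args) once `expected` members have reported completion."""
    def __init__(self, expected, callback, args):
        self.expected = expected
        self.callback = callback
        self.args = args
        self.done = 0
    def member_finished(self):
        self.done += 1
        if self.done == self.expected:
            return self.callback(*self.args)
# Example: p = _PacketSketch(2, lambda Pdir, G, step: (Pdir, G, step), ('P1', '2', 1))
#          p.member_finished(); p.member_finished()   # second call triggers the callback
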
    def combine_iteration(self, Pdir, G, step):
378 grid_calculator, cross, error = self.combine_grid(Pdir, G, step)
379
380
381 nb_events = grid_calculator.target_evt
382
383 Gdirs = []
384 for i in range(self.splitted_for_dir(Pdir, G)):
385 path = pjoin(Pdir, "G%s_%s" % (G, i+1))
386 Gdirs.append(path)
387
388
389
390
391
392 need_submit = False
393 if step < self.min_iterations and cross != 0:
394 if step == 1:
395 need_submit = True
396 else:
397 across = self.abscross[(Pdir,G)]/(self.sigma[(Pdir,G)]+1e-99)
398 tot_across = self.get_current_axsec()
399 if across / tot_across < 1e-6:
400 need_submit = False
401 elif error < self.cmd.opts['accuracy'] / 100:
402 need_submit = False
403 else:
404 need_submit = True
405
406 elif step >= self.cmd.opts['iterations']:
407 need_submit = False
408 elif self.cmd.opts['accuracy'] < 0:
409
410 raise Exception, "Not Implemented"
411 elif self.abscross[(Pdir,G)] == 0:
412 need_submit = False
413 else:
414 across = self.abscross[(Pdir,G)]/(self.sigma[(Pdir,G)]+1e-99)
415 tot_across = self.get_current_axsec()
416 if across == 0:
417 need_submit = False
418 elif across / tot_across < 1e-5:
419 need_submit = False
420 elif error > self.cmd.opts['accuracy']:
421 need_submit = True
422 else:
423 need_submit = False
424
425
426 if cross:
427 grid_calculator.write_grid_for_submission(Pdir,G,
428 self.splitted_for_dir(Pdir, G),
429 nb_events,mode=self.mode,
430 conservative_factor=5.0)
431
432 xsec_format = '.%ig'%(max(3,int(math.log10(1.0/float(error)))+2)
433 if float(cross)!=0.0 and float(error)!=0.0 else 8)
434 if need_submit:
435 message = "%%s/G%%s is at %%%s +- %%.3g pb. Now submitting iteration #%s."%(xsec_format, step+1)
436 logger.info(message%\
437 (os.path.basename(Pdir), G, float(cross),
438 float(error)*float(cross)))
439 self.resubmit_survey(Pdir,G, Gdirs, step)
440 elif cross:
441 logger.info("Survey finished for %s/G%s at %s"%(
442 os.path.basename(Pdir),G,('%%%s +- %%.3g pb'%xsec_format))%
443 (float(cross), float(error)*float(cross)))
444
445 newGpath = pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G)
446 if not os.path.exists(newGpath):
447 os.mkdir(newGpath)
448
449
450 files.cp(pjoin(Gdirs[0], 'ftn25'),
451 pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G, 'ftn26'))
452
453
454 fsock = open(pjoin(newGpath, 'events.lhe'), 'w')
455 for Gdir in Gdirs:
456 fsock.write(open(pjoin(Gdir, 'events.lhe')).read())
457
458
459 files.cp(pjoin(Gdirs[0], 'log.txt'),
460 pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G))
461
462
463
464 self.write_results(grid_calculator, cross, error, Pdir, G, step)
465 else:
466 logger.info("Survey finished for %s/G%s [0 cross]", os.path.basename(Pdir),G)
467
468 Gdir = pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G)
469 if not os.path.exists(Gdir):
470 os.mkdir(Gdir)
471
472 files.cp(pjoin(Gdirs[0], 'log.txt'), Gdir)
473
474 self.write_results(grid_calculator, cross, error, Pdir, G, step)
475
476 return 0
477
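
# Illustrative sketch (not part of the original module): the '%.<n>g' format built above
# keeps just enough significant digits to resolve the quoted relative error,
# n = max(3, int(log10(1/error)) + 2), with a fallback to 8 digits when the cross
# section or the error vanishes.
def _xsec_format_sketch(cross, error):
    import math
    if float(cross) != 0.0 and float(error) != 0.0:
        ndigits = max(3, int(math.log10(1.0 / float(error))) + 2)
    else:
        ndigits = 8
    return '%.{0}g'.format(ndigits)
# Example: _xsec_format_sketch(1.234, 0.001) -> '%.5g'
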
    def combine_grid(self, Pdir, G, step, exclude_sub_jobs=[]):
        """ exclude_sub_jobs is used to remove some of the subjobs if a numerical
        issue is detected in one of them. A warning is issued when this occurs.
        """
482
483
484 grid_calculator = combine_grid.grid_information(self.run_card['nhel'])
485
486 for i in range(self.splitted_for_dir(Pdir, G)):
487 if i in exclude_sub_jobs:
488 continue
489 path = pjoin(Pdir, "G%s_%s" % (G, i+1))
490 fsock = misc.mult_try_open(pjoin(path, 'results.dat'))
491 one_result = grid_calculator.add_results_information(fsock)
492 fsock.close()
493 if one_result.axsec == 0:
494 grid_calculator.onefail = True
495 continue
496 fsock = misc.mult_try_open(pjoin(path, 'grid_information'))
497 grid_calculator.add_one_grid_information(fsock)
498 fsock.close()
499 os.remove(pjoin(path, 'results.dat'))
500
501
502
503
504
505
506 cross, across, sigma = grid_calculator.get_cross_section()
507
508
509
510 maxwgt = grid_calculator.get_max_wgt(0.01)
511 if maxwgt:
512 nunwgt = grid_calculator.get_nunwgt(maxwgt)
513
514
515
516
517 apply_instability_security = False
518 rel_contrib = 0.0
519 if (self.__class__ != gensym or step > 1):
520 Pdir_across = 0.0
521 Gdir_across = 0.0
522 for (mPdir,mG) in self.abscross.keys():
523 if mPdir == Pdir:
524 Pdir_across += (self.abscross[(mPdir,mG)]/
525 (self.sigma[(mPdir,mG)]+1e-99))
526 if mG == G:
527 Gdir_across += (self.abscross[(mPdir,mG)]/
528 (self.sigma[(mPdir,mG)]+1e-99))
529 rel_contrib = abs(Gdir_across/(Pdir_across+1e-99))
530 if rel_contrib > (1.0e-8) and \
531 nunwgt < 2 and len(grid_calculator.results) > 1:
532 apply_instability_security = True
533
534 if apply_instability_security:
535
536 th_maxwgt = [(r.th_maxwgt,i) for i,r in enumerate(grid_calculator.results)]
537 th_maxwgt.sort()
538 ratio = th_maxwgt[-1][0]/th_maxwgt[-2][0]
539 if ratio > 1e4:
logger.warning(
"""One event with a large weight has been found (ratio = %.3g) in channel G%s (with rel.contrib = %.3g).
This is likely due to numerical instabilities. The associated job is discarded to recover.
For offline investigation, the problematic discarded events are stored in:
%s""" % (ratio, G, rel_contrib, pjoin(Pdir, 'DiscardedUnstableEvents')))
545 exclude_sub_jobs = list(exclude_sub_jobs)
546 exclude_sub_jobs.append(th_maxwgt[-1][1])
547 grid_calculator.results.run_statistics['skipped_subchannel'] += 1
548
549
550 gPath = pjoin(Pdir, "G%s_%s" % (G, th_maxwgt[-1][1]+1))
551 if os.path.isfile(pjoin(gPath,'events.lhe')):
552 lhe_file = lhe_parser.EventFile(pjoin(gPath,'events.lhe'))
553 discardedPath = pjoin(Pdir,'DiscardedUnstableEvents')
554 if not os.path.exists(discardedPath):
555 os.mkdir(discardedPath)
556 if os.path.isdir(discardedPath):
557
558
559 evtRecord = open(pjoin(discardedPath,'discarded_G%s.dat'%G),'a')
560 lhe_file.seek(0)
561 try:
562 evtRecord.write('\n'+str(max(lhe_file,key=lambda evt:abs(evt.wgt))))
563 except Exception:
564
565 lhe_file.close()
evtRecord.write(open(pjoin(gPath, 'events.lhe')).read())
567 evtRecord.close()
568
569 return self.combine_grid(Pdir, G, step, exclude_sub_jobs)
570
571
572 if across !=0:
573 if sigma != 0:
574 self.cross[(Pdir,G)] += cross**3/sigma**2
575 self.abscross[(Pdir,G)] += across * cross**2/sigma**2
576 self.sigma[(Pdir,G)] += cross**2/ sigma**2
577 self.chi2[(Pdir,G)] += cross**4/sigma**2
578
579 cross = self.cross[(Pdir,G)]/self.sigma[(Pdir,G)]
580 if step > 1:
581 error = math.sqrt(abs((self.chi2[(Pdir,G)]/cross**2 - \
582 self.sigma[(Pdir,G)])/(step-1))/self.sigma[(Pdir,G)])
583 else:
584 error = sigma/cross
585 else:
586 self.cross[(Pdir,G)] = cross
587 self.abscross[(Pdir,G)] = across
588 self.sigma[(Pdir,G)] = 0
589 self.chi2[(Pdir,G)] = 0
590 cross = self.cross[(Pdir,G)]
591 error = 0
592
593 else:
594 error = 0
595
596 grid_calculator.results.compute_values(update_statistics=True)
597 if (str(os.path.basename(Pdir)), G) in self.run_statistics:
598 self.run_statistics[(str(os.path.basename(Pdir)), G)]\
599 .aggregate_statistics(grid_calculator.results.run_statistics)
600 else:
601 self.run_statistics[(str(os.path.basename(Pdir)), G)] = \
602 grid_calculator.results.run_statistics
603
604 self.warnings_from_statistics(G, grid_calculator.results.run_statistics)
605 stats_msg = grid_calculator.results.run_statistics.nice_output(
606 '/'.join([os.path.basename(Pdir),'G%s'%G]))
607
608 if stats_msg:
609 logger.log(5, stats_msg)
610
611
612 for i in range(self.splitted_for_dir(Pdir, G)):
613 path = pjoin(Pdir, "G%s_%s" % (G, i+1))
614 try:
615 os.remove(pjoin(path, 'grid_information'))
616 except OSError, oneerror:
617 if oneerror.errno != 2:
618 raise
619 return grid_calculator, cross, error
620
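
# Illustrative sketch (not part of the original module): combine_grid() above merges the
# successive iterations with weights w_i = cross_i**2 / sigma_i**2, so that the combined
# cross section is sum(w_i * cross_i) / sum(w_i), and the quoted error follows from the
# accumulated chi2. The helper below applies the same update rules to plain numbers,
# assuming every iteration has a non-zero sigma_i (the code above handles sigma == 0
# separately).
def _combine_iterations_sketch(iterations):
    """iterations: list of (cross_i, sigma_i) tuples; returns (cross, error)."""
    import math
    sum_cross = sum_w = sum_chi2 = 0.0
    for cross_i, sigma_i in iterations:
        w = cross_i**2 / sigma_i**2
        sum_cross += w * cross_i              # matches self.cross += cross**3/sigma**2
        sum_w += w                            # matches self.sigma += cross**2/sigma**2
        sum_chi2 += cross_i**4 / sigma_i**2   # matches self.chi2  += cross**4/sigma**2
    cross = sum_cross / sum_w
    if len(iterations) > 1:
        error = math.sqrt(abs((sum_chi2 / cross**2 - sum_w) / (len(iterations) - 1)) / sum_w)
    else:
        error = iterations[0][1] / cross
    return cross, error
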
622 """Possible warn user for worrying MadLoop stats for this channel."""
623
624 if stats['n_madloop_calls']==0:
625 return
626
627 EPS_fraction = float(stats['exceptional_points'])/stats['n_madloop_calls']
628
629 msg = "Channel %s has encountered a fraction of %.3g\n"+ \
630 "of numerically unstable loop matrix element computations\n"+\
631 "(which could not be rescued using quadruple precision).\n"+\
632 "The results might not be trusted."
633
634 if 0.01 > EPS_fraction > 0.001:
635 logger.warning(msg%(G,EPS_fraction))
636 elif EPS_fraction > 0.01:
637 logger.critical((msg%(G,EPS_fraction)).replace('might', 'can'))
638 raise Exception, (msg%(G,EPS_fraction)).replace('might', 'can')
639
    def get_current_axsec(self):
642 across = 0
643 for (Pdir,G) in self.abscross:
644 across += self.abscross[(Pdir,G)]/(self.sigma[(Pdir,G)]+1e-99)
645 return across
646
    def write_results(self, grid_calculator, cross, error, Pdir, G, step):
648
649
650 if cross == 0:
651 abscross,nw, luminosity = 0, 0, 0
652 wgt, maxit,nunwgt, wgt, nevents = 0,0,0,0,0
653 maxwgt = 0
654 error = 0
655 else:
656 grid_calculator.results.compute_values()
657 abscross = self.abscross[(Pdir,G)]/self.sigma[(Pdir,G)]
658 nw = grid_calculator.results.nw
659 wgt = grid_calculator.results.wgt
660 maxit = step
661 wgt = 0
662 nevents = grid_calculator.results.nevents
663 maxwgt = grid_calculator.get_max_wgt()
664 nunwgt = grid_calculator.get_nunwgt()
665 luminosity = nunwgt/cross
666
667
668 def fstr(nb):
669 data = '%E' % nb
670 nb, power = data.split('E')
671 nb = float(nb) /10
672 power = int(power) + 1
673 return '%.5fE%+03i' %(nb,power)
674 line = '%s %s %s %i %i %i %i %s %s %s %s 0.0 0\n' % \
675 (fstr(cross), fstr(error*cross), fstr(error*cross),
676 nevents, nw, maxit,nunwgt,
677 fstr(luminosity), fstr(wgt), fstr(abscross), fstr(maxwgt))
678
679 fsock = open(pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G,
680 'results.dat'),'w')
681 fsock.writelines(line)
682 fsock.close()
683
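
# Illustrative sketch (not part of the original module): fstr() above normalises a float
# into the fixed-width form 0.xxxxxE+nn used in results.dat (mantissa below one, exponent
# shifted by one). A stand-alone copy with worked examples:
def _fstr_sketch(nb):
    data = '%E' % nb
    mantissa, power = data.split('E')
    return '%.5fE%+03i' % (float(mantissa) / 10, int(power) + 1)
# _fstr_sketch(1234.5)  -> '0.12345E+04'
# _fstr_sketch(0.00125) -> '0.12500E-02'
# The results.dat line written above stores, in order: cross section, the error twice,
# nevents, nw, maxit, nunwgt, luminosity, wgt, abscross, maxwgt, then the literal
# trailing fields "0.0 0".
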
685 """submit the next iteration of the survey"""
686
687
688 run_card = self.cmd.run_card
689 options = {'event' : 2**(step) * self.cmd.opts['points'] / self.splitted_grid,
690 'maxiter': 1,
691 'miniter': 1,
692 'accuracy': self.cmd.opts['accuracy'],
693 'helicity': run_card['nhel_survey'] if 'nhel_survey' in run_card \
694 else run_card['nhel'],
695 'gridmode': -2,
696 'channel' : ''
697 }
698
699 if int(options['helicity']) == 1:
700 options['event'] = options['event'] * 2**(self.cmd.proc_characteristics['nexternal']//3)
701
702 for Gdir in Gdirs:
703 self.write_parameter_file(pjoin(Gdir, 'input_app.txt'), options)
704
705
706
707 packet = cluster.Packet((Pdir, G, step+1), self.combine_iteration, \
708 (Pdir, G, step+1))
709 nb_step = len(Gdirs) * (step+1)
710 for i,subdir in enumerate(Gdirs):
711 subdir = subdir.rsplit('_',1)[1]
712 subdir = int(subdir)
713 offset = nb_step+i+1
714 offset=str(offset)
715 tag = "%s.%s" % (subdir, offset)
716
717 self.cmd.launch_job(pjoin(self.me_dir, 'SubProcesses', 'survey.sh'),
718 argument=[tag, G],
719 cwd=pjoin(self.me_dir,'SubProcesses' , Pdir),
720 packet_member=packet)
721
722
723
724
726 """ """
727
728 template =""" %(event)s %(maxiter)s %(miniter)s !Number of events and max and min iterations
729 %(accuracy)s !Accuracy
730 %(gridmode)s !Grid Adjustment 0=none, 2=adjust
731 1 !Suppress Amplitude 1=yes
732 %(helicity)s !Helicity Sum/event 0=exact
733 %(channel)s """
734 options['event'] = int(options['event'])
735 open(path, 'w').write(template % options)
736
737
738
740 """Write the parameter of the survey run"""
741
742 run_card = self.cmd.run_card
743
744 options = {'event' : self.cmd.opts['points'],
745 'maxiter': self.cmd.opts['iterations'],
746 'miniter': self.min_iterations,
747 'accuracy': self.cmd.opts['accuracy'],
748 'helicity': run_card['nhel_survey'] if 'nhel_survey' in run_card \
749 else run_card['nhel'],
750 'gridmode': 2,
751 'channel': ''
752 }
753
754 if int(options['helicity'])== 1:
755 options['event'] = options['event'] * 2**(self.cmd.proc_characteristics['nexternal']//3)
756
757 if parralelization:
758 options['gridmode'] = -2
759 options['maxiter'] = 1
760 options['miniter'] = 1
761 options['event'] /= self.splitted_grid
762
763 if not Pdirs:
764 Pdirs = self.subproc
765
766 for Pdir in Pdirs:
767 path =pjoin(Pdir, 'input_app.txt')
768 self.write_parameter_file(path, options)
769
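
# Illustrative sketch (not part of the original module): both resubmit_survey() and
# write_parameter() above inflate the number of phase-space points by 2**(nexternal//3)
# when the helicity sum is done per event (helicity == 1), to compensate for the extra
# helicity sampling.
def _helicity_event_factor_sketch(nexternal):
    """Factor applied to options['event'] when the helicity sum is per event."""
    return 2**(nexternal // 3)
# e.g. _helicity_event_factor_sketch(5) -> 2,  _helicity_event_factor_sketch(8) -> 4
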


class gen_ximprove(object):

    # default tuning parameters for the refine step (used throughout below)
    gen_events_security = 1.2    # safety factor applied to the requested number of events
    combining_job = 0            # number of channels grouped in a single job script
    max_request_event = 1000     # split a channel when it needs more events than this
    max_event_in_iter = 5000
    min_event_in_iter = 1000
    max_splitting = 130          # maximum allowed number of splits per channel
    min_iter = 3
    max_iter = 9
    keep_grid_for_refine = False # reuse the channel grid for the split jobs
785
786
787 @ staticmethod
790

    def __new__(cls, cmd, opt):
        """Choose in which type of refine we want to be"""
794
795 if cmd.proc_characteristics['loop_induced']:
796 return super(gen_ximprove, cls).__new__(gen_ximprove_share, cmd, opt)
797 elif gen_ximprove.format_variable(cmd.run_card['gridpack'], bool):
798 raise Exception, "Not implemented"
799 elif cmd.run_card["job_strategy"] == 2:
800 return super(gen_ximprove, cls).__new__(gen_ximprove_share, cmd, opt)
801 else:
802 return super(gen_ximprove, cls).__new__(gen_ximprove_v4, cmd, opt)
803
804
    def __init__(self, cmd, opt=None):
807 try:
808 super(gen_ximprove, self).__init__(cmd, opt)
809 except TypeError:
810 pass
811
812 self.run_statistics = {}
813 self.cmd = cmd
814 self.run_card = cmd.run_card
815 run_card = self.run_card
816 self.me_dir = cmd.me_dir
817
818
819 self.gridpack = run_card['gridpack']
820 self.nhel = run_card['nhel']
821 if "nhel_refine" in run_card:
822 self.nhel = run_card["nhel_refine"]
823
824 if self.run_card['refine_evt_by_job'] != -1:
825 self.max_request_event = run_card['refine_evt_by_job']
826
827
828
829 self.gen_events = True
830 self.min_iter = 3
831 self.parralel = False
832
833 self.err_goal = 0.01
834 self.max_np = 9
835 self.split_channels = False
836
837 self.nreq = 2000
838 self.iseed = 4321
839 self.ngran = 1
840
841
842 self.results = 0
843
844 if isinstance(opt, dict):
845 self.configure(opt)
846 elif isinstance(opt, bannermod.GridpackCard):
847 self.configure_gridpack(opt)
848
851
866
867
888
890 """not needed but for gridpack --which is not handle here for the moment"""
891 return
892
893
895 """return the list of channel that need to be improved"""
896
897 assert self.err_goal >=1
898 self.err_goal = int(self.err_goal)
899
900 goal_lum = self.err_goal/(self.results.axsec+1e-99)
901 logger.info('Effective Luminosity %s pb^-1', goal_lum)
902
903 all_channels = sum([list(P) for P in self.results],[])
904 all_channels.sort(cmp= lambda x,y: 1 if y.get('luminosity') - \
905 x.get('luminosity') > 0 else -1)
906
907 to_refine = []
908 for C in all_channels:
909 if C.get('axsec') == 0:
910 continue
911 if goal_lum/(C.get('luminosity')+1e-99) >= 1 + (self.gen_events_security-1)/2:
912 logger.debug("channel %s is at %s (%s) (%s pb)", C.name, C.get('luminosity'), goal_lum/(C.get('luminosity')+1e-99), C.get('xsec'))
913 to_refine.append(C)
914 elif C.get('xerr') > max(C.get('axsec'),
915 (1/(100*math.sqrt(self.err_goal)))*all_channels[-1].get('axsec')):
916 to_refine.append(C)
917
918 logger.info('need to improve %s channels' % len(to_refine))
919 return goal_lum, to_refine
920
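
# Illustrative sketch (not part of the original module): find_job_for_event() above works
# with an effective luminosity goal_lum = requested_events / total_axsec and flags a
# channel when its accumulated luminosity falls short of that goal by more than half of
# the security margin (the additional error-based criterion is omitted here).
def _needs_more_events_sketch(channel_lum, goal_lum, gen_events_security=1.2):
    """Return True if a channel's luminosity is below the margin-adjusted target."""
    return goal_lum / (channel_lum + 1e-99) >= 1 + (gen_events_security - 1) / 2.0
# Example: with goal_lum = 100 pb^-1 the threshold ratio is 1.1, so a channel at
#          80 pb^-1 is selected (ratio 1.25) while one at 95 pb^-1 is not (ratio ~1.05).
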
922 """update the html from this object since it contains all the information"""
923
924
925 run = self.cmd.results.current['run_name']
926 if not os.path.exists(pjoin(self.cmd.me_dir, 'HTML', run)):
927 os.mkdir(pjoin(self.cmd.me_dir, 'HTML', run))
928
929 unit = self.cmd.results.unit
930 P_text = ""
931 if self.results:
932 Presults = self.results
933 else:
934 self.results = sum_html.collect_result(self.cmd, None)
935 Presults = self.results
936
937 for P_comb in Presults:
938 P_text += P_comb.get_html(run, unit, self.cmd.me_dir)
939
940 Presults.write_results_dat(pjoin(self.cmd.me_dir,'SubProcesses', 'results.dat'))
941
942 fsock = open(pjoin(self.cmd.me_dir, 'HTML', run, 'results.html'),'w')
943 fsock.write(sum_html.results_header)
944 fsock.write('%s <dl>' % Presults.get_html(run, unit, self.cmd.me_dir))
945 fsock.write('%s </dl></body>' % P_text)
946
947 self.cmd.results.add_detail('cross', Presults.xsec)
948 self.cmd.results.add_detail('error', Presults.xerru)
949
950 return Presults.xsec, Presults.xerru
951
    def reset_multijob(self):

        for path in misc.glob(pjoin('*', '*', 'multijob.dat'),
                              pjoin(self.me_dir, 'SubProcesses')):
            open(path, 'w').write('0\n')

    def write_multijob(self, Channel, nb_split):
        """ """
982 if nb_split <=1:
983 return
984 f = open(pjoin(self.me_dir, 'SubProcesses', Channel.get('name'), 'multijob.dat'), 'w')
985 f.write('%i\n' % nb_split)
986 f.close()
987
997
998 alphabet = "abcdefghijklmnopqrstuvwxyz"
1000 """generate the script in order to generate a given number of event"""
1001
1002
1003
1004 goal_lum, to_refine = self.find_job_for_event()
1005
1006
1007 self.reset_multijob()
1008
1009 jobs = []
1010
1011
1012
1013 if self.combining_job >1:
1014
1015 new_order = []
1016 if self.combining_job % 2 == 0:
1017 for i in range(len(to_refine) //2):
1018 new_order.append(to_refine[i])
1019 new_order.append(to_refine[-i-1])
1020 if len(to_refine) % 2:
1021 new_order.append(to_refine[i+1])
1022 else:
1023 for i in range(len(to_refine) //3):
1024 new_order.append(to_refine[i])
1025 new_order.append(to_refine[-2*i-1])
1026 new_order.append(to_refine[-2*i-2])
1027 if len(to_refine) % 3 == 1:
1028 new_order.append(to_refine[i+1])
1029 elif len(to_refine) % 3 == 2:
1030 new_order.append(to_refine[i+2])
1031
1032 assert set([id(C) for C in to_refine]) == set([id(C) for C in new_order])
1033 to_refine = new_order
1034
1035
1036
1037 for C in to_refine:
1038
1039 needed_event = goal_lum*C.get('axsec')
1040 nb_split = int(max(1,((needed_event-1)// self.max_request_event) +1))
1041 if not self.split_channels:
1042 nb_split = 1
1043 if nb_split > self.max_splitting:
1044 nb_split = self.max_splitting
1045 nb_split=max(1, nb_split)
1046
1047
1048
1049 if C.get('nunwgt') > 0:
1050 nevents = needed_event / nb_split * (C.get('nevents') / C.get('nunwgt'))
1051
1052 nevents = int(nevents / (2**self.min_iter-1))
1053 else:
1054 nevents = self.max_event_in_iter
1055
1056 if nevents < self.min_event_in_iter:
1057 nb_split = int(nb_split * nevents / self.min_event_in_iter) + 1
1058 nevents = self.min_event_in_iter
1059
1060
1061 nevents = max(self.min_event_in_iter, min(self.max_event_in_iter, nevents))
1062 logger.debug("%s : need %s event. Need %s split job of %s points", C.name, needed_event, nb_split, nevents)
1063
1064
1065
1066 self.write_multijob(C, nb_split)
1067
1068 packet = cluster.Packet((C.parent_name, C.name),
1069 combine_runs.CombineRuns,
1070 (pjoin(self.me_dir, 'SubProcesses', C.parent_name)),
1071 {"subproc": C.name, "nb_split":nb_split})
1072
1073
1074
1075 info = {'name': self.cmd.results.current['run_name'],
1076 'script_name': 'unknown',
1077 'directory': C.name,
1078 'P_dir': C.parent_name,
1079 'offset': 1,
1080 'nevents': nevents,
1081 'maxiter': self.max_iter,
1082 'miniter': self.min_iter,
1083 'precision': -goal_lum/nb_split,
1084 'nhel': self.run_card['nhel'],
1085 'channel': C.name.replace('G',''),
1086 'grid_refinment' : 0,
1087 'base_directory': '',
1088 'packet': packet,
1089 }
1090
1091 if nb_split == 1:
1092 jobs.append(info)
1093 else:
1094 for i in range(nb_split):
1095 new_info = dict(info)
1096 new_info['offset'] = i+1
1097 new_info['directory'] += self.alphabet[i % 26] + str((i+1)//26)
1098 if self.keep_grid_for_refine:
1099 new_info['base_directory'] = info['directory']
1100 jobs.append(new_info)
1101
1102 self.create_ajob(pjoin(self.me_dir, 'SubProcesses', 'refine.sh'), jobs)
1103
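
# Illustrative sketch (not part of the original module): the sizing done above derives,
# for one channel, the number of split jobs and the phase-space points per iteration from
# the requested unweighted events and the past ratio of generated points to unweighted
# events (nevents/nunwgt), the first of min_iter doubling iterations carrying
# 1/(2**min_iter - 1) of the load. A condensed version (ignoring the split_channels
# switch) with the class defaults as keyword arguments:
def _size_refine_job_sketch(needed_event, past_nevents, past_nunwgt,
                            max_request_event=1000, max_splitting=130,
                            min_event_in_iter=1000, max_event_in_iter=5000,
                            min_iter=3):
    """Return (nb_split, nevents_per_iteration) for one channel."""
    nb_split = max(1, int((needed_event - 1) // max_request_event) + 1)
    nb_split = min(nb_split, max_splitting)
    if past_nunwgt > 0:
        nevents = needed_event / float(nb_split) * (float(past_nevents) / past_nunwgt)
        nevents = int(nevents / (2**min_iter - 1))
    else:
        nevents = max_event_in_iter
    if nevents < min_event_in_iter:
        nb_split = int(nb_split * nevents / float(min_event_in_iter)) + 1
        nevents = min_event_in_iter
    nevents = max(min_event_in_iter, min(max_event_in_iter, nevents))
    return nb_split, nevents
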
1104
1106 """create the ajob"""
1107
1108 if not jobs:
1109 return
1110
1111
1112 P2job= collections.defaultdict(list)
1113 for j in jobs:
1114 P2job[j['P_dir']].append(j)
1115 if len(P2job) >1:
1116 for P in P2job.values():
1117 self.create_ajob(template, P)
1118 return
1119
1120
1121 path = pjoin(self.me_dir, 'SubProcesses' ,jobs[0]['P_dir'])
1122
1123 template_text = open(template, 'r').read()
1124
1125
1126 if self.combining_job > 1:
1127 skip1=0
1128 n_channels = len(jobs)
1129 nb_sub = n_channels // self.combining_job
1130 nb_job_in_last = n_channels % self.combining_job
1131 if nb_job_in_last:
1132 nb_sub +=1
1133 skip1 = self.combining_job - nb_job_in_last
1134 if skip1 > nb_sub:
1135 self.combining_job -=1
1136 return self.create_ajob(template, jobs)
1137 combining_job = self.combining_job
1138 else:
1139
1140
1141 skip1=0
1142 combining_job =1
1143 nb_sub = len(jobs)
1144
1145
1146 nb_use = 0
1147 for i in range(nb_sub):
1148 script_number = i+1
1149 if i < skip1:
1150 nb_job = combining_job -1
1151 else:
1152 nb_job = combining_job
1153 fsock = open(pjoin(path, 'ajob%i' % script_number), 'w')
1154 for j in range(nb_use, nb_use + nb_job):
1155 if j> len(jobs):
1156 break
1157 info = jobs[j]
1158 info['script_name'] = 'ajob%i' % script_number
1159 info['keeplog'] = 'false'
1160 if "base_directory" not in info:
1161 info["base_directory"] = "./"
1162 fsock.write(template_text % info)
1163 nb_use += nb_job
1164
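
# Illustrative sketch (not part of the original module): when several channels are
# combined into one ajob script, create_ajob() above lets the first `skip1` scripts take
# one job less so that the jobs spread evenly over nb_sub scripts (the fallback that
# lowers combining_job when skip1 > nb_sub is ignored here). A condensed version of
# that partitioning:
def _partition_jobs_sketch(n_jobs, combining_job):
    """Return the list of chunk sizes used for the ajob scripts."""
    nb_sub, nb_job_in_last = divmod(n_jobs, combining_job)
    skip1 = 0
    if nb_job_in_last:
        nb_sub += 1
        skip1 = combining_job - nb_job_in_last
    return [combining_job - 1 if i < skip1 else combining_job for i in range(nb_sub)]
# Example: _partition_jobs_sketch(7, 3) -> [2, 2, 3]   (three scripts, 7 jobs in total)
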
1166 """create the ajob to achieve a give precision on the total cross-section"""
1167
1168
1169 assert self.err_goal <=1
1170 xtot = abs(self.results.xsec)
1171 logger.info("Working on precision: %s %%" %(100*self.err_goal))
1172 all_channels = sum([list(P) for P in self.results if P.mfactor],[])
1173 limit = self.err_goal * xtot / len(all_channels)
1174 to_refine = []
1175 rerr = 0
1176 for C in all_channels:
1177 cerr = C.mfactor*(C.xerru + len(all_channels)*C.xerrc)
1178 if cerr > abs(limit):
1179 to_refine.append(C)
1180 else:
1181 rerr += cerr
1182 rerr *=rerr
1183 if not len(to_refine):
1184 return
1185
1186
1187 limit = math.sqrt((self.err_goal * xtot)**2 - rerr/math.sqrt(len(to_refine)))
1188 for C in to_refine[:]:
1189 cerr = C.mfactor*(C.xerru + len(to_refine)*C.xerrc)
1190 if cerr < limit:
1191 to_refine.remove(C)
1192
1193
1194 logger.info('need to improve %s channels' % len(to_refine))
1195
1196
1197 jobs = []
1198
1199
1200
1201 for C in to_refine:
1202
1203
1204 yerr = C.mfactor*(C.xerru+len(to_refine)*C.xerrc)
1205 nevents = 0.2*C.nevents*(yerr/limit)**2
1206
1207 nb_split = int((nevents*(C.nunwgt/C.nevents)/self.max_request_event/ (2**self.min_iter-1))**(2/3))
1208 nb_split = max(nb_split, 1)
1209
1210 if nb_split > self.max_splitting:
1211 nb_split = self.max_splitting
1212
1213 if nb_split >1:
1214 nevents = nevents / nb_split
1215 self.write_multijob(C, nb_split)
1216
nevents = max(self.min_event_in_iter, min(self.max_event_in_iter, nevents))
1218
1219
1220
1221 info = {'name': self.cmd.results.current['run_name'],
1222 'script_name': 'unknown',
1223 'directory': C.name,
1224 'P_dir': C.parent_name,
1225 'offset': 1,
1226 'nevents': nevents,
1227 'maxiter': self.max_iter,
1228 'miniter': self.min_iter,
1229 'precision': yerr/math.sqrt(nb_split)/(C.get('xsec')+ yerr),
1230 'nhel': self.run_card['nhel'],
1231 'channel': C.name.replace('G',''),
1232 'grid_refinment' : 1
1233 }
1234
1235 if nb_split == 1:
1236 jobs.append(info)
1237 else:
1238 for i in range(nb_split):
1239 new_info = dict(info)
1240 new_info['offset'] = i+1
1241 new_info['directory'] += self.alphabet[i % 26] + str((i+1)//26)
1242 jobs.append(new_info)
1243 self.create_ajob(pjoin(self.me_dir, 'SubProcesses', 'refine.sh'), jobs)
1244
1246 """update the html from this object since it contains all the information"""
1247
1248
1249 run = self.cmd.results.current['run_name']
1250 if not os.path.exists(pjoin(self.cmd.me_dir, 'HTML', run)):
1251 os.mkdir(pjoin(self.cmd.me_dir, 'HTML', run))
1252
1253 unit = self.cmd.results.unit
1254 P_text = ""
1255 if self.results:
1256 Presults = self.results
1257 else:
1258 self.results = sum_html.collect_result(self.cmd, None)
1259 Presults = self.results
1260
1261 for P_comb in Presults:
1262 P_text += P_comb.get_html(run, unit, self.cmd.me_dir)
1263
1264 Presults.write_results_dat(pjoin(self.cmd.me_dir,'SubProcesses', 'results.dat'))
1265
1266 fsock = open(pjoin(self.cmd.me_dir, 'HTML', run, 'results.html'),'w')
1267 fsock.write(sum_html.results_header)
1268 fsock.write('%s <dl>' % Presults.get_html(run, unit, self.cmd.me_dir))
1269 fsock.write('%s </dl></body>' % P_text)
1270
1271 self.cmd.results.add_detail('cross', Presults.xsec)
1272 self.cmd.results.add_detail('error', Presults.xerru)
1273
1274 return Presults.xsec, Presults.xerru
1275
1300
1315
1317 """Doing the refine in multicore. Each core handle a couple of PS point."""
1318
1319 nb_ps_by_job = 2000
1320 mode = "refine"
1321 gen_events_security = 1.15
1322
1323
1324
1326
1327 super(gen_ximprove_share, self).__init__(*args, **opts)
1328 self.generated_events = {}
1329 self.splitted_for_dir = lambda x,y : self.splitted_Pdir[(x,y)]
1330
1331
1333 """generate the script in order to generate a given number of event"""
1334
1335
1336
1337 goal_lum, to_refine = self.find_job_for_event()
1338 self.goal_lum = goal_lum
1339
1340
1341 total_ps_points = 0
1342 channel_to_ps_point = []
1343 for C in to_refine:
1344
1345 try:
1346 os.remove(pjoin(self.me_dir, "SubProcesses",C.parent_name, C.name, "events.lhe"))
1347 except:
1348 pass
1349
1350
1351 needed_event = goal_lum*C.get('axsec')
1352 if needed_event == 0:
1353 continue
1354
1355 if C.get('nunwgt') > 0:
1356 nevents = needed_event * (C.get('nevents') / C.get('nunwgt'))
1357
1358 nevents = int(nevents / (2**self.min_iter-1))
1359 else:
1360 nb_split = int(max(1,((needed_event-1)// self.max_request_event) +1))
1361 if not self.split_channels:
1362 nb_split = 1
1363 if nb_split > self.max_splitting:
1364 nb_split = self.max_splitting
1365 nevents = self.max_event_in_iter * self.max_splitting
1366 else:
1367 nevents = self.max_event_in_iter * nb_split
1368
1369 if nevents > self.max_splitting*self.max_event_in_iter:
1370 logger.warning("Channel %s/%s has a very low efficiency of unweighting. Might not be possible to reach target" % \
1371 (C.name, C.parent_name))
1372 nevents = self.max_event_in_iter * self.max_splitting
1373
1374 total_ps_points += nevents
1375 channel_to_ps_point.append((C, nevents))
1376
1377 if self.cmd.options["run_mode"] == 1:
1378 if self.cmd.options["cluster_size"]:
1379 nb_ps_by_job = total_ps_points /int(self.cmd.options["cluster_size"])
1380 else:
1381 nb_ps_by_job = self.nb_ps_by_job
1382 elif self.cmd.options["run_mode"] == 2:
1383 remain = total_ps_points % self.cmd.options["nb_core"]
1384 if remain:
1385 nb_ps_by_job = 1 + (total_ps_points - remain) / self.cmd.options["nb_core"]
1386 else:
1387 nb_ps_by_job = total_ps_points / self.cmd.options["nb_core"]
1388 else:
1389 nb_ps_by_job = self.nb_ps_by_job
1390
1391 nb_ps_by_job = int(max(nb_ps_by_job, 500))
1392
1393 for C, nevents in channel_to_ps_point:
1394 if nevents % nb_ps_by_job:
1395 nb_job = 1 + int(nevents // nb_ps_by_job)
1396 else:
1397 nb_job = int(nevents // nb_ps_by_job)
1398 submit_ps = min(nevents, nb_ps_by_job)
1399 if nb_job == 1:
1400 submit_ps = max(submit_ps, self.min_event_in_iter)
1401 self.create_resubmit_one_iter(C.parent_name, C.name[1:], submit_ps, nb_job, step=0)
1402 needed_event = goal_lum*C.get('xsec')
1403 logger.debug("%s/%s : need %s event. Need %s split job of %s points", C.parent_name, C.name, needed_event, nb_job, submit_ps)
1404
1405
    def combine_iteration(self, Pdir, G, step):
1408 grid_calculator, cross, error = self.combine_grid(Pdir, G, step)
1409
1410
1411 Gdirs = []
1412 for i in range(self.splitted_for_dir(Pdir, G)):
1413 path = pjoin(Pdir, "G%s_%s" % (G, i+1))
1414 Gdirs.append(path)
1415 assert len(grid_calculator.results) == len(Gdirs) == self.splitted_for_dir(Pdir, G)
1416
1417
1418
1419 needed_event = cross * self.goal_lum
1420 if needed_event == 0:
1421 return 0
1422
1423
1424 if self.err_goal >=1:
1425 if needed_event > self.gen_events_security * self.err_goal:
1426 needed_event = int(self.gen_events_security * self.err_goal)
1427
1428 if (Pdir, G) in self.generated_events:
1429 old_nunwgt, old_maxwgt = self.generated_events[(Pdir, G)]
1430 else:
1431 old_nunwgt, old_maxwgt = 0, 0
1432
1433 if old_nunwgt == 0 and os.path.exists(pjoin(Pdir,"G%s" % G, "events.lhe")):
1434
1435 lhe = lhe_parser.EventFile(pjoin(Pdir,"G%s" % G, "events.lhe"))
1436 old_nunwgt = lhe.unweight(None, trunc_error=0.005, log_level=0)
1437 old_maxwgt = lhe.max_wgt
1438
1439
1440
1441 maxwgt = max(grid_calculator.get_max_wgt(), old_maxwgt)
1442 new_evt = grid_calculator.get_nunwgt(maxwgt)
1443 efficiency = new_evt / sum([R.nevents for R in grid_calculator.results])
1444 nunwgt = old_nunwgt * old_maxwgt / maxwgt
1445 nunwgt += new_evt
1446
1447
1448 one_iter_nb_event = max(grid_calculator.get_nunwgt(),1)
1449 drop_previous_iteration = False
1450
1451 n_target_one_iter = (needed_event-one_iter_nb_event) / ( one_iter_nb_event/ sum([R.nevents for R in grid_calculator.results]))
1452 n_target_combined = (needed_event-nunwgt) / efficiency
1453 if n_target_one_iter < n_target_combined:
1454
1455
1456 drop_previous_iteration = True
1457 nunwgt = one_iter_nb_event
1458 maxwgt = grid_calculator.get_max_wgt()
1459 new_evt = nunwgt
1460 efficiency = ( one_iter_nb_event/ sum([R.nevents for R in grid_calculator.results]))
1461
1462 try:
1463 if drop_previous_iteration:
1464 raise IOError
1465 output_file = open(pjoin(Pdir,"G%s" % G, "events.lhe"), 'a')
1466 except IOError:
1467 output_file = open(pjoin(Pdir,"G%s" % G, "events.lhe"), 'w')
1468
1469 misc.call(["cat"] + [pjoin(d, "events.lhe") for d in Gdirs],
1470 stdout=output_file)
1471 output_file.close()
1472
1473
1474 if nunwgt < 0.6 * needed_event and step > self.min_iter:
1475 lhe = lhe_parser.EventFile(output_file.name)
1476 old_nunwgt =nunwgt
1477 nunwgt = lhe.unweight(None, trunc_error=0.01, log_level=0)
1478
1479
1480 self.generated_events[(Pdir, G)] = (nunwgt, maxwgt)
1481
1482
1483
1484 if nunwgt >= int(0.96*needed_event)+1:
1485
1486 logger.info("found enough event for %s/G%s" % (os.path.basename(Pdir), G))
1487 self.write_results(grid_calculator, cross, error, Pdir, G, step, efficiency)
1488 return 0
1489 elif step >= self.max_iter:
logger.debug("failed to find enough events")
1491 self.write_results(grid_calculator, cross, error, Pdir, G, step, efficiency)
1492 return 0
1493
1494 nb_split_before = len(grid_calculator.results)
1495 nevents = grid_calculator.results[0].nevents
1496 if nevents == 0:
1497 nevents = max(g.nevents for g in grid_calculator.results)
1498
1499 need_ps_point = (needed_event - nunwgt)/(efficiency+1e-99)
1500 need_job = need_ps_point // nevents + 1
1501
1502 if step < self.min_iter:
1503
1504 job_at_first_iter = nb_split_before/2**(step-1)
1505 expected_total_job = job_at_first_iter * (2**self.min_iter-1)
1506 done_job = job_at_first_iter * (2**step-1)
1507 expected_remaining_job = expected_total_job - done_job
1508
1509 logger.debug("efficiency status (smaller is better): %s", need_job/expected_remaining_job)
1510
1511 need_job = min(need_job, expected_remaining_job*1.25)
1512
1513 nb_job = (need_job-0.5)//(2**(self.min_iter-step)-1) + 1
1514 nb_job = max(1, nb_job)
1515 grid_calculator.write_grid_for_submission(Pdir,G,
1516 self.splitted_for_dir(Pdir, G), nb_job*nevents ,mode=self.mode,
1517 conservative_factor=self.max_iter)
1518 logger.info("%s/G%s is at %i/%i (%.2g%%) event. Resubmit %i job at iteration %i." \
1519 % (os.path.basename(Pdir), G, int(nunwgt),int(needed_event)+1,
1520 (float(nunwgt)/needed_event)*100.0 if needed_event>0.0 else 0.0,
1521 nb_job, step))
1522 self.create_resubmit_one_iter(Pdir, G, nevents, nb_job, step)
1523
1524
1525 elif step < self.max_iter:
1526 if step + 1 == self.max_iter:
1527 need_job = 1.20 * need_job
1528
1529 nb_job = int(min(need_job, nb_split_before*1.5))
1530 grid_calculator.write_grid_for_submission(Pdir,G,
1531 self.splitted_for_dir(Pdir, G), nb_job*nevents ,mode=self.mode,
1532 conservative_factor=self.max_iter)
1533
1534
1535 logger.info("%s/G%s is at %i/%i ('%.2g%%') event. Resubmit %i job at iteration %i." \
1536 % (os.path.basename(Pdir), G, int(nunwgt),int(needed_event)+1,
1537 (float(nunwgt)/needed_event)*100.0 if needed_event>0.0 else 0.0,
1538 nb_job, step))
1539 self.create_resubmit_one_iter(Pdir, G, nevents, nb_job, step)
1540
1541
1542
1543 return 0
1544
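
# Illustrative sketch (not part of the original module): in combine_iteration() above,
# events unweighted against an older, smaller maximum weight are only kept with
# probability old_maxwgt/maxwgt once a larger maximum is found, hence the rescaling
# nunwgt = old_nunwgt * old_maxwgt / maxwgt before the new events are added.
def _rescaled_unweighted_sketch(old_nunwgt, old_maxwgt, new_maxwgt):
    """Events effectively kept from an earlier sample after the maximum weight grew."""
    return old_nunwgt * old_maxwgt / max(old_maxwgt, new_maxwgt)
# e.g. _rescaled_unweighted_sketch(1000, 2.0, 2.5) -> 800.0
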
1545
    def write_results(self, grid_calculator, cross, error, Pdir, G, step, efficiency):
1547
1548
1549 if cross == 0:
1550 abscross,nw, luminosity = 0, 0, 0
1551 wgt, maxit,nunwgt, wgt, nevents = 0,0,0,0,0
1552 error = 0
1553 else:
1554 grid_calculator.results.compute_values()
1555 abscross = self.abscross[(Pdir,G)]/self.sigma[(Pdir,G)]
1556 nunwgt, wgt = self.generated_events[(Pdir, G)]
1557 nw = int(nunwgt / efficiency)
1558 nunwgt = int(nunwgt)
1559 maxit = step
1560 nevents = nunwgt
1561
1562 luminosity = nunwgt/cross
1563
1564
1565 def fstr(nb):
1566 data = '%E' % nb
1567 nb, power = data.split('E')
1568 nb = float(nb) /10
1569 power = int(power) + 1
1570 return '%.5fE%+03i' %(nb,power)
1571 line = '%s %s %s %i %i %i %i %s %s %s 0.0 0.0 0\n' % \
1572 (fstr(cross), fstr(error*cross), fstr(error*cross),
1573 nevents, nw, maxit,nunwgt,
1574 fstr(luminosity), fstr(wgt), fstr(abscross))
1575
1576 fsock = open(pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G,
1577 'results.dat'),'w')
1578 fsock.writelines(line)
1579 fsock.close()
1580