1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16 """Methods and classes to export models and matrix elements to Pythia 8
17 and C++ Standalone format."""
18
19 import fractions
20 import glob
21 import itertools
22 import logging
23 from math import fmod
24 import os
25 import re
26 import shutil
27 import subprocess
28
29 import madgraph.core.base_objects as base_objects
30 import madgraph.core.color_algebra as color
31 import madgraph.core.helas_objects as helas_objects
32 import madgraph.iolibs.drawing_eps as draw
33 import madgraph.iolibs.files as files
34 import madgraph.iolibs.helas_call_writers as helas_call_writers
35 import madgraph.iolibs.file_writers as writers
36 import madgraph.iolibs.template_files as template_files
37 import madgraph.iolibs.ufo_expression_parsers as parsers
38 from madgraph import MadGraph5Error, InvalidCmd, MG5DIR
39 from madgraph.iolibs.files import cp, ln, mv
40
41 import madgraph.various.misc as misc
42
43 import aloha.create_aloha as create_aloha
44 import aloha.aloha_writers as aloha_writers
45
46 _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
47 logger = logging.getLogger('madgraph.export_pythia8')
56 """Prepare export_dir as standalone_cpp directory, including:
57 src (for RAMBO, model and ALOHA files + makefile)
58 lib (with compiled libraries from src)
59 SubProcesses (with check_sa.cpp + makefile and Pxxxxx directories)
60 """
61
62 cwd = os.getcwd()
63
64 try:
65 os.mkdir(dirpath)
66 except os.error as error:
67 logger.warning(error.strerror + " " + dirpath)
68
69 try:
70 os.chdir(dirpath)
71 except os.error:
72 logger.error('Could not cd to directory %s' % dirpath)
73 return 0
74
75 logger.info('Creating subdirectories in directory %s' % dirpath)
76
77 try:
78 os.mkdir('src')
79 except os.error as error:
80 logger.warning(error.strerror + " " + dirpath)
81
82 try:
83 os.mkdir('lib')
84 except os.error as error:
85 logger.warning(error.strerror + " " + dirpath)
86
87 try:
88 os.mkdir('Cards')
89 except os.error as error:
90 logger.warning(error.strerror + " " + dirpath)
91
92 try:
93 os.mkdir('SubProcesses')
94 except os.error as error:
95 logger.warning(error.strerror + " " + dirpath)
96
97
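    # Write the param_card for the model into the Cards directory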
98 open(os.path.join("Cards","param_card.dat"), 'w').write(\
99 model.write_param_card())
100
101 src_files = ['rambo.h', 'rambo.cc', 'read_slha.h', 'read_slha.cc']
102
103
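    # Copy the RAMBO phase-space and SLHA-reader sources into src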
104 for f in src_files:
105 cp(_file_path + 'iolibs/template_files/' + f, 'src')
106
107
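    # Write the src Makefile from its template, filling in the model name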
108 makefile = read_template_file('Makefile_sa_cpp_src') % \
109 {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
110 open(os.path.join('src', 'Makefile'), 'w').write(makefile)
111
112
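    # Copy the check_sa.cpp standalone driver into SubProcesses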
113 cp(_file_path + 'iolibs/template_files/check_sa.cpp', 'SubProcesses')
114
115
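    # Write the SubProcesses Makefile from its template, filling in the model name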
116 makefile = read_template_file('Makefile_sa_cpp_sp') % \
117 {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
118 open(os.path.join('SubProcesses', 'Makefile'), 'w').write(makefile)
119
120
121 os.chdir(cwd)
122
130
131 """Generate the Pxxxxx directory for a subprocess in C++ standalone,
132 including the necessary .h and .cc files"""
133
134 cwd = os.getcwd()
135
136 if format == 'standalone_cpp':
137 process_exporter_cpp = ProcessExporterCPP(matrix_element,
138 cpp_helas_call_writer)
139 elif format == 'matchbox_cpp':
140 process_exporter_cpp = ProcessExporterMatchbox(matrix_element,
141 cpp_helas_call_writer)
142 else:
143         raise Exception('Unrecognized format %s' % format)
144
145
146 dirpath = os.path.join(path, \
147 "P%d_%s" % (process_exporter_cpp.process_number,
148 process_exporter_cpp.process_name))
149 try:
150 os.mkdir(dirpath)
151 except os.error as error:
152 logger.warning(error.strerror + " " + dirpath)
153
154 try:
155 os.chdir(dirpath)
156 except os.error:
157 logger.error('Could not cd to directory %s' % dirpath)
158 return 0
159
160 logger.info('Creating files in directory %s' % dirpath)
161
162 process_exporter_cpp.path = dirpath
163
164 process_exporter_cpp.generate_process_files()
165
166 linkfiles = ['check_sa.cpp', 'Makefile']
167
168
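    # Link the shared check_sa.cpp driver and Makefile from the parent SubProcesses directory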
169 for file in linkfiles:
170 ln('../%s' % file)
171
172
173 os.chdir(cwd)
174
175 return
176
178 """Make the model library in a C++ standalone directory"""
179
180 source_dir = os.path.join(dir_path, "src")
181
182 logger.info("Running make for src")
183 misc.compile(cwd=source_dir)
184
189 """Class to take care of exporting a set of matrix elements to
190 C++ format."""
191
192
193 process_dir = '.'
194 include_dir = '.'
195 process_template_h = 'cpp_process_h.inc'
196 process_template_cc = 'cpp_process_cc.inc'
197 process_class_template = 'cpp_process_class.inc'
198 process_definition_template = 'cpp_process_function_definitions.inc'
199 process_wavefunction_template = 'cpp_process_wavefunctions.inc'
200 process_sigmaKin_function_template = 'cpp_process_sigmaKin_function.inc'
201 single_process_template = 'cpp_process_matrix.inc'
202
205
206     def __init__(self, matrix_elements, cpp_helas_call_writer, process_string = "",
207 process_number = 0, path = os.getcwd()):
208         """Initialize with matrix elements, helas call writer, process
209         string, and path. Generate the process .h and .cc files."""
210
211 if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
212 self.matrix_elements = matrix_elements.get('matrix_elements')
213 elif isinstance(matrix_elements, helas_objects.HelasMatrixElement):
214 self.matrix_elements = \
215 helas_objects.HelasMatrixElementList([matrix_elements])
216 elif isinstance(matrix_elements, helas_objects.HelasMatrixElementList):
217 self.matrix_elements = matrix_elements
218 else:
219             raise base_objects.PhysicsObject.PhysicsObjectError(
220                   "Wrong object type for matrix_elements")
221
222 if not self.matrix_elements:
223 raise MadGraph5Error("No matrix elements to export")
224
225 self.model = self.matrix_elements[0].get('processes')[0].get('model')
226 self.model_name = ProcessExporterCPP.get_model_name(self.model.get('name'))
227
228 self.processes = sum([me.get('processes') for \
229 me in self.matrix_elements], [])
230 self.processes.extend(sum([me.get_mirror_processes() for \
231 me in self.matrix_elements], []))
232
233 self.nprocesses = len(self.matrix_elements)
234 if any([m.get('has_mirror_process') for m in self.matrix_elements]):
235 self.nprocesses = 2*len(self.matrix_elements)
236
237 if process_string:
238 self.process_string = process_string
239 else:
240 self.process_string = self.processes[0].base_string()
241
242 if process_number:
243 self.process_number = process_number
244 else:
245 self.process_number = self.processes[0].get('id')
246
247 self.process_name = self.get_process_name()
248 self.process_class = "CPPProcess"
249
250 self.path = path
251 self.helas_call_writer = cpp_helas_call_writer
252
253 if not isinstance(self.helas_call_writer, helas_call_writers.CPPUFOHelasCallWriter):
254             raise self.ProcessExporterCPPError(
255                   "helas_call_writer not CPPUFOHelasCallWriter")
256
257 self.nexternal, self.ninitial = \
258 self.matrix_elements[0].get_nexternal_ninitial()
259 self.nfinal = self.nexternal - self.ninitial
260
261
262
263
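        # Check whether all matrix elements share the same helicity structure;
        # if so, wavefunctions and amplitudes can be shared between the processes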
264 self.single_helicities = True
265
266 hel_matrix = self.get_helicity_matrix(self.matrix_elements[0])
267
268 for me in self.matrix_elements[1:]:
269 if self.get_helicity_matrix(me) != hel_matrix:
270 self.single_helicities = False
271
272 if self.single_helicities:
273
274
275
276
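            # Collect all distinct wavefunctions across the matrix elements
            # and give them consecutive numbers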
277 self.wavefunctions = []
278 wf_number = 0
279
280 for me in self.matrix_elements:
281 for iwf, wf in enumerate(me.get_all_wavefunctions()):
282 try:
283 old_wf = \
284 self.wavefunctions[self.wavefunctions.index(wf)]
285 wf.set('number', old_wf.get('number'))
286 except ValueError:
287 wf_number += 1
288 wf.set('number', wf_number)
289 self.wavefunctions.append(wf)
290
291
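            # Collect all distinct amplitudes in the same way and wrap them in a
            # single HelasMatrixElement for the common wavefunction calculation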
292 self.amplitudes = helas_objects.HelasAmplitudeList()
293 amp_number = 0
294 for me in self.matrix_elements:
295 for iamp, amp in enumerate(me.get_all_amplitudes()):
296 try:
297 old_amp = \
298 self.amplitudes[self.amplitudes.index(amp)]
299 amp.set('number', old_amp.get('number'))
300 except ValueError:
301 amp_number += 1
302 amp.set('number', amp_number)
303 self.amplitudes.append(amp)
304 diagram = helas_objects.HelasDiagram({'amplitudes': self.amplitudes})
305 self.amplitudes = helas_objects.HelasMatrixElement({\
306 'diagrams': helas_objects.HelasDiagramList([diagram])})
307
308
309
311 """Generate the .h and .cc files needed for C++, for the
312 processes described by multi_matrix_element"""
313
314
315 if not os.path.isdir(os.path.join(self.path, self.include_dir)):
316 os.makedirs(os.path.join(self.path, self.include_dir))
317 filename = os.path.join(self.path, self.include_dir,
318 '%s.h' % self.process_class)
319 self.write_process_h_file(writers.CPPWriter(filename))
320
321 if not os.path.isdir(os.path.join(self.path, self.process_dir)):
322 os.makedirs(os.path.join(self.path, self.process_dir))
323 filename = os.path.join(self.path, self.process_dir,
324 '%s.cc' % self.process_class)
325 self.write_process_cc_file(writers.CPPWriter(filename))
326
327 logger.info('Created files %(process)s.h and %(process)s.cc in' % \
328 {'process': self.process_class} + \
329 ' directory %(dir)s' % {'dir': os.path.split(filename)[0]})
330
331
332
333
334
335
337 """Write the class definition (.h) file for the process"""
338
339 if not isinstance(writer, writers.CPPWriter):
340 raise writers.CPPWriter.CPPWriterError(\
341 "writer not CPPWriter")
342
343 replace_dict = {}
344
345
346 info_lines = get_mg5_info_lines()
347 replace_dict['info_lines'] = info_lines
348
349
350 replace_dict['model_name'] = \
351 self.model_name
352
353
354 replace_dict['process_file_name'] = self.process_name
355
356
357 process_class_definitions = self.get_process_class_definitions()
358 replace_dict['process_class_definitions'] = process_class_definitions
359
360 file = read_template_file(self.process_template_h) % replace_dict
361
362
363 writer.writelines(file)
364
365
366
367
369 """Write the class member definition (.cc) file for the process
370 described by matrix_element"""
371
372 if not isinstance(writer, writers.CPPWriter):
373 raise writers.CPPWriter.CPPWriterError(\
374 "writer not CPPWriter")
375
376 replace_dict = {}
377
378
379 info_lines = get_mg5_info_lines()
380 replace_dict['info_lines'] = info_lines
381
382
383 replace_dict['process_file_name'] = self.process_name
384
385
386 replace_dict['model_name'] = self.model_name
387
388
389
390 process_function_definitions = \
391 self.get_process_function_definitions()
392 replace_dict['process_function_definitions'] = \
393 process_function_definitions
394
395 file = read_template_file(self.process_template_cc) % replace_dict
396
397
398 writer.writelines(file)
399
400
401
402
404 """The complete class definition for the process"""
405
406 replace_dict = {}
407
408
409 replace_dict['model_name'] = self.model_name
410
411
412 process_lines = "\n".join([self.get_process_info_lines(me) for me in \
413 self.matrix_elements])
414
415 replace_dict['process_lines'] = process_lines
416
417
418 replace_dict['nfinal'] = self.nfinal
419
420
421 replace_dict['ninitial'] = self.ninitial
422
423
424 replace_dict['process_class_name'] = self.process_name
425
426
427 process_definition = "%s (%s)" % (self.process_string,
428 self.model_name)
429 replace_dict['process_definition'] = process_definition
430
431 process = self.processes[0]
432
433 replace_dict['process_code'] = self.process_number
434 replace_dict['nexternal'] = self.nexternal
435 replace_dict['nprocesses'] = self.nprocesses
436
437
438 color_amplitudes = self.matrix_elements[0].get_color_amplitudes()
439
440 replace_dict['ncolor'] = len(color_amplitudes)
441
442 if self.single_helicities:
443 replace_dict['all_sigma_kin_definitions'] = \
444 """// Calculate wavefunctions
445 void calculate_wavefunctions(const int perm[], const int hel[]);
446 static const int nwavefuncs = %d;
447 std::complex<double> w[nwavefuncs][18];
448 static const int namplitudes = %d;
449 std::complex<double> amp[namplitudes];""" % \
450 (len(self.wavefunctions),
451 len(self.amplitudes.get_all_amplitudes()))
452 replace_dict['all_matrix_definitions'] = \
453 "\n".join(["double matrix_%s();" % \
454 me.get('processes')[0].shell_string().\
455 replace("0_", "") \
456 for me in self.matrix_elements])
457
458 else:
459 replace_dict['all_sigma_kin_definitions'] = \
460 "\n".join(["void sigmaKin_%s();" % \
461 me.get('processes')[0].shell_string().\
462 replace("0_", "") \
463 for me in self.matrix_elements])
464 replace_dict['all_matrix_definitions'] = \
465 "\n".join(["double matrix_%s(const int hel[]);" % \
466 me.get('processes')[0].shell_string().\
467 replace("0_", "") \
468 for me in self.matrix_elements])
469
470
471 file = read_template_file(self.process_class_template) % replace_dict
472
473 return file
474
476         """The complete set of class member function definitions for the process"""
477
478 replace_dict = {}
479
480
481 replace_dict['model_name'] = self.model_name
482
483
484 replace_dict['process_lines'] = \
485 "\n".join([self.get_process_info_lines(me) for \
486 me in self.matrix_elements])
487
488
489 replace_dict['process_class_name'] = self.process_name
490
491 color_amplitudes = [me.get_color_amplitudes() for me in \
492 self.matrix_elements]
493
494 replace_dict['initProc_lines'] = \
495 self.get_initProc_lines(self.matrix_elements[0],
496 color_amplitudes)
497 replace_dict['reset_jamp_lines'] = \
498 self.get_reset_jamp_lines(color_amplitudes)
499 replace_dict['sigmaKin_lines'] = \
500 self.get_sigmaKin_lines(color_amplitudes)
501 replace_dict['sigmaHat_lines'] = \
502 self.get_sigmaHat_lines()
503
504 replace_dict['all_sigmaKin'] = \
505 self.get_all_sigmaKin_lines(color_amplitudes,
506 'CPPProcess')
507
508 file = read_template_file(self.process_definition_template) %\
509 replace_dict
510
511 return file
512
514 """Return process file name for the process in matrix_element"""
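        # e.g. the process string "g g > t t~" with model "sm" gives "Sigma_sm_gg_ttx"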
515
516 process_string = self.process_string
517
518
519         proc_number_pattern = re.compile(r"^(.+)@\s*(\d+)\s*(.*)$")
520 proc_number_re = proc_number_pattern.match(process_string)
521 proc_number = 0
522 if proc_number_re:
523 proc_number = int(proc_number_re.group(2))
524 process_string = proc_number_re.group(1) + \
525 proc_number_re.group(3)
526
527
528         order_pattern = re.compile(r"^(.+)\s+(\w+)\s*=\s*(\d+)\s*$")
529 order_re = order_pattern.match(process_string)
530 while order_re:
531 process_string = order_re.group(1)
532 order_re = order_pattern.match(process_string)
533
534 process_string = process_string.replace(' ', '')
535 process_string = process_string.replace('>', '_')
536 process_string = process_string.replace('+', 'p')
537 process_string = process_string.replace('-', 'm')
538 process_string = process_string.replace('~', 'x')
539 process_string = process_string.replace('/', '_no_')
540 process_string = process_string.replace('$', '_nos_')
541 process_string = process_string.replace('|', '_or_')
542 if proc_number != 0:
543 process_string = "%d_%s" % (proc_number, process_string)
544
545 process_string = "Sigma_%s_%s" % (self.model_name,
546 process_string)
547 return process_string
548
550 """Return info lines describing the processes for this matrix element"""
551
552         return "\n".join(["# " + process.nice_string().replace('\n', '\n# * ') \
553 for process in matrix_element.get('processes')])
554
555
557 """Get initProc_lines for function definition for Pythia 8 .cc file"""
558
559 initProc_lines = []
560
561 initProc_lines.append("// Set external particle masses for this matrix element")
562
563 for part in matrix_element.get_external_wavefunctions():
564 initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass'))
565 for i, colamp in enumerate(color_amplitudes):
566 initProc_lines.append("jamp2[%d] = new double[%d];" % \
567 (i, len(colamp)))
568
569 return "\n".join(initProc_lines)
570
572 """Get lines to reset jamps"""
573
574 ret_lines = ""
575 for icol, col_amp in enumerate(color_amplitudes):
576 ret_lines+= """for(int i=0;i < %(ncolor)d; i++)
577 jamp2[%(proc_number)d][i]=0.;\n""" % \
578 {"ncolor": len(col_amp), "proc_number": icol}
579 return ret_lines
580
581
605
606
608 """Get sigmaKin_lines for function definition for Pythia 8 .cc file"""
609
610
611 if self.single_helicities:
612 replace_dict = {}
613
614
615 replace_dict['ncomb'] = \
616 self.matrix_elements[0].get_helicity_combinations()
617
618
619 replace_dict['process_class_name'] = self.process_name
620
621
622 replace_dict['id1'] = self.processes[0].get('legs')[0].get('id')
623 replace_dict['id2'] = self.processes[0].get('legs')[1].get('id')
624
625
626 replace_dict['helicity_matrix'] = \
627 self.get_helicity_matrix(self.matrix_elements[0])
628
629
630 den_factors = [str(me.get_denominator_factor()) for me in \
631 self.matrix_elements]
632 if self.nprocesses != len(self.matrix_elements):
633 den_factors.extend(den_factors)
634 replace_dict['den_factors'] = ",".join(den_factors)
635 replace_dict['get_matrix_t_lines'] = "\n".join(
636 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
637 {"iproc": i, "proc_name": \
638 me.get('processes')[0].shell_string().replace("0_", "")} \
639 for i, me in enumerate(self.matrix_elements)])
640
641
642 mirror_matrix_lines = ""
643
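            # If any process has a mirror process, also evaluate the matrix
            # elements with the two initial-state momenta interchanged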
644 if any([m.get('has_mirror_process') for m in self.matrix_elements]):
645 mirror_matrix_lines += \
646 """ // Mirror initial state momenta for mirror process
647 perm[0]=1;
648 perm[1]=0;
649 // Calculate wavefunctions
650 calculate_wavefunctions(perm, helicities[ihel]);
651 // Mirror back
652 perm[0]=0;
653 perm[1]=1;
654 // Calculate matrix elements
655 """
656
657 mirror_matrix_lines += "\n".join(
658 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
659 {"iproc": i + len(self.matrix_elements), "proc_name": \
660 me.get('processes')[0].shell_string().replace("0_", "")} \
661 for i, me in enumerate(self.matrix_elements) if me.get('has_mirror_process')])
662
663 replace_dict['get_mirror_matrix_lines'] = mirror_matrix_lines
664
665
666 file = \
667 read_template_file(\
668 self.process_sigmaKin_function_template) %\
669 replace_dict
670
671 return file
672
673 else:
674 ret_lines = "// Call the individual sigmaKin for each process\n"
675 return ret_lines + \
676 "\n".join(["sigmaKin_%s();" % \
677 me.get('processes')[0].shell_string().\
678 replace("0_", "") for \
679 me in self.matrix_elements])
680
682 """Get sigmaKin_process for all subprocesses for Pythia 8 .cc file"""
683
684 ret_lines = []
685 if self.single_helicities:
686 ret_lines.append(\
687 "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \
688 class_name)
689 ret_lines.append("// Calculate wavefunctions for all processes")
690 ret_lines.append(self.get_calculate_wavefunctions(\
691 self.wavefunctions, self.amplitudes))
692 ret_lines.append("}")
693 else:
694 ret_lines.extend([self.get_sigmaKin_single_process(i, me) \
695 for i, me in enumerate(self.matrix_elements)])
696 ret_lines.extend([self.get_matrix_single_process(i, me,
697 color_amplitudes[i],
698 class_name) \
699 for i, me in enumerate(self.matrix_elements)])
700 return "\n".join(ret_lines)
701
702
704 """Write sigmaKin for each process"""
705
706
707
708 replace_dict = {}
709
710
711 replace_dict['proc_name'] = \
712 matrix_element.get('processes')[0].shell_string().replace("0_", "")
713
714
715 replace_dict['process_class_name'] = self.process_name
716
717
718 replace_dict['proc_number'] = i
719
720
721 replace_dict['ncomb'] = matrix_element.get_helicity_combinations()
722
723
724 replace_dict['helicity_matrix'] = \
725 self.get_helicity_matrix(matrix_element)
726
727 replace_dict['den_factor'] = matrix_element.get_denominator_factor()
728
729 file = \
730 read_template_file('cpp_process_sigmaKin_subproc_function.inc') %\
731 replace_dict
732
733 return file
734
737 """Write matrix() for each process"""
738
739
740
741 replace_dict = {}
742
743
744 replace_dict['proc_name'] = \
745 matrix_element.get('processes')[0].shell_string().replace("0_", "")
746
747
748
749 if self.single_helicities:
750 replace_dict['matrix_args'] = ""
751 replace_dict['all_wavefunction_calls'] = "int i, j;"
752 else:
753 replace_dict['matrix_args'] = "const int hel[]"
754 wavefunctions = matrix_element.get_all_wavefunctions()
755 replace_dict['all_wavefunction_calls'] = \
756 """const int nwavefuncs = %d;
757 std::complex<double> w[nwavefuncs][18];
758 """ % len(wavefunctions)+ \
759 self.get_calculate_wavefunctions(wavefunctions, [])
760
761
762 replace_dict['process_class_name'] = class_name
763
764
765 replace_dict['proc_number'] = i
766
767
768 replace_dict['ncolor'] = len(color_amplitudes)
769
770 replace_dict['ngraphs'] = matrix_element.get_number_of_amplitudes()
771
772
773 replace_dict['color_matrix_lines'] = \
774 self.get_color_matrix_lines(matrix_element)
775
776
777 replace_dict['jamp_lines'] = self.get_jamp_lines(color_amplitudes)
778
779
780
781 replace_dict = self.get_class_specific_definition_matrix(replace_dict, matrix_element)
782
783 file = read_template_file(self.single_process_template) % \
784 replace_dict
785
786 return file
787
789         """Place to add exporter-specific modifications to the replacement
790         dictionary. Overriding methods should always call super()."""
791
792 return converter
793
795 """Get sigmaHat_lines for function definition for Pythia 8 .cc file"""
796
797
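        # Collect the distinct (id1, id2) initial-state combinations over all processes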
798 beams = set([(process.get('legs')[0].get('id'),
799 process.get('legs')[1].get('id')) \
800 for process in self.processes])
801
802 res_lines = []
803
804
805
806 res_lines.append("// Select between the different processes")
807 for ibeam, beam_parts in enumerate(beams):
808
809 if ibeam == 0:
810 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
811 else:
812 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
813
814
815 beam_processes = [(i, me) for (i, me) in \
816 enumerate(self.matrix_elements) if beam_parts in \
817 [(process.get('legs')[0].get('id'),
818 process.get('legs')[1].get('id')) \
819 for process in me.get('processes')]]
820
821
822 beam_processes.extend([(len(self.matrix_elements) + i, me) for (i, me) in \
823 enumerate(self.matrix_elements) if beam_parts in \
824 [(process.get('legs')[0].get('id'),
825 process.get('legs')[1].get('id')) \
826 for process in me.get_mirror_processes()]])
827
828
829 res_lines.append("// Add matrix elements for processes with beams %s" % \
830 repr(beam_parts))
831 res_lines.append("return %s;" % \
832 ("+".join(["matrix_element[%i]*%i" % \
833 (i, len([proc for proc in \
834 me.get('processes') if beam_parts == \
835 (proc.get('legs')[0].get('id'),
836 proc.get('legs')[1].get('id')) or \
837 me.get('has_mirror_process') and \
838 beam_parts == \
839 (proc.get('legs')[1].get('id'),
840 proc.get('legs')[0].get('id'))])) \
841 for (i, me) in beam_processes]).\
842 replace('*1', '')))
843 res_lines.append("}")
844
845
846 res_lines.append("else {")
847 res_lines.append("// Return 0 if not correct initial state assignment")
848 res_lines.append(" return 0.;}")
849
850 return "\n".join(res_lines)
851
852
854 """Return the Helicity matrix definition lines for this matrix element"""
855
856         helicity_line = "static const int helicities[ncomb][nexternal] = {"
857 helicity_line_list = []
858
859 for helicities in matrix_element.get_helicity_matrix(allow_reverse=False):
860 helicity_line_list.append("{"+",".join(['%d'] * len(helicities)) % \
861 tuple(helicities) + "}")
862
863 return helicity_line + ",".join(helicity_line_list) + "};"
864
866 """Return the denominator factor line for this matrix element"""
867
868 return "const int denominator = %d;" % \
869 matrix_element.get_denominator_factor()
870
872         """Return the color matrix definition lines (color denominators and
873         cf matrix) for this matrix element"""
874
875 if not matrix_element.get('color_matrix'):
876 return "\n".join(["static const double denom[1] = {1.};",
877 "static const double cf[1][1] = {1.};"])
878 else:
879 color_denominators = matrix_element.get('color_matrix').\
880 get_line_denominators()
881 denom_string = "static const double denom[ncolor] = {%s};" % \
882 ",".join(["%i" % denom for denom in color_denominators])
883
884 matrix_strings = []
885 my_cs = color.ColorString()
886 for index, denominator in enumerate(color_denominators):
887
888 num_list = matrix_element.get('color_matrix').\
889 get_line_numerators(index, denominator)
890
891 matrix_strings.append("{%s}" % \
892 ",".join(["%d" % i for i in num_list]))
893 matrix_string = "static const double cf[ncolor][ncolor] = {" + \
894 ",".join(matrix_strings) + "};"
895 return "\n".join([denom_string, matrix_string])
896
897
898
899
900
901
903 """Return the jamp = sum(fermionfactor * amp[i]) lines"""
904
905 res_list = []
906
907 for i, coeff_list in enumerate(color_amplitudes):
908
909 res = "jamp[%i]=" % i
910
911
912
913 list_fracs = [abs(coefficient[0][1]) for coefficient in coeff_list]
914 common_factor = False
915 diff_fracs = list(set(list_fracs))
916 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
917 common_factor = True
918 global_factor = diff_fracs[0]
919 res = res + '%s(' % coeff(1, global_factor, False, 0)
920
921 for (coefficient, amp_number) in coeff_list:
922 if common_factor:
923 res = res + "%samp[%d]" % (coeff(coefficient[0],
924 coefficient[1] / abs(coefficient[1]),
925 coefficient[2],
926 coefficient[3]),
927 amp_number - 1)
928 else:
929 res = res + "%samp[%d]" % (coeff(coefficient[0],
930 coefficient[1],
931 coefficient[2],
932 coefficient[3]),
933 amp_number - 1)
934
935 if common_factor:
936 res = res + ')'
937
938 res += ';'
939
940 res_list.append(res)
941
942 return "\n".join(res_list)
943
944 @staticmethod
951
952
953
954
955 def generate_process_files_pythia8(multi_matrix_element, cpp_helas_call_writer,
956 process_string = "",
957 process_number = 0, path = os.getcwd()):
958
959 """Generate the .h and .cc files needed for Pythia 8, for the
960 processes described by multi_matrix_element"""
961
962 process_exporter_pythia8 = ProcessExporterPythia8(multi_matrix_element,
963 cpp_helas_call_writer,
964 process_string,
965 process_number,
966 path)
967
968
969 model = process_exporter_pythia8.model
970 model_name = process_exporter_pythia8.model_name
971 process_exporter_pythia8.process_dir = \
972 'Processes_%(model)s' % {'model': \
973 model_name}
974 process_exporter_pythia8.include_dir = process_exporter_pythia8.process_dir
975 process_exporter_pythia8.generate_process_files()
976 return process_exporter_pythia8
977
980 """Class to take care of exporting a set of matrix elements to
981 Matchbox format."""
982
983
984 process_class_template = 'matchbox_class.inc'
985 single_process_template = 'matchbox_matrix.inc'
986 process_definition_template = 'matchbox_function_definitions.inc'
987
989         """Get initProc_lines for function definition for the Matchbox .cc file"""
990
991 initProc_lines = []
992
993 initProc_lines.append("// Set external particle masses for this matrix element")
994
995 for part in matrix_element.get_external_wavefunctions():
996 initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass'))
997 return "\n".join(initProc_lines)
998
999
1010
1012         """Get sigmaKin_process for all subprocesses for the Matchbox .cc file"""
1013
1014 ret_lines = []
1015 if self.single_helicities:
1016 ret_lines.append(\
1017 "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \
1018 class_name)
1019 ret_lines.append("// Calculate wavefunctions for all processes")
1020 ret_lines.append(self.get_calculate_wavefunctions(\
1021 self.wavefunctions, self.amplitudes))
1022 ret_lines.append(self.get_jamp_lines(color_amplitudes[0]))
1023 ret_lines.append("}")
1024 else:
1025 ret_lines.extend([self.get_sigmaKin_single_process(i, me) \
1026 for i, me in enumerate(self.matrix_elements)])
1027 ret_lines.extend([self.get_matrix_single_process(i, me,
1028 color_amplitudes[i],
1029 class_name) \
1030 for i, me in enumerate(self.matrix_elements)])
1031 return "\n".join(ret_lines)
1032
1033
1035         """Return the color matrix definition lines (res matrix) for this
1036         matrix element in the Matchbox format"""
1037
1038 if not matrix_element.get('color_matrix'):
1039 return "\n".join(["static const double res[1][1] = {-1.};"])
1040
1041
1042 color_denominators = matrix_element.get('color_matrix').\
1043 get_line_denominators()
1044 matrix_strings = []
1045 my_cs = color.ColorString()
1046
1047 for i_color in xrange(len(color_denominators)):
1048
1049 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
1050 t_str=repr(my_cs)
1051 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
1052
1053
1054 all_matches = t_match.findall(t_str)
1055 tmp_color = []
1056 for match in all_matches:
1057 ctype, arg = match[0], [m.strip() for m in match[1].split(',')]
1058 if ctype not in ['T', 'Tr']:
1059                     raise self.ProcessExporterCPPError('Color structure not handled by Matchbox')
1060 tmp_color.append(arg)
1061
1062 nb_index = sum(len(o) for o in tmp_color)
1063 max_len = nb_index + (nb_index//2) -1
1064
1065 curr_color = tmp_color[0]
1066 for tcolor in tmp_color[1:]:
1067 curr_color += ['0'] + tcolor
1068 curr_color += ['0'] * (max_len- len(curr_color))
1069
1070 matrix_strings.append('{%s}' % ','.join(curr_color))
1071
1072 matrix_string = 'static const double res[%s][%s] = {%s};' % \
1073 (len(color_denominators), max_len, ",".join(matrix_strings))
1074
1075 return matrix_string
1076
1082 """Class to take care of exporting a set of matrix elements to
1083 Pythia 8 format."""
1084
1085
1086 process_template_h = 'pythia8_process_h.inc'
1087 process_template_cc = 'pythia8_process_cc.inc'
1088 process_class_template = 'pythia8_process_class.inc'
1089 process_definition_template = 'pythia8_process_function_definitions.inc'
1090 process_wavefunction_template = 'pythia8_process_wavefunctions.inc'
1091 process_sigmaKin_function_template = 'pythia8_process_sigmaKin_function.inc'
1092
1094         """Set the process class name and check that Pythia 8 can handle this process"""
1095
1096 super(ProcessExporterPythia8, self).__init__(*args, **opts)
1097
1098
1099 for me in self.matrix_elements:
1100 if me.get_nexternal_ninitial() not in [(3,2),(4,2),(5,2)]:
1101 nex,nin = me.get_nexternal_ninitial()
1102                 raise InvalidCmd(
1103                     "Pythia 8 can only handle 2->1,2,3 processes, not %d->%d" %
1104                     (nin, nex - nin))
1105
1106 self.process_class = self.process_name
1107
1108
1109
1110
1111
1112
1114 """The complete Pythia 8 class definition for the process"""
1115
1116 replace_dict = {}
1117
1118
1119 replace_dict['model_name'] = self.model_name
1120
1121
1122 process_lines = "\n".join([self.get_process_info_lines(me) for me in \
1123 self.matrix_elements])
1124
1125 replace_dict['process_lines'] = process_lines
1126
1127
1128 replace_dict['nfinal'] = self.nfinal
1129
1130
1131 replace_dict['process_class_name'] = self.process_name
1132
1133
1134 process_definition = "%s (%s)" % (self.process_string,
1135 self.model_name)
1136 replace_dict['process_definition'] = process_definition
1137
1138 process = self.processes[0]
1139 replace_dict['process_code'] = 10000 + \
1140 100*process.get('id') + \
1141 self.process_number
1142
1143 replace_dict['inFlux'] = self.get_process_influx()
1144
1145 replace_dict['id_masses'] = self.get_id_masses(process)
1146 replace_dict['resonances'] = self.get_resonance_lines()
1147
1148 replace_dict['nexternal'] = self.nexternal
1149 replace_dict['nprocesses'] = self.nprocesses
1150
1151 if self.single_helicities:
1152 replace_dict['all_sigma_kin_definitions'] = \
1153 """// Calculate wavefunctions
1154 void calculate_wavefunctions(const int perm[], const int hel[]);
1155 static const int nwavefuncs = %d;
1156 std::complex<double> w[nwavefuncs][18];
1157 static const int namplitudes = %d;
1158 std::complex<double> amp[namplitudes];""" % \
1159 (len(self.wavefunctions),
1160 len(self.amplitudes.get_all_amplitudes()))
1161 replace_dict['all_matrix_definitions'] = \
1162 "\n".join(["double matrix_%s();" % \
1163 me.get('processes')[0].shell_string().\
1164 replace("0_", "") \
1165 for me in self.matrix_elements])
1166
1167 else:
1168 replace_dict['all_sigma_kin_definitions'] = \
1169 "\n".join(["void sigmaKin_%s();" % \
1170 me.get('processes')[0].shell_string().\
1171 replace("0_", "") \
1172 for me in self.matrix_elements])
1173 replace_dict['all_matrix_definitions'] = \
1174 "\n".join(["double matrix_%s(const int hel[]);" % \
1175 me.get('processes')[0].shell_string().\
1176 replace("0_", "") \
1177 for me in self.matrix_elements])
1178
1179
1180 file = read_template_file('pythia8_process_class.inc') % replace_dict
1181
1182 return file
1183
1185         """The complete Pythia 8 class member function definitions for the process"""
1186
1187 replace_dict = {}
1188
1189
1190 replace_dict['model_name'] = self.model_name
1191
1192
1193 replace_dict['process_lines'] = \
1194 "\n".join([self.get_process_info_lines(me) for \
1195 me in self.matrix_elements])
1196
1197
1198 replace_dict['process_class_name'] = self.process_name
1199
1200 color_amplitudes = [me.get_color_amplitudes() for me in \
1201 self.matrix_elements]
1202
1203 replace_dict['initProc_lines'] = \
1204 self.get_initProc_lines(color_amplitudes)
1205 replace_dict['reset_jamp_lines'] = \
1206 self.get_reset_jamp_lines(color_amplitudes)
1207 replace_dict['sigmaKin_lines'] = \
1208 self.get_sigmaKin_lines(color_amplitudes)
1209 replace_dict['sigmaHat_lines'] = \
1210 self.get_sigmaHat_lines()
1211
1212 replace_dict['setIdColAcol_lines'] = \
1213 self.get_setIdColAcol_lines(color_amplitudes)
1214
1215 replace_dict['weightDecay_lines'] = \
1216 self.get_weightDecay_lines()
1217
1218 replace_dict['all_sigmaKin'] = \
1219 self.get_all_sigmaKin_lines(color_amplitudes,
1220 self.process_name)
1221
1222 file = read_template_file('pythia8_process_function_definitions.inc') %\
1223 replace_dict
1224
1225 return file
1226
1228         """Return the Pythia 8 inFlux setting for the incoming beam particles"""
1229
1230
1231
1232 beams = set([tuple(sorted([process.get('legs')[0].get('id'),
1233 process.get('legs')[1].get('id')])) \
1234 for process in self.processes])
1235
1236
1237 antiquarks = range(-1, -6, -1)
1238 quarks = range(1,6)
1239 antileptons = range(-11, -17, -1)
1240 leptons = range(11, 17, 1)
1241 allquarks = antiquarks + quarks
1242 antifermions = antiquarks + antileptons
1243 fermions = quarks + leptons
1244 allfermions = allquarks + antileptons + leptons
1245 downfermions = range(-2, -5, -2) + range(-1, -5, -2) + \
1246 range(-12, -17, -2) + range(-11, -17, -2)
1247 upfermions = range(1, 5, 2) + range(2, 5, 2) + \
1248 range(11, 17, 2) + range(12, 17, 2)
1249
1250
1251
1252 set_tuples = [(set([(21, 21)]), "gg"),
1253 (set(list(itertools.product(allquarks, [21]))), "qg"),
1254 (set(zip(antiquarks, quarks)), "qqbarSame"),
1255 (set(list(itertools.product(allquarks,
1256 allquarks))), "qq"),
1257 (set(zip(antifermions, fermions)),"ffbarSame"),
1258 (set(zip(downfermions, upfermions)),"ffbarChg"),
1259 (set(list(itertools.product(allfermions,
1260 allfermions))), "ff"),
1261 (set(list(itertools.product(allfermions, [22]))), "fgm"),
1262 (set([(21, 22)]), "ggm"),
1263 (set([(22, 22)]), "gmgm")]
1264
1265 for set_tuple in set_tuples:
1266 if beams.issubset(set_tuple[0]):
1267 return set_tuple[1]
1268
1269 raise InvalidCmd('Pythia 8 cannot handle incoming flavors %s' %\
1270 repr(beams))
1271
1272 return
1273
1275 """Return the lines which define the ids for the final state particles,
1276 for the Pythia phase space"""
1277
1278 if self.nfinal == 1:
1279 return ""
1280
1281 mass_strings = []
1282 for i in range(2, len(process.get_legs_with_decays())):
1283 if self.model.get_particle(process.get_legs_with_decays()[i].get('id')).\
1284 get('mass') not in ['zero', 'ZERO']:
1285 mass_strings.append("int id%dMass() const {return %d;}" % \
1286 (i + 1, abs(process.get_legs_with_decays()[i].get('id'))))
1287
1288 return "\n".join(mass_strings)
1289
1291 """Return the lines which define the ids for intermediate resonances
1292 for the Pythia phase space"""
1293
1294 if self.nfinal == 1:
1295 return "virtual int resonanceA() const {return %d;}" % \
1296 abs(self.processes[0].get('legs')[2].get('id'))
1297
1298 res_strings = []
1299 res_letters = ['A', 'B']
1300
1301 sids, singleres, schannel = self.get_resonances()
1302
1303 for i, sid in enumerate(sids[:2]):
1304 res_strings.append("virtual int resonance%s() const {return %d;}"\
1305 % (res_letters[i], sid))
1306
1307 if schannel:
1308 res_strings.append("virtual bool isSChannel() const {return true;}")
1309
1310 if singleres != 0:
1311 res_strings.append("virtual int idSChannel() const {return %d;}" \
1312 % singleres)
1313
1314 return "\n".join(res_strings)
1315
1317 """Return the PIDs for any resonances in 2->2 and 2->3 processes."""
1318
1319 model = self.matrix_elements[0].get('processes')[0].get('model')
1320 new_pdg = model.get_first_non_pdg()
1321
1322 diagrams = sum([me.get('diagrams') for me in self.matrix_elements], [])
1323 resonances = []
1324 no_t_channels = True
1325 final_s_channels = []
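        # Go through all diagrams, recording s-channel resonances with nonzero
        # width, the final s-channel of diagrams without t-channel propagators,
        # and whether any diagram contains t-channel propagators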
1326 for diagram in diagrams:
1327 schannels, tchannels = diagram.get('amplitudes')[0].\
1328 get_s_and_t_channels(self.ninitial, model,
1329 new_pdg)
1330 for schannel in schannels:
1331 sid = schannel.get('legs')[-1].get('id')
1332 part = self.model.get_particle(sid)
1333 if part:
1334 width = self.model.get_particle(sid).get('width')
1335 if width.lower() != 'zero':
1336
1337 resonances.append(abs(sid))
1338 else:
1339 sid = 0
1340 if len(tchannels) == 1 and schannel == schannels[-1]:
1341 final_s_channels.append(abs(sid))
1342
1343 if len(tchannels) > 1:
1344
1345 no_t_channels = False
1346
1347 resonance_set = set(resonances)
1348 final_s_set = set(final_s_channels)
1349
1350 singleres = 0
1351
1352 if len(final_s_channels) == len(diagrams) and len(final_s_set) == 1 \
1353 and final_s_channels[0] != 0:
1354 singleres = final_s_channels[0]
1355
1356 resonance_set = list(set([pid for pid in resonance_set]))
1357
1358
1359
1360 schannel = no_t_channels and \
1361 not any(['QCD' in d.calculate_orders() for d in diagrams])
1362
1363 return resonance_set, singleres, schannel
1364
1366 """Get initProc_lines for function definition for Pythia 8 .cc file"""
1367
1368 initProc_lines = []
1369
1370 initProc_lines.append("// Set massive/massless matrix elements for c/b/mu/tau")
1371
1372 if not self.model.get_particle(4) or \
1373 self.model.get_particle(4).get('mass').lower() == 'zero':
1374 cMassiveME = "0."
1375 else:
1376 cMassiveME = "particleDataPtr->m0(4)"
1377 initProc_lines.append("mcME = %s;" % cMassiveME)
1378 if not self.model.get_particle(5) or \
1379 self.model.get_particle(5).get('mass').lower() == 'zero':
1380 bMassiveME = "0."
1381 else:
1382 bMassiveME = "particleDataPtr->m0(5)"
1383 initProc_lines.append("mbME = %s;" % bMassiveME)
1384 if not self.model.get_particle(13) or \
1385 self.model.get_particle(13).get('mass').lower() == 'zero':
1386 muMassiveME = "0."
1387 else:
1388 muMassiveME = "particleDataPtr->m0(13)"
1389 initProc_lines.append("mmuME = %s;" % muMassiveME)
1390 if not self.model.get_particle(15) or \
1391 self.model.get_particle(15).get('mass').lower() == 'zero':
1392 tauMassiveME = "0."
1393 else:
1394 tauMassiveME = "particleDataPtr->m0(15)"
1395 initProc_lines.append("mtauME = %s;" % tauMassiveME)
1396
1397 for i, me in enumerate(self.matrix_elements):
1398 initProc_lines.append("jamp2[%d] = new double[%d];" % \
1399 (i, len(color_amplitudes[i])))
1400
1401 return "\n".join(initProc_lines)
1402
1404 """Generate lines to set final-state id and color info for process"""
1405
1406 res_lines = []
1407
1408
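        # Collect the distinct (id1, id2) beam combinations over all processes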
1409 beams = set([(process.get('legs')[0].get('id'),
1410 process.get('legs')[1].get('id')) \
1411 for process in self.processes])
1412
1413
1414 for ibeam, beam_parts in enumerate(beams):
1415 if ibeam == 0:
1416 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
1417 else:
1418 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
1419
1420 beam_processes = [(i, me) for (i, me) in \
1421 enumerate(self.matrix_elements) if beam_parts in \
1422 [(process.get('legs')[0].get('id'),
1423 process.get('legs')[1].get('id')) \
1424 for process in me.get('processes')]]
1425
1426 beam_mirror_processes = []
1427 if beam_parts[0] != beam_parts[1]:
1428 beam_mirror_processes = [(i, me) for (i, me) in \
1429 enumerate(self.matrix_elements) if beam_parts in \
1430 [(process.get('legs')[1].get('id'),
1431 process.get('legs')[0].get('id')) \
1432 for process in me.get('processes')]]
1433
1434 final_id_list = []
1435 final_mirror_id_list = []
1436 for (i, me) in beam_processes:
1437 final_id_list.extend([tuple([l.get('id') for l in \
1438 proc.get_legs_with_decays() if l.get('state')]) \
1439 for proc in me.get('processes') \
1440 if beam_parts == \
1441 (proc.get('legs')[0].get('id'),
1442 proc.get('legs')[1].get('id'))])
1443 for (i, me) in beam_mirror_processes:
1444 final_mirror_id_list.extend([tuple([l.get('id') for l in \
1445 proc.get_legs_with_decays() if l.get('state')]) \
1446 for proc in me.get_mirror_processes() \
1447 if beam_parts == \
1448 (proc.get('legs')[0].get('id'),
1449 proc.get('legs')[1].get('id'))])
1450 final_id_list = set(final_id_list)
1451 final_mirror_id_list = set(final_mirror_id_list)
1452
1453 if final_id_list and final_mirror_id_list or \
1454 not final_id_list and not final_mirror_id_list:
1455                 raise self.ProcessExporterCPPError(
1456                       "Missing processes, or both process and mirror process")
1457
1458
1459 ncombs = len(final_id_list)+len(final_mirror_id_list)
1460
1461 res_lines.append("// Pick one of the flavor combinations %s" % \
1462 ", ".join([repr(ids) for ids in final_id_list]))
1463
1464 me_weight = []
1465 for final_ids in final_id_list:
1466 items = [(i, len([ p for p in me.get('processes') \
1467 if [l.get('id') for l in \
1468 p.get_legs_with_decays()] == \
1469 list(beam_parts) + list(final_ids)])) \
1470 for (i, me) in beam_processes]
1471 me_weight.append("+".join(["matrix_element[%i]*%i" % (i, l) for\
1472 (i, l) in items if l > 0]).\
1473 replace('*1', ''))
1474 if any([l>1 for (i, l) in items]):
1475                     raise self.ProcessExporterCPPError(
1476                           "More than one process with identical " +
1477                           "external particles is not supported")
1478
1479 for final_ids in final_mirror_id_list:
1480 items = [(i, len([ p for p in me.get_mirror_processes() \
1481 if [l.get('id') for l in p.get_legs_with_decays()] == \
1482 list(beam_parts) + list(final_ids)])) \
1483 for (i, me) in beam_mirror_processes]
1484 me_weight.append("+".join(["matrix_element[%i]*%i" % \
1485 (i+len(self.matrix_elements), l) for\
1486 (i, l) in items if l > 0]).\
1487 replace('*1', ''))
1488 if any([l>1 for (i, l) in items]):
1489                     raise self.ProcessExporterCPPError(
1490                           "More than one process with identical " +
1491                           "external particles is not supported")
1492
1493 if final_id_list:
1494 res_lines.append("int flavors[%d][%d] = {%s};" % \
1495 (ncombs, self.nfinal,
1496 ",".join(["{" + ",".join([str(id) for id \
1497 in ids]) + "}" for ids \
1498 in final_id_list])))
1499 elif final_mirror_id_list:
1500 res_lines.append("int flavors[%d][%d] = {%s};" % \
1501 (ncombs, self.nfinal,
1502 ",".join(["{" + ",".join([str(id) for id \
1503 in ids]) + "}" for ids \
1504 in final_mirror_id_list])))
1505 res_lines.append("vector<double> probs;")
1506 res_lines.append("double sum = %s;" % "+".join(me_weight))
1507 for me in me_weight:
1508 res_lines.append("probs.push_back(%s/sum);" % me)
1509 res_lines.append("int choice = rndmPtr->pick(probs);")
1510 for i in range(self.nfinal):
1511 res_lines.append("id%d = flavors[choice][%d];" % (i+3, i))
1512
1513 res_lines.append("}")
1514
1515 res_lines.append("setId(%s);" % ",".join(["id%d" % i for i in \
1516 range(1, self.nexternal + 1)]))
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
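        # Pick a color flow for the selected process, with probabilities
        # given by the jamp2 color amplitudes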
1527 res_lines.append("// Pick color flow")
1528
1529 res_lines.append("int ncolor[%d] = {%s};" % \
1530 (len(color_amplitudes),
1531 ",".join([str(len(colamp)) for colamp in \
1532 color_amplitudes])))
1533
1534
1535 for ime, me in enumerate(self.matrix_elements):
1536
1537 res_lines.append("if((%s)){" % \
1538 ")||(".join(["&&".join(["id%d == %d" % \
1539 (i+1, l.get('id')) for (i, l) in \
1540 enumerate(p.get_legs_with_decays())])\
1541 for p in me.get('processes')]))
1542 if ime > 0:
1543 res_lines[-1] = "else " + res_lines[-1]
1544
1545 proc = me.get('processes')[0]
1546 if not me.get('color_basis'):
1547
1548 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
1549 else:
1550
1551 repr_dict = {}
1552 legs = proc.get_legs_with_decays()
1553 for l in legs:
1554 repr_dict[l.get('number')] = \
1555 proc.get('model').get_particle(l.get('id')).get_color()
1556
1557 color_flow_list = \
1558 me.get('color_basis').color_flow_decomposition(\
1559 repr_dict, self.ninitial)
1560
1561 ncolor = len(me.get('color_basis'))
1562 res_lines.append("""vector<double> probs;
1563 double sum = %s;
1564 for(int i=0;i<ncolor[%i];i++)
1565 probs.push_back(jamp2[%i][i]/sum);
1566 int ic = rndmPtr->pick(probs);""" % \
1567 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
1568 in range(ncolor)]), ime, ime))
1569
1570 color_flows = []
1571 for color_flow_dict in color_flow_list:
1572 color_flows.append([int(fmod(color_flow_dict[l.get('number')][i], 500)) \
1573 for (l,i) in itertools.product(legs, [0,1])])
1574
1575
1576 res_lines.append("static int colors[%d][%d] = {%s};" % \
1577 (ncolor, 2 * self.nexternal,
1578 ",".join(["{" + ",".join([str(id) for id \
1579 in flows]) + "}" for flows \
1580 in color_flows])))
1581
1582 res_lines.append("setColAcol(%s);" % \
1583 ",".join(["colors[ic][%d]" % i for i in \
1584 range(2 * self.nexternal)]))
1585 res_lines.append('}')
1586
1587
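        # Do the same for the mirror processes, swapping the two initial-state
        # legs before the color flow decomposition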
1588 for ime, me in enumerate(self.matrix_elements):
1589 if not me.get('has_mirror_process'):
1590 continue
1591 res_lines.append("else if((%s)){" % \
1592 ")||(".join(["&&".join(["id%d == %d" % \
1593 (i+1, l.get('id')) for (i, l) in \
1594 enumerate(p.get_legs_with_decays())])\
1595 for p in me.get_mirror_processes()]))
1596
1597 proc = me.get('processes')[0]
1598 if not me.get('color_basis'):
1599
1600 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
1601 else:
1602
1603 repr_dict = {}
1604 legs = proc.get_legs_with_decays()
1605 legs[0:2] = [legs[1],legs[0]]
1606 for l in legs:
1607 repr_dict[l.get('number')] = \
1608 proc.get('model').get_particle(l.get('id')).get_color()
1609
1610 color_flow_list = \
1611 me.get('color_basis').color_flow_decomposition(\
1612 repr_dict, self.ninitial)
1613
1614 ncolor = len(me.get('color_basis'))
1615 res_lines.append("""vector<double> probs;
1616 double sum = %s;
1617 for(int i=0;i<ncolor[%i];i++)
1618 probs.push_back(jamp2[%i][i]/sum);
1619 int ic = rndmPtr->pick(probs);""" % \
1620 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
1621 in range(ncolor)]), ime, ime))
1622
1623 color_flows = []
1624 for color_flow_dict in color_flow_list:
1625 color_flows.append([color_flow_dict[l.get('number')][i] % 500 \
1626 for (l,i) in itertools.product(legs, [0,1])])
1627
1628
1629 res_lines.append("static int colors[%d][%d] = {%s};" % \
1630 (ncolor, 2 * self.nexternal,
1631 ",".join(["{" + ",".join([str(id) for id \
1632 in flows]) + "}" for flows \
1633 in color_flows])))
1634
1635 res_lines.append("setColAcol(%s);" % \
1636 ",".join(["colors[ic][%d]" % i for i in \
1637 range(2 * self.nexternal)]))
1638 res_lines.append('}')
1639
1640 return "\n".join(res_lines)
1641
1642
1644 """Get weightDecay_lines for function definition for Pythia 8 .cc file"""
1645
1646 weightDecay_lines = "// Just use isotropic decay (default)\n"
1647 weightDecay_lines += "return 1.;"
1648
1649 return weightDecay_lines
1650
1656 """Open a template file and return the contents."""
1657
1658 return open(os.path.join(_file_path, \
1659 'iolibs', 'template_files',
1660 filename)).read()
1661
1663     """Return info lines for MG5, suitable to place at the beginning of
1664     the generated C++ files"""
1665
1666 info = misc.get_pkg_info()
1667 info_lines = ""
1668     if info and 'version' in info and 'date' in info:
1669 info_lines = "# MadGraph5_aMC@NLO v. %s, %s\n" % \
1670 (info['version'], info['date'])
1671 info_lines = info_lines + \
1672 "# By the MadGraph5_aMC@NLO Development Team\n" + \
1673 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"
1674 else:
1675 info_lines = "# MadGraph5_aMC@NLO\n" + \
1676 "# By the MadGraph5_aMC@NLO Development Team\n" + \
1677 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"
1678
1679 return info_lines
1680
1681 def coeff(ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1682 """Returns a nicely formatted string for the coefficients in JAMP lines"""
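    # e.g. coeff(1, fractions.Fraction(1), False, 0) returns '+',
    # while coeff(1, fractions.Fraction(-1, 3), False, 0) returns '-1./3.*'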
1683
1684 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power
1685
1686 if total_coeff == 1:
1687 if is_imaginary:
1688 return '+std::complex<double>(0,1)*'
1689 else:
1690 return '+'
1691 elif total_coeff == -1:
1692 if is_imaginary:
1693 return '-std::complex<double>(0,1)*'
1694 else:
1695 return '-'
1696
1697 res_str = '%+i.' % total_coeff.numerator
1698
1699 if total_coeff.denominator != 1:
1700
1701 res_str = res_str + '/%i.' % total_coeff.denominator
1702
1703 if is_imaginary:
1704 res_str = res_str + '*std::complex<double>(0,1)'
1705
1706 return res_str + '*'
1707
1708
1709
1710
1711
1712 def convert_model_to_cpp(model, output_dir, wanted_lorentz = [],
1713 wanted_couplings = []):
1714     """Create a full valid C++ Standalone model from an MG5 model (coming from UFO)"""
1715
1716
1717 model_builder = UFOModelConverterCPP(model,
1718 os.path.join(output_dir, 'src'),
1719 wanted_lorentz,
1720 wanted_couplings)
1721 model_builder.write_files()
1722
1728 """ A converter of the UFO-MG5 Model to the C++ format """
1729
1730
1731 output_name = 'C++ Standalone'
1732 namespace = 'MG5'
1733
1734
1735 type_dict = {"real": "double",
1736 "complex": "std::complex<double>"}
1737
1738
1739     compiler_option_re = re.compile(r'^#\w')
1740 namespace_re = re.compile('^using namespace')
1741
1742 slha_to_depend = {('SMINPUTS', (3,)): ('aS',),
1743 ('SMINPUTS', (1,)): ('aEM',)}
1744
1745
1746 include_dir = '.'
1747 cc_file_dir = '.'
1748 param_template_h = 'cpp_model_parameters_h.inc'
1749 param_template_cc = 'cpp_model_parameters_cc.inc'
1750 aloha_template_h = 'cpp_hel_amps_h.inc'
1751 aloha_template_cc = 'cpp_hel_amps_cc.inc'
1752
1753 copy_include_files = []
1754 copy_cc_files = []
1755
1756     def __init__(self, model, output_path, wanted_lorentz = [],
1757 wanted_couplings = []):
1758 """ initialization of the objects """
1759
1760 self.model = model
1761 self.model_name = ProcessExporterCPP.get_model_name(model['name'])
1762
1763 self.dir_path = output_path
1764
1765
1766 self.wanted_lorentz = wanted_lorentz
1767
1768
1769
1770
1771 self.coups_dep = {}
1772 self.coups_indep = []
1773 self.params_dep = []
1774 self.params_indep = []
1775 self.p_to_cpp = parsers.UFOExpressionParserCPP()
1776
1777
1778 self.prepare_parameters()
1779 self.prepare_couplings(wanted_couplings)
1780
1789
1790
1791
1793 """Extract the parameters from the model, and store them in
1794 the two lists params_indep and params_dep"""
1795
1796
1797 keys = self.model['parameters'].keys()
1798 keys.sort(key=len)
1799 params_ext = []
1800 for key in keys:
1801 if key == ('external',):
1802 params_ext += [p for p in self.model['parameters'][key] if p.name]
1803 elif 'aS' in key:
1804 for p in self.model['parameters'][key]:
1805 self.params_dep.append(base_objects.ModelVariable(p.name,
1806 p.name + " = " + \
1807 self.p_to_cpp.parse(p.expr) + ";",
1808 p.type,
1809 p.depend))
1810 else:
1811 for p in self.model['parameters'][key]:
1812 if p.name == 'ZERO':
1813 continue
1814 self.params_indep.append(base_objects.ModelVariable(p.name,
1815 p.name + " = " + \
1816 self.p_to_cpp.parse(p.expr) + ";",
1817 p.type,
1818 p.depend))
1819
1820
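        # Turn the external (SLHA block) parameters into calls to the SLHA
        # reader, inserted at the beginning of params_indep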
1821 while params_ext:
1822 param = params_ext.pop(0)
1823
1824 expression = ""
1825 assert param.value.imag == 0
1826 if len(param.lhacode) == 1:
1827 expression = "%s = slha.get_block_entry(\"%s\", %d, %e);" % \
1828 (param.name, param.lhablock.lower(),
1829 param.lhacode[0], param.value.real)
1830 elif len(param.lhacode) == 2:
1831 expression = "indices[0] = %d;\nindices[1] = %d;\n" % \
1832 (param.lhacode[0], param.lhacode[1])
1833 expression += "%s = slha.get_block_entry(\"%s\", indices, %e);" \
1834 % (param.name, param.lhablock.lower(), param.value.real)
1835 else:
1836 raise MadGraph5Error("Only support for SLHA blocks with 1 or 2 indices")
1837 self.params_indep.insert(0,
1838 base_objects.ModelVariable(param.name,
1839 expression,
1840 'real'))
1841
1843         """Extract the couplings from the model, and store them in
1844         the list coups_indep and the dictionary coups_dep"""
1845
1846
1847 keys = self.model['couplings'].keys()
1848 keys.sort(key=len)
1849 for key, coup_list in self.model['couplings'].items():
1850 if "aS" in key:
1851 for c in coup_list:
1852 if not wanted_couplings or c.name in wanted_couplings:
1853 self.coups_dep[c.name] = base_objects.ModelVariable(\
1854 c.name,
1855 c.expr,
1856 c.type,
1857 c.depend)
1858 else:
1859 for c in coup_list:
1860 if not wanted_couplings or c.name in wanted_couplings:
1861 self.coups_indep.append(base_objects.ModelVariable(\
1862 c.name,
1863 c.expr,
1864 c.type,
1865 c.depend))
1866
1867
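        # Convert the coupling expressions to C++ assignment statements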
1868 for coup in self.coups_dep.values() + self.coups_indep:
1869 coup.expr = coup.name + " = " + self.p_to_cpp.parse(coup.expr) + ";"
1870
1871
1872
1874 """Generate the parameters_model.h and parameters_model.cc
1875 files, which have the parameters and couplings for the model."""
1876
1877 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)):
1878 os.makedirs(os.path.join(self.dir_path, self.include_dir))
1879 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)):
1880 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir))
1881
1882 parameter_h_file = os.path.join(self.dir_path, self.include_dir,
1883 'Parameters_%s.h' % self.model_name)
1884 parameter_cc_file = os.path.join(self.dir_path, self.cc_file_dir,
1885 'Parameters_%s.cc' % self.model_name)
1886
1887 file_h, file_cc = self.generate_parameters_class_files()
1888
1889
1890 writers.CPPWriter(parameter_h_file).writelines(file_h)
1891 writers.CPPWriter(parameter_cc_file).writelines(file_cc)
1892
1893
1894 for copy_file in self.copy_include_files:
1895 shutil.copy(os.path.join(_file_path, 'iolibs',
1896 'template_files',copy_file),
1897 os.path.join(self.dir_path, self.include_dir))
1898
1899 for copy_file in self.copy_cc_files:
1900 shutil.copy(os.path.join(_file_path, 'iolibs',
1901 'template_files',copy_file),
1902 os.path.join(self.dir_path, self.cc_file_dir))
1903
1904 logger.info("Created files %s and %s in directory" \
1905 % (os.path.split(parameter_h_file)[-1],
1906 os.path.split(parameter_cc_file)[-1]))
1907 logger.info("%s and %s" % \
1908 (os.path.split(parameter_h_file)[0],
1909 os.path.split(parameter_cc_file)[0]))
1910
1912 """Create the content of the Parameters_model.h and .cc files"""
1913
1914 replace_dict = {}
1915
1916 replace_dict['info_lines'] = get_mg5_info_lines()
1917 replace_dict['model_name'] = self.model_name
1918
1919 replace_dict['independent_parameters'] = \
1920 "// Model parameters independent of aS\n" + \
1921 self.write_parameters(self.params_indep)
1922 replace_dict['independent_couplings'] = \
1923 "// Model parameters dependent on aS\n" + \
1924 self.write_parameters(self.params_dep)
1925 replace_dict['dependent_parameters'] = \
1926 "// Model couplings independent of aS\n" + \
1927 self.write_parameters(self.coups_indep)
1928 replace_dict['dependent_couplings'] = \
1929 "// Model couplings dependent on aS\n" + \
1930 self.write_parameters(self.coups_dep.values())
1931
1932 replace_dict['set_independent_parameters'] = \
1933 self.write_set_parameters(self.params_indep)
1934 replace_dict['set_independent_couplings'] = \
1935 self.write_set_parameters(self.coups_indep)
1936 replace_dict['set_dependent_parameters'] = \
1937 self.write_set_parameters(self.params_dep)
1938 replace_dict['set_dependent_couplings'] = \
1939 self.write_set_parameters(self.coups_dep.values())
1940
1941 replace_dict['print_independent_parameters'] = \
1942 self.write_print_parameters(self.params_indep)
1943 replace_dict['print_independent_couplings'] = \
1944 self.write_print_parameters(self.coups_indep)
1945 replace_dict['print_dependent_parameters'] = \
1946 self.write_print_parameters(self.params_dep)
1947 replace_dict['print_dependent_couplings'] = \
1948 self.write_print_parameters(self.coups_dep.values())
1949
1950 file_h = read_template_file(self.param_template_h) % \
1951 replace_dict
1952 file_cc = read_template_file(self.param_template_cc) % \
1953 replace_dict
1954
1955 return file_h, file_cc
1956
1958 """Write out the definitions of parameters"""
1959
1960
1961 type_param_dict = {}
1962
1963 for param in params:
1964 type_param_dict[param.type] = \
1965 type_param_dict.setdefault(param.type, []) + [param.name]
1966
1967
1968
1969 res_strings = []
1970 for key in type_param_dict:
1971 res_strings.append("%s %s;" % (self.type_dict[key],
1972 ",".join(type_param_dict[key])))
1973
1974 return "\n".join(res_strings)
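# Illustrative sketch (not part of the original source): assuming a type_dict
# that maps 'real' -> 'double' and 'complex' -> 'std::complex<double>', the
# hypothetical parameters mdl_MT, mdl_WT (real) and GC_11 (complex) would be
# written out as the two declaration lines
#     double mdl_MT,mdl_WT;
#     std::complex<double> GC_11;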
1975
1977 """Write out the lines of independent parameters"""
1978
1979
1980
1981 res_strings = []
1982 for param in params:
1983 res_strings.append("%s" % param.expr)
1984
1985
1986
1987 for particle in self.model.get('particles'):
1988 if particle.is_fermion() and particle.get('self_antipart') and \
1989 particle.get('width').lower() != 'zero':
1990 res_strings.append("if (%s < 0)" % particle.get('mass'))
1991 res_strings.append("%(width)s = -abs(%(width)s);" % \
1992 {"width": particle.get('width')})
1993
1994 return "\n".join(res_strings)
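# Illustrative sketch (not part of the original source): each param.expr is
# emitted verbatim (e.g. "mdl_MT = pd->m0(6);" for an external top mass), and
# for a Majorana fermion with hypothetical mass/width names mdl_MN1/mdl_WN1
# the extra guard
#     if (mdl_MN1 < 0)
#     mdl_WN1 = -abs(mdl_WN1);
# keeps the width sign consistent with a negative-mass convention.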
1995
1997 """Write out the lines of independent parameters"""
1998
1999
2000
2001 res_strings = []
2002 for param in params:
2003 res_strings.append("cout << setw(20) << \"%s \" << \"= \" << setiosflags(ios::scientific) << setw(10) << %s << endl;" % (param.name, param.name))
2004
2005 return "\n".join(res_strings)
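# Illustrative sketch (not part of the original source): for a parameter named
# mdl_MT this emits the single C++ statement
#     cout << setw(20) << "mdl_MT " << "= " << setiosflags(ios::scientific)
#          << setw(10) << mdl_MT << endl;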
2006
2007
2008
2010 """Generate the hel_amps_model.h and hel_amps_model.cc files, which
2011 have the complete set of generalized Helas routines for the model"""
2012
2013 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)):
2014 os.makedirs(os.path.join(self.dir_path, self.include_dir))
2015 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)):
2016 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir))
2017
2018 model_h_file = os.path.join(self.dir_path, self.include_dir,
2019 'HelAmps_%s.h' % self.model_name)
2020 model_cc_file = os.path.join(self.dir_path, self.cc_file_dir,
2021 'HelAmps_%s.cc' % self.model_name)
2022
2023 replace_dict = {}
2024
2025 replace_dict['output_name'] = self.output_name
2026 replace_dict['info_lines'] = get_mg5_info_lines()
2027 replace_dict['namespace'] = self.namespace
2028 replace_dict['model_name'] = self.model_name
2029
2030
2031
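# Read in the ALOHA template .h and .cc files, stripped of compiler directives
# and namespace lines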
2032 template_h_files = self.read_aloha_template_files(ext = 'h')
2033 template_cc_files = self.read_aloha_template_files(ext = 'cc')
2034
2035 aloha_model = create_aloha.AbstractALOHAModel(self.model.get('name'))
2036 aloha_model.add_Lorentz_object(self.model.get('lorentz'))
2037
2038 if self.wanted_lorentz:
2039 aloha_model.compute_subset(self.wanted_lorentz)
2040 else:
2041 aloha_model.compute_all(save=False, custom_propa=True)
2042
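# Write each generated ALOHA routine as a C++ declaration (.h) and definition (.cc)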
2043 for abstracthelas in dict(aloha_model).values():
2044 h_rout, cc_rout = abstracthelas.write(output_dir=None, language='CPP',
2045 mode='no_include')
2046
2047 template_h_files.append(h_rout)
2048 template_cc_files.append(cc_rout)
2049
2050
2051
2052
2053
2054
2055
2056
2057
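# Collect all declarations and definitions for substitution into the HelAmps templates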
2058 replace_dict['function_declarations'] = '\n'.join(template_h_files)
2059 replace_dict['function_definitions'] = '\n'.join(template_cc_files)
2060
2061 file_h = read_template_file(self.aloha_template_h) % replace_dict
2062 file_cc = read_template_file(self.aloha_template_cc) % replace_dict
2063
2064
2065 writers.CPPWriter(model_h_file).writelines(file_h)
2066 writers.CPPWriter(model_cc_file).writelines(file_cc)
2067
2068 logger.info("Created files %s and %s in directories %s and %s" \
2069 % (os.path.split(model_h_file)[-1],
2070 os.path.split(model_cc_file)[-1],
2071 os.path.split(model_h_file)[0],
2072 os.path.split(model_cc_file)[0]))
2074
2075
2077 """Read all ALOHA template files with extension ext, strip them of
2078 compiler options and namespace options, and return in a list"""
2079
2080 template_files = []
2081 for filename in glob.glob(os.path.join(MG5DIR, 'aloha',
2082 'template_files', '*.%s' % ext)):
2083 template_file_string = ""
2084 with open(filename, 'r') as file:
2085 for line in file:
2086 line = self.clean_line(line)
2087 if not line:
2088 continue
2089 template_file_string += line.strip() + '\n'
2092 template_files.append(template_file_string)
2093
2094 return template_files
2095
2096
2097
2098
2099
2100
2101
2102
2103
2104
2105
2106
2107
2108
2109
2110
2111
2112
2113
2114
2115
2116
2117
2119 """Strip a line of compiler options and namespace options."""
2120
2121 if self.compiler_option_re.match(line) or self.namespace_re.match(line):
2122 return ""
2123
2124 return line
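# Note (assumption, not from the original source): compiler_option_re and
# namespace_re are class attributes defined elsewhere, presumably matching
# preprocessor directives (e.g. "#include ...") and "using namespace ..." lines,
# so that the stripped routines can live in a single namespaced file.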
2125
2126
2127
2128
2129 def generate_example_file_pythia8(path,
2130 model_path,
2131 process_names,
2132 exporter,
2133 main_file_name = "",
2134 example_dir = "examples"):
2135 """Generate the main_model_name.cc file and Makefile in the examples dir"""
2136
2137 filepath = os.path.join(path, example_dir)
2138 if not os.path.isdir(filepath):
2139 os.makedirs(filepath)
2140
2141 replace_dict = {}
2142
2143
2144 info_lines = get_mg5_info_lines()
2145 replace_dict['info_lines'] = info_lines
2146
2147
2148 replace_dict['model_name'] = exporter.model_name
2149
2150
2151 replace_dict['include_lines'] = \
2152 "\n".join(["#include \"%s.h\"" % proc_name \
2153 for proc_name in process_names])
2154
2155
2156 replace_dict['sigma_pointer_lines'] = \
2157 "\n".join(["pythia.setSigmaPtr(new %s());" % proc_name \
2158 for proc_name in process_names])
2159
2160
2161 replace_dict['param_card'] = os.path.join(os.path.pardir,model_path,
2162 "param_card_%s.dat" % \
2163 exporter.model_name)
2164
2165
2166 file = read_template_file('pythia8_main_example_cc.inc') % \
2167 replace_dict
2168
2169 if not main_file_name:
2170 num = 1
2171 while os.path.exists(os.path.join(filepath,
2172 'main_%s_%i.cc' % (exporter.model_name, num))) or \
os.path.exists(os.path.join(filepath, 'main_%s_%i' % (exporter.model_name, num))):
2173 num += 1
2174 main_file_name = str(num)
2175
2176 main_file = 'main_%s_%s' % (exporter.model_name,
2177 main_file_name)
2178
2179 main_filename = os.path.join(filepath, main_file + '.cc')
2180
2181
2182 writers.CPPWriter(main_filename).writelines(file)
2183
2184 replace_dict = {}
2185
2186
2187 replace_dict['info_lines'] = get_mg5_info_lines()
2188
2189 replace_dict['main_file'] = main_file
2190
2191 replace_dict['process_dir'] = model_path
2192
2193 replace_dict['include_dir'] = exporter.include_dir
2194
2195
2196 file = read_template_file('pythia8_main_makefile.inc') % \
2197 replace_dict
2198
2199 make_filename = os.path.join(filepath, 'Makefile_%s_%s' % \
2200 (exporter.model_name, main_file_name))
2201
2202
2203 open(make_filename, 'w').write(file)
2204
2205 logger.info("Created files %s and %s in directory %s" \
2206 % (os.path.split(main_filename)[-1],
2207 os.path.split(make_filename)[-1],
2208 os.path.split(make_filename)[0]))
2209 return main_file, make_filename
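# Usage sketch (not part of the original source), with hypothetical directory
# and class names for a generated Standard Model process:
#
#     main_file, make_file = generate_example_file_pythia8(
#         pythia_dir, 'Processes_sm', ['Sigma_sm_gg_ttx'], exporter)
#
# which, on a first call with no main_file_name given, writes
# examples/main_sm_1.cc and examples/Makefile_sm_1.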
2210
2218 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)"""
2219
2220 if not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia.h')):
2221 logger.warning('Directory %s is not a valid Pythia 8 main dir.' % pythia_dir)
2222
2223
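# Build the Processes_<model> directory: model files, makefile and param_card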
2224 model_builder = UFOModelConverterPythia8(model, pythia_dir)
2225 model_builder.cc_file_dir = "Processes_" + model_builder.model_name
2226 model_builder.include_dir = model_builder.cc_file_dir
2227
2228 model_builder.write_files()
2229
2230 model_builder.write_makefile()
2231
2232 model_builder.write_param_card()
2233 return model_builder.model_name, model_builder.cc_file_dir
2234
2240 """ A converter of the UFO-MG5 Model to the Pythia 8 format """
2241
2242
2243 output_name = 'Pythia 8'
2244 namespace = 'Pythia8'
2245
2246
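# Predefined translations of selected SLHA inputs into Pythia 8 run-time
# expressions (csm and pd are presumably the Standard Model couplings and
# particle data pointers provided by Pythia 8)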
2247 slha_to_expr = {('SMINPUTS', (1,)): '1./csm->alphaEM(pow(pd->m0(23),2))',
2248 ('SMINPUTS', (2,)): 'M_PI*csm->alphaEM(pow(pd->m0(23),2))*pow(pd->m0(23),2)/(sqrt(2.)*pow(pd->m0(24),2)*(pow(pd->m0(23),2)-pow(pd->m0(24),2)))',
2249 ('SMINPUTS', (3,)): 'alpS',
2250 ('CKMBLOCK', (1,)): 'csm->VCKMgen(1,2)',
2251 }
2252
2253
2254 param_template_h = 'pythia8_model_parameters_h.inc'
2255 param_template_cc = 'pythia8_model_parameters_cc.inc'
2256
2258 """Extract the model parameters from Pythia 8, and store them in
2259 the two lists params_indep and params_dep"""
2260
2261
2262 keys = self.model['parameters'].keys()
2263 keys.sort(key=len)
2264 params_ext = []
2265 for key in keys:
2266 if key == ('external',):
2267 params_ext += [p for p in self.model['parameters'][key] if p.name]
2268 elif 'aS' in key:
2269 for p in self.model['parameters'][key]:
2270 self.params_dep.append(base_objects.ModelVariable(p.name,
2271 p.name + " = " + \
2272 self.p_to_cpp.parse(p.expr) + ';',
2273 p.type,
2274 p.depend))
2275 else:
2276 for p in self.model['parameters'][key]:
2277 self.params_indep.append(base_objects.ModelVariable(p.name,
2278 p.name + " = " + \
2279 self.p_to_cpp.parse(p.expr) + ';',
2280 p.type,
2281 p.depend))
2282
2283
2284
2285
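# Now map the external (SLHA) parameters: use a predefined Pythia 8 expression
# where one is available, otherwise read the entry from the SLHA card at run time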
2286 while params_ext:
2287 param = params_ext.pop(0)
2288 key = (param.lhablock, tuple(param.lhacode))
2289 if 'aS' in self.slha_to_depend.setdefault(key, ()):
2290
2291 self.params_dep.insert(0,
2292 base_objects.ModelVariable(param.name,
2293 param.name + ' = ' + \
2294 self.slha_to_expr[key] + ';',
2295 'real'))
2296 else:
2297 try:
2298
2299 self.params_indep.insert(0,
2300 base_objects.ModelVariable(param.name,
2301 param.name + ' = ' + \
2302 self.slha_to_expr[key] + ';',
2303 'real'))
2304 except Exception:
2305
2306
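# No predefined expression for this entry: translate standard YUKAWA/MASS/DECAY
# blocks to Pythia 8 accessors, or fall back to reading the SLHA card directly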
2307 if param.lhablock == 'YUKAWA':
2308 self.slha_to_expr[key] = 'pd->mRun(%i, pd->m0(24))' \
2309 % param.lhacode[0]
2310 if param.lhablock == 'MASS':
2311 self.slha_to_expr[key] = 'pd->m0(%i)' \
2312 % param.lhacode[0]
2313 if param.lhablock == 'DECAY':
2314 self.slha_to_expr[key] = \
2315 'pd->mWidth(%i)' % param.lhacode[0]
2316 if key in self.slha_to_expr:
2317 self.params_indep.insert(0,\
2318 base_objects.ModelVariable(param.name,
2319 param.name + "=" + self.slha_to_expr[key] \
2320 + ';',
2321 'real'))
2322 else:
2323
2324 if len(param.lhacode) == 1:
2325 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %s)){\n" % \
2326 (param.lhablock.lower(),
2327 param.lhacode[0],
2328 param.name) + \
2329 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2330 + "%s = %e;}") % (param.name, param.value.real,
2331 param.name, param.value.real)
2332 elif len(param.lhacode) == 2:
2333 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %s)){\n" % \
2334 (param.lhablock.lower(),
2335 param.lhacode[0],
2336 param.lhacode[1],
2337 param.name) + \
2338 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2339 + "%s = %e;}") % (param.name, param.value.real,
2340 param.name, param.value.real)
2341 elif len(param.lhacode) == 3:
2342 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %d, %s)){\n" % \
2343 (param.lhablock.lower(),
2344 param.lhacode[0],
2345 param.lhacode[1],
2346 param.lhacode[2],
2347 param.name) + \
2348 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2349 + "%s = %e;}") % (param.name, param.value.real,
2350 param.name, param.value.real)
2351 else:
2352 raise MadGraph5Error("Only support for SLHA blocks with 1, 2 or 3 indices")
2353 self.params_indep.insert(0,
2354 base_objects.ModelVariable(param.name,
2355 expression,
2356 'real'))
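# Illustrative sketch (not part of the original source): for a hypothetical
# external parameter mdl_lam1 in block FRBLOCK with lhacode [1] and default
# value 0.1, the generated expression is
#     if(!slhaPtr->getEntry<double>("frblock", 1, mdl_lam1)){
#     cout << "Warning, setting mdl_lam1 to 1.000000e-01" << endl;
#     mdl_lam1 = 1.000000e-01;}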
2357
2359 """Generate the Makefile, which creates library files."""
2360
2361 makefilename = os.path.join(self.dir_path, self.cc_file_dir,
2362 'Makefile')
2363
2364 replace_dict = {}
2365
2366 replace_dict['info_lines'] = get_mg5_info_lines()
2367 replace_dict['model'] = self.model_name
2368
2369 makefile = read_template_file('pythia8_makefile.inc') % replace_dict
2370
2371
2372 open(makefilename, 'w').write(makefile)
2373
2374 logger.info("Created %s in directory %s" \
2375 % (os.path.split(makefilename)[-1],
2376 os.path.split(makefilename)[0]))
2377
2379 """Generate the param_card for the model."""
2380
2381 paramcardname = os.path.join(self.dir_path, self.cc_file_dir,
2382 'param_card_%s.dat' % self.model_name)
2383
2384 open(paramcardname, 'w').write(\
2385 self.model.write_param_card())
2386
2387 logger.info("Created %s in directory %s" \
2388 % (os.path.split(paramcardname)[-1],
2389 os.path.split(paramcardname)[0]))
2390