Package aloha :: Module create_aloha
[hide private]
[frames] | [no frames]

Source Code for Module aloha.create_aloha

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2010 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import division 
  16  import cmath 
  17  import copy 
  18  import cPickle 
  19  import glob 
  20  import logging 
  21  import numbers 
  22  import os 
  23  import re 
  24  import shutil 
  25  import sys 
  26  import time 
  27   
  28  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  29  sys.path.append(root_path) 
  30  from aloha.aloha_object import * 
  31  import aloha 
  32  import aloha.aloha_writers as aloha_writers 
  33  import aloha.aloha_lib as aloha_lib 
  34  import aloha.aloha_object as aloha_object 
  35  import aloha.aloha_parsers as aloha_parsers 
  36  import aloha.aloha_fct as aloha_fct 
  37  try: 
  38      import madgraph.iolibs.files as files 
  39      import madgraph.various.misc as misc 
  40  except Exception: 
  41      import aloha.files as files 
  42      import aloha.misc as misc 
  43       
# Location of the aloha package on disk.
aloha_path = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger('ALOHA')

# Offset added to a fermion index when the associated pair is conjugated
# (see AbstractRoutineBuilder.apply_conjugation).
_conjugate_gap = 50
# Offset used to build the two Lorentz indices carried by a spin-2 particle.
_spin2_mult = 1000

pjoin = os.path.join

# Shortcut to the ALOHA error class defined in the aloha package.
ALOHAERROR = aloha.ALOHAERROR
class AbstractRoutine(object):
    """ store the result of the computation of Helicity Routine
        this is use for storing and passing to writer """

    def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
        """Store the information defining one helicity routine.

        expr:     the computed analytical expression
        outgoing: index of the off-shell particle (0 for an amplitude)
        spins:    list of the 2s+1 values of the external particles
        name:     name of the lorentz structure
        infostr:  human-readable description of the expression
        denom:    optional denominator (custom propagator), None by default
        """
        self.spins = spins
        self.expr = expr
        self.denominator = denom
        self.name = name
        self.outgoing = outgoing
        self.infostr = infostr
        self.symmetries = []   # other outgoing indices sharing this expression
        self.combined = []     # lists of lorentz names combined with this one
        self.tag = []
        self.contracted = {}

    def add_symmetry(self, outgoing):
        """ add an outgoing (no duplicate entry is recorded)"""
        if not outgoing in self.symmetries:
            self.symmetries.append(outgoing)

    def add_combine(self, lor_list):
        """add a combine rule (list of lorentz names summed with this one)"""
        if lor_list not in self.combined:
            self.combined.append(lor_list)

    def write(self, output_dir, language='Fortran', mode='self', combine=True, **opt):
        """ write the content of the object.
        Also writes the combined routines and, when multiple precision is
        activated, recursively writes the MP version of the routine."""
        writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
        text = writer.write(mode=mode, **opt)
        if combine:
            for grouped in self.combined:
                if isinstance(text, tuple):
                    text = tuple([old.__add__(new) for old, new in zip(text,
                        writer.write_combined(grouped, mode=mode + 'no_include', **opt))])
                else:
                    text += writer.write_combined(grouped, mode=mode + 'no_include', **opt)
        if aloha.mp_precision and 'MP' not in self.tag:
            # write the multiple-precision flavour of the same routine once
            self.tag.append('MP')
            text += self.write(output_dir, language, mode, **opt)
        return text

    def get_info(self, info):
        """return some information on the routine (only "rank" is supported)
        """
        if info == "rank":
            assert isinstance(self.expr, aloha_lib.SplitCoefficient)
            rank = 1
            for coeff in self.expr:
                rank = max(sum(coeff), rank)
            return rank - 1  # due to the coefficient associate to the wavefunctions
        else:
            # use the call form of raise (works on both Python 2 and 3,
            # unlike the original "raise E, msg" statement)
            raise ALOHAERROR('%s is not a valid information that can be computed' % info)
113
class AbstractRoutineBuilder(object):
    """ Launch the creation of the Helicity Routine"""

    prop_lib = {}  # Store computation for the propagator
    counter = 0    # counter for statistic only

    class AbstractALOHAError(Exception):
        """ An error class for ALOHA"""
    def __init__(self, lorentz, model=None):
        """ initialize the run
        lorentz: the lorentz information analyzed (UFO format)
        language: define in which language we write the output
        modes: 0 for all incoming particles
              >0 defines the outgoing part (start to count at 1)
        """

        # copy of the spins list so later mutation does not touch the UFO object
        self.spins = [s for s in lorentz.spins]
        self.name = lorentz.name
        self.conjg = []            # fermion pairs already conjugated
        self.tag = []              # special option tags (C*, L*, P*, MP, ...)
        self.outgoing = None
        self.lorentz_expr = lorentz.structure
        self.routine_kernel = None # cached parsed/evaluated lorentz kernel
        self.spin2_massless = False
        self.spin32_massless = False
        self.contracted = {}
        self.fct = {}
        self.model = model
        self.denominator = None
        # assert model

        self.lastprint = 0 # to avoid that ALOHA makes too many printout

        # inline any form-factor definition directly inside the lorentz
        # expression so that the parser only sees elementary objects
        if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
            for formf in lorentz.formfactors:
                pat = re.compile(r'\b%s\b' % formf.name)
                self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
154 - def compute_routine(self, mode, tag=[], factorize=True):
155 """compute the expression and return it""" 156 self.outgoing = mode 157 self.tag = tag 158 if __debug__: 159 if mode == 0: 160 assert not any(t.startswith('L') for t in tag) 161 self.expr = self.compute_aloha_high_kernel(mode, factorize) 162 return self.define_simple_output()
163
164 - def define_all_conjugate_builder(self, pair_list):
165 """ return the full set of AbstractRoutineBuilder linked to fermion 166 clash""" 167 168 solution = [] 169 170 for i, pair in enumerate(pair_list): 171 new_builder = self.define_conjugate_builder(pair) 172 solution.append(new_builder) 173 solution += new_builder.define_all_conjugate_builder(pair_list[i+1:]) 174 return solution
175
176 - def define_conjugate_builder(self, pairs=1):
177 """ return a AbstractRoutineBuilder for the conjugate operation. 178 If they are more than one pair of fermion. Then use pair to claim which 179 one is conjugated""" 180 181 new_builder = copy.copy(self) 182 new_builder.conjg = self.conjg[:] 183 try: 184 for index in pairs: 185 new_builder.apply_conjugation(index) 186 except TypeError: 187 new_builder.apply_conjugation(pairs) 188 return new_builder
189
190 - def apply_conjugation(self, pair=1):
191 """ apply conjugation on self object""" 192 193 nb_fermion = len([1 for s in self.spins if s % 2 == 0]) 194 if isinstance(pair, tuple): 195 if len(pair) ==1 : 196 pair = pair[0] 197 else: 198 raise Exception 199 200 201 if (pair > 1 or nb_fermion >2) and not self.conjg: 202 # self.conjg avoif multiple check 203 data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion) 204 target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)]) 205 if not data == target: 206 text = """Unable to deal with 4(or more) point interactions 207 in presence of majorana particle/flow violation""" 208 raise ALOHAERROR, text 209 210 old_id = 2 * pair - 1 211 new_id = _conjugate_gap + old_id 212 213 self.kernel_tag = set() 214 aloha_lib.KERNEL.use_tag = set() 215 if not self.routine_kernel or isinstance(self.routine_kernel, str): 216 self.routine_kernel = eval(self.parse_expression(self.lorentz_expr)) 217 self.kernel_tag = aloha_lib.KERNEL.use_tag 218 # We need to compute C Gamma^T C^-1 = C_ab G_cb (-1) C_cd 219 # = C_ac G_bc (-1) C_bd = C_ac G_bc C_db 220 self.routine_kernel = \ 221 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id) 222 223 self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \ 224 (new_id, old_id + 1, new_id + 1, old_id ) 225 226 self.conjg.append(pair)
227 228
229 - def define_simple_output(self):
230 """ define a simple output for this AbstractRoutine """ 231 232 infostr = str(self.lorentz_expr) 233 234 output = AbstractRoutine(self.expr, self.outgoing, self.spins, self.name, \ 235 infostr, self.denominator) 236 output.contracted = dict([(name, aloha_lib.KERNEL.reduced_expr2[name]) 237 for name in aloha_lib.KERNEL.use_tag 238 if name.startswith('TMP')]) 239 240 output.fct = dict([(name, aloha_lib.KERNEL.reduced_expr2[name]) 241 for name in aloha_lib.KERNEL.use_tag 242 if name.startswith('FCT')]) 243 244 output.tag = [t for t in self.tag if not t.startswith('C')] 245 output.tag += ['C%s' % pair for pair in self.conjg] 246 return output
247
248 - def parse_expression(self, expr=None, need_P_sign=False):
249 """change the sign of P for outcoming fermion in order to 250 correct the mismatch convention between HELAS and FR""" 251 252 if not expr: 253 expr = self.lorentz_expr 254 255 if need_P_sign: 256 expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr) 257 258 calc = aloha_parsers.ALOHAExpressionParser() 259 lorentz_expr = calc.parse(expr) 260 return lorentz_expr
261
    def compute_aloha_high_kernel(self, mode, factorize=True):
        """compute the abstract routine associate to this mode:
        evaluate (or reuse) the lorentz kernel, multiply it by the external
        wavefunctions / the propagator of the off-shell leg, then simplify,
        expand and optionally factorize the result."""

        # reset tag for particles
        aloha_lib.KERNEL.use_tag=set()
        #multiply by the wave functions
        nb_spinor = 0
        outgoing = self.outgoing
        if (outgoing + 1) // 2 in self.conjg:
            #flip the outgoing tag if in conjugate
            outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2

        if not self.routine_kernel:
            AbstractRoutineBuilder.counter += 1
            if self.tag == []:
                logger.info('aloha creates %s routines' % self.name)
            elif AbstractALOHAModel.lastprint < time.time() - 1:
                # rate-limit the printout to at most one line per second
                AbstractALOHAModel.lastprint = time.time()
                logger.info('aloha creates %s set of routines with options: %s' \
                            % (self.name, ','.join(self.tag)) )
            try:
                lorentz = self.parse_expression()
                self.routine_kernel = lorentz
                # the parsed expression is a python string built out of
                # aloha_object names; eval turns it into aloha objects
                lorentz = eval(lorentz)
            except NameError as error:
                logger.error('unknow type in Lorentz Evaluation:%s'%str(error))
                raise ALOHAERROR, 'unknow type in Lorentz Evaluation: %s ' % str(error)
            else:
                self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
        elif isinstance(self.routine_kernel,str):
            lorentz = eval(self.routine_kernel)
            aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
        else:
            lorentz = copy.copy(self.routine_kernel)
            aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
        for (i, spin ) in enumerate(self.spins):
            id = i + 1
            #Check if this is the outgoing particle
            if id == outgoing:

                # check if we need a special propagator
                propa = [t[1:] for t in self.tag if t.startswith('P')]
                if propa == ['0']:
                    massless = True
                    self.denominator = None
                elif propa == []:
                    massless = False
                    self.denominator = None
                else:
                    # user-defined propagator: handled separately, skip the
                    # standard propagator multiplication below
                    lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
                    continue

                if spin in [1,-1]:
                    lorentz *= complex(0,1)
                elif spin == 2:
                    # shift and flip the tag if we multiply by C matrices
                    if (id + 1) // 2 in self.conjg:
                        id += _conjugate_gap + id % 2 - (id +1) % 2
                    if (id % 2):
                        #propagator outcoming
                        lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
                    else:
                        # #propagator incoming
                        lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
                elif spin == 3 :
                    if massless or not aloha.unitary_gauge:
                        lorentz *= VectorPropagatorMassless(id, 'I2', id)
                    else:
                        lorentz *= VectorPropagator(id, 'I2', id)
                elif spin == 4:
                    # shift and flip the tag if we multiply by C matrices
                    if (id + 1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    nb_spinor += 1
                    if not massless and (spin_id % 2):
                        lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
                    elif not massless and not (spin_id % 2):
                        lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
                    elif spin_id %2:
                        lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
                    else :
                        lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)

                elif spin == 5 :
                    #lorentz *= 1 # delayed evaluation (fastenize the code)
                    if massless:
                        lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
                                                     2 * _spin2_mult + id,'I2','I3')
                    else:
                        lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
                                                     2 * _spin2_mult + id,'I2','I3', id)
                else:
                    raise self.AbstractALOHAError(
                                'The spin value %s (2s+1) is not supported yet' % spin)
            else:
                # This is an incoming particle
                if spin in [1,-1]:
                    lorentz *= Scalar(id)
                elif spin == 2:
                    # shift the tag if we multiply by C matrices
                    if (id+1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    lorentz *= Spinor(spin_id, id)
                elif spin == 3:
                    lorentz *= Vector(id, id)
                elif spin == 4:
                    # shift the tag if we multiply by C matrices
                    if (id+1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    nb_spinor += 1
                    lorentz *= Spin3Half(id, spin_id, id)
                elif spin == 5:
                    lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
                else:
                    raise self.AbstractALOHAError(
                                'The spin value %s (2s+1) is not supported yet' % spin)

        # If no particle OffShell
        if not outgoing:
            lorentz *= complex(0,-1)
            # Propagator are taken care separately

        lorentz = lorentz.simplify()

        # Modify the expression in case of loop-pozzorini
        if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
            return self.compute_loop_coefficient(lorentz, outgoing)

        lorentz = lorentz.expand()
        lorentz = lorentz.simplify()

        if factorize:
            lorentz = lorentz.factorize()

        lorentz.tag = set(aloha_lib.KERNEL.use_tag)
        return lorentz
406 407 @staticmethod
408 - def mod_propagator_expression(tag, text):
409 """Change the index of the propagator to match the current need""" 410 411 data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s\"\+\-]*?)\)',text) 412 to_change = {} 413 for old, new in tag.items(): 414 if isinstance(new, str): 415 new='\'%s\'' % new 416 else: 417 new = str(new) 418 to_change[r'%s' % old] = new 419 pos=-2 420 while pos +3 < len(data): 421 pos = pos+3 422 ltype = data[pos] 423 if ltype != 'complex': 424 data[pos+1] = re.sub(r'\b(?<!-)(%s)\b' % '|'.join(to_change), 425 lambda x: to_change[x.group()], data[pos+1]) 426 data[pos+1] = '(%s)' % data[pos+1] 427 text=''.join(data) 428 return text
429
    def get_custom_propa(self, propa, spin, id):
        """Return the ALOHA object associated to the user define propagator:
        fetch the propagator definition from the model, substitute the
        particle/lorentz indices into its numerator/denominator and evaluate
        the numerator (the denominator is stored on self.denominator)."""

        propagator = getattr(self.model.propagators, propa)
        numerator = propagator.numerator
        denominator = propagator.denominator

        # Find how to make the replacement for the various tag in the propagator expression
        needPflipping = False
        if spin in [1,-1]:
            tag = {'id': id}
        elif spin == 2:
            # shift and flip the tag if we multiply by C matrices
            if (id + 1) // 2 in self.conjg:
                spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
            else:
                spin_id = id
            if (spin_id % 2):
                #propagator outcoming
                needPflipping = True
                tag ={'1': spin_id, '2': 'I2', 'id': id}
            else:
                tag ={'1': 'I2', '2': spin_id, 'id': id}
        elif spin == 3 :
            tag ={'1': id, '2': 'I2', 'id': id}
        elif spin == 4:
            delta = lambda i,j: aloha_object.Identity(i,j)
            deltaL = lambda i,j: aloha_object.IdentityL(i,j)
            # shift and flip the tag if we multiply by C matrices
            if (id + 1) // 2 in self.conjg:
                spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
            else:
                spin_id = id
            if spin_id % 2:
                needPflipping = True
                tag = {'1': 'pr_1', '2': 'pr_2', 'id':id}
            else:
                # NOTE(review): unlike the other branches this tag has no
                # 'id' entry — confirm whether that is intentional.
                tag = {'1': 'pr_2', '2': 'pr_1'}
            # NOTE(review): numerator is still the raw expression here;
            # this in-place multiplication relies on the aloha objects'
            # multiplication semantics — verify against the UFO propagator API.
            numerator *= deltaL('pr_1',id) * deltaL('pr_2', 'I2') * \
                         delta('pr_1', spin_id) * delta('pr_2', 'I3')
        elif spin == 5 :
            tag = {'1': _spin2_mult + id, '2': 2 * _spin2_mult + id,
                   '51': 'I2', '52': 'I3', 'id':id}

        numerator = self.mod_propagator_expression(tag, numerator)
        if denominator:
            denominator = self.mod_propagator_expression(tag, denominator)

        # re-parse with the correct momentum sign convention
        numerator = self.parse_expression(numerator, needPflipping)
        if denominator:
            self.denominator = self.parse_expression(denominator, needPflipping)
            self.denominator = eval(self.denominator)
            if not isinstance(self.denominator, numbers.Number):
                self.denominator = self.denominator.simplify().expand().simplify().get((0,))

        return eval(numerator)
486 487 488 489
    def compute_loop_coefficient(self, lorentz, outgoing):
        """Split the expression into loop-momentum coefficients (open-loop
        / Pozzorini mode): shift the momenta of the loop legs by the loop
        momentum P_L and split the result on the P_L components."""

        # the (single) incoming open-loop leg, identified by its 'L' tag
        l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
        if (l_in + 1) // 2 in self.conjg:
            #flip the outgoing tag if in conjugate
            l_in = l_in + l_in % 2 - (l_in +1) % 2
        assert l_in != outgoing, 'incoming Open Loop can not be the outcoming one'

        # modify the expression for the momenta
        # P_i -> P_i + P_L and P_o -> -P_o - P_L
        Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
                if P.startswith('_P')]
        Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
        for P in Pdep:
            if P.particle == l_in:
                sign = 1
            else:
                sign = -1
            id = P.id
            lorentz_ind = P.lorentz_ind[0]
            P_Lid = aloha_object.P(lorentz_ind, 'L')
            P_obj = aloha_object.P(lorentz_ind, P.particle)
            new_expr = sign*(P_Lid + P_obj)
            lorentz = lorentz.replace(id, new_expr)

        # Compute the variable from which we need to split the expression
        var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
        spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
        size = aloha_writers.WriteALOHA.type_to_size[spin]-1
        var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]
        # compute their unique identifiant
        veto_ids = aloha_lib.KERNEL.get_ids(var_veto)

        # keep the veto variables unexpanded so they can be split on
        lorentz = lorentz.expand(veto = veto_ids)
        lorentz = lorentz.simplify()
        coeff_expr = lorentz.split(veto_ids)

        for key, expr in coeff_expr.items():
            expr = expr.simplify()
            coeff_expr[key] = expr.factorize()
        coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)

        return coeff_expr
535
536 - def define_lorentz_expr(self, lorentz_expr):
537 """Define the expression""" 538 539 self.expr = lorentz_expr
540
    def define_routine_kernel(self, lorentz=None):
        """Define the kernel at low level: evaluate the lorentz expression
        if none is given, simplify/expand it and cache it on
        self.routine_kernel (which is also returned)."""

        if not lorentz:
            logger.info('compute kernel %s' % self.counter)
            AbstractRoutineBuilder.counter += 1
            # lorentz_expr is a python expression built out of aloha_object names
            lorentz = eval(self.lorentz_expr)

        if isinstance(lorentz, numbers.Number):
            # a pure number needs no algebraic treatment
            self.routine_kernel = lorentz
            return lorentz
        lorentz = lorentz.simplify()
        lorentz = lorentz.expand()
        lorentz = lorentz.simplify()

        self.routine_kernel = lorentz
        return lorentz
558 559 560 @staticmethod
561 - def get_routine_name(name, outgoing):
562 """return the name of the """ 563 564 name = '%s_%s' % (name, outgoing) 565 return name
    @classmethod
    def load_library(cls, tag):
        # load the library (cached at class level in cls.prop_lib)
        # NOTE(review): neither `create_prop_library` nor `cls.aloha_lib`
        # is defined in this chunk of the file — confirm this method is
        # still reachable/functional.
        if tag in cls.prop_lib:
            return
        else:
            cls.prop_lib = create_prop_library(tag, cls.aloha_lib)
574
class CombineRoutineBuilder(AbstractRoutineBuilder):
    """A special builder for combine routine if needed to write those
    explicitely.
    """

    def __init__(self, l_lorentz, model=None):
        """Initialize from a list of UFO lorentz objects; the combined
        expression is sum_i Coup(i) * structure_i.

        l_lorentz: list of lorentz information analyzed (UFO format)
        model:     optional UFO model
        """
        first = l_lorentz[0]
        AbstractRoutineBuilder.__init__(self, first, model)
        self.spins = first.spins
        names = [lor.name for lor in l_lorentz]
        self.name = aloha_writers.combine_name(names[0], names[1:], None)
        self.conjg = []
        self.tag = []
        self.outgoing = None
        # each structure is weighted by its own coupling placeholder Coup(i)
        self.lorentz_expr = ' + '.join(
            'Coup(%s) * (%s)' % (pos + 1, lor.structure)
            for pos, lor in enumerate(l_lorentz))
        self.routine_kernel = None
        self.contracted = {}
        self.fct = {}
602
class AbstractALOHAModel(dict):
    """ A class to build and store the full set of Abstract ALOHA Routine"""

    # timestamp of the last progress printout (class-level, shared by all
    # instances; read/written in AbstractRoutineBuilder.compute_aloha_high_kernel)
    lastprint = 0
    def __init__(self, model_name, write_dir=None, format='Fortran',
                 explicit_combine=False):
        """ load the UFO model and init the dictionary
        model_name:       name of the UFO model (possibly 'name-restriction')
        write_dir:        if given, immediately compute and write all routines
        format:           output language used when write_dir is given
        explicit_combine: write combined routines explicitly
        """

        # Option
        self.explicit_combine = explicit_combine

        # Extract the model name if combined with restriction
        model_name_pattern = re.compile("^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
        model_name_re = model_name_pattern.match(model_name)
        if model_name_re:
            name = model_name_re.group('name')
            rest = model_name_re.group("rest")
            # only strip the suffix when it is a real restriction file
            if rest == 'full' or \
               os.path.isfile(os.path.join(root_path, "models", name,
                                           "restrict_%s.dat" % rest)):
                model_name = model_name_re.group("name")

        # load the UFO model: try as a plain module first, then under 'models.'
        try:
            python_pos = model_name
            __import__(python_pos)
        except Exception:
            python_pos = 'models.%s' % model_name
            __import__(python_pos)
        self.model = sys.modules[python_pos]
        # find the position on the disk
        self.model_pos = os.path.dirname(self.model.__file__)

        # list the external routine
        self.external_routines = []

        # init the dictionary
        dict.__init__(self)
        self.symmetries = {}
        self.multiple_lor = {}

        if write_dir:
            self.main(write_dir,format=format)
647
    def main(self, output_dir, format='Fortran'):
        """ Compute if not already compute.
            Write file in models/MY_MODEL/MY_FORMAT.
            copy the file to output_dir
        """
        # extension used for each supported output language
        ext = {'Fortran':'f','Python':'py','CPP':'h'}

        # Check if a pickle file exists
        if not self.load():
            self.compute_all()
        logger.info(' %s aloha routine' % len(self))

        # Check that output directory exists
        if not output_dir:
            output_dir = os.path.join(self.model_pos, format.lower())
            logger.debug('aloha output dir is %s' % output_dir)
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)

        # Check that all routine are generated at default places:
        for (name, outgoing), abstract in self.items():
            routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
            if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]):
                abstract.write(output_dir, format)
            else:
                logger.info('File for %s already present, skip the writing of this file' % routine_name)
675 676
677 - def save(self, filepos=None):
678 """ save the current model in a pkl file """ 679 680 logger.info('save the aloha abstract routine in a pickle file') 681 if not filepos: 682 filepos = os.path.join(self.model_pos,'aloha.pkl') 683 684 fsock = open(filepos, 'w') 685 cPickle.dump(dict(self), fsock)
686
    def load(self, filepos=None):
        """ reload the pickle file """
        # NOTE(review): this unconditional return disables the pickle
        # reload entirely (everything below is dead code) — presumably a
        # deliberate choice to always recompute; confirm before removing.
        return False
        if not filepos:
            filepos = os.path.join(self.model_pos,'aloha.pkl')
        if os.path.exists(filepos):
            fsock = open(filepos, 'r')
            self.update(cPickle.load(fsock))
            return True
        else:
            return False
698
699 - def get(self, lorentzname, outgoing):
700 """ return the AbstractRoutine with a given lorentz name, and for a given 701 outgoing particle """ 702 703 try: 704 return self[(lorentzname, outgoing)] 705 except Exception: 706 logger.warning('(%s, %s) is not a valid key' % 707 (lorentzname, outgoing) ) 708 return None
709
710 - def get_info(self, info, lorentzname, outgoing, tag, cached=False):
711 """return some information about the aloha routine 712 - "rank": return the rank of the loop function 713 If the cached option is set to true, then the result is stored and 714 recycled if possible. 715 """ 716 717 if not aloha.loop_mode and any(t.startswith('L') for t in tag): 718 aloha.loop_mode = True 719 720 721 returned_dict = {} 722 # Make sure the input argument is a list 723 if isinstance(info, str): 724 infos = [info] 725 else: 726 infos = info 727 728 # First deal with the caching of infos 729 if hasattr(self, 'cached_interaction_infos'): 730 # Now try to recover it 731 for info_key in infos: 732 try: 733 returned_dict[info] = self.cached_interaction_infos[\ 734 (lorentzname,outgoing,tuple(tag),info)] 735 except KeyError: 736 # Some information has never been computed before, so they 737 # will be computed later. 738 pass 739 elif cached: 740 self.cached_interaction_infos = {} 741 742 init = False 743 for info_key in infos: 744 if info_key in returned_dict: 745 continue 746 elif not init: 747 # need to create the aloha object 748 lorentz = eval('self.model.lorentz.%s' % lorentzname) 749 abstract = AbstractRoutineBuilder(lorentz) 750 routine = abstract.compute_routine(outgoing, tag, factorize=False) 751 init = True 752 753 assert 'routine' in locals() 754 returned_dict[info_key] = routine.get_info(info_key) 755 if cached: 756 # Cache the information computed 757 self.cached_interaction_infos[\ 758 (lorentzname,outgoing,tuple(tag),info_key)]=returned_dict[info_key] 759 760 if isinstance(info, str): 761 return returned_dict[info] 762 else: 763 return returned_dict
764
765 - def set(self, lorentzname, outgoing, abstract_routine):
766 """ add in the dictionary """ 767 768 self[(lorentzname, outgoing)] = abstract_routine
769
    def compute_all(self, save=True, wanted_lorentz = [], custom_propa=False):
        """ define all the AbstractRoutine linked to a model
        save:           pickle the result at the end
        wanted_lorentz: restrict to these lorentz names (default: all)
        custom_propa:   also generate routines for user-defined propagators
        """

        # Search identical particles in the vertices in order to avoid
        #to compute identical contribution
        self.look_for_symmetries()
        conjugate_list = self.look_for_conjugate()
        self.look_for_multiple_lorentz_interactions()

        if not wanted_lorentz:
            wanted_lorentz = [l.name for l in self.model.all_lorentz]
        for lorentz in self.model.all_lorentz:
            if not lorentz.name in wanted_lorentz:
                # Only include the routines we ask for
                continue

            if -1 in lorentz.spins:
                # No Ghost in ALOHA
                continue

            if lorentz.structure == 'external':
                # external routines are provided by the user, only record names
                for i in range(len(lorentz.spins)):
                    self.external_routines.append('%s_%s' % (lorentz.name, i))
                continue

            #standard routines
            routines = [(i,[]) for i in range(len(lorentz.spins)+1)]
            # search for special propagators
            if custom_propa:
                for vertex in self.model.all_vertices:
                    if lorentz in vertex.lorentz:
                        for i,part in enumerate(vertex.particles):
                            new_prop = False
                            if hasattr(part, 'propagator') and part.propagator:
                                new_prop = ['P%s' % part.propagator.name]
                            elif part.mass.name.lower() == 'zero':
                                new_prop = ['P0']
                            if new_prop and (i+1, new_prop) not in routines:
                                routines.append((i+1, new_prop))

            builder = AbstractRoutineBuilder(lorentz, self.model)
            self.compute_aloha(builder, routines=routines)

            if lorentz.name in self.multiple_lor:
                for m in self.multiple_lor[lorentz.name]:
                    for outgoing in range(len(lorentz.spins)+1):
                        try:
                            self[(lorentz.name, outgoing)].add_combine(m)
                        except Exception:
                            pass # this routine is a symmetric one, so it
                                 # already has the combination.

            if lorentz.name in conjugate_list:
                conjg_builder_list= builder.define_all_conjugate_builder(\
                                                   conjugate_list[lorentz.name])
                for conjg_builder in conjg_builder_list:
                    # No duplication of conjugation:
                    assert conjg_builder_list.count(conjg_builder) == 1
                    self.compute_aloha(conjg_builder, lorentz.name)
                    if lorentz.name in self.multiple_lor:
                        for m in self.multiple_lor[lorentz.name]:
                            for outgoing in range(len(lorentz.spins)+1):
                                realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
                                try:
                                    self[(realname, outgoing)].add_combine(m)
                                except Exception,error:
                                    # key missing: fall back on the symmetric routine
                                    self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)

        if save:
            self.save()
840
841 - def add_Lorentz_object(self, lorentzlist):
842 """add a series of Lorentz structure created dynamically""" 843 844 for lor in lorentzlist: 845 if not hasattr(self.model.lorentz, lor.name): 846 setattr(self.model.lorentz, lor.name, lor)
847
    def compute_subset(self, data):
        """ create the requested ALOHA routine.
        data should be a list of tuple (lorentz, tag, outgoing)
        tag should be the list of special tag (like conjugation on pair)
        to apply on the object """

        # Search identical particles in the vertices in order to avoid
        #to compute identical contribution
        self.look_for_symmetries()
        # reorganize the data (in order to use optimization for a given lorentz
        #structure
        aloha.loop_mode = False
        # self.explicit_combine = False
        request = {}

        for list_l_name, tag, outgoing in data:
            #allow tag to have integer for retro-compatibility
            all_tag = tag[:]
            conjugate = [i for i in tag if isinstance(i, int)]

            # normalize: string tags first, then C<i> conjugation tags,
            # then P* propagator tags
            tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
            tag = tag + ['C%s'%i for i in conjugate]
            tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]

            conjugate = tuple([int(c[1:]) for c in tag if c.startswith('C')])
            loop = any((t.startswith('L') for t in tag))
            if loop:
                aloha.loop_mode = True
                self.explicit_combine = True

            # request[lorentz_name][conjugate_tuple] -> [(outgoing, tag), ...]
            for l_name in list_l_name:
                try:
                    request[l_name][conjugate].append((outgoing,tag))
                except Exception:
                    try:
                        request[l_name][conjugate] = [(outgoing,tag)]
                    except Exception:
                        request[l_name] = {conjugate: [(outgoing,tag)]}

        # Loop on the structure to build exactly what is request
        for l_name in request:
            lorentz = eval('self.model.lorentz.%s' % l_name)
            if lorentz.structure == 'external':
                for tmp in request[l_name]:
                    for outgoing, tag in request[l_name][tmp]:
                        name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
                        if name not in self.external_routines:
                            self.external_routines.append(name)
                continue

            builder = AbstractRoutineBuilder(lorentz, self.model)

            for conjg in request[l_name]:
                #ensure that routines are in rising order (for symetries)
                # NOTE: python2-only cmp-style sort function
                def sorting(a,b):
                    if a[0] < b[0]: return -1
                    else: return 1
                routines = request[l_name][conjg]
                routines.sort(sorting)
                if not conjg:
                    # No need to conjugate -> compute directly
                    self.compute_aloha(builder, routines=routines)
                else:
                    # Define the high level conjugate routine
                    conjg_builder = builder.define_conjugate_builder(conjg)
                    # Compute routines
                    self.compute_aloha(conjg_builder, symmetry=lorentz.name,
                                       routines=routines)

        # Build mutiple lorentz call
        for list_l_name, tag, outgoing in data:
            if len(list_l_name) ==1:
                continue
            #allow tag to have integer for retrocompatibility
            conjugate = [i for i in tag if isinstance(i, int)]
            all_tag = tag[:]
            tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
            tag = tag + ['C%s'%i for i in conjugate]
            tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]

            if not self.explicit_combine:
                lorentzname = list_l_name[0]
                lorentzname += ''.join(tag)
                # NOTE: has_key is python2-only
                if self.has_key((lorentzname, outgoing)):
                    self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
                else:
                    lorentz = eval('self.model.lorentz.%s' % lorentzname)
                    assert lorentz.structure == 'external'
            else:
                l_lorentz = []
                for l_name in list_l_name:
                    l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
                builder = CombineRoutineBuilder(l_lorentz)

                for conjg in request[list_l_name[0]]:
                    #ensure that routines are in rising order (for symetries)
                    def sorting(a,b):
                        if a[0] < b[0]: return -1
                        else: return 1
                    routines = request[list_l_name[0]][conjg]
                    routines.sort(sorting)
                    if not conjg:
                        # No need to conjugate -> compute directly
                        self.compute_aloha(builder, routines=routines)
                    else:
                        # Define the high level conjugate routine
                        conjg_builder = builder.define_conjugate_builder(conjg)
                        # Compute routines
                        # NOTE(review): `lorentz` here is whatever the previous
                        # loops left bound — possibly stale; confirm the
                        # intended symmetry name.
                        self.compute_aloha(conjg_builder, symmetry=lorentz.name,
                                           routines=routines)
963 - def compute_aloha(self, builder, symmetry=None, routines=None, tag=[]):
964 """ define all the AbstractRoutine linked to a given lorentz structure 965 symmetry authorizes to use the symmetry of anoter lorentz structure. 966 routines to define only a subset of the routines.""" 967 968 name = builder.name 969 if not symmetry: 970 symmetry = name 971 if not routines: 972 if not tag: 973 tag = ['C%s' % i for i in builder.conjg] 974 else: 975 addon = ['C%s' % i for i in builder.conjg] 976 tag = [(i,addon +onetag) for i,onetag in tag] 977 routines = [ tuple([i,tag]) for i in range(len(builder.spins) + 1 )] 978 979 # Create the routines 980 for outgoing, tag in routines: 981 symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines) 982 realname = name + ''.join(tag) 983 if (realname, outgoing) in self: 984 continue # already computed 985 986 if symmetric: 987 self.get(realname, symmetric).add_symmetry(outgoing) 988 else: 989 wavefunction = builder.compute_routine(outgoing, tag) 990 #Store the information 991 self.set(realname, outgoing, wavefunction)
992 993
994 - def compute_aloha_without_kernel(self, builder, symmetry=None, routines=None):
995 """define all the AbstractRoutine linked to a given lorentz structure 996 symmetry authorizes to use the symmetry of anoter lorentz structure. 997 routines to define only a subset of the routines. 998 Compare to compute_aloha, each routines are computed independently. 999 """ 1000 1001 name = builder.name 1002 if not routines: 1003 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )] 1004 1005 for outgoing, tag in routines: 1006 builder.routine_kernel = None 1007 wavefunction = builder.compute_routine(outgoing, tag) 1008 self.set(name, outgoing, wavefunction)
1009 1010
1011 - def write(self, output_dir, language):
1012 """ write the full set of Helicity Routine in output_dir""" 1013 for abstract_routine in self.values(): 1014 abstract_routine.write(output_dir, language) 1015 1016 for routine in self.external_routines: 1017 self.locate_external(routine, language, output_dir)
1018 1019 # if aloha_lib.KERNEL.unknow_fct: 1020 # if language == 'Fortran': 1021 # logger.warning('''Some function present in the lorentz structure are not 1022 # recognized. A Template file has been created: 1023 # %s 1024 # Please edit this file to include the associated definition.''' % \ 1025 # pjoin(output_dir, 'additional_aloha_function.f') ) 1026 # else: 1027 # logger.warning('''Some function present in the lorentz structure are 1028 # not recognized. Please edit the code to add the defnition of such function.''') 1029 # logger.info('list of missing fct: %s .' % \ 1030 # ','.join([a[0] for a in aloha_lib.KERNEL.unknow_fct])) 1031 # 1032 # for fct_name, nb_arg in aloha_lib.KERNEL.unknow_fct: 1033 # if language == 'Fortran': 1034 # aloha_writers.write_template_fct(fct_name, nb_arg, output_dir) 1035 1036 1037 1038 #self.write_aloha_file_inc(output_dir) 1039
1040 - def locate_external(self, name, language, output_dir=None):
1041 """search a valid external file and copy it to output_dir directory""" 1042 1043 language_to_ext = {'Python': 'py', 1044 'Fortran' : 'f', 1045 'CPP': 'C'} 1046 ext = language_to_ext[language] 1047 paths = [os.path.join(self.model_pos, language), self.model_pos, 1048 os.path.join(root_path, 'aloha', 'template_files', )] 1049 1050 ext_files = [] 1051 for path in paths: 1052 ext_files = misc.glob('%s.%s' % (name, ext), path) 1053 if ext_files: 1054 break 1055 else: 1056 1057 raise ALOHAERROR, 'No external routine \"%s.%s\" in directories\n %s' % \ 1058 (name, ext, '\n'.join(paths)) 1059 1060 if output_dir: 1061 for filepath in ext_files: 1062 1063 files.cp(filepath, output_dir) 1064 return ext_files
1065 1066 1067
1068 - def look_for_symmetries(self):
1069 """Search some symmetries in the vertices. 1070 We search if some identical particles are in a vertices in order 1071 to avoid to compute symmetrical contributions""" 1072 1073 for vertex in self.model.all_vertices: 1074 for i, part1 in enumerate(vertex.particles): 1075 for j in range(i-1,-1,-1): 1076 part2 = vertex.particles[j] 1077 if part1.pdg_code == part2.pdg_code and part1.color == 1: 1078 if part1.spin == 2 and (i % 2 != j % 2 ): 1079 continue 1080 for lorentz in vertex.lorentz: 1081 if self.symmetries.has_key(lorentz.name): 1082 if self.symmetries[lorentz.name].has_key(i+1): 1083 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1) 1084 else: 1085 self.symmetries[lorentz.name][i+1] = j+1 1086 else: 1087 self.symmetries[lorentz.name] = {i+1:j+1} 1088 break
1089
1090 - def look_for_multiple_lorentz_interactions(self):
1091 """Search the interaction associate with more than one lorentz structure. 1092 If those lorentz structure have the same order and the same color then 1093 associate a multiple lorentz routines to ALOHA """ 1094 1095 orders = {} 1096 for coup in self.model.all_couplings: 1097 orders[coup.name] = str(coup.order) 1098 1099 for vertex in self.model.all_vertices: 1100 if len(vertex.lorentz) == 1: 1101 continue 1102 #remove ghost 1103 #if -1 in vertex.lorentz[0].spins: 1104 # continue 1105 1106 # assign each order/color to a set of lorentz routine 1107 combine = {} 1108 for (id_col, id_lor), coups in vertex.couplings.items(): 1109 if not isinstance(coups, list): 1110 coups = [coups] 1111 for coup in coups: 1112 order = orders[coup.name] 1113 key = (id_col, order) 1114 if key in combine: 1115 combine[key].append(id_lor) 1116 else: 1117 combine[key] = [id_lor] 1118 1119 # Check if more than one routine are associated 1120 for list_lor in combine.values(): 1121 if len(list_lor) == 1: 1122 continue 1123 list_lor.sort() 1124 main = vertex.lorentz[list_lor[0]].name 1125 if main not in self.multiple_lor: 1126 self.multiple_lor[main] = [] 1127 1128 info = tuple([vertex.lorentz[id].name for id in list_lor[1:]]) 1129 if info not in self.multiple_lor[main]: 1130 self.multiple_lor[main].append(info)
1131 1132
1133 - def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
1134 """ This returns out if no symmetries are available, otherwise it finds 1135 the lowest equivalent outgoing by recursivally calling this function. 1136 auth is a list of authorize output, if define""" 1137 1138 try: 1139 equiv = self.symmetries[l_name][outgoing] 1140 except Exception: 1141 return out 1142 else: 1143 if not valid_output or equiv in valid_output: 1144 return self.has_symmetries(l_name, equiv, out=equiv, 1145 valid_output=valid_output) 1146 else: 1147 return self.has_symmetries(l_name, equiv, out=out, 1148 valid_output=valid_output)
1149
1150 - def look_for_conjugate(self):
1151 """ create a list for the routine needing to be conjugate """ 1152 1153 # Check if they are majorana in the model. 1154 need = False 1155 for particle in self.model.all_particles: 1156 if particle.spin == 2 and particle.selfconjugate: 1157 need = True 1158 break 1159 1160 if not need: 1161 for interaction in self.model.all_vertices: 1162 fermions = [p for p in interaction.particles if p.spin == 2] 1163 for i in range(0, len(fermions), 2): 1164 if fermions[i].pdg_code * fermions[i+1].pdg_code > 0: 1165 # This is a fermion flow violating interaction 1166 need = True 1167 break 1168 1169 # No majorana particles 1170 if not need: 1171 return {} 1172 1173 conjugate_request = {} 1174 # Check each vertex if they are fermion and/or majorana 1175 for vertex in self.model.all_vertices: 1176 for i in range(0, len(vertex.particles), 2): 1177 part1 = vertex.particles[i] 1178 if part1.spin !=2: 1179 # deal only with fermion 1180 break 1181 # check if this pair contains a majorana 1182 if part1.selfconjugate: 1183 continue 1184 part2 = vertex.particles[i + 1] 1185 if part2.selfconjugate: 1186 continue 1187 1188 # No majorana => add the associate lorentz structure 1189 for lorentz in vertex.lorentz: 1190 try: 1191 conjugate_request[lorentz.name].add(i//2+1) 1192 except Exception: 1193 conjugate_request[lorentz.name] = set([i//2+1]) 1194 1195 for elem in conjugate_request: 1196 conjugate_request[elem] = list(conjugate_request[elem]) 1197 1198 return conjugate_request
1199
def write_aloha_file_inc(aloha_dir, file_ext, comp_ext):
    """Find the list of Helicity routines in *aloha_dir* and write the
    'aloha_file.inc' makefile fragment listing those files with the
    compile extension *comp_ext* substituted for *file_ext*."""

    aloha_files = []

    # A valid ALOHA file name ends with an outgoing index + extension
    # (e.g. "_0.f").  Note: file_ext is interpolated unescaped, so a
    # leading '.' matches any character -- preserved historical behaviour.
    alohafile_pattern = re.compile(r'''_\d%s''' % file_ext)
    for filename in os.listdir(aloha_dir):
        if os.path.isfile(os.path.join(aloha_dir, filename)):
            if alohafile_pattern.search(filename):
                aloha_files.append(filename.replace(file_ext, comp_ext))

    # the hand-written helper is always compiled to a .o object file
    if os.path.exists(os.path.join(aloha_dir, 'additional_aloha_function.f')):
        aloha_files.append('additional_aloha_function.o')

    text = "ALOHARoutine = "
    text += ' '.join(aloha_files)
    text += '\n'

    # open() instead of the Python2-only file() builtin, and close the
    # handle explicitly instead of leaking it
    fsock = open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w')
    try:
        fsock.write(text)
    finally:
        fsock.close()
1224
def create_prop_library(tag, lib={}):
    """Build (and cache) the expanded spin-2 propagator expression for *tag*.

    tag is a (name, index) pair, name being 'Spin2Prop' or
    'Spin2PropMassless'.  NOTE(review): the mutable default `lib` looks
    intentional -- it acts as a cross-call cache and is returned to the
    caller -- confirm before "fixing" it.
    """

    def create(obj):
        """simplify / expand / simplify an aloha expression"""
        obj = obj.simplify()
        obj = obj.expand()
        obj = obj.simplify()
        return obj

    # avoid to add tag in global
    old_tag = set(aloha_lib.KERNEL.use_tag)
    # print as a function call: produces the identical output under
    # Python 2 and is valid syntax under Python 3 (the old print statement
    # is not)
    print('create lib %s' % (tag,))
    name, i = tag
    if name == "Spin2Prop":
        lib[('Spin2Prop', i)] = create( Spin2Propagator(_spin2_mult + i, \
                                            2 * _spin2_mult + i, 'I2', 'I3', i) )
    elif name == "Spin2PropMassless":
        lib[('Spin2PropMassless', i)] = create( Spin2masslessPropagator(
                                _spin2_mult + i, 2 * _spin2_mult + i, 'I2', 'I3'))

    aloha_lib.KERNEL.use_tag = old_tag
    return lib


if '__main__' == __name__:
    logging.basicConfig(level=0)
    #create_library()
    import profile
    #model

    start = time.time()
1258 - def main():
1259 alohagenerator = AbstractALOHAModel('sm') 1260 alohagenerator.compute_all(save=False) 1261 return alohagenerator
1262 - def write(alohagenerator):
1263 alohagenerator.write('/tmp/', 'Python')
1264 alohagenerator = main() 1265 logger.info('done in %s s' % (time.time()-start)) 1266 write(alohagenerator) 1267 #profile.run('main()') 1268 #profile.run('write(alohagenerator)') 1269 stop = time.time() 1270 logger.info('done in %s s' % (stop-start)) 1271