Merge lp:~maddevelopers/mg5amcnlo/2.8.1.py3 into lp:mg5amcnlo/lts

Proposed by Olivier Mattelaer
Status: Merged
Merged at revision: 290
Proposed branch: lp:~maddevelopers/mg5amcnlo/2.8.1.py3
Merge into: lp:mg5amcnlo/lts
Diff against target: 1112 lines (+365/-142)
23 files modified
MadSpin/interface_madspin.py (+0/-12)
Template/LO/Source/dsample.f (+4/-0)
UpdateNotes.txt (+7/-0)
VERSION (+2/-2)
bin/mg5_aMC (+14/-3)
madgraph/interface/loop_interface.py (+2/-0)
madgraph/interface/madevent_interface.py (+3/-0)
madgraph/interface/madgraph_interface.py (+59/-47)
madgraph/iolibs/export_fks.py (+4/-5)
madgraph/iolibs/export_v4.py (+11/-8)
madgraph/iolibs/group_subprocs.py (+2/-2)
madgraph/madevent/gen_crossxhtml.py (+7/-18)
madgraph/madevent/gen_ximprove.py (+1/-1)
madgraph/madevent/sum_html.py (+23/-19)
madgraph/various/banner.py (+28/-0)
madgraph/various/lhe_parser.py (+2/-0)
madgraph/various/misc.py (+30/-0)
mg5decay/decay_objects.py (+101/-6)
models/__init__.py (+9/-8)
models/check_param_card.py (+1/-1)
models/import_ufo.py (+47/-7)
models/model_reader.py (+5/-1)
models/write_param_card.py (+3/-2)
To merge this branch: bzr merge lp:~maddevelopers/mg5amcnlo/2.8.1.py3
Reviewer: MadTeam
Status: Pending
Review via email: mp+391101@code.launchpad.net

Commit message

pass to 2.8.1

Description of the change

This is a pure bug-fixing release.

It mainly fixes issues with the Python 3 support (comparison operators, model loading, model conversion, ...).
It also contains a couple of important bug fixes, for example for the auto-width computation (a small illustration of both follows below).
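For context, many of the Python 3 fixes in the diff are of the kind sketched here: values read back from disk as strings were still compared to numbers, which Python 2 tolerated silently but Python 3 rejects. This is only an illustration of the failure mode; the variable names are not taken from the MG5 code base.

    # Minimal sketch of the Python 2 -> 3 comparison issue (illustrative names only)
    last_check = "1600000000.0"      # a timestamp read back from a file as a string
    update_delay = 7 * 24 * 3600     # an int (one week in seconds)
    # Python 2: "1600000000.0" < 604800  silently evaluates to False
    # Python 3: TypeError: '<' not supported between instances of 'str' and 'int'
    if float(last_check) < float(update_delay):   # explicit casts restore the intended comparison
        print("time to check for updates")

The auto-width part concerns param_card entries of the form below, which the restriction machinery can now handle (the PDG id is only an example):

    DECAY  23 Auto   # width recomputed automatically at run time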

I would also like to include the following "feature" branch (containing better support for FxFx):
lp:~maddevelopers/mg5amcnlo/2.8.1_fxfx
But I'm still waiting for approval on that one.

Olivier

Preview Diff

=== modified file 'MadSpin/interface_madspin.py'
--- MadSpin/interface_madspin.py 2020-08-20 15:33:24 +0000
+++ MadSpin/interface_madspin.py 2020-09-22 07:07:51 +0000
@@ -451,14 +451,6 @@
451 except ValueError:451 except ValueError:
452 raise self.InvalidCmd('second argument should be a real number.')452 raise self.InvalidCmd('second argument should be a real number.')
453 453
454 elif args[0] == 'BW_effect':
455 if args[1] in [0, False,'.false.', 'F', 'f', 'False', 'no']:
456 args[1] = 0
457 elif args[1] in [1, True,'.true.', 'T', 't', 'True', 'yes']:
458 args[1] = 1
459 else:
460 raise self.InvalidCmd('second argument should be either T or F.')
461
462 elif args[0] == 'curr_dir':454 elif args[0] == 'curr_dir':
463 if not os.path.isdir(args[1]):455 if not os.path.isdir(args[1]):
464 raise self.InvalidCmd('second argument should be a path to a existing directory')456 raise self.InvalidCmd('second argument should be a path to a existing directory')
@@ -499,8 +491,6 @@
499 opts = list(self.options.keys()) + ['seed', "spinmode"]491 opts = list(self.options.keys()) + ['seed', "spinmode"]
500 return self.list_completion(text, opts) 492 return self.list_completion(text, opts)
501 elif len(args) == 2:493 elif len(args) == 2:
502 if args[1] == 'BW_effect':
503 return self.list_completion(text, ['True', 'False'])
504 if args[1] == 'ms_dir':494 if args[1] == 'ms_dir':
505 return self.path_completion(text, '.', only_dirs = True)495 return self.path_completion(text, '.', only_dirs = True)
506 elif args[1] == 'ms_dir':496 elif args[1] == 'ms_dir':
@@ -517,8 +507,6 @@
517 print('')507 print('')
518 print('-- assign to a given option a given value')508 print('-- assign to a given option a given value')
519 print(' - set max_weight VALUE: pre-define the maximum_weight for the reweighting')509 print(' - set max_weight VALUE: pre-define the maximum_weight for the reweighting')
520 print(' - set BW_effect True|False: [default:True] reshuffle the momenta to describe')
521 print(' corrrectly the Breit-Wigner of the decayed particle')
522 print(' - set seed VALUE: fix the value of the seed to a given value.')510 print(' - set seed VALUE: fix the value of the seed to a given value.')
523 print(' by default use the current time to set the seed. random number are')511 print(' by default use the current time to set the seed. random number are')
524 print(' generated by the python module random using the Mersenne Twister generator.')512 print(' generated by the python module random using the Mersenne Twister generator.')
525513
=== modified file 'Template/LO/Source/dsample.f'
--- Template/LO/Source/dsample.f 2020-05-13 09:45:34 +0000
+++ Template/LO/Source/dsample.f 2020-09-22 07:07:51 +0000
@@ -453,6 +453,10 @@
453 CUMULATED_TIMING = t_after - CUMULATED_TIMING453 CUMULATED_TIMING = t_after - CUMULATED_TIMING
454454
455 if (N_EVALS.eq.0) then455 if (N_EVALS.eq.0) then
456 write(outUnit,*) '<lo_statistics> '
457 write(outUnit,*) '<cumulated_time>'//trim(toStr_real(CUMULATED_TIMING))
458 & //'</cumulated_time>'
459 write(outUnit,*) '</lo_statistics>'
456 return460 return
457 endif461 endif
458 462
459463
=== modified file 'UpdateNotes.txt'
--- UpdateNotes.txt 2020-08-21 08:59:01 +0000
+++ UpdateNotes.txt 2020-09-22 07:07:51 +0000
@@ -1,6 +1,13 @@
1Update notes for MadGraph5_aMC@NLO (in reverse time order)1Update notes for MadGraph5_aMC@NLO (in reverse time order)
22
33
42.8.1(22/09/22):
5 OM: Fix for the auto width for three body decay in presence of identical particles.
6 OM: add support for __header__ in UFO model
7 OM: allow restriction card to have auto-width
8 OM: fixing some html link (removed ajax link forbidden by major web browser)
9 OM: Various fix related to the python3 support
10 - including more efficient model conversion method
42.8.0 (21/08/20):112.8.0 (21/08/20):
5 OM: pass to python3 by default12 OM: pass to python3 by default
6 OM: For LO process, you can now set lpp1 and lpp2 to "4" for process with initial photon in order to get the13 OM: For LO process, you can now set lpp1 and lpp2 to "4" for process with initial photon in order to get the
714
=== modified file 'VERSION'
--- VERSION 2020-08-21 08:59:01 +0000
+++ VERSION 2020-09-22 07:07:51 +0000
@@ -1,5 +1,5 @@
1version = 2.8.01version = 2.8.1
2date = 2020-08-212date = 2020-09-22
33
44
55
66
=== modified file 'bin/mg5_aMC'
--- bin/mg5_aMC 2020-06-21 18:48:13 +0000
+++ bin/mg5_aMC 2020-09-22 07:07:51 +0000
@@ -23,8 +23,15 @@
2323
24import sys24import sys
25if sys.version_info[1] < 7:25if sys.version_info[1] < 7:
26 sys.exit('MadGraph5_aMC@NLO works only with python 2.7 or python 3.7 (and later).\n\26 if sys.version_info[0] ==2:
27 Please upgrate your version of python.')27 sys.exit("MadGraph5_aMC@NLO works only with python 2.7 or python 3.7 (and later).\n"+\
28 " You are currently using Python2.%s. Please use a more recent version of Python." % sys.version_info[1])
29 if sys.version_info[1] ==3:
30 sys.exit("MadGraph5_aMC@NLO works only with python 2.7 or python 3.7 (and later).\n"+\
31 " You are currently using Python 3.%i. So please upgrade your version of Python." % sys.version_info[1] +\
32 " If you have python2.7 installed you need to run the code as\n"+\
33 " python27 ./bin/mg5_aMC \n")
34
28try:35try:
29 import six36 import six
30except ImportError:37except ImportError:
@@ -75,8 +82,12 @@
75import logging.config82import logging.config
76import madgraph.interface.coloring_logging83import madgraph.interface.coloring_logging
7784
85if sys.version_info[0] ==2:
86 logging.warning("\033[91mpython2 support will be removed in last quarter 2021. If you use python2 due to issue with Python3, please report them on https://bugs.launchpad.net/mg5amcnlo\033[0m")
87
88
78if ' ' in os.getcwd():89if ' ' in os.getcwd():
79 logging.warning("Path does contains spaces. We advise that you change your current path to avoid to have space in the path.")90 logging.warning("\033[91mPath does contains spaces. We advise that you change your current path to avoid to have space in the path.\033[0m")
8091
81try: 92try:
82 import readline93 import readline
8394
=== modified file 'madgraph/interface/loop_interface.py'
--- madgraph/interface/loop_interface.py 2020-03-25 14:57:21 +0000
+++ madgraph/interface/loop_interface.py 2020-09-22 07:07:51 +0000
@@ -803,6 +803,7 @@
803 """Generate an amplitude for a given process and add to803 """Generate an amplitude for a given process and add to
804 existing amplitudes804 existing amplitudes
805 """805 """
806
806 args = self.split_arg(line)807 args = self.split_arg(line)
807 # Check the validity of the arguments808 # Check the validity of the arguments
808 self.check_add(args)809 self.check_add(args)
@@ -907,6 +908,7 @@
907 amp in myproc.get('amplitudes')])908 amp in myproc.get('amplitudes')])
908 logger.info("Process generated in %0.3f s" % \909 logger.info("Process generated in %0.3f s" % \
909 (cpu_time2 - cpu_time1))910 (cpu_time2 - cpu_time1))
911
910912
911class LoopInterfaceWeb(mg_interface.CheckValidForCmdWeb, LoopInterface):913class LoopInterfaceWeb(mg_interface.CheckValidForCmdWeb, LoopInterface):
912 pass914 pass
913915
=== modified file 'madgraph/interface/madevent_interface.py'
--- madgraph/interface/madevent_interface.py 2020-06-21 18:48:13 +0000
+++ madgraph/interface/madevent_interface.py 2020-09-22 07:07:51 +0000
@@ -3422,6 +3422,9 @@
3422 self.nb_refine += 13422 self.nb_refine += 1
3423 args = self.split_arg(line)3423 args = self.split_arg(line)
3424 treshold=None3424 treshold=None
3425
3426
3427
3425 for a in args:3428 for a in args:
3426 if a.startswith('--treshold='):3429 if a.startswith('--treshold='):
3427 treshold = float(a.split('=',1)[1])3430 treshold = float(a.split('=',1)[1])
34283431
=== modified file 'madgraph/interface/madgraph_interface.py'
--- madgraph/interface/madgraph_interface.py 2020-08-20 15:33:24 +0000
+++ madgraph/interface/madgraph_interface.py 2020-09-22 07:07:51 +0000
@@ -3082,7 +3082,7 @@
3082 existing amplitudes3082 existing amplitudes
3083 or merge two model3083 or merge two model
3084 """3084 """
30853085
3086 args = self.split_arg(line)3086 args = self.split_arg(line)
30873087
3088 3088
@@ -3183,47 +3183,51 @@
3183 3183
31843184
3185 self._curr_proc_defs.append(myprocdef)3185 self._curr_proc_defs.append(myprocdef)
3186 3186
3187 # Negative coupling order contraints can be given on at most one3187 try:
3188 # coupling order (and either in squared orders or orders, not both)3188 # Negative coupling order contraints can be given on at most one
3189 if len([1 for val in list(myprocdef.get('orders').values())+\3189 # coupling order (and either in squared orders or orders, not both)
3190 list(myprocdef.get('squared_orders').values()) if val<0])>1:3190 if len([1 for val in list(myprocdef.get('orders').values())+\
3191 raise MadGraph5Error("Negative coupling order constraints"+\3191 list(myprocdef.get('squared_orders').values()) if val<0])>1:
3192 " can only be given on one type of coupling and either on"+\3192 raise MadGraph5Error("Negative coupling order constraints"+\
3193 " squared orders or amplitude orders, not both.")3193 " can only be given on one type of coupling and either on"+\
31943194 " squared orders or amplitude orders, not both.")
3195 if myprocdef.get_ninitial() ==1 and myprocdef.get('squared_orders'):3195
3196 logger.warning('''Computation of interference term with decay is not 100% validated. 3196 if myprocdef.get_ninitial() ==1 and myprocdef.get('squared_orders'):
3197 Please check carefully your result.3197 logger.warning('''Computation of interference term with decay is not 100% validated.
3198 One suggestion is also to compare the generation of your process with and without3198 Please check carefully your result.
3199 set group_subprocesses True3199 One suggestion is also to compare the generation of your process with and without
3200 (to write Before the generate command)3200 set group_subprocesses True
3201 ''')3201 (to write Before the generate command)
32023202 ''')
3203 cpu_time1 = time.time()3203
32043204 cpu_time1 = time.time()
3205 # Generate processes3205
3206 if self.options['group_subprocesses'] == 'Auto':3206 # Generate processes
3207 collect_mirror_procs = True3207 if self.options['group_subprocesses'] == 'Auto':
3208 else:3208 collect_mirror_procs = True
3209 collect_mirror_procs = self.options['group_subprocesses']3209 else:
3210 ignore_six_quark_processes = \3210 collect_mirror_procs = self.options['group_subprocesses']
3211 self.options['ignore_six_quark_processes'] if \3211 ignore_six_quark_processes = \
3212 "ignore_six_quark_processes" in self.options \3212 self.options['ignore_six_quark_processes'] if \
3213 else []3213 "ignore_six_quark_processes" in self.options \
32143214 else []
3215 myproc = diagram_generation.MultiProcess(myprocdef,3215
3216 collect_mirror_procs = collect_mirror_procs,3216 myproc = diagram_generation.MultiProcess(myprocdef,
3217 ignore_six_quark_processes = ignore_six_quark_processes,3217 collect_mirror_procs = collect_mirror_procs,
3218 optimize=optimize, diagram_filter=diagram_filter)3218 ignore_six_quark_processes = ignore_six_quark_processes,
32193219 optimize=optimize, diagram_filter=diagram_filter)
32203220
3221 for amp in myproc.get('amplitudes'):3221
3222 if amp not in self._curr_amps:3222 for amp in myproc.get('amplitudes'):
3223 self._curr_amps.append(amp)3223 if amp not in self._curr_amps:
3224 elif warning_duplicate:3224 self._curr_amps.append(amp)
3225 raise self.InvalidCmd("Duplicate process %s found. Please check your processes." % \3225 elif warning_duplicate:
3226 amp.nice_string_processes())3226 raise self.InvalidCmd( "Duplicate process %s found. Please check your processes." % \
3227 amp.nice_string_processes())
3228 except Exception:
3229 self._curr_proc_defs.pop(-1)
3230 raise
32273231
3228 # Reset _done_export, since we have new process3232 # Reset _done_export, since we have new process
3229 self._done_export = False3233 self._done_export = False
@@ -3329,9 +3333,13 @@
3329 if answer != 'y':3333 if answer != 'y':
3330 return 3334 return
3331 3335
3332 #Object_library (.iteritems() -> .items())3336 #Object_library
3333 text = open(pjoin(model_dir, 'object_library.py')).read()3337 text = open(pjoin(model_dir, 'object_library.py')).read()
3338 #(.iteritems() -> .items())
3334 text = text.replace('.iteritems()', '.items()')3339 text = text.replace('.iteritems()', '.items()')
3340 # raise UFOError, "" -> raise UFOError()
3341 text = re.sub('raise (\w+)\s*,\s*["\']([^"]+)["\']',
3342 'raise \g<1>("\g<2>")', text)
3335 text = open(pjoin(model_dir, 'object_library.py'),'w').write(text)3343 text = open(pjoin(model_dir, 'object_library.py'),'w').write(text)
3336 3344
3337 # write_param_card.dat -> copy the one of the sm model3345 # write_param_card.dat -> copy the one of the sm model
@@ -5166,6 +5174,7 @@
51665174
5167 # Reset _done_export, since we have new process5175 # Reset _done_export, since we have new process
5168 self._done_export = False5176 self._done_export = False
5177 self._curr_proc_defs.append(myprocdef)
51695178
5170 cpu_time2 = time.time()5179 cpu_time2 = time.time()
51715180
@@ -5372,20 +5381,20 @@
5372 string. Returns a ProcessDefinition."""5381 string. Returns a ProcessDefinition."""
53735382
5374 # Start with process number (identified by "@") and overall orders5383 # Start with process number (identified by "@") and overall orders
5375 proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*((\w+\s*=\s*\d+\s*)*)$")5384 proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*((\w+\s*\<?=\s*\d+\s*)*)$")
5376 proc_number_re = proc_number_pattern.match(line)5385 proc_number_re = proc_number_pattern.match(line)
5377 overall_orders = {}5386 overall_orders = {}
5378 if proc_number_re:5387 if proc_number_re:
5379 proc_number = int(proc_number_re.group(2))5388 proc_number = int(proc_number_re.group(2))
5380 line = proc_number_re.group(1)5389 line = proc_number_re.group(1)
5381 if proc_number_re.group(3):5390 if proc_number_re.group(3):
5382 order_pattern = re.compile("^(.*?)\s*(\w+)\s*=\s*(\d+)\s*$")5391 order_pattern = re.compile("^(.*?)\s*(\w+)\s*\<?=\s*(\d+)\s*$")
5383 order_line = proc_number_re.group(3)5392 order_line = proc_number_re.group(3)
5384 order_re = order_pattern.match(order_line)5393 order_re = order_pattern.match(order_line)
5385 while order_re:5394 while order_re:
5386 overall_orders[order_re.group(2)] = int(order_re.group(3))5395 overall_orders[order_re.group(2)] = int(order_re.group(3))
5387 order_line = order_re.group(1)5396 order_line = order_re.group(1)
5388 order_re = order_pattern.match(order_line) 5397 order_re = order_pattern.match(order_line)
5389 logger.info(line)5398 logger.info(line)
5390 5399
53915400
@@ -6003,9 +6012,12 @@
6003 if six.PY3:6012 if six.PY3:
6004 self.options['lhapdf_py3'] = pjoin(prefix,'lhapdf6_py3','bin', 'lhapdf-config')6013 self.options['lhapdf_py3'] = pjoin(prefix,'lhapdf6_py3','bin', 'lhapdf-config')
6005 self.exec_cmd('save options %s lhapdf_py3' % config_file)6014 self.exec_cmd('save options %s lhapdf_py3' % config_file)
6015 self.options['lhapdf'] = self.options['lhapdf_py3']
6006 else:6016 else:
6007 self.options['lhapdf_py2'] = pjoin(prefix,'lhapdf6','bin', 'lhapdf-config')6017 self.options['lhapdf_py2'] = pjoin(prefix,'lhapdf6','bin', 'lhapdf-config')
6008 self.exec_cmd('save options %s lhapdf_py2' % config_file)6018 self.exec_cmd('save options %s lhapdf_py2' % config_file)
6019 self.options['lhapdf'] = self.options['lhapdf_py2']
6020
6009 elif tool == 'lhapdf5':6021 elif tool == 'lhapdf5':
6010 self.options['lhapdf'] = pjoin(prefix,'lhapdf5','bin', 'lhapdf-config')6022 self.options['lhapdf'] = pjoin(prefix,'lhapdf5','bin', 'lhapdf-config')
6011 self.exec_cmd('save options %s lhapdf' % config_file, printcmd=False, log=False) 6023 self.exec_cmd('save options %s lhapdf' % config_file, printcmd=False, log=False)
@@ -6801,7 +6813,7 @@
6801 data['last_check'] = time.time()6813 data['last_check'] = time.time()
68026814
6803 #check if we need to update.6815 #check if we need to update.
6804 if time.time() - data['last_check'] < update_delay:6816 if time.time() - float(data['last_check']) < float(update_delay):
6805 return6817 return
68066818
6807 logger.info('Checking if MG5 is up-to-date... (takes up to %ss)' % timeout)6819 logger.info('Checking if MG5 is up-to-date... (takes up to %ss)' % timeout)
68086820
=== modified file 'madgraph/iolibs/export_fks.py'
--- madgraph/iolibs/export_fks.py 2020-08-20 15:33:24 +0000
+++ madgraph/iolibs/export_fks.py 2020-09-22 07:07:51 +0000
@@ -16,7 +16,6 @@
1616
17from __future__ import absolute_import17from __future__ import absolute_import
18from __future__ import print_function18from __future__ import print_function
19from distutils import dir_util
20import glob19import glob
21import logging20import logging
22import os21import os
@@ -99,8 +98,8 @@
99 logger.info('initialize a new directory: %s' % \98 logger.info('initialize a new directory: %s' % \
100 os.path.basename(dir_path))99 os.path.basename(dir_path))
101 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)100 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
102 # distutils.dir_util.copy_tree since dir_path already exists101 # misc.copytree since dir_path already exists
103 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path)102 misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path)
104 # Copy plot_card103 # Copy plot_card
105 for card in ['plot_card']:104 for card in ['plot_card']:
106 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):105 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
@@ -3396,8 +3395,8 @@
3396 logger.info('initialize a new directory: %s' % \3395 logger.info('initialize a new directory: %s' % \
3397 os.path.basename(dir_path))3396 os.path.basename(dir_path))
3398 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)3397 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
3399 # distutils.dir_util.copy_tree since dir_path already exists3398 # misc.copytree since dir_path already exists
3400 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),3399 misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'),
3401 dir_path)3400 dir_path)
3402 # Copy plot_card3401 # Copy plot_card
3403 for card in ['plot_card']:3402 for card in ['plot_card']:
34043403
=== modified file 'madgraph/iolibs/export_v4.py'
--- madgraph/iolibs/export_v4.py 2020-08-20 15:33:24 +0000
+++ madgraph/iolibs/export_v4.py 2020-09-22 07:07:51 +0000
@@ -20,7 +20,6 @@
2020
21import copy21import copy
22from six import StringIO22from six import StringIO
23from distutils import dir_util
24import itertools23import itertools
25import fractions24import fractions
26import glob25import glob
@@ -255,8 +254,8 @@
255 os.path.basename(self.dir_path))254 os.path.basename(self.dir_path))
256 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),255 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'),
257 self.dir_path, True)256 self.dir_path, True)
258 # distutils.dir_util.copy_tree since dir_path already exists257 # misc.copytree since dir_path already exists
259 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 258 misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
260 self.dir_path)259 self.dir_path)
261 # copy plot_card260 # copy plot_card
262 for card in ['plot_card']:261 for card in ['plot_card']:
@@ -269,8 +268,8 @@
269 elif os.getcwd() == os.path.realpath(self.dir_path):268 elif os.getcwd() == os.path.realpath(self.dir_path):
270 logger.info('working in local directory: %s' % \269 logger.info('working in local directory: %s' % \
271 os.path.realpath(self.dir_path))270 os.path.realpath(self.dir_path))
272 # distutils.dir_util.copy_tree since dir_path already exists271 # misc.copytree since dir_path already exists
273 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 272 misc.copytree(pjoin(self.mgme_dir, 'Template/LO'),
274 self.dir_path)273 self.dir_path)
275# for name in misc.glob('Template/LO/*', self.mgme_dir):274# for name in misc.glob('Template/LO/*', self.mgme_dir):
276# name = os.path.basename(name)275# name = os.path.basename(name)
@@ -279,8 +278,8 @@
279# files.cp(filename, pjoin(self.dir_path,name))278# files.cp(filename, pjoin(self.dir_path,name))
280# elif os.path.isdir(filename):279# elif os.path.isdir(filename):
281# shutil.copytree(filename, pjoin(self.dir_path,name), True)280# shutil.copytree(filename, pjoin(self.dir_path,name), True)
282 # distutils.dir_util.copy_tree since dir_path already exists281 # misc.copytree since dir_path already exists
283 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 282 misc.copytree(pjoin(self.mgme_dir, 'Template/Common'),
284 self.dir_path)283 self.dir_path)
285 # Copy plot_card284 # Copy plot_card
286 for card in ['plot_card']:285 for card in ['plot_card']:
@@ -903,7 +902,11 @@
903 if hasattr(self, 'aloha_model'):902 if hasattr(self, 'aloha_model'):
904 aloha_model = self.aloha_model903 aloha_model = self.aloha_model
905 else:904 else:
906 aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))905 try:
906 with misc.MuteLogger(['madgraph.models'], [60]):
907 aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
908 except ImportError:
909 aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
907 aloha_model.add_Lorentz_object(model.get('lorentz'))910 aloha_model.add_Lorentz_object(model.get('lorentz'))
908911
909 # Compute the subroutines912 # Compute the subroutines
910913
=== modified file 'madgraph/iolibs/group_subprocs.py'
--- madgraph/iolibs/group_subprocs.py 2020-08-20 15:33:24 +0000
+++ madgraph/iolibs/group_subprocs.py 2020-09-22 07:07:51 +0000
@@ -317,7 +317,7 @@
317 "Need matrix elements to run find_mapping_diagrams"317 "Need matrix elements to run find_mapping_diagrams"
318318
319 if max_tpropa == 0:319 if max_tpropa == 0:
320 max_tpropa = base_objects.Vertex.max_tpropa320 max_tpropa = int(base_objects.Vertex.max_tpropa)
321 321
322 matrix_elements = self.get('matrix_elements')322 matrix_elements = self.get('matrix_elements')
323 model = matrix_elements[0].get('processes')[0].get('model')323 model = matrix_elements[0].get('processes')[0].get('model')
@@ -363,7 +363,7 @@
363 max(diagram.get_vertex_leg_numbers()) > minvert:363 max(diagram.get_vertex_leg_numbers()) > minvert:
364 diagram_maps[ime].append(0)364 diagram_maps[ime].append(0)
365 continue365 continue
366 if diagram.get_nb_t_channel() > max_tpropa:366 if diagram.get_nb_t_channel() > int(max_tpropa):
367 diagram_maps[ime].append(0)367 diagram_maps[ime].append(0)
368 continue368 continue
369 # Create the equivalent diagram, in the format369 # Create the equivalent diagram, in the format
370370
=== modified file 'madgraph/madevent/gen_crossxhtml.py'
--- madgraph/madevent/gen_crossxhtml.py 2020-06-21 18:48:13 +0000
+++ madgraph/madevent/gen_crossxhtml.py 2020-09-22 07:07:51 +0000
@@ -68,19 +68,6 @@
68 }68 }
69 return http.status!=404;69 return http.status!=404;
70}70}
71function check_link(url,alt, id){
72 var obj = document.getElementById(id);
73 if ( ! UrlExists(url)){
74 if ( ! UrlExists(alt)){
75 obj.href = url;
76 return 1==1;
77 }
78 obj.href = alt;
79 return 1 == 2;
80 }
81 obj.href = url;
82 return 1==1;
83}
84</script> 71</script>
85 <H2 align=center> Results in the %(model)s for %(process)s </H2> 72 <H2 align=center> Results in the %(model)s for %(process)s </H2>
86 <HR>73 <HR>
@@ -1014,14 +1001,16 @@
1014 def special_link(self, link, level, name):1001 def special_link(self, link, level, name):
1015 1002
1016 id = '%s_%s_%s_%s' % (self['run_name'],self['tag'], level, name)1003 id = '%s_%s_%s_%s' % (self['run_name'],self['tag'], level, name)
1017 1004 return " <a id='%(id)s' href='%(link)s.gz'>%(name)s</a>" \
1018 return " <a id='%(id)s' href='%(link)s.gz' onClick=\"check_link('%(link)s.gz','%(link)s','%(id)s')\">%(name)s</a>" \
1019 % {'link': link, 'id': id, 'name':name}1005 % {'link': link, 'id': id, 'name':name}
1006 #return " <a id='%(id)s' href='%(link)s.gz' onClick=\"check_link('%(link)s.gz','%(link)s','%(id)s')\">%(name)s</a>" \
1007 # % {'link': link, 'id': id, 'name':name}
1020 1008
1021 def double_link(self, link1, link2, name, id):1009 def double_link(self, link1, link2, name, id):
1022 1010 return " <a id='%(id)s' href='%(link2)s'>%(name)s</a>" \
1023 return " <a id='%(id)s' href='%(link1)s' onClick=\"check_link('%(link1)s','%(link2)s','%(id)s')\">%(name)s</a>" \1011 % {'link1': link1, 'link2':link2, 'id': id, 'name':name}
1024 % {'link1': link1, 'link2':link2, 'id': id, 'name':name} 1012 #return " <a id='%(id)s' href='%(link2)s' onClick=\"check_link('%(link1)s','%(link2)s','%(id)s')\">%(name)s</a>" \
1013 # % {'link1': link1, 'link2':link2, 'id': id, 'name':name}
1025 1014
1026 def get_links(self, level):1015 def get_links(self, level):
1027 """ Get the links for a given level"""1016 """ Get the links for a given level"""
10281017
=== modified file 'madgraph/madevent/gen_ximprove.py'
--- madgraph/madevent/gen_ximprove.py 2020-06-21 18:48:13 +0000
+++ madgraph/madevent/gen_ximprove.py 2020-09-22 07:07:51 +0000
@@ -993,7 +993,7 @@
993 f.close()993 f.close()
994 994
995 def increase_precision(self, rate=3):995 def increase_precision(self, rate=3):
996 misc.sprint(rate)996 #misc.sprint(rate)
997 if rate < 3:997 if rate < 3:
998 self.max_event_in_iter = 20000998 self.max_event_in_iter = 20000
999 self.min_events = 7500999 self.min_events = 7500
10001000
=== modified file 'madgraph/madevent/sum_html.py'
--- madgraph/madevent/sum_html.py 2019-06-27 12:17:38 +0000
+++ madgraph/madevent/sum_html.py 2020-09-22 07:07:51 +0000
@@ -270,7 +270,7 @@
270 # this can happen if we force maxweight270 # this can happen if we force maxweight
271 self.th_nunwgt = 0 # associated number of event with th_maxwgt 271 self.th_nunwgt = 0 # associated number of event with th_maxwgt
272 #(this is theoretical do not correspond to a number of written event)272 #(this is theoretical do not correspond to a number of written event)
273273 self.timing = 0
274 return274 return
275 275
276 #@cluster.multiple_try(nb_try=5,sleep=20)276 #@cluster.multiple_try(nb_try=5,sleep=20)
@@ -286,7 +286,7 @@
286 286
287 i=0287 i=0
288 found_xsec_line = False288 found_xsec_line = False
289 for line in finput: 289 for line in finput:
290 # Exit as soon as we hit the xml part. Not elegant, but the part290 # Exit as soon as we hit the xml part. Not elegant, but the part
291 # below should eventually be xml anyway.291 # below should eventually be xml anyway.
292 if '<' in line:292 if '<' in line:
@@ -347,7 +347,7 @@
347 xml.append(line)347 xml.append(line)
348348
349 if xml:349 if xml:
350 self.parse_xml_results('\n'.join(xml)) 350 self.parse_xml_results('\n'.join(xml))
351 351
352 # this is for amcatnlo: the number of events has to be read from another file352 # this is for amcatnlo: the number of events has to be read from another file
353 if self.nevents == 0 and self.nunwgt == 0 and isinstance(filepath, str) and \353 if self.nevents == 0 and self.nunwgt == 0 and isinstance(filepath, str) and \
@@ -368,6 +368,12 @@
368 self.run_statistics.load_statistics(statistics_node[0])368 self.run_statistics.load_statistics(statistics_node[0])
369 except ValueError as IndexError:369 except ValueError as IndexError:
370 logger.warning('Fail to read run statistics from results.dat')370 logger.warning('Fail to read run statistics from results.dat')
371 else:
372 lo_statistics_node = dom.getElementsByTagName("lo_statistics")[0]
373 timing = lo_statistics_node.getElementsByTagName('cumulated_time')[0]
374 timing= timing.firstChild.nodeValue
375 self.timing = 0.3 + float(timing) #0.3 is the typical latency of bash script/...
376
371377
372 def set_mfactor(self, value):378 def set_mfactor(self, value):
373 self.mfactor = int(value)379 self.mfactor = int(value)
@@ -448,6 +454,7 @@
448 self.nunwgt = sum([one.nunwgt for one in self]) 454 self.nunwgt = sum([one.nunwgt for one in self])
449 self.wgt = 0455 self.wgt = 0
450 self.luminosity = min([0]+[one.luminosity for one in self])456 self.luminosity = min([0]+[one.luminosity for one in self])
457 self.timing = sum([one.timing for one in self])
451 if update_statistics:458 if update_statistics:
452 self.run_statistics.aggregate_statistics([_.run_statistics for _ in self])459 self.run_statistics.aggregate_statistics([_.run_statistics for _ in self])
453460
@@ -463,6 +470,7 @@
463 self.xsec = sum([one.xsec for one in self]) /nbjobs470 self.xsec = sum([one.xsec for one in self]) /nbjobs
464 self.xerrc = sum([one.xerrc for one in self]) /nbjobs471 self.xerrc = sum([one.xerrc for one in self]) /nbjobs
465 self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) /nbjobs472 self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) /nbjobs
473 self.timing = sum([one.timing for one in self]) #no average here
466 if error:474 if error:
467 self.xerrc = error475 self.xerrc = error
468 self.xerru = error476 self.xerru = error
@@ -547,7 +555,7 @@
547 table_line_template = \555 table_line_template = \
548"""556"""
549<tr><td align=right>%(P_title)s</td>557<tr><td align=right>%(P_title)s</td>
550 <td align=right><a id="%(P_link)s" href=%(P_link)s onClick="check_link('%(P_link)s','%(mod_P_link)s','%(P_link)s')"> %(cross)s </a> </td>558 <td align=right><a id="%(P_link)s" href=%(P_link)s > %(cross)s </a> </td>
551 <td align=right> %(error)s</td>559 <td align=right> %(error)s</td>
552 <td align=right> %(events)s</td>560 <td align=right> %(events)s</td>
553 <td align=right> %(unweighted)s</td>561 <td align=right> %(unweighted)s</td>
@@ -672,6 +680,10 @@
672 line = '%s %s %s %s %s %s\n' % (i+1, self.ysec_iter[i], self.yerr_iter[i], 680 line = '%s %s %s %s %s %s\n' % (i+1, self.ysec_iter[i], self.yerr_iter[i],
673 self.eff_iter[i], self.maxwgt_iter[i], self.yasec_iter[i]) 681 self.eff_iter[i], self.maxwgt_iter[i], self.yasec_iter[i])
674 fsock.writelines(line)682 fsock.writelines(line)
683
684 if self.timing:
685 text = """<lo_statistics>\n<cumulated_time> %s </cumulated_time>\n</lo_statistics>"""
686 fsock.writelines(text % self.timing)
675 687
676688
677689
@@ -694,19 +706,6 @@
694 }706 }
695 return http.status!=404;707 return http.status!=404;
696}708}
697function check_link(url,alt, id){
698 var obj = document.getElementById(id);
699 if ( ! UrlExists(url)){
700 if ( ! UrlExists(alt)){
701 obj.href = alt;
702 return true;
703 }
704 obj.href = alt;
705 return false;
706 }
707 obj.href = url;
708 return 1==1;
709}
710</script>709</script>
711""" 710"""
712711
@@ -716,7 +715,6 @@
716 run = cmd.results.current['run_name']715 run = cmd.results.current['run_name']
717 all = Combine_results(run)716 all = Combine_results(run)
718717
719
720 for Pdir in cmd.get_Pdir():718 for Pdir in cmd.get_Pdir():
721 P_comb = Combine_results(Pdir)719 P_comb = Combine_results(Pdir)
722 720
@@ -759,7 +757,13 @@
759 all.append(P_comb)757 all.append(P_comb)
760 all.compute_values()758 all.compute_values()
761759
762760 try:
761 all_channels = sum([list(P) for P in all],[])
762 timings = sum(x.timing for x in all_channels)
763 logger.info('sum of cpu time of last step: %s', misc.format_time(timings))
764 except Exception as error:
765 logger.debug(str(error))
766 pass
763767
764 return all768 return all
765769
766770
=== modified file 'madgraph/various/banner.py'
--- madgraph/various/banner.py 2020-08-21 08:59:01 +0000
+++ madgraph/various/banner.py 2020-09-22 07:07:51 +0000
@@ -3259,6 +3259,21 @@
3259 if abs(self['lpp1']) in [2, 3,4] and abs(self['lpp2']) in [2, 3,4] and not self['fixed_fac_scale']:3259 if abs(self['lpp1']) in [2, 3,4] and abs(self['lpp2']) in [2, 3,4] and not self['fixed_fac_scale']:
3260 raise InvalidRunCard("Having both beam in elastic photon mode requires fixec_fac_scale to be on True [since this is use as cutoff]")3260 raise InvalidRunCard("Having both beam in elastic photon mode requires fixec_fac_scale to be on True [since this is use as cutoff]")
32613261
3262 # check that ebeam is bigger than the associated mass.
3263 for i in [1,2]:
3264 if self['lpp%s' % i ] not in [1,2]:
3265 continue
3266 if self['mass_ion%i' % i] == -1:
3267 if self['ebeam%i' % i] < 0.938:
3268 if self['ebeam%i' %i] == 0:
3269 logger.warning("At rest proton mode set: Energy beam set to 0.938")
3270 self.set('ebeam%i' %i, 0.938)
3271 else:
3272 raise InvalidRunCard("Energy for beam %i lower than proton mass. Please fix this")
3273 elif self['ebeam%i' % i] < self['mass_ion%i' % i]:
3274 if self['ebeam%i' %i] == 0:
3275 logger.warning("At rest ion mode set: Energy beam set to %s" % self['mass_ion%i' % i])
3276 self.set('ebeam%i' %i, self['mass_ion%i' % i])
32623277
3263 def update_system_parameter_for_include(self):3278 def update_system_parameter_for_include(self):
3264 3279
@@ -4241,6 +4256,19 @@
4241 raise InvalidRunCard("'rw_fscale' has two or more identical entries. They have to be all different for the code to work correctly.")4256 raise InvalidRunCard("'rw_fscale' has two or more identical entries. They have to be all different for the code to work correctly.")
42424257
42434258
4259 # check that ebeam is bigger than the proton mass.
4260 for i in [1,2]:
4261 if self['lpp%s' % i ] not in [1,2]:
4262 continue
4263
4264 if self['ebeam%i' % i] < 0.938:
4265 if self['ebeam%i' %i] == 0:
4266 logger.warning("At rest proton mode set: Energy beam set to 0.938")
4267 self.set('ebeam%i' %i, 0.938)
4268 else:
4269 raise InvalidRunCard("Energy for beam %i lower than proton mass. Please fix this")
4270
4271
4244 def update_system_parameter_for_include(self):4272 def update_system_parameter_for_include(self):
4245 4273
4246 # set the pdg_for_cut fortran parameter4274 # set the pdg_for_cut fortran parameter
42474275
=== modified file 'madgraph/various/lhe_parser.py'
--- madgraph/various/lhe_parser.py 2020-06-21 18:48:13 +0000
+++ madgraph/various/lhe_parser.py 2020-09-22 07:07:51 +0000
@@ -2227,6 +2227,8 @@
2227 2227
2228 if other is None:2228 if other is None:
2229 return False2229 return False
2230 if len(self) != len(other):
2231 return False
2230 2232
2231 for i,p in enumerate(self):2233 for i,p in enumerate(self):
2232 if p.E != other[i].E:2234 if p.E != other[i].E:
22332235
=== modified file 'madgraph/various/misc.py'
--- madgraph/various/misc.py 2020-06-21 18:48:13 +0000
+++ madgraph/various/misc.py 2020-09-22 07:07:51 +0000
@@ -27,6 +27,7 @@
27import optparse27import optparse
28import time28import time
29import shutil29import shutil
30import stat
30import traceback31import traceback
31import gzip as ziplib32import gzip as ziplib
32from distutils.version import LooseVersion, StrictVersion33from distutils.version import LooseVersion, StrictVersion
@@ -939,6 +940,35 @@
939 940
940 str_out = out.stdout.read().decode().strip()941 str_out = out.stdout.read().decode().strip()
941 return str_out942 return str_out
943
944
945
946def copytree(src, dst, symlinks = False, ignore = None):
947 if not os.path.exists(dst):
948 os.makedirs(dst)
949 shutil.copystat(src, dst)
950 lst = os.listdir(src)
951 if ignore:
952 excl = ignore(src, lst)
953 lst = [x for x in lst if x not in excl]
954 for item in lst:
955 s = os.path.join(src, item)
956 d = os.path.join(dst, item)
957 if symlinks and os.path.islink(s):
958 if os.path.lexists(d):
959 os.remove(d)
960 os.symlink(os.readlink(s), d)
961 try:
962 st = os.lstat(s)
963 mode = stat.S_IMODE(st.st_mode)
964 os.lchmod(d, mode)
965 except:
966 pass # lchmod not available
967 elif os.path.isdir(s):
968 copytree(s, d, symlinks, ignore)
969 else:
970 shutil.copy2(s, d)
971
942 972
943973
944@multiple_try()974@multiple_try()
945975
=== modified file 'mg5decay/decay_objects.py'
--- mg5decay/decay_objects.py 2020-08-20 15:33:24 +0000
+++ mg5decay/decay_objects.py 2020-09-22 07:07:51 +0000
@@ -43,6 +43,7 @@
43from __future__ import print_function43from __future__ import print_function
44import array44import array
45import cmath45import cmath
46import collections
46import copy47import copy
47import itertools48import itertools
48import logging49import logging
@@ -1034,6 +1035,7 @@
10341035
1035 # Group channels into amplitudes1036 # Group channels into amplitudes
1036 self.group_channels_2_amplitudes(clevel, model, min_br)1037 self.group_channels_2_amplitudes(clevel, model, min_br)
1038
1037 1039
10381040
1039 def connect_channel_vertex(self, sub_channel, index, vertex, model):1041 def connect_channel_vertex(self, sub_channel, index, vertex, model):
@@ -1170,7 +1172,11 @@
1170 # Do not include the first leg (initial id)1172 # Do not include the first leg (initial id)
1171 if sorted([l.get('id') for l in amplt['process']['legs'][1:]])\1173 if sorted([l.get('id') for l in amplt['process']['legs'][1:]])\
1172 == final_pid:1174 == final_pid:
1173 amplt.add_std_diagram(channel)1175
1176 for symchan in channel.get_symmetric_channel():
1177 amplt.add_std_diagram(symchan)
1178
1179
1174 found = True1180 found = True
1175 break1181 break
11761182
@@ -1766,6 +1772,7 @@
1766 interaction = self.get('interaction_dict')[vertex['id']]1772 interaction = self.get('interaction_dict')[vertex['id']]
1767 decay_parts = [p for p in interaction['particles']]1773 decay_parts = [p for p in interaction['particles']]
1768 1774
1775 # avoid self decay
1769 if len([1 for p in decay_parts if abs(p['pdg_code'])==abs(initpart['pdg_code'])]) >1:1776 if len([1 for p in decay_parts if abs(p['pdg_code'])==abs(initpart['pdg_code'])]) >1:
1770 self['invalid_Npoint'].append(vertex['id'])1777 self['invalid_Npoint'].append(vertex['id'])
1771 return False1778 return False
@@ -1808,15 +1815,23 @@
1808 1815
1809 #check that all substructure are valid1816 #check that all substructure are valid
1810 #remove if any radiation and two times the same particle in a vertex1817 #remove if any radiation and two times the same particle in a vertex
1818 # 2020: relaxed to avoid only twice initial particle in the vertex
1811 for v in proc['vertices']:1819 for v in proc['vertices']:
1812 if any([get_mass(l)==0 for l in v.get('legs')]):1820 if any([get_mass(l)==0 for l in v.get('legs')]):
1813 self['invalid_Npoint'].append(vertex['id'])1821 self['invalid_Npoint'].append(vertex['id'])
1814 return False1822 return False
18151823 init_pdg = [l['id'] for l in v.get('legs') if l['number'] ==1][0]
1816 ids = set(abs(l['id']) for l in v.get('legs'))1824 nb_part = [1 for l in v.get('legs') if abs(l['id']) in [abs(init_pdg), abs(initpart.get('pdg_code'))]]
1817 if len(ids) != len(vertex.get('legs')):1825 if len(nb_part) > 1:
1818 self['invalid_Npoint'].append(vertex['id'])1826 self['invalid_Npoint'].append(vertex['id'])
1819 return False1827 return False
1828
1829 # before relaxation it was
1830 # seems to me to be always False
1831 #ids = set(abs(l['id']) for l in v.get('legs'))
1832 #if len(ids) != len(vertex.get('legs')):
1833 # self['invalid_Npoint'].append(vertex['id'])
1834 # return False
18201835
1821 # check onshell/offshell status 1836 # check onshell/offshell status
1822 prev_mass = 01837 prev_mass = 0
@@ -3376,6 +3391,85 @@
3376 self['fermionfactor'] = 13391 self['fermionfactor'] = 1
3377 3392
33783393
3394 def get_symmetric_channel(self, ignore=[]):
3395
3396 if self['s_factor'] == 1:
3397 return [self]
3398 elif len(self['vertices']) == 1:
3399 return [self]
3400 elif len(self['final_legs']) == len(set(l['id'] for l in self['final_legs'])):
3401 return [self]
3402
3403 # check if all symetry are already handle:
3404 if len(set(l['id'] for l in self['final_legs'] if l['id'] not in ignore)) ==\
3405 len([ l['id'] for l in self['final_legs'] if l['id'] not in ignore]):
3406 return [self]
3407
3408 nb_id = collections.defaultdict(int)
3409 for l in self['final_legs']:
3410 nb_id[l['id']] += 1
3411
3412 id_to_handle = [id for id in nb_id if nb_id[id] > 1 and id not in ignore]
3413
3414 handling = id_to_handle[0]
3415 remain_id = id_to_handle[1:]
3416 out = []
3417
3418 numbers = [l.get('number') for l in self['final_legs'] if l.get('id') == handling]
3419
3420 for new_numbers in itertools.permutations(numbers):
3421 mapping_id = dict([(o,n) for o,n in zip(numbers, new_numbers) if o!=n])
3422 if not mapping_id:
3423 out.append(self)
3424 continue
3425 channel = copy.copy(self)
3426 channel['vertices'] = base_objects.VertexList()
3427 # (real) DiagramTag
3428 channel['tag'] = []
3429 # IdentifyHelasTag
3430 channel['helastag'] = []
3431 # the number of the corresponding helas calls
3432 channel['helas_number'] = None
3433 # diagram written by IdentifyHelasTag
3434 channel['std_diagram'] = None
3435 for l,vertex in enumerate(self['vertices']):
3436 new_vertex = copy.copy(vertex)
3437 new_vertex['legs'] = base_objects.LegList()
3438 min_id = 99
3439 for leg in vertex['legs']:
3440 if leg['number'] in mapping_id:
3441 new_leg = copy.copy(leg)
3442 new_leg.set('number', mapping_id[leg['number']])
3443 new_vertex['legs'].append(new_leg)
3444 else:
3445 new_vertex['legs'].append(leg)
3446 min_id = min(min_id, leg['number'])
3447
3448 if min_id != new_vertex['legs'][-1]['number']:
3449 if l != len(self['vertices']) -1:
3450 mapping_id[new_vertex['legs'][-1]['number']] = min_id
3451 new_vertex['legs'][-1]['number'] = min_id
3452 channel['vertices'].append(new_vertex)
3453 out.append(channel)
3454
3455
3456 # do the recursion
3457 if len(remain_id) > 1:
3458 all_out = []
3459 for d in out:
3460 all_out += d.get_symmetric_channel(ignore=ignore)
3461 return all_out
3462 else:
3463 return out
3464
3465
3466
3467
3468
3469
3470
3471
3472
3379 def filter(self, name, value):3473 def filter(self, name, value):
3380 """Filter for valid diagram property values."""3474 """Filter for valid diagram property values."""
3381 3475
@@ -3494,6 +3588,7 @@
3494 tmp.sort()3588 tmp.sort()
3495 if base == tmp:3589 if base == tmp:
3496 return False3590 return False
3591
3497 return True3592 return True
3498 3593
34993594
@@ -4373,6 +4468,7 @@
4373 # of list.4468 # of list.
4374 if count != 1:4469 if count != 1:
4375 self['s_factor'] = self['s_factor'] * math.factorial(count)4470 self['s_factor'] = self['s_factor'] * math.factorial(count)
4471
4376 return math.sqrt((M ** 2+mass_list[0] ** 2-mass_list[1] ** 2) ** 2-\4472 return math.sqrt((M ** 2+mass_list[0] ** 2-mass_list[1] ** 2) ** 2-\
4377 (2* M *mass_list[0]) ** 2)* \4473 (2* M *mass_list[0]) ** 2)* \
4378 1./(8*math.pi*(M ** 2)*self['s_factor'])4474 1./(8*math.pi*(M ** 2)*self['s_factor'])
@@ -4664,7 +4760,6 @@
4664 non_std_numbers = [(l.get('id'),l.get('number')) \4760 non_std_numbers = [(l.get('id'),l.get('number')) \
4665 for l in new_dia.get_final_legs()]4761 for l in new_dia.get_final_legs()]
46664762
4667
4668 # initial leg4763 # initial leg
4669 non_std_numbers.append((new_dia.get_initial_id(model), 1))4764 non_std_numbers.append((new_dia.get_initial_id(model), 1))
4670 import operator4765 import operator
@@ -4680,7 +4775,6 @@
4680 if non_std_numbers == std_numbers:4775 if non_std_numbers == std_numbers:
4681 self['diagrams'].append(new_dia)4776 self['diagrams'].append(new_dia)
4682 return4777 return
4683
4684 # Conversion from non_std_number to std_number4778 # Conversion from non_std_number to std_number
4685 converted_dict = dict([(num[1], std_numbers[i][1])\4779 converted_dict = dict([(num[1], std_numbers[i][1])\
4686 for i, num in enumerate(non_std_numbers)])4780 for i, num in enumerate(non_std_numbers)])
@@ -4728,6 +4822,7 @@
47284822
4729 # Add this standard diagram into diagrams4823 # Add this standard diagram into diagrams
4730 self['diagrams'].append(new_dia)4824 self['diagrams'].append(new_dia)
4825
47314826
47324827
4733 def reset_width_br(self):4828 def reset_width_br(self):
47344829
=== modified file 'models/__init__.py'
--- models/__init__.py 2020-02-11 10:57:44 +0000
+++ models/__init__.py 2020-09-22 07:07:51 +0000
@@ -43,14 +43,15 @@
43 return sys.modules[model_pos]43 return sys.modules[model_pos]
44 except Exception as error:44 except Exception as error:
45 pass45 pass
46 for p in os.environ['PYTHONPATH'].split(':'):46 if 'PYTHONPATH' in os.environ:
47 new_name = os.path.join(p, name)47 for p in os.environ['PYTHONPATH'].split(':'):
48 try:48 new_name = os.path.join(p, name)
49 return load_model(new_name, decay)49 try:
50 except Exception:50 return load_model(new_name, decay)
51 pass51 except Exception:
52 except ImportError:52 pass
53 pass53 except ImportError:
54 pass
54 elif path_split[-1] in sys.modules:55 elif path_split[-1] in sys.modules:
55 model_path = os.path.realpath(os.sep.join(path_split))56 model_path = os.path.realpath(os.sep.join(path_split))
56 sys_path = os.path.realpath(os.path.dirname(sys.modules[path_split[-1]].__file__))57 sys_path = os.path.realpath(os.path.dirname(sys.modules[path_split[-1]].__file__))
5758
=== modified file 'models/check_param_card.py'
--- models/check_param_card.py 2019-04-17 18:52:07 +0000
+++ models/check_param_card.py 2020-09-22 07:07:51 +0000
@@ -1323,7 +1323,7 @@
1323 logger.log(log,'For model consistency, update %s with id %s to value %s',1323 logger.log(log,'For model consistency, update %s with id %s to value %s',
1324 (block, id, 1.0), '$MG:BOLD') 1324 (block, id, 1.0), '$MG:BOLD')
1325 elif log:1325 elif log:
1326 logger.log(log,'For model consistency, update %s with id %s to value %s',1326 logger.log(log,'For model consistency, update %s with id %s to value %s' %
1327 (block, id, 1.0))1327 (block, id, 1.0))
13281328
1329 1329
13301330
=== modified file 'models/import_ufo.py'
--- models/import_ufo.py 2020-02-27 13:38:00 +0000
+++ models/import_ufo.py 2020-09-22 07:07:51 +0000
@@ -18,6 +18,7 @@
18import collections18import collections
19import fractions19import fractions
20import logging20import logging
21import math
21import os22import os
22import re23import re
23import sys24import sys
@@ -445,10 +446,27 @@
445446
446 def __init__(self, model, auto=False):447 def __init__(self, model, auto=False):
447 """ initialize empty list for particles/interactions """448 """ initialize empty list for particles/interactions """
448 449
449 if hasattr(model, '__arxiv__'):450 if hasattr(model, '__header__'):
450 logger.info('Please cite %s when using this model', model.__arxiv__, '$MG:color:BLACK')451 header = model.__header__
451 452 if len(header) > 500 or header.count('\n') > 5:
453 logger.debug("Too long header")
454 else:
455 logger.info("\n"+header)
456 else:
457 f =collections.defaultdict(lambda : 'n/a')
458 for key in ['author', 'version', 'email', 'arxiv']:
459 if hasattr(model, '__%s__' % key):
460 val = getattr(model, '__%s__' % key)
461 if 'Duhr' in val:
462 continue
463 f[key] = getattr(model, '__%s__' % key)
464
465 if len(f)>2:
466 logger.info("This model [version %(version)s] is provided by %(author)s (email: %(email)s). Please cite %(arxiv)s" % f, '$MG:color:BLACK')
467 elif hasattr(model, '__arxiv__'):
468 logger.info('Please cite %s when using this model', model.__arxiv__, '$MG:color:BLACK')
469
452 self.particles = base_objects.ParticleList()470 self.particles = base_objects.ParticleList()
453 self.interactions = base_objects.InteractionList()471 self.interactions = base_objects.InteractionList()
454 self.non_qcd_gluon_emission = 0 # vertex where a gluon is emitted withou QCD interaction472 self.non_qcd_gluon_emission = 0 # vertex where a gluon is emitted withou QCD interaction
@@ -1860,6 +1878,11 @@
1860 self.rule_card = check_param_card.ParamCardRule()1878 self.rule_card = check_param_card.ParamCardRule()
1861 self.restrict_card = None1879 self.restrict_card = None
1862 self.coupling_order_dict ={}1880 self.coupling_order_dict ={}
1881 self.autowidth = []
1882
1883 def modify_autowidth(self, cards, id):
1884 self.autowidth.append([int(id[0])])
1885 return math.log10(2*len(self.autowidth))
1863 1886
1864 def restrict_model(self, param_card, rm_parameter=True, keep_external=False,1887 def restrict_model(self, param_card, rm_parameter=True, keep_external=False,
1865 complex_mass_scheme=None):1888 complex_mass_scheme=None):
@@ -1879,7 +1902,8 @@
1879 # compute the value of all parameters1902 # compute the value of all parameters
1880 # Get the list of definition of model functions, parameter values. 1903 # Get the list of definition of model functions, parameter values.
1881 model_definitions = self.set_parameters_and_couplings(param_card, 1904 model_definitions = self.set_parameters_and_couplings(param_card,
1882 complex_mass_scheme=complex_mass_scheme)1905 complex_mass_scheme=complex_mass_scheme,
1906 auto_width=self.modify_autowidth)
1883 1907
1884 # Simplify conditional statements1908 # Simplify conditional statements
1885 logger.log(self.log_level, 'Simplifying conditional expressions')1909 logger.log(self.log_level, 'Simplifying conditional expressions')
@@ -1932,8 +1956,23 @@
1932 self['parameter_dict'][name] = 11956 self['parameter_dict'][name] = 1
1933 elif value == 0.000001e-99:1957 elif value == 0.000001e-99:
1934 self['parameter_dict'][name] = 01958 self['parameter_dict'][name] = 0
19351959
1936 1960 #
1961 # restore auto-width value
1962 #
1963 #for lhacode in self.autowidth:
1964 for parameter in self['parameters'][('external',)]:
1965 if parameter.lhablock.lower() == 'decay' and parameter.lhacode in self.autowidth:
1966 parameter.value = 'auto'
1967 if parameter.name in self['parameter_dict']:
1968 self['parameter_dict'][parameter.name] = 'auto'
1969 elif parameter.name.startswith('mdl_'):
1970 self['parameter_dict'][parameter.name[4:]] = 'auto'
1971 else:
1972 raise Exception
1973
1974
1975
1937 def locate_coupling(self):1976 def locate_coupling(self):
1938 """ create a dict couplings_name -> vertex or (particle, counterterm_key) """1977 """ create a dict couplings_name -> vertex or (particle, counterterm_key) """
1939 1978
@@ -2480,6 +2519,7 @@
2480 logger_mod.log(self.log_level,'remove parameters: %s' % (param))2519 logger_mod.log(self.log_level,'remove parameters: %s' % (param))
2481 data = self['parameters'][param_info[param]['dep']]2520 data = self['parameters'][param_info[param]['dep']]
2482 data.remove(param_info[param]['obj'])2521 data.remove(param_info[param]['obj'])
2522
24832523
2484 def optimise_interaction(self, interaction):2524 def optimise_interaction(self, interaction):
2485 2525
24862526
=== modified file 'models/model_reader.py'
--- models/model_reader.py 2020-06-21 18:48:13 +0000
+++ models/model_reader.py 2020-09-22 07:07:51 +0000
@@ -58,7 +58,8 @@
58 super(ModelReader, self).default_setup()58 super(ModelReader, self).default_setup()
5959
60 def set_parameters_and_couplings(self, param_card = None, scale=None,60 def set_parameters_and_couplings(self, param_card = None, scale=None,
61 complex_mass_scheme=None):61 complex_mass_scheme=None,
62 auto_width=None):
62 """Read a param_card and calculate all parameters and63 """Read a param_card and calculate all parameters and
63 couplings. Set values directly in the parameters and64 couplings. Set values directly in the parameters and
64 couplings, plus add new dictionary coupling_dict from65 couplings, plus add new dictionary coupling_dict from
@@ -84,6 +85,9 @@
84 raise MadGraph5Error("No such file %s" % param_card)85 raise MadGraph5Error("No such file %s" % param_card)
85 param_card_text = param_card86 param_card_text = param_card
86 param_card = card_reader.ParamCard(param_card)87 param_card = card_reader.ParamCard(param_card)
88 for param in param_card.get('decay'):
89 if str(param.value).lower() == 'auto':
90 param.value = auto_width(param_card, param.lhacode)
87 #misc.sprint(type(param_card), card_reader.ParamCard, isinstance(param_card, card_reader.ParamCard))91 #misc.sprint(type(param_card), card_reader.ParamCard, isinstance(param_card, card_reader.ParamCard))
88 #assert isinstance(param_card, card_reader.ParamCard),'%s is not a ParamCard: %s' % (type(param_card), isinstance(param_card, card_reader.ParamCard)) 92 #assert isinstance(param_card, card_reader.ParamCard),'%s is not a ParamCard: %s' % (type(param_card), isinstance(param_card, card_reader.ParamCard))
89 93
9094
=== modified file 'models/write_param_card.py'
--- models/write_param_card.py 2019-06-27 12:21:53 +0000
+++ models/write_param_card.py 2020-09-22 07:07:51 +0000
@@ -243,9 +243,8 @@
243 if info.startswith('mdl_'):243 if info.startswith('mdl_'):
244 info = info[4:]244 info = info[4:]
245 245
246 if param.value.imag != 0:246 if param.value != 'auto' and param.value.imag != 0:
247 raise ParamCardWriterError('All External Parameter should be real (not the case for %s)'%param.name)247 raise ParamCardWriterError('All External Parameter should be real (not the case for %s)'%param.name)
248
249248
250 # avoid to keep special value used to avoid restriction249 # avoid to keep special value used to avoid restriction
251 if param.value == 9.999999e-1:250 if param.value == 9.999999e-1:
@@ -257,6 +256,8 @@
257 lhacode=' '.join(['%3s' % key for key in param.lhacode])256 lhacode=' '.join(['%3s' % key for key in param.lhacode])
258 if lhablock != 'DECAY':257 if lhablock != 'DECAY':
259 text = """ %s %e # %s \n""" % (lhacode, param.value.real, info) 258 text = """ %s %e # %s \n""" % (lhacode, param.value.real, info)
259 elif param.value == 'auto':
260 text = '''DECAY %s auto # %s \n''' % (lhacode, info)
260 else:261 else:
261 text = '''DECAY %s %e # %s \n''' % (lhacode, param.value.real, info)262 text = '''DECAY %s %e # %s \n''' % (lhacode, param.value.real, info)
262 self.fsock.write(text) 263 self.fsock.write(text)
