Merge lp:~maddevelopers/mg5amcnlo/2.8.1.py3 into lp:mg5amcnlo/lts
- 2.8.1.py3
- Merge into series 2.0
Proposed by
Olivier Mattelaer
Status: | Merged |
---|---|
Merged at revision: | 290 |
Proposed branch: | lp:~maddevelopers/mg5amcnlo/2.8.1.py3 |
Merge into: | lp:mg5amcnlo/lts |
Diff against target: |
1112 lines (+365/-142) 23 files modified
MadSpin/interface_madspin.py (+0/-12) Template/LO/Source/dsample.f (+4/-0) UpdateNotes.txt (+7/-0) VERSION (+2/-2) bin/mg5_aMC (+14/-3) madgraph/interface/loop_interface.py (+2/-0) madgraph/interface/madevent_interface.py (+3/-0) madgraph/interface/madgraph_interface.py (+59/-47) madgraph/iolibs/export_fks.py (+4/-5) madgraph/iolibs/export_v4.py (+11/-8) madgraph/iolibs/group_subprocs.py (+2/-2) madgraph/madevent/gen_crossxhtml.py (+7/-18) madgraph/madevent/gen_ximprove.py (+1/-1) madgraph/madevent/sum_html.py (+23/-19) madgraph/various/banner.py (+28/-0) madgraph/various/lhe_parser.py (+2/-0) madgraph/various/misc.py (+30/-0) mg5decay/decay_objects.py (+101/-6) models/__init__.py (+9/-8) models/check_param_card.py (+1/-1) models/import_ufo.py (+47/-7) models/model_reader.py (+5/-1) models/write_param_card.py (+3/-2) |
To merge this branch: | bzr merge lp:~maddevelopers/mg5amcnlo/2.8.1.py3 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
MadTeam | Pending | ||
Review via email: mp+391101@code.launchpad.net |
Commit message
pass to 2.8.1
Description of the change
This is a pure bug fixing release.
Mainly fixing issues with python3 (comparison operators, model loading, model conversion, ...)
It also has a couple of important bug fixes, such as one for the auto-width computation.
I would also like to include this "feature" branch (containing better support for FxFx):
lp:~maddevelopers/mg5amcnlo/2.8.1_fxfx
But I'm still waiting approval on that one.
Olivier
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'MadSpin/interface_madspin.py' |
2 | --- MadSpin/interface_madspin.py 2020-08-20 15:33:24 +0000 |
3 | +++ MadSpin/interface_madspin.py 2020-09-22 07:07:51 +0000 |
4 | @@ -451,14 +451,6 @@ |
5 | except ValueError: |
6 | raise self.InvalidCmd('second argument should be a real number.') |
7 | |
8 | - elif args[0] == 'BW_effect': |
9 | - if args[1] in [0, False,'.false.', 'F', 'f', 'False', 'no']: |
10 | - args[1] = 0 |
11 | - elif args[1] in [1, True,'.true.', 'T', 't', 'True', 'yes']: |
12 | - args[1] = 1 |
13 | - else: |
14 | - raise self.InvalidCmd('second argument should be either T or F.') |
15 | - |
16 | elif args[0] == 'curr_dir': |
17 | if not os.path.isdir(args[1]): |
18 | raise self.InvalidCmd('second argument should be a path to a existing directory') |
19 | @@ -499,8 +491,6 @@ |
20 | opts = list(self.options.keys()) + ['seed', "spinmode"] |
21 | return self.list_completion(text, opts) |
22 | elif len(args) == 2: |
23 | - if args[1] == 'BW_effect': |
24 | - return self.list_completion(text, ['True', 'False']) |
25 | if args[1] == 'ms_dir': |
26 | return self.path_completion(text, '.', only_dirs = True) |
27 | elif args[1] == 'ms_dir': |
28 | @@ -517,8 +507,6 @@ |
29 | print('') |
30 | print('-- assign to a given option a given value') |
31 | print(' - set max_weight VALUE: pre-define the maximum_weight for the reweighting') |
32 | - print(' - set BW_effect True|False: [default:True] reshuffle the momenta to describe') |
33 | - print(' corrrectly the Breit-Wigner of the decayed particle') |
34 | print(' - set seed VALUE: fix the value of the seed to a given value.') |
35 | print(' by default use the current time to set the seed. random number are') |
36 | print(' generated by the python module random using the Mersenne Twister generator.') |
37 | |
38 | === modified file 'Template/LO/Source/dsample.f' |
39 | --- Template/LO/Source/dsample.f 2020-05-13 09:45:34 +0000 |
40 | +++ Template/LO/Source/dsample.f 2020-09-22 07:07:51 +0000 |
41 | @@ -453,6 +453,10 @@ |
42 | CUMULATED_TIMING = t_after - CUMULATED_TIMING |
43 | |
44 | if (N_EVALS.eq.0) then |
45 | + write(outUnit,*) '<lo_statistics> ' |
46 | + write(outUnit,*) '<cumulated_time>'//trim(toStr_real(CUMULATED_TIMING)) |
47 | + & //'</cumulated_time>' |
48 | + write(outUnit,*) '</lo_statistics>' |
49 | return |
50 | endif |
51 | |
52 | |
53 | === modified file 'UpdateNotes.txt' |
54 | --- UpdateNotes.txt 2020-08-21 08:59:01 +0000 |
55 | +++ UpdateNotes.txt 2020-09-22 07:07:51 +0000 |
56 | @@ -1,6 +1,13 @@ |
57 | Update notes for MadGraph5_aMC@NLO (in reverse time order) |
58 | |
59 | |
60 | +2.8.1(22/09/22): |
61 | + OM: Fix for the auto width for three body decay in presence of identical particles. |
62 | + OM: add support for __header__ in UFO model |
63 | + OM: allow restriction card to have auto-width |
64 | + OM: fixing some html link (removed ajax link forbidden by major web browser) |
65 | + OM: Various fix related to the python3 support |
66 | + - including more efficient model conversion method |
67 | 2.8.0 (21/08/20): |
68 | OM: pass to python3 by default |
69 | OM: For LO process, you can now set lpp1 and lpp2 to "4" for process with initial photon in order to get the |
70 | |
71 | === modified file 'VERSION' |
72 | --- VERSION 2020-08-21 08:59:01 +0000 |
73 | +++ VERSION 2020-09-22 07:07:51 +0000 |
74 | @@ -1,5 +1,5 @@ |
75 | -version = 2.8.0 |
76 | -date = 2020-08-21 |
77 | +version = 2.8.1 |
78 | +date = 2020-09-22 |
79 | |
80 | |
81 | |
82 | |
83 | === modified file 'bin/mg5_aMC' |
84 | --- bin/mg5_aMC 2020-06-21 18:48:13 +0000 |
85 | +++ bin/mg5_aMC 2020-09-22 07:07:51 +0000 |
86 | @@ -23,8 +23,15 @@ |
87 | |
88 | import sys |
89 | if sys.version_info[1] < 7: |
90 | - sys.exit('MadGraph5_aMC@NLO works only with python 2.7 or python 3.7 (and later).\n\ |
91 | - Please upgrate your version of python.') |
92 | + if sys.version_info[0] ==2: |
93 | + sys.exit("MadGraph5_aMC@NLO works only with python 2.7 or python 3.7 (and later).\n"+\ |
94 | + " You are currently using Python2.%s. Please use a more recent version of Python." % sys.version_info[1]) |
95 | + if sys.version_info[1] ==3: |
96 | + sys.exit("MadGraph5_aMC@NLO works only with python 2.7 or python 3.7 (and later).\n"+\ |
97 | + " You are currently using Python 3.%i. So please upgrade your version of Python." % sys.version_info[1] +\ |
98 | + " If you have python2.7 installed you need to run the code as\n"+\ |
99 | + " python27 ./bin/mg5_aMC \n") |
100 | + |
101 | try: |
102 | import six |
103 | except ImportError: |
104 | @@ -75,8 +82,12 @@ |
105 | import logging.config |
106 | import madgraph.interface.coloring_logging |
107 | |
108 | +if sys.version_info[0] ==2: |
109 | + logging.warning("\033[91mpython2 support will be removed in last quarter 2021. If you use python2 due to issue with Python3, please report them on https://bugs.launchpad.net/mg5amcnlo\033[0m") |
110 | + |
111 | + |
112 | if ' ' in os.getcwd(): |
113 | - logging.warning("Path does contains spaces. We advise that you change your current path to avoid to have space in the path.") |
114 | + logging.warning("\033[91mPath does contains spaces. We advise that you change your current path to avoid to have space in the path.\033[0m") |
115 | |
116 | try: |
117 | import readline |
118 | |
119 | === modified file 'madgraph/interface/loop_interface.py' |
120 | --- madgraph/interface/loop_interface.py 2020-03-25 14:57:21 +0000 |
121 | +++ madgraph/interface/loop_interface.py 2020-09-22 07:07:51 +0000 |
122 | @@ -803,6 +803,7 @@ |
123 | """Generate an amplitude for a given process and add to |
124 | existing amplitudes |
125 | """ |
126 | + |
127 | args = self.split_arg(line) |
128 | # Check the validity of the arguments |
129 | self.check_add(args) |
130 | @@ -907,6 +908,7 @@ |
131 | amp in myproc.get('amplitudes')]) |
132 | logger.info("Process generated in %0.3f s" % \ |
133 | (cpu_time2 - cpu_time1)) |
134 | + |
135 | |
136 | class LoopInterfaceWeb(mg_interface.CheckValidForCmdWeb, LoopInterface): |
137 | pass |
138 | |
139 | === modified file 'madgraph/interface/madevent_interface.py' |
140 | --- madgraph/interface/madevent_interface.py 2020-06-21 18:48:13 +0000 |
141 | +++ madgraph/interface/madevent_interface.py 2020-09-22 07:07:51 +0000 |
142 | @@ -3422,6 +3422,9 @@ |
143 | self.nb_refine += 1 |
144 | args = self.split_arg(line) |
145 | treshold=None |
146 | + |
147 | + |
148 | + |
149 | for a in args: |
150 | if a.startswith('--treshold='): |
151 | treshold = float(a.split('=',1)[1]) |
152 | |
153 | === modified file 'madgraph/interface/madgraph_interface.py' |
154 | --- madgraph/interface/madgraph_interface.py 2020-08-20 15:33:24 +0000 |
155 | +++ madgraph/interface/madgraph_interface.py 2020-09-22 07:07:51 +0000 |
156 | @@ -3082,7 +3082,7 @@ |
157 | existing amplitudes |
158 | or merge two model |
159 | """ |
160 | - |
161 | + |
162 | args = self.split_arg(line) |
163 | |
164 | |
165 | @@ -3183,47 +3183,51 @@ |
166 | |
167 | |
168 | self._curr_proc_defs.append(myprocdef) |
169 | - |
170 | - # Negative coupling order contraints can be given on at most one |
171 | - # coupling order (and either in squared orders or orders, not both) |
172 | - if len([1 for val in list(myprocdef.get('orders').values())+\ |
173 | - list(myprocdef.get('squared_orders').values()) if val<0])>1: |
174 | - raise MadGraph5Error("Negative coupling order constraints"+\ |
175 | - " can only be given on one type of coupling and either on"+\ |
176 | - " squared orders or amplitude orders, not both.") |
177 | - |
178 | - if myprocdef.get_ninitial() ==1 and myprocdef.get('squared_orders'): |
179 | - logger.warning('''Computation of interference term with decay is not 100% validated. |
180 | - Please check carefully your result. |
181 | - One suggestion is also to compare the generation of your process with and without |
182 | - set group_subprocesses True |
183 | - (to write Before the generate command) |
184 | - ''') |
185 | - |
186 | - cpu_time1 = time.time() |
187 | - |
188 | - # Generate processes |
189 | - if self.options['group_subprocesses'] == 'Auto': |
190 | - collect_mirror_procs = True |
191 | - else: |
192 | - collect_mirror_procs = self.options['group_subprocesses'] |
193 | - ignore_six_quark_processes = \ |
194 | - self.options['ignore_six_quark_processes'] if \ |
195 | - "ignore_six_quark_processes" in self.options \ |
196 | - else [] |
197 | - |
198 | - myproc = diagram_generation.MultiProcess(myprocdef, |
199 | - collect_mirror_procs = collect_mirror_procs, |
200 | - ignore_six_quark_processes = ignore_six_quark_processes, |
201 | - optimize=optimize, diagram_filter=diagram_filter) |
202 | - |
203 | - |
204 | - for amp in myproc.get('amplitudes'): |
205 | - if amp not in self._curr_amps: |
206 | - self._curr_amps.append(amp) |
207 | - elif warning_duplicate: |
208 | - raise self.InvalidCmd("Duplicate process %s found. Please check your processes." % \ |
209 | - amp.nice_string_processes()) |
210 | + |
211 | + try: |
212 | + # Negative coupling order contraints can be given on at most one |
213 | + # coupling order (and either in squared orders or orders, not both) |
214 | + if len([1 for val in list(myprocdef.get('orders').values())+\ |
215 | + list(myprocdef.get('squared_orders').values()) if val<0])>1: |
216 | + raise MadGraph5Error("Negative coupling order constraints"+\ |
217 | + " can only be given on one type of coupling and either on"+\ |
218 | + " squared orders or amplitude orders, not both.") |
219 | + |
220 | + if myprocdef.get_ninitial() ==1 and myprocdef.get('squared_orders'): |
221 | + logger.warning('''Computation of interference term with decay is not 100% validated. |
222 | + Please check carefully your result. |
223 | + One suggestion is also to compare the generation of your process with and without |
224 | + set group_subprocesses True |
225 | + (to write Before the generate command) |
226 | + ''') |
227 | + |
228 | + cpu_time1 = time.time() |
229 | + |
230 | + # Generate processes |
231 | + if self.options['group_subprocesses'] == 'Auto': |
232 | + collect_mirror_procs = True |
233 | + else: |
234 | + collect_mirror_procs = self.options['group_subprocesses'] |
235 | + ignore_six_quark_processes = \ |
236 | + self.options['ignore_six_quark_processes'] if \ |
237 | + "ignore_six_quark_processes" in self.options \ |
238 | + else [] |
239 | + |
240 | + myproc = diagram_generation.MultiProcess(myprocdef, |
241 | + collect_mirror_procs = collect_mirror_procs, |
242 | + ignore_six_quark_processes = ignore_six_quark_processes, |
243 | + optimize=optimize, diagram_filter=diagram_filter) |
244 | + |
245 | + |
246 | + for amp in myproc.get('amplitudes'): |
247 | + if amp not in self._curr_amps: |
248 | + self._curr_amps.append(amp) |
249 | + elif warning_duplicate: |
250 | + raise self.InvalidCmd( "Duplicate process %s found. Please check your processes." % \ |
251 | + amp.nice_string_processes()) |
252 | + except Exception: |
253 | + self._curr_proc_defs.pop(-1) |
254 | + raise |
255 | |
256 | # Reset _done_export, since we have new process |
257 | self._done_export = False |
258 | @@ -3329,9 +3333,13 @@ |
259 | if answer != 'y': |
260 | return |
261 | |
262 | - #Object_library (.iteritems() -> .items()) |
263 | + #Object_library |
264 | text = open(pjoin(model_dir, 'object_library.py')).read() |
265 | + #(.iteritems() -> .items()) |
266 | text = text.replace('.iteritems()', '.items()') |
267 | + # raise UFOError, "" -> raise UFOError() |
268 | + text = re.sub('raise (\w+)\s*,\s*["\']([^"]+)["\']', |
269 | + 'raise \g<1>("\g<2>")', text) |
270 | text = open(pjoin(model_dir, 'object_library.py'),'w').write(text) |
271 | |
272 | # write_param_card.dat -> copy the one of the sm model |
273 | @@ -5166,6 +5174,7 @@ |
274 | |
275 | # Reset _done_export, since we have new process |
276 | self._done_export = False |
277 | + self._curr_proc_defs.append(myprocdef) |
278 | |
279 | cpu_time2 = time.time() |
280 | |
281 | @@ -5372,20 +5381,20 @@ |
282 | string. Returns a ProcessDefinition.""" |
283 | |
284 | # Start with process number (identified by "@") and overall orders |
285 | - proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*((\w+\s*=\s*\d+\s*)*)$") |
286 | + proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*((\w+\s*\<?=\s*\d+\s*)*)$") |
287 | proc_number_re = proc_number_pattern.match(line) |
288 | overall_orders = {} |
289 | if proc_number_re: |
290 | proc_number = int(proc_number_re.group(2)) |
291 | line = proc_number_re.group(1) |
292 | if proc_number_re.group(3): |
293 | - order_pattern = re.compile("^(.*?)\s*(\w+)\s*=\s*(\d+)\s*$") |
294 | + order_pattern = re.compile("^(.*?)\s*(\w+)\s*\<?=\s*(\d+)\s*$") |
295 | order_line = proc_number_re.group(3) |
296 | order_re = order_pattern.match(order_line) |
297 | while order_re: |
298 | overall_orders[order_re.group(2)] = int(order_re.group(3)) |
299 | order_line = order_re.group(1) |
300 | - order_re = order_pattern.match(order_line) |
301 | + order_re = order_pattern.match(order_line) |
302 | logger.info(line) |
303 | |
304 | |
305 | @@ -6003,9 +6012,12 @@ |
306 | if six.PY3: |
307 | self.options['lhapdf_py3'] = pjoin(prefix,'lhapdf6_py3','bin', 'lhapdf-config') |
308 | self.exec_cmd('save options %s lhapdf_py3' % config_file) |
309 | + self.options['lhapdf'] = self.options['lhapdf_py3'] |
310 | else: |
311 | self.options['lhapdf_py2'] = pjoin(prefix,'lhapdf6','bin', 'lhapdf-config') |
312 | self.exec_cmd('save options %s lhapdf_py2' % config_file) |
313 | + self.options['lhapdf'] = self.options['lhapdf_py2'] |
314 | + |
315 | elif tool == 'lhapdf5': |
316 | self.options['lhapdf'] = pjoin(prefix,'lhapdf5','bin', 'lhapdf-config') |
317 | self.exec_cmd('save options %s lhapdf' % config_file, printcmd=False, log=False) |
318 | @@ -6801,7 +6813,7 @@ |
319 | data['last_check'] = time.time() |
320 | |
321 | #check if we need to update. |
322 | - if time.time() - data['last_check'] < update_delay: |
323 | + if time.time() - float(data['last_check']) < float(update_delay): |
324 | return |
325 | |
326 | logger.info('Checking if MG5 is up-to-date... (takes up to %ss)' % timeout) |
327 | |
328 | === modified file 'madgraph/iolibs/export_fks.py' |
329 | --- madgraph/iolibs/export_fks.py 2020-08-20 15:33:24 +0000 |
330 | +++ madgraph/iolibs/export_fks.py 2020-09-22 07:07:51 +0000 |
331 | @@ -16,7 +16,6 @@ |
332 | |
333 | from __future__ import absolute_import |
334 | from __future__ import print_function |
335 | -from distutils import dir_util |
336 | import glob |
337 | import logging |
338 | import os |
339 | @@ -99,8 +98,8 @@ |
340 | logger.info('initialize a new directory: %s' % \ |
341 | os.path.basename(dir_path)) |
342 | shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) |
343 | - # distutils.dir_util.copy_tree since dir_path already exists |
344 | - dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path) |
345 | + # misc.copytree since dir_path already exists |
346 | + misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path) |
347 | # Copy plot_card |
348 | for card in ['plot_card']: |
349 | if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): |
350 | @@ -3396,8 +3395,8 @@ |
351 | logger.info('initialize a new directory: %s' % \ |
352 | os.path.basename(dir_path)) |
353 | shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) |
354 | - # distutils.dir_util.copy_tree since dir_path already exists |
355 | - dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), |
356 | + # misc.copytree since dir_path already exists |
357 | + misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'), |
358 | dir_path) |
359 | # Copy plot_card |
360 | for card in ['plot_card']: |
361 | |
362 | === modified file 'madgraph/iolibs/export_v4.py' |
363 | --- madgraph/iolibs/export_v4.py 2020-08-20 15:33:24 +0000 |
364 | +++ madgraph/iolibs/export_v4.py 2020-09-22 07:07:51 +0000 |
365 | @@ -20,7 +20,6 @@ |
366 | |
367 | import copy |
368 | from six import StringIO |
369 | -from distutils import dir_util |
370 | import itertools |
371 | import fractions |
372 | import glob |
373 | @@ -255,8 +254,8 @@ |
374 | os.path.basename(self.dir_path)) |
375 | shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), |
376 | self.dir_path, True) |
377 | - # distutils.dir_util.copy_tree since dir_path already exists |
378 | - dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), |
379 | + # misc.copytree since dir_path already exists |
380 | + misc.copytree(pjoin(self.mgme_dir, 'Template/Common'), |
381 | self.dir_path) |
382 | # copy plot_card |
383 | for card in ['plot_card']: |
384 | @@ -269,8 +268,8 @@ |
385 | elif os.getcwd() == os.path.realpath(self.dir_path): |
386 | logger.info('working in local directory: %s' % \ |
387 | os.path.realpath(self.dir_path)) |
388 | - # distutils.dir_util.copy_tree since dir_path already exists |
389 | - dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), |
390 | + # misc.copytree since dir_path already exists |
391 | + misc.copytree(pjoin(self.mgme_dir, 'Template/LO'), |
392 | self.dir_path) |
393 | # for name in misc.glob('Template/LO/*', self.mgme_dir): |
394 | # name = os.path.basename(name) |
395 | @@ -279,8 +278,8 @@ |
396 | # files.cp(filename, pjoin(self.dir_path,name)) |
397 | # elif os.path.isdir(filename): |
398 | # shutil.copytree(filename, pjoin(self.dir_path,name), True) |
399 | - # distutils.dir_util.copy_tree since dir_path already exists |
400 | - dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), |
401 | + # misc.copytree since dir_path already exists |
402 | + misc.copytree(pjoin(self.mgme_dir, 'Template/Common'), |
403 | self.dir_path) |
404 | # Copy plot_card |
405 | for card in ['plot_card']: |
406 | @@ -903,7 +902,11 @@ |
407 | if hasattr(self, 'aloha_model'): |
408 | aloha_model = self.aloha_model |
409 | else: |
410 | - aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath'))) |
411 | + try: |
412 | + with misc.MuteLogger(['madgraph.models'], [60]): |
413 | + aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath'))) |
414 | + except ImportError: |
415 | + aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath')) |
416 | aloha_model.add_Lorentz_object(model.get('lorentz')) |
417 | |
418 | # Compute the subroutines |
419 | |
420 | === modified file 'madgraph/iolibs/group_subprocs.py' |
421 | --- madgraph/iolibs/group_subprocs.py 2020-08-20 15:33:24 +0000 |
422 | +++ madgraph/iolibs/group_subprocs.py 2020-09-22 07:07:51 +0000 |
423 | @@ -317,7 +317,7 @@ |
424 | "Need matrix elements to run find_mapping_diagrams" |
425 | |
426 | if max_tpropa == 0: |
427 | - max_tpropa = base_objects.Vertex.max_tpropa |
428 | + max_tpropa = int(base_objects.Vertex.max_tpropa) |
429 | |
430 | matrix_elements = self.get('matrix_elements') |
431 | model = matrix_elements[0].get('processes')[0].get('model') |
432 | @@ -363,7 +363,7 @@ |
433 | max(diagram.get_vertex_leg_numbers()) > minvert: |
434 | diagram_maps[ime].append(0) |
435 | continue |
436 | - if diagram.get_nb_t_channel() > max_tpropa: |
437 | + if diagram.get_nb_t_channel() > int(max_tpropa): |
438 | diagram_maps[ime].append(0) |
439 | continue |
440 | # Create the equivalent diagram, in the format |
441 | |
442 | === modified file 'madgraph/madevent/gen_crossxhtml.py' |
443 | --- madgraph/madevent/gen_crossxhtml.py 2020-06-21 18:48:13 +0000 |
444 | +++ madgraph/madevent/gen_crossxhtml.py 2020-09-22 07:07:51 +0000 |
445 | @@ -68,19 +68,6 @@ |
446 | } |
447 | return http.status!=404; |
448 | } |
449 | -function check_link(url,alt, id){ |
450 | - var obj = document.getElementById(id); |
451 | - if ( ! UrlExists(url)){ |
452 | - if ( ! UrlExists(alt)){ |
453 | - obj.href = url; |
454 | - return 1==1; |
455 | - } |
456 | - obj.href = alt; |
457 | - return 1 == 2; |
458 | - } |
459 | - obj.href = url; |
460 | - return 1==1; |
461 | -} |
462 | </script> |
463 | <H2 align=center> Results in the %(model)s for %(process)s </H2> |
464 | <HR> |
465 | @@ -1014,14 +1001,16 @@ |
466 | def special_link(self, link, level, name): |
467 | |
468 | id = '%s_%s_%s_%s' % (self['run_name'],self['tag'], level, name) |
469 | - |
470 | - return " <a id='%(id)s' href='%(link)s.gz' onClick=\"check_link('%(link)s.gz','%(link)s','%(id)s')\">%(name)s</a>" \ |
471 | + return " <a id='%(id)s' href='%(link)s.gz'>%(name)s</a>" \ |
472 | % {'link': link, 'id': id, 'name':name} |
473 | + #return " <a id='%(id)s' href='%(link)s.gz' onClick=\"check_link('%(link)s.gz','%(link)s','%(id)s')\">%(name)s</a>" \ |
474 | + # % {'link': link, 'id': id, 'name':name} |
475 | |
476 | def double_link(self, link1, link2, name, id): |
477 | - |
478 | - return " <a id='%(id)s' href='%(link1)s' onClick=\"check_link('%(link1)s','%(link2)s','%(id)s')\">%(name)s</a>" \ |
479 | - % {'link1': link1, 'link2':link2, 'id': id, 'name':name} |
480 | + return " <a id='%(id)s' href='%(link2)s'>%(name)s</a>" \ |
481 | + % {'link1': link1, 'link2':link2, 'id': id, 'name':name} |
482 | + #return " <a id='%(id)s' href='%(link2)s' onClick=\"check_link('%(link1)s','%(link2)s','%(id)s')\">%(name)s</a>" \ |
483 | + # % {'link1': link1, 'link2':link2, 'id': id, 'name':name} |
484 | |
485 | def get_links(self, level): |
486 | """ Get the links for a given level""" |
487 | |
488 | === modified file 'madgraph/madevent/gen_ximprove.py' |
489 | --- madgraph/madevent/gen_ximprove.py 2020-06-21 18:48:13 +0000 |
490 | +++ madgraph/madevent/gen_ximprove.py 2020-09-22 07:07:51 +0000 |
491 | @@ -993,7 +993,7 @@ |
492 | f.close() |
493 | |
494 | def increase_precision(self, rate=3): |
495 | - misc.sprint(rate) |
496 | + #misc.sprint(rate) |
497 | if rate < 3: |
498 | self.max_event_in_iter = 20000 |
499 | self.min_events = 7500 |
500 | |
501 | === modified file 'madgraph/madevent/sum_html.py' |
502 | --- madgraph/madevent/sum_html.py 2019-06-27 12:17:38 +0000 |
503 | +++ madgraph/madevent/sum_html.py 2020-09-22 07:07:51 +0000 |
504 | @@ -270,7 +270,7 @@ |
505 | # this can happen if we force maxweight |
506 | self.th_nunwgt = 0 # associated number of event with th_maxwgt |
507 | #(this is theoretical do not correspond to a number of written event) |
508 | - |
509 | + self.timing = 0 |
510 | return |
511 | |
512 | #@cluster.multiple_try(nb_try=5,sleep=20) |
513 | @@ -286,7 +286,7 @@ |
514 | |
515 | i=0 |
516 | found_xsec_line = False |
517 | - for line in finput: |
518 | + for line in finput: |
519 | # Exit as soon as we hit the xml part. Not elegant, but the part |
520 | # below should eventually be xml anyway. |
521 | if '<' in line: |
522 | @@ -347,7 +347,7 @@ |
523 | xml.append(line) |
524 | |
525 | if xml: |
526 | - self.parse_xml_results('\n'.join(xml)) |
527 | + self.parse_xml_results('\n'.join(xml)) |
528 | |
529 | # this is for amcatnlo: the number of events has to be read from another file |
530 | if self.nevents == 0 and self.nunwgt == 0 and isinstance(filepath, str) and \ |
531 | @@ -368,6 +368,12 @@ |
532 | self.run_statistics.load_statistics(statistics_node[0]) |
533 | except ValueError as IndexError: |
534 | logger.warning('Fail to read run statistics from results.dat') |
535 | + else: |
536 | + lo_statistics_node = dom.getElementsByTagName("lo_statistics")[0] |
537 | + timing = lo_statistics_node.getElementsByTagName('cumulated_time')[0] |
538 | + timing= timing.firstChild.nodeValue |
539 | + self.timing = 0.3 + float(timing) #0.3 is the typical latency of bash script/... |
540 | + |
541 | |
542 | def set_mfactor(self, value): |
543 | self.mfactor = int(value) |
544 | @@ -448,6 +454,7 @@ |
545 | self.nunwgt = sum([one.nunwgt for one in self]) |
546 | self.wgt = 0 |
547 | self.luminosity = min([0]+[one.luminosity for one in self]) |
548 | + self.timing = sum([one.timing for one in self]) |
549 | if update_statistics: |
550 | self.run_statistics.aggregate_statistics([_.run_statistics for _ in self]) |
551 | |
552 | @@ -463,6 +470,7 @@ |
553 | self.xsec = sum([one.xsec for one in self]) /nbjobs |
554 | self.xerrc = sum([one.xerrc for one in self]) /nbjobs |
555 | self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) /nbjobs |
556 | + self.timing = sum([one.timing for one in self]) #no average here |
557 | if error: |
558 | self.xerrc = error |
559 | self.xerru = error |
560 | @@ -547,7 +555,7 @@ |
561 | table_line_template = \ |
562 | """ |
563 | <tr><td align=right>%(P_title)s</td> |
564 | - <td align=right><a id="%(P_link)s" href=%(P_link)s onClick="check_link('%(P_link)s','%(mod_P_link)s','%(P_link)s')"> %(cross)s </a> </td> |
565 | + <td align=right><a id="%(P_link)s" href=%(P_link)s > %(cross)s </a> </td> |
566 | <td align=right> %(error)s</td> |
567 | <td align=right> %(events)s</td> |
568 | <td align=right> %(unweighted)s</td> |
569 | @@ -672,6 +680,10 @@ |
570 | line = '%s %s %s %s %s %s\n' % (i+1, self.ysec_iter[i], self.yerr_iter[i], |
571 | self.eff_iter[i], self.maxwgt_iter[i], self.yasec_iter[i]) |
572 | fsock.writelines(line) |
573 | + |
574 | + if self.timing: |
575 | + text = """<lo_statistics>\n<cumulated_time> %s </cumulated_time>\n</lo_statistics>""" |
576 | + fsock.writelines(text % self.timing) |
577 | |
578 | |
579 | |
580 | @@ -694,19 +706,6 @@ |
581 | } |
582 | return http.status!=404; |
583 | } |
584 | -function check_link(url,alt, id){ |
585 | - var obj = document.getElementById(id); |
586 | - if ( ! UrlExists(url)){ |
587 | - if ( ! UrlExists(alt)){ |
588 | - obj.href = alt; |
589 | - return true; |
590 | - } |
591 | - obj.href = alt; |
592 | - return false; |
593 | - } |
594 | - obj.href = url; |
595 | - return 1==1; |
596 | -} |
597 | </script> |
598 | """ |
599 | |
600 | @@ -716,7 +715,6 @@ |
601 | run = cmd.results.current['run_name'] |
602 | all = Combine_results(run) |
603 | |
604 | - |
605 | for Pdir in cmd.get_Pdir(): |
606 | P_comb = Combine_results(Pdir) |
607 | |
608 | @@ -759,7 +757,13 @@ |
609 | all.append(P_comb) |
610 | all.compute_values() |
611 | |
612 | - |
613 | + try: |
614 | + all_channels = sum([list(P) for P in all],[]) |
615 | + timings = sum(x.timing for x in all_channels) |
616 | + logger.info('sum of cpu time of last step: %s', misc.format_time(timings)) |
617 | + except Exception as error: |
618 | + logger.debug(str(error)) |
619 | + pass |
620 | |
621 | return all |
622 | |
623 | |
624 | === modified file 'madgraph/various/banner.py' |
625 | --- madgraph/various/banner.py 2020-08-21 08:59:01 +0000 |
626 | +++ madgraph/various/banner.py 2020-09-22 07:07:51 +0000 |
627 | @@ -3259,6 +3259,21 @@ |
628 | if abs(self['lpp1']) in [2, 3,4] and abs(self['lpp2']) in [2, 3,4] and not self['fixed_fac_scale']: |
629 | raise InvalidRunCard("Having both beam in elastic photon mode requires fixec_fac_scale to be on True [since this is use as cutoff]") |
630 | |
631 | + # check that ebeam is bigger than the associated mass. |
632 | + for i in [1,2]: |
633 | + if self['lpp%s' % i ] not in [1,2]: |
634 | + continue |
635 | + if self['mass_ion%i' % i] == -1: |
636 | + if self['ebeam%i' % i] < 0.938: |
637 | + if self['ebeam%i' %i] == 0: |
638 | + logger.warning("At rest proton mode set: Energy beam set to 0.938") |
639 | + self.set('ebeam%i' %i, 0.938) |
640 | + else: |
641 | + raise InvalidRunCard("Energy for beam %i lower than proton mass. Please fix this") |
642 | + elif self['ebeam%i' % i] < self['mass_ion%i' % i]: |
643 | + if self['ebeam%i' %i] == 0: |
644 | + logger.warning("At rest ion mode set: Energy beam set to %s" % self['mass_ion%i' % i]) |
645 | + self.set('ebeam%i' %i, self['mass_ion%i' % i]) |
646 | |
647 | def update_system_parameter_for_include(self): |
648 | |
649 | @@ -4241,6 +4256,19 @@ |
650 | raise InvalidRunCard("'rw_fscale' has two or more identical entries. They have to be all different for the code to work correctly.") |
651 | |
652 | |
653 | + # check that ebeam is bigger than the proton mass. |
654 | + for i in [1,2]: |
655 | + if self['lpp%s' % i ] not in [1,2]: |
656 | + continue |
657 | + |
658 | + if self['ebeam%i' % i] < 0.938: |
659 | + if self['ebeam%i' %i] == 0: |
660 | + logger.warning("At rest proton mode set: Energy beam set to 0.938") |
661 | + self.set('ebeam%i' %i, 0.938) |
662 | + else: |
663 | + raise InvalidRunCard("Energy for beam %i lower than proton mass. Please fix this") |
664 | + |
665 | + |
666 | def update_system_parameter_for_include(self): |
667 | |
668 | # set the pdg_for_cut fortran parameter |
669 | |
670 | === modified file 'madgraph/various/lhe_parser.py' |
671 | --- madgraph/various/lhe_parser.py 2020-06-21 18:48:13 +0000 |
672 | +++ madgraph/various/lhe_parser.py 2020-09-22 07:07:51 +0000 |
673 | @@ -2227,6 +2227,8 @@ |
674 | |
675 | if other is None: |
676 | return False |
677 | + if len(self) != len(other): |
678 | + return False |
679 | |
680 | for i,p in enumerate(self): |
681 | if p.E != other[i].E: |
682 | |
683 | === modified file 'madgraph/various/misc.py' |
684 | --- madgraph/various/misc.py 2020-06-21 18:48:13 +0000 |
685 | +++ madgraph/various/misc.py 2020-09-22 07:07:51 +0000 |
686 | @@ -27,6 +27,7 @@ |
687 | import optparse |
688 | import time |
689 | import shutil |
690 | +import stat |
691 | import traceback |
692 | import gzip as ziplib |
693 | from distutils.version import LooseVersion, StrictVersion |
694 | @@ -939,6 +940,35 @@ |
695 | |
696 | str_out = out.stdout.read().decode().strip() |
697 | return str_out |
698 | + |
699 | + |
700 | + |
701 | +def copytree(src, dst, symlinks = False, ignore = None): |
702 | + if not os.path.exists(dst): |
703 | + os.makedirs(dst) |
704 | + shutil.copystat(src, dst) |
705 | + lst = os.listdir(src) |
706 | + if ignore: |
707 | + excl = ignore(src, lst) |
708 | + lst = [x for x in lst if x not in excl] |
709 | + for item in lst: |
710 | + s = os.path.join(src, item) |
711 | + d = os.path.join(dst, item) |
712 | + if symlinks and os.path.islink(s): |
713 | + if os.path.lexists(d): |
714 | + os.remove(d) |
715 | + os.symlink(os.readlink(s), d) |
716 | + try: |
717 | + st = os.lstat(s) |
718 | + mode = stat.S_IMODE(st.st_mode) |
719 | + os.lchmod(d, mode) |
720 | + except: |
721 | + pass # lchmod not available |
722 | + elif os.path.isdir(s): |
723 | + copytree(s, d, symlinks, ignore) |
724 | + else: |
725 | + shutil.copy2(s, d) |
726 | + |
727 | |
728 | |
729 | @multiple_try() |
730 | |
731 | === modified file 'mg5decay/decay_objects.py' |
732 | --- mg5decay/decay_objects.py 2020-08-20 15:33:24 +0000 |
733 | +++ mg5decay/decay_objects.py 2020-09-22 07:07:51 +0000 |
734 | @@ -43,6 +43,7 @@ |
735 | from __future__ import print_function |
736 | import array |
737 | import cmath |
738 | +import collections |
739 | import copy |
740 | import itertools |
741 | import logging |
742 | @@ -1034,6 +1035,7 @@ |
743 | |
744 | # Group channels into amplitudes |
745 | self.group_channels_2_amplitudes(clevel, model, min_br) |
746 | + |
747 | |
748 | |
749 | def connect_channel_vertex(self, sub_channel, index, vertex, model): |
750 | @@ -1170,7 +1172,11 @@ |
751 | # Do not include the first leg (initial id) |
752 | if sorted([l.get('id') for l in amplt['process']['legs'][1:]])\ |
753 | == final_pid: |
754 | - amplt.add_std_diagram(channel) |
755 | + |
756 | + for symchan in channel.get_symmetric_channel(): |
757 | + amplt.add_std_diagram(symchan) |
758 | + |
759 | + |
760 | found = True |
761 | break |
762 | |
763 | @@ -1766,6 +1772,7 @@ |
764 | interaction = self.get('interaction_dict')[vertex['id']] |
765 | decay_parts = [p for p in interaction['particles']] |
766 | |
767 | + # avoid self decay |
768 | if len([1 for p in decay_parts if abs(p['pdg_code'])==abs(initpart['pdg_code'])]) >1: |
769 | self['invalid_Npoint'].append(vertex['id']) |
770 | return False |
771 | @@ -1808,15 +1815,23 @@ |
772 | |
773 | #check that all substructure are valid |
774 | #remove if any radiation and two times the same particle in a vertex |
775 | + # 2020: relaxed to forbid only the initial particle appearing twice in the vertex
776 | for v in proc['vertices']: |
777 | if any([get_mass(l)==0 for l in v.get('legs')]): |
778 | self['invalid_Npoint'].append(vertex['id']) |
779 | return False |
780 | - |
781 | - ids = set(abs(l['id']) for l in v.get('legs')) |
782 | - if len(ids) != len(vertex.get('legs')): |
783 | + init_pdg = [l['id'] for l in v.get('legs') if l['number'] ==1][0] |
784 | + nb_part = [1 for l in v.get('legs') if abs(l['id']) in [abs(init_pdg), abs(initpart.get('pdg_code'))]] |
785 | + if len(nb_part) > 1: |
786 | self['invalid_Npoint'].append(vertex['id']) |
787 | return False |
788 | + |
789 | + # before relaxation it was |
790 | + # seems to me to be always False |
791 | + #ids = set(abs(l['id']) for l in v.get('legs')) |
792 | + #if len(ids) != len(vertex.get('legs')): |
793 | + # self['invalid_Npoint'].append(vertex['id']) |
794 | + # return False |
795 | |
796 | # check onshell/offshell status |
797 | prev_mass = 0 |
798 | @@ -3376,6 +3391,85 @@ |
799 | self['fermionfactor'] = 1 |
800 | |
801 | |
802 | + def get_symmetric_channel(self, ignore=[]): |
803 | + |
804 | + if self['s_factor'] == 1: |
805 | + return [self] |
806 | + elif len(self['vertices']) == 1: |
807 | + return [self] |
808 | + elif len(self['final_legs']) == len(set(l['id'] for l in self['final_legs'])): |
809 | + return [self] |
810 | + |
811 | + # check if all symmetries are already handled:
812 | + if len(set(l['id'] for l in self['final_legs'] if l['id'] not in ignore)) ==\ |
813 | + len([ l['id'] for l in self['final_legs'] if l['id'] not in ignore]): |
814 | + return [self] |
815 | + |
816 | + nb_id = collections.defaultdict(int) |
817 | + for l in self['final_legs']: |
818 | + nb_id[l['id']] += 1 |
819 | + |
820 | + id_to_handle = [id for id in nb_id if nb_id[id] > 1 and id not in ignore] |
821 | + |
822 | + handling = id_to_handle[0] |
823 | + remain_id = id_to_handle[1:] |
824 | + out = [] |
825 | + |
826 | + numbers = [l.get('number') for l in self['final_legs'] if l.get('id') == handling] |
827 | + |
828 | + for new_numbers in itertools.permutations(numbers): |
829 | + mapping_id = dict([(o,n) for o,n in zip(numbers, new_numbers) if o!=n]) |
830 | + if not mapping_id: |
831 | + out.append(self) |
832 | + continue |
833 | + channel = copy.copy(self) |
834 | + channel['vertices'] = base_objects.VertexList() |
835 | + # (real) DiagramTag |
836 | + channel['tag'] = [] |
837 | + # IdentifyHelasTag |
838 | + channel['helastag'] = [] |
839 | + # the number of the corresponding helas calls |
840 | + channel['helas_number'] = None |
841 | + # diagram written by IdentifyHelasTag |
842 | + channel['std_diagram'] = None |
843 | + for l,vertex in enumerate(self['vertices']): |
844 | + new_vertex = copy.copy(vertex) |
845 | + new_vertex['legs'] = base_objects.LegList() |
846 | + min_id = 99 |
847 | + for leg in vertex['legs']: |
848 | + if leg['number'] in mapping_id: |
849 | + new_leg = copy.copy(leg) |
850 | + new_leg.set('number', mapping_id[leg['number']]) |
851 | + new_vertex['legs'].append(new_leg) |
852 | + else: |
853 | + new_vertex['legs'].append(leg) |
854 | + min_id = min(min_id, leg['number']) |
855 | + |
856 | + if min_id != new_vertex['legs'][-1]['number']: |
857 | + if l != len(self['vertices']) -1: |
858 | + mapping_id[new_vertex['legs'][-1]['number']] = min_id |
859 | + new_vertex['legs'][-1]['number'] = min_id |
860 | + channel['vertices'].append(new_vertex) |
861 | + out.append(channel) |
862 | + |
863 | + |
864 | + # do the recursion |
865 | + if len(remain_id) > 1: |
866 | + all_out = [] |
867 | + for d in out: |
868 | + all_out += d.get_symmetric_channel(ignore=ignore) |
869 | + return all_out |
870 | + else: |
871 | + return out |
872 | + |
873 | + |
874 | + |
875 | + |
876 | + |
877 | + |
878 | + |
879 | + |
880 | + |
881 | def filter(self, name, value): |
882 | """Filter for valid diagram property values.""" |
883 | |
884 | @@ -3494,6 +3588,7 @@ |
885 | tmp.sort() |
886 | if base == tmp: |
887 | return False |
888 | + |
889 | return True |
890 | |
891 | |
892 | @@ -4373,6 +4468,7 @@ |
893 | # of list. |
894 | if count != 1: |
895 | self['s_factor'] = self['s_factor'] * math.factorial(count) |
896 | + |
897 | return math.sqrt((M ** 2+mass_list[0] ** 2-mass_list[1] ** 2) ** 2-\ |
898 | (2* M *mass_list[0]) ** 2)* \ |
899 | 1./(8*math.pi*(M ** 2)*self['s_factor']) |
900 | @@ -4664,7 +4760,6 @@ |
901 | non_std_numbers = [(l.get('id'),l.get('number')) \ |
902 | for l in new_dia.get_final_legs()] |
903 | |
904 | - |
905 | # initial leg |
906 | non_std_numbers.append((new_dia.get_initial_id(model), 1)) |
907 | import operator |
908 | @@ -4680,7 +4775,6 @@ |
909 | if non_std_numbers == std_numbers: |
910 | self['diagrams'].append(new_dia) |
911 | return |
912 | - |
913 | # Conversion from non_std_number to std_number |
914 | converted_dict = dict([(num[1], std_numbers[i][1])\ |
915 | for i, num in enumerate(non_std_numbers)]) |
916 | @@ -4728,6 +4822,7 @@ |
917 | |
918 | # Add this standard diagram into diagrams |
919 | self['diagrams'].append(new_dia) |
920 | + |
921 | |
922 | |
923 | def reset_width_br(self): |
924 | |
925 | === modified file 'models/__init__.py' |
926 | --- models/__init__.py 2020-02-11 10:57:44 +0000 |
927 | +++ models/__init__.py 2020-09-22 07:07:51 +0000 |
928 | @@ -43,14 +43,15 @@ |
929 | return sys.modules[model_pos] |
930 | except Exception as error: |
931 | pass |
932 | - for p in os.environ['PYTHONPATH'].split(':'): |
933 | - new_name = os.path.join(p, name) |
934 | - try: |
935 | - return load_model(new_name, decay) |
936 | - except Exception: |
937 | - pass |
938 | - except ImportError: |
939 | - pass |
940 | + if 'PYTHONPATH' in os.environ: |
941 | + for p in os.environ['PYTHONPATH'].split(':'): |
942 | + new_name = os.path.join(p, name) |
943 | + try: |
944 | + return load_model(new_name, decay) |
945 | + except Exception: |
946 | + pass |
947 | + except ImportError: |
948 | + pass |
949 | elif path_split[-1] in sys.modules: |
950 | model_path = os.path.realpath(os.sep.join(path_split)) |
951 | sys_path = os.path.realpath(os.path.dirname(sys.modules[path_split[-1]].__file__)) |
952 | |
953 | === modified file 'models/check_param_card.py' |
954 | --- models/check_param_card.py 2019-04-17 18:52:07 +0000 |
955 | +++ models/check_param_card.py 2020-09-22 07:07:51 +0000 |
956 | @@ -1323,7 +1323,7 @@ |
957 | logger.log(log,'For model consistency, update %s with id %s to value %s', |
958 | (block, id, 1.0), '$MG:BOLD') |
959 | elif log: |
960 | - logger.log(log,'For model consistency, update %s with id %s to value %s', |
961 | + logger.log(log,'For model consistency, update %s with id %s to value %s' % |
962 | (block, id, 1.0)) |
963 | |
964 | |
965 | |
966 | === modified file 'models/import_ufo.py' |
967 | --- models/import_ufo.py 2020-02-27 13:38:00 +0000 |
968 | +++ models/import_ufo.py 2020-09-22 07:07:51 +0000 |
969 | @@ -18,6 +18,7 @@ |
970 | import collections |
971 | import fractions |
972 | import logging |
973 | +import math |
974 | import os |
975 | import re |
976 | import sys |
977 | @@ -445,10 +446,27 @@ |
978 | |
979 | def __init__(self, model, auto=False): |
980 | """ initialize empty list for particles/interactions """ |
981 | - |
982 | - if hasattr(model, '__arxiv__'): |
983 | - logger.info('Please cite %s when using this model', model.__arxiv__, '$MG:color:BLACK') |
984 | - |
985 | + |
986 | + if hasattr(model, '__header__'): |
987 | + header = model.__header__ |
988 | + if len(header) > 500 or header.count('\n') > 5: |
989 | + logger.debug("Too long header") |
990 | + else: |
991 | + logger.info("\n"+header) |
992 | + else: |
993 | + f =collections.defaultdict(lambda : 'n/a') |
994 | + for key in ['author', 'version', 'email', 'arxiv']: |
995 | + if hasattr(model, '__%s__' % key): |
996 | + val = getattr(model, '__%s__' % key) |
997 | + if 'Duhr' in val: |
998 | + continue |
999 | + f[key] = getattr(model, '__%s__' % key) |
1000 | + |
1001 | + if len(f)>2: |
1002 | + logger.info("This model [version %(version)s] is provided by %(author)s (email: %(email)s). Please cite %(arxiv)s" % f, '$MG:color:BLACK') |
1003 | + elif hasattr(model, '__arxiv__'): |
1004 | + logger.info('Please cite %s when using this model', model.__arxiv__, '$MG:color:BLACK') |
1005 | + |
1006 | self.particles = base_objects.ParticleList() |
1007 | self.interactions = base_objects.InteractionList() |
1008 | self.non_qcd_gluon_emission = 0 # vertex where a gluon is emitted withou QCD interaction |
1009 | @@ -1860,6 +1878,11 @@ |
1010 | self.rule_card = check_param_card.ParamCardRule() |
1011 | self.restrict_card = None |
1012 | self.coupling_order_dict ={} |
1013 | + self.autowidth = [] |
1014 | + |
1015 | + def modify_autowidth(self, cards, id): |
1016 | + self.autowidth.append([int(id[0])]) |
1017 | + return math.log10(2*len(self.autowidth)) |
1018 | |
1019 | def restrict_model(self, param_card, rm_parameter=True, keep_external=False, |
1020 | complex_mass_scheme=None): |
1021 | @@ -1879,7 +1902,8 @@ |
1022 | # compute the value of all parameters |
1023 | # Get the list of definition of model functions, parameter values. |
1024 | model_definitions = self.set_parameters_and_couplings(param_card, |
1025 | - complex_mass_scheme=complex_mass_scheme) |
1026 | + complex_mass_scheme=complex_mass_scheme, |
1027 | + auto_width=self.modify_autowidth) |
1028 | |
1029 | # Simplify conditional statements |
1030 | logger.log(self.log_level, 'Simplifying conditional expressions') |
1031 | @@ -1932,8 +1956,23 @@ |
1032 | self['parameter_dict'][name] = 1 |
1033 | elif value == 0.000001e-99: |
1034 | self['parameter_dict'][name] = 0 |
1035 | - |
1036 | - |
1037 | + |
1038 | + # |
1039 | + # restore auto-width value |
1040 | + # |
1041 | + #for lhacode in self.autowidth: |
1042 | + for parameter in self['parameters'][('external',)]: |
1043 | + if parameter.lhablock.lower() == 'decay' and parameter.lhacode in self.autowidth: |
1044 | + parameter.value = 'auto' |
1045 | + if parameter.name in self['parameter_dict']: |
1046 | + self['parameter_dict'][parameter.name] = 'auto' |
1047 | + elif parameter.name.startswith('mdl_'): |
1048 | + self['parameter_dict'][parameter.name[4:]] = 'auto' |
1049 | + else: |
1050 | + raise Exception |
1051 | + |
1052 | + |
1053 | + |
1054 | def locate_coupling(self): |
1055 | """ create a dict couplings_name -> vertex or (particle, counterterm_key) """ |
1056 | |
1057 | @@ -2480,6 +2519,7 @@ |
1058 | logger_mod.log(self.log_level,'remove parameters: %s' % (param)) |
1059 | data = self['parameters'][param_info[param]['dep']] |
1060 | data.remove(param_info[param]['obj']) |
1061 | + |
1062 | |
1063 | def optimise_interaction(self, interaction): |
1064 | |
1065 | |
1066 | === modified file 'models/model_reader.py' |
1067 | --- models/model_reader.py 2020-06-21 18:48:13 +0000 |
1068 | +++ models/model_reader.py 2020-09-22 07:07:51 +0000 |
1069 | @@ -58,7 +58,8 @@ |
1070 | super(ModelReader, self).default_setup() |
1071 | |
1072 | def set_parameters_and_couplings(self, param_card = None, scale=None, |
1073 | - complex_mass_scheme=None): |
1074 | + complex_mass_scheme=None, |
1075 | + auto_width=None): |
1076 | """Read a param_card and calculate all parameters and |
1077 | couplings. Set values directly in the parameters and |
1078 | couplings, plus add new dictionary coupling_dict from |
1079 | @@ -84,6 +85,9 @@ |
1080 | raise MadGraph5Error("No such file %s" % param_card) |
1081 | param_card_text = param_card |
1082 | param_card = card_reader.ParamCard(param_card) |
1083 | + for param in param_card.get('decay'): |
1084 | + if str(param.value).lower() == 'auto': |
1085 | + param.value = auto_width(param_card, param.lhacode) |
1086 | #misc.sprint(type(param_card), card_reader.ParamCard, isinstance(param_card, card_reader.ParamCard)) |
1087 | #assert isinstance(param_card, card_reader.ParamCard),'%s is not a ParamCard: %s' % (type(param_card), isinstance(param_card, card_reader.ParamCard)) |
1088 | |
1089 | |
1090 | === modified file 'models/write_param_card.py' |
1091 | --- models/write_param_card.py 2019-06-27 12:21:53 +0000 |
1092 | +++ models/write_param_card.py 2020-09-22 07:07:51 +0000 |
1093 | @@ -243,9 +243,8 @@ |
1094 | if info.startswith('mdl_'): |
1095 | info = info[4:] |
1096 | |
1097 | - if param.value.imag != 0: |
1098 | + if param.value != 'auto' and param.value.imag != 0: |
1099 | raise ParamCardWriterError('All External Parameter should be real (not the case for %s)'%param.name) |
1100 | - |
1101 | |
1102 | # avoid to keep special value used to avoid restriction |
1103 | if param.value == 9.999999e-1: |
1104 | @@ -257,6 +256,8 @@ |
1105 | lhacode=' '.join(['%3s' % key for key in param.lhacode]) |
1106 | if lhablock != 'DECAY': |
1107 | text = """ %s %e # %s \n""" % (lhacode, param.value.real, info) |
1108 | + elif param.value == 'auto': |
1109 | + text = '''DECAY %s auto # %s \n''' % (lhacode, info) |
1110 | else: |
1111 | text = '''DECAY %s %e # %s \n''' % (lhacode, param.value.real, info) |
1112 | self.fsock.write(text) |