Merge lp:~maddevelopers/mg5amcnlo/2.3.3_test into lp:mg5amcnlo/lts

Proposed by Olivier Mattelaer
Status: Rejected
Rejected by: Olivier Mattelaer
Proposed branch: lp:~maddevelopers/mg5amcnlo/2.3.3_test
Merge into: lp:mg5amcnlo/lts
Diff against target: 56958 lines (+10298/-38295) (has conflicts)
261 files modified
MadSpin/decay.py (+10/-3)
MadSpin/interface_madspin.py (+24/-11)
Template/LO/Source/dsample.f (+2/-2)
Template/LO/Source/kin_functions.f (+5/-1)
Template/LO/bin/internal/Gridpack/compile (+2/-2)
Template/LO/bin/internal/create_matching_plots.C (+10/-0)
Template/LO/bin/internal/create_matching_plots.sh (+1/-3)
Template/LO/bin/internal/plot_tree.C (+5/-1)
Template/LO/bin/internal/read_tree_files.C (+0/-12)
Template/MadWeight/Python/Info.py (+5/-5)
Template/MadWeight/Python/clean.py (+2/-2)
Template/NLO/Cards/run_card.dat (+2/-0)
Template/NLO/Cards/shower_card.dat (+6/-6)
Template/NLO/MCatNLO/Scripts/MCatNLO_MadFKS_PYTHIA8.Script (+1/-0)
Template/NLO/Source/run.inc (+3/-0)
Template/NLO/Source/run_config.inc (+1/-1)
Template/NLO/SubProcesses/ajob_template (+38/-99)
Template/NLO/SubProcesses/combine_results.sh (+0/-55)
Template/NLO/SubProcesses/combine_results_FO.sh (+0/-52)
Template/NLO/SubProcesses/combine_root.sh (+1/-1)
Template/NLO/SubProcesses/driver_mintFO.f (+91/-115)
Template/NLO/SubProcesses/driver_mintMC.f (+28/-10)
Template/NLO/SubProcesses/fks_Sij.f (+13/-3)
Template/NLO/SubProcesses/fks_singular.f (+97/-34)
Template/NLO/SubProcesses/genps_fks.f (+32/-15)
Template/NLO/SubProcesses/iproc_map.f (+25/-32)
Template/NLO/SubProcesses/leshouche_inc_chooser.f (+1/-1)
Template/NLO/SubProcesses/montecarlocounter.f (+3/-2)
Template/NLO/SubProcesses/setcuts.f (+0/-2)
Template/NLO/SubProcesses/setscales.f (+9/-3)
Template/NLO/SubProcesses/sumres.py (+0/-241)
Template/NLO/SubProcesses/symmetry_fks_v3.f (+3/-10)
Template/NLO/SubProcesses/write_ajob.f (+1/-1)
Template/loop_material/StandAlone/Cards/MadLoopParams.dat (+2/-2)
UpdateNotes.txt (+24/-1)
VERSION (+6/-0)
aloha/aloha_lib.py (+2/-2)
aloha/aloha_object.py (+2/-2)
aloha/aloha_writers.py (+1/-1)
aloha/create_aloha.py (+1/-1)
aloha/template_files/aloha_functions_loop.f (+7/-6)
aloha/template_files/wavefunctions.py (+3/-3)
bin/mg5 (+1/-1)
madgraph/core/base_objects.py (+164/-88)
madgraph/core/diagram_generation.py (+3/-2)
madgraph/core/drawing.py (+3/-3)
madgraph/core/helas_objects.py (+1/-1)
madgraph/fks/fks_base.py (+13/-1)
madgraph/interface/amcatnlo_interface.py (+14/-6)
madgraph/interface/amcatnlo_run_interface.py (+886/-620)
madgraph/interface/common_run_interface.py (+58/-54)
madgraph/interface/extended_cmd.py (+4/-4)
madgraph/interface/launch_ext_program.py (+4/-3)
madgraph/interface/loop_interface.py (+13/-9)
madgraph/interface/madevent_interface.py (+50/-7)
madgraph/interface/madgraph_interface.py (+737/-131)
madgraph/interface/master_interface.py (+2/-1)
madgraph/interface/reweight_interface.py (+78/-48)
madgraph/iolibs/drawing_eps.py (+13/-13)
madgraph/iolibs/export_cpp.py (+92/-45)
madgraph/iolibs/export_fks.py (+11/-3)
madgraph/iolibs/export_v4.py (+130/-60)
madgraph/iolibs/import_v4.py (+2/-2)
madgraph/iolibs/template_files/loop/check_sa.inc (+1/-0)
madgraph/iolibs/template_files/loop/check_sa_loop_induced.inc (+1/-0)
madgraph/iolibs/template_files/matrix_standalone_splitOrders_v4.inc (+1/-1)
madgraph/iolibs/template_files/matrix_standalone_v4.inc (+1/-1)
madgraph/iolibs/template_files/parton_lum_n_fks.inc (+5/-1)
madgraph/iolibs/template_files/pythia8/pythia8.2_main_example_cc.inc (+63/-0)
madgraph/iolibs/template_files/pythia8/pythia8.2_main_makefile.inc (+36/-0)
madgraph/iolibs/template_files/pythia8/pythia8.2_makefile.inc (+104/-0)
madgraph/iolibs/template_files/pythia8/pythia8_main_example_cc.inc (+2/-2)
madgraph/iolibs/template_files/pythia8/pythia8_model_parameters_cc.inc (+1/-1)
madgraph/iolibs/template_files/pythia8/pythia8_model_parameters_h.inc (+3/-3)
madgraph/iolibs/template_files/pythia8/pythia8_process_h.inc (+1/-1)
madgraph/iolibs/template_files/pythia8/pythia8_process_hel_amp_h.inc (+1/-1)
madgraph/iolibs/template_files/super_auto_dsig_group_v4.inc (+7/-4)
madgraph/iolibs/ufo_expression_parsers.py (+55/-10)
madgraph/loop/loop_base_objects.py (+1/-1)
madgraph/loop/loop_diagram_generation.py (+58/-13)
madgraph/loop/loop_exporters.py (+2/-2)
madgraph/madevent/gen_crossxhtml.py (+3/-4)
madgraph/madevent/gen_ximprove.py (+16/-1)
madgraph/madevent/sum_html.py (+21/-18)
madgraph/madweight/create_param.py (+3/-3)
madgraph/madweight/diagram_class.py (+3/-3)
madgraph/madweight/verif_event.py (+122/-122)
madgraph/various/banner.py (+55/-35)
madgraph/various/cluster.py (+154/-155)
madgraph/various/histograms.py (+2/-2)
madgraph/various/lhe_parser.py (+151/-22)
madgraph/various/misc.py (+90/-11)
madgraph/various/process_checks.py (+2240/-98)
mg5decay/decay_objects.py (+22/-6)
models/check_param_card.py (+236/-14)
models/import_ufo.py (+64/-11)
models/loop_qcd_qed_sm/.restrict_parallel_test_MB.dat (+65/-0)
models/loop_qcd_qed_sm/CT_couplings.py (+2/-2)
models/loop_qcd_qed_sm/CT_parameters.py (+113/-106)
models/loop_qcd_qed_sm/function_library.py (+19/-5)
models/loop_qcd_qed_sm/object_library.py (+2/-41)
models/loop_qcd_qed_sm/parameters.py (+7/-0)
models/loop_qcd_qed_sm/particles.py (+3/-3)
models/loop_qcd_qed_sm/restrict_with_b_mass.dat (+65/-0)
models/loop_qcd_qed_sm/restrict_with_b_mass_no_widths.dat (+65/-0)
models/loop_qcd_qed_sm_Gmu/CT_couplings.py (+13/-13)
models/loop_qcd_qed_sm_Gmu/CT_parameters.py (+222/-76)
models/loop_qcd_qed_sm_Gmu/function_library.py (+19/-4)
models/loop_qcd_qed_sm_Gmu/object_library.py (+5/-5)
models/loop_qcd_qed_sm_Gmu/parameters.py (+8/-1)
models/loop_qcd_qed_sm_Gmu/particles.py (+3/-3)
models/loop_qcd_qed_sm_Gmu/restrict_ckm.dat (+2/-2)
models/loop_qcd_qed_sm_Gmu/restrict_default.dat (+4/-4)
models/loop_qcd_qed_sm_Gmu/restrict_no_widths.dat (+1/-1)
models/loop_qcd_qed_sm_Gmu__CMS__/.restrict_parallel_test.dat (+0/-65)
models/loop_qcd_qed_sm_Gmu__CMS__/.restrict_parallel_test_WW.dat (+0/-66)
models/loop_qcd_qed_sm_Gmu__CMS__/.restrict_parallel_test_WZ.dat (+0/-66)
models/loop_qcd_qed_sm_Gmu__CMS__/.restrict_parallel_test_ZZ.dat (+0/-66)
models/loop_qcd_qed_sm_Gmu__CMS__/CT_couplings.py (+0/-7213)
models/loop_qcd_qed_sm_Gmu__CMS__/CT_parameters.py (+0/-600)
models/loop_qcd_qed_sm_Gmu__CMS__/CT_vertices.py (+0/-4849)
models/loop_qcd_qed_sm_Gmu__CMS__/__init__.py (+0/-27)
models/loop_qcd_qed_sm_Gmu__CMS__/coupling_orders.py (+0/-16)
models/loop_qcd_qed_sm_Gmu__CMS__/couplings.py (+0/-539)
models/loop_qcd_qed_sm_Gmu__CMS__/function_library.py (+0/-81)
models/loop_qcd_qed_sm_Gmu__CMS__/lorentz.py (+0/-361)
models/loop_qcd_qed_sm_Gmu__CMS__/object_library.py (+0/-313)
models/loop_qcd_qed_sm_Gmu__CMS__/parameters.py (+0/-1211)
models/loop_qcd_qed_sm_Gmu__CMS__/particles.py (+0/-381)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_ckm.dat (+0/-65)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_default.dat (+0/-65)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_no_widths.dat (+0/-65)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_parallel_test.dat (+0/-65)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_parallel_test_WW.dat (+0/-66)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_parallel_test_WZ.dat (+0/-66)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_parallel_test_ZZ.dat (+0/-66)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_with_b_mass.dat (+0/-65)
models/loop_qcd_qed_sm_Gmu__CMS__/restrict_with_b_mass_no_width.dat (+0/-65)
models/loop_qcd_qed_sm_Gmu__CMS__/vertices.py (+0/-1037)
models/loop_qcd_qed_sm_Gmu__CMS__/write_param_card.py (+0/-181)
models/loop_qcd_qed_sm__CMS__/.restrict_parallel_test.dat (+0/-65)
models/loop_qcd_qed_sm__CMS__/.restrict_parallel_test_MB.dat (+0/-65)
models/loop_qcd_qed_sm__CMS__/CT_couplings.py (+0/-7213)
models/loop_qcd_qed_sm__CMS__/CT_parameters.py (+0/-600)
models/loop_qcd_qed_sm__CMS__/CT_vertices.py (+0/-4849)
models/loop_qcd_qed_sm__CMS__/__init__.py (+0/-27)
models/loop_qcd_qed_sm__CMS__/coupling_orders.py (+0/-16)
models/loop_qcd_qed_sm__CMS__/couplings.py (+0/-539)
models/loop_qcd_qed_sm__CMS__/function_library.py (+0/-81)
models/loop_qcd_qed_sm__CMS__/lorentz.py (+0/-361)
models/loop_qcd_qed_sm__CMS__/object_library.py (+0/-366)
models/loop_qcd_qed_sm__CMS__/parameters.py (+0/-1225)
models/loop_qcd_qed_sm__CMS__/particles.py (+0/-381)
models/loop_qcd_qed_sm__CMS__/restrict_ckm.dat (+0/-65)
models/loop_qcd_qed_sm__CMS__/restrict_default.dat (+0/-65)
models/loop_qcd_qed_sm__CMS__/restrict_no_widths.dat (+0/-65)
models/loop_qcd_qed_sm__CMS__/restrict_parallel_test.dat (+0/-65)
models/loop_qcd_qed_sm__CMS__/restrict_with_b_mass.dat (+0/-65)
models/loop_qcd_qed_sm__CMS__/restrict_with_b_mass_no_widths.dat (+0/-65)
models/loop_qcd_qed_sm__CMS__/vertices.py (+0/-1037)
models/loop_qcd_qed_sm__CMS__/write_param_card.py (+0/-181)
models/model_reader.py (+11/-5)
models/write_param_card.py (+2/-2)
tests/acceptance_tests/test_cmd_amcatnlo.py (+17/-11)
tests/acceptance_tests/test_cmd_madevent.py (+1/-1)
tests/acceptance_tests/test_cmd_madloop.py (+225/-24)
tests/input_files/IOTestsComparison/ExportV4IOTest/export_matrix_element_v4_standalone/matrix.f (+1/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fks_loonly/%SubProcesses%P0_gg_ttx%parton_lum_0.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fks_loonly/%SubProcesses%P0_uux_ttx%parton_lum_0.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fks_loonly/%SubProcesses%P0_uxu_ttx%parton_lum_0.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_gg_ttx%parton_lum_1.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_gg_ttx%parton_lum_2.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_gg_ttx%parton_lum_3.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_gg_ttx%parton_lum_4.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_gg_ttx%parton_lum_5.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_uux_ttx%parton_lum_1.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_uux_ttx%parton_lum_2.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_uux_ttx%parton_lum_3.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_uxu_ttx%parton_lum_1.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_uxu_ttx%parton_lum_2.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_pptt_fksreal/%SubProcesses%P0_uxu_ttx%parton_lum_3.f (+5/-1)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%b_sf_001.f (+153/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%b_sf_002.f (+153/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%b_sf_003.f (+154/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%b_sf_004.f (+154/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%b_sf_005.f (+153/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%b_sf_006.f (+154/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%b_sf_007.f (+154/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%b_sf_008.f (+153/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born.f (+298/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born_conf.inc (+8/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born_decayBW.inc (+2/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born_hel.f (+151/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born_leshouche.inc (+8/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born_maxamps.inc (+3/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born_ngraphs.inc (+2/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born_nhel.inc (+3/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%born_props.inc (+6/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%coloramps.inc (+2/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%config_subproc_map.inc (+1/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%configs_and_props_decl.inc (+12/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%fks_info.inc (+46/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%get_color.f (+54/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%leshouche_decl.inc (+6/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%matrix_1.f (+194/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%nFKSconfigs.inc (+4/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%ncombs.inc (+2/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%nexternal.inc (+4/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%ngraphs.inc (+2/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%parton_lum_1.f (+98/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%parton_lum_chooser.f (+21/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%pmass.inc (+5/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%real_from_born_configs.inc (+5/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%real_me_chooser.f (+20/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%sborn_sf.f (+45/-0)
tests/input_files/IOTestsComparison/IOExportFKSTest/test_tdecay_fksreal/%SubProcesses%P0_t_budx%sborn_sf_dum.f (+13/-0)
tests/input_files/IOTestsComparison/IOExportV4IOTest/export_matrix_element_v4_madevent_group/super_auto_dsig.f (+11/-5)
tests/input_files/IOTestsComparison/IOExportV4IOTest/export_matrix_element_v4_standalone/matrix.f (+1/-1)
tests/input_files/IOTestsComparison/MadLoop_output_from_the_interface/TIR_output/%ggttx_IOTest%SubProcesses%MadLoopCommons.f (+3/-3)
tests/input_files/IOTestsComparison/MadLoop_output_from_the_interface/TIR_output/%ggttx_IOTest%SubProcesses%P0_gg_ttx%check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/TestCmdMatchBox/MatchBoxOutput/%TEST%SubProcesses%P1_uux_uux%check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/long_ML_SMQCD_default/dux_mumvmxg/%..%..%Source%MODEL%model_functions.f (+41/-6)
tests/input_files/IOTestsComparison/long_ML_SMQCD_default/dux_mumvmxg/%..%..%Source%MODEL%model_functions.inc (+10/-0)
tests/input_files/IOTestsComparison/long_ML_SMQCD_default/dux_mumvmxg/check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/long_ML_SMQCD_default/gg_wmtbx/%..%..%Source%MODEL%model_functions.f (+41/-6)
tests/input_files/IOTestsComparison/long_ML_SMQCD_default/gg_wmtbx/%..%..%Source%MODEL%model_functions.inc (+10/-0)
tests/input_files/IOTestsComparison/long_ML_SMQCD_default/gg_wmtbx/check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/long_ML_SMQCD_optimized/dux_mumvmxg/%..%..%Source%MODEL%model_functions.f (+41/-6)
tests/input_files/IOTestsComparison/long_ML_SMQCD_optimized/dux_mumvmxg/%..%..%Source%MODEL%model_functions.inc (+10/-0)
tests/input_files/IOTestsComparison/long_ML_SMQCD_optimized/dux_mumvmxg/check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/long_ML_SMQCD_optimized/gg_wmtbx/%..%..%Source%MODEL%model_functions.f (+41/-6)
tests/input_files/IOTestsComparison/long_ML_SMQCD_optimized/gg_wmtbx/%..%..%Source%MODEL%model_functions.inc (+10/-0)
tests/input_files/IOTestsComparison/long_ML_SMQCD_optimized/gg_wmtbx/check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/short_ML_SMQCD_LoopInduced/gg_hh/%..%..%Source%MODEL%model_functions.f (+41/-6)
tests/input_files/IOTestsComparison/short_ML_SMQCD_LoopInduced/gg_hh/%..%..%Source%MODEL%model_functions.inc (+10/-0)
tests/input_files/IOTestsComparison/short_ML_SMQCD_LoopInduced/gg_hh/check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/short_ML_SMQCD_default/ddx_ttx/check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/short_ML_SMQCD_default/gg_ttx/%..%..%Source%MODEL%model_functions.f (+41/-6)
tests/input_files/IOTestsComparison/short_ML_SMQCD_default/gg_ttx/%..%..%Source%MODEL%model_functions.inc (+10/-0)
tests/input_files/IOTestsComparison/short_ML_SMQCD_default/gg_ttx/check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/short_ML_SMQCD_optimized/ddx_ttx/check_sa.f (+1/-0)
tests/input_files/IOTestsComparison/short_ML_SMQCD_optimized/gg_ttx/%..%..%Source%MODEL%model_functions.f (+41/-6)
tests/input_files/IOTestsComparison/short_ML_SMQCD_optimized/gg_ttx/%..%..%Source%MODEL%model_functions.inc (+10/-0)
tests/input_files/IOTestsComparison/short_ML_SMQCD_optimized/gg_ttx/check_sa.f (+1/-0)
tests/input_files/LoopSMEWTest/CT_parameters.py (+3/-3)
tests/input_files/LoopSMEWTest/function_library.py (+2/-2)
tests/input_files/LoopSMEWTest/object_library.py (+27/-27)
tests/parallel_tests/test_ML5EW.py (+112/-2)
tests/time_db (+214/-211)
tests/unit_tests/core/test_base_objects.py (+13/-11)
tests/unit_tests/core/test_drawing.py (+5/-3)
tests/unit_tests/interface/test_edit_card.py (+8/-2)
tests/unit_tests/iolibs/test_export_cpp.py (+2/-1)
tests/unit_tests/iolibs/test_export_fks.py (+4/-0)
tests/unit_tests/loop/test_import_LoopUFOModel.py (+10/-9)
tests/unit_tests/various/test_check_param_card.py (+47/-0)
tests/unit_tests/various/test_decay.py (+6/-6)
vendor/CutTools/src/avh/avh_olo.f90 (+1/-1)
vendor/SMWidth/param_card.dat (+0/-65)
vendor/SMWidth/param_card_Gmu.dat (+0/-65)
vendor/SMWidth/param_card_MZ.dat (+0/-65)
Text conflict in UpdateNotes.txt
Text conflict in VERSION
To merge this branch: bzr merge lp:~maddevelopers/mg5amcnlo/2.3.3_test
Reviewer: Olivier Mattelaer (status: Disapprove)
Review via email: mp+275037@code.launchpad.net

Description of the change

This is just a test to see whether the procedure that I applied fixes the conflicts.
I will not perform this merge (if it works, I will push this into 2.3.3).
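
For reference, the conflict check can be reproduced locally with bzr; a minimal sketch (assuming a working bzr client with access to the two branches named above, and using the commit message from the unmerged revision below):

    # grab the merge target (lts) and attempt the proposed merge
    bzr branch lp:mg5amcnlo/lts mg5amcnlo-lts
    cd mg5amcnlo-lts
    bzr merge lp:~maddevelopers/mg5amcnlo/2.3.3_test
    # list the conflicted files (expected here: UpdateNotes.txt and VERSION)
    bzr conflicts
    # after editing the conflicted files by hand, mark them resolved and commit
    bzr resolve UpdateNotes.txt VERSION
    bzr commit -m "merge with 2.3.2.2"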

Olivier Mattelaer (olivier-mattelaer) wrote:

The test seems successful.
So I'm closing it and going to apply it to 2.3.3.

review: Disapprove

Unmerged revisions

326. By Olivier Mattelaer

merge with 2.3.2.2

Preview Diff

1=== modified file 'MadSpin/__init__.py' (properties changed: -x to +x)
2=== modified file 'MadSpin/decay.py'
3--- MadSpin/decay.py 2015-08-20 10:00:00 +0000
4+++ MadSpin/decay.py 2015-10-20 14:02:27 +0000
5@@ -906,7 +906,7 @@
6 if init[0] in self.banner.param_card['decay'].decay_table:
7 br *= self.banner.param_card['decay'].decay_table[init[0]].get(lhaid).value
8 br *= self.get_br(decay)
9- else:
10+ elif -init[0] in self.banner.param_card['decay'].decay_table:
11 init = -init[0]
12 lhaid=[x if self.model.get_particle(x)['self_antipart'] else -x
13 for x in final]
14@@ -914,6 +914,13 @@
15 lhaid = tuple([len(final)] + lhaid)
16 br *= self.banner.param_card['decay'].decay_table[init].get(lhaid).value
17 br *= self.get_br(decay)
18+ elif init[0] not in self.decay_ids and -init[0] not in self.decay_ids:
19+ logger.warning("No Branching ratio applied for %s. Please check if this is expected" % init[0])
20+ br *= self.get_br(decay)
21+ else:
22+ raise MadGraph5Error,"No valid decay for %s. No 2 body decay for that particle. (three body are not supported by MadSpin)" % init[0]
23+
24+
25
26 for decays in ids.values():
27 if len(decays) == 1:
28@@ -929,7 +936,7 @@
29 except ValueError:
30 break
31 br /= math.factorial(nb)
32-
33+
34 return br
35
36
37@@ -1013,7 +1020,7 @@
38 self[tag]['decaying'] = tuple(decaying)
39
40 # sanity check
41- assert self[tag]['total_br'] <= 1.01, self[tag]['total_br']
42+ assert self[tag]['total_br'] <= 1.01, "wrong BR for %s: %s " % (tag,self[tag]['total_br'])
43
44
45
46
47=== modified file 'MadSpin/interface_madspin.py' (properties changed: -x to +x)
48--- MadSpin/interface_madspin.py 2015-08-04 14:28:22 +0000
49+++ MadSpin/interface_madspin.py 2015-10-20 14:02:27 +0000
50@@ -34,6 +34,7 @@
51 import madgraph.interface.master_interface as master_interface
52 import madgraph.various.misc as misc
53 import madgraph.iolibs.files as files
54+import madgraph.iolibs.export_v4 as export_v4
55 import madgraph.various.banner as banner
56 import madgraph.various.lhe_parser as lhe_parser
57
58@@ -229,15 +230,24 @@
59 args.remove('--bypass_check')
60 bypass_check = True
61
62- if len(args) != 2:
63+ if len(args) == 1:
64+ logger.warning("""No param_card defined for the new model. We will use the default one but this might completely wrong.""")
65+ elif len(args) != 2:
66 return self.InvalidCmd, 'import model requires two arguments'
67
68 model_name = args[0]
69- card = args[1]
70- if not os.path.exists(card):
71- raise self.InvalidCmd('%s: no such file' % card)
72-
73 self.load_model(model_name, False, False)
74+
75+ if len(args) == 2:
76+ card = args[1]
77+ if not os.path.exists(card):
78+ raise self.InvalidCmd('%s: no such file' % card)
79+ else:
80+ card = "madspin_param_card.dat"
81+ export_v4.UFO_model_to_mg4.create_param_card_static(self.model,
82+ card, rule_card_path=None)
83+
84+
85
86 #Check the param_card
87 if not bypass_check:
88@@ -255,11 +265,15 @@
89 if diff:
90 raise self.InvalidCmd('''Original param_card differs on some parameters:
91 %s
92- Due to those preferences, we prefer not to proceed. If you are sure about what you are doing,
93+ Due to those differences, we prefer not to proceed. If you are sure about what you are doing,
94 you can use the command \'import model MODELNAME PARAM_CARD_PATH --bypass_check\''''
95 % diff.replace('\n','\n '))
96+
97+
98
99- #OK load the new param_card
100+ #OK load the new param_card (but back up the old one)
101+ if 'slha' in self.banner:
102+ self.banner['slha_original'] = self.banner['slha']
103 self.banner['slha'] = open(card).read()
104 if hasattr(self.banner, 'param_card'):
105 del self.banner.param_card
106@@ -749,7 +763,7 @@
107 # 2. Generate the events requested
108 with misc.MuteLogger(["madgraph", "madevent", "ALOHA", "cmdprint"], [50,50,50,50]):
109 mg5 = self.mg5cmd
110- modelpath = self.model.get('modelpath')
111+ modelpath = self.model.get('modelpath+restriction')
112 mg5.exec_cmd("import model %s" % modelpath)
113 to_event = {}
114 for pdg, nb_needed in to_decay.items():
115@@ -923,12 +937,11 @@
116 #base_model = import_ufo.import_model(model_path)
117
118 # Import model
119- base_model = import_ufo.import_model(name, decay=True)
120+ base_model = import_ufo.import_model(name, decay=True,
121+ complex_mass_scheme=complex_mass)
122
123 if use_mg_default:
124 base_model.pass_particles_name_in_mg_default()
125- if complex_mass:
126- base_model.change_mass_to_complex_scheme()
127
128 self.model = base_model
129 self.mg5cmd._curr_model = self.model
130
131=== modified file 'Template/LO/Source/dsample.f'
132--- Template/LO/Source/dsample.f 2015-06-08 16:01:11 +0000
133+++ Template/LO/Source/dsample.f 2015-10-20 14:02:27 +0000
134@@ -1608,7 +1608,7 @@
135 if (iteration .eq. cur_it) then
136 c Add the current point to the DiscreteSamplerGrid
137 call add_entry_to_discrete_dimensions(wgt)
138- if (kn.eq.0) then
139+ if (kn.eq.0.and.(iteration.eq.1.or.use_cut.eq.-2)) then
140 ! ensure that all cumulative variable are at zero (usefull for reset)
141 twgt1 = 0d0 !
142 iavg = 0 !Vars for averging to increase err estimate
143@@ -2156,7 +2156,7 @@
144 c $ access='append',err=129)
145 c write(22, 80) real(tmean), real(tsigma), real(chi2)
146 c 129 close(22)
147- tsigma = tsigma*sqrt(chi2) !This gives the 68% confidence cross section
148+ tsigma = tsigma*sqrt(max(0d0,chi2)) !This gives the 68% confidence cross section
149 cur_it = itm+20
150 return
151 endif
152
153=== modified file 'Template/LO/Source/kin_functions.f'
154--- Template/LO/Source/kin_functions.f 2014-09-11 15:22:26 +0000
155+++ Template/LO/Source/kin_functions.f 2015-10-20 14:02:27 +0000
156@@ -127,7 +127,11 @@
157 c pm=dsqrt(p(1)**2+p(2)**2+p(3)**2)
158
159 pm = p(0)
160- rap = .5d0*dlog((pm+p(3))/(pm-p(3)))+cm_rap
161+ if (pm.gt.p(3)) then
162+ rap = .5d0*dlog((pm+p(3))/(pm-p(3)))+cm_rap
163+ else
164+ rap = -1d99
165+ endif
166 end
167 DOUBLE PRECISION FUNCTION rap2(p)
168 c************************************************************************
169
170=== modified file 'Template/LO/bin/internal/Gridpack/compile'
171--- Template/LO/bin/internal/Gridpack/compile 2013-11-13 02:21:10 +0000
172+++ Template/LO/bin/internal/Gridpack/compile 2015-10-20 14:02:27 +0000
173@@ -37,8 +37,8 @@
174 fi
175
176 # Check for LHAPDF
177-c=`awk '/^[^#].*=.*pdlabel/{print $1}' Cards/run_card.dat`
178-if [[ $c == "'lhapdf'" ]]; then
179+c=`awk '/^[^#].*=[^#!]*pdlabel/{print $1}' Cards/run_card.dat`
180+if [[ $c == "'lhapdf'" || $c == "lhapdf" ]]; then
181 echo Using LHAPDF interface!
182 export lhapdf=true
183 else
184
185=== renamed file 'Template/LO/bin/internal/addmasses.py' => 'Template/LO/bin/internal/addmasses_optional.py'
186=== modified file 'Template/LO/bin/internal/create_matching_plots.C'
187--- Template/LO/bin/internal/create_matching_plots.C 2011-06-29 14:15:21 +0000
188+++ Template/LO/bin/internal/create_matching_plots.C 2015-10-20 14:02:27 +0000
189@@ -1,4 +1,14 @@
190 {
191+ const char* file="pythia.root";
192+ TFile *f = new TFile(file,"RECREATE");
193+ TTree *events = new TTree("events","Events");
194+ Long64_t nlines1 = events->ReadFile("events.tree","Npart:DJR1:DJR2:DJR3:DJR4:PTISR:PTFSR:PT2MX:NCJET:IFILE");
195+ cout << "Found "<< nlines1 << " events"<<endl;
196+ TTree *xsecs = new TTree("xsecs","Xsecs");
197+ Long64_t nlines2 = xsecs->ReadFile("xsecs.tree","Xsecfact");
198+ cout << "Found "<< nlines2 << " files"<<endl;
199+ f->Write();
200+
201 gROOT->ProcessLine(".x ../bin/internal/plot_tree.C(\"DJR1\")");
202 gROOT->ProcessLine(".x ../bin/internal/plot_tree.C(\"DJR2\")");
203 gROOT->ProcessLine(".x ../bin/internal/plot_tree.C(\"DJR3\")");
204
205=== modified file 'Template/LO/bin/internal/create_matching_plots.sh'
206--- Template/LO/bin/internal/create_matching_plots.sh 2012-07-20 06:20:21 +0000
207+++ Template/LO/bin/internal/create_matching_plots.sh 2015-10-20 14:02:27 +0000
208@@ -24,9 +24,7 @@
209 exit
210 fi
211
212-echo Running root
213-root -q -b -l ../bin/internal/read_tree_files.C &> /dev/null
214-echo Creating plots
215+echo Creating matching plots
216 root -q -b -l ../bin/internal/create_matching_plots.C &> /dev/null
217 mv pythia.root $1/$2_pythia.root
218
219
220=== modified file 'Template/LO/bin/internal/plot_tree.C'
221--- Template/LO/bin/internal/plot_tree.C 2014-09-24 19:41:18 +0000
222+++ Template/LO/bin/internal/plot_tree.C 2015-10-20 14:02:27 +0000
223@@ -1,7 +1,11 @@
224 #include <string>
225
226-bool plot_tree(char* quantity,char* plotdim="(100,0.,3.)",bool log=true)
227+bool plot_tree(const char* quantity, const char* plotdim = 0, bool log = true)
228 {
229+ const char* plotdim_default = "(100,0.,3.)";
230+ if(plotdim == 0){
231+ plotdim = plotdim_default;
232+ }
233 char tmp1[250];
234 char tmp2[300];
235 char tmp3[100];
236
237=== removed file 'Template/LO/bin/internal/read_tree_files.C'
238--- Template/LO/bin/internal/read_tree_files.C 2011-06-29 14:15:21 +0000
239+++ Template/LO/bin/internal/read_tree_files.C 1970-01-01 00:00:00 +0000
240@@ -1,12 +0,0 @@
241-{
242- char* file="pythia.root";
243- TFile *f = new TFile(file,"RECREATE");
244- TTree *events = new TTree("events","Events");
245- Long64_t nlines1 = events->ReadFile("events.tree","Npart:DJR1:DJR2:DJR3:DJR4:PTISR:PTFSR:PT2MX:NCJET:IFILE");
246- cout << "Found "<< nlines1 << " events"<<endl;
247- TTree *xsecs = new TTree("xsecs","Xsecs");
248- Long64_t nlines2 = xsecs->ReadFile("xsecs.tree","Xsecfact");
249- cout << "Found "<< nlines2 << " files"<<endl;
250-
251- f->Write();
252-}
253
254=== modified file 'Template/MadWeight/Python/Info.py'
255--- Template/MadWeight/Python/Info.py 2011-10-25 14:42:02 +0000
256+++ Template/MadWeight/Python/Info.py 2015-10-20 14:02:27 +0000
257@@ -1,11 +1,11 @@
258 #!/usr/bin/env python
259
260 def giveInfo(class_):
261- if type(class_)!=str:
262- class_=class_.__class__.__name__
263- for info in dir(eval(class_)):
264-
265- print class_+'.'+info+' : ',eval(class_+'.'+info+'.__doc__')
266+ if type(class_)!=str:
267+ class_=class_.__class__.__name__
268+ for info in dir(eval(class_)):
269+
270+ print class_+'.'+info+' : ',eval(class_+'.'+info+'.__doc__')
271
272
273
274
275=== modified file 'Template/MadWeight/Python/clean.py'
276--- Template/MadWeight/Python/clean.py 2011-10-25 14:42:02 +0000
277+++ Template/MadWeight/Python/clean.py 2015-10-20 14:02:27 +0000
278@@ -70,7 +70,7 @@
279 ls=os.listdir('.')
280 for element in ls:
281 if os.path.isdir(element) and element[0]=='P':
282- status,mess=clean.suppress_dir(os.path.join(element,run_name))
283+ status,mess=clean.suppress_dir(os.path.join(element,run_name))
284 #if not status:
285 # print 'supress ',element,' failed:'
286 # print mess
287@@ -93,7 +93,7 @@
288 ls=os.listdir('.')
289 for element in ls:
290 if os.path.isdir(element) and len(element)>4 and element[:4]=='MW_P':
291- status,mess=clean.suppress_dir(os.path.join(element,run_name))
292+ status,mess=clean.suppress_dir(os.path.join(element,run_name))
293 #if not status:
294 # print 'supress ',element,' failed:'
295 # print mess
296
297=== modified file 'Template/NLO/Cards/run_card.dat'
298--- Template/NLO/Cards/run_card.dat 2015-07-30 15:57:44 +0000
299+++ Template/NLO/Cards/run_card.dat 2015-10-20 14:02:27 +0000
300@@ -66,6 +66,8 @@
301 # WARNING: PYTHIA6PT works only for processes without FSR!!!! *
302 #***********************************************************************
303 %(parton_shower)s = parton_shower
304+ %(shower_scale_factor)s = shower_scale_factor ! multiply default shower starting
305+ ! scale by this factor
306 #***********************************************************************
307 # Renormalization and factorization scales *
308 # (Default functional form for the non-fixed scales is the sum of *
309
310=== modified file 'Template/NLO/Cards/shower_card.dat'
311--- Template/NLO/Cards/shower_card.dat 2015-03-09 17:19:46 +0000
312+++ Template/NLO/Cards/shower_card.dat 2015-10-20 14:02:27 +0000
313@@ -85,15 +85,15 @@
314 # *
315 # Examples of syntax: *
316 # Z -> e+ e- or mu+ mu- with BR = 0.5 each *
317-# DM_1 = 23 > -11 11 @ 0.5d0 @ 100 *
318-# DM_2 = 23 > -13 13 @ 0.5d0 @ 100 *
319+# DM_1 = 23 > -11 11 @ 0.5d0 @ 100
320+# DM_2 = 23 > -13 13 @ 0.5d0 @ 100
321 # H -> tau+ tau- with BR = 1 *
322-# DM_3 = 25 > -15 15 @ 1.0d0 @ 0 *
323+# DM_3 = 25 > -15 15 @ 1.0d0 @ 0
324 # t -> nu_e e+ b with BR = 1 (HERWIG) *
325-# DM_4 = 6 > 12 -11 5 @ 1d0 @ 100 *
326+# DM_4 = 6 > 12 -11 5 @ 1d0 @ 100
327 # t -> nu_e e+ b with BR = 1 (PYTHIA) *
328-# DM_5 = 6 > 24 5 @ 1d0 @ 100 *
329-# DM_6 = 24 > 12 -11 @ 1d0 @ 100 *
330+# DM_5 = 6 > 24 5 @ 1d0 @ 100
331+# DM_6 = 24 > 12 -11 @ 1d0 @ 100
332 #***********************************************************************
333 # Extra Libraries/analyses *
334 # The following lines need to be changed if the user does not want to *
335
336=== modified file 'Template/NLO/MCatNLO/Scripts/MCatNLO_MadFKS_PYTHIA8.Script'
337--- Template/NLO/MCatNLO/Scripts/MCatNLO_MadFKS_PYTHIA8.Script 2015-04-24 09:57:53 +0000
338+++ Template/NLO/MCatNLO/Scripts/MCatNLO_MadFKS_PYTHIA8.Script 2015-10-20 14:02:27 +0000
339@@ -517,6 +517,7 @@
340 TimeShower:MEafterFirst = off ! No Matrix-element corrections after first emission
341 TimeShower:phiPolAsym = on ! Azimuthal asymmetry induced by gluon polarization
342 TimeShower:alphaSuseCMW = false ! Use the CMW prescription in FSR
343+TimeShower:weightGluonToQuark = 1 ! Use normal Altarelli-Parisi kernels for g -> q qbar
344
345 ! 6) Initial-state shower.
346 SpaceShower:pTmaxMatch = 1 ! Use scalup (re-check)
347
348=== modified file 'Template/NLO/Source/run.inc'
349--- Template/NLO/Source/run.inc 2015-04-02 22:56:24 +0000
350+++ Template/NLO/Source/run.inc 2015-10-20 14:02:27 +0000
351@@ -39,6 +39,9 @@
352 common/cscales_current_values/muR2_current,muF12_current,
353 # muF22_current,QES2_current
354
355+c Re-scale of the shower starting scale
356+ double precision shower_scale_factor
357+ common/cshower_scale_factor/shower_scale_factor
358 c
359 c Collider
360 c
361
362=== modified file 'Template/NLO/Source/run_config.inc'
363--- Template/NLO/Source/run_config.inc 2012-08-28 21:06:34 +0000
364+++ Template/NLO/Source/run_config.inc 2015-10-20 14:02:27 +0000
365@@ -34,7 +34,7 @@
366 parameter (PBS_QUE = 'madgraph')
367
368 integer ChanPerJob
369- parameter (ChanPerJob=1) !Number of channels / job for survey
370+ parameter (ChanPerJob=100000000) !Number of channels / job for survey
371
372 c integer max_np
373 c parameter (max_np=1) !Number of channels / job for refine
374
375=== modified file 'Template/NLO/SubProcesses/ajob_template'
376--- Template/NLO/SubProcesses/ajob_template 2015-03-18 14:50:37 +0000
377+++ Template/NLO/SubProcesses/ajob_template 2015-10-20 14:02:27 +0000
378@@ -26,57 +26,38 @@
379 exit
380 fi
381
382+channel=$1
383+run_mode=$2
384+runnumber=$3
385+integration_step=$4
386+
387 TAGTAGTAGTAGTAGTAGTAG for i in 1 ; do
388
389- runnumber=0
390- if [[ $1 == '0' ]]; then
391- j=$2\_G$i
392- if [[ ! -e $j ]]; then
393- mkdir $j
394- fi
395- cd $j
396- if [[ "$4" != "" ]]; then
397- if [[ -e ../$4\_G$i ]]; then
398- if [[ $1 == '0' ]]; then
399- cp -f ../$4\_G$i/mint_grids . >/dev/null 2>&1
400- cp -f ../$4\_G$i/grid.MC_integer . >/dev/null 2>&1
401- elif [[ $1 == '1' ]]; then
402- cp -f ../$4\_G$i/mint_grids . >/dev/null 2>&1
403- cp -f ../$4\_G$i/grid.MC_integer . >/dev/null 2>&1
404- fi
405- else
406- echo "Cannot find directory ../$4\_G$i/" > log.txt
407- exit
408- fi
409- fi
410- elif [[ $1 == '2' ]]; then
411- j=G$2$i
412- if [[ ! -e $j ]]; then
413- mkdir $j
414- fi
415-
416- cd $j
417- if [[ "$4" != "" ]]; then
418- if [[ "$4" == "H" ||"$4" == "S" || "$4" == "V" || "$4" == "B" || "$4" == "F" ]]; then
419- if [[ -e ../G$4$i ]]; then
420- cp -f ../G$4$i/mint_grids ./preset_mint_grids >/dev/null 2>&1
421- cp -f ../G$4$i/grid.MC_integer . >/dev/null 2>&1
422- else
423- echo "Cannot find direcotry ../G$4$i/" > log.txt
424- exit
425- fi
426- else
427- runnumber=$4
428- if [[ ! -e ../${j}_$4 ]]; then
429- mkdir ../${j}_$4
430- fi
431- cd ../${j}_$4
432- ln -sf ../${j}/mint_grids
433- ln -sf ../${j}/mint_grids_NLO
434- ln -sf ../${j}/grid.MC_integer
435- ln -sf ../${j}/res_1
436- fi
437- fi
438+ if [[ $run_mode == 'all' || $run_mode == 'born' ]] ; then
439+ j=$run_mode\_G$i
440+ else
441+ if [[ $runnumber == '0' ]] ; then
442+ j=G$run_mode$i
443+ else
444+ j=G$run_mode$i\_$runnumber
445+ fi
446+ fi
447+ cd $j
448+
449+ if [[ -e res.dat ]] ; then
450+ rm -f res.dat
451+ fi
452+ if [[ -e log.txt ]] ; then
453+ rm -f log.txt
454+ fi
455+ if [[ -e MADatNLO.top ]] ; then
456+ rm -f MADatNLO.top
457+ fi
458+ if [[ -e MADatNLO.HwU ]] ; then
459+ rm -f MADatNLO.HwU
460+ fi
461+ if [[ -e MADatNLO.root ]] ; then
462+ rm -f MADatNLO.root
463 fi
464 if [[ -e randinit ]] ; then
465 rm -f randinit
466@@ -97,67 +78,25 @@
467 link1up FKS_params.dat
468 link1up configs_and_props_info.dat
469 link1up leshouche_info.dat
470-# Not necessary anymore
471-# link1up MadLoop5_resources
472 link1up OLE_order.olc
473 link1up param_card.dat
474 link1up initial_states_map.dat
475
476-# check where is the basic file for the creation of input_app.txt
477-#
478- if [[ $1 == '0' ]]; then
479- if [[ $3 == '-1' && -e ./madinM1 ]] ; then
480- input_template=./madinM1
481- else
482- if [[ -e ../madin.$2 ]] ; then
483- input_template=../madin.$2
484- else
485- input_template=../../madin.$2
486- fi
487- fi
488- elif [[ $1 == '2' ]]; then
489- if [[ $3 == '0' || $3 == '2' ]]; then
490- if [[ -e ../madinMMC_$2.2 ]] ; then
491- input_template=../madinMMC_$2.2
492- else
493- input_template=../../madinMMC_$2.2
494- fi
495- else
496- input_template=./madinM1
497- fi
498- fi
499
500- if [[ $1 == '0' ]]; then
501- head -n 5 $input_template >& input_app.txt
502- echo $i >> input_app.txt
503- tail -n 4 $input_template >> input_app.txt
504- T="$(date +%s)"
505+ T="$(date +%s)"
506+ if [[ $run_mode == 'all' || $run_mode == 'born' ]]; then
507 ../madevent_mintFO > log.txt <input_app.txt 2>&1
508- status=$?
509- T="$(($(date +%s)-T))"
510- echo "Time in seconds: ${T}" >>log.txt
511- elif [[ $1 == '2' ]]; then
512+ else
513 if [[ $runnumber != 0 ]]; then
514- tar --extract --file=../nevents.tar nevts_${j}_$runnumber
515- mv nevts_${j}_$runnumber nevts
516 echo "$runnumber" >& moffset.dat
517 fi
518- if [[ $3 == '0' || $3 == '2' ]]; then
519- head -n 6 $input_template > input_app.txt
520- echo $i >> input_app.txt
521- tail -n 3 $input_template >> input_app.txt
522- elif [[ $3 == '1' ]]; then
523- head -n 6 $input_template > input_app.txt
524- echo $i >> input_app.txt
525- tail -n 3 $input_template >> input_app.txt
526- fi
527- T="$(date +%s)"
528 ../madevent_mintMC > log.txt <input_app.txt 2>&1
529- status=$?
530- T="$(($(date +%s)-T))"
531- echo "Time in seconds: ${T}" >>log.txt
532- cp -f log.txt log_MINT$3.txt >/dev/null 2>&1
533 fi
534+ status=$?
535+ T="$(($(date +%s)-T))"
536+ echo "Time in seconds: ${T}" >>log.txt
537+ cp -f log.txt log_MINT$integration_step.txt >/dev/null 2>&1
538+ cp -f res.dat res_$integration_step.dat >/dev/null 2>&1
539 exit $status
540 done
541
542
543=== removed file 'Template/NLO/SubProcesses/combine_results.sh'
544--- Template/NLO/SubProcesses/combine_results.sh 2013-06-28 13:57:17 +0000
545+++ Template/NLO/SubProcesses/combine_results.sh 1970-01-01 00:00:00 +0000
546@@ -1,55 +0,0 @@
547-#!/bin/bash
548-
549-# find the correct directory
550-if [[ ! -d ./SubProcesses ]]; then
551- cd ../
552-fi
553-if [[ -d ./SubProcesses ]]; then
554- cd SubProcesses
555-fi
556-
557-if [[ -e res.txt ]]; then
558- rm -f res.txt
559-fi
560-if [[ -e dirs.txt ]]; then
561- rm -f dirs.txt
562-fi
563-if [[ -e nevents_unweighted ]]; then
564- rm -f nevents_unweighted
565-fi
566-
567-arg1=$1
568-arg2=$2
569-arg3=$3
570-# shift the list of arguments by 3
571-shift
572-shift
573-shift
574-if [[ "$@" == "" ]]; then
575- echo "Please give the G directories that should be combined,"
576- echo "e.g. 'GF* GV*', as final arguments of this script"
577- exit
578-fi
579-
580-touch res.txt
581-touch dirs.txt
582-NTOT=0
583-for dir in "$@" ; do
584- N=`ls -d P*/$dir | wc -l`
585- NTOT=`expr $NTOT + $N`
586- ls -d P*/$dir >> dirs.txt
587- grep -H 'Final result' P*/$dir/res_$arg1 >> res.txt
588-done
589-echo N of directories: $NTOT
590-if [[ $arg1 == '0' ]] ; then
591- echo 'Determining the number of unweighted events per channel'
592-elif [[ $arg1 == '1' ]] ; then
593- echo 'Updating the number of unweighted events per channel'
594-fi
595-./sumres.py $NTOT $arg2 $arg3
596-
597-echo 'Integrated abs(cross-section)'
598-tail -n2 res.txt | head -n1
599-echo 'Integrated cross-section'
600-tail -n1 res.txt
601-mv res.txt res_$arg1.txt
602
603=== removed file 'Template/NLO/SubProcesses/combine_results_FO.sh'
604--- Template/NLO/SubProcesses/combine_results_FO.sh 2013-06-28 13:57:17 +0000
605+++ Template/NLO/SubProcesses/combine_results_FO.sh 1970-01-01 00:00:00 +0000
606@@ -1,52 +0,0 @@
607-#!/bin/bash
608-
609-# find the correct directory
610-if [[ ! -d ./SubProcesses ]]; then
611- cd ../
612-fi
613-if [[ -d ./SubProcesses ]]; then
614- cd SubProcesses
615-fi
616-
617-if [[ $1 == "0" ]] ; then
618- mint_mode=0
619- shift
620-elif [[ $1 == "1" ]] ; then
621- mint_mode=1
622- shift
623-elif [[ $1 == "2" ]] ; then
624- echo "Cannot combine results for mint_mode 2"
625- exit
626-else
627- mint_mode=0
628-fi
629-
630-if [[ -e res.txt ]]; then
631- rm -f res.txt
632-fi
633-if [[ -e dirs.txt ]]; then
634- rm -f dirs.txt
635-fi
636-
637-req_acc=$1
638-shift
639-
640-touch res.txt
641-touch dirs.txt
642-NTOT=0
643-for dir in "$@" ; do
644- N=`ls -d P*/$dir | wc -l`
645- NTOT=`expr $NTOT + $N`
646- ls -d P*/$dir >> dirs.txt
647- grep -H 'Final result' P*/$dir/res_$mint_mode >> res.txt
648-done
649-
650-sed -i.bak s/"\+\/\-"/" \+\/\-"/ res.txt
651-
652-echo N of directories: $NTOT
653-
654-./sumres.py $NTOT -1 $req_acc
655-
656-rm -r res.txt.bak
657-
658-tail -n1 res.txt
659
660=== modified file 'Template/NLO/SubProcesses/combine_root.sh'
661--- Template/NLO/SubProcesses/combine_root.sh 2013-12-09 10:15:28 +0000
662+++ Template/NLO/SubProcesses/combine_root.sh 2015-10-20 14:02:27 +0000
663@@ -65,7 +65,7 @@
664 echo ".x combine_root.C" >> rootinput.txt
665 echo ".q" >> rootinput.txt
666
667-root < rootinput.txt
668+root -b < rootinput.txt
669
670 rm -f rootinput.txt
671 }
672
673=== modified file 'Template/NLO/SubProcesses/driver_mintFO.f'
674--- Template/NLO/SubProcesses/driver_mintFO.f 2015-03-09 18:27:17 +0000
675+++ Template/NLO/SubProcesses/driver_mintFO.f 2015-10-20 14:02:27 +0000
676@@ -18,7 +18,6 @@
677 C LOCAL
678 C
679 integer i,j,l,l1,l2,ndim
680- integer npoints
681 character*130 buf
682 c
683 c Global
684@@ -171,7 +170,7 @@
685 else
686 flat_grid=.false.
687 endif
688- ndim = 3*(nexternal-2)-4
689+ ndim = 3*(nexternal-nincoming)-4
690 if (abs(lpp(1)) .ge. 1) ndim=ndim+1
691 if (abs(lpp(2)) .ge. 1) ndim=ndim+1
692 c Don't proceed if muF1#muF2 (we need to work out the relevant formulae
693@@ -219,23 +218,7 @@
694 do j=1,nintervals_virt
695 read (12,*) (ave_virt(j,i),i=1,ndim)
696 enddo
697- if (ncall.gt.0 .and. accuracy.ne.0d0) then
698- read (12,*) ans(1),unc(1),ncall,itmax
699-c Update the number of PS points based on unc(1), ncall and accuracy
700- itmax_fl=itmax*(unc(1)/accuracy)**2
701- if (itmax_fl.le.4d0) then
702- itmax=max(nint(itmax_fl),2)
703- elseif (itmax_fl.gt.4d0 .and. itmax_fl.le.16d0) then
704- ncall=nint(ncall*itmax_fl/4d0)
705- itmax=4
706- else
707- itmax=nint(sqrt(itmax_fl))
708- ncall=nint(ncall*itmax_fl/nint(sqrt(itmax_fl)))
709- endif
710- accuracy=accuracy/ans(1) ! relative accuracy on the ABS X-section
711- else
712- read (12,*) ans(1),unc(1),dummy,dummy
713- endif
714+ read (12,*) ans(1),unc(1),dummy,dummy
715 read (12,*) virtual_fraction,average_virtual
716 close (12)
717 write (*,*) "Update iterations and points to",itmax,ncall
718@@ -253,10 +236,6 @@
719 call mint(sigint,ndim,ncall,itmax,imode,xgrid,ymax,ymax_virt
720 $ ,ans,unc,chi2)
721 call topout
722- open(unit=58,file='res_0',status='unknown')
723- write(58,*)'Final result [ABS]:',ans(1),' +/-',unc(1)
724- write(58,*)'Final result:',ans(2),' +/-',unc(2)
725- close(58)
726 write(*,*)'Final result [ABS]:',ans(1),' +/-',unc(1)
727 write(*,*)'Final result:',ans(2),' +/-',unc(2)
728 write(*,*)'chi**2 per D.o.F.:',chi2(1)
729@@ -337,6 +316,11 @@
730 write(*,*) 'Time spent in Other_tasks : ',tOther
731 write(*,*) 'Time spent in Total : ',tTot
732
733+ open (unit=12, file='res.dat',status='unknown')
734+ write (12,*)ans(1),unc(1),ans(2),unc(2),itmax,ncall,tTot
735+ close(12)
736+
737+
738 if(i_momcmp_count.ne.0)then
739 write(*,*)' '
740 write(*,*)'WARNING: genps_fks code 555555'
741@@ -680,8 +664,6 @@
742 character * 70 idstring
743 logical savegrid
744
745- character * 80 runstr
746- common/runstr/runstr
747 logical usexinteg,mint
748 common/cusexinteg/usexinteg,mint
749 logical unwgt
750@@ -692,84 +674,98 @@
751 double precision volh
752 common/mc_int2/volh,mc_hel,ihel,fillh
753
754-
755+ logical done
756+ character*100 buffer
757 c-----
758 c Begin Code
759 c-----
760 mint=.true.
761 unwgt=.false.
762- write(*,'(a)') 'Enter number of events and iterations: '
763- read(*,*) ncall,itmax
764- write(*,*) 'Number of events and iterations ',ncall,itmax
765- write(*,'(a)') 'Enter desired accuracy: '
766- read(*,*) accuracy
767- write(*,*) 'Desired absolute accuracy: ',accuracy
768-
769- write(*,'(a)') 'Enter 0 for fixed, 2 for adjustable grid: '
770- read(*,*) use_cut
771- if (use_cut .lt. 0 .or. use_cut .gt. 2) then
772- write(*,*) 'Bad choice, using 2',use_cut
773- use_cut = 2
774- endif
775-
776- write(*,10) 'Suppress amplitude (0 no, 1 yes)? '
777- read(*,*) i
778- if (i .eq. 1) then
779- multi_channel = .true.
780- write(*,*) 'Using suppressed amplitude.'
781- else
782- multi_channel = .false.
783- write(*,*) 'Using full amplitude.'
784- endif
785-
786- write(*,10) 'Exact helicity sum (0 yes, n = number/event)? '
787- read(*,*) i
788- if (i .eq. 0) then
789- mc_hel = 0
790- write(*,*) 'Explicitly summing over helicities for virt'
791- else
792- mc_hel= i
793- write(*,*) 'Summing over',i,' helicities/event for virt'
794- endif
795- isum_hel=0
796-
797- write(*,10) 'Enter Configuration Number: '
798- read(*,*) dconfig
799- iconfig = int(dconfig)
800- do i=1,mapconfig(0)
801- if (iconfig.eq.mapconfig(i)) then
802- iconfig=i
803- exit
804+ open (unit=83,file='input_app.txt',status='old')
805+ done=.false.
806+ do while (.not. done)
807+ read(83,'(a)',err=222,end=222) buffer
808+ if (buffer(1:7).eq.'NPOINTS') then
809+ buffer=buffer(10:100)
810+ read(buffer,*) ncall
811+ write (*,*) 'Number of phase-space points per iteration:',ncall
812+ elseif(buffer(1:11).eq.'NITERATIONS') then
813+ read(buffer(14:),*) itmax
814+ write (*,*) 'Maximum number of iterations is:',itmax
815+ elseif(buffer(1:8).eq.'ACCURACY') then
816+ read(buffer(11:),*) accuracy
817+ write (*,*) 'Desired accuracy is:',accuracy
818+ elseif(buffer(1:10).eq.'ADAPT_GRID') then
819+ read(buffer(13:),*) use_cut
820+ write (*,*) 'Using adaptive grids:',use_cut
821+ elseif(buffer(1:12).eq.'MULTICHANNEL') then
822+ read(buffer(15:),*) i
823+ if (i.eq.1) then
824+ multi_channel=.true.
825+ write (*,*) 'Using Multi-channel integration'
826+ else
827+ multi_channel=.false.
828+ write (*,*) 'Not using Multi-channel integration'
829+ endif
830+ elseif(buffer(1:12).eq.'SUM_HELICITY') then
831+ read(buffer(15:),*) i
832+ if (nincoming.eq.1) then
833+ write (*,*) 'Sum over helicities in the virtuals'/
834+ $ /' for decay process'
835+ mc_hel=0
836+ elseif (i.eq.0) then
837+ mc_hel=0
838+ write (*,*) 'Explicitly summing over helicities'/
839+ $ /' for the virtuals'
840+ else
841+ mc_hel=1
842+ write(*,*) 'Do MC over helicities for the virtuals'
843+ endif
844+ isum_hel=0
845+ elseif(buffer(1:7).eq.'CHANNEL') then
846+ read(buffer(10:),*) dconfig
847+ iconfig = int(dconfig)
848+ do i=1,mapconfig(0)
849+ if (iconfig.eq.mapconfig(i)) then
850+ iconfig=i
851+ exit
852+ endif
853+ enddo
854+ write(*,12) 'Running Configuration Number: ',iconfig
855+ elseif(buffer(1:5).eq.'SPLIT') then
856+ read(buffer(8:),*) i
857+ write (*,*) 'Splitting channel:',i
858+ elseif(buffer(1:8).eq.'RUN_MODE') then
859+ read(buffer(11:),*) abrvinput
860+ if(abrvinput(5:5).eq.'0')then
861+ nbody=.true.
862+ else
863+ nbody=.false.
864+ endif
865+ abrv=abrvinput(1:4)
866+ write (*,*) "doing the ",abrv," of this channel"
867+ if(nbody)then
868+ write (*,*) "integration Born/virtual with Sfunction=1"
869+ else
870+ write (*,*) "Normal integration (Sfunction != 1)"
871+ endif
872+ elseif(buffer(1:7).eq.'RESTART') then
873+ read(buffer(10:),*) irestart
874+ if (irestart.eq.0) then
875+ write (*,*) 'RESTART: Fresh run'
876+ elseif(irestart.eq.-1) then
877+ write (*,*) 'RESTART: Use old grids, but refil plots'
878+ elseif(irestart.eq.1) then
879+ write (*,*) 'RESTART: continue with existing run'
880+ else
881+ write (*,*) 'RESTART:',irestart
882+ endif
883 endif
884+ cycle
885+ 222 done=.true.
886 enddo
887- write(*,12) 'Running Configuration Number: ',iconfig
888-c
889-c Enter parameters that control Vegas grids
890-c
891- write(*,*)'enter id string for this run'
892- read(*,*) idstring
893- runstr=idstring
894- write(*,*)'enter 1 if you want restart files'
895- read (*,*) itmp
896- if(itmp.eq.1) then
897- savegrid = .true.
898- else
899- savegrid = .false.
900- endif
901- write(*,*)'enter 0 to exclude, 1 for new run, 2 to restart'
902- read(5,*)irestart
903+ close(83)
904
905- abrvinput=' '
906- write (*,*) "'all ', 'born', 'real', 'virt', 'novi' or 'grid'?"
907- write (*,*) "Enter 'born0' or 'virt0' to perform"
908- write (*,*) " a pure n-body integration (no S functions)"
909- read(5,*) abrvinput
910- if(abrvinput(5:5).eq.'0')then
911- nbody=.true.
912- else
913- nbody=.false.
914- endif
915- abrv=abrvinput(1:4)
916 if (fks_configs.eq.1) then
917 if (pdg_type_d(1,fks_i_d(1)).eq.-21) then
918 write (*,*) 'Process generated with [LOonly=QCD]. '/
919@@ -782,26 +778,6 @@
920 endif
921 endif
922 endif
923-c Options are way too many: make sure we understand all of them
924- if ( abrv.ne.'all '.and.abrv.ne.'born'.and.abrv.ne.'real'.and.
925- & abrv.ne.'virt'.and.
926- & abrv.ne.'viSC'.and.abrv.ne.'viLC'.and.abrv.ne.'novA'.and.
927- & abrv.ne.'novB'.and.abrv.ne.'viSA'.and.abrv.ne.'viSB') then
928- write(*,*)'Error in input: abrv is:',abrv
929- stop
930- endif
931- if(nbody.and.abrv.ne.'born'.and.abrv(1:2).ne.'vi'
932- & .and. abrv.ne.'grid')then
933- write(*,*)'Error in driver: inconsistent input',abrvinput
934- stop
935- endif
936-
937- write (*,*) "doing the ",abrv," of this channel"
938- if(nbody)then
939- write (*,*) "integration Born/virtual with Sfunction=1"
940- else
941- write (*,*) "Normal integration (Sfunction != 1)"
942- endif
943 c
944 c
945 c Here I want to set up with B.W. we map and which we don't
946
947=== modified file 'Template/NLO/SubProcesses/driver_mintMC.f'
948--- Template/NLO/SubProcesses/driver_mintMC.f 2015-08-13 12:43:02 +0000
949+++ Template/NLO/SubProcesses/driver_mintMC.f 2015-10-20 14:02:27 +0000
950@@ -108,6 +108,11 @@
951 call cpu_time(tBefore)
952 fixed_order=.false.
953 nlo_ps=.true.
954+ if (nincoming.ne.2) then
955+ write (*,*) 'Decay processes not supported for'/
956+ & /' event generation'
957+ stop 1
958+ endif
959
960 c Read general MadFKS parameters
961 c
962@@ -158,7 +163,7 @@
963 else
964 flat_grid=.false.
965 endif
966- ndim = 3*(nexternal-2)-4
967+ ndim = 3*(nexternal-nincoming)-4
968 if (abs(lpp(1)) .ge. 1) ndim=ndim+1
969 if (abs(lpp(2)) .ge. 1) ndim=ndim+1
970 c Don''t proceed if muF1#muF2 (we need to work out the relevant formulae
971@@ -189,7 +194,7 @@
972 enddo
973 else
974 c to restore grids:
975- open (unit=12, file='preset_mint_grids',status='old')
976+ open (unit=12, file='mint_grids',status='old')
977 do j=0,nintervals
978 read (12,*) (xgrid(j,i),i=1,ndim)
979 enddo
980@@ -285,7 +290,7 @@
981 close(58)
982
983 c to save grids:
984- open (unit=12, file='mint_grids_NLO',status='unknown')
985+ open (unit=12, file='mint_grids',status='unknown')
986 write (12,*) (xgrid(0,i),i=1,ndim)
987 do j=1,nintervals
988 write (12,*) (xgrid(j,i),i=1,ndim)
989@@ -301,7 +306,6 @@
990 write (12,*) virtual_fraction,average_virtual
991 close (12)
992
993-
994 c*************************************************************
995 c event generation
996 c*************************************************************
997@@ -323,7 +327,7 @@
998 ncall=nevts ! Update ncall with the number found in 'nevts'
999
1000 c to restore grids:
1001- open (unit=12, file='mint_grids_NLO',status='unknown')
1002+ open (unit=12, file='mint_grids',status='unknown')
1003 read (12,*) (xgrid(0,i),i=1,ndim)
1004 do j=1,nintervals
1005 read (12,*) (xgrid(j,i),i=1,ndim)
1006@@ -468,6 +472,15 @@
1007 write(*,*) 'Time spent in Other_tasks : ',tOther
1008 write(*,*) 'Time spent in Total : ',tTot
1009
1010+ open (unit=12, file='res.dat',status='unknown')
1011+ if (imode.eq.0) then
1012+ write (12,*)ans(1),unc(1),ans(2),unc(2),itmax,ncall,tTot
1013+ else
1014+ write (12,*)ans(1)+ans(5),sqrt(unc(1)**2+unc(5)**2),ans(2)
1015+ $ ,unc(2),itmax,ncall,tTot
1016+ endif
1017+ close(12)
1018+
1019 return
1020 999 write (*,*) 'nevts file not found'
1021 stop
1022@@ -630,12 +643,17 @@
1023
1024 write(*,10) 'Exact helicity sum (0 yes, n = number/event)? '
1025 read(*,*) i
1026- if (i .eq. 0) then
1027- mc_hel= 0
1028- write(*,*) 'Explicitly summing over helicities for virt'
1029+ if (nincoming.eq.1) then
1030+ write (*,*) 'Sum over helicities in the virtuals'/
1031+ $ /' for decay process'
1032+ mc_hel=0
1033+ elseif (i.eq.0) then
1034+ mc_hel=0
1035+ write (*,*) 'Explicitly summing over helicities'/
1036+ $ /' for the virtuals'
1037 else
1038- mc_hel= i
1039- write(*,*) 'Summing over',i,' helicities/event for virt'
1040+ mc_hel=1
1041+ write(*,*) 'Do MC over helicities for the virtuals'
1042 endif
1043 isum_hel = 0
1044
1045
1046=== modified file 'Template/NLO/SubProcesses/fks_Sij.f'
1047--- Template/NLO/SubProcesses/fks_Sij.f 2012-08-28 21:06:34 +0000
1048+++ Template/NLO/SubProcesses/fks_Sij.f 2015-10-20 14:02:27 +0000
1049@@ -90,9 +90,13 @@
1050
1051 c Consistency check -- call to set_cms_stuff() must be done prior to
1052 c entering this function
1053- shattmp=2d0*dot(p(0,1),p(0,2))
1054+ if (nincoming.eq.2) then
1055+ shattmp=2d0*dot(p(0,1),p(0,2))
1056+ else
1057+ shattmp=p(0,1)**2
1058+ endif
1059 if(abs(shattmp/shat-1.d0).gt.1.d-5)then
1060- write(*,*)'Error in fks_Sij: inconsistent shat'
1061+ write(*,*)'Error in fks_Sij: inconsistent shat #1'
1062 write(*,*)shattmp,shat
1063 stop
1064 endif
1065@@ -148,6 +152,7 @@
1066 do j=1,fks_j_from_i(i,0)
1067 kk = i
1068 ll = fks_j_from_i(i,j)
1069+ if (nincoming.ne.2 .and. ll.le.nincoming) cycle
1070 if ( ijskip(kk,ll).eq.0 .and. ijskip(ll,kk).eq.0 ) then
1071 ijskip(kk,ll) = 1
1072 elseif ( ijskip(kk,ll).eq.0 .and. ijskip(ll,kk).eq.1 ) then
1073@@ -178,6 +183,7 @@
1074 do j=1,fks_j_from_i(i,0)
1075 kk = i
1076 ll = fks_j_from_i(i,j)
1077+ if (nincoming.ne.2 .and. ll.le.nincoming) cycle
1078 if(ijskip(kk,ll).ne.1)goto 222
1079 if(particle_type(ll).eq.8.and.particle_type(kk).ne.8.and.
1080 # ll.gt.nincoming)then
1081@@ -567,7 +573,11 @@
1082
1083 c Consistency check -- call to set_cms_stuff() must be done prior to
1084 c entering this function
1085- shattmp=2d0*dot(p(0,1),p(0,2))
1086+ if (nincoming.eq.2) then
1087+ shattmp=2d0*dot(p(0,1),p(0,2))
1088+ else
1089+ shattmp=p(0,1)**2
1090+ endif
1091 if(abs(shattmp/shat-1.d0).gt.1.d-5)then
1092 write(*,*)'Error in fks_Hij: inconsistent shat'
1093 write(*,*)shattmp,shat
1094
1095=== modified file 'Template/NLO/SubProcesses/fks_singular.f'
1096--- Template/NLO/SubProcesses/fks_singular.f 2015-08-19 12:50:52 +0000
1097+++ Template/NLO/SubProcesses/fks_singular.f 2015-10-20 14:02:27 +0000
1098@@ -1034,6 +1034,10 @@
1099 $ ,pswgt_cnt(-2:2),jac_cnt(-2:2)
1100 common/counterevnts/p1_cnt,wgt_cnt,pswgt_cnt,jac_cnt
1101 if (wgt1.eq.0d0 .and. wgt2.eq.0d0 .and. wgt3.eq.0d0) return
1102+c Check for NaN's and INF's. Simply skip the contribution
1103+ if (wgt1.ne.wgt1) return
1104+ if (wgt2.ne.wgt2) return
1105+ if (wgt3.ne.wgt3) return
1106 icontr=icontr+1
1107 if (icontr.gt.max_contr) then
1108 write (*,*) 'ERROR in add_wgt: too many contributions'
1109@@ -1226,7 +1230,12 @@
1110 c PDG codes
1111 niproc(ict)=iproc
1112 do j=1,iproc
1113- parton_iproc(j,ict)=pd(j)*conv
1114+ if (nincoming.eq.2) then
1115+ parton_iproc(j,ict)=pd(j)*conv
1116+ else
1117+c Keep GeV's for decay processes (no conv. factor needed)
1118+ parton_iproc(j,ict)=pd(j)
1119+ endif
1120 do k=1,nexternal
1121 parton_pdg(k,j,ict)=idup_d(iFKS,k,j)
1122 if (k.lt.fks_j_d(iFKS)) then
1123@@ -2195,9 +2204,15 @@
1124 do ii=1,iproc_save(nFKS(ict))
1125 if (eto(ii,nFKS(ict)).ne.ipr) cycle
1126 n_ctr_found=n_ctr_found+1
1127- write (n_ctr_str(n_ctr_found),'(3(1x,d18.12),1x,i2)')
1128- & (wgt(j,ict)*conv,j=1,3),
1129- & nexternal
1130+ if (nincoming.eq.2) then
1131+ write (n_ctr_str(n_ctr_found),'(3(1x,d18.12),1x,i2)')
1132+ & (wgt(j,ict)*conv,j=1,3),
1133+ & nexternal
1134+ else
1135+ write (n_ctr_str(n_ctr_found),'(3(1x,d18.12),1x,i2)')
1136+ & (wgt(j,ict),j=1,3),
1137+ & nexternal
1138+ endif
1139 procid=''
1140 do j=1,nexternal
1141 write (str_temp,*) parton_pdg(j,ii,ict)
1142@@ -2223,9 +2238,15 @@
1143 c H-event
1144 ipr=iproc_picked
1145 n_ctr_found=n_ctr_found+1
1146- write (n_ctr_str(n_ctr_found),'(3(1x,d18.12),1x,i2)')
1147- & (wgt(j,ict)*conv,j=1,3),
1148- & nexternal
1149+ if (nincoming.eq.2) then
1150+ write (n_ctr_str(n_ctr_found),'(3(1x,d18.12),1x,i2)')
1151+ & (wgt(j,ict)*conv,j=1,3),
1152+ & nexternal
1153+ else
1154+ write (n_ctr_str(n_ctr_found),'(3(1x,d18.12),1x,i2)')
1155+ & (wgt(j,ict),j=1,3),
1156+ & nexternal
1157+ endif
1158 procid=''
1159 do j=1,nexternal
1160 write (str_temp,*) parton_pdg(j,ipr,ict)
1161@@ -2338,7 +2359,7 @@
1162 do i=0,3
1163 xsum(i)=0.d0
1164 xsuma(i)=0.d0
1165- do j=3,npart
1166+ do j=nincoming+1,npart
1167 xsum(i)=xsum(i)+xmom(i,j)
1168 xsuma(i)=xsuma(i)+abs(xmom(i,j))
1169 enddo
1170@@ -2405,9 +2426,14 @@
1171 pass=.true.
1172 jflag=0
1173 do i=0,3
1174- xsum(i)=-xmom(i,1)-xmom(i,2)
1175- xsuma(i)=abs(xmom(i,1))+abs(xmom(i,2))
1176- do j=3,npart
1177+ if (nincoming.eq.2) then
1178+ xsum(i)=-xmom(i,1)-xmom(i,2)
1179+ xsuma(i)=abs(xmom(i,1))+abs(xmom(i,2))
1180+ elseif(nincoming.eq.1) then
1181+ xsum(i)=-xmom(i,1)
1182+ xsuma(i)=abs(xmom(i,1))
1183+ endif
1184+ do j=nincoming+1,npart
1185 xsum(i)=xsum(i)+xmom(i,j)
1186 xsuma(i)=xsuma(i)+abs(xmom(i,j))
1187 enddo
1188@@ -2420,14 +2446,14 @@
1189 write(*,*)'Momentum is not conserved [nocms]'
1190 write(*,*)'i=',i
1191 do j=1,npart
1192- write(*,'(4(d14.8,1x))') (xmom(jj,j),jj=0,3)
1193+ write(*,'(i2,1x,4(d14.8,1x))') j,(xmom(jj,j),jj=0,3)
1194 enddo
1195 jflag=1
1196 endif
1197 enddo
1198 if(jflag.eq.1)then
1199- write(*,'(4(d14.8,1x))') (xsum(jj),jj=0,3)
1200- write(*,'(4(d14.8,1x))') (xrat(jj),jj=0,3)
1201+ write(*,'(a3,1x,4(d14.8,1x))') 'sum',(xsum(jj),jj=0,3)
1202+ write(*,'(a3,1x,4(d14.8,1x))') 'rat',(xrat(jj),jj=0,3)
1203 pass=.false.
1204 return
1205 endif
1206@@ -2454,7 +2480,11 @@
1207 endif
1208 enddo
1209 c
1210- ecmtmp=sqrt(2d0*dot(xmom(0,1),xmom(0,2)))
1211+ if (nincoming.eq.2) then
1212+ ecmtmp=sqrt(2d0*dot(xmom(0,1),xmom(0,2)))
1213+ elseif (nincoming.eq.1) then
1214+ ecmtmp=xmom(0,1)
1215+ endif
1216 if(abs(ecm-ecmtmp).gt.vtiny)then
1217 write(*,*)'Inconsistent shat [nocms]'
1218 write(*,*)'ecm given= ',ecm
1219@@ -2687,6 +2717,9 @@
1220
1221 double precision zero,tiny
1222 parameter (zero=0d0)
1223+
1224+ integer icount
1225+ data icount /0/
1226
1227 c Particle types (=color) of i_fks, j_fks and fks_mother
1228 integer i_type,j_type,m_type
1229@@ -2709,7 +2742,11 @@
1230
1231 c Consistency check -- call to set_cms_stuff() must be done prior to
1232 c entering this function
1233- shattmp=2d0*dot(pp(0,1),pp(0,2))
1234+ if (nincoming.eq.2) then
1235+ shattmp=2d0*dot(pp(0,1),pp(0,2))
1236+ else
1237+ shattmp=pp(0,1)**2
1238+ endif
1239 if(abs(shattmp/shat-1.d0).gt.1.d-5)then
1240 write(*,*)'Error in sreal: inconsistent shat'
1241 write(*,*)shattmp,shat
1242@@ -2717,9 +2754,9 @@
1243 endif
1244
1245 if (1d0-y_ij_fks.lt.tiny)then
1246- if (pmass(j_fks).eq.zero.and.j_fks.le.2)then
1247+ if (pmass(j_fks).eq.zero.and.j_fks.le.nincoming)then
1248 call sborncol_isr(pp,xi_i_fks,y_ij_fks,wgt)
1249- elseif (pmass(j_fks).eq.zero.and.j_fks.ge.3)then
1250+ elseif (pmass(j_fks).eq.zero.and.j_fks.ge.nincoming+1)then
1251 call sborncol_fsr(pp,xi_i_fks,y_ij_fks,wgt)
1252 else
1253 wgt=0d0
1254@@ -2741,11 +2778,20 @@
1255 endif
1256
1257 if(wgt.lt.0.d0)then
1258- write(*,*) 'Fatal error #2 in sreal',wgt,xi_i_fks,y_ij_fks
1259- do i=1,nexternal
1260- write(*,*) 'particle ',i,', ',(pp(j,i),j=0,3)
1261- enddo
1262- stop
1263+            icount=icount+1
1264+            if (icount.le.10) then
1265+               write(*,*) 'Warning, numerical problem found in sreal. '/
1266+     $              /'Setting weight to zero',wgt,xi_i_fks,y_ij_fks
1267+               do i=1,nexternal
1268+                  write(*,*) 'particle ',i,', ',(pp(j,i),j=0,3)
1269+               enddo
1270+            endif
1271+            if (icount.eq.25) then
1272+               write (*,*) 'ERROR 25 problems found... '/
1273+     $              /'stopping the code'
1274+               stop
1275+            endif
1276+            wgt=0d0
1277 endif
1278
1279 return
1280@@ -2814,7 +2860,7 @@
1281 E_i_fks = p(0,i_fks)
1282 z = 1d0 - E_i_fks/(E_i_fks+E_j_fks)
1283 t = z * shat/4d0
1284- if(rotategranny .and. nexternal-1.ne.3)then
1285+ if(rotategranny .and. nexternal-1.ne.3 .and. nincoming.eq.2)then
1286 c Exclude 2->1 (at the Born level) processes: matrix elements are
1287 c independent of the PS point, but non-zero helicity configurations
1288 c might flip when rotating the momenta.
1289@@ -2934,7 +2980,7 @@
1290 c Thus, an extra factor z (implicit in the flux of the reduced Born
1291 c in FKS) has to be inserted here
1292 t = z*shat/4d0
1293- if(j_fks.eq.2 .and. nexternal-1.ne.3)then
1294+ if(j_fks.eq.2 .and. nexternal-1.ne.3 .and. nincoming.eq.2)then
1295 c Rotation according to innerpin.m. Use rotate_invar() if a more
1296 c general rotation is needed.
1297 c Exclude 2->1 (at the Born level) processes: matrix elements are
1298@@ -2965,7 +3011,7 @@
1299 pi(i)=p_i_fks_ev(i)
1300 pj(i)=p(i,j_fks)
1301 enddo
1302- if(j_fks.eq.2)then
1303+ if(j_fks.eq.2 .and. nincoming.eq.2)then
1304 c Rotation according to innerpin.m. Use rotate_invar() if a more
1305 c general rotation is needed
1306 pi(1)=-pi(1)
1307@@ -2986,7 +3032,7 @@
1308 azifact=Wij_angle/Wij_recta
1309 endif
1310 c Insert the extra factor due to Madgraph convention for polarization vectors
1311- if(j_fks.eq.2)then
1312+ if(j_fks.eq.2 .and. nincoming.eq.2)then
1313 cphi_mother=-1.d0
1314 sphi_mother=0.d0
1315 else
1316@@ -3461,7 +3507,11 @@
1317
1318 c Consistency check -- call to set_cms_stuff() must be done prior to
1319 c entering this function
1320- shattmp=2d0*dot(p(0,1),p(0,2))
1321+ if (nincoming.eq.2) then
1322+ shattmp=2d0*dot(p(0,1),p(0,2))
1323+ else
1324+ shattmp=p(0,1)**2
1325+ endif
1326 if(abs(shattmp/shat-1.d0).gt.1.d-5)then
1327 write(*,*)'Error in sreal: inconsistent shat'
1328 write(*,*)shattmp,shat
1329@@ -3770,7 +3820,7 @@
1330 enddo
1331 enddo
1332 do i=0,3
1333- if(j_fks.gt.2)then
1334+ if(j_fks.gt.nincoming)then
1335 xnum=p1_cnt(i,i_fks,inum)+p1_cnt(i,j_fks,inum)
1336 xden=p1_cnt(i,i_fks,iden)+p1_cnt(i,j_fks,iden)
1337 else
1338@@ -4370,7 +4420,11 @@
1339
1340 c Consistency check -- call to set_cms_stuff() must be done prior to
1341 c entering this function
1342- shattmp=2d0*dot(p(0,1),p(0,2))
1343+ if (nincoming.eq.2) then
1344+ shattmp=2d0*dot(p(0,1),p(0,2))
1345+ else
1346+ shattmp=p(0,1)**2
1347+ endif
1348 if(abs(shattmp/shat-1.d0).gt.1.d-5)then
1349 write(*,*)'Error in sreal: inconsistent shat'
1350 write(*,*)shattmp,shat
1351@@ -4447,7 +4501,7 @@
1352 enddo
1353 c
1354 do i=1,nincoming
1355- if (particle_type(i).ne.1)then
1356+ if (particle_type(i).ne.1 .and. pmass(i).eq.ZERO) then
1357 if (particle_type(i).eq.8) then
1358 aj=0
1359 elseif(abs(particle_type(i)).eq.3) then
1360@@ -4972,8 +5026,13 @@
1361 elseif(abrv.ne.'virt' .and. abrv.ne.'viSC' .and.
1362 # abrv.ne.'viLC')then
1363 c 1+2+3+4
1364- tmp=dlog(xicut_used**2*shat/QES2)-
1365- # 1/betai*dlog((1+betai)/(1-betai))
1366+ if (betai.gt.1d-6) then
1367+ tmp=dlog(xicut_used**2*shat/QES2)-
1368+ & 1/betai*dlog((1+betai)/(1-betai))
1369+ else
1370+ tmp=dlog(xicut_used**2*shat/QES2)-
1371+ & 2d0*(1d0+betai**2/3d0+betai**4/5d0)
1372+ endif
1373 else
1374 write(*,*)'Error #14 in eikonal_Ireg',abrv
1375 stop
1376@@ -5200,7 +5259,11 @@
1377 elseif(pmass(m).ne.zero.and.pmass(n).ne.zero)then
1378 kikj=dot(p(0,n),p(0,m))
1379 vij=sqrt(1-(pmass(n)*pmass(m)/kikj)**2)
1380- single=single+0.5d0*1/vij*log((1+vij)/(1-vij))*wgt
1381+ if (vij .gt. 1d-6) then
1382+ single=single+0.5d0*1/vij*log((1+vij)/(1-vij))*wgt
1383+ else
1384+ single=single+(1d0+vij**2/3d0+vij**4/5d0)*wgt
1385+ endif
1386 else
1387 write(*,*)'Error in getpoles',i,j,n,m,pmass(n),pmass(m)
1388 stop
1389
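The two small-velocity safeguards added above (for betai in eikonal_Ireg and for vij in getpoles) replace the closed-form eikonal logarithm by its Taylor series to avoid the 0/0 behaviour when the velocity vanishes. The expansion being used is

    \frac{1}{\beta}\,\ln\frac{1+\beta}{1-\beta}
      = 2\left(1 + \frac{\beta^2}{3} + \frac{\beta^4}{5} + \mathcal{O}(\beta^6)\right),

so below the 1d-6 threshold the truncated series agrees with the exact expression to far better than double precision; the getpoles branch uses half of this expression, matching the factor 0.5d0 in its closed form.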
1390=== modified file 'Template/NLO/SubProcesses/genps_fks.f'
1391--- Template/NLO/SubProcesses/genps_fks.f 2015-02-04 13:26:19 +0000
1392+++ Template/NLO/SubProcesses/genps_fks.f 2015-10-20 14:02:27 +0000
1393@@ -182,15 +182,19 @@
1394 endif
1395 enddo
1396 if( firsttime .or. iconfig0.ne.iconfigsave ) then
1397- stot = 4d0*ebeam(1)*ebeam(2)
1398+ if (nincoming.eq.2) then
1399+ stot = 4d0*ebeam(1)*ebeam(2)
1400+ else
1401+ stot=pmass(1)**2
1402+ endif
1403 c Make sure have enough mass for external particles
1404 totmassin=0d0
1405- do i=3-nincoming,2
1406+ do i=1,nincoming
1407 totmassin=totmassin+m(i)
1408 enddo
1409 totmass=0d0
1410 nbranch = nexternal-3 ! nexternal is for n+1-body, while itree uses n-body
1411- do i=3,nbranch+2
1412+ do i=nincoming+1,nexternal-1
1413 totmass=totmass+m(i)
1414 enddo
1415 fksmass=totmass
1416@@ -294,7 +298,7 @@
1417 else
1418 pb(0,1)=sqrtshat_born
1419 do i=1,2
1420- pb(0,1)=0d0
1421+ pb(i,1)=0d0
1422 enddo
1423 p(3,1)=1e-14 ! For HELAS routine ixxxxx for neg. mass
1424 endif
1425@@ -453,22 +457,22 @@
1426 isolsign=1
1427 call generate_momenta_massless_final(icountevts,i_fks,j_fks
1428 & ,p_born(0,imother),shat,sqrtshat ,x(ixEi),xmrec2,xp
1429- & ,phi_i_fks, xiimax,xinorm,xi_i_fks,y_ij_fks,p_i_fks
1430+ & ,phi_i_fks,xiimax,xinorm,xi_i_fks,y_ij_fks,p_i_fks
1431 & ,xjac,xpswgt,pass)
1432 if (.not.pass) goto 112
1433 elseif(m_j_fks.gt.0d0) then
1434 call generate_momenta_massive_final(icountevts,isolsign
1435- & ,i_fks,j_fks,p_born(0,imother),shat,sqrtshat ,m_j_fks
1436- & ,x(ixEi),xmrec2,xp,phi_i_fks, xiimax,xinorm
1437+ & ,i_fks,j_fks,p_born(0,imother),shat,sqrtshat,m_j_fks
1438+ & ,x(ixEi),xmrec2,xp,phi_i_fks,xiimax,xinorm
1439 & ,xi_i_fks,y_ij_fks,p_i_fks,xjac,xpswgt,pass)
1440 if (.not.pass) goto 112
1441 endif
1442 elseif(j_fks.le.nincoming) then
1443 isolsign=1
1444 call generate_momenta_initial(icountevts,i_fks,j_fks,xbjrk_born
1445- & ,tau_born,ycm_born,ycmhat,shat_born,phi_i_fks ,xp ,x(ixEi)
1446- & ,shat,stot ,sqrtshat,tau,ycm,xbjrk ,p_i_fks,xiimax,xinorm
1447- & ,xi_i_fks,y_ij_fks ,xpswgt,xjac ,pass)
1448+ & ,tau_born,ycm_born,ycmhat,shat_born,phi_i_fks,xp ,x(ixEi)
1449+ & ,shat,stot,sqrtshat,tau,ycm,xbjrk,p_i_fks,xiimax,xinorm
1450+ & ,xi_i_fks,y_ij_fks ,xpswgt,xjac,pass)
1451 if (.not.pass) goto 112
1452 else
1453 write (*,*) 'Error #2 in genps_fks.f',j_fks
1454@@ -750,6 +754,7 @@
1455 c remove the following if no importance sampling towards soft
1456 c singularity is performed when integrating over xi_i_hat
1457 xjac=xjac*2d0*x(1)
1458+
1459 c Check that xii is in the allowed range
1460 if( icountevts.eq.-100 .or. abs(icountevts).eq.1 )then
1461 if(xi_i_fks.gt.(1-xmrec2/shat))then
1462@@ -829,7 +834,11 @@
1463 c mother four momenta
1464 do i=0,3
1465 xp_mother(i)=xp(i,i_fks)+xp(i,j_fks)
1466- recoil(i)=xp(i,1)+xp(i,2)-xp_mother(i)
1467+ if (nincoming.eq.2) then
1468+ recoil(i)=xp(i,1)+xp(i,2)-xp_mother(i)
1469+ else
1470+ recoil(i)=xp(i,1)-xp_mother(i)
1471+ endif
1472 enddo
1473 sumrec=recoil(0)+rho(recoil)
1474 sumrec2=sumrec**2
1475@@ -843,7 +852,7 @@
1476 xdir(j)=xp_mother(j)/x3len_fks_mother
1477 enddo
1478 c Perform the boost here
1479- do i=3,nexternal
1480+ do i=nincoming+1,nexternal
1481 if(i.ne.i_fks.and.i.ne.j_fks.and.shybst.ne.0.d0)
1482 & call boostwdir2(chybst,shybst,chybstmo,xdir,xp(0,i),xp(0,i))
1483 enddo
1484@@ -1145,7 +1154,11 @@
1485 c mother four momenta
1486 do i=0,3
1487 xp_mother(i)=xp(i,i_fks)+xp(i,j_fks)
1488- recoil(i)=xp(i,1)+xp(i,2)-xp_mother(i)
1489+ if (nincoming.eq.2) then
1490+ recoil(i)=xp(i,1)+xp(i,2)-xp_mother(i)
1491+ else
1492+ recoil(i)=xp(i,1)-xp_mother(i)
1493+ endif
1494 enddo
1495 c
1496 sumrec=recoil(0)+rho(recoil)
1497@@ -1168,7 +1181,7 @@
1498 xdir(j)=xp_mother(j)/x3len_fks_mother
1499 enddo
1500 c Boost the momenta
1501- do i=3,nexternal
1502+ do i=nincoming+1,nexternal
1503 if(i.ne.i_fks.and.i.ne.j_fks.and.shybst.ne.0.d0)
1504 & call boostwdir2(chybst,shybst,chybstmo,xdir,xp(0,i),xp(0,i))
1505 enddo
1506@@ -2357,7 +2370,11 @@
1507 external dot
1508 pass=.true.
1509 do i=0,3
1510- recoilbar(i)=p_born(i,1)+p_born(i,2)-p_born(i,imother)
1511+ if (nincoming.eq.2) then
1512+ recoilbar(i)=p_born(i,1)+p_born(i,2)-p_born(i,imother)
1513+ else
1514+ recoilbar(i)=p_born(i,1)-p_born(i,imother)
1515+ endif
1516 enddo
1517 xmrec2=dot(recoilbar,recoilbar)
1518 if(xmrec2.lt.0.d0)then
1519
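Two details in genps_fks.f are worth spelling out. The recoil against which the FKS mother is reconstructed now only includes the momenta that actually exist in the initial state,

    r^\mu =
    \begin{cases}
      p_1^\mu + p_2^\mu - p_{\rm mother}^\mu , & n_{\rm incoming}=2 , \\
      p_1^\mu - p_{\rm mother}^\mu ,           & n_{\rm incoming}=1 ,
    \end{cases}

and the one-character change pb(0,1)=0d0 -> pb(i,1)=0d0 fixes a loop that was meant to zero the x and y components of the Born-level decaying particle but instead overwrote its energy (the energy is set to sqrtshat_born just above, and the z component to a tiny value for the HELAS ixxxxx routine just below).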
1520=== modified file 'Template/NLO/SubProcesses/iproc_map.f'
1521--- Template/NLO/SubProcesses/iproc_map.f 2015-03-09 18:27:17 +0000
1522+++ Template/NLO/SubProcesses/iproc_map.f 2015-10-20 14:02:27 +0000
1523@@ -211,7 +211,8 @@
1524 include "appl_common.inc"
1525 *
1526 character*200 buffer
1527- integer procnum,found,i,l,j,ll
1528+ integer procnum,i,l,j,ll,found_a,found_m
1529+ logical found_appl(mxpdflumi),found_mg(maxproc)
1530 integer nmatch_total
1531
1532 * npdflumi is the number of pdf luminosities in this particular process
1533@@ -296,7 +297,7 @@
1534 1 ((pdgs(i,j,kpdflumi),i=1,2),j=1,nproc(kpdflumi))
1535 *
1536 if(kpdflumi.le.0.or.kpdflumi.gt.mxpdflumi) then
1537- write(6,*) "In fks_singular.f"
1538+ write(6,*) "In iproc_map.f"
1539 write(6,*) "Invalid value of kpdflumi = ",kpdflumi
1540 stop
1541 endif
1542@@ -341,33 +342,29 @@
1543 * Be careful with all possible permutations in initial_parton_map!
1544 * Check all possible npdflumi conbinations until a match is found
1545 do kpdflumi=1,npdflumi
1546-* First, check that the same number of
1547-* parton subprocesses is used
1548- if(maxproc_used.ne.nproc(kpdflumi)) continue
1549- if(flav_map_debug) then
1550- write(6,*) " "
1551- write(6,*) " kpdflumi = ",kpdflumi
1552- write(6,*) " nproc(kpdflumi) = ",nproc(kpdflumi)
1553- do l=1,nproc(kpdflumi)
1554- write(6,*) l,pdgs(1,l,kpdflumi),pdgs(2,l,kpdflumi)
1555- enddo
1556- write(6,*) " "
1557- endif
1558-
1559+
1560 * Initialization
1561- found = 0
1562+ do l=1,nproc(kpdflumi)
1563+ found_appl(l) = .false.
1564+ enddo
1565+ do ll=1,maxproc_used
1566+ found_mg(ll) = .false.
1567+ enddo
1568+ found_a=0
1569+ found_m=0
1570
1571 * Look for all possible pairs of
1572 * a) pdgs(1,l,kpdflumi),pdgs(2,l,kpdflumi) and
1573 * b) IDUP(1,ll), IDUP(2,ll)
1574 * with kpdflumi fixed
1575 do l=1,nproc(kpdflumi)
1576- do ll=1,nproc(kpdflumi)
1577+ do ll=1,maxproc_used
1578 if ( ( pdgs(1,l,kpdflumi).eq.
1579 1 IDUP(1,ll).and.pdgs(2,l,kpdflumi).
1580 2 eq.IDUP(2,ll) ) )
1581 4 then
1582- found = found + 1
1583+ found_appl(l)=.true.
1584+ found_mg(ll)=.true.
1585 if(flav_map_debug) then
1586 write(6,*) "match found!"
1587 write(6,*) "pdgs = ",pdgs(1,l,kpdflumi),
1588@@ -377,8 +374,15 @@
1589 endif
1590 enddo
1591 enddo
1592+ do l=1,nproc(kpdflumi)
1593+ if (found_appl(l)) found_a=found_a+1
1594+ enddo
1595+ do ll=1,maxproc_used
1596+ if (found_mg(ll)) found_m=found_m+1
1597+ enddo
1598 *
1599- if (found.eq.nproc(kpdflumi)) then
1600+ if ( found_a.eq.nproc(kpdflumi) .and.
1601+ & found_m.eq.maxproc_used ) then
1602 if(flav_map_debug) then
1603 write(6,*) " ------------------------------- "
1604 write(6,*) " Match found!"
1605@@ -389,9 +393,9 @@
1606 nmatch_total = nmatch_total+1
1607 endif
1608 enddo
1609- if(flavour_map(nFKSprocess).eq.0) then
1610+ if (nmatch_total.ne.nFKSprocess) then
1611 write(6,*)
1612- 1 "Problem with setup_flavourmap in fks_singular.f"
1613+ 1 "Problem with setup_flavourmap in iproc_map.f"
1614 write(6,*) "nFKSprocess = ",nFKSprocess
1615 write(6,*)" flavour_map(nFKSprocess)= ",
1616 1 flavour_map(nFKSprocess)
1617@@ -399,17 +403,6 @@
1618 endif
1619 enddo
1620
1621-* write(6,*) " "
1622-* write(6,*) " nmatch_total = ", nmatch_total
1623-* write(6,*) " "
1624- if( nmatch_total.ne.fks_configs) then
1625- write(6,*)
1626- 1 "Problem with setup_flavourmap in fks_singular.f"
1627- write(6,*) "nmatch_total, fks_configs"
1628- write(6,*) nmatch_total, fks_configs
1629- stop
1630- endif
1631-
1632 * Check flavor map properly initialized
1633 * All the entries of flavour_map(1:nFKSprocess) must be from
1634 * 1 to npdflumi
1635
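The rewritten matching in iproc_map.f requires a two-sided agreement between the APPLgrid luminosity channels and the MadGraph subprocesses instead of a single match counter. A minimal Python sketch of the acceptance criterion (hypothetical helper, not part of the code above):

# Sketch only: a luminosity channel is accepted when every PDG pair on the
# APPLgrid side and every IDUP pair on the MadGraph side has found a partner.
def channel_matches(appl_pairs, mg_pairs):
    found_appl = [any(a == m for m in mg_pairs) for a in appl_pairs]
    found_mg = [any(m == a for a in appl_pairs) for m in mg_pairs]
    return all(found_appl) and all(found_mg)

# The same content in a different order still matches; an unmatched pair on
# either side does not.
assert channel_matches([(1, -1), (2, -2)], [(2, -2), (1, -1)])
assert not channel_matches([(1, -1)], [(1, -1), (21, 21)])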
1636=== modified file 'Template/NLO/SubProcesses/leshouche_inc_chooser.f'
1637--- Template/NLO/SubProcesses/leshouche_inc_chooser.f 2015-03-09 18:27:17 +0000
1638+++ Template/NLO/SubProcesses/leshouche_inc_chooser.f 2015-10-20 14:02:27 +0000
1639@@ -91,8 +91,8 @@
1640 icolup_d(1,1,nexternal,j)=-99999 ! should not be used
1641 icolup_d(1,2,nexternal,j)=-99999
1642 enddo
1643+ return
1644 endif
1645- return
1646 endif
1647
1648 open(unit=78, file='leshouche_info.dat', status='old')
1649
1650=== modified file 'Template/NLO/SubProcesses/montecarlocounter.f'
1651--- Template/NLO/SubProcesses/montecarlocounter.f 2014-10-06 16:45:08 +0000
1652+++ Template/NLO/SubProcesses/montecarlocounter.f 2015-10-20 14:02:27 +0000
1653@@ -2549,8 +2549,9 @@
1654 common/to_abrv/abrv
1655
1656 ref_scale=sqrt((1-xi)*shat)
1657- xscalemin=max(frac_low*ref_scale,scaleMClow)
1658- xscalemax=max(frac_upp*ref_scale,xscalemin+scaleMCdelta)
1659+ xscalemin=max(shower_scale_factor*frac_low*ref_scale,scaleMClow)
1660+ xscalemax=max(shower_scale_factor*frac_upp*ref_scale,
1661+ & xscalemin+scaleMCdelta)
1662 xscalemax=min(xscalemax,2d0*sqrt(ebeam(1)*ebeam(2)))
1663 xscalemin=min(xscalemin,xscalemax)
1664 c
1665
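The change in montecarlocounter.f threads the new run_card parameter shower_scale_factor (the "easier variation of the shower starting scale" noted in UpdateNotes.txt below) into the window for the shower starting scale. Schematically, with f the factor and mu_ref = sqrt((1-xi)*shat),

    \mu_{\min} = \max\left(f\, c_{\rm low}\, \mu_{\rm ref},\ {\rm scaleMClow}\right), \qquad
    \mu_{\max} = \max\left(f\, c_{\rm upp}\, \mu_{\rm ref},\ \mu_{\min} + {\rm scaleMCdelta}\right),

with mu_max still capped at 2*sqrt(ebeam(1)*ebeam(2)) and mu_min at mu_max, as before (c_low and c_upp are the existing frac_low and frac_upp constants).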
1666=== modified file 'Template/NLO/SubProcesses/setcuts.f'
1667--- Template/NLO/SubProcesses/setcuts.f 2015-07-24 18:59:18 +0000
1668+++ Template/NLO/SubProcesses/setcuts.f 2015-10-20 14:02:27 +0000
1669@@ -91,8 +91,6 @@
1670 lpp(2)=0
1671 ebeam(1)=pmass(1)/2d0
1672 ebeam(2)=pmass(1)/2d0
1673- scale=pmass(1)
1674- fixed_ren_scale=.true.
1675 endif
1676 c-check consistency of maxjetflavor in the run_card and with Nf
1677 c specified in coupl.inc
1678
1679=== modified file 'Template/NLO/SubProcesses/setscales.f'
1680--- Template/NLO/SubProcesses/setscales.f 2015-07-30 15:57:44 +0000
1681+++ Template/NLO/SubProcesses/setscales.f 2015-10-20 14:02:27 +0000
1682@@ -237,7 +237,10 @@
1683 $ ,FxFx_fac_scale
1684 c
1685 tmp=0
1686- if(ickkw.eq.3)then
1687+ if (nincoming.eq.1) then
1688+ tmp=pp(0,1) ! mass of the decaying particle
1689+ temp_scale_id='Mass of decaying particle'
1690+ elseif(ickkw.eq.3)then
1691 c FxFx merging scale:
1692 c Note that nFxFx_ren_scales includes the one scale that corresponds
1693 c to the real-emission one (and is zero for the n-body conf.). Skip
1694@@ -411,7 +414,7 @@
1695 tmp=0
1696 if(ickkw.eq.3)then
1697 c FxFx merging scale:
1698- tmp=min(FxFx_fac_scale(1),FxFx_fac_scale(2))
1699+ tmp=(FxFx_fac_scale(1)+FxFx_fac_scale(2))/2d0
1700 temp_scale_id='FxFx merging scale'
1701 elseif(imuftype.eq.1)then
1702 tmp=scale_global_reference(pp)
1703@@ -490,7 +493,10 @@
1704 parameter (iQEStype=1)
1705 c
1706 tmp=0
1707- if(iQEStype.eq.1)then
1708+ if (nincoming.eq.1) then
1709+ tmp=pp(0,1) ! mass of the decaying particle
1710+ temp_scale_id='Mass of decaying particle'
1711+ elseif(iQEStype.eq.1)then
1712 tmp=scale_global_reference(pp)
1713 elseif(iQEStype.eq.2)then
1714 do i=nincoming+1,nexternal
1715
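The setscales.f changes do two independent things: for 1 -> N decay kinematics the renormalisation and Ellis-Sexton reference scales are tied to the mass of the decaying particle (tmp = pp(0,1), i.e. its energy in the rest frame), and the FxFx factorisation scale is now the average of the two beam-side scales rather than their minimum,

    \mu_F^{\rm FxFx} = \tfrac{1}{2}\left(\mu_{F,1} + \mu_{F,2}\right)
    \quad\text{instead of}\quad
    \min\left(\mu_{F,1}, \mu_{F,2}\right),

which is the factorisation-scale fix listed in UpdateNotes.txt below.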
1716=== removed file 'Template/NLO/SubProcesses/sumres.py'
1717--- Template/NLO/SubProcesses/sumres.py 2014-07-23 10:33:38 +0000
1718+++ Template/NLO/SubProcesses/sumres.py 1970-01-01 00:00:00 +0000
1719@@ -1,241 +0,0 @@
1720-#!/usr/bin/env python
1721-
1722-#script to combine reults and tell the number of events that need
1723-# to be generated in each channel.
1724-# Replaces the sumres.f and sumres2.f files
1725-# MZ, 2011-10-22
1726-
1727-from __future__ import division
1728-import math
1729-import sys
1730-import random
1731-import os
1732-
1733-nexpected=int(sys.argv[1])
1734-nevents=int(sys.argv[2])
1735-req_acc=float(sys.argv[3])
1736-# if nevents is >=0 the script will also determine the
1737-# number of events required for each process
1738-
1739-
1740-def Mirrorprocs(p1, p2):
1741- """determine if the folder names p1, p2 (with the _N already taken out)
1742- correspond to the same process with
1743- mirrror initial state. Returns true/false"""
1744- return False
1745-
1746-file=open("res.txt")
1747-content = file.read()
1748-file.close()
1749-lines = content.split("\n")
1750-processes=[]
1751-tot=0
1752-err=0
1753-totABS=0
1754-errABS=0
1755-
1756-# open the file containing the list of directories
1757-file=open("dirs.txt")
1758-dirs = file.read().split("\n")
1759-file.close()
1760-dirs.remove('')
1761-
1762-# The syntax of lines should be first the ABS cross section for the
1763-# channel and the line after that the cross section for the same
1764-# channel.
1765-for line in range(0,len(lines),2):
1766- list = lines[line].split()
1767- if list:
1768- proc={}
1769- proc['folder'] = list[0].split('/')[0]
1770- proc['subproc'] = proc['folder'][0:proc['folder'].rfind('_')]
1771- proc['channel'] = list[0].split('/')[1]
1772- dirs.remove(os.path.join(proc['folder'], proc['channel']))
1773- proc['resultABS'] = float(list[4])
1774- proc['errorABS'] = float(list[6])
1775- proc['err_percABS'] = proc['errorABS']/proc['resultABS']*100.
1776- processes.append(proc)
1777- totABS+= proc['resultABS']
1778- errABS+= math.pow(proc['errorABS'],2)
1779- list = lines[line+1].split()
1780- if list:
1781- proc['result'] = float(list[3])
1782- proc['error'] = float(list[5])
1783- proc['err_perc'] = proc['error']/proc['result']*100.
1784- tot+= proc['result']
1785- err+= math.pow(proc['error'],2)
1786-if dirs:
1787- print "%d jobs did not terminate correctly: " % len(dirs)
1788- print '\n'.join(dirs)
1789- print "The results are probably not correct. Please check the relevant log files corresponding to the above jobs for more information."
1790-
1791-processes.sort(key = lambda proc: -proc['errorABS'])
1792-
1793-correct = len(processes) == nexpected
1794-print "Found %d correctly terminated jobs " %len(processes)
1795-if not len(processes)==nexpected:
1796- print len(processes), nexpected
1797-
1798-subprocs_string=[]
1799-for proc in processes:
1800- subprocs_string.append(proc['subproc'])
1801-subprocs_string=set(subprocs_string)
1802-
1803-content+='\n\nCross-section per integration channel:\n'
1804-for proc in processes:
1805- content+='%(folder)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% \n' % proc
1806-
1807-content+='\n\nABS cross-section per integration channel:\n'
1808-for proc in processes:
1809- content+='%(folder)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% \n' % proc
1810-
1811-content+='\n\nCross-section per subprocess:\n'
1812-#for subpr in sorted(set(subprocs)):
1813-subprocesses=[]
1814-for sub in subprocs_string:
1815- subpr={}
1816- subpr['subproc']=sub
1817- subpr['xsect']=0.
1818- subpr['err']=0.
1819- for proc in processes:
1820- if proc['subproc'] == sub:
1821- subpr['xsect'] += proc['result']
1822- subpr['err'] += math.pow(proc['error'],2)
1823- subpr['err']=math.sqrt(subpr['err'])
1824- subprocesses.append(subpr)
1825-
1826-
1827-#find and combine mirror configurations (if in v4)
1828-for i1, s1 in enumerate(subprocesses):
1829- for i2, s2 in enumerate(subprocesses):
1830- if Mirrorprocs(s1['subproc'], s2['subproc']) and i1 >= i2:
1831- s1['xsect'] += s2['xsect']
1832- s1['err'] = math.sqrt(math.pow(s1['err'],2)+ math.pow(s2['err'],2))
1833- s2['toremove'] = True
1834-
1835-new = []
1836-for s in subprocesses:
1837- try:
1838- a= s['toremove']
1839- except KeyError:
1840- new.append(s)
1841-subprocesses= new
1842-
1843-
1844-subprocesses.sort(key = lambda proc: -proc['xsect'])
1845-for subpr in subprocesses:
1846- content+= '%(subproc)20s %(xsect)10.8e %(err)6.4e\n' % subpr
1847-
1848-
1849-content+='\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%)\n' %\
1850- (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,tot, math.sqrt(err), math.sqrt(err)/tot *100.)
1851-
1852-if not correct:
1853- sys.exit('ERROR: not all jobs terminated correctly\n')
1854-
1855-file=open("res.txt", 'w')
1856-
1857-file.write(content)
1858-file.close()
1859-
1860-#determine the events for each process:
1861-if nevents>=0:
1862- if req_acc<0:
1863- req_acc2_inv=nevents
1864- else:
1865- req_acc2_inv=1/(req_acc*req_acc)
1866- #get the random number seed from the randinit file
1867- file=open("randinit")
1868- exec file
1869- file.close
1870- print "random seed found in 'randinit' is", r
1871- random.seed(r)
1872- totevts=nevents
1873- for proc in processes:
1874- proc['lhefile'] = os.path.join(proc['folder'], proc['channel'], 'events.lhe')
1875- proc['nevents'] = 0
1876- while totevts :
1877- target = random.random() * totABS
1878- crosssum = 0.
1879- i = 0
1880- while i<len(processes) and crosssum < target:
1881- proc = processes[i]
1882- crosssum += proc['resultABS']
1883- i += 1
1884- totevts -= 1
1885- i -= 1
1886- processes[i]['nevents'] += 1
1887-
1888-#check that we now have all the events in the channels
1889- totevents = sum(proc['nevents'] for proc in processes)
1890- if totevents != nevents:
1891- sys.exit('failed to obtain the correct number of events. Required: %d, Obtained: %d' \
1892- % (nevents, totevents))
1893-
1894- content_evts = ''
1895- for proc in processes:
1896- content_evts+= ' '+proc['lhefile']+' %(nevents)10d %(resultABS)10.8e 1.0 \n' % proc
1897- nevts_file = open(os.path.join(proc['folder'], proc['channel'], 'nevts'),'w')
1898- nevts_file.write('%10d\n' % proc['nevents'])
1899- nevts_file.close()
1900- if proc['channel'][1] == 'B':
1901- fileinputs = open("madinMMC_B.2")
1902- elif proc['channel'][1] == 'F':
1903- fileinputs = open("madinMMC_F.2")
1904- elif proc['channel'][1] == 'V':
1905- fileinputs = open("madinMMC_V.2")
1906- else:
1907- sys.exit("ERROR, DONT KNOW WHICH INPUTS TO USE")
1908- fileinputschannel = open(os.path.join(proc['folder'], proc['channel'], 'madinM1'),'w')
1909- i=0
1910- for line in fileinputs:
1911- i += 1
1912- if i == 2:
1913- accuracy=min(math.sqrt(totABS/(req_acc2_inv*proc['resultABS'])),0.2)
1914- fileinputschannel.write('%10.8e\n' % accuracy)
1915- elif i == 8:
1916- fileinputschannel.write('1 ! MINT mode\n')
1917- else:
1918- fileinputschannel.write(line)
1919- fileinputschannel.close()
1920- fileinputs.close()
1921-
1922- evts_file = open('nevents_unweighted', 'w')
1923- evts_file.write(content_evts)
1924- evts_file.close()
1925-
1926-# if nevents = -1 and req_acc >= 0, we need to determine the required
1927-# accuracy in each of the channels: this is for fixed order running!
1928-elif req_acc>=0 and nevents==-1:
1929- req_accABS=req_acc*abs(tot)/totABS
1930- content_evts = ''
1931- for proc in processes:
1932- if proc['channel'][0:3] == 'all':
1933- fileinputs = open("madin.all")
1934- elif proc['channel'][0:4] == 'novB':
1935- fileinputs = open("madin.novB")
1936- elif proc['channel'][0:4] == 'born':
1937- fileinputs = open("madin.born")
1938- elif proc['channel'][0:4] == 'grid':
1939- fileinputs = open("madin.grid")
1940- elif proc['channel'][0:4] == 'viSB':
1941- fileinputs = open("madin.viSB")
1942- elif proc['channel'][0:4] == 'virt':
1943- fileinputs = open("madin.virt")
1944- elif proc['channel'][0:4] == 'novi':
1945- fileinputs = open("madin.novi")
1946- else:
1947- sys.exit("ERROR, DONT KNOW WHICH INPUTS TO USE")
1948- fileinputschannel = open(os.path.join(proc['folder'], proc['channel'], 'madinM1'),'w')
1949- i=0
1950- for line in fileinputs:
1951- i += 1
1952- if i == 2:
1953- accuracy=req_accABS*math.sqrt(totABS*proc['resultABS'])
1954- fileinputschannel.write('%10.8e\n' % accuracy)
1955- elif i == 9:
1956- fileinputschannel.write('-1 ! restart from existing grids\n')
1957- else:
1958- fileinputschannel.write(line)
1959- fileinputschannel.close()
1960- fileinputs.close()
1961
1962=== modified file 'Template/NLO/SubProcesses/symmetry_fks_v3.f'
1963--- Template/NLO/SubProcesses/symmetry_fks_v3.f 2014-06-26 08:45:41 +0000
1964+++ Template/NLO/SubProcesses/symmetry_fks_v3.f 2015-10-20 14:02:27 +0000
1965@@ -541,6 +541,8 @@
1966 lname=4
1967 mname='mg'
1968 call open_bash_file(26,fname,lname,mname)
1969+ call close_bash_file(26)
1970+ open(unit=26,file='channels.txt',status='unknown')
1971 ic = 0
1972 do i=1,mapconfig(0)
1973 if (use_config(i) .gt. 0) then
1974@@ -567,15 +569,6 @@
1975 done = .false.
1976 do while (.not. done)
1977 call enCode(icode,iarray,ibase,imax)
1978- ic=ic+1
1979- if (ic .gt. ChanPerJob) then
1980- call close_bash_file(26)
1981- fname='ajob'
1982- lname=4
1983- mname='mg'
1984- call open_bash_file(26,fname,lname,mname)
1985- ic = 1
1986- endif
1987 c write(*,*) 'mapping',ic,mapconfig(i)
1988 c$$$ if (r_from_b(mapconfig(i)) .lt. 10) then
1989 c$$$ write(26,'(i1$)') r_from_b(mapconfig(i))
1990@@ -611,7 +604,7 @@
1991 enddo
1992 endif
1993 enddo
1994- call close_bash_file(26)
1995+ close(26)
1996 if (mapconfig(0) .gt. 9999) then
1997 write(*,*) 'Only writing first 9999 jobs',mapconfig(0)
1998 endif
1999
2000=== modified file 'Template/NLO/SubProcesses/write_ajob.f'
2001--- Template/NLO/SubProcesses/write_ajob.f 2012-10-18 06:17:30 +0000
2002+++ Template/NLO/SubProcesses/write_ajob.f 2015-10-20 14:02:27 +0000
2003@@ -53,7 +53,7 @@
2004 if (index(buff,'TAGTAGTAGTAGTAG').ne.0) exit
2005 write(lun,15) buff
2006 enddo
2007- write(lun,'(a$)') 'for i in '
2008+ write(lun,'(a$)') 'for i in $channel '
2009 return
2010 99 write (*,*) 'ajob_template or ajob_template_cluster '/
2011 & /'does not have the correct format'
2012
2013=== modified file 'Template/loop_material/StandAlone/Cards/MadLoopParams.dat'
2014--- Template/loop_material/StandAlone/Cards/MadLoopParams.dat 2014-10-27 08:40:30 +0000
2015+++ Template/loop_material/StandAlone/Cards/MadLoopParams.dat 2015-10-20 14:02:27 +0000
2016@@ -4,8 +4,8 @@
2017
2018 !
2019 #MLReductionLib
2020-!1|4|3|2
2021-! Default :: 1|4|3|2
2022+!1|3|2
2023+! Default :: 1|3|2
2024 ! The tensor integral reduction library.The current choices are:
2025 ! 1 | CutTools
2026 ! 2 | PJFry++
2027
2028=== modified file 'UpdateNotes.txt'
2029--- UpdateNotes.txt 2015-09-07 18:05:30 +0000
2030+++ UpdateNotes.txt 2015-10-20 14:02:27 +0000
2031@@ -1,6 +1,29 @@
2032 Update notes for MadGraph5_aMC@NLO (in reverse time order)
2033
2034-2.3.2.2 (06/09/15)
2035+<<<<<<< TREE
2036+2.3.2.2 (06/09/15)
2037+=======
2038+
2039+2.3.3(XX/XX/XX)
2040+ OM: Allow a new syntax in the param_card: instead of a single value you can enter scan:[val1, val2,...]
2041+     to perform a scan over this parameter.
2042+ OM: Added two modes for "output pythia8": one (default) for pythia8.2 and one for pythia8.1 (with --version=8.1).
2043+ RF: Rewriting of the job control for NLO processes. Better accuracy estimates for FO processes.
2044+ RF: Fix for the factorisation scale setting in FxFx merging when there is a very large difference in scale in the
2045+     non-QCD part of a process.
2046+ RF: Better discarding of numerical instabilities in the real-emission matrix elements. Only of interest for
2047+     processes which have jets at Born level but do not require generation cuts (like t-channel single top).
2048+ RF: Added an option to the run_card to allow for easier variation of the shower starting scale (NLO only).
2049+ RF: Fixed a problem in the setting of the flavour map used for runs with iAPPL >= 1.
2050+ RF: Allow decay processes to compute (partial) decay widths at NLO accuracy (fixed order only).
2051+ OM: (SysCalc interface) Allow bypassing the pdf reweighting/alpsfact reweighting.
2052+ MZ: Fixed a bug related to slurm clusters.
2053+ OM: Removed the addmasses.py script from running by default in gridpack mode.
2054+     If you want to have it running, you just have to rename the file madevent/bin/internal/addmasses_optional.py to
2055+     madevent/bin/internal/addmasses.py and it will work as before. (Does not work with the SysCalc tag.)
2056+
2057+2.3.2.2 (06/09/15)
2058+>>>>>>> MERGE-SOURCE
2059 VH: Finalized the MG5aMC-GoSam interface
2060
2061 2.3.2(20/08/15)
2062
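For the scan syntax introduced in the 2.3.3 notes above, an illustrative param_card entry would replace a single value by a list; the block and parameter used here are only an example:

Block mass
    6  scan:[165.0, 172.5, 180.0]   # MT, three-point scan

Each point is then run in turn and the resulting cross sections are collected in Events/scan_<name>.txt, as done by the param_card_iterator loop added to do_launch further down.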
2063=== modified file 'VERSION'
2064--- VERSION 2015-09-07 18:04:08 +0000
2065+++ VERSION 2015-10-20 14:02:27 +0000
2066@@ -1,5 +1,11 @@
2067+<<<<<<< TREE
2068 version = 2.3.2.2
2069 date = 2015-09-06
2070+=======
2071+version = 2.3.3
2072+date = 2015-10-20
2073+
2074+>>>>>>> MERGE-SOURCE
2075
2076
2077
2078
2079=== modified file 'aloha/__init__.py' (properties changed: -x to +x)
2080=== modified file 'aloha/aloha_fct.py' (properties changed: -x to +x)
2081=== modified file 'aloha/aloha_lib.py' (properties changed: -x to +x)
2082--- aloha/aloha_lib.py 2015-07-30 15:43:54 +0000
2083+++ aloha/aloha_lib.py 2015-10-20 14:02:27 +0000
2084@@ -1073,9 +1073,9 @@
2085 self[(0,)] = representation[(0,)]
2086 except Exception:
2087 if representation:
2088- raise LorentzObjectRepresentation.LorentzObjectRepresentationError("There is no key of (0,) in representation.")
2089+ raise LorentzObjectRepresentation.LorentzObjectRepresentationError("There is no key of (0,) in representation.")
2090 else:
2091- self[(0,)] = 0
2092+ self[(0,)] = 0
2093 else:
2094 self[(0,)] = representation
2095
2096
2097=== modified file 'aloha/aloha_object.py' (properties changed: -x to +x)
2098--- aloha/aloha_object.py 2014-02-18 21:45:48 +0000
2099+++ aloha/aloha_object.py 2015-10-20 14:02:27 +0000
2100@@ -434,12 +434,12 @@
2101 self.sub11 = aloha_lib.Variable('T%s_6' % self.particle)
2102 self.sub12 = aloha_lib.Variable('T%s_7' % self.particle)
2103 self.sub13 = aloha_lib.Variable('T%s_8' % self.particle)
2104-
2105+
2106 self.sub20 = aloha_lib.Variable('T%s_9' % self.particle)
2107 self.sub21 = aloha_lib.Variable('T%s_10' % self.particle)
2108 self.sub22 = aloha_lib.Variable('T%s_11' % self.particle)
2109 self.sub23 = aloha_lib.Variable('T%s_12' % self.particle)
2110-
2111+
2112 self.sub30 = aloha_lib.Variable('T%s_13' % self.particle)
2113 self.sub31 = aloha_lib.Variable('T%s_14' % self.particle)
2114 self.sub32 = aloha_lib.Variable('T%s_15' % self.particle)
2115
2116=== modified file 'aloha/aloha_parsers.py' (properties changed: -x to +x)
2117=== modified file 'aloha/aloha_writers.py' (properties changed: -x to +x)
2118--- aloha/aloha_writers.py 2014-12-11 15:23:43 +0000
2119+++ aloha/aloha_writers.py 2015-10-20 14:02:27 +0000
2120@@ -689,7 +689,7 @@
2121 if number.imag == 1:
2122 out = 'CI'
2123 elif number.imag == -1:
2124- out = '-CI'
2125+ out = '-CI'
2126 else:
2127 out = '%s * CI' % self.change_number_format(number.imag)
2128 else:
2129
2130=== modified file 'aloha/create_aloha.py' (properties changed: -x to +x)
2131--- aloha/create_aloha.py 2015-06-28 14:44:22 +0000
2132+++ aloha/create_aloha.py 2015-10-20 14:02:27 +0000
2133@@ -854,7 +854,7 @@
2134 # reorganize the data (in order to use optimization for a given lorentz
2135 #structure
2136 aloha.loop_mode = False
2137- # self.explicit_combine = False
2138+ # self.explicit_combine = False
2139 request = {}
2140
2141 for list_l_name, tag, outgoing in data:
2142
2143=== modified file 'aloha/template_files/__init__.py' (properties changed: -x to +x)
2144=== modified file 'aloha/template_files/aloha_functions_loop.f'
2145--- aloha/template_files/aloha_functions_loop.f 2013-11-29 07:28:53 +0000
2146+++ aloha/template_files/aloha_functions_loop.f 2015-10-20 14:02:27 +0000
2147@@ -86,6 +86,7 @@
2148
2149 pp = min(p(0),dsqrt(p(1)**2+p(2)**2+p(3)**2))
2150
2151+
2152 if ( pp.eq.rZero ) then
2153
2154 sqm(0) = dsqrt(abs(fmass)) ! possibility of negative fermion masses
2155@@ -477,13 +478,13 @@
2156
2157 sqm(0) = dsqrt(abs(fmass)) ! possibility of negative fermion masses
2158 sqm(1) = sign(sqm(0),fmass) ! possibility of negative fermion masses
2159- ip = -((1+nh)/2)
2160- im = (1-nh)/2
2161
2162- fo(5) = im * sqm(im)
2163- fo(6) = ip*nsf * sqm(im)
2164- fo(7) = im*nsf * sqm(-ip)
2165- fo(8) = ip * sqm(-ip)
2166+ im = nhel * (1+nh)/2
2167+ ip = nhel * -1 * ((1-nh)/2)
2168+ fo(5) = im * sqm(abs(ip))
2169+ fo(6) = ip*nsf * sqm(abs(ip))
2170+ fo(7) = im*nsf * sqm(abs(im))
2171+ fo(8) = ip * sqm(abs(im))
2172
2173 else
2174
2175
2176=== modified file 'aloha/template_files/wavefunctions.py' (properties changed: -x to +x)
2177--- aloha/template_files/wavefunctions.py 2013-07-23 16:48:52 +0000
2178+++ aloha/template_files/wavefunctions.py 2015-10-20 14:02:27 +0000
2179@@ -316,7 +316,7 @@
2180 elif nhel == 1:
2181 for j in range(4):
2182 for i in range(4):
2183- ft[(i,j)] = sqh*( ep[i]*e0[j] + e0[i]*ep[j] )
2184+ ft[(i,j)] = sqh*( ep[i]*e0[j] + e0[i]*ep[j] )
2185 elif nhel == 0:
2186 for j in range(4):
2187 for i in range(4):
2188@@ -324,10 +324,10 @@
2189 elif nhel == -1:
2190 for j in range(4):
2191 for i in range(4):
2192- ft[(i,j)] = sqh*( em[i]*e0[j] + e0[i]*em[j] )
2193+ ft[(i,j)] = sqh*( em[i]*e0[j] + e0[i]*em[j] )
2194
2195 else:
2196- raise Exception, 'invalid helicity TXXXXXX'
2197+ raise Exception, 'invalid helicity TXXXXXX'
2198
2199
2200
2201
2202=== modified file 'bin/mg5'
2203--- bin/mg5 2014-07-17 13:34:15 +0000
2204+++ bin/mg5 2015-10-20 14:02:27 +0000
2205@@ -28,6 +28,6 @@
2206
2207 sys.argv.pop(0)
2208 if __debug__:
2209- os.system('%s %s %s' %(sys.executable, str(exe_path) , ' '.join(sys.argv) ))
2210+ os.system('%s -tt %s %s' %(sys.executable, str(exe_path) , ' '.join(sys.argv) ))
2211 else:
2212 os.system('%s -O %s %s' %(sys.executable, str(exe_path) , ' '.join(sys.argv) ))
2213
2214=== modified file 'madgraph/__init__.py' (properties changed: -x to +x)
2215=== modified file 'madgraph/core/__init__.py' (properties changed: -x to +x)
2216=== modified file 'madgraph/core/base_objects.py' (properties changed: -x to +x)
2217--- madgraph/core/base_objects.py 2015-08-16 22:19:30 +0000
2218+++ madgraph/core/base_objects.py 2015-10-20 14:02:27 +0000
2219@@ -202,8 +202,8 @@
2220
2221 sorted_keys = ['name', 'antiname', 'spin', 'color',
2222 'charge', 'mass', 'width', 'pdg_code',
2223- 'texname', 'antitexname', 'line', 'propagating', 'propagator',
2224- 'is_part', 'self_antipart', 'ghost', 'counterterm']
2225+ 'line', 'propagator',
2226+ 'is_part', 'self_antipart', 'type', 'counterterm']
2227
2228 def default_setup(self):
2229 """Default values for all properties"""
2230@@ -216,19 +216,55 @@
2231 self['mass'] = 'ZERO'
2232 self['width'] = 'ZERO'
2233 self['pdg_code'] = 0
2234- self['texname'] = 'none'
2235- self['antitexname'] = 'none'
2236+ #self['texname'] = 'none'
2237+ #self['antitexname'] = 'none'
2238 self['line'] = 'dashed'
2239- self['propagating'] = True
2240+ #self['propagating'] = True -> removed in favor or 'line' = None
2241 self['propagator'] = ''
2242 self['is_part'] = True
2243 self['self_antipart'] = False
2244 # True if ghost, False otherwise
2245- self['ghost'] = False
2246+ #self['ghost'] = False
2247+ self['type'] = '' # empty means normal can also be ghost or goldstone
2248 # Counterterm defined as a dictionary with format:
2249 # ('ORDER_OF_COUNTERTERM',((Particle_list_PDG))):{laurent_order:CTCouplingName}
2250 self['counterterm'] = {}
2251
2252+ def get(self, name):
2253+
2254+ if name == 'ghost':
2255+ return self['type'] == 'ghost'
2256+ elif name == 'goldstone':
2257+ return self['type'] == 'goldstone'
2258+ elif name == 'propagating':
2259+ return self['line'] is not None
2260+ else:
2261+ return super(Particle, self).get(name)
2262+
2263+ def set(self, name, value, force=False):
2264+
2265+ if name in ['texname', 'antitexname']:
2266+ return True
2267+ elif name == 'propagating':
2268+ if not value:
2269+ return self.set('line', None, force=force)
2270+ elif not self.get('line'):
2271+ return self.set('line', 'dashed',force=force)
2272+ return True
2273+ elif name in ['ghost', 'goldstone']:
2274+ if self.get('type') == name:
2275+ if value:
2276+ return True
2277+ else:
2278+ return self.set('type', '', force=force)
2279+ else:
2280+ if value:
2281+ return self.set('type', name, force=force)
2282+ else:
2283+ return True
2284+ return super(Particle, self).set(name, value,force=force)
2285+
2286+
2287 def filter(self, name, value):
2288 """Filter for valid particle property values."""
2289
2290@@ -1213,7 +1249,7 @@
2291 if isinstance(id, int):
2292 try:
2293 return self.get("particle_dict")[id]
2294- except Exception:
2295+ except Exception,error:
2296 return None
2297 else:
2298 if not hasattr(self, 'name2part'):
2299@@ -1547,9 +1583,9 @@
2300 def write_param_card(self):
2301 """Write out the param_card, and return as string."""
2302
2303- import models.write_param_card as writter
2304+ import models.write_param_card as writer
2305 out = StringIO.StringIO() # it's suppose to be written in a file
2306- param = writter.ParamCardWriter(self)
2307+ param = writer.ParamCardWriter(self)
2308 param.define_output_file(out)
2309 param.write_card()
2310 return out.getvalue()
2311@@ -1577,55 +1613,118 @@
2312 def change_electroweak_mode(self, mode):
2313 """Change the electroweak mode. The only valid mode now is external.
2314 Where in top of the default MW and sw2 are external parameters."""
2315-
2316- assert mode == "external"
2317+
2318+ assert mode in ["external",set(['mz','mw','alpha'])]
2319
2320 try:
2321 W = self.get('particle_dict')[24]
2322 except KeyError:
2323- raise InvalidCmd('No W particle in the model impossible to change the EW scheme!')
2324+ raise InvalidCmd('No W particle in the model impossible to '+
2325+ 'change the EW scheme!')
2326
2327- MW = self.get_parameter(W.get('mass'))
2328- if not isinstance(MW, ParamCardVariable):
2329- newMW = ParamCardVariable(MW.name, MW.value, 'MASS', [24])
2330- if not newMW.value:
2331- newMW.value = 80.385
2332- #remove the old definition
2333- self.get('parameters')[MW.depend].remove(MW)
2334- # add the new one
2335- self.add_param(newMW, ['external'])
2336-
2337- # Now check for sw2. if not define bypass this
2338- try:
2339- sw2 = self.get_parameter('sw2')
2340- except KeyError:
2341+ if mode=='external':
2342+ MW = self.get_parameter(W.get('mass'))
2343+ if not isinstance(MW, ParamCardVariable):
2344+ newMW = ParamCardVariable(MW.name, MW.value, 'MASS', [24])
2345+ if not newMW.value:
2346+ newMW.value = 80.385
2347+ #remove the old definition
2348+ self.get('parameters')[MW.depend].remove(MW)
2349+ # add the new one
2350+ self.add_param(newMW, ['external'])
2351+
2352+ # Now check for sw2. if not define bypass this
2353 try:
2354- sw2 = self.get_parameter('mdl_sw2')
2355+ sw2 = self.get_parameter('sw2')
2356 except KeyError:
2357- sw2=None
2358-
2359- if sw2:
2360- newsw2 = ParamCardVariable(sw2.name,sw2.value, 'SMINPUTS', [4])
2361- if not newsw2.value:
2362- newsw2.value = 0.222246485786
2363- #remove the old definition
2364- self.get('parameters')[sw2.depend].remove(sw2)
2365- # add the new one
2366- self.add_param(newsw2, ['external'])
2367-
2368- def change_mass_to_complex_scheme(self):
2369+ try:
2370+ sw2 = self.get_parameter('mdl_sw2')
2371+ except KeyError:
2372+ sw2=None
2373+
2374+ if sw2:
2375+ newsw2 = ParamCardVariable(sw2.name,sw2.value, 'SMINPUTS', [4])
2376+ if not newsw2.value:
2377+ newsw2.value = 0.222246485786
2378+ #remove the old definition
2379+ self.get('parameters')[sw2.depend].remove(sw2)
2380+ # add the new one
2381+ self.add_param(newsw2, ['external'])
2382+ # Force a refresh of the parameter dictionary
2383+ self.parameters_dict = None
2384+            return True
2385+
2386+ elif mode==set(['mz','mw','alpha']):
2387+ # For now, all we support is to go from mz, Gf, alpha to mz, mw, alpha
2388+ W = self.get('particle_dict')[24]
2389+ mass = self.get_parameter(W.get('mass'))
2390+ mass_expr = 'cmath.sqrt(%(prefix)sMZ__exp__2/2. + cmath.sqrt('+\
2391+ '%(prefix)sMZ__exp__4/4. - (%(prefix)saEW*cmath.pi*%(prefix)s'+\
2392+ 'MZ__exp__2)/(%(prefix)sGf*%(prefix)ssqrt__2)))'
2393+ if 'external' in mass.depend:
2394+ # Nothing to be done
2395+ return True
2396+ match = False
2397+ if mass.expr == mass_expr%{'prefix':''}:
2398+ prefix = ''
2399+ match = True
2400+ elif mass.expr == mass_expr%{'prefix':'mdl_'}:
2401+ prefix = 'mdl_'
2402+ match = True
2403+ if match:
2404+ MW = ParamCardVariable(mass.name, mass.value, 'MASS', [24])
2405+ if not MW.value:
2406+ MW.value = 80.385
2407+ self.get('parameters')[('external',)].append(MW)
2408+ self.get('parameters')[mass.depend].remove(mass)
2409+ # Make Gf an internal parameter
2410+ new_param = ModelVariable('Gf',
2411+ '-%(prefix)saEW*%(prefix)sMZ**2*cmath.pi/(cmath.sqrt(2)*%(MW)s**2*(%(MW)s**2 - %(prefix)sMZ**2))' %\
2412+ {'MW': mass.name,'prefix':prefix}, 'complex', mass.depend)
2413+ Gf = self.get_parameter('%sGf'%prefix)
2414+ self.get('parameters')[('external',)].remove(Gf)
2415+ self.add_param(new_param, ['%saEW'%prefix])
2416+ # Force a refresh of the parameter dictionary
2417+ self.parameters_dict = None
2418+ return True
2419+ else:
2420+ return False
2421+
2422+ def change_mass_to_complex_scheme(self, toCMS=True):
2423 """modify the expression changing the mass to complex mass scheme"""
2424
2425- # 1) Find All input parameter mass and width associated
2426+ # 1) Change the 'CMSParam' of loop_qcd_qed model to 1.0 so as to remove
2427+ # the 'real' prefix fromall UVCT wf renormalization expressions.
2428+        # the 'real' prefix from all UVCT wf renormalization expressions.
2429+        # If toCMS is False, then it makes sure CMSParam is 0.0 and returns
2430+        # immediately.
2431 # Add a internal parameter and replace mass with that param
2432- # 2) Find All mass fixed by the model and width associated
2433+ # 3) Find All mass fixed by the model and width associated
2434 # -> Both need to be fixed with a real() /Imag()
2435- # 3) Find All width fixed by the model
2436- # -> Need to be fixed with a real()
2437- # 4) Fix the Yukawa mass to the value of the complex mass/ real mass
2438- # 5) Loop through all expression and modify those accordingly
2439+ # 4) Find All width set by the model
2440+ # -> Need to be set with a real()
2441+ # 5) Fix the Yukawa mass to the value of the complex mass/ real mass
2442+ # 6) Loop through all expression and modify those accordingly
2443 # Including all parameter expression as complex
2444-
2445+
2446+ try:
2447+ CMSParam = self.get_parameter('CMSParam')
2448+ except KeyError:
2449+ try:
2450+ CMSParam = self.get_parameter('mdl_CMSParam')
2451+ except KeyError:
2452+ CMSParam = None
2453+
2454+ # Handle the case where we want to make sure the CMS is turned off
2455+ if not toCMS:
2456+ if CMSParam:
2457+ CMSParam.expr = '0.0'
2458+ return
2459+
2460+ # Now handle the case where we want to turn to CMS.
2461+ if CMSParam:
2462+ CMSParam.expr = '1.0'
2463+
2464 to_change = {}
2465 mass_widths = [] # parameter which should stay real
2466 for particle in self.get('particles'):
2467@@ -1634,52 +1733,29 @@
2468 continue
2469 mass_widths.append(particle.get('width'))
2470 mass_widths.append(particle.get('mass'))
2471- if particle.get('width') == 'ZERO':
2472+ width = self.get_parameter(particle.get('width'))
2473+ if (isinstance(width.value, (complex,float)) and abs(width.value)==0.0) or \
2474+ width.name.lower() =='zero':
2475 #everything is fine since the width is zero
2476 continue
2477- width = self.get_parameter(particle.get('width'))
2478 if not isinstance(width, ParamCardVariable):
2479 width.expr = 're(%s)' % width.expr
2480- if particle.get('mass') != 'ZERO':
2481- mass = self.get_parameter(particle.get('mass'))
2482+ mass = self.get_parameter(particle.get('mass'))
2483+ if (isinstance(width.value, (complex,float)) and abs(width.value)!=0.0) or \
2484+ mass.name.lower() != 'zero':
2485 # special SM treatment to change the gauge scheme automatically.
2486- if particle.get('pdg_code') == 24:
2487- if hasattr(mass, 'expr') and mass.expr == 'cmath.sqrt(MZ__exp__2/2. + cmath.sqrt(MZ__exp__4/4. - (aEW*cmath.pi*MZ__exp__2)/(Gf*sqrt__2)))':
2488- # Make MW an external parameter
2489- MW = ParamCardVariable(mass.name, mass.value, 'MASS', [24])
2490- if not MW.value:
2491- MW.value = 80.385
2492- self.get('parameters')[('external',)].append(MW)
2493- self.get('parameters')[mass.depend].remove(mass)
2494- # Make Gf an internal parameter
2495- new_param = ModelVariable('Gf',
2496- '-aEW*MZ**2*cmath.pi/(cmath.sqrt(2)*%(MW)s**2*(%(MW)s**2 - MZ**2))' %\
2497- {'MW': mass.name}, 'complex', mass.depend)
2498- Gf = self.get_parameter('Gf')
2499- self.get('parameters')[('external',)].remove(Gf)
2500- self.add_param(new_param, ['aEW'])
2501- # Use the new mass for the future modification
2502- mass = MW
2503- #option with prefixing
2504- elif hasattr(mass, 'expr') and mass.expr == 'cmath.sqrt(mdl_MZ__exp__2/2. + cmath.sqrt(mdl_MZ__exp__4/4. - (mdl_aEW*cmath.pi*mdl_MZ__exp__2)/(mdl_Gf*mdl_sqrt__2)))':
2505- # Make MW an external parameter
2506- MW = ParamCardVariable(mass.name, mass.value, 'MASS', [24])
2507- if not MW.value:
2508- MW.value = 80.385
2509- self.get('parameters')[('external',)].append(MW)
2510- self.get('parameters')[mass.depend].remove(mass)
2511- # Make Gf an internal parameter
2512- new_param = ModelVariable('mdl_Gf',
2513- '-mdl_aEW*mdl_MZ**2*cmath.pi/(cmath.sqrt(2)*%(MW)s**2*(%(MW)s**2 - mdl_MZ**2))' %\
2514- {'MW': mass.name}, 'complex', mass.depend)
2515- Gf = self.get_parameter('mdl_Gf')
2516- self.get('parameters')[('external',)].remove(Gf)
2517- self.add_param(new_param, ['mdl_aEW'])
2518- # Use the new mass for the future modification
2519- mass = MW
2520- elif isinstance(mass, ModelVariable):
2521- logger.warning('W mass is not an external parameter. This is not adviced for the complex mass scheme.')
2522-
2523+ if particle.get('pdg_code') == 24 and isinstance(mass,
2524+ ModelVariable):
2525+ status = self.change_electroweak_mode(
2526+ set(['mz','mw','alpha']))
2527+ # Use the newly defined parameter for the W mass
2528+ mass = self.get_parameter(particle.get('mass'))
2529+ if not status:
2530+ logger.warning('The W mass is not an external '+
2531+ 'parameter in this model and the automatic change of'+
2532+                    ' electroweak scheme failed. This is not advised for '+
2533+ 'applying the complex mass scheme.')
2534+
2535 # Add A new parameter CMASS
2536 #first compute the dependencies (as,...)
2537 depend = list(set(mass.depend + width.depend))
2538@@ -3638,7 +3714,7 @@
2539 my_isids = [leg.get('ids') for leg in self.get('legs') \
2540 if not leg.get('state')]
2541 my_fsids = [leg.get('ids') for leg in self.get('legs') \
2542- if leg.get('state')]
2543+ if leg.get('state')]
2544 for i, is_id in enumerate(initial_state_ids):
2545 assert is_id in my_isids[i]
2546 for i, fs_id in enumerate(final_state_ids):
2547
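The Particle changes in base_objects.py fold the old boolean flags into two fields: 'type' (empty, 'ghost' or 'goldstone') and 'line' (None meaning non-propagating), with get/set shims keeping the legacy names usable. A stripped-down sketch of that mapping (not the actual class):

# Sketch only: how the legacy flags map onto the new 'type'/'line' fields.
class ParticleSketch(dict):
    def get(self, name):
        if name == 'ghost':
            return self['type'] == 'ghost'
        if name == 'goldstone':
            return self['type'] == 'goldstone'
        if name == 'propagating':
            return self['line'] is not None
        return self[name]

p = ParticleSketch(type='ghost', line='dashed')
assert p.get('ghost') and not p.get('goldstone') and p.get('propagating')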
2548=== modified file 'madgraph/core/color_algebra.py' (properties changed: -x to +x)
2549=== modified file 'madgraph/core/color_amp.py' (properties changed: -x to +x)
2550=== modified file 'madgraph/core/diagram_generation.py' (properties changed: -x to +x)
2551--- madgraph/core/diagram_generation.py 2015-06-28 14:44:22 +0000
2552+++ madgraph/core/diagram_generation.py 2015-10-20 14:02:27 +0000
2553@@ -26,7 +26,8 @@
2554
2555 import madgraph.core.base_objects as base_objects
2556 import madgraph.various.misc as misc
2557-from madgraph import InvalidCmd
2558+from madgraph import InvalidCmd, MadGraph5Error
2559+
2560 logger = logging.getLogger('madgraph.diagram_generation')
2561
2562
2563@@ -1552,7 +1553,7 @@
2564 "%s is not a valid ProcessDefinitionList object" % str(value)
2565
2566 if name == 'amplitudes':
2567- if not isinstance(value, diagram_generation.AmplitudeList):
2568+ if not isinstance(value, AmplitudeList):
2569 raise self.PhysicsObjectError, \
2570 "%s is not a valid AmplitudeList object" % str(value)
2571
2572
2573=== modified file 'madgraph/core/drawing.py' (properties changed: -x to +x)
2574--- madgraph/core/drawing.py 2014-10-07 05:52:01 +0000
2575+++ madgraph/core/drawing.py 2015-10-20 14:02:27 +0000
2576@@ -1131,8 +1131,8 @@
2577 return []
2578
2579 assert self.min_level <= level <= self.max_level , \
2580- 'Incorrect value of min/max level: %s <= %s <= %s' % \
2581- (self.min_level, level, self.max_level)
2582+ 'Incorrect value of min/max level: %s <= %s <= %s' % \
2583+ (self.min_level, level, self.max_level)
2584
2585
2586 # At final level we should authorize min=0 and max=1 position
2587@@ -1188,7 +1188,7 @@
2588 # Assign position to each vertex
2589 for i, vertex in enumerate(vertex_at_level):
2590 vertex.def_position((level - self.min_level) / self.nb_level,
2591- min + dist * (begin_gap + i))
2592+ min + dist * (begin_gap + i))
2593
2594 return vertex_at_level
2595
2596
2597=== modified file 'madgraph/core/helas_objects.py' (properties changed: -x to +x)
2598--- madgraph/core/helas_objects.py 2015-07-02 02:29:22 +0000
2599+++ madgraph/core/helas_objects.py 2015-10-20 14:02:27 +0000
2600@@ -3164,7 +3164,7 @@
2601 """Calculate the actual coupling orders of this diagram"""
2602
2603 wavefunctions = HelasWavefunctionList.extract_wavefunctions(\
2604- self.get('amplitudes')[0].get('mothers'))
2605+ self.get('amplitudes')[0].get('mothers'))
2606
2607 coupling_orders = {}
2608 for wf in wavefunctions + [self.get('amplitudes')[0]]:
2609
2610=== modified file 'madgraph/fks/__init__.py' (properties changed: -x to +x)
2611=== modified file 'madgraph/fks/fks_base.py' (properties changed: -x to +x)
2612--- madgraph/fks/fks_base.py 2015-05-25 20:43:18 +0000
2613+++ madgraph/fks/fks_base.py 2015-10-20 14:02:27 +0000
2614@@ -132,6 +132,9 @@
2615 perturbation = []
2616 for procdef in self['process_definitions']:
2617 soft_particles = []
2618+ # do not warn for decay processes
2619+ if [ i['state'] for i in procdef['legs']].count(False) == 1:
2620+ continue
2621 for pert in procdef['perturbation_couplings']:
2622 if pert not in perturbation:
2623 perturbation.append(pert)
2624@@ -550,10 +553,19 @@
2625 """finds the FKS real configurations for a given process"""
2626 if range(len(self.leglist)) != [l['number']-1 for l in self.leglist]:
2627 raise fks_common.FKSProcessError('Disordered numbers of leglist')
2628+
2629+ if [ i['state'] for i in self.leglist].count(False) == 1:
2630+ decay_process=True
2631+ else:
2632+ decay_process=False
2633+
2634 for i in self.leglist:
2635 i_i = i['number'] - 1
2636 self.reals.append([])
2637- self.splittings[i_i] = fks_common.find_splittings(i, self.born_proc['model'], {}, pert_order)
2638+ if decay_process and not i['state']:
2639+ self.splittings[i_i]=[]
2640+ else:
2641+ self.splittings[i_i] = fks_common.find_splittings(i, self.born_proc['model'], {}, pert_order)
2642 for split in self.splittings[i_i]:
2643 self.reals[i_i].append(
2644 fks_common.insert_legs(self.leglist, i, split,pert=pert_order))
2645
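The fks_base.py changes identify a decay process by counting initial-state legs, and in that case skip both the soft-particle warning and the generation of splittings for the single incoming leg. A minimal sketch of the test (hypothetical leg dictionaries):

# Sketch only: a process definition is a 1 -> N decay when exactly one leg
# has state=False, i.e. is in the initial state.
def is_decay_process(legs):
    return [leg['state'] for leg in legs].count(False) == 1

legs_decay = [{'state': False}, {'state': True}, {'state': True}]
legs_scatter = [{'state': False}, {'state': False}, {'state': True}]
assert is_decay_process(legs_decay)
assert not is_decay_process(legs_scatter)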
2646=== modified file 'madgraph/fks/fks_common.py' (properties changed: -x to +x)
2647=== modified file 'madgraph/fks/fks_helas_objects.py' (properties changed: -x to +x)
2648=== modified file 'madgraph/interface/__init__.py' (properties changed: -x to +x)
2649=== modified file 'madgraph/interface/amcatnlo_interface.py' (properties changed: -x to +x)
2650--- madgraph/interface/amcatnlo_interface.py 2015-08-12 20:50:38 +0000
2651+++ madgraph/interface/amcatnlo_interface.py 2015-10-20 14:02:27 +0000
2652@@ -30,6 +30,7 @@
2653 import madgraph.interface.extended_cmd as cmd
2654 import madgraph.interface.madgraph_interface as mg_interface
2655 import madgraph.interface.madevent_interface as me_interface
2656+import madgraph.interface.extended_cmd as extended_cmd
2657 import madgraph.interface.amcatnlo_run_interface as run_interface
2658 import madgraph.interface.launch_ext_program as launch_ext
2659 import madgraph.interface.loop_interface as Loop_interface
2660@@ -542,8 +543,13 @@
2661 me.get('processes')[0].set('uid', uid)
2662 for fksreal in me.real_processes:
2663 # Pick out all initial state particles for the two beams
2664- initial_states.append(sorted(list(set((p.get_initial_pdg(1),p.get_initial_pdg(2)) for \
2665- p in fksreal.matrix_element.get('processes')))))
2666+ try:
2667+ initial_states.append(sorted(list(set((p.get_initial_pdg(1),p.get_initial_pdg(2)) for \
2668+ p in fksreal.matrix_element.get('processes')))))
2669+ except IndexError:
2670+ initial_states.append(sorted(list(set((p.get_initial_pdg(1)) for \
2671+ p in fksreal.matrix_element.get('processes')))))
2672+
2673
2674 # remove doubles from the list
2675 checked = []
2676@@ -569,10 +575,10 @@
2677
2678 #_curr_matrix_element is a FKSHelasMultiProcess Object
2679 self._fks_directories = []
2680- proc_charac = banner_mod.ProcCharacteristic()
2681+ proc_charac = self._curr_exporter.proc_characteristic
2682 for charac in ['has_isr', 'has_fsr', 'has_loops']:
2683 proc_charac[charac] = self._curr_matrix_elements[charac]
2684- proc_charac.write(pjoin(path, 'proc_characteristics'))
2685+
2686
2687 for ime, me in \
2688 enumerate(self._curr_matrix_elements.get('matrix_elements')):
2689@@ -624,7 +630,7 @@
2690 # self.options['automatic_html_opening'] = False
2691
2692 if options['interactive']:
2693- if hasattr(self, 'do_shell'):
2694+ if isinstance(self, extended_cmd.CmdShell):
2695 ME = run_interface.aMCatNLOCmdShell(me_dir=argss[0], options=self.options)
2696 else:
2697 ME = run_interface.aMCatNLOCmd(me_dir=argss[0],options=self.options)
2698@@ -636,7 +642,9 @@
2699 stop = self.define_child_cmd_interface(ME)
2700 return stop
2701
2702- ext_program = launch_ext.aMCatNLOLauncher(argss[0], self, run_mode=argss[1], **options)
2703+ ext_program = launch_ext.aMCatNLOLauncher(argss[0], self, run_mode=argss[1],
2704+ shell = isinstance(self, extended_cmd.CmdShell),
2705+ **options)
2706 ext_program.run()
2707
2708
2709
2710=== modified file 'madgraph/interface/amcatnlo_run_interface.py' (properties changed: -x to +x)
2711--- madgraph/interface/amcatnlo_run_interface.py 2015-09-05 17:49:40 +0000
2712+++ madgraph/interface/amcatnlo_run_interface.py 2015-10-20 14:02:27 +0000
2713@@ -119,6 +119,11 @@
2714 #this can be improved/better written to handle the output
2715 misc.call(['./%s' % (test)], cwd=this_dir,
2716 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'))
2717+ if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')) :
2718+ tf=tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz',
2719+ dereference=True)
2720+ tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources')
2721+ tf.close()
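
The new block tars up the MadLoop5_resources directory produced by the check_poles test so that it can be shipped as a single gzipped file. The calls are plain standard-library tarfile; a self-contained sketch with made-up paths and a placeholder file name:

    import os
    import tarfile
    import tempfile

    this_dir = tempfile.mkdtemp()
    os.makedirs(os.path.join(this_dir, 'MadLoop5_resources'))
    open(os.path.join(this_dir, 'MadLoop5_resources', 'example.dat'), 'w').close()

    # same call pattern as above: follow symlinks and store the directory
    # under its bare name inside the archive
    tf = tarfile.open(os.path.join(this_dir, 'MadLoop5_resources.tar.gz'),
                      'w:gz', dereference=True)
    tf.add(os.path.join(this_dir, 'MadLoop5_resources'),
           arcname='MadLoop5_resources')
    tf.close()

    print(tarfile.open(os.path.join(this_dir, 'MadLoop5_resources.tar.gz')).getnames())
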
2722
2723 if not options['reweightonly']:
2724 misc.compile(['gensym'], cwd=this_dir, job_specs = False)
2725@@ -1155,7 +1160,7 @@
2726 functions, such as generate_events or calculate_xsect
2727 mode gives the list of switch needed for the computation (usefull for banner_run)
2728 """
2729-
2730+
2731 if not argss and not options:
2732 self.start_time = time.time()
2733 argss = self.split_arg(line)
2734@@ -1164,6 +1169,7 @@
2735 options = options.__dict__
2736 self.check_launch(argss, options)
2737
2738+
2739 if 'run_name' in options.keys() and options['run_name']:
2740 self.run_name = options['run_name']
2741 # if a dir with the given run_name already exists
2742@@ -1183,6 +1189,7 @@
2743
2744 if not switch:
2745 mode = argss[0]
2746+
2747 if mode in ['LO', 'NLO']:
2748 options['parton'] = True
2749 mode = self.ask_run_configuration(mode, options)
2750@@ -1202,7 +1209,7 @@
2751 self.compile(mode, options)
2752 evt_file = self.run(mode, options)
2753
2754- if int(self.run_card['nevents']) == 0 and not mode in ['LO', 'NLO']:
2755+ if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
2756 logger.info('No event file generated: grids have been set-up with a '\
2757 'relative precision of %s' % self.run_card['req_acc'])
2758 return
2759@@ -1222,14 +1229,40 @@
2760
2761
2762 self.update_status('', level='all', update_results=True)
2763- if int(self.run_card['ickkw']) == 3 and mode in ['noshower', 'aMC@NLO']:
2764+ if self.run_card['ickkw'] == 3 and mode in ['noshower', 'aMC@NLO']:
2765 logger.warning("""You are running with FxFx merging enabled.
2766 To be able to merge samples of various multiplicities without double counting,
2767 you have to remove some events after showering 'by hand'.
2768 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
2769
2770
2771-
2772+ #check if the param_card defines a scan.
2773+ if self.param_card_iterator:
2774+ param_card_iterator = self.param_card_iterator
2775+            self.param_card_iterator = [] # avoid that the next generate goes through here again
2776+ param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
2777+ orig_name = self.run_name
2778+            # go through the scan
2779+ for i,card in enumerate(param_card_iterator):
2780+ card.write(pjoin(self.me_dir,'Cards','param_card.dat'))
2781+ if not options['force']:
2782+ options['force'] = True
2783+ if options['run_name']:
2784+ options['run_name'] = '%s_%s' % (orig_name, i+1)
2785+ if not argss:
2786+ argss = [mode, "-f"]
2787+ elif argss[0] == "auto":
2788+ argss[0] = mode
2789+ self.do_launch("", options=options, argss=argss, switch=switch)
2790+ #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False)
2791+ param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
2792+ #restore original param_card
2793+ param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
2794+ name = misc.get_scan_name(orig_name, self.run_name)
2795+ path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
2796+ logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK')
2797+ param_card_iterator.write_summary(path)
2798+
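
The added block turns a param_card that defines a scan into a loop of launches: each card is written to Cards/param_card.dat, the launch is rerun with a suffixed run name, and the cross sections are collected into a scan_*.txt summary. A stripped-down sketch of that driver loop, with a hypothetical iterator object standing in for the real param_card iterator and launch() standing for self.do_launch():

    # Sketch only: FakeIterator and launch() are illustrative stand-ins.
    class FakeIterator(object):
        def __init__(self, cards):
            self.cards = cards
            self.entries = []
        def __iter__(self):
            return iter(self.cards)
        def store_entry(self, run_name, cross):
            self.entries.append((run_name, cross))
        def write_summary(self):
            for run_name, cross in self.entries:
                print('%-12s %e' % (run_name, cross))

    def launch(card):
        return 1.0 + 0.1 * card   # fake cross section

    iterator = FakeIterator([1, 2, 3])
    orig_name = 'run_01'
    iterator.store_entry(orig_name, launch(0))          # result of the first launch
    for i, card in enumerate(iterator):
        run_name = '%s_%s' % (orig_name, i + 1)         # run_01_1, run_01_2, ...
        iterator.store_entry(run_name, launch(card))    # rerun with the next card
    iterator.write_summary()
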
2799 ############################################################################
2800 def do_compile(self, line):
2801 """Advanced commands: just compile the executables """
2802@@ -1248,59 +1281,11 @@
2803
2804 self.update_status('', level='all', update_results=True)
2805
2806- def print_results_in_shell(self, data):
2807- """Have a nice results prints in the shell,
2808- data should be of type: gen_crossxhtml.OneTagResults"""
2809- if not data:
2810- return
2811- logger.info(" === Results Summary for run: %s tag: %s ===\n" % (data['run_name'],data['tag']))
2812- if self.ninitial == 1:
2813- logger.info(" Width : %.4g +- %.4g GeV" % (data['cross'], data['error']))
2814- else:
2815- logger.info(" Cross-section : %.4g +- %.4g pb" % (data['cross'], data['error']))
2816- logger.info(" Nb of events : %s" % data['nb_event'] )
2817- #if data['cross_pythia'] and data['nb_event_pythia']:
2818- # if self.ninitial == 1:
2819- # logger.info(" Matched Width : %.4g +- %.4g GeV" % (data['cross_pythia'], data['error_pythia']))
2820- # else:
2821- # logger.info(" Matched Cross-section : %.4g +- %.4g pb" % (data['cross_pythia'], data['error_pythia']))
2822- # logger.info(" Nb of events after Matching : %s" % data['nb_event_pythia'])
2823- # if self.run_card['use_syst'] in self.true:
2824- # logger.info(" Be carefull that matched information are here NOT for the central value. Refer to SysCalc output for it")
2825- logger.info(" " )
2826-
2827- def print_results_in_file(self, data, path, mode='w'):
2828- """Have a nice results prints in the shell,
2829- data should be of type: gen_crossxhtml.OneTagResults"""
2830- if not data:
2831- return
2832-
2833- fsock = open(path, mode)
2834-
2835- fsock.write(" === Results Summary for run: %s tag: %s process: %s ===\n" % \
2836- (data['run_name'],data['tag'], os.path.basename(self.me_dir)))
2837-
2838- if self.ninitial == 1:
2839- fsock.write(" Width : %.4g +- %.4g GeV\n" % (data['cross'], data['error']))
2840- else:
2841- fsock.write(" Cross-section : %.4g +- %.4g pb\n" % (data['cross'], data['error']))
2842- fsock.write(" Nb of events : %s\n" % data['nb_event'] )
2843- #if data['cross_pythia'] and data['nb_event_pythia']:
2844- # if self.ninitial == 1:
2845- # fsock.write(" Matched Width : %.4g +- %.4g GeV\n" % (data['cross_pythia'], data['error_pythia']))
2846- # else:
2847- # fsock.write(" Matched Cross-section : %.4g +- %.4g pb\n" % (data['cross_pythia'], data['error_pythia']))
2848- # fsock.write(" Nb of events after Matching : %s\n" % data['nb_event_pythia'])
2849- fsock.write(" \n" )
2850-
2851-
2852-
2853-
2854
2855 def update_random_seed(self):
2856 """Update random number seed with the value from the run_card.
2857 If this is 0, update the number according to a fresh one"""
2858- iseed = int(self.run_card['iseed'])
2859+ iseed = self.run_card['iseed']
2860 if iseed == 0:
2861 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
2862 iseed = int(randinit.read()[2:]) + 1
2863@@ -1317,206 +1302,95 @@
2864 if not 'only_generation' in options.keys():
2865 options['only_generation'] = False
2866
2867+ # for second step in applgrid mode, do only the event generation step
2868 if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
2869 options['only_generation'] = True
2870 self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
2871-
2872- if self.cluster_mode == 1:
2873- cluster_name = self.options['cluster_type']
2874- self.cluster = cluster.from_name[cluster_name](**self.options)
2875- if self.cluster_mode == 2:
2876- try:
2877- import multiprocessing
2878- if not self.nb_core:
2879- try:
2880- self.nb_core = int(self.options['nb_core'])
2881- except TypeError:
2882- self.nb_core = multiprocessing.cpu_count()
2883- logger.info('Using %d cores' % self.nb_core)
2884- except ImportError:
2885- self.nb_core = 1
2886- logger.warning('Impossible to detect the number of cores => Using One.\n'+
2887- 'Use set nb_core X in order to set this number and be able to'+
2888- 'run in multicore.')
2889-
2890- self.cluster = cluster.MultiCore(**self.options)
2891+ self.setup_cluster_or_multicore()
2892 self.update_random_seed()
2893 #find and keep track of all the jobs
2894 folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
2895 'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
2896 folder_names['noshower'] = folder_names['aMC@NLO']
2897 folder_names['noshowerLO'] = folder_names['aMC@LO']
2898- job_dict = {}
2899 p_dirs = [d for d in \
2900 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
2901- #find jobs and clean previous results
2902- if not options['only_generation'] and not options['reweightonly']:
2903- self.update_status('Cleaning previous results', level=None)
2904- for dir in p_dirs:
2905- job_dict[dir] = [file for file in \
2906- os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
2907- if file.startswith('ajob')]
2908- #find old folders to be removed
2909- for obj in folder_names[mode]:
2910- to_rm = [file for file in \
2911- os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
2912- if file.startswith(obj[:-1]) and \
2913- (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
2914- os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
2915- #always clean dirs for the splitted event generation
2916- # do not include the born_G/ grid_G which should be kept when
2917- # doing a f.o. run keeping old grids
2918- to_always_rm = [file for file in \
2919- os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
2920- if file.startswith(obj[:-1]) and
2921- '_' in file and not '_G' in file and \
2922- (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
2923- os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
2924-
2925- if not options['only_generation'] and not options['reweightonly']:
2926- to_always_rm.extend(to_rm)
2927- if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
2928- to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
2929- files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
2930-
2931- mcatnlo_status = ['Setting up grid', 'Computing upper envelope', 'Generating events']
2932-
2933- if self.run_card['iappl'] == 2:
2934- self.applgrid_distribute(options,mode,p_dirs)
2935+ #Clean previous results
2936+ self.clean_previous_results(options,p_dirs,folder_names[mode])
2937+
2938+ mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']
2939+
2940
2941 if options['reweightonly']:
2942 event_norm=self.run_card['event_norm']
2943- nevents=int(self.run_card['nevents'])
2944+ nevents=self.run_card['nevents']
2945 return self.reweight_and_collect_events(options, mode, nevents, event_norm)
2946
2947 devnull = os.open(os.devnull, os.O_RDWR)
2948+
2949 if mode in ['LO', 'NLO']:
2950 # this is for fixed order runs
2951 mode_dict = {'NLO': 'all', 'LO': 'born'}
2952 logger.info('Doing fixed order %s' % mode)
2953 req_acc = self.run_card['req_acc_FO']
2954- if not options['only_generation'] and req_acc != -1:
2955- self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], 0, '-1', '6','0.10')
2956- self.update_status('Setting up grids', level=None)
2957- self.run_all(job_dict, [['0', mode_dict[mode], '0']], 'Setting up grids')
2958- elif not options['only_generation']:
2959- npoints = self.run_card['npoints_FO_grid']
2960- niters = self.run_card['niters_FO_grid']
2961- self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], 0, npoints, niters)
2962- self.update_status('Setting up grids', level=None)
2963- self.run_all(job_dict, [['0', mode_dict[mode], '0']], 'Setting up grids')
2964-
2965- npoints = self.run_card['npoints_FO']
2966- niters = self.run_card['niters_FO']
2967- self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], -1, npoints, niters)
2968- # collect the results and logs
2969- self.collect_log_files(folder_names[mode], 0)
2970- p = misc.Popen(['./combine_results_FO.sh', str(req_acc), '%s_G*' % mode_dict[mode]], \
2971- stdout=subprocess.PIPE, \
2972- cwd=pjoin(self.me_dir, 'SubProcesses'))
2973- output = p.communicate()
2974-
2975- self.cross_sect_dict = self.read_results(output, mode)
2976- self.print_summary(options, 0, mode)
2977- cross, error = sum_html.make_all_html_results(self, ['%s*' % mode_dict[mode]])
2978- self.results.add_detail('cross', cross)
2979- self.results.add_detail('error', error)
2980-
2981- self.update_status('Computing cross-section', level=None)
2982- self.run_all(job_dict, [['0', mode_dict[mode], '0', mode_dict[mode]]], 'Computing cross-section')
2983-
2984- # collect the results and logs
2985- self.collect_log_files(folder_names[mode], 1)
2986- p = misc.Popen(['./combine_results_FO.sh', '-1'] + folder_names[mode], \
2987- stdout=subprocess.PIPE,
2988- cwd=pjoin(self.me_dir, 'SubProcesses'))
2989- output = p.communicate()
2990- self.cross_sect_dict = self.read_results(output, mode)
2991-
2992- # collect the scale and PDF uncertainties
2993- scale_pdf_info={}
2994- if self.run_card['reweight_scale'] or self.run_card['reweight_PDF']:
2995- data_files=[]
2996- for dir in p_dirs:
2997- for obj in folder_names[mode]:
2998- for file in os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)):
2999- if file.startswith(obj[:-1]) and \
3000- (os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file,'scale_pdf_dependence.dat'))):
3001- data_files.append(pjoin(dir,file,'scale_pdf_dependence.dat'))
3002- scale_pdf_info = self.pdf_scale_from_reweighting(data_files)
3003- # print the results:
3004- self.print_summary(options, 1, mode, scale_pdf_info)
3005-
3006- files.cp(pjoin(self.me_dir, 'SubProcesses', 'res.txt'),
3007- pjoin(self.me_dir, 'Events', self.run_name))
3008-
3009- if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
3010- misc.call(['./combine_plots_FO.sh'] + folder_names[mode], \
3011- stdout=devnull,
3012- cwd=pjoin(self.me_dir, 'SubProcesses'))
3013- files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
3014- pjoin(self.me_dir, 'Events', self.run_name))
3015- logger.info('The results of this run and the TopDrawer file with the plots' + \
3016- ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
3017- elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
3018- self.combine_plots_HwU(folder_names[mode])
3019- files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.HwU'),
3020- pjoin(self.me_dir, 'Events', self.run_name))
3021- files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.gnuplot'),
3022- pjoin(self.me_dir, 'Events', self.run_name))
3023- try:
3024- misc.call(['gnuplot','MADatNLO.gnuplot'],\
3025- stdout=os.open(os.devnull, os.O_RDWR),\
3026- stderr=os.open(os.devnull, os.O_RDWR),\
3027- cwd=pjoin(self.me_dir, 'Events', self.run_name))
3028- except Exception:
3029- pass
3030-
3031-
3032- logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
3033- ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
3034- elif self.analyse_card['fo_analysis_format'].lower() == 'root':
3035- misc.call(['./combine_root.sh'] + folder_names[mode], \
3036- stdout=devnull,
3037- cwd=pjoin(self.me_dir, 'SubProcesses'))
3038- files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
3039- pjoin(self.me_dir, 'Events', self.run_name))
3040- logger.info('The results of this run and the ROOT file with the plots' + \
3041- ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
3042- else:
3043- logger.info('The results of this run' + \
3044- ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
3045-
3046- cross, error = sum_html.make_all_html_results(self, folder_names[mode])
3047- self.results.add_detail('cross', cross)
3048- self.results.add_detail('error', error)
3049- if self.run_card['iappl'] != 0:
3050- self.applgrid_combine(cross,error)
3051+
3052+ # Re-distribute the grids for the 2nd step of the applgrid
3053+ # running
3054+ if self.run_card['iappl'] == 2:
3055+ self.applgrid_distribute(options,mode_dict[mode],p_dirs)
3056+
3057+ # create a list of dictionaries "jobs_to_run" with all the
3058+ # jobs that need to be run
3059+ integration_step=-1
3060+ jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
3061+ req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
3062+ self.prepare_directories(jobs_to_run,mode)
3063+
3064+ # loop over the integration steps. After every step, check
3065+ # if we have the required accuracy. If this is the case,
3066+ # stop running, else do another step.
3067+ while True:
3068+ integration_step=integration_step+1
3069+ self.run_all_jobs(jobs_to_run,integration_step)
3070+ self.collect_log_files(jobs_to_run,integration_step)
3071+ jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
3072+ jobs_to_collect,integration_step,mode,mode_dict[mode])
3073+ if not jobs_to_run:
3074+ # there are no more jobs to run (jobs_to_run is empty)
3075+ break
3076+ # We are done.
3077+ self.finalise_run_FO(folder_names[mode],jobs_to_collect)
3078 self.update_status('Run complete', level='parton', update_results=True)
3079-
3080 return
3081
3082 elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
3083+ if self.ninitial == 1:
3084+ raise aMCatNLOError('Decay processes can only be run at fixed order.')
3085+ mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
3086+ 'noshower': 'all', 'noshowerLO': 'born'}
3087 shower = self.run_card['parton_shower'].upper()
3088- nevents = int(self.run_card['nevents'])
3089+ nevents = self.run_card['nevents']
3090 req_acc = self.run_card['req_acc']
3091- if nevents == 0 and float(req_acc) < 0 :
3092+ if nevents == 0 and req_acc < 0 :
3093 raise aMCatNLOError('Cannot determine the required accuracy from the number '\
3094 'of events, because 0 events requested. Please set '\
3095- 'the "req_acc" parameter in the run_card to a value between 0 and 1')
3096- elif float(req_acc) >1 or float(req_acc) == 0 :
3097+ 'the "req_acc" parameter in the run_card to a value '\
3098+ 'between 0 and 1')
3099+ elif req_acc >1 or req_acc == 0 :
3100 raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
3101 'be between larger than 0 and smaller than 1, '\
3102- 'or set to -1 for automatic determination. Current value is %s' % req_acc)
3103+ 'or set to -1 for automatic determination. Current '\
3104+ 'value is %f' % req_acc)
3105 # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card)
3106- elif float(req_acc) < 0 and nevents > 1000000 :
3107- req_acc='0.001'
3108+ elif req_acc < 0 and nevents > 1000000 :
3109+ req_acc=0.001
3110
3111 shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']
3112
3113 if not shower in shower_list:
3114- raise aMCatNLOError('%s is not a valid parton shower. Please use one of the following: %s' \
3115- % (shower, ', '.join(shower_list)))
3116+ raise aMCatNLOError('%s is not a valid parton shower. '\
3117+ 'Please use one of the following: %s' \
3118+ % (shower, ', '.join(shower_list)))
3119
3120 # check that PYTHIA6PT is not used for processes with FSR
3121 if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
3122@@ -1529,98 +1403,556 @@
3123 elif options['only_generation']:
3124 logger.info('Generating events starting from existing results')
3125
3126-
3127- for i, status in enumerate(mcatnlo_status):
3128- #check if need to split jobs
3129- # at least one channel must have enough events
3130- try:
3131- nevents_unweighted = open(pjoin(self.me_dir,
3132- 'SubProcesses',
3133- 'nevents_unweighted')).read().split('\n')
3134- except IOError:
3135- nevents_unweighted = []
3136-
3137- split = i == 2 and \
3138- int(self.run_card['nevt_job']) > 0
3139-
3140- if i == 2 or not options['only_generation']:
3141- # if the number of events requested is zero,
3142- # skip mint step 2
3143- if i==2 and nevents==0:
3144- self.print_summary(options, 2,mode)
3145- return
3146-
3147- if split:
3148- # split the event generation
3149- misc.call([pjoin(self.me_dir, 'bin', 'internal', 'split_jobs.py')] + \
3150- [str(self.run_card['nevt_job'])],
3151- stdout = devnull,
3152- cwd = pjoin(self.me_dir, 'SubProcesses'))
3153- assert os.path.exists(pjoin(self.me_dir, 'SubProcesses',
3154- 'nevents_unweighted_splitted'))
3155-
3156- self.update_status(status, level='parton')
3157- if mode in ['aMC@NLO', 'noshower']:
3158- self.write_madinMMC_file(pjoin(self.me_dir, 'SubProcesses'), 'all', i)
3159- self.run_all(job_dict, [['2', 'F', '%d' % i]], status, split_jobs = split)
3160-
3161- elif mode in ['aMC@LO', 'noshowerLO']:
3162- self.write_madinMMC_file(
3163- pjoin(self.me_dir, 'SubProcesses'), 'born', i)
3164- self.run_all(job_dict,
3165- [['2', 'B', '%d' % i]],
3166- '%s at LO' % status, split_jobs = split)
3167-
3168- if (i < 2 and not options['only_generation']) or i == 1 :
3169- # collect the results and logs
3170- self.collect_log_files(folder_names[mode], i)
3171- p = misc.Popen(['./combine_results.sh'] + \
3172- ['%d' % i,'%d' % nevents, '%s' % req_acc ] + \
3173- folder_names[mode],
3174- stdout=subprocess.PIPE,
3175- cwd = pjoin(self.me_dir, 'SubProcesses'))
3176- output = p.communicate()
3177- files.cp(pjoin(self.me_dir, 'SubProcesses', 'res_%d.txt' % i), \
3178- pjoin(self.me_dir, 'Events', self.run_name))
3179-
3180- self.cross_sect_dict = self.read_results(output, mode)
3181- self.print_summary(options, i, mode)
3182-
3183- cross, error = sum_html.make_all_html_results(self, folder_names[mode])
3184- self.results.add_detail('cross', cross)
3185- self.results.add_detail('error', error)
3186-
3187- #check that split jobs are all correctly terminated
3188- if split:
3189- self.check_event_files()
3190-
3191- if self.cluster_mode == 1:
3192- #if cluster run, wait 15 sec so that event files are transferred back
3193- self.update_status(
3194+ jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
3195+ req_acc,mode_dict[mode],1,mode,fixed_order=False)
3196+
3197+ # Make sure to update all the jobs to be ready for the event generation step
3198+ if options['only_generation']:
3199+ jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
3200+ jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
3201+ else:
3202+ self.prepare_directories(jobs_to_run,mode,fixed_order=False)
3203+
3204+
3205+ # Main loop over the three MINT generation steps:
3206+ for mint_step, status in enumerate(mcatnlo_status):
3207+ if options['only_generation'] and mint_step < 2:
3208+ continue
3209+ self.update_status(status, level='parton')
3210+ self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
3211+ self.collect_log_files(jobs_to_run,mint_step)
3212+ if mint_step+1==2 and nevents==0:
3213+ self.print_summary(options,2,mode)
3214+ return
3215+ jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
3216+ jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)
3217+ # Sanity check on the event files. If error the jobs are resubmitted
3218+ self.check_event_files(jobs_to_collect)
3219+
3220+ if self.cluster_mode == 1:
3221+ #if cluster run, wait 10 sec so that event files are transferred back
3222+ self.update_status(
3223 'Waiting while files are transferred back from the cluster nodes',
3224 level='parton')
3225- time.sleep(10)
3226- if split:
3227- files.cp(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted'), \
3228- pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted'))
3229-
3230-
3231- event_norm=self.run_card['event_norm']
3232- self.collect_log_files(folder_names[mode], 2)
3233- return self.reweight_and_collect_events(options, mode, nevents, event_norm)
3234-
3235- def combine_plots_HwU(self,folder_names):
3236+ time.sleep(10)
3237+
3238+ event_norm=self.run_card['event_norm']
3239+ return self.reweight_and_collect_events(options, mode, nevents, event_norm)
3240+
3241+ def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
3242+ integration_step,mode,fixed_order=True):
3243+ """Creates a list of dictionaries with all the jobs to be run"""
3244+ jobs_to_run=[]
3245+ if not options['only_generation']:
3246+ # Fresh, new run. Check all the P*/channels.txt files
3247+ # (created by the 'gensym' executable) to set-up all the
3248+ # jobs using the default inputs.
3249+ npoints = self.run_card['npoints_FO_grid']
3250+ niters = self.run_card['niters_FO_grid']
3251+ for p_dir in p_dirs:
3252+ try:
3253+ with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
3254+ channels=chan_file.readline().split()
3255+ except IOError:
3256+ logger.warning('No integration channels found for contribution %s' % p_dir)
3257+ continue
3258+ for channel in channels:
3259+ job={}
3260+ job['p_dir']=p_dir
3261+ job['channel']=channel
3262+ job['split']=0
3263+ if fixed_order and req_acc == -1:
3264+ job['accuracy']=0
3265+ job['niters']=niters
3266+ job['npoints']=npoints
3267+ elif fixed_order and req_acc > 0:
3268+ job['accuracy']=0.10
3269+ job['niters']=6
3270+ job['npoints']=-1
3271+ elif not fixed_order:
3272+ job['accuracy']=0.03
3273+ job['niters']=12
3274+ job['npoints']=-1
3275+ else:
3276+ raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
3277+ 'between 0 and 1 or set it equal to -1.')
3278+ job['mint_mode']=0
3279+ job['run_mode']=run_mode
3280+ job['wgt_frac']=1.0
3281+ jobs_to_run.append(job)
3282+ jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs
3283+ else:
3284+ # if options['only_generation'] is true, we need to loop
3285+ # over all the existing G* directories and create the jobs
3286+ # from there.
3287+ name_suffix={'born' :'B', 'all':'F'}
3288+ for p_dir in p_dirs:
3289+ for chan_dir in os.listdir(pjoin(self.me_dir,'SubProcesses',p_dir)):
3290+ if ((chan_dir.startswith(run_mode+'_G') and fixed_order) or\
3291+ (chan_dir.startswith('G'+name_suffix[run_mode]) and (not fixed_order))) and \
3292+ (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)) or \
3293+ os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir))):
3294+ job={}
3295+ job['p_dir']=p_dir
3296+ if fixed_order:
3297+ channel=chan_dir.split('_')[1]
3298+ job['channel']=channel[1:] # remove the 'G'
3299+ if len(chan_dir.split('_')) == 3:
3300+ split=int(chan_dir.split('_')[2])
3301+ else:
3302+ split=0
3303+ else:
3304+ if len(chan_dir.split('_')) == 2:
3305+ split=int(chan_dir.split('_')[1])
3306+ channel=chan_dir.split('_')[0]
3307+ job['channel']=channel[2:] # remove the 'G'
3308+ else:
3309+ job['channel']=chan_dir[2:] # remove the 'G'
3310+ split=0
3311+ job['split']=split
3312+ job['run_mode']=run_mode
3313+ job['dirname']=pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)
3314+ job['wgt_frac']=1.0
3315+ if not fixed_order: job['mint_mode']=1
3316+ jobs_to_run.append(job)
3317+ jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs
3318+ if fixed_order:
3319+ jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
3320+ jobs_to_collect,integration_step,mode,run_mode)
3321+ # Update the integration_step to make sure that nothing will be overwritten
3322+ integration_step=1
3323+ for job in jobs_to_run:
3324+ while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
3325+ integration_step=integration_step+1
3326+ integration_step=integration_step-1
3327+ else:
3328+ self.append_the_results(jobs_to_collect,integration_step)
3329+ return jobs_to_run,jobs_to_collect,integration_step
3330+
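
Each entry of jobs_to_run is a plain dictionary; the helpers introduced below (prepare_directories, write_input_file, run_all_jobs, append_the_results) only read and update its keys. As an illustration, a fixed-order job built by the req_acc > 0 branch above would look roughly like this (values are an example, not produced by the code):

    job = {
        'p_dir':     'P0_gg_ttx',  # SubProcesses/P* directory (example name)
        'channel':   '1',          # integration channel read from channels.txt
        'split':     0,            # > 0 only for split event generation
        'accuracy':  0.10,         # target accuracy of the grid-setup step
        'niters':    6,            # iterations for this integration step
        'npoints':   -1,           # -1: let the code choose the number of PS points
        'mint_mode': 0,            # 0 = set up grids
        'run_mode':  'all',        # 'all' for NLO, 'born' for LO
        'wgt_frac':  1.0,          # fraction of the channel weight carried by this job
    }
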
3331+ def prepare_directories(self,jobs_to_run,mode,fixed_order=True):
3332+ """Set-up the G* directories for running"""
3333+ name_suffix={'born' :'B' , 'all':'F'}
3334+ for job in jobs_to_run:
3335+ if job['split'] == 0:
3336+ if fixed_order :
3337+ dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
3338+ job['run_mode']+'_G'+job['channel'])
3339+ else:
3340+ dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
3341+ 'G'+name_suffix[job['run_mode']]+job['channel'])
3342+ else:
3343+ if fixed_order :
3344+ dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
3345+ job['run_mode']+'_G'+job['channel']+'_'+str(job['split']))
3346+ else:
3347+ dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
3348+ 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split']))
3349+ job['dirname']=dirname
3350+ if not os.path.isdir(dirname):
3351+ os.makedirs(dirname)
3352+ self.write_input_file(job,fixed_order)
3353+ if not fixed_order:
3354+ # copy the grids from the base directory to the split directory:
3355+ if job['split'] != 0:
3356+ for f in ['grid.MC_integer','mint_grids','res_1']:
3357+ if not os.path.isfile(pjoin(job['dirname'],f)):
3358+ files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
3359+
3360+
3361+ def write_input_file(self,job,fixed_order):
3362+ """write the input file for the madevent_mint* executable in the appropriate directory"""
3363+ if fixed_order:
3364+ content= \
3365+"""NPOINTS = %(npoints)s
3366+NITERATIONS = %(niters)s
3367+ACCURACY = %(accuracy)s
3368+ADAPT_GRID = 2
3369+MULTICHANNEL = 1
3370+SUM_HELICITY = 1
3371+CHANNEL = %(channel)s
3372+SPLIT = %(split)s
3373+RUN_MODE = %(run_mode)s
3374+RESTART = %(mint_mode)s
3375+""" \
3376+ % job
3377+ else:
3378+ content = \
3379+"""-1 12 ! points, iterations
3380+%(accuracy)s ! desired fractional accuracy
3381+1 -0.1 ! alpha, beta for Gsoft
3382+-1 -0.1 ! alpha, beta for Gazi
3383+1 ! Suppress amplitude (0 no, 1 yes)?
3384+1 ! Exact helicity sum (0 yes, n = number/event)?
3385+%(channel)s ! Enter Configuration Number:
3386+%(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events
3387+1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij
3388+%(run_mode)s ! all, born, real, virt
3389+""" \
3390+ % job
3391+ with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file:
3392+ input_file.write(content)
3393+
3394+
3395+ def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
3396+ """Loops over the jobs_to_run and executes them using the function 'run_exe'"""
3397+ if fixed_order:
3398+ if integration_step == 0:
3399+ self.update_status('Setting up grids', level=None)
3400+ else:
3401+ self.update_status('Refining results, step %i' % integration_step, level=None)
3402+ self.ijob = 0
3403+ name_suffix={'born' :'B', 'all':'F'}
3404+ if fixed_order:
3405+ run_type="Fixed order integration step %s" % integration_step
3406+ else:
3407+ run_type="MINT step %s" % integration_step
3408+ for job in jobs_to_run:
3409+ executable='ajob1'
3410+ if fixed_order:
3411+ arguments=[job['channel'],job['run_mode'], \
3412+ str(job['split']),str(integration_step)]
3413+ else:
3414+ arguments=[job['channel'],name_suffix[job['run_mode']], \
3415+ str(job['split']),str(integration_step)]
3416+ self.run_exe(executable,arguments,run_type,
3417+ cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir']))
3418+
3419+ if self.cluster_mode == 2:
3420+ time.sleep(1) # security to allow all jobs to be launched
3421+ self.njobs=len(jobs_to_run)
3422+ self.wait_for_complete(run_type)
3423+
3424+
3425+ def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
3426+ integration_step,mode,run_mode,fixed_order=True):
3427+ """Collect the results, make HTML pages, print the summary and
3428+ determine if there are more jobs to run. Returns the list
3429+ of the jobs that still need to be run, as well as the
3430+ complete list of jobs that need to be collected to get the
3431+ final answer.
3432+ """
3433+# Get the results of the current integration/MINT step
3434+ self.append_the_results(jobs_to_run,integration_step)
3435+ self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)
3436+# Update HTML pages
3437+ if fixed_order:
3438+ cross, error = sum_html.make_all_html_results(self, ['%s*' % run_mode])
3439+ else:
3440+ name_suffix={'born' :'B' , 'all':'F'}
3441+ cross, error = sum_html.make_all_html_results(self, ['G%s*' % name_suffix[run_mode]])
3442+ self.results.add_detail('cross', cross)
3443+ self.results.add_detail('error', error)
3444+# Set-up jobs for the next iteration/MINT step
3445+ jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)
3446+ # if there are no more jobs, we are done!
3447+# Print summary
3448+ if (not jobs_to_run_new) and fixed_order:
3449+ # print final summary of results (for fixed order)
3450+ scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
3451+ self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
3452+ return jobs_to_run_new,jobs_to_collect
3453+ elif jobs_to_run_new:
3454+ # print intermediate summary of results
3455+ scale_pdf_info={}
3456+ self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
3457+ else:
3458+ # When we are done for (N)LO+PS runs, do not print
3459+ # anything yet. This will be done after the reweighting
3460+ # and collection of the events
3461+ scale_pdf_info={}
3462+# Prepare for the next integration/MINT step
3463+ if (not fixed_order) and integration_step+1 == 2 :
3464+ # next step is event generation (mint_step 2)
3465+ jobs_to_run_new,jobs_to_collect_new= \
3466+ self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
3467+ self.prepare_directories(jobs_to_run_new,mode,fixed_order)
3468+ self.write_nevents_unweighted_file(jobs_to_collect_new)
3469+ self.write_nevts_files(jobs_to_run_new)
3470+ else:
3471+ self.prepare_directories(jobs_to_run_new,mode,fixed_order)
3472+ jobs_to_collect_new=jobs_to_collect
3473+ return jobs_to_run_new,jobs_to_collect_new
3474+
3475+
3476+ def write_nevents_unweighted_file(self,jobs):
3477+ """writes the nevents_unweighted file in the SubProcesses directory"""
3478+ content=[]
3479+ for job in jobs:
3480+ path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
3481+ lhefile=pjoin(path,'events.lhe')
3482+ content.append(' %s %d %9e %9e' % \
3483+ (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac']))
3484+ with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f:
3485+ f.write('\n'.join(content)+'\n')
3486+
3487+ def write_nevts_files(self,jobs):
3488+ """write the nevts files in the SubProcesses/P*/G*/ directories"""
3489+ for job in jobs:
3490+ with open(pjoin(job['dirname'],'nevts'),'w') as f:
3491+ f.write('%i\n' % job['nevents'])
3492+
3493+ def check_the_need_to_split(self,jobs_to_run,jobs_to_collect):
3494+        """Checks jobs_to_run to see whether the event generation step
3495+        needs to be split. Updates jobs_to_run and jobs_to_collect to
3496+        replace each split job by its splits. Also removes jobs that
3497+        do not need any events.
3498+ """
3499+ nevt_job=self.run_card['nevt_job']
3500+ if nevt_job > 0:
3501+ jobs_to_collect_new=copy.copy(jobs_to_collect)
3502+ for job in jobs_to_run:
3503+ nevents=job['nevents']
3504+ if nevents == 0:
3505+ jobs_to_collect_new.remove(job)
3506+ elif nevents > nevt_job:
3507+ jobs_to_collect_new.remove(job)
3508+ if nevents % nevt_job != 0 :
3509+ nsplit=int(nevents/nevt_job)+1
3510+ else:
3511+ nsplit=int(nevents/nevt_job)
3512+ for i in range(1,nsplit+1):
3513+ job_new=copy.copy(job)
3514+ left_over=nevents % nsplit
3515+ if i <= left_over:
3516+ job_new['nevents']=int(nevents/nsplit)+1
3517+ job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
3518+ else:
3519+ job_new['nevents']=int(nevents/nsplit)
3520+ job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
3521+ job_new['split']=i
3522+ job_new['dirname']=job['dirname']+'_%i' % job_new['split']
3523+ jobs_to_collect_new.append(job_new)
3524+ jobs_to_run_new=copy.copy(jobs_to_collect_new)
3525+ else:
3526+ jobs_to_run_new=copy.copy(jobs_to_collect)
3527+ for job in jobs_to_collect:
3528+ if job['nevents'] == 0:
3529+ jobs_to_run_new.remove(job)
3530+ jobs_to_collect_new=copy.copy(jobs_to_run_new)
3531+
3532+ return jobs_to_run_new,jobs_to_collect_new
3533+
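
The splitting above divides a channel's nevents over nsplit sub-jobs of nearly equal size: the first nevents % nsplit sub-jobs receive one extra event, so the total is preserved exactly and wgt_frac records each sub-job's share. A small sketch of just that arithmetic:

    def split_events(nevents, nevt_job):
        """Per-split event counts, following check_the_need_to_split above."""
        if nevents % nevt_job != 0:
            nsplit = int(nevents / nevt_job) + 1
        else:
            nsplit = int(nevents / nevt_job)
        left_over = nevents % nsplit
        counts = []
        for i in range(1, nsplit + 1):
            if i <= left_over:
                counts.append(int(nevents / nsplit) + 1)
            else:
                counts.append(int(nevents / nsplit))
        return counts

    print(split_events(10000, 3000))       # [2500, 2500, 2500, 2500]
    print(split_events(10001, 3000))       # [2501, 2500, 2500, 2500]
    print(sum(split_events(10001, 3000)))  # 10001
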
3534+
3535+ def update_jobs_to_run(self,req_acc,step,jobs,fixed_order=True):
3536+ """
3537+ For (N)LO+PS: determines the number of events and/or the required
3538+ accuracy per job.
3539+ For fixed order: determines which jobs need higher precision and
3540+ returns those with the newly requested precision.
3541+ """
3542+ err=self.cross_sect_dict['errt']
3543+ tot=self.cross_sect_dict['xsect']
3544+ errABS=self.cross_sect_dict['erra']
3545+ totABS=self.cross_sect_dict['xseca']
3546+ jobs_new=[]
3547+ if fixed_order:
3548+ if req_acc == -1:
3549+ if step+1 == 1:
3550+ npoints = self.run_card['npoints_FO']
3551+ niters = self.run_card['niters_FO']
3552+ for job in jobs:
3553+ job['mint_mode']=-1
3554+ job['niters']=niters
3555+ job['npoints']=npoints
3556+ jobs_new.append(job)
3557+ elif step+1 == 2:
3558+ pass
3559+ elif step+1 > 2:
3560+ raise aMCatNLOError('Cannot determine number of iterations and PS points '+
3561+ 'for integration step %i' % step )
3562+ elif ( req_acc > 0 and err/tot > req_acc*1.2 ) or step == 0:
3563+                req_accABS=req_acc*abs(tot)/totABS # overall relative required accuracy on ABS Xsec.
3564+ for job in jobs:
3565+ job['mint_mode']=-1
3566+ # Determine relative required accuracy on the ABS for this job
3567+ job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
3568+ # If already accurate enough, skip running
3569+ if job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0:
3570+ continue
3571+ # Update the number of PS points based on errorABS, ncall and accuracy
3572+ itmax_fl=job['niters_done']*math.pow(job['errorABS']/
3573+ (job['accuracy']*job['resultABS']),2)
3574+ if itmax_fl <= 4.0 :
3575+ job['niters']=max(int(round(itmax_fl)),2)
3576+ job['npoints']=job['npoints_done']*2
3577+ elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
3578+ job['niters']=4
3579+ job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
3580+ else:
3581+ if itmax_fl > 100.0 : itmax_fl=50.0
3582+ job['niters']=int(round(math.sqrt(itmax_fl)))
3583+ job['npoints']=int(round(job['npoints_done']*itmax_fl/
3584+ round(math.sqrt(itmax_fl))))*2
3585+ # Add the job to the list of jobs that need to be run
3586+ jobs_new.append(job)
3587+ return jobs_new
3588+ elif step+1 <= 2:
3589+ nevents=self.run_card['nevents']
3590+ # Total required accuracy for the upper bounding envelope
3591+ if req_acc<0:
3592+ req_acc2_inv=nevents
3593+ else:
3594+ req_acc2_inv=1/(req_acc*req_acc)
3595+ if step+1 == 1 or step+1 == 2 :
3596+ # determine the req. accuracy for each of the jobs for Mint-step = 1
3597+ for job in jobs:
3598+ accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
3599+ job['accuracy']=accuracy
3600+ if step+1 == 2:
3601+ # Randomly (based on the relative ABS Xsec of the job) determine the
3602+ # number of events each job needs to generate for MINT-step = 2.
3603+ r=self.get_randinit_seed()
3604+ random.seed(r)
3605+ totevts=nevents
3606+ for job in jobs:
3607+ job['nevents'] = 0
3608+ while totevts :
3609+ target = random.random() * totABS
3610+ crosssum = 0.
3611+ i = 0
3612+ while i<len(jobs) and crosssum < target:
3613+ job = jobs[i]
3614+ crosssum += job['resultABS']
3615+ i += 1
3616+ totevts -= 1
3617+ i -= 1
3618+ jobs[i]['nevents'] += 1
3619+ for job in jobs:
3620+ job['mint_mode']=step+1 # next step
3621+ return jobs
3622+ else:
3623+ return []
3624+
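
The while loop above distributes the requested events over the channels by repeated weighted sampling: each event picks a channel with probability resultABS/totABS, so channels with a larger absolute cross section generate proportionally more events. A compact sketch of that sampler (the job list and seed are invented for the example):

    import random

    def assign_events(jobs, nevents, seed=33):
        """Weighted event assignment as in the MINT step-2 branch above."""
        random.seed(seed)
        totABS = sum(job['resultABS'] for job in jobs)
        for job in jobs:
            job['nevents'] = 0
        totevts = nevents
        while totevts:
            target = random.random() * totABS
            crosssum = 0.
            i = 0
            while i < len(jobs) and crosssum < target:
                crosssum += jobs[i]['resultABS']
                i += 1
            totevts -= 1
            jobs[i - 1]['nevents'] += 1
        return jobs

    jobs = [{'resultABS': 8.0}, {'resultABS': 1.0}, {'resultABS': 1.0}]
    for job in assign_events(jobs, 1000):
        print(job)   # roughly 800 / 100 / 100 events
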
3625+
3626+ def get_randinit_seed(self):
3627+ """ Get the random number seed from the randinit file """
3628+ with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit:
3629+ # format of the file is "r=%d".
3630+ iseed = int(randinit.read()[2:])
3631+ return iseed
3632+
3633+
3634+ def append_the_results(self,jobs,integration_step):
3635+ """Appends the results for each of the jobs in the job list"""
3636+ error_found=False
3637+ for job in jobs:
3638+ try:
3639+ if integration_step >= 0 :
3640+ with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file:
3641+ results=res_file.readline().split()
3642+ else:
3643+ # should only be here when doing fixed order with the 'only_generation'
3644+ # option equal to True. Take the results from the final run done.
3645+ with open(pjoin(job['dirname'],'res.dat')) as res_file:
3646+ results=res_file.readline().split()
3647+ except IOError:
3648+ if not error_found:
3649+ error_found=True
3650+ error_log=[]
3651+ error_log.append(pjoin(job['dirname'],'log.txt'))
3652+ continue
3653+ job['resultABS']=float(results[0])
3654+ job['errorABS']=float(results[1])
3655+ job['result']=float(results[2])
3656+ job['error']=float(results[3])
3657+ job['niters_done']=int(results[4])
3658+ job['npoints_done']=int(results[5])
3659+ job['time_spend']=float(results[6])
3660+ job['err_percABS'] = job['errorABS']/job['resultABS']*100.
3661+ job['err_perc'] = job['error']/job['result']*100.
3662+ if error_found:
3663+ raise aMCatNLOError('An error occurred during the collection of results.\n' +
3664+ 'Please check the .log files inside the directories which failed:\n' +
3665+ '\n'.join(error_log)+'\n')
3666+
3667+
3668+
3669+ def write_res_txt_file(self,jobs,integration_step):
3670+        """writes the res.txt files in the SubProcesses dir"""
3671+ jobs.sort(key = lambda job: -job['errorABS'])
3672+ content=[]
3673+ content.append('\n\nCross-section per integration channel:')
3674+ for job in jobs:
3675+ content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
3676+ content.append('\n\nABS cross-section per integration channel:')
3677+ for job in jobs:
3678+ content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
3679+ totABS=0
3680+ errABS=0
3681+ tot=0
3682+ err=0
3683+ for job in jobs:
3684+ totABS+= job['resultABS']
3685+ errABS+= math.pow(job['errorABS'],2)
3686+ tot+= job['result']
3687+ err+= math.pow(job['error'],2)
3688+ if jobs:
3689+ content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
3690+ (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
3691+ tot, math.sqrt(err), math.sqrt(err)/tot *100.))
3692+ with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file:
3693+ res_file.write('\n'.join(content))
3694+ randinit=self.get_randinit_seed()
3695+ return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\
3696+ 'erra':math.sqrt(errABS),'randinit':randinit}
3697+
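
In the totals written above, the per-channel cross sections are summed linearly while their errors are combined in quadrature, since the channel integrations are statistically independent. The same combination in isolation (numbers are illustrative):

    import math

    # (cross section, error) per channel, purely illustrative numbers
    results = [(10.0, 0.3), (5.0, 0.4), (2.0, 0.12)]

    tot = sum(r for r, e in results)
    err = math.sqrt(sum(e * e for r, e in results))

    print('%10.8e +- %6.4e (%6.4e%%)' % (tot, err, err / tot * 100.))
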
3698+
3699+ def collect_scale_pdf_info(self,options,jobs):
3700+        """read the scale_pdf_dependence.dat files and collect their results"""
3701+ scale_pdf_info={}
3702+ if self.run_card['reweight_scale'] or self.run_card['reweight_PDF']:
3703+ data_files=[]
3704+ for job in jobs:
3705+ data_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat'))
3706+ scale_pdf_info = self.pdf_scale_from_reweighting(data_files)
3707+ return scale_pdf_info
3708+
3709+
3710+ def combine_plots_FO(self,folder_name,jobs):
3711+        """combines the plots and puts them in the Events/run* directory"""
3712+ devnull = os.open(os.devnull, os.O_RDWR)
3713+ if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
3714+ misc.call(['./combine_plots_FO.sh'] + folder_name, \
3715+ stdout=devnull,
3716+ cwd=pjoin(self.me_dir, 'SubProcesses'))
3717+ files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
3718+ pjoin(self.me_dir, 'Events', self.run_name))
3719+ logger.info('The results of this run and the TopDrawer file with the plots' + \
3720+ ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
3721+ elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
3722+ self.combine_plots_HwU(jobs)
3723+ files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.HwU'),
3724+ pjoin(self.me_dir, 'Events', self.run_name))
3725+ files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.gnuplot'),
3726+ pjoin(self.me_dir, 'Events', self.run_name))
3727+ try:
3728+ misc.call(['gnuplot','MADatNLO.gnuplot'],\
3729+ stdout=devnull,stderr=devnull,\
3730+ cwd=pjoin(self.me_dir, 'Events', self.run_name))
3731+ except Exception:
3732+ pass
3733+ logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
3734+ ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
3735+ elif self.analyse_card['fo_analysis_format'].lower() == 'root':
3736+ misc.call(['./combine_root.sh'] + folder_name, \
3737+ stdout=devnull,
3738+ cwd=pjoin(self.me_dir, 'SubProcesses'))
3739+ files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
3740+ pjoin(self.me_dir, 'Events', self.run_name))
3741+ logger.info('The results of this run and the ROOT file with the plots' + \
3742+ ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
3743+ else:
3744+ logger.info('The results of this run' + \
3745+ ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
3746+
3747+
3748+ def combine_plots_HwU(self,jobs):
3749 """Sums all the plots in the HwU format."""
3750-
3751 logger.debug('Combining HwU plots.')
3752-
3753- with open(pjoin(self.me_dir,'SubProcesses','dirs.txt')) as dirf:
3754- all_histo_paths = dirf.readlines()
3755- all_histo_paths = [pjoin(self.me_dir,'SubProcesses',
3756- path.rstrip(),"MADatNLO.HwU") for path in all_histo_paths]
3757-
3758+ all_histo_paths=[]
3759+ for job in jobs:
3760+ all_histo_paths.append(pjoin(job['dirname'],"MADatNLO.HwU"))
3761 histogram_list = histograms.HwUList(all_histo_paths[0])
3762-
3763 for histo_path in all_histo_paths[1:]:
3764 for i, histo in enumerate(histograms.HwUList(histo_path)):
3765 # First make sure the plots have the same weight labels and such
3766@@ -1632,19 +1964,18 @@
3767 histogram_list.output(pjoin(self.me_dir,'SubProcesses',"MADatNLO"),
3768 format = 'gnuplot')
3769
3770- def applgrid_combine(self,cross,error):
3771+ def applgrid_combine(self,cross,error,jobs):
3772 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories"""
3773 logger.debug('Combining APPLgrids \n')
3774 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),
3775 'applgrid-combine')
3776- with open(pjoin(self.me_dir,'SubProcesses','dirs.txt')) as dirf:
3777- all_jobs=dirf.readlines()
3778+ all_jobs=[]
3779+ for job in jobs:
3780+ all_jobs.append(job['dirname'])
3781 ngrids=len(all_jobs)
3782- nobs =len([name for name in os.listdir(pjoin(self.me_dir,'SubProcesses',
3783- all_jobs[0].rstrip())) if name.endswith("_out.root")])
3784+ nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
3785 for obs in range(0,nobs):
3786- gdir = [pjoin(self.me_dir,'SubProcesses',job.rstrip(),"grid_obs_"+
3787- str(obs)+"_out.root") for job in all_jobs]
3788+ gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
3789 # combine APPLgrids from different channels for observable 'obs'
3790 if self.run_card["iappl"] == 1:
3791 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,
3792@@ -1656,8 +1987,7 @@
3793 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',
3794 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
3795 for job in all_jobs:
3796- os.remove(pjoin(self.me_dir,'SubProcesses',job.rstrip(),
3797- "grid_obs_"+str(obs)+"_in.root"))
3798+ os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root"))
3799 else:
3800 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
3801 # after combining, delete the original grids
3802@@ -1698,14 +2028,10 @@
3803 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
3804 raise self.InvalidCmd('No APPLgrid name currently defined.'+
3805 'Please provide this information.')
3806- if mode == 'NLO':
3807- gdir='all_G'
3808- elif mode == 'LO':
3809- gdir='born_G'
3810 #copy the grid to all relevant directories
3811 for pdir in p_dirs:
3812 g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
3813- "SubProcesses",pdir)) if file.startswith(gdir) and
3814+ "SubProcesses",pdir)) if file.startswith(mode+'_G') and
3815 os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
3816 for g_dir in g_dirs:
3817 for grid in all_grids:
3818@@ -1714,28 +2040,20 @@
3819 'grid_obs_'+obs+'_in.root'))
3820
3821
3822- def collect_log_files(self, folders, istep):
3823+
3824+
3825+ def collect_log_files(self, jobs, integration_step):
3826 """collect the log files and put them in a single, html-friendly file
3827- inside the run_... directory"""
3828- step_list = ['Grid setting', 'Cross-section computation',
3829- 'Event generation']
3830+ inside the Events/run_.../ directory"""
3831 log_file = pjoin(self.me_dir, 'Events', self.run_name,
3832- 'alllogs_%d.html' % istep)
3833- # this keeps track of which step has been computed for which channel
3834- channel_dict = {}
3835- log_files = []
3836- for folder in folders:
3837- log_files += glob.glob(pjoin(self.me_dir, 'SubProcesses', 'P*',
3838- folder, 'log.txt'))
3839+ 'alllogs_%d.html' % integration_step)
3840+ outfile = open(log_file, 'w')
3841
3842 content = ''
3843-
3844- outfile = open(log_file, 'w')
3845-
3846 content += '<HTML><BODY>\n<font face="courier" size=2>'
3847- for log in log_files:
3848- channel_dict[os.path.dirname(log)] = [istep]
3849+ for job in jobs:
3850 # put an anchor
3851+ log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step)
3852 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(
3853 pjoin(self.me_dir,'SubProcesses'),''))
3854 # and put some nice header
3855@@ -1743,7 +2061,7 @@
3856 content += '<br>LOG file for integration channel %s, %s <br>' % \
3857 (os.path.dirname(log).replace(pjoin(self.me_dir,
3858 'SubProcesses'), ''),
3859- step_list[istep])
3860+ integration_step)
3861 content += '</font>\n'
3862 #then just flush the content of the small log inside the big log
3863 #the PRE tag prints everything verbatim
3864@@ -1756,53 +2074,80 @@
3865 outfile.close()
3866
3867
3868- def read_results(self, output, mode):
3869- """extract results (cross-section, absolute cross-section and errors)
3870- from output, which should be formatted as
3871- Found 4 correctly terminated jobs
3872- random seed found in 'randinit' is 33
3873- Integrated abs(cross-section)
3874- 7.94473937e+03 +- 2.9953e+01 (3.7702e-01%)
3875- Integrated cross-section
3876- 6.63392298e+03 +- 3.7669e+01 (5.6782e-01%)
3877- for aMC@NLO/aMC@LO, and as
3878-
3879- for NLO/LO
3880- The cross_sect_dict is returned"""
3881- res = {}
3882- if mode in ['aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO']:
3883- pat = re.compile(\
3884-'''Found (\d+) correctly terminated jobs
3885-random seed found in 'randinit' is (\d+)
3886-Integrated abs\(cross-section\)
3887-\s*(\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\d+\.\d+e[+-]\d+)\%\)
3888-Integrated cross-section
3889-\s*(\-?\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\-?\d+\.\d+e[+-]\d+)\%\)''')
3890- else:
3891- pat = re.compile(\
3892-'''Found (\d+) correctly terminated jobs
3893-\s*(\-?\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\-?\d+\.\d+e[+-]\d+)\%\)''')
3894- pass
3895-
3896- match = re.search(pat, output[0])
3897- if not match or output[1]:
3898- logger.info('Return code of the event collection: '+str(output[1]))
3899- logger.info('Output of the event collection:\n'+output[0])
3900- raise aMCatNLOError('An error occurred during the collection of results.\n' +
3901- 'Please check the .log files inside the directories which failed.')
3902-# if int(match.groups()[0]) != self.njobs:
3903-# raise aMCatNLOError('Not all jobs terminated successfully')
3904- if mode in ['aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO']:
3905- return {'randinit' : int(match.groups()[1]),
3906- 'xseca' : float(match.groups()[2]),
3907- 'erra' : float(match.groups()[3]),
3908- 'xsect' : float(match.groups()[5]),
3909- 'errt' : float(match.groups()[6])}
3910- else:
3911- return {'xsect' : float(match.groups()[1]),
3912- 'errt' : float(match.groups()[2])}
3913-
3914- def print_summary(self, options, step, mode, scale_pdf_info={}):
3915+ def finalise_run_FO(self,folder_name,jobs):
3916+ """Combine the plots and put the res*.txt files in the Events/run.../ folder."""
3917+ # Copy the res_*.txt files to the Events/run* folder
3918+ res_files=glob.glob(pjoin(self.me_dir, 'SubProcesses', 'res_*.txt'))
3919+ for res_file in res_files:
3920+ files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))
3921+ # Collect the plots and put them in the Events/run* folder
3922+ self.combine_plots_FO(folder_name,jobs)
3923+ # If doing the applgrid-stuff, also combine those grids
3924+ # and put those in the Events/run* folder
3925+ if self.run_card['iappl'] != 0:
3926+ cross=self.cross_sect_dict['xsect']
3927+ error=self.cross_sect_dict['errt']
3928+            self.applgrid_combine(cross,error,jobs)
3929+
3930+
3931+ def setup_cluster_or_multicore(self):
3932+ """setup the number of cores for multicore, and the cluster-type for cluster runs"""
3933+ if self.cluster_mode == 1:
3934+ cluster_name = self.options['cluster_type']
3935+ self.cluster = cluster.from_name[cluster_name](**self.options)
3936+ if self.cluster_mode == 2:
3937+ try:
3938+ import multiprocessing
3939+ if not self.nb_core:
3940+ try:
3941+ self.nb_core = int(self.options['nb_core'])
3942+ except TypeError:
3943+ self.nb_core = multiprocessing.cpu_count()
3944+ logger.info('Using %d cores' % self.nb_core)
3945+ except ImportError:
3946+ self.nb_core = 1
3947+ logger.warning('Impossible to detect the number of cores => Using One.\n'+
3948+                               'Use set nb_core X in order to set this number and be able to '+
3949+ 'run in multicore.')
3950+
3951+ self.cluster = cluster.MultiCore(**self.options)
3952+
3953+
3954+ def clean_previous_results(self,options,p_dirs,folder_name):
3955+ """Clean previous results.
3956+        o. If doing only the reweighting step, do not delete anything and return directly.
3957+        o. Always remove all the G*_* files (from split event generation).
3958+        o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweightonly."""
3959+ if options['reweightonly']:
3960+ return
3961+ if not options['only_generation']:
3962+ self.update_status('Cleaning previous results', level=None)
3963+ for dir in p_dirs:
3964+ #find old folders to be removed
3965+ for obj in folder_name:
3966+ # list all the G* (or all_G* or born_G*) directories
3967+ to_rm = [file for file in \
3968+ os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
3969+ if file.startswith(obj[:-1]) and \
3970+ (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
3971+ os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
3972+ # list all the G*_* directories (from split event generation)
3973+ to_always_rm = [file for file in \
3974+ os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
3975+ if file.startswith(obj[:-1]) and
3976+ '_' in file and not '_G' in file and \
3977+ (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
3978+ os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
3979+
3980+ if not options['only_generation']:
3981+ to_always_rm.extend(to_rm)
3982+ if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
3983+ to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
3984+ files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
3985+ return
3986+
3987+
3988+ def print_summary(self, options, step, mode, scale_pdf_info={}, done=True):
3989 """print a summary of the results contained in self.cross_sect_dict.
3990 step corresponds to the mintMC step, if =2 (i.e. after event generation)
3991 some additional infos are printed"""
3992@@ -1813,25 +2158,35 @@
3993 if line.startswith('generate') or line.startswith('add process'):
3994 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
3995 lpp = {0:'l', 1:'p', -1:'pbar'}
3996- proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
3997- (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
3998- self.run_card['ebeam1'], self.run_card['ebeam2'])
3999-
4000+ if self.ninitial == 1:
4001+ proc_info = '\n Process %s' % process[:-3]
4002+ else:
4003+ proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
4004+ (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
4005+ self.run_card['ebeam1'], self.run_card['ebeam2'])
4006+
4007+ if self.ninitial == 1:
4008+ self.cross_sect_dict['unit']='GeV'
4009+ self.cross_sect_dict['xsec_string']='(Partial) decay width'
4010+ self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
4011+ else:
4012+ self.cross_sect_dict['unit']='pb'
4013+ self.cross_sect_dict['xsec_string']='Total cross-section'
4014+ self.cross_sect_dict['axsec_string']='Total abs(cross-section)'
4015 # Gather some basic statistics for the run and extracted from the log files.
4016 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
4017 log_GV_files = glob.glob(pjoin(self.me_dir, \
4018 'SubProcesses', 'P*','G*','log_MINT*.txt'))
4019- all_log_files = glob.glob(pjoin(self.me_dir, \
4020- 'SubProcesses', 'P*','G*','log*.txt'))
4021+ all_log_files = log_GV_files
4022 elif mode == 'NLO':
4023 log_GV_files = glob.glob(pjoin(self.me_dir, \
4024- 'SubProcesses', 'P*','all_G*','log*.txt'))
4025- all_log_files = sum([glob.glob(pjoin(self.me_dir,'SubProcesses', 'P*',
4026- '%sG*'%foldName,'log*.txt')) for foldName in ['all_']],[])
4027+ 'SubProcesses', 'P*','all_G*','log_MINT*.txt'))
4028+ all_log_files = log_GV_files
4029+
4030 elif mode == 'LO':
4031 log_GV_files = ''
4032- all_log_files = sum([glob.glob(pjoin(self.me_dir,'SubProcesses', 'P*',
4033- '%sG*'%foldName,'log*.txt')) for foldName in ['born_']],[])
4034+ all_log_files = glob.glob(pjoin(self.me_dir, \
4035+ 'SubProcesses', 'P*','born_G*','log_MINT*.txt'))
4036 else:
4037 raise aMCatNLOError, 'Running mode %s not supported.'%mode
4038
4039@@ -1843,13 +2198,13 @@
4040 if step != 2:
4041 message = status[step] + '\n\n Intermediate results:' + \
4042 ('\n Random seed: %(randinit)d' + \
4043- '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' + \
4044- '\n Total abs(cross-section): %(xseca)8.3e +- %(erra)6.1e pb \n') \
4045+ '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
4046+ '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
4047 % self.cross_sect_dict
4048 else:
4049
4050 message = '\n ' + status[step] + proc_info + \
4051- '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
4052+ '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
4053 self.cross_sect_dict
4054
4055 if self.run_card['nevents']>=10000 and self.run_card['reweight_scale']:
4056@@ -1874,18 +2229,22 @@
4057 misc.format_timer(time.time()-self.start_time))
4058
4059 elif mode in ['NLO', 'LO']:
4060- status = ['Results after grid setup (cross-section is non-physical):',
4061+ status = ['Results after grid setup:','Current results:',
4062 'Final results and run summary:']
4063- if step == 0:
4064- message = '\n ' + status[step] + \
4065- '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
4066- self.cross_sect_dict
4067- elif step == 1:
4068- message = '\n ' + status[step] + proc_info + \
4069- '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
4070+ if (not done) and (step == 0):
4071+ message = '\n ' + status[0] + \
4072+ '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
4073+ self.cross_sect_dict
4074+ elif not done:
4075+ message = '\n ' + status[1] + \
4076+ '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
4077+ self.cross_sect_dict
4078+ elif done:
4079+ message = '\n ' + status[2] + proc_info + \
4080+ '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
4081 self.cross_sect_dict
4082 if self.run_card['reweight_scale']:
4083- if int(self.run_card['ickkw'])!=-1:
4084+ if self.run_card['ickkw'] != -1:
4085 message = message + \
4086 ('\n Ren. and fac. scale uncertainty: +%0.1f%% -%0.1f%%') % \
4087 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low'])
4088@@ -1898,7 +2257,7 @@
4089 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \
4090 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low'])
4091
4092- if (mode in ['NLO', 'LO'] and step!=1) or \
4093+ if (mode in ['NLO', 'LO'] and not done) or \
4094 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
4095 logger.info(message+'\n')
4096 return
4097@@ -2359,7 +2718,6 @@
4098 scale_pdf_info={}
4099 if self.run_card['reweight_scale'] or self.run_card['reweight_PDF'] :
4100 scale_pdf_info = self.run_reweight(options['reweightonly'])
4101-
4102 self.update_status('Collecting events', level='parton', update_results=True)
4103 misc.compile(['collect_events'],
4104 cwd=pjoin(self.me_dir, 'SubProcesses'))
4105@@ -2383,6 +2741,10 @@
4106 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
4107 if not options['reweightonly']:
4108 self.print_summary(options, 2, mode, scale_pdf_info)
4109+ res_files=glob.glob(pjoin(self.me_dir, 'SubProcesses', 'res*.txt'))
4110+ for res_file in res_files:
4111+ files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))
4112+
4113 logger.info('The %s file has been generated.\n' % (evt_file))
4114 self.results.add_detail('nb_event', nevents)
4115 self.update_status('Events generated', level='parton', update_results=True)
4116@@ -2403,9 +2765,9 @@
4117
4118 #check that the number of split event files divides the number of
4119 # events, otherwise set it to 1
4120- if int(int(self.banner.get_detail('run_card', 'nevents')) / \
4121+ if int(self.banner.get_detail('run_card', 'nevents') / \
4122 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
4123- != int(self.banner.get_detail('run_card', 'nevents')):
4124+ != self.banner.get_detail('run_card', 'nevents'):
4125 logger.warning(\
4126 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
4127 'Setting it to 1.')
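
The divisibility check above can be read in isolation as follows (hypothetical numbers; in the real code nevents comes from the banner's run_card and nsplit_jobs from the shower_card):

    nevents = 10000
    nsplit_jobs = 3
    # integer division: if nsplit_jobs does not divide nevents exactly,
    # fall back to a single (unsplit) shower job
    if (nevents // nsplit_jobs) * nsplit_jobs != nevents:
        nsplit_jobs = 1
    print(nsplit_jobs)   # -> 1
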
4128@@ -2413,7 +2775,7 @@
4129
4130 # don't split jobs if the user asks to shower only a part of the events
4131 if self.shower_card['nevents'] > 0 and \
4132- self.shower_card['nevents'] < int(self.banner.get_detail('run_card', 'nevents')) and \
4133+ self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
4134 self.shower_card['nsplit_jobs'] != 1:
4135 logger.warning(\
4136 'Only a part of the events will be showered.\n' + \
4137@@ -2503,6 +2865,18 @@
4138
4139 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
4140 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')
4141+
4142+
4143+            # libdl may be needed for Pythia 8.2xx
4144+ if shower == 'PYTHIA8' and not \
4145+ os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \
4146+ 'dl' not in self.shower_card['extralibs'].split():
4147+ # 'dl' has to be linked with the extralibs
4148+ self.shower_card['extralibs'] += ' dl'
4149+ logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \
4150+ "It is needed for the correct running of PY8.2xx.\n" + \
4151+ "If this library cannot be found on your system, a crash will occur.")
4152+
4153 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
4154 stderr=open(mcatnlo_log, 'w'),
4155 cwd=pjoin(self.me_dir, 'MCatNLO'))
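
The Pythia 8 version test above relies on where xmldoc is installed: 8.1xx ships it at the top level of the installation, 8.2xx under share/Pythia8. A hedged sketch of the same extralibs adjustment (pythia8_path and the starting extralibs value are made up):

    import os

    pythia8_path = '/opt/pythia8'            # hypothetical installation prefix
    extralibs = 'stdhep Fmcfio'              # hypothetical shower_card value

    is_py82 = not os.path.exists(os.path.join(pythia8_path, 'xmldoc'))
    if is_py82 and 'dl' not in extralibs.split():
        extralibs += ' dl'                   # libdl must be linked for PY8.2xx
    print(extralibs)
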
4156@@ -2554,10 +2928,10 @@
4157 # special treatment for pythia8
4158 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
4159 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
4160- if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
4161+ if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx
4162 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
4163 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
4164- else:
4165+ else: # this is PY8.2xxx
4166 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
4167 #link the hwpp exe in the rundir
4168 if shower == 'HERWIGPP':
4169@@ -2987,8 +3361,8 @@
4170 init_dict = self.get_init_dict(evt_file)
4171
4172 if nevents < 0 or \
4173- nevents > int(self.banner.get_detail('run_card', 'nevents')):
4174- nevents = int(self.banner.get_detail('run_card', 'nevents'))
4175+ nevents > self.banner.get_detail('run_card', 'nevents'):
4176+ nevents = self.banner.get_detail('run_card', 'nevents')
4177
4178 nevents = nevents / self.shower_card['nsplit_jobs']
4179
4180@@ -3000,7 +3374,7 @@
4181
4182 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
4183 content += 'NEVENTS=%d\n' % nevents
4184- content += 'NEVENTS_TOT=%d\n' % (int(self.banner.get_detail('run_card', 'nevents')) /\
4185+ content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
4186 self.shower_card['nsplit_jobs'])
4187 content += 'MCMODE=%s\n' % shower
4188 content += 'PDLABEL=%s\n' % pdlabel
4189@@ -3113,7 +3487,7 @@
4190
4191
4192 def run_reweight(self, only):
4193- """runs the reweight_xsec_events eecutables on each sub-event file generated
4194+ """runs the reweight_xsec_events executables on each sub-event file generated
4195         to compute on-the-fly scale and/or PDF uncertainties"""
4196 logger.info(' Doing reweight')
4197
4198@@ -3231,7 +3605,7 @@
4199 scale_pdf_info['scale_low'] = 0.0
4200
4201 # get the pdf uncertainty in percent (according to the Hessian method)
4202- lhaid=int(self.run_card['lhaid'])
4203+ lhaid=self.run_card['lhaid']
4204 pdf_upp=0.0
4205 pdf_low=0.0
4206 if lhaid <= 90000:
4207@@ -3246,7 +3620,6 @@
4208 else:
4209 scale_pdf_info['pdf_upp'] = 0.0
4210 scale_pdf_info['pdf_low'] = 0.0
4211-
4212 else:
4213 # use Gaussian method (NNPDF)
4214 pdf_stdev=0.0
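
For reference, the two branches above correspond to the usual PDF-error prescriptions: eigenvector (Hessian) sets, here identified by lhaid <= 90000, combine up/down member pairs in quadrature, while NNPDF-style sets use the spread of the replicas. A hedged sketch of both formulas, not a transcription of the code in this diff:

    import math

    def hessian_uncertainty(members):
        # members = [x1_up, x1_down, x2_up, x2_down, ...] per eigenvector pair
        pairs = zip(members[0::2], members[1::2])
        return 0.5 * math.sqrt(sum((up - down) ** 2 for up, down in pairs))

    def replica_uncertainty(members):
        # standard deviation over the replica cross sections (assumes >= 2 replicas)
        mean = sum(members) / float(len(members))
        return math.sqrt(sum((x - mean) ** 2 for x in members) / (len(members) - 1.))
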
4215@@ -3263,7 +3636,6 @@
4216
4217 def wait_for_complete(self, run_type):
4218 """this function waits for jobs on cluster to complete their run."""
4219-
4220 starttime = time.time()
4221 #logger.info(' Waiting for submitted jobs to complete')
4222 update_status = lambda i, r, f: self.update_status((i, r, f, run_type),
4223@@ -3276,29 +3648,15 @@
4224
4225 def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4226 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args"""
4227- njob_split = 0
4228 self.ijob = 0
4229-
4230- # this is to keep track, if splitting evt generation, of the various
4231- # folders/args in order to resubmit the jobs if some of them fail
4232- self.split_folders = {}
4233-
4234 if run_type != 'shower':
4235 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
4236 for args in arg_list:
4237 for Pdir, jobs in job_dict.items():
4238 for job in jobs:
4239- if not split_jobs:
4240- self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
4241- else:
4242- for n in self.find_jobs_to_split(Pdir, job, args[1]):
4243- self.run_exe(job, args + [n], run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
4244- njob_split += 1
4245- # print some statistics if running serially
4246+ self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
4247 if self.cluster_mode == 2:
4248 time.sleep(1) # security to allow all jobs to be launched
4249- if njob_split > 0:
4250- self.njobs = njob_split
4251 else:
4252 self.njobs = len(arg_list)
4253 for args in arg_list:
4254@@ -3309,37 +3667,27 @@
4255
4256
4257
4258- def check_event_files(self):
4259+ def check_event_files(self,jobs):
4260 """check the integrity of the event files after splitting, and resubmit
4261 those which are not nicely terminated"""
4262- to_resubmit = []
4263- for dir in self.split_folders.keys():
4264+ jobs_to_resubmit = []
4265+ for job in jobs:
4266 last_line = ''
4267 try:
4268 last_line = subprocess.Popen(
4269- ['tail', '-n1', pjoin(dir, 'events.lhe')], \
4270+ ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \
4271 stdout = subprocess.PIPE).stdout.read().strip()
4272 except IOError:
4273 pass
4274-
4275 if last_line != "</LesHouchesEvents>":
4276- to_resubmit.append(dir)
4277-
4278+ jobs_to_resubmit.append(job)
4279 self.njobs = 0
4280- if to_resubmit:
4281+ if jobs_to_resubmit:
4282 run_type = 'Resubmitting broken jobs'
4283 logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
4284- logger.debug('Resubmitting\n' + '\n'.join(to_resubmit) + '\n')
4285- for dir in to_resubmit:
4286- files.rm([dir])
4287- job = self.split_folders[dir][0]
4288- args = self.split_folders[dir][1:]
4289- run_type = 'monitor'
4290- cwd = os.path.split(dir)[0]
4291- self.run_exe(job, args, run_type, cwd=cwd )
4292- self.njobs +=1
4293-
4294- self.wait_for_complete(run_type)
4295+ for job in jobs_to_resubmit:
4296+ logger.debug('Resubmitting ' + job['dirname'] + '\n')
4297+ self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4298
4299
4300 def find_jobs_to_split(self, pdir, job, arg):
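
The integrity test in check_event_files above shells out to tail -n1 on each split events.lhe; a pure-Python equivalent of the decision it makes (hedged sketch, hypothetical path):

    def event_file_is_complete(lhe_path):
        # a well-terminated LHE file ends with the closing LesHouchesEvents tag
        with open(lhe_path, 'rb') as fsock:
            lines = fsock.read().splitlines()
        last_line = lines[-1].strip() if lines else b''
        return last_line == b'</LesHouchesEvents>'

    # e.g. event_file_is_complete('PATH/TO/GF1_2/events.lhe') -> resubmit the job if False
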
4301@@ -3412,16 +3760,16 @@
4302 # the 'standard' amcatnlo job
4303 # check if args is a list of string
4304 if type(args[0]) == str:
4305- input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd, args)
4306+ input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
4307 #submitting
4308 self.cluster.submit2(exe, args, cwd=cwd,
4309 input_files=input_files, output_files=output_files,
4310 required_output=required_output)
4311
4312- # keep track of folders and arguments for splitted evt gen
4313- subfolder=output_files[-1].split('/')[0]
4314- if len(args) == 4 and '_' in subfolder:
4315- self.split_folders[pjoin(cwd,subfolder)] = [exe] + args
4316+# # keep track of folders and arguments for splitted evt gen
4317+# subfolder=output_files[-1].split('/')[0]
4318+# if len(args) == 4 and '_' in subfolder:
4319+# self.split_folders[pjoin(cwd,subfolder)] = [exe] + args
4320
4321 elif 'shower' in exe:
4322 # a shower job
4323@@ -3487,7 +3835,6 @@
4324 # use local disk if possible => need to stands what are the
4325 # input/output files
4326
4327- keep_fourth_arg = False
4328 output_files = []
4329 required_output = []
4330 input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
4331@@ -3509,91 +3856,58 @@
4332 input_files.append(pjoin(cwd, 'OLE_order.olc'))
4333
4334 # File for the loop (might not be present if MadLoop is not used)
4335- if os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
4336+ if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
4337 cluster.need_transfer(self.options):
4338 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
4339+ elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
4340+ cluster.need_transfer(self.options):
4341 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
4342 dereference=True)
4343 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
4344 tf.close()
4345-
4346- Ire = re.compile("for i in ([\d\s]*) ; do")
4347- try :
4348- fsock = open(exe)
4349- except IOError:
4350- fsock = open(pjoin(cwd,exe))
4351- text = fsock.read()
4352- data = Ire.findall(text)
4353- subdir = ' '.join(data).split()
4354+ input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
4355
4356- if args[0] == '0':
4357+ if args[1] == 'born' or args[1] == 'all':
4358 # MADEVENT MINT FO MODE
4359 input_files.append(pjoin(cwd, 'madevent_mintFO'))
4360- input_files.append(pjoin(self.me_dir, 'SubProcesses','madin.%s' % args[1]))
4361- #j=$2\_G$i
4362- for i in subdir:
4363- current = '%s_G%s' % (args[1],i)
4364- if os.path.exists(pjoin(cwd,current)):
4365- input_files.append(pjoin(cwd, current))
4366- output_files.append(current)
4367+ if args[2] == '0':
4368+ current = '%s_G%s' % (args[1],args[0])
4369+ else:
4370+ current = '%s_G%s_%s' % (args[1],args[0],args[2])
4371+ if os.path.exists(pjoin(cwd,current)):
4372+ input_files.append(pjoin(cwd, current))
4373+ output_files.append(current)
4374
4375- required_output.append('%s/results.dat' % current)
4376- required_output.append('%s/log.txt' % current)
4377- required_output.append('%s/mint_grids' % current)
4378- required_output.append('%s/grid.MC_integer' % current)
4379- if len(args) == 4:
4380- required_output.append('%s/scale_pdf_dependence.dat' % current)
4381- args[2] = '-1'
4382- # use a grid train on another part
4383- base = '%s_G%s' % (args[3],i)
4384- if args[0] == '0':
4385- to_move = ['grid.MC_integer','mint_grids']
4386- elif args[0] == '1':
4387- to_move = ['mint_grids', 'grid.MC_integer']
4388- else:
4389- to_move = []
4390- if self.run_card['iappl'] == 2:
4391- for grid in glob.glob(pjoin(cwd,base,'grid_obs_*_in.root')):
4392- to_move.append(grid)
4393- if not os.path.exists(pjoin(cwd,current)):
4394- os.mkdir(pjoin(cwd,current))
4395- input_files.append(pjoin(cwd, current))
4396- for name in to_move:
4397- files.cp(pjoin(cwd,base, name),
4398- pjoin(cwd,current))
4399- files.cp(pjoin(cwd,base, 'grid.MC_integer'),
4400- pjoin(cwd,current))
4401+ required_output.append('%s/results.dat' % current)
4402+ required_output.append('%s/res_%s.dat' % (current,args[3]))
4403+ required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
4404+ required_output.append('%s/mint_grids' % current)
4405+ required_output.append('%s/grid.MC_integer' % current)
4406+ if args[3] != '0':
4407+ required_output.append('%s/scale_pdf_dependence.dat' % current)
4408
4409- elif args[0] == '2':
4410+ elif args[1] == 'F' or args[1] == 'B':
4411 # MINTMC MODE
4412 input_files.append(pjoin(cwd, 'madevent_mintMC'))
4413- if args[2] in ['0','2']:
4414- input_files.append(pjoin(self.me_dir, 'SubProcesses','madinMMC_%s.2' % args[1]))
4415-
4416- for i in subdir:
4417- current = 'G%s%s' % (args[1], i)
4418- if os.path.exists(pjoin(cwd,current)):
4419- input_files.append(pjoin(cwd, current))
4420- output_files.append(current)
4421- if len(args) == 4 and args[3] in ['H','S','V','B','F']:
4422- # use a grid train on another part
4423- base = '%s_%s' % (args[3],i)
4424- files.ln(pjoin(cwd,base,'mint_grids'), name = 'preset_mint_grids',
4425- starting_dir=pjoin(cwd,current))
4426- files.ln(pjoin(cwd,base,'grid.MC_integer'),
4427- starting_dir=pjoin(cwd,current))
4428- elif len(args) ==4:
4429- keep_fourth_arg = True
4430- # this is for the split event generation
4431- output_files.append('G%s%s_%s' % (args[1], i, args[3]))
4432- required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1], i, args[3],args[2]))
4433-
4434- else:
4435- required_output.append('%s/log_MINT%s.txt' % (current,args[2]))
4436- if args[2] in ['0','1']:
4437- required_output.append('%s/results.dat' % current)
4438- if args[2] == '1':
4439- output_files.append('%s/results.dat' % current)
4440+
4441+ if args[2] == '0':
4442+ current = 'G%s%s' % (args[1],args[0])
4443+ else:
4444+ current = 'G%s%s_%s' % (args[1],args[0],args[2])
4445+ if os.path.exists(pjoin(cwd,current)):
4446+ input_files.append(pjoin(cwd, current))
4447+ output_files.append(current)
4448+ if args[2] > '0':
4449+ # this is for the split event generation
4450+ output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
4451+ required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))
4452+
4453+ else:
4454+ required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
4455+ if args[3] in ['0','1']:
4456+ required_output.append('%s/results.dat' % current)
4457+ if args[3] == '1':
4458+ output_files.append('%s/results.dat' % current)
4459
4460 else:
4461 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))
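
The MadLoop5_resources handling earlier in this hunk either ships an existing tarball to the cluster node or packs one from the unpacked directory first. A minimal sketch of that packing step (hypothetical paths):

    import os
    import tarfile

    cwd = '/path/to/SubProcesses/P0_example'            # hypothetical
    tarball = os.path.join(cwd, 'MadLoop5_resources.tar.gz')
    resources = os.path.join(cwd, 'MadLoop5_resources')

    if not os.path.exists(tarball) and os.path.exists(resources):
        # dereference=True stores the files that symlinks point to, not the links
        tf = tarfile.open(tarball, 'w:gz', dereference=True)
        tf.add(resources, arcname='MadLoop5_resources')
        tf.close()
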
4462@@ -3601,73 +3915,9 @@
4463 #Find the correct PDF input file
4464 pdfinput = self.get_pdf_input_filename()
4465 if os.path.exists(pdfinput):
4466- input_files.append(pdfinput)
4467-
4468- if len(args) == 4 and not keep_fourth_arg:
4469- args = args[:3]
4470-
4471+ input_files.append(pdfinput)
4472 return input_files, output_files, required_output, args
4473-
4474- def write_madinMMC_file(self, path, run_mode, mint_mode):
4475- """writes the madinMMC_?.2 file"""
4476- #check the validity of the arguments
4477- run_modes = ['born', 'virt', 'novi', 'all', 'viSB', 'novB']
4478- if run_mode not in run_modes:
4479- raise aMCatNLOError('%s is not a valid mode for run. Please use one of the following: %s' \
4480- % (run_mode, ', '.join(run_modes)))
4481- mint_modes = [0, 1, 2]
4482- if mint_mode not in mint_modes:
4483- raise aMCatNLOError('%s is not a valid mode for mintMC. Please use one of the following: %s' \
4484- % (mint_mode, ', '.join(mint_modes)))
4485- if run_mode in ['born']:
4486- name_suffix = 'B'
4487- elif run_mode in ['virt', 'viSB']:
4488- name_suffix = 'V'
4489- else:
4490- name_suffix = 'F'
4491-
4492- content = \
4493-"""-1 12 ! points, iterations
4494-0.03 ! desired fractional accuracy
4495-1 -0.1 ! alpha, beta for Gsoft
4496--1 -0.1 ! alpha, beta for Gazi
4497-1 ! Suppress amplitude (0 no, 1 yes)?
4498-1 ! Exact helicity sum (0 yes, n = number/event)?
4499-1 ! Enter Configuration Number:
4500-%1d ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events
4501-1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij
4502-%s ! all, born, real, virt
4503-""" \
4504- % (mint_mode, run_mode)
4505- file = open(pjoin(path, 'madinMMC_%s.2' % name_suffix), 'w')
4506- file.write(content)
4507- file.close()
4508-
4509- def write_madin_file(self, path, run_mode, vegas_mode, npoints, niters, accuracy='0'):
4510- """writes the madin.run_mode file"""
4511- #check the validity of the arguments
4512- run_modes = ['born', 'virt', 'novi', 'all', 'viSB', 'novB', 'grid']
4513- if run_mode not in run_modes:
4514- raise aMCatNLOError('%s is not a valid mode for run. Please use one of the following: %s' \
4515- % (run_mode, ', '.join(run_modes)))
4516- name_suffix = run_mode
4517-
4518- content = \
4519-"""%s %s ! points, iterations
4520-%s ! accuracy
4521-2 ! 0 fixed grid 2 adjust
4522-1 ! 1 suppress amp, 0 doesnt
4523-1 ! 0 for exact hel sum
4524-1 ! hel configuration numb
4525-'test'
4526-1 ! 1 to save grids
4527-%s ! 0 to exclude, 1 for new run, 2 to restart, 3 to reset w/ keeping grid
4528-%s ! all, born, real, virt
4529-""" \
4530- % (npoints,niters,accuracy,vegas_mode,run_mode)
4531- file = open(pjoin(path, 'madin.%s' % name_suffix), 'w')
4532- file.write(content)
4533- file.close()
4534+
4535
4536 def compile(self, mode, options):
4537 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
4538@@ -3730,10 +3980,10 @@
4539
4540 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
4541 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
4542- lhaid_list = [int(self.run_card['lhaid'])]
4543+ lhaid_list = [self.run_card['lhaid']]
4544 if self.run_card['reweight_PDF']:
4545- lhaid_list.append(int(self.run_card['PDF_set_min']))
4546- lhaid_list.append(int(self.run_card['PDF_set_max']))
4547+ lhaid_list.append(self.run_card['PDF_set_min'])
4548+ lhaid_list.append(self.run_card['PDF_set_max'])
4549 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
4550
4551 else:
4552@@ -4045,18 +4295,23 @@
4553 void = 'NOT INSTALLED'
4554 switch_order = ['order', 'fixed_order', 'shower','madspin', 'reweight']
4555 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void,
4556- 'madspin': void}
4557+ 'madspin': void,'reweight':'OFF'}
4558 if not switch:
4559 switch = switch_default
4560 else:
4561 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch))
4562-
4563 default_switch = ['ON', 'OFF']
4564+
4565+
4566 allowed_switch_value = {'order': ['LO', 'NLO'],
4567 'fixed_order': default_switch,
4568 'shower': default_switch,
4569 'madspin': default_switch,
4570 'reweight': default_switch}
4571+
4572+
4573+
4574+
4575
4576 description = {'order': 'Perturbative order of the calculation:',
4577 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):',
4578@@ -4072,22 +4327,35 @@
4579 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']
4580
4581 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void )
4582-
4583+
4584+ if self.proc_characteristics['ninitial'] == 1:
4585+ switch['fixed_order'] = 'ON'
4586+ switch['shower'] = 'Not available for decay'
4587+ switch['madspin'] = 'Not available for decay'
4588+ switch['reweight'] = 'Not available for decay'
4589+ allowed_switch_value['fixed_order'] = ['ON']
4590+ allowed_switch_value['shower'] = ['OFF']
4591+ allowed_switch_value['madspin'] = ['OFF']
4592+ allowed_switch_value['reweight'] = ['OFF']
4593+ available_mode = ['0','1']
4594+ special_values = ['LO', 'NLO']
4595+ else:
4596+ # Init the switch value according to the current status
4597+ available_mode = ['0', '1', '2','3']
4598
4599 if mode == 'auto':
4600 mode = None
4601 if not mode and (options['parton'] or options['reweightonly']):
4602 mode = 'noshower'
4603
4604- # Init the switch value according to the current status
4605- available_mode = ['0', '1', '2']
4606- available_mode.append('3')
4607- if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
4608- switch['shower'] = 'ON'
4609- else:
4610- switch['shower'] = 'OFF'
4611+
4612+ if '3' in available_mode:
4613+ if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
4614+ switch['shower'] = 'ON'
4615+ else:
4616+ switch['shower'] = 'OFF'
4617
4618- if not aMCatNLO or self.options['mg5_path']:
4619+ if (not aMCatNLO or self.options['mg5_path']) and '3' in available_mode:
4620 available_mode.append('4')
4621 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
4622 switch['madspin'] = 'ON'
4623@@ -4102,8 +4370,7 @@
4624 else:
4625 switch['reweight'] = 'Not available (requires NumPy)'
4626
4627-
4628- if 'do_reweight' in options and options['do_reweight']:
4629+ if 'do_reweight' in options and options['do_reweight'] and '3' in available_mode:
4630 if switch['reweight'] == "OFF":
4631 switch['reweight'] = "ON"
4632 elif switch['reweight'] != "ON":
4633@@ -4113,12 +4380,12 @@
4634 switch['madspin'] = 'ON'
4635 elif switch['madspin'] != "ON":
4636 logger.critical("Cannot run MadSpin module: %s" % switch['reweight'])
4637-
4638-
4639+
4640 answers = list(available_mode) + ['auto', 'done']
4641 alias = {}
4642 for id, key in enumerate(switch_order):
4643- if switch[key] != void and switch[key] in allowed_switch_value[key]:
4644+ if switch[key] != void and switch[key] in allowed_switch_value[key] and \
4645+ len(allowed_switch_value[key]) >1:
4646 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]]
4647 #allow lower case for on/off
4648 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s))
4649@@ -4149,7 +4416,7 @@
4650 elif answer in ['0', 'auto', 'done']:
4651 return
4652 elif answer in special_values:
4653- logger.info('Enter mode value: Go to the related mode', '$MG:color:BLACK')
4654+ logger.info('Enter mode value: %s. Go to the related mode' % answer, '$MG:color:BLACK')
4655 #assign_switch('reweight', 'OFF')
4656 #assign_switch('madspin', 'OFF')
4657 if answer == 'LO':
4658@@ -4180,7 +4447,6 @@
4659 return
4660 return switch
4661
4662-
4663 modify_switch(mode, self.last_mode, switch)
4664 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
4665 assign_switch('madspin', 'ON')
4666@@ -4272,9 +4538,9 @@
4667 if mode in ['LO','aMC@LO','noshowerLO']:
4668 self.run_name += '_LO'
4669 self.set_run_name(self.run_name, self.run_tag, 'parton')
4670- if int(self.run_card['ickkw']) == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
4671+ if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
4672 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
4673- elif int(self.run_card['ickkw']) == 3 and mode in ['aMC@NLO', 'noshower']:
4674+ elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']:
4675 logger.warning("""You are running with FxFx merging enabled. To be able to merge
4676 samples of various multiplicities without double counting, you
4677 have to remove some events after showering 'by hand'. Please
4678@@ -4290,7 +4556,7 @@
4679             error = '''Stop operation'''
4680 self.ask_run_configuration(mode, options)
4681 # raise aMCatNLOError(error)
4682- elif int(self.run_card['ickkw']) == -1 and mode in ['aMC@NLO', 'noshower']:
4683+ elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
4684 # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs.
4685 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""")
4686 if 'aMC@' in mode or mode == 'onlyshower':
4687
4688=== modified file 'madgraph/interface/coloring_logging.py' (properties changed: -x to +x)
4689=== modified file 'madgraph/interface/common_run_interface.py' (properties changed: -x to +x)
4690--- madgraph/interface/common_run_interface.py 2015-08-27 19:32:34 +0000
4691+++ madgraph/interface/common_run_interface.py 2015-10-20 14:02:27 +0000
4692@@ -221,19 +221,17 @@
4693 #restrict_file = None
4694 #if os.path.exists(pjoin(ufo_path, 'restrict_default.dat')):
4695 # restrict_file = pjoin(ufo_path, 'restrict_default.dat')
4696+
4697+ force_CMS = self.mother and self.mother.options['complex_mass_scheme']
4698 model = import_ufo.import_model(modelname, decay=True,
4699- restrict=True)
4700- if self.mother and self.mother.options['complex_mass_scheme']:
4701- model.change_mass_to_complex_scheme()
4702+ restrict=True, complex_mass_scheme=force_CMS)
4703 else:
4704- model = import_ufo.import_model(pjoin(
4705- self.me_dir,'bin','internal', 'ufomodel'),decay=True)
4706 #pattern for checking complex mass scheme.
4707 has_cms = re.compile(r'''set\s+complex_mass_scheme\s*(True|T|1|true|$|;)''')
4708- if has_cms.search(open(pjoin(self.me_dir,'Cards','proc_card_mg5.dat')\
4709- ).read()):
4710- model.change_mass_to_complex_scheme()
4711-
4712+ force_CMS = has_cms.search(open(pjoin(self.me_dir,'Cards',
4713+ 'proc_card_mg5.dat')).read())
4714+ model = import_ufo.import_model(pjoin(self.me_dir,'bin','internal',
4715+ 'ufomodel'), decay=True, complex_mass_scheme=force_CMS)
4716
4717 # if not hasattr(model.get('particles')[0], 'partial_widths'):
4718 # raise self.InvalidCmd, 'The UFO model does not include partial widths information. Impossible to compute widths automatically'
4719@@ -527,6 +525,8 @@
4720
4721 self.me_dir = me_dir
4722 self.options = options
4723+
4724+        self.param_card_iterator = [] #a placeholder containing a generator of param_cards for scanning
4725
4726 # usefull shortcut
4727 self.status = pjoin(self.me_dir, 'status')
4728@@ -658,7 +658,7 @@
4729
4730 if amcatnlo and not keepwidth:
4731 # force particle in final states to have zero width
4732- pids = self.get_pid_final_states()
4733+ pids = self.get_pid_final_initial_states()
4734 # check those which are charged under qcd
4735 if not MADEVENT and pjoin(self.me_dir,'bin','internal') not in sys.path:
4736 sys.path.insert(0,pjoin(self.me_dir,'bin','internal'))
4737@@ -1351,8 +1351,8 @@
4738 self.update_status('delphes done', level='delphes', makehtml=False)
4739
4740 ############################################################################
4741- def get_pid_final_states(self):
4742- """Find the pid of all particles in the final states"""
4743+ def get_pid_final_initial_states(self):
4744+ """Find the pid of all particles in the final and initial states"""
4745 pids = set()
4746 subproc = [l.strip() for l in open(pjoin(self.me_dir,'SubProcesses',
4747 'subproc.mg'))]
4748@@ -1363,7 +1363,7 @@
4749 group = pat.findall(text)
4750 for particles in group:
4751 particles = particles.split(',')
4752- pids.update(set(particles[nb_init:]))
4753+ pids.update(set(particles))
4754
4755 return pids
4756
4757@@ -1594,12 +1594,29 @@
4758
4759
4760 def check_param_card(self, path, run=True):
4761- """Check that all the width are define in the param_card.
4762- If some width are set on 'Auto', call the computation tools."""
4763+ """
4764+        1) Check that no scan parameters are present
4765+        2) Check that all the widths are defined in the param_card.
4766+        - If a scan parameter is defined, create the iterator and recall this function
4767+          on the first element.
4768+        - If some widths are set to 'Auto', call the computation tools."""
4769
4770- pattern = re.compile(r'''decay\s+(\+?\-?\d+)\s+auto(@NLO|)''',re.I)
4771+ pattern_scan = re.compile(r'''^(decay)?[\s\d]*scan''', re.I+re.M)
4772+ pattern_width = re.compile(r'''decay\s+(\+?\-?\d+)\s+auto(@NLO|)''',re.I)
4773 text = open(path).read()
4774- pdg_info = pattern.findall(text)
4775+
4776+ if pattern_scan.search(text):
4777+ if not isinstance(self, cmd.CmdShell):
4778+ # we are in web mode => forbid scan due to security risk
4779+                raise Exception, "Scans are not allowed in web mode"
4780+            # at least one scan parameter found. create an iterator to go through the cards
4781+ main_card = check_param_card.ParamCardIterator(text)
4782+ self.param_card_iterator = main_card
4783+ first_card = main_card.next(autostart=True)
4784+ first_card.write(path)
4785+ return self.check_param_card(path, run)
4786+
4787+ pdg_info = pattern_width.findall(text)
4788 if pdg_info:
4789 if run:
4790 logger.info('Computing the width set on auto in the param_card.dat')
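
The scan detection above hinges on pattern_scan matching a parameter line whose value starts with 'scan'. A quick illustration with a made-up param_card fragment:

    import re

    pattern_scan = re.compile(r'^(decay)?[\s\d]*scan', re.I + re.M)
    card_text = "BLOCK MASS\n    6  scan:[165, 170, 175]  # hypothetical top-mass scan\n"
    print(bool(pattern_scan.search(card_text)))   # True -> build the card iterator
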
4791@@ -2320,40 +2337,8 @@
4792 # Read the comment of the param_card_default to find name variable for
4793 # the param_card also check which value seems to be constrained in the
4794 # model.
4795- for bname, block in default_param.items():
4796- for lha_id, param in block.param_dict.items():
4797- all_var = []
4798- comment = param.comment
4799- # treat merge parameter
4800- if comment.strip().startswith('set of param :'):
4801- all_var = list(re.findall(r'''[^-]1\*(\w*)\b''', comment))
4802- # just the variable name as comment
4803- elif len(comment.split()) == 1:
4804- all_var = [comment.strip().lower()]
4805- # either contraction or not formatted
4806- else:
4807- split = comment.split()
4808- if len(split) >2 and split[1] == ':':
4809- # NO VAR associated
4810- self.restricted_value[(bname, lha_id)] = ' '.join(split[1:])
4811- elif len(split) == 2:
4812- if re.search(r'''\[[A-Z]\]eV\^''', split[1]):
4813- all_var = [comment.strip().lower()]
4814- elif len(split) >=2 and split[1].startswith('('):
4815- all_var = [split[0].strip().lower()]
4816- else:
4817- if not bname.startswith('qnumbers'):
4818- logger.debug("not recognize information for %s %s : %s",
4819- bname, lha_id, comment)
4820- # not recognized format
4821- continue
4822-
4823- for var in all_var:
4824- var = var.lower()
4825- if var in self.pname2block:
4826- self.pname2block[var].append((bname, lha_id))
4827- else:
4828- self.pname2block[var] = [(bname, lha_id)]
4829+ self.pname2block, self.restricted_value = \
4830+ default_param.analyze_param_card()
4831
4832 if run_card_def:
4833 self.run_set = run_card_def.keys() + self.run_card.hidden_param
4834@@ -2622,12 +2607,15 @@
4835 """ edit the value of one parameter in the card"""
4836
4837 args = self.split_arg(line)
4838+        # fix some formatting problems
4839 if '=' in args[-1]:
4840 arg1, arg2 = args.pop(-1).split('=')
4841 args += [arg1, arg2]
4842 if '=' in args:
4843 args.remove('=')
4844- args[:-1] = [ a.lower() for a in args[:-1]]
4845+        # do not lowercase the case-sensitive parameters from the shower_card
4846+ if args[0].lower() not in ['analyse', 'extralibs', 'extrapaths', 'includepaths']:
4847+ args[:-1] = [ a.lower() for a in args[:-1]]
4848 # special shortcut:
4849 if args[0] in self.special_shortcut:
4850 if len(args) == 1:
4851@@ -2793,6 +2781,11 @@
4852 ### PARAM_CARD WITH BLOCK NAME -----------------------------------------
4853 elif (args[start] in self.param_card or args[start] == 'width') \
4854 and card in ['','param_card']:
4855+ #special treatment for scan
4856+ if any(t.startswith('scan') for t in args):
4857+ index = [i for i,t in enumerate(args) if t.startswith('scan')][0]
4858+ args = args[:index] + [' '.join(args[index:])]
4859+
4860 if args[start] in self.conflict and card == '':
4861 text = 'ambiguous name (present in more than one card). Please specify which card to edit'
4862 text += ' in the format < set card parameter value>'
4863@@ -2836,7 +2829,7 @@
4864 text += "You need to match this expression for external program (such pythia)."
4865 logger.warning(text)
4866
4867- if args[-1].lower() in ['default', 'auto', 'auto@nlo']:
4868+ if args[-1].lower() in ['default', 'auto', 'auto@nlo'] or args[-1].startswith('scan'):
4869 self.setP(args[start], key, args[-1])
4870 else:
4871 try:
4872@@ -3027,6 +3020,18 @@
4873 if block != 'decay':
4874 logger.warning('Invalid input: \'Auto\' value only valid for DECAY')
4875 return
4876+ elif value.startswith('scan'):
4877+ if ':' not in value:
4878+ logger.warning('Invalid input: \'scan\' mode requires a \':\' before the definition.')
4879+ return
4880+ tag = value.split(':')[0]
4881+ tag = tag[4:].strip()
4882+ if tag and not tag.isdigit():
4883+                logger.warning('Invalid input: scan tag needs to be an integer, not "%s"' % tag)
4884+ return
4885+
4886+
4887+ pass
4888 else:
4889 try:
4890 value = float(value)
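
The tag check above accepts values such as 'scan: [1, 2, 3]' or 'scan1: [1, 2, 3]', requiring only that whatever sits between 'scan' and ':' is empty or an integer. A hedged, self-contained version of that validation:

    def valid_scan_value(value):
        # mirrors the checks above: a ':' is required, and the tag between
        # 'scan' and ':' must be empty or an integer
        if ':' not in value:
            return False
        tag = value.split(':')[0][4:].strip()
        return (not tag) or tag.isdigit()

    print(valid_scan_value('scan: [0.1, 0.2, 0.3]'))    # True
    print(valid_scan_value('scan1:[10, 20]'))           # True
    print(valid_scan_value('scanA:[10, 20]'))           # False
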
4891@@ -3111,7 +3116,6 @@
4892 logger.warning("invalid command for decay. Line ignored")
4893 return
4894
4895- misc.sprint( line, "-add" in line)
4896 if "-add" in line:
4897 # just to have to add the line to the end of the file
4898 particle = line.split('>')[0].strip()
4899
4900=== modified file 'madgraph/interface/extended_cmd.py' (properties changed: -x to +x)
4901--- madgraph/interface/extended_cmd.py 2015-08-13 17:08:17 +0000
4902+++ madgraph/interface/extended_cmd.py 2015-10-20 14:02:27 +0000
4903@@ -80,7 +80,7 @@
4904 if readline and not 'libedit' in readline.__doc__:
4905 readline.set_completion_display_matches_hook(self.print_suggestions)
4906
4907- def deal_multiple_categories(self, dico):
4908+ def deal_multiple_categories(self, dico, forceCategory=False):
4909 """convert the multiple category in a formatted list understand by our
4910 specific readline parser"""
4911
4912@@ -92,7 +92,7 @@
4913 return out
4914
4915 # check if more than one categories but only one value:
4916- if all(len(s) <= 1 for s in dico.values() ):
4917+ if not forceCategory and all(len(s) <= 1 for s in dico.values() ):
4918 values = set((s[0] for s in dico.values() if len(s)==1))
4919 if len(values) == 1:
4920 return values
4921@@ -115,9 +115,9 @@
4922 opt.sort()
4923 out += opt
4924
4925-
4926- if valid == 1:
4927+ if not forceCategory and valid == 1:
4928 out = out[1:]
4929+
4930 return out
4931
4932 @debug()
4933
4934=== modified file 'madgraph/interface/launch_ext_program.py' (properties changed: -x to +x)
4935--- madgraph/interface/launch_ext_program.py 2015-08-18 09:16:14 +0000
4936+++ madgraph/interface/launch_ext_program.py 2015-10-20 14:02:27 +0000
4937@@ -481,7 +481,7 @@
4938 assert hasattr(self, 'cluster')
4939 assert hasattr(self, 'multicore')
4940 assert hasattr(self, 'name')
4941-# assert hasattr(self, 'shell')
4942+ assert hasattr(self, 'shell')
4943
4944 self.unit = unit
4945 self.run_mode = run_mode
4946@@ -519,7 +519,8 @@
4947 nb_node=max_node
4948
4949 import madgraph.interface.amcatnlo_run_interface as run_int
4950- if hasattr(self, 'shell'):
4951+
4952+ if hasattr(self, 'shell') and self.shell:
4953 usecmd = run_int.aMCatNLOCmdShell(me_dir=self.running_dir, options = self.cmd_int.options)
4954 else:
4955 usecmd = run_int.aMCatNLOCmd(me_dir=self.running_dir, options = self.cmd_int.options)
4956@@ -542,7 +543,7 @@
4957 usecmd, interface=False)
4958 #launch.me_dir = self.running_dir
4959 option_line = ' '.join([' --%s' % opt for opt in self.options.keys() \
4960- if self.options[opt] and not opt in ['cluster', 'multicore', 'name', 'appl_start_grid']])
4961+ if self.options[opt] and not opt in ['cluster', 'multicore', 'name', 'appl_start_grid','shell']])
4962 if self.options['name']:
4963 option_line += ' --name %s' % self.options['name']
4964 if 'appl_start_grid' in self.options and self.options['appl_start_grid']:
4965
4966=== modified file 'madgraph/interface/loop_interface.py' (properties changed: -x to +x)
4967--- madgraph/interface/loop_interface.py 2015-08-14 16:02:08 +0000
4968+++ madgraph/interface/loop_interface.py 2015-10-20 14:02:27 +0000
4969@@ -223,17 +223,20 @@
4970 if self._curr_amps and self._curr_amps[0].get_ninitial() != \
4971 proc.get_ninitial():
4972 raise self.InvalidCmd("Can not mix processes with different number of initial states.")
4973-
4974+
4975+# It is partially supported for now if the initial state is not charged
4976+# under the perturbed gauge group.
4977 # if proc.get_ninitial()==1 and tool=='aMC@NLO':
4978 # raise self.InvalidCmd("At this stage %s cannot handle decay process."%tool+\
4979 # "\nIt is however a straight-forward extension which "+\
4980 # "will come out with the next release.")
4981
4982- if isinstance(proc, base_objects.ProcessDefinition) and mode.startswith('ML5'):
4983- if proc.has_multiparticle_label():
4984- raise self.InvalidCmd, \
4985+# Now all checks should support multi-particle label for loops as well.
4986+ if isinstance(proc, base_objects.ProcessDefinition) and mode=='ML5':
4987+ if proc.has_multiparticle_label():
4988+ raise self.InvalidCmd(
4989 "When running ML5 standalone, multiparticle labels cannot be"+\
4990- " employed."
4991+ " employed.")
4992
4993 if proc['decay_chains']:
4994 raise self.InvalidCmd(
4995@@ -303,7 +306,7 @@
4996 model_path = self._curr_model.get('modelpath')
4997 model_name = self._curr_model.get('name')
4998 if model_name.split('-')[0]=='loop_sm':
4999- model_name = model_name[5:]
5000+ model_name = model_name[5:]
The diff has been truncated for viewing.
