Merge lp:~fabien-morin/unifield-server/fm-us-2339-new into lp:unifield-server
- fm-us-2339-new
- Merge into trunk
Status: | Merged |
---|---|
Merged at revision: | 4237 |
Proposed branch: | lp:~fabien-morin/unifield-server/fm-us-2339-new |
Merge into: | lp:unifield-server |
Diff against target: |
243 lines (+83/-46) 7 files modified
bin/addons/msf_profile/i18n/fr_MF.po (+7/-0) bin/addons/report_webkit/webkit_report.py (+51/-3) bin/addons/spreadsheet_xml/spreadsheet_xml.py (+1/-1) bin/addons/sync_client/backup.py (+2/-2) bin/tools/misc.py (+1/-1) setup.py (+13/-13) setup_py2exe_custom.py (+8/-26) |
To merge this branch: | bzr merge lp:~fabien-morin/unifield-server/fm-us-2339-new |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
UniField Reviewer Team | Pending | ||
Review via email: mp+317788@code.launchpad.net |
Commit message
Description of the change
Jeff Allen (jr.allen) : | # |
- 4206. By Fabien MORIN
-
US-2339 [FIX] no need to parse the same xml document twice
- 4207. By Fabien MORIN
-
US-2339 [MERGE] with latest trunk
- 4208. By Fabien MORIN
-
US-2339 [FIX] revert us-2374 as it breaks the AIO generation
- 4209. By Fabien MORIN
-
US-2339 [IMP] avoid crashing by ensuring name and sheet_name cannot be None
- 4210. By Fabien MORIN
-
US-2339 [ADD] translation for the Sheet name
- 4211. By Fabien MORIN
-
US-2339 [FIX] if the xml is modified in either of these two checks:
- worksheet name
- malformed data
the modified xml should be returned.
If not modified, return the original xml - 4212. By Fabien MORIN
-
US-2339 [FIX] getiterator has been deprecated for a long time; use iter instead
[FIX] iter (and now getiterator) cannot be restricted by
receiving a QName object, but only (from
http://lxml.de/api/lxml.etree._Element-class.html#iter)
"Can be restricted to find only elements with a specific tag: pass
"{ns}localname" as tag. Either or both of ns and localname can be * for a
wildcard; ns can be empty for no namespace. "localname" is equivalent to
"{}localname" (i.e. no namespace) but "*" is "{*}*" (any or no namespace), not
"{}*".
Preview Diff
1 | === modified file 'bin/addons/msf_profile/i18n/fr_MF.po' | |||
2 | --- bin/addons/msf_profile/i18n/fr_MF.po 2017-02-20 16:10:18 +0000 | |||
3 | +++ bin/addons/msf_profile/i18n/fr_MF.po 2017-02-21 15:07:51 +0000 | |||
4 | @@ -76441,3 +76441,10 @@ | |||
5 | 76441 | #, python-format | 76441 | #, python-format |
6 | 76442 | msgid "No journal found to book the reversal FX entry." | 76442 | msgid "No journal found to book the reversal FX entry." |
7 | 76443 | msgstr "Pas de journal trouvé pour enregistrer l'annulation de l'écriture de différence de change." | 76443 | msgstr "Pas de journal trouvé pour enregistrer l'annulation de l'écriture de différence de change." |
8 | 76444 | |||
9 | 76445 | #. module: report_webkit | ||
10 | 76446 | #: code:addons/report_webkit/webkit_report.py:435 | ||
11 | 76447 | #: code:addons/report_webkit/webkit_report.py:466 | ||
12 | 76448 | #, python-format | ||
13 | 76449 | msgid "Sheet 1" | ||
14 | 76450 | msgstr "Feuille 1" | ||
15 | 76444 | 76451 | ||
16 | === modified file 'bin/addons/report_webkit/webkit_report.py' | |||
17 | --- bin/addons/report_webkit/webkit_report.py 2017-02-09 13:41:23 +0000 | |||
18 | +++ bin/addons/report_webkit/webkit_report.py 2017-02-21 15:07:51 +0000 | |||
19 | @@ -423,6 +423,26 @@ | |||
20 | 423 | 423 | ||
21 | 424 | return result | 424 | return result |
22 | 425 | 425 | ||
23 | 426 | def sanitizeWorksheetName(self, name): | ||
24 | 427 | ''' | ||
25 | 428 | according to microsoft documentation : | ||
26 | 429 | https://msdn.microsoft.com/en-us/library/office/aa140066(v=office.10).aspx#odc_xmlss_ss:worksheet | ||
27 | 430 | The following caracters are not allowed : /, \, ?, *, [, ] | ||
28 | 431 | It also seems that microsoft excel do not accept Worksheet name longer | ||
29 | 432 | than 31 characters. | ||
30 | 433 | ''' | ||
31 | 434 | if not name: | ||
32 | 435 | return _('Sheet 1') | ||
33 | 436 | replacement_char = '-' | ||
34 | 437 | not_allowed_char_list = ['/', '\\', '?', '*', '[', ']'] | ||
35 | 438 | new_name = name | ||
36 | 439 | if set(new_name).intersection(not_allowed_char_list): | ||
37 | 440 | for char in not_allowed_char_list: | ||
38 | 441 | if char in new_name: | ||
39 | 442 | new_name = new_name.replace(char, replacement_char) | ||
40 | 443 | |||
41 | 444 | return new_name[:31] | ||
42 | 445 | |||
43 | 426 | def check_malformed_xml_spreadsheet(self, xml_string, report_name): | 446 | def check_malformed_xml_spreadsheet(self, xml_string, report_name): |
44 | 427 | '''Check that the xml spreadsheet doesn't contain | 447 | '''Check that the xml spreadsheet doesn't contain |
45 | 428 | node <Date ss:Type="DateTime"> with 'False' in the values | 448 | node <Date ss:Type="DateTime"> with 'False' in the values |
46 | @@ -431,10 +451,37 @@ | |||
47 | 431 | logger = logging.getLogger('mako_spreadsheet') | 451 | logger = logging.getLogger('mako_spreadsheet') |
48 | 432 | file_dom = etree.fromstring(xml_string) | 452 | file_dom = etree.fromstring(xml_string) |
49 | 433 | namespaces = { | 453 | namespaces = { |
50 | 454 | 'o': 'urn:schemas-microsoft-com:office:office', | ||
51 | 455 | 'x': 'urn:schemas-microsoft-com:office:excel', | ||
52 | 434 | 'ss': 'urn:schemas-microsoft-com:office:spreadsheet', | 456 | 'ss': 'urn:schemas-microsoft-com:office:spreadsheet', |
54 | 435 | 'spreadsheet': 'urn:schemas-microsoft-com:office:spreadsheet' | 457 | 'html': 'http://www.w3.org/TR/REC-html40' |
55 | 436 | } | 458 | } |
57 | 437 | data_time_elements = file_dom.xpath('//spreadsheet:Data[@ss:Type="DateTime"]', | 459 | |
58 | 460 | spreadsheet_elements = file_dom.xpath('//ss:Worksheet', | ||
59 | 461 | namespaces=namespaces) | ||
60 | 462 | |||
61 | 463 | xml_modified = False | ||
62 | 464 | sheet_name_dict = {} | ||
63 | 465 | count = 0 | ||
64 | 466 | for sheet in spreadsheet_elements: | ||
65 | 467 | sheet_name = sheet.get('{%(ss)s}Name' % namespaces, _('Sheet 1')) | ||
66 | 468 | new_name = self.sanitizeWorksheetName(sheet_name) | ||
67 | 469 | if new_name != sheet_name: | ||
68 | 470 | # if the sheet name already exists, modify it to add | ||
69 | 471 | # a counter to the name | ||
70 | 472 | if new_name in sheet_name_dict: | ||
71 | 473 | sheet_name_dict[new_name] += 1 | ||
72 | 474 | count = sheet_name_dict[new_name] | ||
73 | 475 | new_name = '%s_%s' % (new_name[:28], count) | ||
74 | 476 | else: | ||
75 | 477 | sheet_name_dict[new_name] = 1 | ||
76 | 478 | sheet.attrib['{urn:schemas-microsoft-com:office:spreadsheet}Name'] = new_name | ||
77 | 479 | xml_modified = True | ||
78 | 480 | else: | ||
79 | 481 | if new_name not in sheet_name_dict: | ||
80 | 482 | sheet_name_dict[new_name] = 1 | ||
81 | 483 | |||
82 | 484 | data_time_elements = file_dom.xpath('//ss:Data[@ss:Type="DateTime"]', | ||
83 | 438 | namespaces=namespaces) | 485 | namespaces=namespaces) |
84 | 439 | element_to_remove = [] | 486 | element_to_remove = [] |
85 | 440 | for element in data_time_elements: | 487 | for element in data_time_elements: |
86 | @@ -446,7 +493,8 @@ | |||
87 | 446 | # if a malformed node exists, replace it with an empty String cell | 493 | # if a malformed node exists, replace it with an empty String cell |
88 | 447 | element.attrib['{urn:schemas-microsoft-com:office:spreadsheet}Type'] = 'String' | 494 | element.attrib['{urn:schemas-microsoft-com:office:spreadsheet}Type'] = 'String' |
89 | 448 | element.text = '' | 495 | element.text = '' |
91 | 449 | if element_to_remove: | 496 | xml_modified = True |
92 | 497 | if xml_modified: | ||
93 | 450 | # return modified xml | 498 | # return modified xml |
94 | 451 | return etree.tostring(file_dom, xml_declaration=True, encoding="utf-8") | 499 | return etree.tostring(file_dom, xml_declaration=True, encoding="utf-8") |
95 | 452 | return xml_string | 500 | return xml_string |
96 | 453 | 501 | ||
97 | === modified file 'bin/addons/spreadsheet_xml/spreadsheet_xml.py' | |||
98 | --- bin/addons/spreadsheet_xml/spreadsheet_xml.py 2016-08-26 10:10:58 +0000 | |||
99 | +++ bin/addons/spreadsheet_xml/spreadsheet_xml.py 2017-02-21 15:07:51 +0000 | |||
100 | @@ -137,7 +137,7 @@ | |||
101 | 137 | 137 | ||
102 | 138 | def getRows(self,worksheet=1): | 138 | def getRows(self,worksheet=1): |
103 | 139 | table = self.xmlobj.xpath('//ss:Worksheet[%d]/ss:Table[1]'%(worksheet, ), **self.xa) | 139 | table = self.xmlobj.xpath('//ss:Worksheet[%d]/ss:Table[1]'%(worksheet, ), **self.xa) |
105 | 140 | return SpreadsheetRow(table[0].getiterator(etree.QName(self.defaultns, 'Row'))) | 140 | return SpreadsheetRow(table[0].iter('{%s}Row' % self.defaultns)) |
106 | 141 | 141 | ||
107 | 142 | def enc(self, s): | 142 | def enc(self, s): |
108 | 143 | if isinstance(s, unicode): | 143 | if isinstance(s, unicode): |
109 | 144 | 144 | ||
110 | === modified file 'bin/addons/sync_client/backup.py' | |||
111 | --- bin/addons/sync_client/backup.py 2017-02-16 15:52:37 +0000 | |||
112 | +++ bin/addons/sync_client/backup.py 2017-02-21 15:07:51 +0000 | |||
113 | @@ -76,12 +76,12 @@ | |||
114 | 76 | return 'UNKNOWN_VERSION' | 76 | return 'UNKNOWN_VERSION' |
115 | 77 | 77 | ||
116 | 78 | def _set_pg_psw_env_var(self): | 78 | def _set_pg_psw_env_var(self): |
118 | 79 | if tools.config['db_password'] and not os.environ.get('PGPASSWORD', ''): | 79 | if os.name == 'nt' and not os.environ.get('PGPASSWORD', ''): |
119 | 80 | os.environ['PGPASSWORD'] = tools.config['db_password'] | 80 | os.environ['PGPASSWORD'] = tools.config['db_password'] |
120 | 81 | self._pg_psw_env_var_is_set = True | 81 | self._pg_psw_env_var_is_set = True |
121 | 82 | 82 | ||
122 | 83 | def _unset_pg_psw_env_var(self): | 83 | def _unset_pg_psw_env_var(self): |
124 | 84 | if self._pg_psw_env_var_is_set: | 84 | if os.name == 'nt' and self._pg_psw_env_var_is_set: |
125 | 85 | os.environ['PGPASSWORD'] = '' | 85 | os.environ['PGPASSWORD'] = '' |
126 | 86 | 86 | ||
127 | 87 | def exp_dump_for_state(self, cr, uid, state, context=None, force=False): | 87 | def exp_dump_for_state(self, cr, uid, state, context=None, force=False): |
128 | 88 | 88 | ||
129 | === modified file 'bin/tools/misc.py' | |||
130 | --- bin/tools/misc.py 2017-02-17 15:44:19 +0000 | |||
131 | +++ bin/tools/misc.py 2017-02-21 15:07:51 +0000 | |||
132 | @@ -142,7 +142,7 @@ | |||
133 | 142 | return None | 142 | return None |
134 | 143 | 143 | ||
135 | 144 | def _set_env_pg(remove=False): | 144 | def _set_env_pg(remove=False): |
137 | 145 | if config['db_password']: | 145 | if os.name == 'nt': |
138 | 146 | if not remove and not os.environ.get('PGPASSWORD', ''): | 146 | if not remove and not os.environ.get('PGPASSWORD', ''): |
139 | 147 | os.environ['PGPASSWORD'] = config['db_password'] | 147 | os.environ['PGPASSWORD'] = config['db_password'] |
140 | 148 | if remove and os.environ.get('PGPASSWORD'): | 148 | if remove and os.environ.get('PGPASSWORD'): |
141 | 149 | 149 | ||
142 | === modified file 'setup.py' | |||
143 | --- setup.py 2017-02-16 15:52:37 +0000 | |||
144 | +++ setup.py 2017-02-21 15:07:51 +0000 | |||
145 | @@ -58,7 +58,7 @@ | |||
146 | 58 | "packages": [ | 58 | "packages": [ |
147 | 59 | "lxml", "lxml.builder", "lxml._elementpath", "lxml.etree", | 59 | "lxml", "lxml.builder", "lxml._elementpath", "lxml.etree", |
148 | 60 | "lxml.objectify", "decimal", "xml", "xml", "xml.dom", | 60 | "lxml.objectify", "decimal", "xml", "xml", "xml.dom", |
150 | 61 | "encodings", "dateutil", "wizard", "PIL", "pyparsing", | 61 | "encodings", "dateutil", "wizard", "pychart", "PIL", "pyparsing", |
151 | 62 | "pydot", "asyncore","asynchat", "reportlab", "vobject", | 62 | "pydot", "asyncore","asynchat", "reportlab", "vobject", |
152 | 63 | "HTMLParser", "select", "mako", "poplib", | 63 | "HTMLParser", "select", "mako", "poplib", |
153 | 64 | "imaplib", "smtplib", "email", "yaml", "DAV", | 64 | "imaplib", "smtplib", "email", "yaml", "DAV", |
154 | @@ -240,18 +240,18 @@ | |||
155 | 240 | }, | 240 | }, |
156 | 241 | package_dir = find_package_dirs(), | 241 | package_dir = find_package_dirs(), |
157 | 242 | install_requires = [ | 242 | install_requires = [ |
170 | 243 | 'lxml==2.2.4', | 243 | 'lxml', |
171 | 244 | 'mako==0.2.5', | 244 | 'mako', |
172 | 245 | 'python-dateutil==2.5.3', | 245 | 'python-dateutil', |
173 | 246 | 'psycopg2==2.0.13', | 246 | 'psycopg2', |
174 | 247 | 'pydot==1.0.2', | 247 | 'pychart', |
175 | 248 | 'pytz==2010b0', | 248 | 'pydot', |
176 | 249 | 'reportlab==2.4', | 249 | 'pytz', |
177 | 250 | 'pyyaml==3.12', | 250 | 'reportlab', |
178 | 251 | 'egenix-mx-base==3.2.9', | 251 | 'caldav', |
179 | 252 | 'passlib==1.6.5', | 252 | 'pyyaml', |
180 | 253 | 'bcrypt==3.1.1', | 253 | 'pywebdav', |
181 | 254 | 'xlwt==1.1.2', | 254 | 'feedparser', |
182 | 255 | ], | 255 | ], |
183 | 256 | extras_require={ | 256 | extras_require={ |
184 | 257 | 'SSL' : ['pyopenssl'], | 257 | 'SSL' : ['pyopenssl'], |
185 | 258 | 258 | ||
186 | === modified file 'setup_py2exe_custom.py' | |||
187 | --- setup_py2exe_custom.py 2017-02-16 15:52:37 +0000 | |||
188 | +++ setup_py2exe_custom.py 2017-02-21 15:07:51 +0000 | |||
189 | @@ -24,25 +24,7 @@ | |||
190 | 24 | 24 | ||
191 | 25 | import os | 25 | import os |
192 | 26 | import tempfile | 26 | import tempfile |
212 | 27 | import sys | 27 | from py2exe.build_exe import py2exe as build_exe, fancy_split |
194 | 28 | |||
195 | 29 | if sys.platform == 'nt': | ||
196 | 30 | from py2exe.build_exe import py2exe as build_exe, fancy_split | ||
197 | 31 | else: | ||
198 | 32 | # fake it for non-Windows, so that setup.py can be run for | ||
199 | 33 | # installing dependencies. | ||
200 | 34 | class _be(dict): | ||
201 | 35 | def __init__(self, arg1,arg2,arg3): | ||
202 | 36 | pass | ||
203 | 37 | def __dir__(self): | ||
204 | 38 | return tuple(self) | ||
205 | 39 | def __getattribute__(self, name): | ||
206 | 40 | if name == 'user_options': | ||
207 | 41 | return [] | ||
208 | 42 | else: | ||
209 | 43 | raise AttributeError(name) | ||
210 | 44 | build_exe = _be(1, 2, 3) | ||
211 | 45 | fancy_split = None | ||
213 | 46 | 28 | ||
214 | 47 | def fixup_data_pytz_zoneinfo(): | 29 | def fixup_data_pytz_zoneinfo(): |
215 | 48 | r = {} | 30 | r = {} |
216 | @@ -54,8 +36,8 @@ | |||
217 | 54 | return r.items() | 36 | return r.items() |
218 | 55 | 37 | ||
219 | 56 | def byte_compile_noop(py_files, optimize=0, force=0, | 38 | def byte_compile_noop(py_files, optimize=0, force=0, |
222 | 57 | target_dir=None, verbose=1, dry_run=0, | 39 | target_dir=None, verbose=1, dry_run=0, |
223 | 58 | direct=None): | 40 | direct=None): |
224 | 59 | 41 | ||
225 | 60 | compiled_files = [] | 42 | compiled_files = [] |
226 | 61 | from distutils.dir_util import mkpath | 43 | from distutils.dir_util import mkpath |
227 | @@ -177,11 +159,11 @@ | |||
228 | 177 | # Run fake compilation - just copy raw .py file into their | 159 | # Run fake compilation - just copy raw .py file into their |
229 | 178 | # destination directory | 160 | # destination directory |
230 | 179 | self.no_compiled_files = byte_compile_noop(py_files, | 161 | self.no_compiled_files = byte_compile_noop(py_files, |
236 | 180 | target_dir=self.collect_dir, | 162 | target_dir=self.collect_dir, |
237 | 181 | optimize=self.optimize, | 163 | optimize=self.optimize, |
238 | 182 | force=0, | 164 | force=0, |
239 | 183 | verbose=self.verbose, | 165 | verbose=self.verbose, |
240 | 184 | dry_run=self.dry_run) | 166 | dry_run=self.dry_run) |
241 | 185 | 167 | ||
242 | 186 | # Force relocate of specific packages data within collected libs dir | 168 | # Force relocate of specific packages data within collected libs dir |
243 | 187 | def fixup_location(l): | 169 | def fixup_location(l): |