Merge lp:~unifield-team/unifield-wm/sync-odoo-UF-2433 into lp:unifield-wm/sync

Proposed by jftempo
Status: Merged
Merged at revision: 508
Proposed branch: lp:~unifield-team/unifield-wm/sync-odoo-UF-2433
Merge into: lp:unifield-wm/sync
Diff against target: 549 lines (+185/-37) (has conflicts)
7 files modified
sync_client/monitor.py (+38/-1)
sync_client/monitor_view.xml (+11/-1)
sync_client/rpc.py (+40/-20)
sync_client/sync_client.py (+80/-10)
sync_client/sync_client_view.xml (+4/-0)
sync_client/timeout_transport.py (+11/-4)
sync_server/sync_server.py (+1/-1)
Text conflict in sync_client/sync_client.py
To merge this branch: bzr merge lp:~unifield-team/unifield-wm/sync-odoo-UF-2433
Reviewer: UniField Sync Reviewer
Status: Pending
Review via email: mp+239802@code.launchpad.net

Preview Diff

1=== modified file 'sync_client/monitor.py'
2--- sync_client/monitor.py 2014-09-29 15:35:02 +0000
3+++ sync_client/monitor.py 2014-10-28 09:21:32 +0000
4@@ -35,9 +35,21 @@
5 self.info = {
6 'status' : 'in-progress',
7 'data_pull' : 'null',
8+ 'data_pull_receive' : 'null',
9+ 'data_pull_execute' : 'null',
10+
11 'msg_pull' : 'null',
12+ 'msg_pull_receive' : 'null',
13+ 'msg_pull_execute' : 'null',
14+
15+ 'msg_push' : 'null',
16+ 'msg_push_create' : 'null',
17+ 'msg_push_send' : 'null',
18+
19 'data_push' : 'null',
20- 'msg_push' : 'null',
21+ 'data_push_create' : 'null',
22+ 'data_push_send' : 'null',
23+
24 }
25 self.info.update(defaults)
26 self.final_status = 'ok'
27@@ -140,6 +152,18 @@
28 return [
29 (rec.id, "(%d) %s" % (rec.sequence_number, rec.start))
30 for rec in self.browse(cr, user, ids, context=context) ]
31+
32+ def interrupt(self, cr, uid, ids, context=None):
33+ return self.pool.get('sync.client.entity').interrupt_sync(cr, uid, context=context)
34+
35+ def _is_syncing(self, cr, uid, ids, name, arg, context=None):
36+ res = dict.fromkeys(ids, "not_syncing")
37+ is_syncing = self.pool.get('sync.client.entity').is_syncing()
38+ for monitor in self.browse(cr, uid, ids, context=context):
39+ if monitor.status == 'in-progress' and is_syncing:
40+ res[monitor.id] = "syncing"
41+
42+ return res
43
44 _rec_name = 'start'
45
46@@ -148,11 +172,24 @@
47 'start' : fields.datetime("Start Date", readonly=True, required=True),
48 'end' : fields.datetime("End Date", readonly=True),
49 'data_pull' : fields.selection(status_dict.items(), string="Data Pull", readonly=True),
50+ 'data_pull_receive' : fields.selection(status_dict.items(), string="DP receive", readonly=True),
51+ 'data_pull_execute' : fields.selection(status_dict.items(), string="DP execute", readonly=True),
52+
53 'msg_pull' : fields.selection(status_dict.items(), string="Msg Pull", readonly=True),
54+ 'msg_pull_receive' : fields.selection(status_dict.items(), string="MP receive", readonly=True),
55+ 'msg_pull_execute' : fields.selection(status_dict.items(), string="Msg execute", readonly=True),
56+
57 'data_push' : fields.selection(status_dict.items(), string="Data Push", readonly=True),
58+ 'data_push_create' : fields.selection(status_dict.items(), string="DP create", readonly=True),
59+ 'data_push_send' : fields.selection(status_dict.items(), string="DP send", readonly=True),
60+
61 'msg_push' : fields.selection(status_dict.items(), string="Msg Push", readonly=True),
62+ 'msg_push_create' : fields.selection(status_dict.items(), string="MP Create", readonly=True),
63+ 'msg_push_send' : fields.selection(status_dict.items(), string="MP Send", readonly=True),
64 'status' : fields.selection(status_dict.items(), string="Status", readonly=True),
65 'error' : fields.text("Messages", readonly=True),
66+ 'state' : fields.function(_is_syncing, method=True, type='selection', string="Is Syncing", selection=[('syncing', 'Syncing'), ('not_syncing', 'Not Syncing')]),
67+
68 }
69
70 _defaults = {
71
72=== modified file 'sync_client/monitor_view.xml'
73--- sync_client/monitor_view.xml 2014-09-05 08:41:11 +0000
74+++ sync_client/monitor_view.xml 2014-10-28 09:21:32 +0000
75@@ -48,10 +48,20 @@
76 <field name="start"/>
77 <field name="end"/>
78 <field name="data_pull"/>
79+ <field name="data_pull_receive"/>
80+ <field name="data_pull_execute"/>
81 <field name="msg_pull"/>
82+ <field name="msg_pull_receive"/>
83+ <field name="msg_pull_execute"/>
84 <field name="data_push"/>
85+ <field name="data_push_create" />
86+ <field name="data_push_send" />
87 <field name="msg_push"/>
88- <field name="status"/>
89+ <field name="msg_push_create"/>
90+ <field name="msg_push_send"/>
91+ <field name="status" />
92+ <field name="state" />
93+ <button name="interrupt" type="object" string="Interrupt" icon="gtk-cancel" states="syncing" />
94 <field name="error"/>
95 </tree>
96 </field>
97
98=== modified file 'sync_client/rpc.py'
99--- sync_client/rpc.py 2014-09-04 13:12:41 +0000
100+++ sync_client/rpc.py 2014-10-28 09:21:32 +0000
101@@ -30,7 +30,6 @@
102
103 #logging.config.fileConfig('logging.cfg')
104
105-TIMEOUT = 3600
106 GZIP_MAGIC = '\x78\xda' # magic when max compression used
107 NB_RETRY = 10
108
109@@ -70,13 +69,14 @@
110
111 _logger = logging.getLogger('connector')
112
113- def __init__(self, hostname, port):
114+ def __init__(self, hostname, port, timeout):
115 """
116 :param hostname: Host name of the server
117 :param port: Port for the connection to the server
118 """
119 self.hostname = hostname
120 self.port = port
121+ self.timeout = timeout
122
123 class XmlRPCConnector(Connector):
124 """
125@@ -84,16 +84,35 @@
126 """
127 PROTOCOL = 'xmlrpc'
128
129- def __init__(self, hostname, port=8069):
130- Connector.__init__(self, hostname, port)
131+ def __init__(self, hostname, port=8069, timeout=10.0, retry=0):
132+ Connector.__init__(self, hostname, port, timeout=timeout)
133 self._logger = logging.getLogger('connector.xmlrpc')
134 self.url = 'http://%s:%s/xmlrpc' % (self.hostname, self.port)
135+ self.retry = retry
136
137 def send(self, service_name, method, *args):
138 url = '%s/%s' % (self.url, service_name)
139- timeout_transport = TimeoutTransport(timeout=TIMEOUT)
140- service = xmlrpclib.ServerProxy(url, allow_none=1, transport=timeout_transport)
141- return getattr(service, method)(*args)
142+ transport = TimeoutTransport(timeout=self.timeout)
143+ service = xmlrpclib.ServerProxy(url, allow_none=1, transport=transport)
144+ return self._send(service, method, *args)
145+
146+ def _send(self, service, method, *args):
147+ i = 0
148+ retry = True
149+ while retry:
150+ try:
151+ retry = False
152+ return getattr(service, method)(*args)
153+ except Exception, e:
154+ error = e
155+ if i < self.retry:
156+ print 'retry xml_rpc', i
157+ retry = True
158+ self._logger.debug("retry to connect %s, error : %s" ,i, e)
159+ i += 1
160+ if error:
161+ raise osv.except_osv(_('Error!'), "Unable to proceed for the following reason:\n%s" % (e.faultCode if hasattr(e, 'faultCode') else tools.ustr(e)))
162+
163
164 """Modified version of xmlrcpclib.Transport.request (same in Python 2.4, 2.5, 2.6)
165 to workaround Python bug http://bugs.python.org/issue1223
166@@ -131,14 +150,14 @@
167 """
168 PROTOCOL = 'xmlrpcs'
169
170- def __init__(self, hostname, port=8070):
171- XmlRPCConnector.__init__(self, hostname, port)
172+ def __init__(self, hostname, port=8070, timeout=10.0, retry=0):
173+ XmlRPCConnector.__init__(self, hostname, port, timeout=timeout, retry=retry)
174 self.url = 'https://%s:%s/xmlrpc' % (self.hostname, self.port)
175
176 def send(self, service_name, method, *args):
177 url = '%s/%s' % (self.url, service_name)
178 service = xmlrpclib.ServerProxy(url, allow_none=1)
179- return getattr(service, method)(*args)
180+ return self._send(service, method, *args)
181
182 class GzipXmlRPCConnector(XmlRPCConnector):
183 """
184@@ -148,9 +167,9 @@
185
186 def send(self, service_name, method, *args):
187 url = '%s/%s' % (self.url, service_name)
188- gzip_transport = GzipTransport(timeout=TIMEOUT)
189+ gzip_transport = GzipTransport(timeout=self.timeout)
190 service = xmlrpclib.ServerProxy(url, allow_none=1, transport=gzip_transport)
191- return getattr(service, method)(*args)
192+ return self._send(service, method, *args)
193
194 class GzipXmlRPCSConnector(GzipXmlRPCConnector):
195 PROTOCOL = 'gzipxmlrpcs'
196@@ -172,13 +191,13 @@
197 self.args = (faultCode, faultString)
198
199 class NetRPC:
200- def __init__(self, sock=None, is_gzip=False):
201+ def __init__(self, sock=None, is_gzip=False, timeout=10.0):
202 if sock is None:
203 self.sock = socket.socket(
204 socket.AF_INET, socket.SOCK_STREAM)
205 else:
206 self.sock = sock
207- self.sock.settimeout(TIMEOUT)
208+ self.sock.settimeout(timeout)
209 self.is_gzip = is_gzip
210 self._logger = logging.getLogger('netrpc')
211
212@@ -202,7 +221,7 @@
213 raw_size = len(msg)
214 msg = zlib.compress(msg, zlib.Z_BEST_COMPRESSION)
215 gzipped_size = len(msg)
216- saving = 100*(float(raw_size-gzipped_size))/gzipped_size if gzipped_size else 0
217+ #saving = 100*(float(raw_size-gzipped_size))/gzipped_size if gzipped_size else 0
218 #self._logger.debug('payload size: raw %s, gzipped %s, saving %.2f%%', raw_size, gzipped_size, saving)
219 size = len(msg)
220 self.sock.send('%8d' % size)
221@@ -237,7 +256,7 @@
222 gzipped_size = len(msg)
223 msg = zlib.decompress(msg)
224 raw_size = len(msg)
225- saving = 100*(float(raw_size-gzipped_size))/gzipped_size if gzipped_size else 0
226+ #saving = 100*(float(raw_size-gzipped_size))/gzipped_size if gzipped_size else 0
227 #self._logger.debug('payload size: raw %s, gzipped %s, saving %.2f%%', raw_size, gzipped_size, saving)
228 res = SafeUnpickler.loads(msg)
229
230@@ -251,10 +270,11 @@
231 class NetRPCConnector(Connector):
232 PROTOCOL = 'netrpc'
233
234- def __init__(self, hostname, port=8070, is_gzip=False):
235- Connector.__init__(self, hostname, port)
236+ def __init__(self, hostname, port=8070, is_gzip=False, timeout=10.0, retry=10):
237+ Connector.__init__(self, hostname, port, timeout=timeout)
238 self._logger = logging.getLogger('connector.netrpc')
239 self.is_gzip = is_gzip
240+ self.retry = retry
241
242 def send(self, service_name, method, *args):
243 i = 0
244@@ -264,13 +284,13 @@
245 while retry:
246 try:
247 retry = False
248- socket = NetRPC(is_gzip=self.is_gzip)
249+ socket = NetRPC(is_gzip=self.is_gzip, timeout=self.timeout)
250 socket.connect(self.hostname, self.port)
251 socket.mysend((service_name, method, )+args)
252 result = socket.myreceive()
253 except Exception, e:
254 error = e
255- if i < NB_RETRY:
256+ if i < self.retry:
257 retry = True
258 self._logger.debug("retry to connect %s, error : %s" ,i, e)
259 i += 1
260
261=== modified file 'sync_client/sync_client.py'
262--- sync_client/sync_client.py 2014-10-16 09:10:47 +0000
263+++ sync_client/sync_client.py 2014-10-28 09:21:32 +0000
264@@ -90,7 +90,44 @@
265 pass
266 finally:
267 cr.close()
268-
269+
270+def sync_subprocess(step='status', defaults_logger={}):
271+ def decorator(fn):
272+
273+ @functools.wraps(fn)
274+ def wrapper(self, cr, uid, *args, **kwargs):
275+ context = kwargs['context'] = kwargs.get('context') is not None and dict(kwargs.get('context', {})) or {}
276+ logger = context.get('logger')
277+ logger.switch(step, 'in-progress')
278+ logger.write()
279+ try:
280+ res = fn(self, self.sync_cursor, uid, *args, **kwargs)
281+ except osv.except_osv, e:
282+ logger.switch(step, 'failed')
283+ raise
284+ except BaseException, e:
285+ # Handle aborting of synchronization
286+ if isinstance(e, OperationalError) and e.message == 'Unable to use the cursor after having closed it':
287+ logger.switch(step, 'aborted')
288+ self.sync_cursor = None
289+ raise
290+ logger.switch(step, 'failed')
291+ error = "%s: %s" % (e.__class__.__name__, getattr(e, 'message', tools.ustr(e)))
292+ self._logger.exception('Error in sync_process at step %s' % step)
293+ logger.append(error, step)
294+ raise
295+ else:
296+ logger.switch(step, 'ok')
297+ if isinstance(res, (str, unicode)) and res:
298+ logger.append(res, step)
299+ finally:
300+ # gotcha!
301+ logger.write()
302+ return res
303+ return wrapper
304+ return decorator
305+
306+
307 def sync_process(step='status', need_connection=True, defaults_logger={}):
308 is_step = not (step == 'status')
309
310@@ -372,10 +409,10 @@
311
312
313 return True
314-
315+
316+ @sync_subprocess('data_push_create')
317 def create_update(self, cr, uid, context=None):
318 context = context or {}
319- logger = context.get('logger')
320 updates = self.pool.get(context.get('update_to_send_model', 'sync.client.update_to_send'))
321
322 def set_rules(identifier):
323@@ -403,6 +440,7 @@
324 return updates_count
325 #state init => update_send
326
327+ @sync_subprocess('data_push_send')
328 def send_update(self, cr, uid, context=None):
329 context = context or {}
330 logger = context.get('logger')
331@@ -508,13 +546,13 @@
332
333 return True
334
335+ @sync_subprocess('data_pull_receive')
336 def retrieve_update(self, cr, uid, max_packet_size, recover=False, context=None):
337 context = context or {}
338 logger = context.get('logger')
339 updates = self.pool.get(context.get('update_received_model', 'sync.client.update_received'))
340
341 entity = self.get_entity(cr, uid, context)
342- last = False
343 last_seq = entity.update_last
344 max_seq = entity.max_update
345 offset = entity.update_offset
346@@ -546,6 +584,7 @@
347
348 return updates_count
349
350+ @sync_subprocess('data_pull_execute')
351 def execute_updates(self, cr, uid, context=None):
352 context = context or {}
353 logger = context.get('logger')
354@@ -637,6 +676,7 @@
355
356 return True
357
358+ @sync_subprocess('msg_push_create')
359 def create_message(self, cr, uid, context=None):
360 context = context or {}
361 messages = self.pool.get(context.get('message_to_send_model', 'sync.client.message_to_send'))
362@@ -656,7 +696,8 @@
363 messages_count += messages.create_from_rule(cr, uid, rule, None, context=context)
364
365 return messages_count
366-
367+
368+ @sync_subprocess('msg_push_send')
369 def send_message(self, cr, uid, context=None):
370 context = context or {}
371 logger = context.get('logger')
372@@ -696,8 +737,11 @@
373 @sync_process('msg_pull')
374 def pull_message(self, cr, uid, recover=False, context=None):
375 context = context or {}
376+<<<<<<< TREE
377 logger = context.get('logger')
378 proxy = self.pool.get("sync.client.sync_server_connection").get_connection(cr, uid, "sync.server.sync_manager")
379+=======
380+>>>>>>> MERGE-SOURCE
381
382 entity = self.get_entity(cr, uid, context=context)
383 if recover:
384@@ -712,7 +756,8 @@
385 msg_count = self.execute_message(cr, uid, context=context)
386 self._logger.info("Pull message :: Number of messages pulled: %s" % msg_count)
387 return True
388-
389+
390+ @sync_subprocess('msg_pull_receive')
391 def get_message(self, cr, uid, context=None):
392 context = context or {}
393 logger = context.get('logger')
394@@ -746,6 +791,7 @@
395
396 return messages_count
397
398+ @sync_subprocess('msg_pull_execute')
399 def execute_message(self, cr, uid, context=None):
400 context = context or {}
401 logger = context.get('logger')
402@@ -894,6 +940,15 @@
403 % (_(monitor.status_dict[last_log[0]]), last_log[1])
404
405 return "Connected"
406+
407+ def interrupt_sync(self, cr, uid, context=None):
408+ if self.is_syncing():
409+ try:
410+ self._renew_sync_lock()
411+ except StandardError:
412+ return False
413+ self.sync_cursor.close()
414+ return True
415
416 Entity()
417
418@@ -952,6 +1007,9 @@
419 'password': fields.function(_get_password, fnct_inv=_set_password, string='Password', type='char', method=True, store=False),
420 'state' : fields.function(_get_state, method=True, string='State', type="char", readonly=True, store=False),
421 'max_size' : fields.integer("Max Packet Size"),
422+ 'timeout' : fields.float("Timeout"),
423+ 'netrpc_retry' : fields.integer("NetRPC retry"),
424+ 'xmlrpc_retry' : fields.integer("XmlRPC retry"),
425 }
426
427 _defaults = {
428@@ -962,6 +1020,9 @@
429 'login' : 'admin',
430 'max_size' : 500,
431 'database' : 'SYNC_SERVER',
432+ 'timeout' : 10.0,
433+ 'netrpc_retry' : 0,
434+ 'xmlrpc_retry' : 0,
435 }
436
437 def _get_connection_manager(self, cr, uid, context=None):
438@@ -972,17 +1033,21 @@
439
440 def connector_factory(self, con):
441 if con.protocol == 'xmlrpc':
442- connector = rpc.XmlRPCConnector(con.host, con.port)
443+ connector = rpc.XmlRPCConnector(con.host, con.port, timeout=con.timeout, retry=con.xmlrpc_retry)
444 elif con.protocol == 'gzipxmlrpc':
445+<<<<<<< TREE
446 connector = rpc.GzipXmlRPCConnector(con.host, con.port)
447 elif con.protocol == 'gzipxmlrpcs':
448 connector = rpc.GzipXmlRPCSConnector(con.host, con.port)
449+=======
450+ connector = rpc.GzipXmlRPCConnector(con.host, con.port, timeout=con.timeout, retry=con.xmlrpc_retry)
451+>>>>>>> MERGE-SOURCE
452 elif con.protocol == 'xmlrpcs':
453- connector = rpc.SecuredXmlRPCConnector(con.host, con.port)
454+ connector = rpc.SecuredXmlRPCConnector(con.host, con.port, timeout=con.timeout, retry=con.xmlrpc_retry)
455 elif con.protocol == 'netrpc':
456- connector = rpc.NetRPCConnector(con.host, con.port)
457+ connector = rpc.NetRPCConnector(con.host, con.port, timeout=con.timeout, retry=con.netrpc_retry)
458 elif con.protocol == 'netrpc_gzip':
459- connector = rpc.GzipNetRPCConnector(con.host, con.port)
460+ connector = rpc.GzipNetRPCConnector(con.host, con.port, timeout=con.timeout, retry=con.netrpc_retry)
461 else:
462 raise osv.except_osv('Connection Error','Unknown protocol: %s' % con.protocol)
463 return connector
464@@ -1030,6 +1095,7 @@
465 return rpc.Object(cnx, model)
466
467 def disconnect(self, cr, uid, context=None):
468+<<<<<<< TREE
469 con = self._get_connection_manager(cr, uid, context=context)
470 sync_args = {
471 'client_name': cr.dbname,
472@@ -1043,6 +1109,10 @@
473 self._logger.warning('Error during the disconnection of client \'%(client_name)s\'' % sync_args)
474 return False
475 entity.sync_cursor.close()
476+=======
477+ if not self.pool.get('sync.client.entity').interrupt_sync(cr, uid, context=context):
478+ return False
479+>>>>>>> MERGE-SOURCE
480 self._uid = False
481 self._logger.info('Client \'%(client_name)s\' succesfully disconnected from the sync. server \'%(server_name)s\'' % sync_args)
482 return True
483
484=== modified file 'sync_client/sync_client_view.xml'
485--- sync_client/sync_client_view.xml 2014-09-08 08:48:22 +0000
486+++ sync_client/sync_client_view.xml 2014-10-28 09:21:32 +0000
487@@ -32,6 +32,10 @@
488 <field name="login" required="1"/>
489 <field name="password" required="1" password="1"/>
490 <field name="max_size" required="1"/>
491+ <field name="timeout" required="1"/>
492+ <label string="" colspan="2" />
493+ <field name="netrpc_retry" required="1" />
494+ <field name="xmlrpc_retry" required="1" />
495 <separator string="Connection state" colspan="4"/>
496 <!-- the following line break the 6.1 web client backport because its a functional field of type integer -->
497 <!-- <field name="uid" invisible="1"/> -->
498
499=== modified file 'sync_client/timeout_transport.py'
500--- sync_client/timeout_transport.py 2014-09-04 13:12:41 +0000
501+++ sync_client/timeout_transport.py 2014-10-28 09:21:32 +0000
502@@ -1,5 +1,6 @@
503 import httplib
504 import xmlrpclib
505+import sys
506
507 class TimeoutHTTPConnection(httplib.HTTPConnection):
508
509@@ -7,6 +8,9 @@
510 httplib.HTTPConnection.connect(self)
511 if self.timeout is not None:
512 self.sock.settimeout(self.timeout)
513+
514+ def set_timeout(self, timeout):
515+ self.timeout = timeout
516
517 class TimeoutHTTP(httplib.HTTP):
518
519@@ -22,10 +26,13 @@
520 self.timeout = timeout
521
522 def make_connection(self, host):
523- # TODO: check make_connection for python > 2.6
524- conn = TimeoutHTTP(host)
525- conn.set_timeout(self.timeout)
526- return conn
527+ chost, self._extra_headers, _ = self.get_host_info(host)
528+ if sys.version_info < (2,7):
529+ self._connection = host, TimeoutHTTP(host)
530+ else:
531+ self._connection = host, TimeoutHTTPConnection(chost)
532+ self._connection[1].set_timeout(self.timeout)
533+ return self._connection[1]
534
535 class TimeoutHTTPSConnection(httplib.HTTPSConnection):
536
537
538=== modified file 'sync_server/sync_server.py'
539--- sync_server/sync_server.py 2014-09-19 08:10:11 +0000
540+++ sync_server/sync_server.py 2014-10-28 09:21:32 +0000
541@@ -346,7 +346,7 @@
542 return (True, {
543 'name': entity.name,
544 'identifier': entity.identifier,
545- 'parent': entity.parent_id.name,
546+ 'parent': entity.parent_id.name or '',
547 'email': entity.email,
548 'entity_status': entity.state,
549 'group': ', '.join([group.name for group in entity.group_ids]),
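
Note on the connection changes above: the branch drops the hard-coded TIMEOUT = 3600 in favour of a per-connection timeout plus an optional retry count, and both XmlRPCConnector._send and NetRPCConnector.send loop until the call succeeds or the retry budget is exhausted. A minimal standalone sketch of that pattern, illustrative only (call_with_retry and its arguments are made up and not part of the patch; the real code routes the timeout through TimeoutTransport rather than a default socket timeout):

import logging
import socket
import xmlrpclib

_logger = logging.getLogger('connector.example')

def call_with_retry(url, service_name, method, args, timeout=10.0, max_retries=0):
    """Call an XML-RPC method, retrying up to max_retries times on failure."""
    socket.setdefaulttimeout(timeout)  # crude stand-in for the patch's TimeoutTransport
    service = xmlrpclib.ServerProxy('%s/%s' % (url, service_name), allow_none=1)
    attempt = 0
    while True:
        try:
            return getattr(service, method)(*args)
        except Exception, e:
            if attempt >= max_retries:
                raise
            attempt += 1
            _logger.debug("retrying %s (attempt %s): %s", method, attempt, e)

Against a local OpenERP server this could be exercised as, e.g., call_with_retry('http://localhost:8069/xmlrpc', 'common', 'version', (), timeout=10.0, max_retries=3), assuming the standard /xmlrpc/common endpoint is reachable.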

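The timeout_transport.py hunk rewrites make_connection() because the old httplib.HTTP wrapper only works before Python 2.7, where xmlrpclib.Transport instead caches an HTTPConnection in self._connection. A reduced sketch of the same idea for Python 2.7 only (SimpleTimeoutTransport is a hypothetical name, not the class in the patch):

import httplib
import xmlrpclib

class SimpleTimeoutTransport(xmlrpclib.Transport):
    """xmlrpclib transport that applies a socket timeout (Python 2.7)."""

    def __init__(self, timeout=10.0, use_datetime=0):
        xmlrpclib.Transport.__init__(self, use_datetime=use_datetime)
        self.timeout = timeout

    def make_connection(self, host):
        # Mirror the 2.7 base implementation: cache (host, connection) pairs.
        if getattr(self, '_connection', None) and host == self._connection[0]:
            return self._connection[1]
        chost, self._extra_headers, _ = self.get_host_info(host)
        self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
        return self._connection[1]

Usage would look like xmlrpclib.ServerProxy(url, allow_none=1, transport=SimpleTimeoutTransport(timeout=10.0)), which is the shape of the send() calls in rpc.py.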
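
The sync_subprocess decorator added in sync_client.py wraps each sub-step (data_pull_receive, msg_push_send, and so on) so the monitor record reflects its in-progress / ok / failed / aborted state. The core pattern, stripped of the OpenERP cursor handling and the aborted-cursor special case (log_step is a hypothetical name; logger stands for the monitor logger with the switch()/append()/write() methods defined in monitor.py):

import functools

def log_step(step, logger):
    """Flag `step` as in-progress before the call, then ok or failed after."""
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            logger.switch(step, 'in-progress')
            logger.write()
            try:
                res = fn(*args, **kwargs)
            except Exception:
                logger.switch(step, 'failed')
                raise
            else:
                logger.switch(step, 'ok')
                return res
            finally:
                logger.write()
        return wrapper
    return decorator

In the patch itself the logger is pulled from kwargs['context'] on every call rather than captured at decoration time, and string results are appended to the step's log before returning.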