Merge lp:~unifield-team/unifield-wm/sync-sp-170 into lp:unifield-wm/sync

Proposed by jftempo
Status: Merged
Merged at revision: 492
Proposed branch: lp:~unifield-team/unifield-wm/sync-sp-170
Merge into: lp:unifield-wm/sync
Diff against target: 234 lines (+49/-12) (has conflicts)
4 files modified
sync_client/sync_client.py (+35/-12)
sync_client/update.py (+6/-0)
sync_server/message.py (+2/-0)
sync_server/update.py (+6/-0)
Text conflict in sync_client/sync_client.py
Text conflict in sync_client/update.py
To merge this branch: bzr merge lp:~unifield-team/unifield-wm/sync-sp-170
Reviewer Review Type Date Requested Status
UniField Sync Reviewer Pending
Review via email: mp+234951@code.launchpad.net
To post a comment you must log in.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'sync_client/sync_client.py'
2--- sync_client/sync_client.py 2014-08-28 12:44:27 +0000
3+++ sync_client/sync_client.py 2014-09-17 12:19:58 +0000
4@@ -33,7 +33,6 @@
5 from psycopg2 import OperationalError
6
7 import logging
8-from sync_common import sync_log
9
10 from threading import Thread, RLock, Lock
11 import pooler
12@@ -325,7 +324,6 @@
13 @sync_process('data_push')
14 def push_update(self, cr, uid, context=None):
15 context = context or {}
16- logger = context.get('logger')
17
18 entity = self.get_entity(cr, uid, context)
19
20@@ -337,18 +335,19 @@
21 updates_count = self.create_update(cr, uid, context=context)
22 cr.commit()
23 cont = updates_count > 0
24- self._logger.info("init")
25+ self._logger.info("Push data :: Updates created: %d" % updates_count)
26 if cont or entity.state == 'update_send':
27 updates_count = self.send_update(cr, uid, context=context)
28 cr.commit()
29 cont = True
30- self._logger.info("sent update")
31+ self._logger.info("Push data :: Updates sent: %d" % updates_count)
32 if cont or entity.state == 'update_validate':
33 server_sequence = self.validate_update(cr, uid, context=context)
34 cr.commit()
35- if logger and server_sequence:
36- logger.append(_("Update's server sequence: %d") % server_sequence)
37- self._logger.info("update validated")
38+ if server_sequence:
39+ self._logger.info(_("Push data :: New server's sequence number: %d") % server_sequence)
40+
41+
42
43 return True
44
45@@ -447,7 +446,6 @@
46 @sync_process('data_pull')
47 def pull_update(self, cr, uid, recover=False, context=None):
48 context = context or {}
49- logger = context.get('logger')
50
51 entity = self.get_entity(cr, uid, context=context)
52 if entity.state not in ('init', 'update_pull'):
53@@ -466,8 +464,10 @@
54 updates_count = self.retrieve_update(cr, uid, max_packet_size, recover=recover, context=context)
55 self._logger.info("::::::::The instance " + entity.name + " pulled: " + str(res[1]) + " messages and " + str(updates_count) + " updates.")
56 updates_executed = self.execute_updates(cr, uid, context=context)
57- if updates_executed == 0 and updates_count > 0 and logger:
58- logger.append(_("Warning: no update to execute, this case should never occurs."))
59+ if updates_executed == 0 and updates_count > 0:
60+ self._logger.warning("No update to execute, this case should never occur.")
61+
62+ self._logger.info("Pull data :: Number of updates pulled: %s" % updates_count)
63 return True
64
65 def set_last_sequence(self, cr, uid, context=None):
66@@ -475,6 +475,7 @@
67 proxy = self.pool.get("sync.client.sync_server_connection").get_connection(cr, uid, "sync.server.sync_manager")
68 res = proxy.get_max_sequence(entity.identifier)
69 if res and res[0]:
70+ self._logger.info("Pull data :: Last sequence: %s" % res[1])
71 return self.write(cr, uid, entity.id, {'max_update' : res[1]}, context=context)
72 elif res and not res[0]:
73 raise Exception, res[1]
74@@ -593,7 +594,6 @@
75 @sync_process('msg_push')
76 def push_message(self, cr, uid, context=None):
77 context = context or {}
78- logger = context.get('logger')
79 entity = self.get_entity(cr, uid, context)
80
81 if entity.state not in ['init', 'msg_push']:
82@@ -603,9 +603,11 @@
83 self.create_message(cr, uid, context=context)
84 cr.commit()
85
86- self.send_message(cr, uid, context=context)
87+ nb_msg = self.send_message(cr, uid, context=context)
88 cr.commit()
89
90+ self._logger.info("Push messages :: Number of messages pushed: %d" % nb_msg)
91+
92 return True
93
94 def create_message(self, cr, uid, context=None):
95@@ -677,9 +679,14 @@
96 raise SkipStep
97
98 self.get_message(cr, uid, context=context)
99+<<<<<<< TREE
100 # UTP-1177: Reset the message ids of the entity at the server side
101 proxy.reset_message_ids(entity.identifier)
102 self.execute_message(cr, uid, context=context)
103+=======
104+ msg_count = self.execute_message(cr, uid, context=context)
105+ self._logger.info("Pull message :: Number of messages pulled: %s" % msg_count)
106+>>>>>>> MERGE-SOURCE
107 return True
108
109 def get_message(self, cr, uid, context=None):
110@@ -769,10 +776,12 @@
111
112 @sync_process()
113 def sync(self, cr, uid, context=None):
114+ self._logger.info("Start synchronization")
115 self.pull_update(cr, uid, context=context)
116 self.pull_message(cr, uid, context=context)
117 self.push_update(cr, uid, context=context)
118 self.push_message(cr, uid, context=context)
119+ self._logger.info("Synchronization successfully done")
120 return True
121
122 def get_upgrade_status(self, cr, uid, context=None):
123@@ -816,6 +825,7 @@
124 This class is also a singleton
125
126 """
127+ _logger = logging.getLogger('sync.client.sync_server_connection')
128
129 def _auto_init(self,cr,context=None):
130 res = super(Connection, self)._auto_init(cr, context=context)
131@@ -897,6 +907,11 @@
132 return True
133 try:
134 con = self._get_connection_manager(cr, uid, context=context)
135+ sync_args = {
136+ 'client_name': cr.dbname,
137+ 'server_name': con.database,
138+ }
139+ self._logger.info('Client \'%(client_name)s\' attempts to connect to sync. server \'%(server_name)s\'' % sync_args)
140 connector = self.connector_factory(con)
141 if not getattr(self, '_password', False):
142 self._password = con.login
143@@ -912,6 +927,7 @@
144 except BaseException, e:
145 raise osv.except_osv(_("Error"), _(unicode(e)))
146
147+ self._logger.info('Client \'%(client_name)s\' successfully connected to sync. server \'%(server_name)s\'' % sync_args)
148 return True
149
150 def action_connect(self, cr, uid, ids, context=None):
151@@ -929,14 +945,21 @@
152 return rpc.Object(cnx, model)
153
154 def disconnect(self, cr, uid, context=None):
155+ con = self._get_connection_manager(cr, uid, context=context)
156+ sync_args = {
157+ 'client_name': cr.dbname,
158+ 'server_name': con.database,
159+ }
160 entity = self.pool.get('sync.client.entity')
161 if entity.is_syncing():
162 try:
163 entity._renew_sync_lock()
164 except StandardError:
165+ self._logger.warning('Error during the disconnection of client \'%(client_name)s\'' % sync_args)
166 return False
167 entity.sync_cursor.close()
168 self._uid = False
169+ self._logger.info('Client \'%(client_name)s\' successfully disconnected from the sync. server \'%(server_name)s\'' % sync_args)
170 return True
171
172 def action_disconnect(self, cr, uid, ids, context=None):
173
174=== modified file 'sync_client/update.py'
175--- sync_client/update.py 2014-08-27 09:07:35 +0000
176+++ sync_client/update.py 2014-09-17 12:19:58 +0000
177@@ -471,7 +471,13 @@
178 #1 conflict detection
179 if self._conflict(cr, uid, update.sdref, update.version, context=context):
180 #2 if conflict => manage conflict according rules : report conflict and how it's solve
181+<<<<<<< TREE
182 logs[update.id] = sync_log(self, "Conflict detected!", 'warning', data=(update.id, update.sdref)) + "\n"
183+=======
184+ index_id = eval(update.fields).index('id')
185+ sd_ref = eval(update.values)[index_id]
186+ logs[update.id] = sync_log(self, "Conflict detected!", 'warning', data=(update.id, sd_ref)) + "\n"
187+>>>>>>> MERGE-SOURCE
188
189 if bad_fields:
190 import_fields = [import_fields[i] for i in range(len(import_fields)) if i not in bad_fields]
191
192=== modified file 'sync_server/message.py'
193--- sync_server/message.py 2014-08-27 12:52:35 +0000
194+++ sync_server/message.py 2014-09-17 12:19:58 +0000
195@@ -94,6 +94,7 @@
196 'source': entity.id,
197 }, context=context)
198
199+ self._logger.info("[%s] Message push :: Number of messages pushed: %s" % (entity.name, len(package)))
200 return (True, "Message received")
201
202 def _get_destination(self, cr, uid, dest, context=None):
203@@ -154,6 +155,7 @@
204 }
205 packet.append(message)
206
207+ self._logger.info("[%s] Message pull :: Number of messages pulled: %s" % (entity.name, len(packet)))
208 return packet
209
210 def set_message_as_received(self, cr, uid, entity, message_uuids, context=None):
211
212=== modified file 'sync_server/update.py'
213--- sync_server/update.py 2014-01-20 09:02:10 +0000
214+++ sync_server/update.py 2014-09-17 12:19:58 +0000
215@@ -261,6 +261,9 @@
216 if update_ids:
217 sequence = self._get_next_sequence(cr, uid, context=context)
218 self.write(cr, 1, update_ids, {'sequence' : sequence}, context=context)
219+ self._logger.info("[%s] Data Push :: Number of data pushed: %d" % (entity.name, len(update_ids)))
220+ if sequence:
221+ self._logger.info("[%s] Data Push :: New server's sequence number: %s" % (entity.name, sequence))
222 return (True, sequence)
223
224 def _get_next_sequence(self, cr, uid, context=None):
225@@ -444,6 +447,9 @@
226 'handle_priority' : update.handle_priority,
227 })
228
229+ self._logger.info("[%s] Data pull :: Client last sequence number: %s" % (entity.name, last_seq))
230+ self._logger.info("[%s] Data pull :: Server last sequence number: %s" % (entity.name, self.get_last_sequence(cr, uid)))
231+ self._logger.info("[%s] Data pull :: Number of data pulled: %s" % (entity.name, len(update_to_send)))
232 return data
233
234 def get_additional_forced_field(self, update):

Subscribers

People subscribed via source and target branches

to all changes: