Merge lp:~dobey/ubuntuone-client/update-3-0 into lp:ubuntuone-client/stable-3-0

Proposed by dobey
Status: Merged
Approved by: dobey
Approved revision: 1196
Merged at revision: 1196
Proposed branch: lp:~dobey/ubuntuone-client/update-3-0
Merge into: lp:ubuntuone-client/stable-3-0
Diff against target: 78 lines (+23/-9)
2 files modified
tests/syncdaemon/test_sync.py (+15/-7)
ubuntuone/syncdaemon/sync.py (+8/-2)
To merge this branch: bzr merge lp:~dobey/ubuntuone-client/update-3-0
Reviewer Review Type Date Requested Status
Brian Curtin (community) Approve
Review via email: mp+116340@code.launchpad.net

Commit message

Force a "reput from local" when the server_hash (that came in the delta) is ""

To post a comment you must log in.
Revision history for this message
Brian Curtin (brian.curtin) :
review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'tests/syncdaemon/test_sync.py'
--- tests/syncdaemon/test_sync.py 2012-04-09 20:07:05 +0000
+++ tests/syncdaemon/test_sync.py 2012-07-23 18:16:23 +0000
@@ -387,20 +387,29 @@
         self.sync._handle_SV_HASH_NEW(mdobj.share_id, mdobj.node_id, '')
         self.assertTrue(self.called)

-    def test_SV_HASH_NEW_with_file_uploadinterrupted_coverage(self):
-        """A SV_HASH_NEW is received after upload interrupted."""
+    def test_SV_HASH_NEW_with_special_hash(self):
+        """A SV_HASH_NEW is received with hash in None, don't care state."""
         self.called = False

-        # create a file and put it in local, without server_hash, as
-        # if the upload was cut in the middle after the make file
+        def fake_meth(_, event, params, hash):
+            """Wrap SSMR.reput_file_from_local to test."""
+            self.assertEqual(event, 'SV_HASH_NEW')
+            self.assertEqual(hash, '')
+            self.called = True
+        self.patch(SyncStateMachineRunner, 'reput_file_from_local', fake_meth)
+
+        # create a file and leave it as NONE state
         somepath = os.path.join(self.root, 'somepath')
         mdid = self.fsm.create(somepath, '', node_id='node_id')
-        self.fsm.set_by_mdid(mdid, local_hash='somehash', crc32='crc32',
-                             stat='stat', size='size')
+        self.fsm.set_by_mdid(mdid, local_hash='somehsh', server_hash='somehsh',
+                             crc32='crc32', stat='stat', size='size')

         # send the event with no content and check
         mdobj = self.fsm.get_by_mdid(mdid)
         self.sync._handle_SV_HASH_NEW(mdobj.share_id, mdobj.node_id, '')
+        self.assertTrue(self.called)
+        mdobj = self.fsm.get_by_mdid(mdid)
+        self.assertEqual(mdobj.server_hash, '')

     def test_AQ_FILE_NEW_OK_with_md_in_none(self):
         """Created the file, and MD says it's in NONE."""
@@ -991,7 +1000,6 @@
         with self._test_putcontent_upload_id(with_upload_id=False):
             self.ssmr.reput_file_from_local("SV_HASH_NEW", None, '')

-
     def test_commit_file_without_partial(self):
         """The .partial is lost when commiting the file."""
         # create the partial correctly, and break it!

=== modified file 'ubuntuone/syncdaemon/sync.py'
--- ubuntuone/syncdaemon/sync.py 2012-04-09 20:08:42 +0000
+++ ubuntuone/syncdaemon/sync.py 2012-07-23 18:16:23 +0000
@@ -589,10 +589,10 @@
         """Starts the scan again on a dir."""
         self.m.lr.scan_dir(self.key['mdid'], self.key['path'], udfmode)

-    def reput_file_from_local(self, event, params, hash):
+    def reput_file_from_local(self, event, params, hash_value):
         """Re put the file from its local state."""
         self.m.action_q.cancel_upload(share_id=self.key['share_id'],
                                       node_id=self.key['node_id'])

         local_hash = self.key['local_hash']
         previous_hash = self.key['server_hash']
@@ -841,7 +841,13 @@
         """on SV_HASH_NEW. No longer called by EQ, only internally."""
         key = FSKey(self.m.fs, share_id=share_id, node_id=node_id)
         log = FileLogger(self.logger, key)
+
         ssmr = SyncStateMachineRunner(self.fsm, self.m, key, log)
+        if hash == "":
+            # Special case for hash == "", aka broken node.
+            # Set the server_hash = hash to force LOCAL state
+            key.set(server_hash=hash)
+            key.sync()
         ssmr.signal_event_with_hash("SV_HASH_NEW", hash)

     def _handle_SV_FILE_NEW(self, share_id, node_id, parent_id, name):

Subscribers

People subscribed via source and target branches

to all changes: