Merge lp:~facundo/ubuntuone-client/query-no-content-file into lp:ubuntuone-client

Proposed by Facundo Batista
Status: Merged
Approved by: Natalia Bidart
Approved revision: 518
Merged at revision: 517
Proposed branch: lp:~facundo/ubuntuone-client/query-no-content-file
Merge into: lp:ubuntuone-client
Diff against target: 211 lines (+91/-21)
3 files modified
tests/syncdaemon/test_sync.py (+72/-14)
ubuntuone/syncdaemon/sync.py (+16/-4)
ubuntuone/syncdaemon/u1fsfsm.py (+3/-3)
To merge this branch: bzr merge lp:~facundo/ubuntuone-client/query-no-content-file
Reviewer Review Type Date Requested Status
Natalia Bidart (community) Approve
John O'Brien (community) Approve
Review via email: mp+24859@code.launchpad.net

Commit message

Queue an upload again on SV_HASH_NEW when the node is LOCAL and its hashes are ok.

Description of the change

Queue an upload again on SV_HASH_NEW when the node is LOCAL and its hashes are ok.

Refactored the tests a little to include more cases.
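
For context, the core of the change is a new state-machine action that re-queues the upload using only what is already stored in the node's metadata (local hash, crc32, size, which put_file/reput_file now also persist). A rough sketch of that action, mirroring the code in the diff below (self.key is the node's FSKey, self.m.action_q is the action queue):

    def reput_file_from_local(self, event, params, hash):
        """Re-put the file from its local state (sketch; see the diff below)."""
        # Everything needed to retry the interrupted upload was already
        # persisted in FSM by put_file()/reput_file(), so no new hashing
        # is required: just queue the upload again with the stored values.
        local_hash = self.key['local_hash']
        previous_hash = self.key['server_hash']
        crc32 = self.key['crc32']
        size = self.key['size']
        share_id = self.key['share_id']
        node_id = self.key['node_id']

        self.m.action_q.upload(share_id, node_id, previous_hash, local_hash,
                               crc32, size, self.key.open_file)

The u1fsfsm transition that used to be marked CONFLICT (with action 'nothing') for this case now points to reput_file_from_local instead.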

Revision history for this message
John O'Brien (jdobrien) wrote:

I keep getting protocol errors when running make check, and I'm using the latest protocol in trunk.

review: Needs Information
Revision history for this message
John O'Brien (jdobrien) wrote:

Looks good, all tests pass.

review: Approve
Revision history for this message
Natalia Bidart (nataliabidart) wrote:

Very nice.

review: Approve

Preview Diff

=== modified file 'tests/syncdaemon/test_sync.py'
--- tests/syncdaemon/test_sync.py 2010-04-27 22:09:05 +0000
+++ tests/syncdaemon/test_sync.py 2010-05-06 19:17:36 +0000
@@ -30,7 +30,7 @@
 from contrib.testing.testcase import (
     FakeVolumeManager,
     BaseTwistedTestCase,
-    MementoHandler, DummyClass
+    MementoHandler
 )

 from contrib.testing import testcase
@@ -181,8 +181,8 @@
                          self.fsm.get_by_path(path).server_hash)


-class TestSync(BaseTwistedTestCase):
-    """Test for Sync."""
+class BaseSync(BaseTwistedTestCase):
+    """Base test infrastructure for Sync."""

     def setUp(self):
         """Init."""
@@ -219,6 +219,10 @@
             raise exc_info[0], exc_info[1], exc_info[2]
         BaseTwistedTestCase.tearDown(self)

+
+class TestSync(BaseSync):
+    """Test for Sync."""
+
     def test_deleting_open_files_is_no_cause_for_despair(self):
         """test_deleting_open_files_is_no_cause_for_despair."""
         def cb(_):
@@ -257,8 +261,8 @@
         def faked_nothing(ssmr, event, params, *args):
             """Wrap SSMR.nothing to test."""
             self.called = True
+        self.patch(SyncStateMachineRunner, 'nothing', faked_nothing)

-        SyncStateMachineRunner.nothing = faked_nothing
         kwargs = dict(share_id='share_id', node_id='node_id')
         sync.handle_AQ_DOWNLOAD_DOES_NOT_EXIST(**kwargs)
         self.assertTrue(self.called, 'nothing was called')
@@ -271,7 +275,7 @@
         def faked_nothing(ssmr, event, params, *args):
             """Wrap SSMR.nothing to test."""
             self.called = True
-        SyncStateMachineRunner.nothing = faked_nothing
+        self.patch(SyncStateMachineRunner, 'nothing', faked_nothing)

         # create a file and put it in local
         fsm = self.main.fs
@@ -283,27 +287,60 @@
         sync.handle_FS_FILE_CREATE(somepath)
         self.assertTrue(self.called)

-
-class SyncStateMachineRunnerTestCase(BaseTwistedTestCase):
+    def test_SV_HASH_NEW_with_file_uploadinterrupted(self):
+        """A SV_HASH_NEW is received after upload interrupted."""
+        sync = Sync(main=self.main)
+        self.called = False
+
+        def fake_meth(_, event, params, hash):
+            """Wrap SSMR.reput_file_from_local to test."""
+            self.assertEqual(event, 'SV_HASH_NEW')
+            self.assertEqual(hash, '')
+            self.called = True
+        self.patch(SyncStateMachineRunner, 'reput_file_from_local', fake_meth)
+
+        # create a file and put it in local, without server_hash, as
+        # if the upload was cut in the middle after the make file
+        fsm = self.main.fs
+        somepath = os.path.join(self.root, 'somepath')
+        mdid = fsm.create(somepath, '', node_id='node_id')
+        fsm.set_by_mdid(mdid, local_hash='somehash', crc32='crc32',
+                        stat='stat', size='size')
+
+        # send the event and check
+        mdobj = fsm.get_by_mdid(mdid)
+        sync.handle_SV_HASH_NEW(mdobj.share_id, mdobj.node_id, '') # no content
+        self.assertTrue(self.called)
+
+
+class SyncStateMachineRunnerTestCase(BaseSync):
     """Tests for the SyncStateMachineRunner."""

     def setUp(self):
         """Init."""
-        BaseTwistedTestCase.setUp(self)
-        self.ssmr = SyncStateMachineRunner(fsm=None, main=None,
-                                           key=DummyClass(), logger=None)
+        BaseSync.setUp(self)
+
+        # create a file
+        self.fsm = self.main.fs
+        somepath = os.path.join(self.root, 'somepath')
+        self.mdid = self.fsm.create(somepath, '', node_id='node_id')
+
+        key = FSKey(self.main.fs, share_id='', node_id='node_id')
+        self.ssmr = SyncStateMachineRunner(fsm=self.main.fs, main=self.main,
+                                           key=key, logger=None)
         self.root = self.mktemp('root')

     def tearDown(self):
         """Clean up."""
         self.ssmr = None
-        shutil.rmtree(self.root)
-        BaseTwistedTestCase.tearDown(self)
+        BaseSync.tearDown(self)

-    def test_delete_file(self):
-        """delete_file can be called with or without the server hash."""
+    def test_delete_file_without_hash(self):
+        """Delete_file can be called without the server hash."""
         self.ssmr.delete_file(event='AQ_DOWNLOAD_ERROR', params=None)

+    def test_delete_file_with_hash(self):
+        """Delete_file can be called with the server hash."""
         self.ssmr.delete_file(event='AQ_DOWNLOAD_ERROR', params=None,
                               server_hash='')

@@ -333,3 +370,24 @@
         self.assertTrue("st_mtime" in log_msg)
         self.ssmr.log.removeHandler(handler)

+    def test_put_file_stores_info(self):
+        """The put_file method should store the info in FSM."""
+        self.ssmr.put_file('HQ_HASH_NEW', None, 'hash', 'crc', 'size', 'stt')
+
+        # check the info is stored
+        mdobj = self.fsm.get_by_mdid(self.mdid)
+        self.assertEqual(mdobj.local_hash, 'hash')
+        self.assertEqual(mdobj.crc32, 'crc')
+        self.assertEqual(mdobj.size, 'size')
+        self.assertEqual(mdobj.stat, 'stt')
+
+    def test_reput_file_stores_info(self):
+        """The reput_file method should store the info in FSM."""
+        self.ssmr.reput_file('HQ_HASH_NEW', None, 'hash', 'crc', 'size', 'stt')
+
+        # check the info is stored
+        mdobj = self.fsm.get_by_mdid(self.mdid)
+        self.assertEqual(mdobj.local_hash, 'hash')
+        self.assertEqual(mdobj.crc32, 'crc')
+        self.assertEqual(mdobj.size, 'size')
+        self.assertEqual(mdobj.stat, 'stt')

=== modified file 'ubuntuone/syncdaemon/sync.py'
--- ubuntuone/syncdaemon/sync.py 2010-04-27 16:32:20 +0000
+++ ubuntuone/syncdaemon/sync.py 2010-05-06 19:17:36 +0000
@@ -670,10 +670,22 @@
         """Starts the scan again on a dir."""
         self.m.lr.scan_dir(self.key['mdid'], self.key['path'], udfmode)

+    def reput_file_from_local(self, event, params, hash):
+        """Re put the file from its local state."""
+        local_hash = self.key['local_hash']
+        previous_hash = self.key['server_hash']
+        crc32 = self.key['crc32']
+        size = self.key['size']
+        share_id = self.key['share_id']
+        node_id = self.key['node_id']
+
+        self.m.action_q.upload(share_id, node_id, previous_hash, local_hash,
+                               crc32, size, self.key.open_file)
+
     def put_file(self, event, params, hash, crc32, size, stat):
-        """upload the file to the server."""
+        """Upload the file to the server."""
         previous_hash = self.key['server_hash']
-        self.key.set(local_hash=hash, stat=stat)
+        self.key.set(local_hash=hash, stat=stat, crc32=crc32, size=size)
         self.key.sync()

         self.m.action_q.upload(share_id=self.key['share_id'],
@@ -700,12 +712,12 @@


     def reput_file(self, event, param, hash, crc32, size, stat):
-        """put the file again."""
+        """Put the file again."""
         self.m.action_q.cancel_upload(share_id=self.key['share_id'],
                                       node_id=self.key['node_id'])
         previous_hash = self.key['server_hash']

-        self.key.set(local_hash=hash, stat=stat)
+        self.key.set(local_hash=hash, stat=stat, crc32=crc32, size=size)
         self.key.sync()
         self.m.action_q.upload(share_id=self.key['share_id'],
             node_id=self.key['node_id'], previous_hash=previous_hash,

=== modified file 'ubuntuone/syncdaemon/u1fsfsm.ods'
Binary files ubuntuone/syncdaemon/u1fsfsm.ods 2010-04-22 21:14:56 +0000 and ubuntuone/syncdaemon/u1fsfsm.ods 2010-05-06 19:17:36 +0000 differ
=== modified file 'ubuntuone/syncdaemon/u1fsfsm.py'
--- ubuntuone/syncdaemon/u1fsfsm.py 2010-04-22 21:14:56 +0000
+++ ubuntuone/syncdaemon/u1fsfsm.py 2010-05-06 19:17:36 +0000
@@ -2445,9 +2445,9 @@
      'STATE_OUT': {u'changed': u'*',
                    u'has_metadata': u'*',
                    u'is_directory': u'*'}},
-    {'ACTION': u'CONFLICT',
-     'ACTION_FUNC': u'nothing',
-     'COMMENTS': u'',
+    {'ACTION': u'aq.upload()',
+     'ACTION_FUNC': u'reput_file_from_local',
+     'COMMENTS': u'The upload was interrupted, just try it again. ',
      'PARAMETERS': {u'hash_eq_local_hash': u'F',
                     u'hash_eq_server_hash': u'T',
                     u'not_authorized': u'NA',
