Merge lp:~verterok/ubuntuone-client/fix-598189 into lp:ubuntuone-client

Proposed by Guillermo Gonzalez
Status: Merged
Approved by: John O'Brien
Approved revision: 689
Merged at revision: 698
Proposed branch: lp:~verterok/ubuntuone-client/fix-598189
Merge into: lp:ubuntuone-client
Diff against target: 122 lines (+94/-0)
2 files modified
tests/syncdaemon/test_localrescan.py (+72/-0)
ubuntuone/syncdaemon/local_rescan.py (+22/-0)
To merge this branch: bzr merge lp:~verterok/ubuntuone-client/fix-598189
Reviewer Review Type Date Requested Status
John O'Brien (community) Approve
Facundo Batista (community) Approve
Review via email: mp+35013@code.launchpad.net

Commit message

Fix local rescan to process read-only shares and resume interrupted downloads.

Description of the change

Local rescan now processes read-only shares and resumes interrupted downloads.
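The new step runs right after the old partials cleanup: every read-only ("View") share is walked, and any node still marked as changed on the server side (an interrupted download) is handled again. A minimal, self-contained sketch of that idea, using toy stand-ins for the real FSM/ActionQueue objects (the actual implementation is _process_ro_shares in the diff below):

CHANGED_SERVER = "SERVER"  # node still has a pending server change, i.e. an interrupted download


class Node(object):
    """Toy stand-in for an FSM metadata object."""
    def __init__(self, path, is_dir, state):
        self.path, self.is_dir, self.state = path, is_dir, state


def process_ro_share(nodes, resume_download, fix_dir):
    """Re-handle the nodes of a read-only share left in SERVER state."""
    for node in nodes:
        if node.state != CHANGED_SERVER:
            continue
        if node.is_dir:
            # old pre-generations state: just clean it up
            fix_dir(node)
        else:
            # ask for the file content again
            resume_download(node)


if __name__ == "__main__":
    nodes = [Node("a.txt", False, CHANGED_SERVER), Node("docs", True, CHANGED_SERVER)]
    process_ro_share(nodes,
                     resume_download=lambda n: print("resume %s" % n.path),
                     fix_dir=lambda n: print("fix dir %s" % n.path))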

Revision history for this message
Facundo Batista (facundo) wrote :

I would use local variables for self.fsm.changed and self.fsm.CHANGED_SERVER instead of looking them up through self and fsm (twice) on every iteration of the nested for loops.

Otherwise, it's ok.

review: Approve
Revision history for this message
Guillermo Gonzalez (verterok) wrote :

good point, fixed and pushed.

689. By Guillermo Gonzalez

avoid dict lookups in the nested for loop
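For illustration only, the change in this revision is the usual pattern of binding attribute lookups to local names before a hot nested loop; a generic, runnable sketch (not the branch code, which is in _process_ro_shares in the diff below):

class Example(object):
    CHANGED_SERVER = "SERVER"

    def changed(self, mdid):
        # pretend the odd ids were left mid-download
        return "SERVER" if mdid % 2 else "NONE"

    def count_changed(self, groups):
        # Resolve self.changed and self.CHANGED_SERVER once, before the
        # loops, instead of on every iteration of the nested for.
        changed = self.changed
        CHANGED_SERVER = self.CHANGED_SERVER
        return sum(1 for group in groups for mdid in group
                   if changed(mdid) == CHANGED_SERVER)


print(Example().count_changed([[1, 2, 3], [4, 5]]))  # prints 3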

Revision history for this message
John O'Brien (jdobrien) wrote :

Tests say [OK] and I do too.

review: Approve

Preview Diff

=== modified file 'tests/syncdaemon/test_localrescan.py'
--- tests/syncdaemon/test_localrescan.py	2010-09-07 19:41:16 +0000
+++ tests/syncdaemon/test_localrescan.py	2010-09-13 16:08:46 +0000
@@ -1894,6 +1894,78 @@
         self.startTest(check)
         return self.deferred
 
+    def test_SERVER_file_ro_share(self):
+        """We were downloading the file, but it was interrupted in RO share."""
+        # create the file in metadata
+        ro_share = self.create_share('share_ro_id', 'share_ro2', self.fsm,
+                                     self.shares_dir, access_level='View')
+        self.fsm.create(ro_share.path, ro_share.id, is_dir=True)
+        self.fsm.set_node_id(ro_share.path, "uuidshare")
+        path = os.path.join(ro_share.path, "a")
+        with self.fsm._enable_share_write(ro_share.id, path):
+            open(path, "w").close()
+        mdid = self.fsm.create(path, ro_share.volume_id, is_dir=False)
+        partial_path = os.path.join(self.fsm.partials_dir,
+                                    mdid + ".u1partial." + os.path.basename(path))
+        self.fsm.set_node_id(path, "uuid")
+
+        # start the download, never complete it
+        self.fsm.set_by_mdid(mdid, server_hash="blah-hash-blah")
+        self.fsm.create_partial("uuid", ro_share.volume_id)
+        fh = self.fsm.get_partial_for_writing("uuid", ro_share.volume_id)
+        fh.write("foobar")
+        self.assertTrue(os.path.exists(partial_path))
+
+        def check(_):
+            """Compare predictable args, and check fh factory."""
+            mdobj = self.fsm.get_by_mdid(mdid)
+            self.assertEqual(self.aq.downloaded[:3], (
+                mdobj.share_id, mdobj.node_id, mdobj.server_hash))
+            factory = self.aq.downloaded[-1]
+            self.assertTrue(isinstance(factory(), file))
+            self.assertTrue(self.handler.check_debug("comp yield", "SERVER"))
+
+        self.startTest(check)
+        return self.deferred
+
+    def test_SERVER_dir_ro_share(self):
+        """Found a dir in SERVER in a ro_share.
+
+        This was valid before, but no more, so we just fix and log in warning.
+        """
+        # create the dir in metadata
+        ro_share = self.create_share('share_ro_id', 'share_ro2', self.fsm,
+                                     self.shares_dir, access_level='View')
+        self.fsm.create(ro_share.path, ro_share.id, is_dir=True)
+        self.fsm.set_node_id(ro_share.path, "uuidshare")
+
+        path = os.path.join(ro_share.path, "a")
+        mdid = self.fsm.create(path, ro_share.volume_id, is_dir=True)
+        partial_path = os.path.join(self.fsm.partials_dir,
+                                    mdid + ".u1partial." + os.path.basename(path))
+        self.fsm.set_node_id(path, "uuid")
+
+        # start the download, never complete it
+        self.fsm.set_by_mdid(mdid, server_hash="blah-hash-blah")
+        self.fsm.create_partial("uuid", ro_share.volume_id)
+        fh = self.fsm.get_partial_for_writing("uuid", ro_share.volume_id)
+        fh.write("foobar")
+        self.assertTrue(os.path.exists(partial_path))
+
+        def check(_):
+            """Several checks."""
+            mdobj = self.fsm.get_by_mdid(mdid)
+            # it should be in NONE
+            self.assertFalse(mdobj.info.is_partial)
+            self.assertEqual(mdobj.server_hash, mdobj.local_hash)
+            self.assertFalse(os.path.exists(partial_path))
+            # logged in warning
+            self.assertTrue(self.handler.check_warning(
+                "Found a directory in SERVER"))
+
+        self.startTest(check)
+        return self.deferred
+
 
 class RootBadStateTests(TwistedBase):
     """Test what happens with volume roots left in a bad state last time."""
 
=== modified file 'ubuntuone/syncdaemon/local_rescan.py'
--- ubuntuone/syncdaemon/local_rescan.py	2010-09-07 19:41:16 +0000
+++ ubuntuone/syncdaemon/local_rescan.py	2010-09-13 16:08:46 +0000
@@ -76,6 +76,7 @@
         # first of all, remove old partials and clean trash
         self._remove_partials()
         self._process_limbo()
+        self._process_ro_shares()
 
         yield self._queue_scan()
         self._show_broken_nodes()
@@ -129,6 +130,27 @@
             self.aq.move(share_id, node_id, old_parent_id,
                          new_parent_id, new_name)
 
+    def _process_ro_shares(self):
+        """Process ro shares and reschedule interrupted downloads."""
+        # to avoid the lookups in the nested for
+        changed = self.fsm.changed
+        CHANGED_SERVER = self.fsm.CHANGED_SERVER
+        for share in self._get_volumes(all_volumes=False, access_level="View"):
+            for mdobj in self.fsm.get_mdobjs_by_share_id(share.id):
+                if changed(mdid=mdobj.mdid) == CHANGED_SERVER:
+                    fullname = os.path.join(share.path, mdobj.path)
+                    if mdobj.is_dir:
+                        # old state, no sense now with generations
+                        # but required for the migration path.
+                        log_warning("Found a directory in SERVER: %r", fullname)
+                        mdobj = self.fsm.get_by_path(fullname)
+                        self.fsm.set_by_mdid(mdobj.mdid,
+                                             server_hash=mdobj.local_hash)
+                        self.fsm.remove_partial(mdobj.node_id, mdobj.share_id)
+                    else:
+                        log_debug("comp yield: file %r in SERVER", fullname)
+                        self._resume_download(fullname)
+
     def _show_broken_nodes(self):
         """Log the broken nodes."""
         for mdobj in self.fsm.get_dirty_nodes():
