Merge lp:~alecu/ubuntuone-client/ziggy-for-filesync into lp:ubuntuone-client

Proposed by Alejandro J. Cura
Status: Merged
Approved by: dobey
Approved revision: 779
Merged at revision: 774
Proposed branch: lp:~alecu/ubuntuone-client/ziggy-for-filesync
Merge into: lp:ubuntuone-client
Diff against target: 1754 lines (+1401/-83)
8 files modified
contrib/testing/testcase.py (+1/-1)
tests/syncdaemon/test_event_logging.py (+930/-74)
tests/syncdaemon/test_fsm.py (+36/-0)
tests/syncdaemon/test_sync.py (+55/-3)
ubuntuone/syncdaemon/event_logging.py (+359/-1)
ubuntuone/syncdaemon/event_queue.py (+6/-0)
ubuntuone/syncdaemon/filesystem_manager.py (+4/-1)
ubuntuone/syncdaemon/sync.py (+10/-3)
To merge this branch: bzr merge lp:~alecu/ubuntuone-client/ziggy-for-filesync
Reviewer Review Type Date Requested Status
Facundo Batista (community) Approve
Natalia Bidart (community) Approve
Review via email: mp+43495@code.launchpad.net

Commit message

Remaining SyncDaemon events to be logged into Zeitgeist.

Description of the change

Remaining SyncDaemon events to be logged into Zeitgeist.
https://wiki.ubuntu.com/UbuntuOne/Specs/ZeitgeistIntegration/EventsSpec

Directory and file synchronization and conflicts, UDF creation/deletion/subscription, and file publishing.
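
The pattern used throughout the branch mirrors the existing share events: build a Zeitgeist Subject describing the folder or file, wrap it in an Event with a u1:// interpretation, and hand it to the ZeitgeistLogger. A minimal sketch of the UDF-created case, condensed from handle_VM_UDF_CREATED in the diff below (the standalone log_udf_created helper here is illustrative only, not part of the branch):

    from zeitgeist.datamodel import Event, Interpretation, Manifestation, Subject

    ACTOR_UBUNTUONE = "dbus://com.ubuntuone.SyncDaemon.service"
    DIRECTORY_MIMETYPE = "inode/directory"
    EVENT_INTERPRETATION_U1_UDF_CREATED = "u1://UserFolderCreated"
    URI_PROTOCOL_U1 = "ubuntuone:"
    STORAGE_NETWORK = "net"

    def log_udf_created(zg, udf):
        """Log a 'user folder created' event to Zeitgeist via the zglog logger."""
        # The subject is the remote folder: identified by its u1:// node id,
        # with the local path recorded as the origin.
        folder = Subject.new_for_values(
            uri=URI_PROTOCOL_U1 + str(udf.node_id),
            interpretation=Interpretation.FOLDER,
            manifestation=Manifestation.REMOTE_DATA_OBJECT,
            origin="file:///" + udf.path,
            mimetype=DIRECTORY_MIMETYPE,
            storage=STORAGE_NETWORK)
        # The event uses a custom u1:// interpretation and is attributed to
        # the SyncDaemon D-Bus service as the actor.
        event = Event.new_for_values(
            interpretation=EVENT_INTERPRETATION_U1_UDF_CREATED,
            manifestation=Manifestation.USER_ACTIVITY,
            actor=ACTOR_UBUNTUONE,
            subjects=[folder])
        zg.log(event)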

Revision history for this message
Natalia Bidart (nataliabidart) wrote :

Great work. Really.

review: Approve
Revision history for this message
Facundo Batista (facundo) wrote :

Like it

review: Approve
Revision history for this message
Ubuntu One Auto Pilot (otto-pilot) wrote :

There are additional revisions which have not been approved in review. Please seek review and approval of these new revisions.

Revision history for this message
Ubuntu One Auto Pilot (otto-pilot) wrote :

The attempt to merge lp:~alecu/ubuntuone-client/ziggy-for-filesync into lp:ubuntuone-client failed. Below is the output from the failed tests.

/usr/bin/gnome-autogen.sh
checking for autoconf >= 2.53...
  testing autoconf2.50... not found.
  testing autoconf... found 2.67
checking for automake >= 1.10...
  testing automake-1.11... found 1.11.1
checking for libtool >= 1.5...
  testing libtoolize... found 2.2.6b
checking for intltool >= 0.30...
  testing intltoolize... found 0.41.1
checking for pkg-config >= 0.14.0...
  testing pkg-config... found 0.25
checking for gtk-doc >= 1.0...
  testing gtkdocize... found 1.15
Checking for required M4 macros...
Checking for forbidden M4 macros...
Processing ./configure.ac
Running libtoolize...
libtoolize: putting auxiliary files in `.'.
libtoolize: copying file `./ltmain.sh'
libtoolize: putting macros in AC_CONFIG_MACRO_DIR, `m4'.
libtoolize: copying file `m4/libtool.m4'
libtoolize: copying file `m4/ltoptions.m4'
libtoolize: copying file `m4/ltsugar.m4'
libtoolize: copying file `m4/ltversion.m4'
libtoolize: copying file `m4/lt~obsolete.m4'
Running intltoolize...
Running gtkdocize...
Running aclocal-1.11...
Running autoconf...
Running autoheader...
Running automake-1.11...
Running ./configure --enable-gtk-doc --enable-debug ...
checking for a BSD-compatible install... /usr/bin/install -c
checking whether build environment is sane... yes
checking for a thread-safe mkdir -p... /bin/mkdir -p
checking for gawk... gawk
checking whether make sets $(MAKE)... yes
checking for style of include used by make... GNU
checking for gcc... gcc
checking whether the C compiler works... yes
checking for C compiler default output file name... a.out
checking for suffix of executables...
checking whether we are cross compiling... no
checking for suffix of object files... o
checking whether we are using the GNU C compiler... yes
checking whether gcc accepts -g... yes
checking for gcc option to accept ISO C89... none needed
checking dependency style of gcc... gcc3
checking for library containing strerror... none required
checking for gcc... (cached) gcc
checking whether we are using the GNU C compiler... (cached) yes
checking whether gcc accepts -g... (cached) yes
checking for gcc option to accept ISO C89... (cached) none needed
checking dependency style of gcc... (cached) gcc3
checking build system type... i686-pc-linux-gnu
checking host system type... i686-pc-linux-gnu
checking for a sed that does not truncate output... /bin/sed
checking for grep that handles long lines and -e... /bin/grep
checking for egrep... /bin/grep -E
checking for fgrep... /bin/grep -F
checking for ld used by gcc... /usr/bin/ld
checking if the linker (/usr/bin/ld) is GNU ld... yes
checking for BSD- or MS-compatible name lister (nm)... /usr/bin/nm -B
checking the name lister (/usr/bin/nm -B) interface... BSD nm
checking whether ln -s works... yes
checking the maximum length of command line arguments... 1572864
checking whether the shell understands some XSI constructs... yes
checking whether the shell understands "+="... yes
checking for /usr/bin/ld option to reload object files... -r
checking for objdump... objdump
checking how to recognize dependent lib...

Preview Diff

1=== modified file 'contrib/testing/testcase.py'
2--- contrib/testing/testcase.py 2010-12-02 17:50:57 +0000
3+++ contrib/testing/testcase.py 2010-12-13 23:42:15 +0000
4@@ -143,7 +143,7 @@
5
6 cancel_download = cancel_upload = download = upload = make_dir = disconnect
7 make_file = move = unlink = list_shares = disconnect
8- list_volumes = create_share = inquire_free_space = disconnect
9+ list_volumes = create_share = create_udf = inquire_free_space = disconnect
10 inquire_account_info = delete_volume = change_public_access = disconnect
11 query_volumes = get_delta = rescan_from_scratch = disconnect
12 node_is_with_queued_move = disconnect
13
14=== modified file 'tests/syncdaemon/test_event_logging.py'
15--- tests/syncdaemon/test_event_logging.py 2010-12-09 14:28:21 +0000
16+++ tests/syncdaemon/test_event_logging.py 2010-12-13 23:42:15 +0000
17@@ -1,3 +1,5 @@
18+# -*- coding: utf-8 -*-
19+#
20 # tests.syncdaemon.test_event_logging - test logging ZG events
21 #
22 # Author: Alejandro J. Cura <alecu@canonical.com>
23@@ -17,23 +19,41 @@
24 # with this program. If not, see <http://www.gnu.org/licenses/>.
25 """Test the event logging from SyncDaemon into Zeitgeist."""
26
27+import logging
28 import os
29+import shutil
30 import uuid
31
32 from twisted.internet import defer
33 from zeitgeist.datamodel import Interpretation, Manifestation
34
35-from contrib.testing.testcase import FakeMainTestCase
36+from contrib.testing.testcase import (
37+ FakeMain, FakeMainTestCase, BaseTwistedTestCase, MementoHandler)
38+from ubuntuone.platform.linux import get_udf_path
39+from ubuntuone.storageprotocol import client, delta
40+from ubuntuone.storageprotocol.request import ROOT
41 from ubuntuone.storageprotocol.sharersp import NotifyShareHolder
42+from ubuntuone.syncdaemon.action_queue import (
43+ RequestQueue, Upload, MakeFile, MakeDir)
44 from ubuntuone.syncdaemon.event_logging import (
45 zglog, ZeitgeistListener, ACTOR_UBUNTUONE,
46 EVENT_INTERPRETATION_U1_FOLDER_SHARED,
47 EVENT_INTERPRETATION_U1_FOLDER_UNSHARED,
48 EVENT_INTERPRETATION_U1_SHARE_ACCEPTED,
49 EVENT_INTERPRETATION_U1_SHARE_UNACCEPTED,
50+ EVENT_INTERPRETATION_U1_CONFLICT_RENAME,
51+ EVENT_INTERPRETATION_U1_UDF_CREATED,
52+ EVENT_INTERPRETATION_U1_UDF_DELETED,
53+ EVENT_INTERPRETATION_U1_UDF_SUBSCRIBED,
54+ EVENT_INTERPRETATION_U1_UDF_UNSUBSCRIBED,
55 MANIFESTATION_U1_CONTACT_DATA_OBJECT, DIRECTORY_MIMETYPE,
56- INTERPRETATION_U1_CONTACT, URI_PROTOCOL_U1, STORAGE_NETWORK)
57-from ubuntuone.syncdaemon.volume_manager import Share, Shared
58+ INTERPRETATION_U1_CONTACT, URI_PROTOCOL_U1,
59+ STORAGE_DELETED, STORAGE_NETWORK, STORAGE_LOCAL)
60+from ubuntuone.syncdaemon.sync import Sync
61+from ubuntuone.syncdaemon.volume_manager import Share, Shared, UDF
62+from test_action_queue import ConnectedBaseTestCase
63+
64+VOLUME = uuid.UUID('12345678-1234-1234-1234-123456789abc')
65
66
67 class MockLogger(object):
68@@ -47,6 +67,33 @@
69 """Log the event."""
70 self.events.append(event)
71
72+def listen_for(event_q, event, callback, count=1, collect=False):
73+ """Setup a EQ listener for the specified event."""
74+ class Listener(object):
75+ """A basic listener to handle the pushed event."""
76+
77+ def __init__(self):
78+ self.hits = 0
79+ self.events = []
80+
81+ def _handle_event(self, *args, **kwargs):
82+ self.hits += 1
83+ if collect:
84+ self.events.append((args, kwargs))
85+ if self.hits == count:
86+ event_q.unsubscribe(self)
87+ if collect:
88+ callback(self.events)
89+ elif kwargs:
90+ callback((args, kwargs))
91+ else:
92+ callback(args)
93+
94+ listener = Listener()
95+ setattr(listener, 'handle_'+event, listener._handle_event)
96+ event_q.subscribe(listener)
97+ return listener
98+
99
100 class ZeitgeistListenerTestCase(FakeMainTestCase):
101 """Tests for ZeitgeistListener."""
102@@ -58,33 +105,12 @@
103 self.listener = ZeitgeistListener(self.fs, self.vm)
104 self.event_q.subscribe(self.listener)
105
106- def _listen_for(self, event, callback, count=1, collect=False):
107- """Setup a EQ listener for the specified event."""
108- event_q = self.main.event_q
109- class Listener(object):
110- """A basic listener to handle the pushed event."""
111-
112- def __init__(self):
113- self.hits = 0
114- self.events = []
115-
116- def _handle_event(self, *args, **kwargs):
117- self.hits += 1
118- if collect:
119- self.events.append((args, kwargs))
120- if self.hits == count:
121- event_q.unsubscribe(self)
122- if collect:
123- callback(self.events)
124- elif kwargs:
125- callback((args, kwargs))
126- else:
127- callback(args)
128-
129- listener = Listener()
130- setattr(listener, 'handle_'+event, listener._handle_event)
131- event_q.subscribe(listener)
132- return listener
133+ def _listen_for(self, *args, **kwargs):
134+ return listen_for(self.main.event_q, *args, **kwargs)
135+
136+
137+class ZeitgeistSharesTestCase(ZeitgeistListenerTestCase):
138+ """Tests for all Share-related zeitgeist events."""
139
140 def test_share_created_with_username_is_logged(self):
141 """A ShareCreated event is logged."""
142@@ -129,28 +155,28 @@
143 def assert_folder_shared_is_logged(self, path, fake_username):
144 """Assert that the FolderShared event was logged."""
145
146- self.assertEquals(len(self.listener.zg.events), 1)
147+ self.assertEqual(len(self.listener.zg.events), 1)
148 event = self.listener.zg.events[0]
149
150- self.assertEquals(event.interpretation,
151+ self.assertEqual(event.interpretation,
152 EVENT_INTERPRETATION_U1_FOLDER_SHARED)
153- self.assertEquals(event.manifestation,
154+ self.assertEqual(event.manifestation,
155 Manifestation.USER_ACTIVITY)
156- self.assertEquals(event.actor, ACTOR_UBUNTUONE)
157+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
158
159 folder = event.subjects[0]
160 self.assertTrue(folder.uri.startswith(URI_PROTOCOL_U1))
161- self.assertEquals(folder.interpretation, Interpretation.FOLDER)
162- self.assertEquals(folder.manifestation,
163+ self.assertEqual(folder.interpretation, Interpretation.FOLDER)
164+ self.assertEqual(folder.manifestation,
165 Manifestation.REMOTE_DATA_OBJECT)
166 self.assertTrue(folder.origin.endswith(path))
167- self.assertEquals(folder.mimetype, DIRECTORY_MIMETYPE)
168- self.assertEquals(folder.storage, STORAGE_NETWORK)
169+ self.assertEqual(folder.mimetype, DIRECTORY_MIMETYPE)
170+ self.assertEqual(folder.storage, STORAGE_NETWORK)
171
172 other_user = event.subjects[1]
173- self.assertEquals(other_user.uri, "mailto:" + fake_username)
174- self.assertEquals(other_user.interpretation, INTERPRETATION_U1_CONTACT)
175- self.assertEquals(other_user.manifestation,
176+ self.assertEqual(other_user.uri, "mailto:" + fake_username)
177+ self.assertEqual(other_user.interpretation, INTERPRETATION_U1_CONTACT)
178+ self.assertEqual(other_user.manifestation,
179 MANIFESTATION_U1_CONTACT_DATA_OBJECT)
180
181
182@@ -178,28 +204,28 @@
183 self.vm.delete_share(share.volume_id)
184 yield d
185
186- self.assertEquals(len(self.listener.zg.events), 1)
187+ self.assertEqual(len(self.listener.zg.events), 1)
188 event = self.listener.zg.events[0]
189
190- self.assertEquals(event.interpretation,
191+ self.assertEqual(event.interpretation,
192 EVENT_INTERPRETATION_U1_FOLDER_UNSHARED)
193- self.assertEquals(event.manifestation,
194+ self.assertEqual(event.manifestation,
195 Manifestation.USER_ACTIVITY)
196- self.assertEquals(event.actor, ACTOR_UBUNTUONE)
197+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
198
199 folder = event.subjects[0]
200 self.assertTrue(folder.uri.startswith(URI_PROTOCOL_U1))
201- self.assertEquals(folder.interpretation, Interpretation.FOLDER)
202- self.assertEquals(folder.manifestation,
203+ self.assertEqual(folder.interpretation, Interpretation.FOLDER)
204+ self.assertEqual(folder.manifestation,
205 Manifestation.REMOTE_DATA_OBJECT)
206 self.assertTrue(folder.origin.endswith(path))
207- self.assertEquals(folder.mimetype, DIRECTORY_MIMETYPE)
208- self.assertEquals(folder.storage, STORAGE_NETWORK)
209+ self.assertEqual(folder.mimetype, DIRECTORY_MIMETYPE)
210+ self.assertEqual(folder.storage, STORAGE_NETWORK)
211
212 other_user = event.subjects[1]
213- self.assertEquals(other_user.uri, "mailto:" + fake_username)
214- self.assertEquals(other_user.interpretation, INTERPRETATION_U1_CONTACT)
215- self.assertEquals(other_user.manifestation,
216+ self.assertEqual(other_user.uri, "mailto:" + fake_username)
217+ self.assertEqual(other_user.interpretation, INTERPRETATION_U1_CONTACT)
218+ self.assertEqual(other_user.manifestation,
219 MANIFESTATION_U1_CONTACT_DATA_OBJECT)
220
221 def test_share_accepted_is_logged(self):
222@@ -214,28 +240,28 @@
223 other_username=fake_username)
224 self.vm.add_share(share)
225
226- self.assertEquals(len(self.listener.zg.events), 1)
227+ self.assertEqual(len(self.listener.zg.events), 1)
228 event = self.listener.zg.events[0]
229
230- self.assertEquals(event.interpretation,
231+ self.assertEqual(event.interpretation,
232 EVENT_INTERPRETATION_U1_SHARE_ACCEPTED)
233- self.assertEquals(event.manifestation,
234+ self.assertEqual(event.manifestation,
235 Manifestation.USER_ACTIVITY)
236- self.assertEquals(event.actor, ACTOR_UBUNTUONE)
237+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
238
239 folder = event.subjects[0]
240 self.assertTrue(folder.uri.startswith(URI_PROTOCOL_U1))
241- self.assertEquals(folder.interpretation, Interpretation.FOLDER)
242- self.assertEquals(folder.manifestation,
243+ self.assertEqual(folder.interpretation, Interpretation.FOLDER)
244+ self.assertEqual(folder.manifestation,
245 Manifestation.REMOTE_DATA_OBJECT)
246 self.assertTrue(folder.origin.endswith(share_path))
247- self.assertEquals(folder.mimetype, DIRECTORY_MIMETYPE)
248- self.assertEquals(folder.storage, STORAGE_NETWORK)
249+ self.assertEqual(folder.mimetype, DIRECTORY_MIMETYPE)
250+ self.assertEqual(folder.storage, STORAGE_NETWORK)
251
252 other_user = event.subjects[1]
253- self.assertEquals(other_user.uri, "mailto:" + fake_username)
254- self.assertEquals(other_user.interpretation, INTERPRETATION_U1_CONTACT)
255- self.assertEquals(other_user.manifestation,
256+ self.assertEqual(other_user.uri, "mailto:" + fake_username)
257+ self.assertEqual(other_user.interpretation, INTERPRETATION_U1_CONTACT)
258+ self.assertEqual(other_user.manifestation,
259 MANIFESTATION_U1_CONTACT_DATA_OBJECT)
260
261
262@@ -257,26 +283,856 @@
263 self.main.event_q.push('SV_SHARE_DELETED', holder.share_id)
264 yield d
265
266- self.assertEquals(len(self.listener.zg.events), 2)
267+ self.assertEqual(len(self.listener.zg.events), 2)
268 event = self.listener.zg.events[1]
269
270- self.assertEquals(event.interpretation,
271+ self.assertEqual(event.interpretation,
272 EVENT_INTERPRETATION_U1_SHARE_UNACCEPTED)
273- self.assertEquals(event.manifestation,
274+ self.assertEqual(event.manifestation,
275 Manifestation.USER_ACTIVITY)
276- self.assertEquals(event.actor, ACTOR_UBUNTUONE)
277+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
278
279 folder = event.subjects[0]
280 self.assertTrue(folder.uri.startswith(URI_PROTOCOL_U1))
281- self.assertEquals(folder.interpretation, Interpretation.FOLDER)
282- self.assertEquals(folder.manifestation,
283+ self.assertEqual(folder.interpretation, Interpretation.FOLDER)
284+ self.assertEqual(folder.manifestation,
285 Manifestation.REMOTE_DATA_OBJECT)
286 self.assertTrue(folder.origin.endswith(share_path))
287- self.assertEquals(folder.mimetype, DIRECTORY_MIMETYPE)
288- self.assertEquals(folder.storage, STORAGE_NETWORK)
289+ self.assertEqual(folder.mimetype, DIRECTORY_MIMETYPE)
290+ self.assertEqual(folder.storage, STORAGE_NETWORK)
291
292 other_user = event.subjects[1]
293- self.assertEquals(other_user.uri, "mailto:" + fake_username)
294- self.assertEquals(other_user.interpretation, INTERPRETATION_U1_CONTACT)
295- self.assertEquals(other_user.manifestation,
296+ self.assertEqual(other_user.uri, "mailto:" + fake_username)
297+ self.assertEqual(other_user.interpretation, INTERPRETATION_U1_CONTACT)
298+ self.assertEqual(other_user.manifestation,
299 MANIFESTATION_U1_CONTACT_DATA_OBJECT)
300+
301+
302+class ZeitgeistUDFsTestCase(ZeitgeistListenerTestCase):
303+ """Tests for all UDFs-related zeitgeist events."""
304+
305+ def setUp(self):
306+ """Initialize this test instance."""
307+ super(ZeitgeistUDFsTestCase, self).setUp()
308+ self.home_dir = self.mktemp('ubuntuonehacker')
309+ self._old_home = os.environ['HOME']
310+ os.environ['HOME'] = self.home_dir
311+
312+ def tearDown(self):
313+ """Finalize this test instance."""
314+ self.rmtree(self.home_dir)
315+ os.environ['HOME'] = self._old_home
316+ super(ZeitgeistUDFsTestCase, self).tearDown()
317+
318+ def _create_udf(self, id, node_id, suggested_path, subscribed=True):
319+ """Create an UDF and returns it and the volume."""
320+ path = get_udf_path(suggested_path)
321+ # make sure suggested_path is unicode
322+ if isinstance(suggested_path, str):
323+ suggested_path = suggested_path.decode('utf-8')
324+ udf = UDF(str(id), str(node_id), suggested_path, path, subscribed)
325+ return udf
326+
327+ @defer.inlineCallbacks
328+ def test_udf_create_is_logged(self):
329+ """Test for Folders.create."""
330+ path = os.path.join(self.home_dir, u'ñoño'.encode('utf-8'))
331+ id = uuid.uuid4()
332+ node_id = uuid.uuid4()
333+
334+ def create_udf(path, name, marker):
335+ """Fake create_udf."""
336+ # check that the marker is the full path to the udf
337+ expanded_path = os.path.expanduser(path.encode('utf-8'))
338+ udf_path = os.path.join(expanded_path, name.encode('utf-8'))
339+ if str(marker) != udf_path:
340+ d.errback(ValueError("marker != path - "
341+ "marker: %r path: %r" % (marker, udf_path)))
342+ self.main.event_q.push("AQ_CREATE_UDF_OK", **dict(volume_id=id,
343+ node_id=node_id,
344+ marker=marker))
345+
346+ self.patch(self.main.action_q, "create_udf", create_udf)
347+
348+ d = defer.Deferred()
349+ self._listen_for('VM_UDF_CREATED', d.callback, 1, collect=True)
350+ self.vm.create_udf(path.encode('utf-8'))
351+ yield d
352+
353+ self.assertEqual(len(self.listener.zg.events), 1)
354+ event = self.listener.zg.events[0]
355+
356+ self.assertEqual(event.interpretation,
357+ EVENT_INTERPRETATION_U1_UDF_CREATED)
358+ self.assertEqual(event.manifestation,
359+ Manifestation.USER_ACTIVITY)
360+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
361+
362+ folder = event.subjects[0]
363+ self.assertTrue(folder.uri.startswith(URI_PROTOCOL_U1))
364+ self.assertEqual(folder.interpretation, Interpretation.FOLDER)
365+ self.assertEqual(folder.manifestation,
366+ Manifestation.REMOTE_DATA_OBJECT)
367+ self.assertTrue(folder.origin.endswith(path))
368+ self.assertEqual(folder.mimetype, DIRECTORY_MIMETYPE)
369+ self.assertEqual(folder.storage, STORAGE_NETWORK)
370+
371+ @defer.inlineCallbacks
372+ def test_udf_delete_is_logged(self):
373+ """Test for Folders.delete."""
374+ id = uuid.uuid4()
375+ node_id = uuid.uuid4()
376+ path = os.path.join(self.home_dir, u'ñoño'.encode('utf-8'))
377+
378+ def create_udf(path, name, marker):
379+ """Fake create_udf."""
380+ # check that the marker is the full path to the udf
381+ expanded_path = os.path.expanduser(path.encode('utf-8'))
382+ udf_path = os.path.join(expanded_path, name.encode('utf-8'))
383+ if str(marker) != udf_path:
384+ d.errback(ValueError("marker != path - "
385+ "marker: %r path: %r" % (marker, udf_path)))
386+ self.main.event_q.push("AQ_CREATE_UDF_OK", **dict(volume_id=id,
387+ node_id=node_id,
388+ marker=marker))
389+
390+ self.patch(self.main.action_q, "create_udf", create_udf)
391+
392+ d = defer.Deferred()
393+ self._listen_for('VM_UDF_CREATED', d.callback, 1, collect=True)
394+ self.vm.create_udf(path.encode('utf-8'))
395+ yield d
396+
397+ def delete_volume(path):
398+ """Fake delete_volume."""
399+ self.main.event_q.push("AQ_DELETE_VOLUME_OK", volume_id=id)
400+
401+ self.patch(self.main.action_q, "delete_volume", delete_volume)
402+
403+ self.vm.delete_volume(str(id))
404+
405+ self.assertEqual(len(self.listener.zg.events), 1)
406+ event = self.listener.zg.events[0]
407+
408+ d = defer.Deferred()
409+ self._listen_for('VM_VOLUME_DELETED', d.callback, 1, collect=True)
410+ yield d
411+
412+ self.assertEqual(len(self.listener.zg.events), 2)
413+ event = self.listener.zg.events[1]
414+
415+ self.assertEqual(event.interpretation,
416+ EVENT_INTERPRETATION_U1_UDF_DELETED)
417+ self.assertEqual(event.manifestation,
418+ Manifestation.USER_ACTIVITY)
419+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
420+
421+ folder = event.subjects[0]
422+ self.assertTrue(folder.uri.startswith(URI_PROTOCOL_U1))
423+ self.assertEqual(folder.interpretation, Interpretation.FOLDER)
424+ self.assertEqual(folder.manifestation,
425+ Manifestation.DELETED_RESOURCE)
426+ self.assertTrue(folder.origin.endswith(path))
427+ self.assertEqual(folder.mimetype, DIRECTORY_MIMETYPE)
428+ self.assertEqual(folder.storage, STORAGE_DELETED)
429+
430+ @defer.inlineCallbacks
431+ def test_udf_subscribe_is_logged(self):
432+ """Test for Folders.subscribe."""
433+ suggested_path = u'~/ñoño'
434+ udf = self._create_udf(uuid.uuid4(), 'node_id', suggested_path,
435+ subscribed=False)
436+ yield self.main.vm.add_udf(udf)
437+ d = defer.Deferred()
438+ self._listen_for('VM_UDF_SUBSCRIBED', d.callback, 1, collect=True)
439+ self.vm.subscribe_udf(udf.volume_id)
440+ yield d
441+
442+ self.assertEqual(len(self.listener.zg.events), 2)
443+ event = self.listener.zg.events[1]
444+
445+ self.assertEqual(event.interpretation,
446+ EVENT_INTERPRETATION_U1_UDF_SUBSCRIBED)
447+ self.assertEqual(event.manifestation,
448+ Manifestation.USER_ACTIVITY)
449+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
450+
451+ folder = event.subjects[0]
452+ self.assertTrue(folder.uri.endswith(udf.path))
453+ self.assertEqual(folder.interpretation, Interpretation.FOLDER)
454+ self.assertEqual(folder.manifestation,
455+ Manifestation.FILE_DATA_OBJECT)
456+ self.assertTrue(folder.origin.startswith(URI_PROTOCOL_U1))
457+ self.assertEqual(folder.mimetype, DIRECTORY_MIMETYPE)
458+ self.assertEqual(folder.storage, STORAGE_LOCAL)
459+
460+ @defer.inlineCallbacks
461+ def test_udf_unsubscribe_is_logged(self):
462+ """Test for Folders.unsubscribe."""
463+ suggested_path = u'~/ñoño'
464+ udf = self._create_udf(uuid.uuid4(), 'node_id', suggested_path,
465+ subscribed=True)
466+ yield self.main.vm.add_udf(udf)
467+ d = defer.Deferred()
468+ self._listen_for('VM_UDF_UNSUBSCRIBED', d.callback, 1, collect=True)
469+ self.vm.unsubscribe_udf(udf.volume_id)
470+ yield d
471+
472+ self.assertEqual(len(self.listener.zg.events), 2)
473+ event = self.listener.zg.events[1]
474+
475+ self.assertEqual(event.interpretation,
476+ EVENT_INTERPRETATION_U1_UDF_UNSUBSCRIBED)
477+ self.assertEqual(event.manifestation,
478+ Manifestation.USER_ACTIVITY)
479+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
480+
481+ folder = event.subjects[0]
482+ self.assertTrue(folder.uri.endswith(udf.path))
483+ self.assertEqual(folder.interpretation, Interpretation.FOLDER)
484+ self.assertEqual(folder.manifestation,
485+ Manifestation.DELETED_RESOURCE)
486+ self.assertTrue(folder.origin.startswith(URI_PROTOCOL_U1))
487+ self.assertEqual(folder.mimetype, DIRECTORY_MIMETYPE)
488+ self.assertEqual(folder.storage, STORAGE_DELETED)
489+
490+
491+class ZeitgeistRemoteFileSyncTestCase(ConnectedBaseTestCase):
492+ """File sync events are logged into Zeitgeist."""
493+
494+ def setUp(self):
495+ """Initialize this test instance."""
496+ ConnectedBaseTestCase.setUp(self)
497+ self.rq = request_queue = RequestQueue(name='FOO',
498+ action_queue=self.action_queue)
499+
500+ class MyUpload(Upload):
501+ """Just to allow monkeypatching."""
502+
503+ self.share_id = ""
504+ self.command = MyUpload(request_queue, share_id=self.share_id,
505+ node_id='a_node_id', previous_hash='prev_hash',
506+ hash='yadda', crc32=0, size=0,
507+ fileobj_factory=lambda: None,
508+ tempfile_factory=lambda: None)
509+ self.command.pre_queue_setup() # create the logger
510+ self.fsm = self.action_queue.main.fs
511+ self.vm = self.action_queue.main.vm
512+ self.patch(zglog, "ZeitgeistLogger", MockLogger)
513+ self.listener = ZeitgeistListener(self.fsm, self.vm)
514+ self.action_queue.event_queue.subscribe(self.listener)
515+ self.root_id = "roootid"
516+ self.sync = Sync(main=self.main)
517+
518+ def tearDown(self):
519+ """Finalize this test instance."""
520+ ConnectedBaseTestCase.tearDown(self)
521+
522+ def test_syncdaemon_creates_file_on_server_is_logged(self):
523+ """Files created by SyncDaemon on the server are logged."""
524+ filename = "filename.mp3"
525+ path = os.path.join(self.vm.root.path, filename)
526+ self.fsm.create(path, "")
527+ self.fsm.set_node_id(path, "a_node_id")
528+
529+ request = client.MakeFile(self.action_queue.client, self.share_id,
530+ 'parent', filename)
531+ request.new_id = 'a_node_id'
532+ request.new_generation = 13
533+
534+ # create a command and trigger it success
535+ cmd = MakeFile(self.rq, self.share_id, 'parent', filename, 'marker')
536+ res = cmd.handle_success(request)
537+ assert res is request
538+
539+ # create a request and fill it with succesful information
540+ request = client.PutContent(self.action_queue.client, self.share_id,
541+ 'node', 'prvhash', 'newhash', 'crc32',
542+ 'size', 'deflated', 'fd')
543+ request.new_generation = 13
544+
545+ # trigger success in the command
546+ self.command.handle_success(request)
547+
548+ # check for successful event
549+ kwargs = dict(share_id=self.command.share_id, node_id='a_node_id',
550+ hash='yadda', new_generation=13)
551+
552+ info = dict(marker='marker', new_id='a_node_id', new_generation=13,
553+ volume_id=self.share_id)
554+ events = [
555+ ('AQ_FILE_NEW_OK', (), info),
556+ ('AQ_UPLOAD_FINISHED', (), kwargs)
557+ ]
558+ self.assertEqual(events, self.command.action_queue.event_queue.events)
559+
560+ self.assertEqual(len(self.listener.zg.events), 1)
561+ event = self.listener.zg.events[0]
562+
563+ self.assertEqual(event.interpretation,
564+ Interpretation.CREATE_EVENT)
565+ self.assertEqual(event.manifestation,
566+ Manifestation.SCHEDULED_ACTIVITY)
567+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
568+
569+ remote_file = event.subjects[0]
570+ self.assertTrue(remote_file.uri.startswith(URI_PROTOCOL_U1))
571+ self.assertEqual(remote_file.interpretation, Interpretation.AUDIO)
572+ self.assertEqual(remote_file.manifestation,
573+ Manifestation.REMOTE_DATA_OBJECT)
574+ self.assertTrue(remote_file.origin.endswith(filename))
575+ self.assertEqual(remote_file.mimetype, "audio/mpeg")
576+ self.assertEqual(remote_file.storage, STORAGE_NETWORK)
577+
578+ def test_syncdaemon_creates_dir_on_server_is_logged(self):
579+ """Dirs created by SyncDaemon on the server are logged."""
580+ dirname = "dirname"
581+ path = os.path.join(self.vm.root.path, dirname)
582+ self.fsm.create(path, "")
583+ self.fsm.set_node_id(path, "a_node_id")
584+
585+ request = client.MakeDir(self.action_queue.client, self.share_id,
586+ 'parent', dirname)
587+ request.new_id = 'a_node_id'
588+ request.new_generation = 13
589+
590+ # create a command and trigger it success
591+ cmd = MakeDir(self.rq, self.share_id, 'parent', dirname, 'marker')
592+ res = cmd.handle_success(request)
593+ assert res is request
594+
595+ # check for successful event
596+ info = dict(marker='marker', new_id='a_node_id', new_generation=13,
597+ volume_id=self.share_id)
598+ events = [
599+ ('AQ_DIR_NEW_OK', (), info),
600+ ]
601+ self.assertEqual(events, self.command.action_queue.event_queue.events)
602+
603+ self.assertEqual(len(self.listener.zg.events), 1)
604+ event = self.listener.zg.events[0]
605+
606+ self.assertEqual(event.interpretation,
607+ Interpretation.CREATE_EVENT)
608+ self.assertEqual(event.manifestation,
609+ Manifestation.SCHEDULED_ACTIVITY)
610+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
611+
612+ remote_file = event.subjects[0]
613+ self.assertTrue(remote_file.uri.startswith(URI_PROTOCOL_U1))
614+ self.assertEqual(remote_file.interpretation, Interpretation.FOLDER)
615+ self.assertEqual(remote_file.manifestation,
616+ Manifestation.REMOTE_DATA_OBJECT)
617+ self.assertTrue(remote_file.origin.endswith(dirname))
618+ self.assertEqual(remote_file.mimetype, DIRECTORY_MIMETYPE)
619+ self.assertEqual(remote_file.storage, STORAGE_NETWORK)
620+
621+ def test_syncdaemon_modifies_on_server_is_logged(self):
622+ """Files modified by SyncDaemon on the server are logged."""
623+ filename = "filename.mp3"
624+ path = os.path.join(self.vm.root.path, filename)
625+ self.fsm.create(path, "")
626+ self.fsm.set_node_id(path, "a_node_id")
627+
628+ # create a request and fill it with succesful information
629+ request = client.PutContent(self.action_queue.client, self.share_id,
630+ 'node', 'prvhash', 'newhash', 'crc32',
631+ 'size', 'deflated', 'fd')
632+ request.new_generation = 13
633+
634+ # trigger success in the command
635+ self.command.handle_success(request)
636+
637+ # check for successful event
638+ kwargs = dict(share_id=self.command.share_id, node_id='a_node_id',
639+ hash='yadda', new_generation=13)
640+
641+ events = [('AQ_UPLOAD_FINISHED', (), kwargs)]
642+ self.assertEqual(events, self.command.action_queue.event_queue.events)
643+
644+ self.assertEqual(len(self.listener.zg.events), 1)
645+ event = self.listener.zg.events[0]
646+
647+ self.assertEqual(event.interpretation,
648+ Interpretation.MODIFY_EVENT)
649+ self.assertEqual(event.manifestation,
650+ Manifestation.SCHEDULED_ACTIVITY)
651+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
652+
653+ remote_file = event.subjects[0]
654+ self.assertTrue(remote_file.uri.startswith(URI_PROTOCOL_U1))
655+ self.assertEqual(remote_file.interpretation, Interpretation.AUDIO)
656+ self.assertEqual(remote_file.manifestation,
657+ Manifestation.REMOTE_DATA_OBJECT)
658+ self.assertTrue(remote_file.origin.endswith(filename))
659+ self.assertEqual(remote_file.mimetype, "audio/mpeg")
660+ self.assertEqual(remote_file.storage, STORAGE_NETWORK)
661+
662+ @defer.inlineCallbacks
663+ def test_syncdaemon_deletes_file_on_server_is_logged(self):
664+ """Files deleted by SD on the server are logged."""
665+ d = defer.Deferred()
666+ listen_for(self.main.event_q, 'AQ_UNLINK_OK', d.callback)
667+
668+ path = os.path.join(self.main.vm.root.path, "filename.mp3")
669+ self.main.fs.create(path, "")
670+ self.main.fs.set_node_id(path, "node_id")
671+ self.main.event_q.push("AQ_UNLINK_OK", share_id="",
672+ parent_id="parent_id",
673+ node_id="node_id", new_generation=13)
674+ yield d
675+
676+ self.assertEqual(len(self.listener.zg.events), 1)
677+ event = self.listener.zg.events[0]
678+
679+ self.assertEqual(event.interpretation,
680+ Interpretation.DELETE_EVENT)
681+ self.assertEqual(event.manifestation,
682+ Manifestation.SCHEDULED_ACTIVITY)
683+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
684+
685+ remote_folder = event.subjects[0]
686+ self.assertTrue(remote_folder.uri.startswith(URI_PROTOCOL_U1))
687+ self.assertEqual(remote_folder.interpretation, Interpretation.AUDIO)
688+ self.assertEqual(remote_folder.manifestation,
689+ Manifestation.DELETED_RESOURCE)
690+ self.assertTrue(remote_folder.origin.endswith("filename.mp3"))
691+ self.assertEqual(remote_folder.mimetype, "audio/mpeg")
692+ self.assertEqual(remote_folder.storage, STORAGE_DELETED)
693+
694+ @defer.inlineCallbacks
695+ def test_syncdaemon_deletes_dir_on_server_is_logged(self):
696+ """Files deleted by SD on the server are logged."""
697+ d = defer.Deferred()
698+ listen_for(self.main.event_q, 'AQ_UNLINK_OK', d.callback)
699+
700+ path = os.path.join(self.main.vm.root.path, "folder name")
701+ self.main.fs.create(path, "", is_dir=True)
702+ self.main.fs.set_node_id(path, "node_id")
703+ self.main.event_q.push("AQ_UNLINK_OK", share_id="",
704+ parent_id="parent_id",
705+ node_id="node_id", new_generation=13)
706+ yield d
707+
708+ self.assertEqual(len(self.listener.zg.events), 1)
709+ event = self.listener.zg.events[0]
710+
711+ self.assertEqual(event.interpretation,
712+ Interpretation.DELETE_EVENT)
713+ self.assertEqual(event.manifestation,
714+ Manifestation.SCHEDULED_ACTIVITY)
715+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
716+
717+ remote_folder = event.subjects[0]
718+ self.assertTrue(remote_folder.uri.startswith(URI_PROTOCOL_U1))
719+ self.assertEqual(remote_folder.interpretation, Interpretation.FOLDER)
720+ self.assertEqual(remote_folder.manifestation,
721+ Manifestation.DELETED_RESOURCE)
722+ self.assertTrue(remote_folder.origin.endswith("folder name"))
723+ self.assertEqual(remote_folder.mimetype, DIRECTORY_MIMETYPE)
724+ self.assertEqual(remote_folder.storage, STORAGE_DELETED)
725+
726+
727+class ZeitgeistLocalFileSyncTestCase(BaseTwistedTestCase):
728+ """Zeitgeist events coming from the server."""
729+ timeout = 5
730+
731+ def setUp(self):
732+ """Initialize this instance."""
733+ BaseTwistedTestCase.setUp(self)
734+ self.root = self.mktemp('root')
735+ self.shares = self.mktemp('shares')
736+ self.data = self.mktemp('data')
737+ self.partials_dir = self.mktemp('partials_dir')
738+ self.handler = MementoHandler()
739+ self.handler.setLevel(logging.ERROR)
740+ FakeMain._sync_class = Sync
741+ self.main = FakeMain(root_dir=self.root, shares_dir=self.shares,
742+ data_dir=self.data,
743+ partials_dir=self.partials_dir)
744+ self._logger = logging.getLogger('ubuntuone.SyncDaemon')
745+ self._logger.addHandler(self.handler)
746+
747+ self.root_id = root_id = "roootid"
748+ self.main.vm._got_root(root_id)
749+ self.filemp3delta = delta.FileInfoDelta(
750+ generation=5, is_live=True, file_type=delta.FILE,
751+ parent_id=self.root_id, share_id=ROOT, node_id=uuid.uuid4(),
752+ name=u"fileñ.mp3", is_public=False, content_hash="hash",
753+ crc32=1, size=10, last_modified=0)
754+
755+ self.dirdelta = delta.FileInfoDelta(
756+ generation=6, is_live=True, file_type=delta.DIRECTORY,
757+ parent_id=root_id, share_id=ROOT, node_id=uuid.uuid4(),
758+ name=u"directory_ñ", is_public=False, content_hash="hash",
759+ crc32=1, size=10, last_modified=0)
760+
761+ self.patch(zglog, "ZeitgeistLogger", MockLogger)
762+ self.listener = ZeitgeistListener(self.main.fs, self.main.vm)
763+ self.main.event_q.subscribe(self.listener)
764+
765+ def tearDown(self):
766+ """Clean up this instance."""
767+ self._logger.removeHandler(self.handler)
768+ self.main.shutdown()
769+ FakeMain._sync_class = None
770+ shutil.rmtree(self.root)
771+ shutil.rmtree(self.shares)
772+ shutil.rmtree(self.data)
773+ for record in self.handler.records:
774+ exc_info = getattr(record, 'exc_info', None)
775+ if exc_info is not None:
776+ raise exc_info[0], exc_info[1], exc_info[2]
777+ BaseTwistedTestCase.tearDown(self)
778+
779+ @defer.inlineCallbacks
780+ def test_syncdaemon_creates_file_locally_is_logged(self):
781+ """Files created locally by SyncDaemon are logged."""
782+ d = defer.Deferred()
783+ d2 = defer.Deferred()
784+ listen_for(self.main.event_q, 'SV_FILE_NEW', d.callback)
785+ listen_for(self.main.event_q, 'AQ_DOWNLOAD_FINISHED', d2.callback)
786+
787+ deltas = [ self.filemp3delta ]
788+ kwargs = dict(volume_id=ROOT, delta_content=deltas, end_generation=11,
789+ full=True, free_bytes=10)
790+ self.main.sync.handle_AQ_DELTA_OK(**kwargs)
791+
792+ # check that the file is created
793+ node = self.main.fs.get_by_node_id(ROOT, self.filemp3delta.node_id)
794+ self.assertEqual(node.path, self.filemp3delta.name.encode('utf8'))
795+ self.assertEqual(node.is_dir, False)
796+ self.assertEqual(node.generation, self.filemp3delta.generation)
797+
798+ yield d # wait for SV_FILE_NEW
799+
800+ dlargs = dict(
801+ share_id=self.filemp3delta.share_id,
802+ node_id=self.filemp3delta.node_id,
803+ server_hash="server hash")
804+ self.main.event_q.push("AQ_DOWNLOAD_FINISHED", **dlargs)
805+
806+ yield d2 # wait for AQ_DOWNLOAD_FINISHED
807+
808+ self.assertEqual(len(self.listener.zg.events), 1)
809+ event = self.listener.zg.events[0]
810+
811+ self.assertEqual(event.interpretation,
812+ Interpretation.CREATE_EVENT)
813+ self.assertEqual(event.manifestation,
814+ Manifestation.WORLD_ACTIVITY)
815+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
816+
817+ local_file = event.subjects[0]
818+ self.assertTrue(local_file.uri.endswith(self.filemp3delta.name))
819+ self.assertEqual(local_file.interpretation, Interpretation.AUDIO)
820+ self.assertEqual(local_file.manifestation,
821+ Manifestation.FILE_DATA_OBJECT)
822+ self.assertTrue(local_file.origin.startswith(URI_PROTOCOL_U1))
823+ self.assertEqual(local_file.mimetype, "audio/mpeg")
824+ self.assertEqual(local_file.storage, STORAGE_LOCAL)
825+
826+ @defer.inlineCallbacks
827+ def test_syncdaemon_creates_dir_locally_is_logged(self):
828+ """Dirs created locally by SyncDaemon are logged."""
829+ d = defer.Deferred()
830+ listen_for(self.main.event_q, 'SV_DIR_NEW', d.callback)
831+
832+ deltas = [ self.dirdelta ]
833+ kwargs = dict(volume_id=ROOT, delta_content=deltas, end_generation=11,
834+ full=True, free_bytes=10)
835+ self.main.sync.handle_AQ_DELTA_OK(**kwargs)
836+
837+ # check that the dir is created
838+ node = self.main.fs.get_by_node_id(ROOT, self.dirdelta.node_id)
839+ self.assertEqual(node.path, self.dirdelta.name.encode('utf8'))
840+ self.assertEqual(node.is_dir, True)
841+ self.assertEqual(node.generation, self.dirdelta.generation)
842+
843+ yield d # wait for SV_DIR_NEW
844+
845+ self.assertEqual(len(self.listener.zg.events), 1)
846+ event = self.listener.zg.events[0]
847+
848+ self.assertEqual(event.interpretation,
849+ Interpretation.CREATE_EVENT)
850+ self.assertEqual(event.manifestation,
851+ Manifestation.WORLD_ACTIVITY)
852+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
853+
854+ local_file = event.subjects[0]
855+ self.assertTrue(local_file.uri.endswith(self.dirdelta.name))
856+ self.assertEqual(local_file.interpretation, Interpretation.FOLDER)
857+ self.assertEqual(local_file.manifestation,
858+ Manifestation.FILE_DATA_OBJECT)
859+ self.assertTrue(local_file.origin.startswith(URI_PROTOCOL_U1))
860+ self.assertEqual(local_file.mimetype, DIRECTORY_MIMETYPE)
861+ self.assertEqual(local_file.storage, STORAGE_LOCAL)
862+
863+ @defer.inlineCallbacks
864+ def test_syncdaemon_modifies_locally_is_logged(self):
865+ """Files modified locally by SyncDaemon are logged."""
866+ d = defer.Deferred()
867+ d2 = defer.Deferred()
868+ listen_for(self.main.event_q, 'SV_FILE_NEW', d.callback)
869+ listen_for(self.main.event_q, 'AQ_DOWNLOAD_FINISHED', d2.callback)
870+
871+ deltas = [ self.filemp3delta ]
872+ kwargs = dict(volume_id=ROOT, delta_content=deltas, end_generation=11,
873+ full=True, free_bytes=10)
874+ self.main.sync.handle_AQ_DELTA_OK(**kwargs)
875+
876+ # check that the file is modified
877+ node = self.main.fs.get_by_node_id(ROOT, self.filemp3delta.node_id)
878+ self.assertEqual(node.path, self.filemp3delta.name.encode('utf8'))
879+ self.assertEqual(node.is_dir, False)
880+ self.assertEqual(node.generation, self.filemp3delta.generation)
881+
882+ yield d # wait for SV_FILE_NEW
883+
884+ # remove from the recent list
885+ local_file_id = (self.filemp3delta.share_id, self.filemp3delta.node_id)
886+ self.listener.newly_created_local_files.remove(local_file_id)
887+
888+ dlargs = dict(
889+ share_id=self.filemp3delta.share_id,
890+ node_id=self.filemp3delta.node_id,
891+ server_hash="server hash")
892+ self.main.event_q.push("AQ_DOWNLOAD_FINISHED", **dlargs)
893+
894+ yield d2 # wait for AQ_DOWNLOAD_FINISHED
895+
896+ self.assertEqual(len(self.listener.zg.events), 1)
897+ event = self.listener.zg.events[0]
898+
899+ self.assertEqual(event.interpretation,
900+ Interpretation.MODIFY_EVENT)
901+ self.assertEqual(event.manifestation,
902+ Manifestation.WORLD_ACTIVITY)
903+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
904+
905+ local_file = event.subjects[0]
906+ self.assertTrue(local_file.uri.endswith(self.filemp3delta.name))
907+ self.assertEqual(local_file.interpretation, Interpretation.AUDIO)
908+ self.assertEqual(local_file.manifestation,
909+ Manifestation.FILE_DATA_OBJECT)
910+ self.assertTrue(local_file.origin.startswith(URI_PROTOCOL_U1))
911+ self.assertEqual(local_file.mimetype, "audio/mpeg")
912+ self.assertEqual(local_file.storage, STORAGE_LOCAL)
913+
914+ @defer.inlineCallbacks
915+ def test_syncdaemon_deletes_file_locally_is_logged(self):
916+ """Files deleted locally by SyncDaemon are logged."""
917+ d = defer.Deferred()
918+ listen_for(self.main.event_q, 'SV_FILE_DELETED', d.callback)
919+
920+ filename = self.filemp3delta.name.encode("utf-8")
921+ path = os.path.join(self.main.vm.root.path, filename)
922+ self.main.fs.create(path, "")
923+ self.main.fs.set_node_id(path, "node_id")
924+ self.main.event_q.push("SV_FILE_DELETED", volume_id="",
925+ node_id="node_id", is_dir=False)
926+
927+ yield d
928+
929+ self.assertEqual(len(self.listener.zg.events), 1)
930+ event = self.listener.zg.events[0]
931+
932+ self.assertEqual(event.interpretation,
933+ Interpretation.DELETE_EVENT)
934+ self.assertEqual(event.manifestation,
935+ Manifestation.WORLD_ACTIVITY)
936+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
937+
938+ local_file = event.subjects[0]
939+ self.assertTrue(local_file.uri.endswith(self.filemp3delta.name))
940+ self.assertEqual(local_file.interpretation, Interpretation.AUDIO)
941+ self.assertEqual(local_file.manifestation,
942+ Manifestation.DELETED_RESOURCE)
943+ self.assertTrue(local_file.origin.startswith(URI_PROTOCOL_U1))
944+ self.assertEqual(local_file.mimetype, "audio/mpeg")
945+ self.assertEqual(local_file.storage, STORAGE_DELETED)
946+
947+ @defer.inlineCallbacks
948+ def test_syncdaemon_deletes_dir_locally_is_logged(self):
949+ """Dirs deleted locally by SyncDaemon are logged."""
950+ d = defer.Deferred()
951+ listen_for(self.main.event_q, 'SV_FILE_DELETED', d.callback)
952+
953+ path = os.path.join(self.main.vm.root.path, "folder name")
954+ self.main.fs.create(path, "", is_dir=True)
955+ self.main.fs.set_node_id(path, "node_id")
956+ self.main.event_q.push("SV_FILE_DELETED", volume_id="",
957+ node_id="node_id", is_dir=True)
958+
959+ yield d
960+
961+ self.assertEqual(len(self.listener.zg.events), 1)
962+ event = self.listener.zg.events[0]
963+
964+ self.assertEqual(event.interpretation,
965+ Interpretation.DELETE_EVENT)
966+ self.assertEqual(event.manifestation,
967+ Manifestation.WORLD_ACTIVITY)
968+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
969+
970+ local_folder = event.subjects[0]
971+ self.assertTrue(local_folder.uri.endswith("folder name"))
972+ self.assertEqual(local_folder.interpretation, Interpretation.FOLDER)
973+ self.assertEqual(local_folder.manifestation,
974+ Manifestation.DELETED_RESOURCE)
975+ self.assertTrue(local_folder.origin.startswith(URI_PROTOCOL_U1))
976+ self.assertEqual(local_folder.mimetype, DIRECTORY_MIMETYPE)
977+ self.assertEqual(local_folder.storage, STORAGE_DELETED)
978+
979+ @defer.inlineCallbacks
980+ def test_file_sync_conflict_is_logged(self):
981+ """Files renamed because of conflict are logged."""
982+ d = defer.Deferred()
983+ listen_for(self.main.event_q, 'FSM_FILE_CONFLICT', d.callback)
984+
985+ testfile = os.path.join(self.main.vm.root.path, 'sample.mp3')
986+ mdid = self.main.fs.create(testfile, "")
987+ self.main.fs.set_node_id(testfile, "uuid")
988+ with open(testfile, "w") as fh:
989+ fh.write("this is music!")
990+
991+ self.main.fs.move_to_conflict(mdid)
992+
993+ yield d
994+
995+ self.assertEqual(len(self.listener.zg.events), 1)
996+ event = self.listener.zg.events[0]
997+
998+ self.assertEqual(event.interpretation,
999+ EVENT_INTERPRETATION_U1_CONFLICT_RENAME)
1000+ self.assertEqual(event.manifestation,
1001+ Manifestation.WORLD_ACTIVITY)
1002+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
1003+
1004+ local_file = event.subjects[0]
1005+ new_name = testfile + self.main.fs.CONFLICT_SUFFIX
1006+ self.assertTrue(local_file.uri.endswith(new_name))
1007+ self.assertEqual(local_file.interpretation, Interpretation.AUDIO)
1008+ self.assertEqual(local_file.manifestation,
1009+ Manifestation.FILE_DATA_OBJECT)
1010+ self.assertTrue(local_file.origin.endswith(testfile))
1011+ self.assertEqual(local_file.mimetype, "audio/mpeg")
1012+ self.assertEqual(local_file.storage, STORAGE_LOCAL)
1013+
1014+ @defer.inlineCallbacks
1015+ def test_dir_sync_conflict_is_logged(self):
1016+ """Dirs renamed because of conflict are logged."""
1017+ d = defer.Deferred()
1018+ listen_for(self.main.event_q, 'FSM_DIR_CONFLICT', d.callback)
1019+
1020+ testdir = os.path.join(self.main.vm.root.path, 'sampledir')
1021+ mdid = self.main.fs.create(testdir, "", is_dir=True)
1022+ self.main.fs.set_node_id(testdir, "uuid")
1023+ os.mkdir(testdir)
1024+
1025+ self.main.fs.move_to_conflict(mdid)
1026+
1027+ yield d
1028+
1029+ self.assertEqual(len(self.listener.zg.events), 1)
1030+ event = self.listener.zg.events[0]
1031+
1032+ self.assertEqual(event.interpretation,
1033+ EVENT_INTERPRETATION_U1_CONFLICT_RENAME)
1034+ self.assertEqual(event.manifestation,
1035+ Manifestation.WORLD_ACTIVITY)
1036+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
1037+
1038+ local_file = event.subjects[0]
1039+ new_name = testdir + self.main.fs.CONFLICT_SUFFIX
1040+ self.assertTrue(local_file.uri.endswith(new_name))
1041+ self.assertEqual(local_file.interpretation, Interpretation.FOLDER)
1042+ self.assertEqual(local_file.manifestation,
1043+ Manifestation.FILE_DATA_OBJECT)
1044+ self.assertTrue(local_file.origin.endswith(testdir))
1045+ self.assertEqual(local_file.mimetype, DIRECTORY_MIMETYPE)
1046+ self.assertEqual(local_file.storage, STORAGE_LOCAL)
1047+
1048+class ZeitgeistPublicFilesTestCase(ZeitgeistListenerTestCase):
1049+ """Public files events are logged into Zeitgeist."""
1050+
1051+ @defer.inlineCallbacks
1052+ def test_publish_url_is_logged(self):
1053+ """Publishing a file with a url is logged."""
1054+ share_id = "share"
1055+ node_id = "node_id"
1056+ is_public = True
1057+ public_url = 'http://example.com/foo.mp3'
1058+
1059+ share_path = os.path.join(self.shares_dir, 'share')
1060+ self.main.vm.add_share(Share(path=share_path, volume_id='share',
1061+ other_username='other username'))
1062+ path = os.path.join(share_path, "foo.mp3")
1063+ self.main.fs.create(path, str(share_id))
1064+ self.main.fs.set_node_id(path, str(node_id))
1065+
1066+ d = defer.Deferred()
1067+ self._listen_for('AQ_CHANGE_PUBLIC_ACCESS_OK', d.callback)
1068+ self.main.event_q.push('AQ_CHANGE_PUBLIC_ACCESS_OK',
1069+ share_id=share_id, node_id=node_id,
1070+ is_public=is_public, public_url=public_url)
1071+ yield d
1072+
1073+ self.assertEqual(len(self.listener.zg.events), 2)
1074+ event = self.listener.zg.events[1]
1075+
1076+ self.assertEqual(event.interpretation,
1077+ Interpretation.CREATE_EVENT)
1078+ self.assertEqual(event.manifestation,
1079+ Manifestation.USER_ACTIVITY)
1080+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
1081+
1082+ public_file = event.subjects[0]
1083+ self.assertEqual(public_file.uri, public_url)
1084+ self.assertEqual(public_file.interpretation, Interpretation.AUDIO)
1085+ self.assertEqual(public_file.manifestation,
1086+ Manifestation.REMOTE_DATA_OBJECT)
1087+ self.assertTrue(public_file.origin.endswith(node_id))
1088+ self.assertEqual(public_file.mimetype, "audio/mpeg")
1089+ self.assertEqual(public_file.storage, STORAGE_NETWORK)
1090+
1091+ @defer.inlineCallbacks
1092+ def test_unpublish_url_is_logged(self):
1093+ """Unpublishing a file with a url is logged."""
1094+ share_id = "share"
1095+ node_id = "node_id"
1096+ is_public = False
1097+ public_url = 'http://example.com/foo.mp3'
1098+
1099+ share_path = os.path.join(self.shares_dir, 'share')
1100+ self.main.vm.add_share(Share(path=share_path, volume_id='share',
1101+ other_username='other username'))
1102+ path = os.path.join(share_path, "foo.mp3")
1103+ self.main.fs.create(path, str(share_id))
1104+ self.main.fs.set_node_id(path, str(node_id))
1105+
1106+ d = defer.Deferred()
1107+ self._listen_for('AQ_CHANGE_PUBLIC_ACCESS_OK', d.callback)
1108+ self.main.event_q.push('AQ_CHANGE_PUBLIC_ACCESS_OK',
1109+ share_id=share_id, node_id=node_id,
1110+ is_public=is_public, public_url=public_url)
1111+ yield d
1112+
1113+ self.assertEqual(len(self.listener.zg.events), 2)
1114+ event = self.listener.zg.events[1]
1115+
1116+ self.assertEqual(event.interpretation,
1117+ Interpretation.DELETE_EVENT)
1118+ self.assertEqual(event.manifestation,
1119+ Manifestation.USER_ACTIVITY)
1120+ self.assertEqual(event.actor, ACTOR_UBUNTUONE)
1121+
1122+ public_file = event.subjects[0]
1123+ self.assertEqual(public_file.uri, public_url)
1124+ self.assertEqual(public_file.interpretation, Interpretation.AUDIO)
1125+ self.assertEqual(public_file.manifestation,
1126+ Manifestation.DELETED_RESOURCE)
1127+ self.assertTrue(public_file.origin.endswith(node_id))
1128+ self.assertEqual(public_file.mimetype, "audio/mpeg")
1129+ self.assertEqual(public_file.storage, STORAGE_DELETED)
1130
1131=== modified file 'tests/syncdaemon/test_fsm.py'
1132--- tests/syncdaemon/test_fsm.py 2010-11-30 19:06:17 +0000
1133+++ tests/syncdaemon/test_fsm.py 2010-12-13 23:42:15 +0000
1134@@ -29,6 +29,7 @@
1135 from contrib.testing.testcase import (
1136 FakeVolumeManager,
1137 FakeMain,
1138+ Listener,
1139 MementoHandler
1140 )
1141
1142@@ -1874,6 +1875,41 @@
1143 # invalid uuid
1144 self.assertRaises(KeyError, self.fsm.move_to_conflict, "no-such-mdid")
1145
1146+ def test_conflict_file_pushes_event(self):
1147+ """A conflict with a file pushes FSM_FILE_CONFLICT."""
1148+ listener = Listener()
1149+ self.eq.subscribe(listener)
1150+
1151+ testfile = os.path.join(self.share_path, "path")
1152+ mdid = self.fsm.create(testfile, "share")
1153+ self.fsm.set_node_id(testfile, "uuid")
1154+ with open(testfile, "w") as fh:
1155+ fh.write("test!")
1156+
1157+ self.fsm.move_to_conflict(mdid)
1158+
1159+ new_name = testfile + self.fsm.CONFLICT_SUFFIX
1160+ kwargs = dict(old_name=testfile, new_name=new_name)
1161+
1162+ self.assertTrue(("FSM_FILE_CONFLICT", (), kwargs) in listener.events)
1163+
1164+ def test_conflict_dir_pushes_event(self):
1165+ """A conflict with a dir pushes FSM_DIR_CONFLICT."""
1166+ listener = Listener()
1167+ self.eq.subscribe(listener)
1168+
1169+ testdir = os.path.join(self.share_path, "path")
1170+ mdid = self.fsm.create(testdir, "share", is_dir=True)
1171+ self.fsm.set_node_id(testdir, "uuid")
1172+ os.mkdir(testdir)
1173+
1174+ self.fsm.move_to_conflict(mdid)
1175+
1176+ new_name = testdir + self.fsm.CONFLICT_SUFFIX
1177+ kwargs = dict(old_name=testdir, new_name=new_name)
1178+
1179+ self.assertTrue(("FSM_DIR_CONFLICT", (), kwargs) in listener.events)
1180+
1181 def test_upload_finished(self):
1182 """Test upload finished."""
1183 path = os.path.join(self.share_path, "path")
1184
1185=== modified file 'tests/syncdaemon/test_sync.py'
1186--- tests/syncdaemon/test_sync.py 2010-11-30 17:53:14 +0000
1187+++ tests/syncdaemon/test_sync.py 2010-12-13 23:42:15 +0000
1188@@ -1518,7 +1518,8 @@
1189 self.sync.handle_AQ_RESCAN_FROM_SCRATCH_OK(ROOT, [self.rootdt],
1190 100, 100)
1191
1192- self.assertEqual(called, [((ROOT, self.filetxtdelta.node_id), {})])
1193+ args = (ROOT, self.filetxtdelta.node_id, False)
1194+ self.assertEqual(called, [(args, {})])
1195
1196 def test_deletes_file_in_delta(self):
1197 """Files in delta should not be deleted."""
1198@@ -1569,5 +1570,56 @@
1199 [self.rootdt], 100, 100)
1200
1201 self.assertEqual(called, [
1202- (ROOT, dt.node_id),
1203- (ROOT, self.dirdelta.node_id)])
1204+ (ROOT, dt.node_id, False),
1205+ (ROOT, self.dirdelta.node_id, True)])
1206+
1207+
1208+class TestSyncEvents(BaseSync):
1209+ """Testing sync stuff related to events."""
1210+
1211+ def setUp(self):
1212+ """Do the setUp."""
1213+ super(TestSyncEvents, self).setUp()
1214+ self.sync = Sync(main=self.main)
1215+ self.handler.setLevel(logging.DEBUG)
1216+
1217+ key = FSKey(self.main.fs, share_id='', node_id='node_id')
1218+ self.ssmr = SyncStateMachineRunner(fsm=self.main.fs, main=self.main,
1219+ key=key, logger=None)
1220+ self.vm = self.main.vm
1221+ self.listener = Listener()
1222+ self.main.event_q.subscribe(self.listener)
1223+
1224+ def test_server_new_file_sends_event(self):
1225+ """When a new file is created on the server, an event is sent."""
1226+ # create the fake file
1227+ self.main.vm._got_root("parent_id")
1228+ self.sync._handle_SV_FILE_NEW(ROOT, "node_id", "parent_id", "file")
1229+
1230+ # check event
1231+ kwargs = dict(volume_id=ROOT, node_id='node_id', parent_id="parent_id",
1232+ name="file")
1233+ self.assertIn(("SV_FILE_NEW", (), kwargs), self.listener.events)
1234+
1235+ def test_server_new_dir_sends_event(self):
1236+ """When a new directory is created on the server, an event is sent."""
1237+
1238+ # create the fake dir
1239+ self.main.vm._got_root("parent_id")
1240+ self.sync._handle_SV_DIR_NEW(ROOT, "node_id", "parent_id", "file")
1241+
1242+ # check event
1243+ kwargs = dict(volume_id=ROOT, node_id='node_id', parent_id="parent_id",
1244+ name="file")
1245+ self.assertIn(("SV_DIR_NEW", (), kwargs), self.listener.events)
1246+
1247+ def test_server_file_deleted_sends_event(self):
1248+ """When a file is deleted, an event is sent."""
1249+
1250+ # delete the fake file
1251+ self.main.vm._got_root("parent_id")
1252+ self.sync._handle_SV_FILE_DELETED(ROOT, "node_id", True)
1253+
1254+ # check event
1255+ kwargs = dict(volume_id=ROOT, node_id='node_id', is_dir=True)
1256+ self.assertIn(("SV_FILE_DELETED", (), kwargs), self.listener.events)
1257
1258=== modified file 'ubuntuone/syncdaemon/event_logging.py'
1259--- ubuntuone/syncdaemon/event_logging.py 2010-12-09 14:28:21 +0000
1260+++ ubuntuone/syncdaemon/event_logging.py 2010-12-13 23:42:15 +0000
1261@@ -17,21 +17,33 @@
1262 # with this program. If not, see <http://www.gnu.org/licenses/>.
1263 """Event logging from SyncDaemon into Zeitgeist."""
1264
1265+import mimetypes
1266+
1267 from zeitgeist.datamodel import Event, Interpretation, Manifestation, Subject
1268+from zeitgeist.mimetypes import get_interpretation_for_mimetype
1269
1270 from ubuntuone.eventlog import zglog
1271-from ubuntuone.syncdaemon.volume_manager import Share
1272+from ubuntuone.syncdaemon.volume_manager import Share, UDF
1273
1274 ACTOR_UBUNTUONE = "dbus://com.ubuntuone.SyncDaemon.service"
1275 DIRECTORY_MIMETYPE = "inode/directory"
1276+DEFAULT_MIME = "application/octet-stream"
1277+DEFAULT_INTERPRETATION = Interpretation.DOCUMENT
1278 EVENT_INTERPRETATION_U1_FOLDER_SHARED = "u1://FolderShared"
1279 EVENT_INTERPRETATION_U1_FOLDER_UNSHARED = "u1://FolderUnshared"
1280 EVENT_INTERPRETATION_U1_SHARE_ACCEPTED = "u1://ShareAccepted"
1281 EVENT_INTERPRETATION_U1_SHARE_UNACCEPTED = "u1://ShareUnaccepted"
1282+EVENT_INTERPRETATION_U1_CONFLICT_RENAME = "u1://ConflictRename"
1283+EVENT_INTERPRETATION_U1_UDF_CREATED = "u1://UserFolderCreated"
1284+EVENT_INTERPRETATION_U1_UDF_DELETED = "u1://UserFolderDeleted"
1285+EVENT_INTERPRETATION_U1_UDF_SUBSCRIBED = "u1://UserFolderSubscribed"
1286+EVENT_INTERPRETATION_U1_UDF_UNSUBSCRIBED = "u1://UserFolderUnsubscribed"
1287 MANIFESTATION_U1_CONTACT_DATA_OBJECT = "u1://ContactDataObject"
1288 INTERPRETATION_U1_CONTACT = "u1://Contact"
1289 URI_PROTOCOL_U1 = "ubuntuone:"
1290+STORAGE_LOCAL = ""
1291 STORAGE_NETWORK = "net"
1292+STORAGE_DELETED = "deleted"
1293
1294 class ZeitgeistListener(object):
1295 """An Event Queue listener that logs into ZG."""
1296@@ -41,6 +53,8 @@
1297 self.fsm = fsm
1298 self.vm = vm
1299 self.zg = zglog.ZeitgeistLogger()
1300+ self.newly_created_server_files = set()
1301+ self.newly_created_local_files = set()
1302
1303 def handle_AQ_CREATE_SHARE_OK(self, share_id=None, marker=None):
1304 """Log the 'directory shared thru the server' event."""
1305@@ -153,7 +167,351 @@
1306
1307 self.zg.log(event)
1308
1309+ def log_udf_deleted(self, volume):
1310+ """Log the udf deleted event."""
1311+ folder = Subject.new_for_values(
1312+ uri=URI_PROTOCOL_U1 + str(volume.node_id),
1313+ interpretation=Interpretation.FOLDER,
1314+ manifestation=Manifestation.DELETED_RESOURCE,
1315+ origin="file:///" + volume.path,
1316+ mimetype=DIRECTORY_MIMETYPE,
1317+ storage=STORAGE_DELETED)
1318+
1319+ event = Event.new_for_values(
1320+ interpretation=EVENT_INTERPRETATION_U1_UDF_DELETED,
1321+ manifestation=Manifestation.USER_ACTIVITY,
1322+ actor=ACTOR_UBUNTUONE,
1323+ subjects=[folder])
1324+
1325+ self.zg.log(event)
1326+
1327 def handle_VM_VOLUME_DELETED(self, volume):
1328 """Log the share/UDF unaccepted event."""
1329 if isinstance(volume, Share):
1330 self.log_share_unaccepted(volume)
1331+ if isinstance(volume, UDF):
1332+ self.log_udf_deleted(volume)
1333+
1334+ def handle_VM_UDF_CREATED(self, udf):
1335+ """An udf was created. Log it into Zeitgeist."""
1336+ folder = Subject.new_for_values(
1337+ uri=URI_PROTOCOL_U1 + str(udf.node_id),
1338+ interpretation=Interpretation.FOLDER,
1339+ manifestation=Manifestation.REMOTE_DATA_OBJECT,
1340+ origin="file:///" + udf.path,
1341+ mimetype=DIRECTORY_MIMETYPE,
1342+ storage=STORAGE_NETWORK)
1343+
1344+ event = Event.new_for_values(
1345+ interpretation=EVENT_INTERPRETATION_U1_UDF_CREATED,
1346+ manifestation=Manifestation.USER_ACTIVITY,
1347+ actor=ACTOR_UBUNTUONE,
1348+ subjects=[folder])
1349+
1350+ self.zg.log(event)
1351+
1352+ def handle_VM_UDF_SUBSCRIBED(self, udf):
1353+ """An udf was subscribed."""
1354+
1355+ folder = Subject.new_for_values(
1356+ uri="file:///" + udf.path,
1357+ interpretation=Interpretation.FOLDER,
1358+ manifestation=Manifestation.FILE_DATA_OBJECT,
1359+ origin=URI_PROTOCOL_U1 + str(udf.node_id),
1360+ mimetype=DIRECTORY_MIMETYPE,
1361+ storage=STORAGE_LOCAL)
1362+
1363+ event = Event.new_for_values(
1364+ interpretation=EVENT_INTERPRETATION_U1_UDF_SUBSCRIBED,
1365+ manifestation=Manifestation.USER_ACTIVITY,
1366+ actor=ACTOR_UBUNTUONE,
1367+ subjects=[folder])
1368+
1369+ self.zg.log(event)
1370+
1371+ def handle_VM_UDF_UNSUBSCRIBED(self, udf):
1372+ """An udf was unsubscribed."""
1373+
1374+ folder = Subject.new_for_values(
1375+ uri="file:///" + udf.path,
1376+ interpretation=Interpretation.FOLDER,
1377+ manifestation=Manifestation.DELETED_RESOURCE,
1378+ origin=URI_PROTOCOL_U1 + str(udf.node_id),
1379+ mimetype=DIRECTORY_MIMETYPE,
1380+ storage=STORAGE_DELETED)
1381+
1382+ event = Event.new_for_values(
1383+ interpretation=EVENT_INTERPRETATION_U1_UDF_UNSUBSCRIBED,
1384+ manifestation=Manifestation.USER_ACTIVITY,
1385+ actor=ACTOR_UBUNTUONE,
1386+ subjects=[folder])
1387+
1388+ self.zg.log(event)
1389+
1390+ def handle_AQ_FILE_NEW_OK(self, volume_id, marker, new_id, new_generation):
1391+ """A new file was created on server. Store and wait till it uploads."""
1392+ self.newly_created_server_files.add((volume_id, new_id))
1393+
1394+ def get_mime_and_interpretation_for_filepath(self, filepath):
1395+ """Try to guess the mime and the interpretation from the path."""
1396+ mime, encoding = mimetypes.guess_type(filepath)
1397+ if mime is None:
1398+ return DEFAULT_MIME, DEFAULT_INTERPRETATION
1399+ interpret = get_interpretation_for_mimetype(mime)
1400+ if interpret is None:
1401+ return DEFAULT_MIME, Interpretation.DOCUMENT
1402+ return mime, interpret
1403+
1404+ def handle_AQ_UPLOAD_FINISHED(self, share_id, node_id, hash,
1405+ new_generation):
1406+ """A file finished uploading to the server."""
1407+
1408+ mdo = self.fsm.get_by_node_id(share_id, node_id)
1409+ path = self.fsm.get_abspath(share_id, mdo.path)
1410+
1411+ if (share_id, node_id) in self.newly_created_server_files:
1412+ self.newly_created_server_files.remove((share_id, node_id))
1413+ event_interpretation = Interpretation.CREATE_EVENT
1414+ else:
1415+ event_interpretation = Interpretation.MODIFY_EVENT
1416+
1417+ mime, interp = self.get_mime_and_interpretation_for_filepath(path)
1418+
1419+ file_subject = Subject.new_for_values(
1420+ uri=URI_PROTOCOL_U1 + str(node_id),
1421+ interpretation=interp,
1422+ manifestation=Manifestation.REMOTE_DATA_OBJECT,
1423+ origin="file:///" + path,
1424+ mimetype=mime,
1425+ storage=STORAGE_NETWORK)
1426+
1427+ event = Event.new_for_values(
1428+ interpretation=event_interpretation,
1429+ manifestation=Manifestation.SCHEDULED_ACTIVITY,
1430+ actor=ACTOR_UBUNTUONE,
1431+ subjects=[file_subject])
1432+
1433+ self.zg.log(event)
1434+
1435+ def handle_AQ_DIR_NEW_OK(self, volume_id, marker, new_id, new_generation):
1436+ """A dir was created on the server."""
1437+
1438+ mdo = self.fsm.get_by_node_id(volume_id, new_id)
1439+ path = self.fsm.get_abspath(volume_id, mdo.path)
1440+
1441+ file_subject = Subject.new_for_values(
1442+ uri=URI_PROTOCOL_U1 + str(new_id),
1443+ interpretation=Interpretation.FOLDER,
1444+ manifestation=Manifestation.REMOTE_DATA_OBJECT,
1445+ origin="file:///" + path,
1446+ mimetype=DIRECTORY_MIMETYPE,
1447+ storage=STORAGE_NETWORK)
1448+
1449+ event = Event.new_for_values(
1450+ interpretation=Interpretation.CREATE_EVENT,
1451+ manifestation=Manifestation.SCHEDULED_ACTIVITY,
1452+ actor=ACTOR_UBUNTUONE,
1453+ subjects=[file_subject])
1454+
1455+ self.zg.log(event)
1456+
1457+ def handle_SV_FILE_NEW(self, volume_id, node_id, parent_id, name):
1458+ """A file was created locally by Syncdaemon."""
1459+ self.newly_created_local_files.add((volume_id, node_id))
1460+
1461+ def handle_AQ_DOWNLOAD_FINISHED(self, share_id, node_id, server_hash):
1462+ """A file finished downloading from the server."""
1463+
1464+ mdo = self.fsm.get_by_node_id(share_id, node_id)
1465+ path = self.fsm.get_abspath(share_id, mdo.path)
1466+
1467+ if (share_id, node_id) in self.newly_created_local_files:
1468+ self.newly_created_local_files.remove((share_id, node_id))
1469+ event_interpretation = Interpretation.CREATE_EVENT
1470+ else:
1471+ event_interpretation = Interpretation.MODIFY_EVENT
1472+
1473+ mime, interp = self.get_mime_and_interpretation_for_filepath(path)
1474+
1475+ file_subject = Subject.new_for_values(
1476+ uri="file:///" + path,
1477+ interpretation=interp,
1478+ manifestation=Manifestation.FILE_DATA_OBJECT,
1479+ origin=URI_PROTOCOL_U1 + str(node_id),
1480+ mimetype=mime,
1481+ storage=STORAGE_LOCAL)
1482+
1483+ event = Event.new_for_values(
1484+ interpretation=event_interpretation,
1485+ manifestation=Manifestation.WORLD_ACTIVITY,
1486+ actor=ACTOR_UBUNTUONE,
1487+ subjects=[file_subject])
1488+
1489+ self.zg.log(event)
1490+
1491+ def handle_SV_DIR_NEW(self, volume_id, node_id, parent_id, name):
1492+ """A file finished downloading from the server."""
1493+
1494+ mdo = self.fsm.get_by_node_id(volume_id, node_id)
1495+ path = self.fsm.get_abspath(volume_id, mdo.path)
1496+
1497+ file_subject = Subject.new_for_values(
1498+ uri="file:///" + path,
1499+ interpretation=Interpretation.FOLDER,
1500+ manifestation=Manifestation.FILE_DATA_OBJECT,
1501+ origin=URI_PROTOCOL_U1 + str(node_id),
1502+ mimetype=DIRECTORY_MIMETYPE,
1503+ storage=STORAGE_LOCAL)
1504+
1505+ event = Event.new_for_values(
1506+ interpretation=Interpretation.CREATE_EVENT,
1507+ manifestation=Manifestation.WORLD_ACTIVITY,
1508+ actor=ACTOR_UBUNTUONE,
1509+ subjects=[file_subject])
1510+
1511+ self.zg.log(event)
1512+
1513+ def handle_SV_FILE_DELETED(self, volume_id, node_id, is_dir):
1514+ """A file or folder was deleted locally by Syncdaemon."""
1515+ mdo = self.fsm.get_by_node_id(volume_id, node_id)
1516+ path = self.fsm.get_abspath(volume_id, mdo.path)
1517+
1518+ if is_dir:
1519+ mime, interp = DIRECTORY_MIMETYPE, Interpretation.FOLDER
1520+ else:
1521+ mime, interp = self.get_mime_and_interpretation_for_filepath(path)
1522+
1523+ file_subject = Subject.new_for_values(
1524+ uri="file:///" + path,
1525+ interpretation=interp,
1526+ manifestation=Manifestation.DELETED_RESOURCE,
1527+ origin=URI_PROTOCOL_U1 + str(node_id),
1528+ mimetype=mime,
1529+ storage=STORAGE_DELETED)
1530+
1531+ event = Event.new_for_values(
1532+ interpretation=Interpretation.DELETE_EVENT,
1533+ manifestation=Manifestation.WORLD_ACTIVITY,
1534+ actor=ACTOR_UBUNTUONE,
1535+ subjects=[file_subject])
1536+
1537+ self.zg.log(event)
1538+
1539+ def handle_AQ_UNLINK_OK(self, share_id, parent_id, node_id,
1540+ new_generation):
1541+ """A file or folder was deleted on the server by Syncdaemon,"""
1542+ mdo = self.fsm.get_by_node_id(share_id, node_id)
1543+ path = self.fsm.get_abspath(share_id, mdo.path)
1544+
1545+ if mdo.is_dir:
1546+ mime, interp = DIRECTORY_MIMETYPE, Interpretation.FOLDER
1547+ else:
1548+ mime, interp = self.get_mime_and_interpretation_for_filepath(path)
1549+
1550+ file_subject = Subject.new_for_values(
1551+ uri=URI_PROTOCOL_U1 + str(node_id),
1552+ interpretation=interp,
1553+ manifestation=Manifestation.DELETED_RESOURCE,
1554+ origin="file:///" + path,
1555+ mimetype=mime,
1556+ storage=STORAGE_DELETED)
1557+
1558+ event = Event.new_for_values(
1559+ interpretation=Interpretation.DELETE_EVENT,
1560+ manifestation=Manifestation.SCHEDULED_ACTIVITY,
1561+ actor=ACTOR_UBUNTUONE,
1562+ subjects=[file_subject])
1563+
1564+ self.zg.log(event)
1565+
1566+ def handle_FSM_FILE_CONFLICT(self, old_name, new_name):
1567+ """A file was renamed because of conflict."""
1568+ mime, interp = self.get_mime_and_interpretation_for_filepath(old_name)
1569+
1570+ file_subject = Subject.new_for_values(
1571+ uri="file:///" + new_name,
1572+ interpretation=interp,
1573+ manifestation=Manifestation.FILE_DATA_OBJECT,
1574+ origin="file:///" + old_name,
1575+ mimetype=mime,
1576+ storage=STORAGE_LOCAL)
1577+
1578+ event = Event.new_for_values(
1579+ interpretation=EVENT_INTERPRETATION_U1_CONFLICT_RENAME,
1580+ manifestation=Manifestation.WORLD_ACTIVITY,
1581+ actor=ACTOR_UBUNTUONE,
1582+ subjects=[file_subject])
1583+
1584+ self.zg.log(event)
1585+
1586+ def handle_FSM_DIR_CONFLICT(self, old_name, new_name):
1587+ """A dir was renamed because of conflict."""
1588+ folder_subject = Subject.new_for_values(
1589+ uri="file:///" + new_name,
1590+ interpretation=Interpretation.FOLDER,
1591+ manifestation=Manifestation.FILE_DATA_OBJECT,
1592+ origin="file:///" + old_name,
1593+ mimetype=DIRECTORY_MIMETYPE,
1594+ storage=STORAGE_LOCAL)
1595+
1596+ event = Event.new_for_values(
1597+ interpretation=EVENT_INTERPRETATION_U1_CONFLICT_RENAME,
1598+ manifestation=Manifestation.WORLD_ACTIVITY,
1599+ actor=ACTOR_UBUNTUONE,
1600+ subjects=[folder_subject])
1601+
1602+ self.zg.log(event)
1603+
1604+ def handle_AQ_CHANGE_PUBLIC_ACCESS_OK(self, share_id, node_id, is_public,
1605+ public_url):
1606+ """The status of a published resource changed. Log it!"""
1607+ if is_public:
1608+ self.log_publishing(share_id, node_id, is_public, public_url)
1609+ else:
1610+ self.log_unpublishing(share_id, node_id, is_public, public_url)
1611+
1612+ def log_publishing(self, share_id, node_id, is_public, public_url):
1613+ """Log the publishing of a resource."""
1614+ mime, interp = self.get_mime_and_interpretation_for_filepath(
1615+ public_url)
1616+
1617+ origin = "" if node_id is None else URI_PROTOCOL_U1 + str(node_id)
1618+
1619+ public_file = Subject.new_for_values(
1620+ uri=public_url,
1621+ interpretation=interp,
1622+ manifestation=Manifestation.REMOTE_DATA_OBJECT,
1623+ origin=origin,
1624+ mimetype=mime,
1625+ storage=STORAGE_NETWORK)
1626+
1627+ event = Event.new_for_values(
1628+ interpretation=Interpretation.CREATE_EVENT,
1629+ manifestation=Manifestation.USER_ACTIVITY,
1630+ actor=ACTOR_UBUNTUONE,
1631+ subjects=[public_file])
1632+
1633+ self.zg.log(event)
1634+
1635+ def log_unpublishing(self, share_id, node_id, is_public, public_url):
1636+ """Log the unpublishing of a resource."""
1637+ mime, interp = self.get_mime_and_interpretation_for_filepath(
1638+ public_url)
1639+
1640+ origin = "" if node_id is None else URI_PROTOCOL_U1 + str(node_id)
1641+
1642+ public_file = Subject.new_for_values(
1643+ uri=public_url,
1644+ interpretation=interp,
1645+ manifestation=Manifestation.DELETED_RESOURCE,
1646+ origin=origin,
1647+ mimetype=mime,
1648+ storage=STORAGE_DELETED)
1649+
1650+ event = Event.new_for_values(
1651+ interpretation=Interpretation.DELETE_EVENT,
1652+ manifestation=Manifestation.USER_ACTIVITY,
1653+ actor=ACTOR_UBUNTUONE,
1654+ subjects=[public_file])
1655+
1656+ self.zg.log(event)
1657
1658=== modified file 'ubuntuone/syncdaemon/event_queue.py'
1659--- ubuntuone/syncdaemon/event_queue.py 2010-12-09 21:05:56 +0000
1660+++ ubuntuone/syncdaemon/event_queue.py 2010-12-13 23:42:15 +0000
1661@@ -101,6 +101,9 @@
1662 'SV_VOLUME_CREATED': ('volume',),
1663 'SV_VOLUME_DELETED': ('volume_id',),
1664 'SV_VOLUME_NEW_GENERATION': ('volume_id', 'generation'),
1665+ 'SV_FILE_NEW': ('volume_id', 'node_id', 'parent_id', 'name'),
1666+ 'SV_DIR_NEW': ('volume_id', 'node_id', 'parent_id', 'name'),
1667+ 'SV_FILE_DELETED': ('volume_id', 'node_id', 'is_dir'),
1668
1669 'HQ_HASH_NEW': ('path', 'hash', 'crc32', 'size', 'stat'),
1670 'HQ_HASH_ERROR': ('mdid',),
1671@@ -138,6 +141,9 @@
1672 'SYS_QUOTA_EXCEEDED': ('volume_id', 'free_bytes'),
1673 'SYS_BROKEN_NODE': ('volume_id', 'node_id', 'path', 'mdid'),
1674
1675+ 'FSM_FILE_CONFLICT': ('old_name', 'new_name'),
1676+ 'FSM_DIR_CONFLICT': ('old_name', 'new_name'),
1677+
1678 'VM_UDF_SUBSCRIBED': ('udf',),
1679 'VM_UDF_SUBSCRIBE_ERROR': ('udf_id', 'error'),
1680 'VM_UDF_UNSUBSCRIBED': ('udf',),
1681
1682=== modified file 'ubuntuone/syncdaemon/filesystem_manager.py'
1683--- ubuntuone/syncdaemon/filesystem_manager.py 2010-12-02 21:27:56 +0000
1684+++ ubuntuone/syncdaemon/filesystem_manager.py 2010-12-13 23:42:15 +0000
1685@@ -840,7 +840,8 @@
1686 while path_exists(to_path):
1687 ind += 1
1688 to_path = base_to_path + "." + str(ind)
1689- if mdobj["is_dir"]:
1690+ is_dir = mdobj["is_dir"]
1691+ if is_dir:
1692 expected_event = "FS_DIR_MOVE"
1693 else:
1694 expected_event = "FS_FILE_MOVE"
1695@@ -848,6 +849,8 @@
1696 try:
1697 self.eq.add_to_mute_filter(expected_event, path, to_path)
1698 rename(path, to_path)
1699+ event = "FSM_DIR_CONFLICT" if is_dir else "FSM_FILE_CONFLICT"
1700+ self.eq.push(event, old_name=path, new_name=to_path)
1701 except OSError, e:
1702 self.eq.rm_from_mute_filter(expected_event, path, to_path)
1703 if e.errno == errno.ENOENT:
1704
1705=== modified file 'ubuntuone/syncdaemon/sync.py'
1706--- ubuntuone/syncdaemon/sync.py 2010-11-08 16:45:50 +0000
1707+++ ubuntuone/syncdaemon/sync.py 2010-12-13 23:42:15 +0000
1708@@ -830,6 +830,8 @@
1709 log = FileLogger(self.logger, key)
1710 ssmr = SyncStateMachineRunner(self.fsm, self.m, key, log)
1711 ssmr.on_event("SV_FILE_NEW", {}, share_id, node_id, parent_id, name)
1712+ self.m.event_q.push('SV_FILE_NEW', volume_id=share_id,
1713+ node_id=node_id, parent_id=parent_id, name=name)
1714
1715 def _handle_SV_DIR_NEW(self, share_id, node_id, parent_id, name):
1716 """on SV_DIR_NEW"""
1717@@ -839,13 +841,17 @@
1718 log = FileLogger(self.logger, key)
1719 ssmr = SyncStateMachineRunner(self.fsm, self.m, key, log)
1720 ssmr.on_event("SV_DIR_NEW", {}, share_id, node_id, parent_id, name)
1721+ self.m.event_q.push('SV_DIR_NEW', volume_id=share_id,
1722+ node_id=node_id, parent_id=parent_id, name=name)
1723
1724- def _handle_SV_FILE_DELETED(self, share_id, node_id):
1725+ def _handle_SV_FILE_DELETED(self, share_id, node_id, is_dir):
1726 """on SV_FILE_DELETED. Not called by EQ anymore."""
1727 key = FSKey(self.m.fs, share_id=share_id, node_id=node_id)
1728 log = FileLogger(self.logger, key)
1729 ssmr = SyncStateMachineRunner(self.fsm, self.m, key, log)
1730 ssmr.on_event("SV_FILE_DELETED", {})
1731+ self.m.event_q.push('SV_FILE_DELETED', volume_id=share_id,
1732+ node_id=node_id, is_dir=is_dir)
1733
1734 def handle_AQ_DOWNLOAD_FINISHED(self, share_id, node_id, server_hash):
1735 """on AQ_DOWNLOAD_FINISHED"""
1736@@ -1078,7 +1084,8 @@
1737 # about it
1738 if not dt.is_live:
1739 to_delete.append(dt)
1740- self._handle_SV_FILE_DELETED(dt.share_id, dt.node_id)
1741+ self._handle_SV_FILE_DELETED(dt.share_id, dt.node_id,
1742+ is_dir)
1743 # nothing else should happen with this
1744 continue
1745
1746@@ -1194,7 +1201,7 @@
1747 continue
1748
1749 if not node_id in live_nodes:
1750- self._handle_SV_FILE_DELETED(volume_id, node_id)
1751+ self._handle_SV_FILE_DELETED(volume_id, node_id, node.is_dir)
1752 deletes += 1
1753
1754 self.logger.info(
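
A minimal standalone sketch (not part of the branch) of the mimetype fallback that the new get_mime_and_interpretation_for_filepath() implements: the mime is guessed from the path with the stdlib mimetypes module, the Zeitgeist interpretation is looked up with get_interpretation_for_mimetype(), and both fall back to application/octet-stream plus Interpretation.DOCUMENT when either lookup fails. The filenames below are made up for illustration, and python-zeitgeist must be installed for the imports to resolve.

import mimetypes

from zeitgeist.datamodel import Interpretation
from zeitgeist.mimetypes import get_interpretation_for_mimetype

DEFAULT_MIME = "application/octet-stream"
DEFAULT_INTERPRETATION = Interpretation.DOCUMENT

def guess_mime_and_interpretation(filepath):
    """Return (mime, interpretation) for a path, with safe defaults."""
    mime, _encoding = mimetypes.guess_type(filepath)
    if mime is None:
        # Unknown extension: fall back to a generic document.
        return DEFAULT_MIME, DEFAULT_INTERPRETATION
    interpretation = get_interpretation_for_mimetype(mime)
    if interpretation is None:
        # Known mimetype but no matching Zeitgeist interpretation: same fallback.
        return DEFAULT_MIME, DEFAULT_INTERPRETATION
    return mime, interpretation

# "report.pdf" resolves to ("application/pdf", <a document-type interpretation>),
# while "mystery.blob" falls back to the defaults above.
print guess_mime_and_interpretation("report.pdf")
print guess_mime_and_interpretation("mystery.blob")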
