Merge lp:~mandel/ubuntuone-client/implement_basic_remote_objects into lp:ubuntuone-client

Proposed by Manuel de la Peña
Status: Merged
Approved by: Manuel de la Peña
Approved revision: 888
Merged at revision: 897
Proposed branch: lp:~mandel/ubuntuone-client/implement_basic_remote_objects
Merge into: lp:ubuntuone-client
Prerequisite: lp:~mandel/ubuntuone-client/add_public_files_remote_object_tests
Diff against target: 1277 lines (+898/-74)
3 files modified
tests/platform/test_interaction_interfaces.py (+35/-35)
tests/platform/windows/test_ipc.py (+56/-34)
ubuntuone/platform/windows/dbus_interface.py (+807/-5)
To merge this branch: bzr merge lp:~mandel/ubuntuone-client/implement_basic_remote_objects
Reviewer Review Type Date Requested Status
Roberto Alsina (community) Approve
Facundo Batista (community) Approve
Review via email: mp+50885@code.launchpad.net

Commit message

Adds the basic implementation of the remote objects that will expose, on Windows, the DBus API found on Linux, using twisted.pb.

Description of the change

Adds the basic implementation of the remote objects that will expose, on Windows, the DBus API found on Linux, using twisted.pb. To run the tests on Windows, execute:

u1trial tests/platform/windows/test_ipc.py
u1trial tests/platform/test_interaction_interfaces.py

The tests on Linux should all pass and can be run with 'make check'.

To post a comment you must log in.
Revision history for this message
Facundo Batista (facundo) wrote :

In linux all tests pass. Not tested in windows.

review: Approve
Revision history for this message
Manuel de la Peña (mandel) wrote :

> In linux all tests pass. Not tested in windows

Tx! I'll make sure I get a windows review.

Revision history for this message
Roberto Alsina (ralsina) wrote :

+1

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'tests/platform/test_interaction_interfaces.py'
2--- tests/platform/test_interaction_interfaces.py 2011-02-16 10:04:56 +0000
3+++ tests/platform/test_interaction_interfaces.py 2011-02-23 09:03:55 +0000
4@@ -17,7 +17,7 @@
5 # with this program. If not, see <http://www.gnu.org/licenses/>.
6 """Test that the interaction_interfaces are correctly called."""
7
8-from mocker import MockerTestCase
9+from mocker import MockerTestCase, MATCH
10 from tests.platform import IPCTestCase
11
12 class TestStatusIPC(MockerTestCase, IPCTestCase):
13@@ -146,10 +146,10 @@
14 """Test if the method is relayed."""
15 result = 'nirvana'
16 last_event_interval = 'interval'
17- reply_handler = 'handler'
18- error_handler = 'error'
19- self.service.wait_for_nirvana(last_event_interval, reply_handler,
20- error_handler)
21+ reply_handler = lambda: None
22+ error_handler = lambda: None
23+ self.service.wait_for_nirvana(last_event_interval, MATCH(callable),
24+ MATCH(callable))
25 self.mocker.result(result)
26 self.mocker.replay()
27 self.assertEqual(result, self.sync.wait_for_nirvana(
28@@ -157,9 +157,9 @@
29
30 def test_quit(self):
31 """Test if the method is relayed."""
32- reply_handler = 'handler'
33- error_handler = 'error'
34- self.service.quit(reply_handler, error_handler)
35+ reply_handler = lambda: None
36+ error_handler = lambda: None
37+ self.service.quit(MATCH(callable), MATCH(callable))
38 self.mocker.replay()
39 self.sync.quit(reply_handler, error_handler)
40
41@@ -238,20 +238,20 @@
42 def test_accept_share(self):
43 """Test if the method is relayed."""
44 share_id = 'id'
45- reply_handler = 'handler'
46- error_handler = 'error'
47- self.syncdaemon_shares.accept_share(share_id, reply_handler,
48- error_handler)
49+ reply_handler = lambda: None
50+ error_handler = lambda: None
51+ self.syncdaemon_shares.accept_share(share_id, MATCH(callable),
52+ MATCH(callable))
53 self.mocker.replay()
54 self.shares.accept_share(share_id, reply_handler, error_handler)
55
56 def test_reject_share(self):
57 """Test if the method is relayed."""
58 share_id = 'id'
59- reply_handler = 'handler'
60- error_handler = 'error'
61- self.syncdaemon_shares.reject_share(share_id, reply_handler,
62- error_handler)
63+ reply_handler = lambda: None
64+ error_handler = lambda: None
65+ self.syncdaemon_shares.reject_share(share_id, MATCH(callable),
66+ MATCH(callable))
67 self.mocker.replay()
68 self.shares.reject_share(share_id, reply_handler, error_handler)
69
70@@ -324,10 +324,10 @@
71
72 def test_get_throttling_limits(self):
73 """Test if the method is relayed."""
74- reply_handler = 'handler'
75- error_handler = 'error'
76- self.syncdaemon_config.get_throttling_limits(reply_handler,
77- error_handler)
78+ reply_handler = lambda: None
79+ error_handler = lambda: None
80+ self.syncdaemon_config.get_throttling_limits(MATCH(callable),
81+ MATCH(callable))
82 self.mocker.replay()
83 self.config.get_throttling_limits(reply_handler, error_handler)
84
85@@ -335,39 +335,39 @@
86 """Test if the method is relayed."""
87 download = 'download'
88 upload = 'upload'
89- reply_handler = 'handler'
90- error_handler = 'error'
91+ reply_handler = lambda: None
92+ error_handler = lambda: None
93 self.syncdaemon_config.set_throttling_limits(download, upload,
94- reply_handler, error_handler)
95+ MATCH(callable), MATCH(callable))
96 self.mocker.replay()
97 self.config.set_throttling_limits(download, upload, reply_handler,
98 error_handler)
99
100 def test_enable_bandwidth_throttling(self):
101 """Test if the method is relayed."""
102- reply_handler = 'handler'
103- error_handler = 'error'
104- self.syncdaemon_config.enable_bandwidth_throttling(reply_handler,
105- error_handler)
106+ reply_handler = lambda: None
107+ error_handler = lambda: None
108+ self.syncdaemon_config.enable_bandwidth_throttling(MATCH(callable),
109+ MATCH(callable))
110 self.mocker.replay()
111 self.config.enable_bandwidth_throttling(reply_handler, error_handler)
112
113 def test_disable_bandwidth_throttling(self):
114 """Test if the method is relayed."""
115- reply_handler = 'handler'
116- error_handler = 'error'
117- self.syncdaemon_config.disable_bandwidth_throttling(reply_handler,
118- error_handler)
119+ reply_handler = lambda: None
120+ error_handler = lambda: None
121+ self.syncdaemon_config.disable_bandwidth_throttling(MATCH(callable),
122+ MATCH(callable))
123 self.mocker.replay()
124 self.config.disable_bandwidth_throttling(reply_handler, error_handler)
125
126 def test_bandwidth_throttling_enabled(self):
127 """Test if the method is relayed."""
128 result = 1
129- reply_handler = 'handler'
130- error_handler = 'error'
131- self.syncdaemon_config.bandwidth_throttling_enabled(reply_handler,
132- error_handler)
133+ reply_handler = lambda: None
134+ error_handler = lambda: None
135+ self.syncdaemon_config.bandwidth_throttling_enabled(MATCH(callable),
136+ MATCH(callable))
137 self.mocker.result(result)
138 self.mocker.replay()
139 self.assertEqual(result, self.config.bandwidth_throttling_enabled(
140
141=== modified file 'tests/platform/windows/test_ipc.py'
142--- tests/platform/windows/test_ipc.py 2011-02-23 09:03:55 +0000
143+++ tests/platform/windows/test_ipc.py 2011-02-23 09:03:55 +0000
144@@ -21,8 +21,17 @@
145
146 from unittest import TestCase
147
148-# to be later implemented, TDD work here
149-SignalBroadcaster = None
150+from ubuntuone.platform.windows.dbus_interface import (
151+ Config,
152+ Events,
153+ Folders,
154+ FileSystem,
155+ PublicFiles,
156+ Shares,
157+ SignalBroadcaster,
158+ Status,
159+ SyncDaemon
160+)
161
162 class PerspectiveBrokerTestCase(TestCase):
163 """Base test case for the IPC used on Windows."""
164@@ -30,12 +39,14 @@
165 def setUp(self):
166 """Setup tests."""
167 super(PerspectiveBrokerTestCase, self).setUp()
168- self.status = None
169- self.events = None
170- self.sync = None
171- self.shares = None
172- self.folders = None
173- self.public_files = None
174+ self.config = Config(None, None)
175+ self.status = Status(None, None, None)
176+ self.events = Events(None)
177+ self.sync = SyncDaemon(None, None, None, None)
178+ self.shares = Shares(None, None, None)
179+ self.folders = Folders(None, None)
180+ self.public_files = PublicFiles(None, None)
181+ self.fs = FileSystem(None, None)
182
183 class TestSignalBroadcaster(MockerTestCase):
184 """Test the signal brocaster code."""
185@@ -44,6 +55,7 @@
186 super(TestSignalBroadcaster, self).setUp()
187 self.client = self.mocker.mock()
188 self.broad_caster = SignalBroadcaster()
189+ self.broad_caster.clients.append(self.client)
190
191 def test_remote_register_to_signals(self):
192 """Assert that the client was added."""
193@@ -58,7 +70,8 @@
194 word = 'word'
195 signal_name = 'on_test'
196 self.client.callRemote(signal_name, first, second, word=word)
197- self.broad_caster.emit_gisnal(signal_name, first, second, word=word)
198+ self.mocker.replay()
199+ self.broad_caster.emit_signal(signal_name, first, second, word=word)
200
201
202 class TestStatusEmitSignals(PerspectiveBrokerTestCase, MockerTestCase):
203@@ -103,7 +116,7 @@
204 self.mocker.result(status)
205 self.signal_method('on_status_changed', status)
206 self.mocker.replay()
207- self.status.emit_status_changed()
208+ self.status.emit_status_changed(status)
209
210 def test_emit_download_started(self):
211 """Emit DownloadStarted."""
212@@ -118,7 +131,7 @@
213 string_info = {'test':'2', 'name':'3'}
214 self.signal_method('on_download_file_progress', download, string_info)
215 self.mocker.replay()
216- self.status.emit_download_file_progress(download, test=1, name=2)
217+ self.status.emit_download_file_progress(download, test=2, name=3)
218
219 def test_emit_download_finished(self):
220 """Emit DownloadFinished."""
221@@ -126,7 +139,7 @@
222 string_info = {'test':'2', 'name':'3'}
223 self.signal_method('on_download_finished', download, string_info)
224 self.mocker.replay()
225- self.status.emit_download_finished(download, test=1, name=2)
226+ self.status.emit_download_finished(download, test=2, name=3)
227
228 def test_emit_upload_started(self):
229 """Emit UploadStarted."""
230@@ -141,7 +154,7 @@
231 string_info = {'test':'2', 'name':'3'}
232 self.signal_method('on_upload_file_progress', upload, string_info)
233 self.mocker.replay()
234- self.status.emit_upload_file_progress(upload, test=1, name=2)
235+ self.status.emit_upload_file_progress(upload, test=2, name=3)
236
237 def test_emit_upload_finished(self):
238 """Emit UploadFinished."""
239@@ -149,7 +162,7 @@
240 string_info = {'test':'2', 'name':'3'}
241 self.signal_method('on_upload_finished', upload, string_info)
242 self.mocker.replay()
243- self.status.emit_upload_finished(upload, test=1, name=2)
244+ self.status.emit_upload_finished(upload, test=2, name=3)
245
246 def test_emit_account_changed(self):
247 """Emit AccountChanged."""
248@@ -219,7 +232,7 @@
249
250 def setUp(self):
251 """Setup tests."""
252- super(TestEventsEmitSignals, self).setUp()
253+ super(TestSharesEmitSignals, self).setUp()
254 self.signal_method = self.mocker.mock()
255 self.shares.emit_signal = self.signal_method
256 self.shares.syncdaemon_shares = self.mocker.mock()
257@@ -256,6 +269,8 @@
258 self.get_share_dict(share)
259 self.mocker.result(share_dict)
260 self.signal_method('on_share_deleted_error', share_dict, error)
261+ self.mocker.replay()
262+ self.shares.emit_share_delete_error(share, error)
263
264 def test_emit_free_space(self):
265 """Emit ShareChanged when free space changes """
266@@ -269,14 +284,14 @@
267 self.mocker.result(shares)
268 self.get_share_dict(share)
269 self.mocker.result(share_dict)
270- self.signal_method(share_dict)
271+ self.signal_method('on_share_changed', share_dict)
272 self.mocker.replay()
273 self.shares.emit_free_space(share_id, free_bytes)
274
275 def test_emit_share_created(self):
276 """Emit ShareCreated signal """
277 share_info = 'info'
278- self.signal_method(share_info)
279+ self.signal_method('on_share_created', share_info)
280 self.mocker.replay()
281 self.shares.emit_share_created(share_info)
282
283@@ -287,7 +302,7 @@
284 info = 'info'
285 self.shares.syncdaemon_shares.get_create_error_share_info(share_info)
286 self.mocker.result(info)
287- self.signal_method(info, error)
288+ self.signal_method('on_share_create_error', info, error)
289 self.mocker.replay()
290 self.shares.emit_share_create_error(share_info, error)
291
292@@ -297,7 +312,7 @@
293 answer = 'yes'
294 error = 'boom'
295 answer_info = dict(volume_id=share_id, answer=answer, error=error)
296- self.signal_method(answer_info)
297+ self.signal_method('on_share_answer_response', answer_info)
298 self.mocker.replay()
299 self.shares.emit_share_answer_response(share_id, answer, error)
300
301@@ -310,7 +325,7 @@
302 self.mocker.result(share)
303 self.get_share_dict(share)
304 self.mocker.result(share_dict)
305- self.signal_method(share_dict)
306+ self.signal_method('on_new_share', share_dict)
307 self.mocker.replay()
308 self.shares.emit_new_share(share_id)
309
310@@ -320,7 +335,7 @@
311 share_dict = {'share' : 'id'}
312 self.get_share_dict(share)
313 self.mocker.result(share_dict)
314- self.signal_method(share_dict)
315+ self.signal_method('on_share_subscribed', share_dict)
316 self.mocker.replay()
317 self.shares.emit_share_subscribed(share)
318
319@@ -328,7 +343,8 @@
320 """Emit the ShareSubscribeError signal"""
321 share_id = 'id'
322 error = 'error'
323- self.signal_method({'id': share_id}, str(error))
324+ self.signal_method('on_share_subscribed_error',
325+ {'id': share_id}, str(error))
326 self.mocker.replay()
327 self.shares.emit_share_subscribe_error(share_id, error)
328
329@@ -338,7 +354,7 @@
330 share_dict = {'share':'id'}
331 self.get_share_dict(share)
332 self.mocker.result(share_dict)
333- self.signal_method(share_dict)
334+ self.signal_method('on_share_unsubscribed', share_dict)
335 self.mocker.replay()
336 self.shares.emit_share_unsubscribed(share)
337
338@@ -346,7 +362,7 @@
339 """Emit the ShareUnSubscribeError signal"""
340 share_id = 'id'
341 error = 'error'
342- self.signal_method({'id': share_id}, str(error))
343+ self.signal_method('on_share_unsubscribed_error',{'id': share_id}, str(error))
344 self.mocker.replay()
345 self.shares.emit_share_unsubscribe_error(share_id, error)
346
347@@ -358,6 +374,7 @@
348 """Setup tests."""
349 super(TestFoldersEmitSignals, self).setUp()
350 self.signal_method = self.mocker.mock()
351+ self.folders.emit_signal = self.signal_method
352 self.get_udf_dict = self.mocker.replace(
353 'ubuntuone.syncdaemon.interaction_interfaces.get_udf_dict')
354
355@@ -367,7 +384,7 @@
356 udf_dict = {'udf':'id'}
357 self.get_udf_dict(folder)
358 self.mocker.result(udf_dict)
359- self.signal_method(udf_dict)
360+ self.signal_method('on_folder_deleted', udf_dict)
361 self.mocker.replay()
362 self.folders.emit_folder_deleted(folder)
363
364@@ -378,7 +395,7 @@
365 udf_dict = {'udf':'id'}
366 self.get_udf_dict(folder)
367 self.mocker.result(udf_dict)
368- self.signal_method(udf_dict, str(error))
369+ self.signal_method('on_folder_delete_error', udf_dict, str(error))
370 self.mocker.replay()
371 self.folders.emit_folder_delete_error(folder, error)
372
373@@ -388,7 +405,7 @@
374 udf_dict = {'udf':'id'}
375 self.get_udf_dict(folder)
376 self.mocker.result(udf_dict)
377- self.signal_method(udf_dict)
378+ self.signal_method('on_folder_subscribed', udf_dict)
379 self.mocker.replay()
380 self.folders.emit_folder_subscribed(folder)
381
382@@ -396,7 +413,8 @@
383 """Emit the FolderSubscribeError signal"""
384 folder_id = 'id'
385 error = 'error'
386- self.signal_method({'id':folder_id}, str(error))
387+ self.signal_method('on_folder_subscribe_error',
388+ {'id':folder_id}, str(error))
389 self.mocker.replay()
390 self.folders.emit_folder_subscribe_error(folder_id, error)
391
392@@ -406,7 +424,7 @@
393 udf_dict = {'udf':'id'}
394 self.get_udf_dict(folder)
395 self.mocker.result(udf_dict)
396- self.signal_method(udf_dict)
397+ self.signal_method('on_folder_unsubscribed', udf_dict)
398 self.mocker.replay()
399 self.folders.emit_folder_unsubscribed(folder)
400
401@@ -414,7 +432,8 @@
402 """Emit the FolderUnSubscribeError signal"""
403 folder_id = 'id'
404 error = 'error'
405- self.signal_method({'id':folder_id}, str(error))
406+ self.signal_method('on_folder_unsubscribe_error',
407+ {'id':folder_id}, str(error))
408 self.mocker.replay()
409 self.folders.emit_folder_unsubscribe_error(folder_id, error)
410
411@@ -426,6 +445,7 @@
412 """Setup tests."""
413 super(TestPublicFilesEmitSignals, self).setUp()
414 self.signal_method = self.mocker.mock()
415+ self.public_files.emit_signal = self.signal_method
416 self.public_files.syncdaemon_public_files = self.mocker.mock()
417 self.bool_str = self.mocker.replace(
418 'ubuntuone.syncdaemon.interaction_interfaces.bool_str')
419@@ -441,7 +461,8 @@
420 self.mocker.result(path)
421 self.bool_str(is_public)
422 self.mocker.result('True')
423- self.signal_method(dict(share_id=share_id, node_id=node_id,
424+ self.signal_method('on_public_access_changed',
425+ dict(share_id=share_id, node_id=node_id,
426 is_public='True', public_url=public_url,
427 path=path))
428 self.mocker.replay()
429@@ -456,7 +477,8 @@
430 path = 'path'
431 self.public_files.syncdaemon_public_files.get_path(share_id, node_id)
432 self.mocker.result(path)
433- self.signal_method(dict(share_id=share_id, node_id=node_id, path=path),
434+ self.signal_method('on_public_access_change_error',
435+ dict(share_id=share_id, node_id=node_id, path=path),
436 error)
437 self.mocker.replay()
438 self.public_files.emit_public_access_change_error(share_id, node_id,
439@@ -474,14 +496,14 @@
440 public_url=public_url, path=path)]
441 self.public_files.syncdaemon_public_files.get_path(volume_id, node_id)
442 self.mocker.result(path)
443- self.signal_method(files)
444+ self.signal_method('on_public_files_list',files)
445 self.mocker.replay()
446 self.public_files.emit_public_files_list(public_files)
447
448 def test_emit_public_files_list_error(self):
449 """Emit the PublicFilesListError signal."""
450 error = 'error'
451- self.signal_method(error)
452+ self.signal_method('on_public_files_list_error', error)
453 self.mocker.replay()
454 self.public_files.emit_public_files_list_error(error)
455
456
457=== modified file 'ubuntuone/platform/windows/dbus_interface.py'
458--- ubuntuone/platform/windows/dbus_interface.py 2011-02-14 11:56:44 +0000
459+++ ubuntuone/platform/windows/dbus_interface.py 2011-02-23 09:03:55 +0000
460@@ -17,9 +17,811 @@
461 # with this program. If not, see <http://www.gnu.org/licenses/>.
462 """IPC implementation that replaces Dbus."""
463
464+import logging
465+
466+from twisted.spread.pb import Referenceable
467+from ubuntuone.syncdaemon.interaction_interfaces import (
468+ bool_str,
469+ get_share_dict,
470+ get_udf_dict,
471+ SyncdaemonConfig,
472+ SyncdaemonEvents,
473+ SyncdaemonFileSystem,
474+ SyncdaemonFolders,
475+ SyncdaemonPublicFiles,
476+ SyncdaemonService,
477+ SyncdaemonShares,
478+ SyncdaemonStatus
479+)
480+
481+logger = logging.getLogger("ubuntuone.SyncDaemon.Pb")
482+
483+
484+def remote_handler(handler):
485+ if handler:
486+ handler = lambda x: handler.callRemote('execute', x)
487+ return handler
488+
489+class RemoteMeta(type):
490+ """Append remte_ to the remote methods.
491+
492+ Remote has to be appended to the remote method to work over pb but this
493+ names cannot be used since the other platforms do not expect the remote
494+ prefix. This metaclass create those prefix so that the methods can be
495+ correctly called.
496+ """
497+
498+ def __new__(cls, name, bases, attrs):
499+ remote_calls = attrs.get('remote_calls', None)
500+ if remote_calls:
501+ for current in remote_calls:
502+ attrs['remote_' + current] = attrs[current]
503+ return super(RemoteMeta, cls).__new__(cls, name, bases, attrs)
504+
505+
506+class SignalBroadcaster(object):
507+ """Object that allows to emit signals to clients over the IPC."""
508+
509+ def __init__(self):
510+ """Create a new instance."""
511+ self.clients = []
512+
513+ def remote_register_to_signals(self, client):
514+ """Allow a client to register to a signal."""
515+ self.clients.append(client)
516+
517+ def emit_signal(self, signal_name, *args, **kwargs):
518+ """Emit the given signal to the clients."""
519+ for current_client in self.clients:
520+ try:
521+ current_client.callRemote(signal_name, *args, **kwargs)
522+ except:
523+ logger.warn('Could not emit signal to %s', current_client)
524+
525+class Status(Referenceable, SignalBroadcaster):
526+ """ Represent the status of the syncdaemon """
527+
528+ __metaclass__ = RemoteMeta
529+
530+ # calls that will be accessible remotly
531+ remote_calls = [
532+ 'current_status',
533+ 'current_downloads',
534+ 'waiting_metadata',
535+ 'waiting_content',
536+ 'schedule_next',
537+ 'current_uploads',
538+ ]
539+
540+ def __init__(self, main, action_queue, fs_manager):
541+ """ Creates the instance."""
542+ super(Status, self).__init__()
543+ self.syncdaemon_status = SyncdaemonStatus(main, action_queue,
544+ fs_manager)
545+
546+ def current_status(self):
547+ """ return the current status of the system, one of: local_rescan,
548+ offline, trying_to_connect, server_rescan or online.
549+ """
550+ logger.debug('called current_status')
551+ return self.syncdaemon_status.current_status()
552+
553+ def current_downloads(self):
554+ """Return a list of files with a download in progress."""
555+ logger.debug('called current_downloads')
556+ return self.syncdaemon_status.current_downloads()
557+
558+ def waiting_metadata(self):
559+ """Return a list of the operations in the meta-queue.
560+
561+ As we don't have meta-queue anymore, this is faked.
562+ """
563+ logger.debug('called waiting_metadata')
564+ return self.syncdaemon_status.waiting_metadata()
565+
566+ def waiting_content(self):
567+ """Return a list of files that are waiting to be up- or downloaded.
568+
569+ As we don't have content-queue anymore, this is faked.
570+ """
571+ logger.debug('called waiting_content')
572+ return self.syncdaemon_status.waiting_content()
573+
574+ def schedule_next(self, share_id, node_id):
575+ """
576+ Make the command on the given share and node be next in the
577+ queue of waiting commands.
578+ """
579+ logger.debug('called schedule_next')
580+ self.syncdaemon_status.schedule_next(share_id, node_id)
581+
582+ def current_uploads(self):
583+ """ return a list of files with a upload in progress """
584+ logger.debug('called current_uploads')
585+ return self.syncdaemon_status.current_uploads()
586+
587+ def emit_content_queue_changed(self):
588+ """Emit ContentQueueChanged."""
589+ self.emit_signal('on_content_queue_changed')
590+
591+ def emit_invalid_name(self, dirname, filename):
592+ """Emit InvalidName."""
593+ self.emit_signal('on_invalid_name', unicode(dirname), str(filename))
594+
595+ def emit_broken_node(self, volume_id, node_id, mdid, path):
596+ """Emit BrokenNode."""
597+ if mdid is None:
598+ mdid = ''
599+ if path is None:
600+ path = ''
601+ self.emit_signal('on_broken_node', volume_id, node_id, mdid,
602+ path.decode('utf8'))
603+
604+ def emit_status_changed(self, state):
605+ """Emit StatusChanged."""
606+ self.emit_signal('on_status_changed',
607+ self.syncdaemon_status.current_status())
608+
609+ def emit_download_started(self, download):
610+ """Emit DownloadStarted."""
611+ self.emit_signal('on_download_started', download)
612+
613+ def emit_download_file_progress(self, download, **info):
614+ """Emit DownloadFileProgress."""
615+ for k, v in info.copy().items():
616+ info[str(k)] = str(v)
617+ self.emit_signal('on_download_file_progress', download, info)
618+
619+ def emit_download_finished(self, download, **info):
620+ """Emit DownloadFinished."""
621+ for k, v in info.copy().items():
622+ info[str(k)] = str(v)
623+ self.emit_signal('on_download_finished', download, info)
624+
625+ def emit_upload_started(self, upload):
626+ """Emit UploadStarted."""
627+ self.emit_signal('on_upload_started', upload)
628+
629+ def emit_upload_file_progress(self, upload, **info):
630+ """Emit UploadFileProgress."""
631+ for k, v in info.copy().items():
632+ info[str(k)] = str(v)
633+ self.emit_signal('on_upload_file_progress', upload, info)
634+
635+ def emit_upload_finished(self, upload, **info):
636+ """Emit UploadFinished."""
637+ for k, v in info.copy().items():
638+ info[str(k)] = str(v)
639+ self.emit_signal('on_upload_finished', upload, info)
640+
641+ def emit_account_changed(self, account_info):
642+ """Emit AccountChanged."""
643+ info_dict = {'purchased_bytes': unicode(account_info.purchased_bytes)}
644+ self.emit_signal('on_account_changed', info_dict)
645+
646+ def emit_metaqueue_changed(self):
647+ """Emit MetaQueueChanged."""
648+ self.emit_signal('on_metaqueue_changed')
649+
650
651-class Status(object):
652- """ Represent the status of the syncdaemon """
653-
654- def __init__(self, bus_name, dbus_iface, syncdaemon_status=None):
655- pass
656\ No newline at end of file
657+class Events(Referenceable, SignalBroadcaster):
658+ """The events of the system translated to ipc signals."""
659+
660+ __metaclass__ = RemoteMeta
661+
662+ # calls that will be accessible remotly
663+ remote_calls = [
664+ 'push_event',
665+ ]
666+
667+ def __init__(self, event_queue):
668+ super(Events, self).__init__()
669+ self.events = SyncdaemonEvents(event_queue)
670+
671+ def emit_event(self, event):
672+ """Emit the signal."""
673+ event_dict = {}
674+ for key, value in event.iteritems():
675+ event_dict[str(key)] = str(value)
676+ self.emit_signal('on_event', event_dict)
677+
678+ def push_event(self, event_name, args):
679+ """Push an event to the event queue."""
680+ logger.debug('push_event: %r with %r', event_name, args)
681+ self.events.push_event(event_name, args)
682+
683+
684+class SyncDaemon(Referenceable, SignalBroadcaster):
685+ """ The Daemon ipc interface. """
686+
687+ __metaclass__ = RemoteMeta
688+
689+ # calls that will be accessible remotly
690+ remote_calls = [
691+ 'connect',
692+ 'disconnect',
693+ 'get_rootdir',
694+ 'get_sharesdir',
695+ 'get_sharesdir_link',
696+ 'wait_for_nirvana',
697+ 'quit',
698+ 'rescan_from_scratch',
699+ ]
700+
701+ def __init__(self, root, main, volume_manager, action_queue):
702+ """ Creates the instance."""
703+ self.service = SyncdaemonService(root, main, volume_manager,
704+ action_queue)
705+ self.clients = []
706+
707+ def connect(self):
708+ """ Connect to the server. """
709+ logger.debug('connect requested')
710+ self.service.connect()
711+
712+ def disconnect(self):
713+ """ Disconnect from the server. """
714+ logger.debug('disconnect requested')
715+ self.service.disconnect()
716+
717+ def get_rootdir(self):
718+ """ Returns the root dir/mount point. """
719+ logger.debug('called get_rootdir')
720+ return self.service.get_rootdir()
721+
722+ def get_sharesdir(self):
723+ """ Returns the shares dir/mount point. """
724+ logger.debug('called get_sharesdir')
725+ return self.service.get_sharesdir()
726+
727+ def get_sharesdir_link(self):
728+ """ Returns the shares dir/mount point. """
729+ logger.debug('called get_sharesdir_link')
730+ return self.service.get_sharesdir_link()
731+
732+ def wait_for_nirvana(self, last_event_interval,
733+ reply_handler=None, error_handler=None):
734+ """ call the reply handler when there are no more
735+ events or transfers.
736+ """
737+ logger.debug('called wait_for_nirvana')
738+ return self.service.wait_for_nirvana(last_event_interval,
739+ remote_handler(reply_handler), remote_handler(error_handler))
740+
741+ def quit(self, reply_handler=None, error_handler=None):
742+ """ shutdown the syncdaemon. """
743+ logger.debug('Quit requested')
744+ self.service.quit(remote_handler(reply_handler),
745+ remote_handler(error_handler))
746+
747+ def rescan_from_scratch(self, volume_id):
748+ """Request a rescan from scratch of the volume with volume_id."""
749+ self.service.rescan_from_scratch(volume_id)
750+
751+ def emit_root_mismatch(self, root_id, new_root_id):
752+ """Emit RootMismatch signal."""
753+ self.emit_signal('on_root_mismatch', root_id, new_root_id)
754+
755+ def emit_quota_exceeded(self, volume_dict):
756+ """Emit QuotaExceeded signal."""
757+ self.emit_signal('on_quota_exceeded', volume_dict)
758+
759+
760+class FileSystem(object, Referenceable):
761+ """ An ipc interface to the FileSystem Manager. """
762+
763+ __metaclass__ = RemoteMeta
764+
765+ # calls that will be accessible remotly
766+ remote_calls = [
767+ 'get_metadata',
768+ 'get_metadata_by_node',
769+ 'get_metadata_and_quick_tree_synced',
770+ 'get_dirty_nodes',
771+ ]
772+
773+ def __init__(self, fs_manager, action_queue):
774+ """ Creates the instance. """
775+ super(FileSystem, self).__init__()
776+ self.syncdaemon_filesystem = SyncdaemonFileSystem(fs_manager,
777+ action_queue)
778+
779+ def get_metadata(self, path):
780+ """Return the metadata (as a dict) for the specified path."""
781+ logger.debug('get_metadata by path: %r', path)
782+ return self.syncdaemon_filesystem.get_metadata(path)
783+
784+ def get_metadata_by_node(self, share_id, node_id):
785+ """Return the metadata (as a dict) for the specified share/node."""
786+ logger.debug('get_metadata by share: %r node: %r', share_id, node_id)
787+ return self.syncdaemon_filesystem.get_metadata_by_node(share_id,
788+ node_id)
789+
790+ def get_metadata_and_quick_tree_synced(self, path):
791+ """ returns the dict with the attributes of the metadata for
792+ the specified path, including the quick subtree status.
793+ """
794+ logger.debug('get_metadata_and_quick_tree_synced: %r', path)
795+ return self.syncdaemon_filesystem.get_metadata_and_quick_tree_synced(
796+ path)
797+
798+ def get_dirty_nodes(self):
799+ """Rerturn a list of dirty nodes."""
800+ return self.syncdaemon_filesystem.get_dirty_nodes()
801+
802+
803+class Shares(Referenceable, SignalBroadcaster):
804+ """A ipc interface to interact with shares."""
805+
806+ __metaclass__ = RemoteMeta
807+
808+ # calls that will be accessible remotly
809+ remote_calls = [
810+ 'get_shares',
811+ 'accept_share',
812+ 'reject_share',
813+ 'delete_share',
814+ 'subscribe',
815+ 'unsubscribe',
816+ 'create_share',
817+ 'create_shares',
818+ 'refresh_shares',
819+ 'get_shared',
820+ ]
821+
822+ def __init__(self, bus_name, fs_manager, volume_manager):
823+ """Create the instance."""
824+ self.syncdaemon_shares = SyncdaemonShares(fs_manager, volume_manager)
825+
826+ def get_shares(self):
827+ """Return a list of dicts, each dict represents a share."""
828+ logger.debug('called get_shares')
829+ return self.syncdaemon_shares.get_shares()
830+
831+ def accept_share(self, share_id, reply_handler=None, error_handler=None):
832+ """Accept a share.
833+
834+ A ShareAnswerOk|Error signal will be fired in the future as a
835+ success/failure indicator.
836+
837+ """
838+ logger.debug('accept_share: %r', share_id)
839+ self.syncdaemon_shares.accept_share(share_id,
840+ remote_handler(reply_handler), remote_handler(error_handler))
841+
842+ def reject_share(self, share_id, reply_handler=None, error_handler=None):
843+ """Reject a share."""
844+ logger.debug('reject_share: %r', share_id)
845+ self.syncdaemon_shares.reject_share(share_id,
846+ remote_handler(reply_handler),
847+ remote_handler(error_handler))
848+
849+ def delete_share(self, share_id):
850+ """Delete a Share, both kinds: "to me" and "from me"."""
851+ logger.debug('delete_share: %r', share_id)
852+ try:
853+ self.syncdaemon_shares.delete_share(share_id)
854+ except Exception, e:
855+ logger.exception('Error while deleting share: %r', share_id)
856+ self.emit_share_delete_error({'volume_id':share_id}, str(e))
857+ # propagate the error
858+ raise
859+
860+ def subscribe(self, share_id):
861+ """Subscribe to the specified share."""
862+ logger.debug('Shares.subscribe: %r', share_id)
863+ self.syncdaemon_shares.subscribe(share_id)
864+
865+ def unsubscribe(self, share_id):
866+ """Unsubscribe from the specified share."""
867+ logger.debug('Shares.unsubscribe: %r', share_id)
868+ self.syncdaemon_shares.unsubscribe(share_id)
869+
870+ def emit_share_changed(self, message, share):
871+ """ emits ShareChanged or ShareDeleted signal for the share
872+ notification.
873+ """
874+ logger.debug('emit_share_changed: message %r, share %r.',
875+ message, share)
876+ if message == 'deleted':
877+ self.emit_signal('on_share_deleted', get_share_dict(share))
878+ elif message == 'changed':
879+ self.emit_signal('on_share_changed', get_share_dict(share))
880+
881+ def emit_share_delete_error(self, share, error):
882+ """Emits ShareDeleteError signal."""
883+ logger.info('emit_share_delete_error: share %r, error %r.',
884+ share, error)
885+ self.emit_signal('on_share_deleted_error',
886+ get_share_dict(share), error)
887+
888+ def emit_free_space(self, share_id, free_bytes):
889+ """ emits ShareChanged when free space changes """
890+ if share_id in self.syncdaemon_shares.shares:
891+ share = self.syncdaemon_shares.shares[share_id]
892+ share_dict = get_share_dict(share)
893+ share_dict['free_bytes'] = unicode(free_bytes)
894+ self.emit_signal('on_share_changed',
895+ share_dict)
896+
    def create_share(self, path, username, name, access_level):
        """Share a subtree with the user identified by username.

        @param path: the path to share (the root of the subtree)
        @param username: the username to offer the share to
        @param name: the name of the share
        @param access_level: 'View' or 'Modify'
        """
        logger.debug('create share: %r, %r, %r, %r',
                     path, username, name, access_level)
        self.syncdaemon_shares.create_share(path, username, name, access_level)
908+
909+ def create_shares(self, path, usernames, name, access_level):
910+ """Share a subtree with several users at once.
911+
912+ @param path: that path to share (the root of the subtree)
913+ @param usernames: the user names to offer the share to
914+ @param name: the name of the share
915+ @param access_level: 'View' or 'Modify'
916+ """
917+ logger.debug('create shares: %r, %r, %r, %r',
918+ path, usernames, name, access_level)
919+ for user in usernames:
920+ self.syncdaemon_shares.create_share(path, user, name,
921+ access_level)
922+
923+ def emit_share_created(self, share_info):
924+ """ emits ShareCreated signal """
925+ logger.debug('emit_share_created: share_info %r.', share_info)
926+ self.emit_signal('on_share_created',
927+ share_info)
928+
    def emit_share_create_error(self, share_info, error):
        """Emit the ShareCreateError signal."""
        # enrich the raw share_info before broadcasting it
        info = self.syncdaemon_shares.get_create_error_share_info(share_info)
        logger.info('emit_share_create_error: share_info %r, error %r.',
                    info, error)
        self.emit_signal('on_share_create_error', info, error)
935+
    def refresh_shares(self):
        """Refresh the share list, requesting it from the server."""
        self.syncdaemon_shares.refresh_shares()
939+
940+ def get_shared(self):
941+ """ returns a list of dicts, each dict represents a shared share.
942+ A share might not have the path set, as we might be still fetching the
943+ nodes from the server. In this cases the path is ''
944+ """
945+ logger.debug('called get_shared')
946+ return self.syncdaemon_shares.get_shared()
947+
948+ def emit_share_answer_response(self, share_id, answer, error=None):
949+ """Emits ShareAnswerResponse signal."""
950+ answer_info = dict(volume_id=share_id, answer=answer)
951+ if error:
952+ answer_info['error'] = error
953+ logger.debug('emit_share_answer_response: answer_info %r.', answer_info)
954+ self.emit_signal('on_share_answer_response', answer_info)
955+
956+ def emit_new_share(self, share_id):
957+ """Emits NewShare signal."""
958+ share = self.syncdaemon_shares.get_volume(share_id)
959+ logger.debug('emit_new_share: share_id %r.', share_id)
960+ self.emit_signal('on_new_share', get_share_dict(share))
961+
    def emit_share_subscribed(self, share):
        """Emit the ShareSubscribed signal."""
        self.emit_signal('on_share_subscribed', get_share_dict(share))
965+
    def emit_share_subscribe_error(self, share_id, error):
        """Emit the ShareSubscribeError signal."""
        self.emit_signal('on_share_subscribed_error', {'id': share_id},
                         str(error))
970+
    def emit_share_unsubscribed(self, share):
        """Emit the ShareUnSubscribed signal."""
        self.emit_signal('on_share_unsubscribed', get_share_dict(share))
974+
    def emit_share_unsubscribe_error(self, share_id, error):
        """Emit the ShareUnSubscribeError signal."""
        self.emit_signal('on_share_unsubscribed_error', {'id': share_id},
                         str(error))
979+
class Config(Referenceable, object):
    """The Syncdaemon config/settings IPC interface.

    Exposes the syncdaemon configuration over twisted.pb, mirroring the
    DBus Config interface available on Linux.

    NOTE: the base order was (object, Referenceable), which is
    inconsistent with the sibling classes and raises an MRO TypeError
    when Referenceable is a new-style class; Referenceable must come
    before object.
    """

    __metaclass__ = RemoteMeta

    # calls that will be accessible remotely.
    # 'disable_udf_autosubscribe' and 'files_sync_enabled' are listed too:
    # the methods were defined below but missing from this list, so both
    # directions of each setting can now be driven over IPC.
    remote_calls = [
        'get_throttling_limits',
        'set_throttling_limits',
        'enable_bandwidth_throttling',
        'disable_bandwidth_throttling',
        'bandwidth_throttling_enabled',
        'udf_autosubscribe_enabled',
        'enable_udf_autosubscribe',
        'disable_udf_autosubscribe',
        'share_autosubscribe_enabled',
        'enable_share_autosubscribe',
        'disable_share_autosubscribe',
        'set_files_sync_enabled',
        'files_sync_enabled',
        'autoconnect_enabled',
        'set_autoconnect_enabled',
        'show_all_notifications_enabled',
        'enable_show_all_notifications',
        'disable_show_all_notifications',
    ]

    def __init__(self, main, action_queue):
        """Create the instance and its syncdaemon config backend."""
        super(Config, self).__init__()
        self.syncdaemon_config = SyncdaemonConfig(main, action_queue)

    def get_throttling_limits(self, reply_handler=None, error_handler=None):
        """Get the read/write limits from AQ and return a dict.

        Returns dict(download=int, upload=int); a value of -1 means the
        limit is not configured. The values are bytes/second.
        """
        logger.debug("called get_throttling_limits")
        return self.syncdaemon_config.get_throttling_limits(
            remote_handler(reply_handler), remote_handler(error_handler))

    def set_throttling_limits(self, download, upload,
                              reply_handler=None, error_handler=None):
        """Set the read and write limits. The expected values are bytes/sec."""
        logger.debug("called set_throttling_limits")
        self.syncdaemon_config.set_throttling_limits(download, upload,
            remote_handler(reply_handler), remote_handler(error_handler))

    def enable_bandwidth_throttling(self, reply_handler=None,
                                    error_handler=None):
        """Enable bandwidth throttling."""
        self.syncdaemon_config.enable_bandwidth_throttling(
            remote_handler(reply_handler), remote_handler(error_handler))

    def disable_bandwidth_throttling(self, reply_handler=None,
                                     error_handler=None):
        """Disable bandwidth throttling."""
        self.syncdaemon_config.disable_bandwidth_throttling(
            remote_handler(reply_handler), remote_handler(error_handler))

    def bandwidth_throttling_enabled(self, reply_handler=None,
                                     error_handler=None):
        """Return True (1) if bandwidth throttling is enabled, else False (0)."""
        return self.syncdaemon_config.bandwidth_throttling_enabled(
            remote_handler(reply_handler), remote_handler(error_handler))

    def udf_autosubscribe_enabled(self):
        """Return the udf_autosubscribe config value."""
        return self.syncdaemon_config.udf_autosubscribe_enabled()

    def enable_udf_autosubscribe(self):
        """Enable UDF autosubscribe."""
        self.syncdaemon_config.enable_udf_autosubscribe()

    def disable_udf_autosubscribe(self):
        """Disable UDF autosubscribe."""
        self.syncdaemon_config.disable_udf_autosubscribe()

    def share_autosubscribe_enabled(self):
        """Return the share_autosubscribe config value."""
        return self.syncdaemon_config.share_autosubscribe_enabled()

    def enable_share_autosubscribe(self):
        """Enable share autosubscribe."""
        self.syncdaemon_config.enable_share_autosubscribe()

    def disable_share_autosubscribe(self):
        """Disable share autosubscribe."""
        self.syncdaemon_config.disable_share_autosubscribe()

    def set_files_sync_enabled(self, enabled):
        """Enable/disable the file sync service."""
        logger.debug('called set_files_sync_enabled %d', enabled)
        self.syncdaemon_config.set_files_sync_enabled(enabled)

    def files_sync_enabled(self):
        """Return the files_sync_enabled config value."""
        logger.debug('called files_sync_enabled')
        return self.syncdaemon_config.files_sync_enabled()

    def autoconnect_enabled(self):
        """Return the autoconnect config value."""
        return self.syncdaemon_config.autoconnect_enabled()

    def set_autoconnect_enabled(self, enabled):
        """Enable/disable syncdaemon autoconnect."""
        self.syncdaemon_config.set_autoconnect_enabled(enabled)

    def show_all_notifications_enabled(self):
        """Return the show_all_notifications config value."""
        return self.syncdaemon_config.show_all_notifications_enabled()

    def enable_show_all_notifications(self):
        """Enable showing all notifications."""
        self.syncdaemon_config.enable_show_all_notifications()

    def disable_show_all_notifications(self):
        """Disable showing all notifications."""
        self.syncdaemon_config.disable_show_all_notifications()
1100+
1101+
class Folders(Referenceable, SignalBroadcaster):
    """An IPC interface to interact with User Defined Folders (UDFs)."""

    __metaclass__ = RemoteMeta

    # calls that will be accessible remotly
    remote_calls = [
        'create',
        'delete',
        'get_folders',
        'subscribe',
        'unsubscribe',
        'get_info',
        'refresh_volumes',
    ]

    def __init__(self, volume_manager, fs_manager):
        """Create the instance and its syncdaemon folders backend."""
        super(Folders, self).__init__()
        self.syncdaemon_folders = SyncdaemonFolders(volume_manager, fs_manager)

    def create(self, path):
        """Create a user defined folder in the specified path.

        On failure the error is logged and an on_folder_create_error
        signal is emitted instead of raising.
        """
        logger.debug('Folders.create: %r', path)
        try:
            self.syncdaemon_folders.create(path)
        except Exception, e:
            logger.exception('Error while creating udf: %r', path)
            self.emit_folder_create_error(path, str(e))

    def delete(self, folder_id):
        """Delete the folder specified by folder_id."""
        # local import, presumably to avoid a circular dependency at
        # module load time — TODO confirm
        from ubuntuone.syncdaemon.volume_manager import VolumeDoesNotExist
        logger.debug('Folders.delete: %r', folder_id)
        try:
            self.syncdaemon_folders.delete(folder_id)
        except VolumeDoesNotExist, e:
            # NOTE(review): emit_folder_delete_error calls get_udf_dict on
            # its first argument, but here it receives the folder *id*
            # rather than a folder object — confirm get_udf_dict copes.
            self.emit_folder_delete_error(folder_id, e)
        except Exception, e:
            logger.exception('Error while deleting volume: %r', folder_id)
            self.emit_folder_delete_error(folder_id, e)

    def get_folders(self):
        """Return the list of folders (a list of dicts)."""
        logger.debug('Folders.get_folders')
        return self.syncdaemon_folders.get_folders()

    def subscribe(self, folder_id):
        """Subscribe to the specified folder."""
        logger.debug('Folders.subscribe: %r', folder_id)
        self.syncdaemon_folders.subscribe(folder_id)

    def unsubscribe(self, folder_id):
        """Unsubscribe from the specified folder."""
        logger.debug('Folders.unsubscribe: %r', folder_id)
        self.syncdaemon_folders.unsubscribe(folder_id)

    def get_info(self, path):
        """Return a dict containing the folder information."""
        logger.debug('Folders.get_info: %r', path)
        return self.syncdaemon_folders.get_info(path)

    def refresh_volumes(self):
        """Refresh the volumes list, requesting it from the server."""
        self.syncdaemon_folders.refresh_volumes()

    def emit_folder_created(self, folder):
        """Emit the FolderCreated signal."""
        udf_dict = get_udf_dict(folder)
        self.emit_signal('on_folder_created', udf_dict)

    def emit_folder_create_error(self, path, error):
        """Emit the FolderCreateError signal."""
        info = dict(path=path.decode('utf-8'))
        self.emit_signal('on_folder_create_error', info, str(error))

    def emit_folder_deleted(self, folder):
        """Emit the FolderDeleted signal."""
        udf_dict = get_udf_dict(folder)
        self.emit_signal('on_folder_deleted', udf_dict)

    def emit_folder_delete_error(self, folder, error):
        """Emit the FolderDeleteError signal."""
        udf_dict = get_udf_dict(folder)
        self.emit_signal('on_folder_delete_error', udf_dict, str(error))

    def emit_folder_subscribed(self, folder):
        """Emit the FolderSubscribed signal."""
        udf_dict = get_udf_dict(folder)
        self.emit_signal('on_folder_subscribed', udf_dict)

    def emit_folder_subscribe_error(self, folder_id, error):
        """Emit the FolderSubscribeError signal."""
        self.emit_signal('on_folder_subscribe_error', {'id':folder_id},
                         str(error))

    def emit_folder_unsubscribed(self, folder):
        """Emit the FolderUnSubscribed signal."""
        udf_dict = get_udf_dict(folder)
        self.emit_signal('on_folder_unsubscribed', udf_dict)

    def emit_folder_unsubscribe_error(self, folder_id, error):
        """Emit the FolderUnSubscribeError signal."""
        self.emit_signal('on_folder_unsubscribe_error',
                         {'id':folder_id}, str(error))
1207+
1208+
class PublicFiles(Referenceable, SignalBroadcaster):
    """An IPC interface for handling public files."""

    __metaclass__ = RemoteMeta

    # calls that will be accessible remotly
    remote_calls = [
        'change_public_access',
        'get_public_files',
    ]

    def __init__(self, fs_manager, action_queue):
        """Create the instance and its syncdaemon public files backend."""
        super(PublicFiles, self).__init__()
        self.syncdaemon_public_files = SyncdaemonPublicFiles(fs_manager,
            action_queue)

    def change_public_access(self, share_id, node_id, is_public):
        """Change the public access of a file."""
        logger.debug('PublicFiles.change_public_access: %r, %r, %r',
                     share_id, node_id, is_public)
        self.syncdaemon_public_files.change_public_access(share_id, node_id,
            is_public)

    def get_public_files(self):
        """Request the list of public files to the server.

        The result will be sent in a PublicFilesList signal.
        """
        self.syncdaemon_public_files.get_public_files()

    def emit_public_access_changed(self, share_id, node_id, is_public,
                                   public_url):
        """Emit the PublicAccessChanged signal."""
        # normalize ids to str before looking up the path; the dict below
        # repeats the same conversion on the already-converted values
        share_id = str(share_id) if share_id else ''
        node_id = str(node_id)
        path = self.syncdaemon_public_files.get_path(share_id, node_id)
        info = dict(
            share_id=str(share_id) if share_id else '',
            node_id=str(node_id),
            is_public=bool_str(is_public),
            public_url=public_url if public_url else '',
            path=path)
        self.emit_signal('on_public_access_changed', info)

    def emit_public_access_change_error(self, share_id, node_id, error):
        """Emit the PublicAccessChangeError signal."""
        path = self.syncdaemon_public_files.get_path(share_id, node_id)
        info = dict(
            share_id=str(share_id) if share_id else '',
            node_id=str(node_id),
            path=path)
        self.emit_signal('on_public_access_change_error', info, str(error))

    def emit_public_files_list(self, public_files):
        """Emit the PublicFilesList signal.

        Each entry in the emitted list carries the volume id, node id,
        public url and the local path (decoded to unicode).
        """
        files = []
        for pf in public_files:
            volume_id = str(pf['volume_id'])
            node_id = str(pf['node_id'])
            public_url = str(pf['public_url'])
            path = self.syncdaemon_public_files.get_path(volume_id,
                node_id).decode('utf-8')
            files.append(dict(volume_id=volume_id, node_id=node_id,
                public_url=public_url, path=path))
        self.emit_signal('on_public_files_list', files)

    def emit_public_files_list_error(self, error):
        """Emit the PublicFilesListError signal."""
        self.emit_signal('on_public_files_list_error', str(error))

Subscribers

People subscribed via source and target branches