Merge lp:~mandel/ubuntuone-client/implement_basic_remote_objects into lp:ubuntuone-client

Proposed by Manuel de la Peña
Status: Merged
Approved by: Manuel de la Peña
Approved revision: 888
Merged at revision: 897
Proposed branch: lp:~mandel/ubuntuone-client/implement_basic_remote_objects
Merge into: lp:ubuntuone-client
Prerequisite: lp:~mandel/ubuntuone-client/add_public_files_remote_object_tests
Diff against target: 1277 lines (+898/-74)
3 files modified
tests/platform/test_interaction_interfaces.py (+35/-35)
tests/platform/windows/test_ipc.py (+56/-34)
ubuntuone/platform/windows/dbus_interface.py (+807/-5)
To merge this branch: bzr merge lp:~mandel/ubuntuone-client/implement_basic_remote_objects
Reviewer Review Type Date Requested Status
Roberto Alsina (community) Approve
Facundo Batista (community) Approve
Review via email: mp+50885@code.launchpad.net

Commit message

Adds the basic implementation of the remote objects that will expose on Windows, via twisted.pb, the DBus API found on Linux.

Description of the change

Adds the basic implementation of the remote objects that will expose on Windows, via twisted.pb, the DBus API found on Linux. To run the tests on Windows, execute:

u1trial tests/platform/windows/test_ipc.py
u1trial tests/platform/test_interaction_interfaces.py

The tests on Linux should all pass and can be run with 'make check'.
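
For reference, the remote objects in this branch are twisted.spread.pb Referenceables: methods listed in remote_calls get a remote_ alias through the RemoteMeta metaclass, and SignalBroadcaster pushes signals to registered clients with callRemote. The client-side sketch below shows how such an object could be used over pb; the port number and the assumption that the Status instance is published as the factory's root object are illustrative only, since the server wiring is not included in this branch.

from twisted.internet import reactor
from twisted.spread import pb


class StatusClient(pb.Referenceable):
    """Receive the signals broadcast by the remote Status object."""

    def remote_on_status_changed(self, status):
        # Invoked when the server side runs
        # emit_signal('on_status_changed', ...).
        print 'status changed:', status


def got_status(status_ref):
    """Register to signals, then call a method exposed via RemoteMeta."""
    client = StatusClient()
    d = status_ref.callRemote('register_to_signals', client)
    # 'current_status' is listed in Status.remote_calls, so RemoteMeta adds
    # a remote_current_status alias that pb can dispatch to.
    d.addCallback(lambda _: status_ref.callRemote('current_status'))

    def show(result):
        print 'current status:', result
    return d.addCallback(show)


if __name__ == '__main__':
    factory = pb.PBClientFactory()
    # The port and the choice of the Status instance as the root object
    # are assumptions; the server wiring is not part of this branch.
    reactor.connectTCP('localhost', 8789, factory)
    factory.getRootObject().addCallback(got_status)
    reactor.run()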

Revision history for this message
Facundo Batista (facundo) wrote :

In Linux all tests pass. Not tested in Windows.

review: Approve
Revision history for this message
Manuel de la Peña (mandel) wrote :

> In Linux all tests pass. Not tested in Windows.

Tx! I'll make sure I get a Windows review.

Revision history for this message
Roberto Alsina (ralsina) wrote :

+1

review: Approve

Preview Diff

=== modified file 'tests/platform/test_interaction_interfaces.py'
--- tests/platform/test_interaction_interfaces.py 2011-02-16 10:04:56 +0000
+++ tests/platform/test_interaction_interfaces.py 2011-02-23 09:03:55 +0000
@@ -17,7 +17,7 @@
17# with this program. If not, see <http://www.gnu.org/licenses/>.17# with this program. If not, see <http://www.gnu.org/licenses/>.
18"""Test that the interaction_interfaces are correctly called."""18"""Test that the interaction_interfaces are correctly called."""
1919
20from mocker import MockerTestCase20from mocker import MockerTestCase, MATCH
21from tests.platform import IPCTestCase21from tests.platform import IPCTestCase
2222
23class TestStatusIPC(MockerTestCase, IPCTestCase):23class TestStatusIPC(MockerTestCase, IPCTestCase):
@@ -146,10 +146,10 @@
146 """Test if the method is relayed."""146 """Test if the method is relayed."""
147 result = 'nirvana'147 result = 'nirvana'
148 last_event_interval = 'interval'148 last_event_interval = 'interval'
149 reply_handler = 'handler'149 reply_handler = lambda: None
150 error_handler = 'error'150 error_handler = lambda: None
151 self.service.wait_for_nirvana(last_event_interval, reply_handler,151 self.service.wait_for_nirvana(last_event_interval, MATCH(callable),
152 error_handler)152 MATCH(callable))
153 self.mocker.result(result)153 self.mocker.result(result)
154 self.mocker.replay()154 self.mocker.replay()
155 self.assertEqual(result, self.sync.wait_for_nirvana(155 self.assertEqual(result, self.sync.wait_for_nirvana(
@@ -157,9 +157,9 @@
157157
158 def test_quit(self):158 def test_quit(self):
159 """Test if the method is relayed."""159 """Test if the method is relayed."""
160 reply_handler = 'handler'160 reply_handler = lambda: None
161 error_handler = 'error'161 error_handler = lambda: None
162 self.service.quit(reply_handler, error_handler)162 self.service.quit(MATCH(callable), MATCH(callable))
163 self.mocker.replay()163 self.mocker.replay()
164 self.sync.quit(reply_handler, error_handler)164 self.sync.quit(reply_handler, error_handler)
165165
@@ -238,20 +238,20 @@
238 def test_accept_share(self):238 def test_accept_share(self):
239 """Test if the method is relayed."""239 """Test if the method is relayed."""
240 share_id = 'id'240 share_id = 'id'
241 reply_handler = 'handler'241 reply_handler = lambda: None
242 error_handler = 'error'242 error_handler = lambda: None
243 self.syncdaemon_shares.accept_share(share_id, reply_handler,243 self.syncdaemon_shares.accept_share(share_id, MATCH(callable),
244 error_handler)244 MATCH(callable))
245 self.mocker.replay()245 self.mocker.replay()
246 self.shares.accept_share(share_id, reply_handler, error_handler)246 self.shares.accept_share(share_id, reply_handler, error_handler)
247247
248 def test_reject_share(self):248 def test_reject_share(self):
249 """Test if the method is relayed."""249 """Test if the method is relayed."""
250 share_id = 'id'250 share_id = 'id'
251 reply_handler = 'handler'251 reply_handler = lambda: None
252 error_handler = 'error'252 error_handler = lambda: None
253 self.syncdaemon_shares.reject_share(share_id, reply_handler,253 self.syncdaemon_shares.reject_share(share_id, MATCH(callable),
254 error_handler)254 MATCH(callable))
255 self.mocker.replay()255 self.mocker.replay()
256 self.shares.reject_share(share_id, reply_handler, error_handler)256 self.shares.reject_share(share_id, reply_handler, error_handler)
257257
@@ -324,10 +324,10 @@
324324
325 def test_get_throttling_limits(self):325 def test_get_throttling_limits(self):
326 """Test if the method is relayed."""326 """Test if the method is relayed."""
327 reply_handler = 'handler'327 reply_handler = lambda: None
328 error_handler = 'error'328 error_handler = lambda: None
329 self.syncdaemon_config.get_throttling_limits(reply_handler,329 self.syncdaemon_config.get_throttling_limits(MATCH(callable),
330 error_handler)330 MATCH(callable))
331 self.mocker.replay()331 self.mocker.replay()
332 self.config.get_throttling_limits(reply_handler, error_handler)332 self.config.get_throttling_limits(reply_handler, error_handler)
333333
@@ -335,39 +335,39 @@
335 """Test if the method is relayed."""335 """Test if the method is relayed."""
336 download = 'download'336 download = 'download'
337 upload = 'upload'337 upload = 'upload'
338 reply_handler = 'handler'338 reply_handler = lambda: None
339 error_handler = 'error'339 error_handler = lambda: None
340 self.syncdaemon_config.set_throttling_limits(download, upload,340 self.syncdaemon_config.set_throttling_limits(download, upload,
341 reply_handler, error_handler)341 MATCH(callable), MATCH(callable))
342 self.mocker.replay()342 self.mocker.replay()
343 self.config.set_throttling_limits(download, upload, reply_handler,343 self.config.set_throttling_limits(download, upload, reply_handler,
344 error_handler)344 error_handler)
345345
346 def test_enable_bandwidth_throttling(self):346 def test_enable_bandwidth_throttling(self):
347 """Test if the method is relayed."""347 """Test if the method is relayed."""
348 reply_handler = 'handler'348 reply_handler = lambda: None
349 error_handler = 'error'349 error_handler = lambda: None
350 self.syncdaemon_config.enable_bandwidth_throttling(reply_handler,350 self.syncdaemon_config.enable_bandwidth_throttling(MATCH(callable),
351 error_handler)351 MATCH(callable))
352 self.mocker.replay()352 self.mocker.replay()
353 self.config.enable_bandwidth_throttling(reply_handler, error_handler)353 self.config.enable_bandwidth_throttling(reply_handler, error_handler)
354354
355 def test_disable_bandwidth_throttling(self):355 def test_disable_bandwidth_throttling(self):
356 """Test if the method is relayed."""356 """Test if the method is relayed."""
357 reply_handler = 'handler'357 reply_handler = lambda: None
358 error_handler = 'error'358 error_handler = lambda: None
359 self.syncdaemon_config.disable_bandwidth_throttling(reply_handler,359 self.syncdaemon_config.disable_bandwidth_throttling(MATCH(callable),
360 error_handler)360 MATCH(callable))
361 self.mocker.replay()361 self.mocker.replay()
362 self.config.disable_bandwidth_throttling(reply_handler, error_handler)362 self.config.disable_bandwidth_throttling(reply_handler, error_handler)
363363
364 def test_bandwidth_throttling_enabled(self):364 def test_bandwidth_throttling_enabled(self):
365 """Test if the method is relayed."""365 """Test if the method is relayed."""
366 result = 1366 result = 1
367 reply_handler = 'handler'367 reply_handler = lambda: None
368 error_handler = 'error'368 error_handler = lambda: None
369 self.syncdaemon_config.bandwidth_throttling_enabled(reply_handler,369 self.syncdaemon_config.bandwidth_throttling_enabled(MATCH(callable),
370 error_handler)370 MATCH(callable))
371 self.mocker.result(result)371 self.mocker.result(result)
372 self.mocker.replay()372 self.mocker.replay()
373 self.assertEqual(result, self.config.bandwidth_throttling_enabled(373 self.assertEqual(result, self.config.bandwidth_throttling_enabled(
374374
=== modified file 'tests/platform/windows/test_ipc.py'
--- tests/platform/windows/test_ipc.py 2011-02-23 09:03:55 +0000
+++ tests/platform/windows/test_ipc.py 2011-02-23 09:03:55 +0000
@@ -21,8 +21,17 @@
2121
22from unittest import TestCase22from unittest import TestCase
2323
24# to be later implemented, TDD work here24from ubuntuone.platform.windows.dbus_interface import (
25SignalBroadcaster = None25 Config,
26 Events,
27 Folders,
28 FileSystem,
29 PublicFiles,
30 Shares,
31 SignalBroadcaster,
32 Status,
33 SyncDaemon
34)
2635
27class PerspectiveBrokerTestCase(TestCase):36class PerspectiveBrokerTestCase(TestCase):
28 """Base test case for the IPC used on Windows."""37 """Base test case for the IPC used on Windows."""
@@ -30,12 +39,14 @@
30 def setUp(self):39 def setUp(self):
31 """Setup tests."""40 """Setup tests."""
32 super(PerspectiveBrokerTestCase, self).setUp()41 super(PerspectiveBrokerTestCase, self).setUp()
33 self.status = None42 self.config = Config(None, None)
34 self.events = None43 self.status = Status(None, None, None)
35 self.sync = None44 self.events = Events(None)
36 self.shares = None45 self.sync = SyncDaemon(None, None, None, None)
37 self.folders = None46 self.shares = Shares(None, None, None)
38 self.public_files = None47 self.folders = Folders(None, None)
48 self.public_files = PublicFiles(None, None)
49 self.fs = FileSystem(None, None)
3950
40class TestSignalBroadcaster(MockerTestCase):51class TestSignalBroadcaster(MockerTestCase):
41 """Test the signal brocaster code."""52 """Test the signal brocaster code."""
@@ -44,6 +55,7 @@
44 super(TestSignalBroadcaster, self).setUp()55 super(TestSignalBroadcaster, self).setUp()
45 self.client = self.mocker.mock()56 self.client = self.mocker.mock()
46 self.broad_caster = SignalBroadcaster()57 self.broad_caster = SignalBroadcaster()
58 self.broad_caster.clients.append(self.client)
4759
48 def test_remote_register_to_signals(self):60 def test_remote_register_to_signals(self):
49 """Assert that the client was added."""61 """Assert that the client was added."""
@@ -58,7 +70,8 @@
58 word = 'word'70 word = 'word'
59 signal_name = 'on_test'71 signal_name = 'on_test'
60 self.client.callRemote(signal_name, first, second, word=word)72 self.client.callRemote(signal_name, first, second, word=word)
61 self.broad_caster.emit_gisnal(signal_name, first, second, word=word)73 self.mocker.replay()
74 self.broad_caster.emit_signal(signal_name, first, second, word=word)
6275
6376
64class TestStatusEmitSignals(PerspectiveBrokerTestCase, MockerTestCase):77class TestStatusEmitSignals(PerspectiveBrokerTestCase, MockerTestCase):
@@ -103,7 +116,7 @@
103 self.mocker.result(status)116 self.mocker.result(status)
104 self.signal_method('on_status_changed', status)117 self.signal_method('on_status_changed', status)
105 self.mocker.replay()118 self.mocker.replay()
106 self.status.emit_status_changed()119 self.status.emit_status_changed(status)
107120
108 def test_emit_download_started(self):121 def test_emit_download_started(self):
109 """Emit DownloadStarted."""122 """Emit DownloadStarted."""
@@ -118,7 +131,7 @@
118 string_info = {'test':'2', 'name':'3'}131 string_info = {'test':'2', 'name':'3'}
119 self.signal_method('on_download_file_progress', download, string_info)132 self.signal_method('on_download_file_progress', download, string_info)
120 self.mocker.replay()133 self.mocker.replay()
121 self.status.emit_download_file_progress(download, test=1, name=2)134 self.status.emit_download_file_progress(download, test=2, name=3)
122135
123 def test_emit_download_finished(self):136 def test_emit_download_finished(self):
124 """Emit DownloadFinished."""137 """Emit DownloadFinished."""
@@ -126,7 +139,7 @@
126 string_info = {'test':'2', 'name':'3'}139 string_info = {'test':'2', 'name':'3'}
127 self.signal_method('on_download_finished', download, string_info)140 self.signal_method('on_download_finished', download, string_info)
128 self.mocker.replay()141 self.mocker.replay()
129 self.status.emit_download_finished(download, test=1, name=2)142 self.status.emit_download_finished(download, test=2, name=3)
130143
131 def test_emit_upload_started(self):144 def test_emit_upload_started(self):
132 """Emit UploadStarted."""145 """Emit UploadStarted."""
@@ -141,7 +154,7 @@
141 string_info = {'test':'2', 'name':'3'}154 string_info = {'test':'2', 'name':'3'}
142 self.signal_method('on_upload_file_progress', upload, string_info)155 self.signal_method('on_upload_file_progress', upload, string_info)
143 self.mocker.replay()156 self.mocker.replay()
144 self.status.emit_upload_file_progress(upload, test=1, name=2)157 self.status.emit_upload_file_progress(upload, test=2, name=3)
145158
146 def test_emit_upload_finished(self):159 def test_emit_upload_finished(self):
147 """Emit UploadFinished."""160 """Emit UploadFinished."""
@@ -149,7 +162,7 @@
149 string_info = {'test':'2', 'name':'3'}162 string_info = {'test':'2', 'name':'3'}
150 self.signal_method('on_upload_finished', upload, string_info)163 self.signal_method('on_upload_finished', upload, string_info)
151 self.mocker.replay()164 self.mocker.replay()
152 self.status.emit_upload_finished(upload, test=1, name=2)165 self.status.emit_upload_finished(upload, test=2, name=3)
153166
154 def test_emit_account_changed(self):167 def test_emit_account_changed(self):
155 """Emit AccountChanged."""168 """Emit AccountChanged."""
@@ -219,7 +232,7 @@
219232
220 def setUp(self):233 def setUp(self):
221 """Setup tests.""" 234 """Setup tests."""
222 super(TestEventsEmitSignals, self).setUp()235 super(TestSharesEmitSignals, self).setUp()
223 self.signal_method = self.mocker.mock()236 self.signal_method = self.mocker.mock()
224 self.shares.emit_signal = self.signal_method237 self.shares.emit_signal = self.signal_method
225 self.shares.syncdaemon_shares = self.mocker.mock()238 self.shares.syncdaemon_shares = self.mocker.mock()
@@ -256,6 +269,8 @@
256 self.get_share_dict(share)269 self.get_share_dict(share)
257 self.mocker.result(share_dict)270 self.mocker.result(share_dict)
258 self.signal_method('on_share_deleted_error', share_dict, error)271 self.signal_method('on_share_deleted_error', share_dict, error)
272 self.mocker.replay()
273 self.shares.emit_share_delete_error(share, error)
259274
260 def test_emit_free_space(self):275 def test_emit_free_space(self):
261 """Emit ShareChanged when free space changes """276 """Emit ShareChanged when free space changes """
@@ -269,14 +284,14 @@
269 self.mocker.result(shares)284 self.mocker.result(shares)
270 self.get_share_dict(share)285 self.get_share_dict(share)
271 self.mocker.result(share_dict)286 self.mocker.result(share_dict)
272 self.signal_method(share_dict)287 self.signal_method('on_share_changed', share_dict)
273 self.mocker.replay()288 self.mocker.replay()
274 self.shares.emit_free_space(share_id, free_bytes)289 self.shares.emit_free_space(share_id, free_bytes)
275290
276 def test_emit_share_created(self):291 def test_emit_share_created(self):
277 """Emit ShareCreated signal """292 """Emit ShareCreated signal """
278 share_info = 'info'293 share_info = 'info'
279 self.signal_method(share_info)294 self.signal_method('on_share_created', share_info)
280 self.mocker.replay()295 self.mocker.replay()
281 self.shares.emit_share_created(share_info)296 self.shares.emit_share_created(share_info)
282297
@@ -287,7 +302,7 @@
287 info = 'info'302 info = 'info'
288 self.shares.syncdaemon_shares.get_create_error_share_info(share_info)303 self.shares.syncdaemon_shares.get_create_error_share_info(share_info)
289 self.mocker.result(info)304 self.mocker.result(info)
290 self.signal_method(info, error)305 self.signal_method('on_share_create_error', info, error)
291 self.mocker.replay()306 self.mocker.replay()
292 self.shares.emit_share_create_error(share_info, error)307 self.shares.emit_share_create_error(share_info, error)
293308
@@ -297,7 +312,7 @@
297 answer = 'yes'312 answer = 'yes'
298 error = 'boom'313 error = 'boom'
299 answer_info = dict(volume_id=share_id, answer=answer, error=error)314 answer_info = dict(volume_id=share_id, answer=answer, error=error)
300 self.signal_method(answer_info)315 self.signal_method('on_share_answer_response', answer_info)
301 self.mocker.replay()316 self.mocker.replay()
302 self.shares.emit_share_answer_response(share_id, answer, error)317 self.shares.emit_share_answer_response(share_id, answer, error)
303318
@@ -310,7 +325,7 @@
310 self.mocker.result(share)325 self.mocker.result(share)
311 self.get_share_dict(share)326 self.get_share_dict(share)
312 self.mocker.result(share_dict)327 self.mocker.result(share_dict)
313 self.signal_method(share_dict)328 self.signal_method('on_new_share', share_dict)
314 self.mocker.replay()329 self.mocker.replay()
315 self.shares.emit_new_share(share_id)330 self.shares.emit_new_share(share_id)
316331
@@ -320,7 +335,7 @@
320 share_dict = {'share' : 'id'}335 share_dict = {'share' : 'id'}
321 self.get_share_dict(share)336 self.get_share_dict(share)
322 self.mocker.result(share_dict)337 self.mocker.result(share_dict)
323 self.signal_method(share_dict)338 self.signal_method('on_share_subscribed', share_dict)
324 self.mocker.replay()339 self.mocker.replay()
325 self.shares.emit_share_subscribed(share)340 self.shares.emit_share_subscribed(share)
326341
@@ -328,7 +343,8 @@
328 """Emit the ShareSubscribeError signal"""343 """Emit the ShareSubscribeError signal"""
329 share_id = 'id'344 share_id = 'id'
330 error = 'error'345 error = 'error'
331 self.signal_method({'id': share_id}, str(error))346 self.signal_method('on_share_subscribed_error',
347 {'id': share_id}, str(error))
332 self.mocker.replay()348 self.mocker.replay()
333 self.shares.emit_share_subscribe_error(share_id, error)349 self.shares.emit_share_subscribe_error(share_id, error)
334350
@@ -338,7 +354,7 @@
338 share_dict = {'share':'id'}354 share_dict = {'share':'id'}
339 self.get_share_dict(share)355 self.get_share_dict(share)
340 self.mocker.result(share_dict) 356 self.mocker.result(share_dict)
341 self.signal_method(share_dict)357 self.signal_method('on_share_unsubscribed', share_dict)
342 self.mocker.replay()358 self.mocker.replay()
343 self.shares.emit_share_unsubscribed(share)359 self.shares.emit_share_unsubscribed(share)
344360
@@ -346,7 +362,7 @@
346 """Emit the ShareUnSubscribeError signal"""362 """Emit the ShareUnSubscribeError signal"""
347 share_id = 'id'363 share_id = 'id'
348 error = 'error'364 error = 'error'
349 self.signal_method({'id': share_id}, str(error))365 self.signal_method('on_share_unsubscribed_error',{'id': share_id}, str(error))
350 self.mocker.replay()366 self.mocker.replay()
351 self.shares.emit_share_unsubscribe_error(share_id, error)367 self.shares.emit_share_unsubscribe_error(share_id, error)
352368
@@ -358,6 +374,7 @@
358 """Setup tests."""374 """Setup tests."""
359 super(TestFoldersEmitSignals, self).setUp()375 super(TestFoldersEmitSignals, self).setUp()
360 self.signal_method = self.mocker.mock()376 self.signal_method = self.mocker.mock()
377 self.folders.emit_signal = self.signal_method
361 self.get_udf_dict = self.mocker.replace(378 self.get_udf_dict = self.mocker.replace(
362 'ubuntuone.syncdaemon.interaction_interfaces.get_udf_dict')379 'ubuntuone.syncdaemon.interaction_interfaces.get_udf_dict')
363380
@@ -367,7 +384,7 @@
367 udf_dict = {'udf':'id'}384 udf_dict = {'udf':'id'}
368 self.get_udf_dict(folder)385 self.get_udf_dict(folder)
369 self.mocker.result(udf_dict)386 self.mocker.result(udf_dict)
370 self.signal_method(udf_dict)387 self.signal_method('on_folder_deleted', udf_dict)
371 self.mocker.replay()388 self.mocker.replay()
372 self.folders.emit_folder_deleted(folder)389 self.folders.emit_folder_deleted(folder)
373390
@@ -378,7 +395,7 @@
378 udf_dict = {'udf':'id'}395 udf_dict = {'udf':'id'}
379 self.get_udf_dict(folder)396 self.get_udf_dict(folder)
380 self.mocker.result(udf_dict)397 self.mocker.result(udf_dict)
381 self.signal_method(udf_dict, str(error))398 self.signal_method('on_folder_delete_error', udf_dict, str(error))
382 self.mocker.replay()399 self.mocker.replay()
383 self.folders.emit_folder_delete_error(folder, error)400 self.folders.emit_folder_delete_error(folder, error)
384401
@@ -388,7 +405,7 @@
388 udf_dict = {'udf':'id'}405 udf_dict = {'udf':'id'}
389 self.get_udf_dict(folder)406 self.get_udf_dict(folder)
390 self.mocker.result(udf_dict)407 self.mocker.result(udf_dict)
391 self.signal_method(udf_dict)408 self.signal_method('on_folder_subscribed', udf_dict)
392 self.mocker.replay()409 self.mocker.replay()
393 self.folders.emit_folder_subscribed(folder)410 self.folders.emit_folder_subscribed(folder)
394411
@@ -396,7 +413,8 @@
396 """Emit the FolderSubscribeError signal"""413 """Emit the FolderSubscribeError signal"""
397 folder_id = 'id'414 folder_id = 'id'
398 error = 'error'415 error = 'error'
399 self.signal_method({'id':folder_id}, str(error))416 self.signal_method('on_folder_subscribe_error',
417 {'id':folder_id}, str(error))
400 self.mocker.replay()418 self.mocker.replay()
401 self.folders.emit_folder_subscribe_error(folder_id, error)419 self.folders.emit_folder_subscribe_error(folder_id, error)
402420
@@ -406,7 +424,7 @@
406 udf_dict = {'udf':'id'}424 udf_dict = {'udf':'id'}
407 self.get_udf_dict(folder)425 self.get_udf_dict(folder)
408 self.mocker.result(udf_dict)426 self.mocker.result(udf_dict)
409 self.signal_method(udf_dict)427 self.signal_method('on_folder_unsubscribed', udf_dict)
410 self.mocker.replay()428 self.mocker.replay()
411 self.folders.emit_folder_unsubscribed(folder)429 self.folders.emit_folder_unsubscribed(folder)
412430
@@ -414,7 +432,8 @@
414 """Emit the FolderUnSubscribeError signal"""432 """Emit the FolderUnSubscribeError signal"""
415 folder_id = 'id'433 folder_id = 'id'
416 error = 'error'434 error = 'error'
417 self.signal_method({'id':folder_id}, str(error))435 self.signal_method('on_folder_unsubscribe_error',
436 {'id':folder_id}, str(error))
418 self.mocker.replay()437 self.mocker.replay()
419 self.folders.emit_folder_unsubscribe_error(folder_id, error)438 self.folders.emit_folder_unsubscribe_error(folder_id, error)
420439
@@ -426,6 +445,7 @@
426 """Setup tests."""445 """Setup tests."""
427 super(TestPublicFilesEmitSignals, self).setUp()446 super(TestPublicFilesEmitSignals, self).setUp()
428 self.signal_method = self.mocker.mock()447 self.signal_method = self.mocker.mock()
448 self.public_files.emit_signal = self.signal_method
429 self.public_files.syncdaemon_public_files = self.mocker.mock()449 self.public_files.syncdaemon_public_files = self.mocker.mock()
430 self.bool_str = self.mocker.replace(450 self.bool_str = self.mocker.replace(
431 'ubuntuone.syncdaemon.interaction_interfaces.bool_str')451 'ubuntuone.syncdaemon.interaction_interfaces.bool_str')
@@ -441,7 +461,8 @@
441 self.mocker.result(path)461 self.mocker.result(path)
442 self.bool_str(is_public)462 self.bool_str(is_public)
443 self.mocker.result('True')463 self.mocker.result('True')
444 self.signal_method(dict(share_id=share_id, node_id=node_id,464 self.signal_method('on_public_access_changed',
465 dict(share_id=share_id, node_id=node_id,
445 is_public='True', public_url=public_url,466 is_public='True', public_url=public_url,
446 path=path))467 path=path))
447 self.mocker.replay()468 self.mocker.replay()
@@ -456,7 +477,8 @@
456 path = 'path'477 path = 'path'
457 self.public_files.syncdaemon_public_files.get_path(share_id, node_id)478 self.public_files.syncdaemon_public_files.get_path(share_id, node_id)
458 self.mocker.result(path)479 self.mocker.result(path)
459 self.signal_method(dict(share_id=share_id, node_id=node_id, path=path),480 self.signal_method('on_public_access_change_error',
481 dict(share_id=share_id, node_id=node_id, path=path),
460 error)482 error)
461 self.mocker.replay()483 self.mocker.replay()
462 self.public_files.emit_public_access_change_error(share_id, node_id,484 self.public_files.emit_public_access_change_error(share_id, node_id,
@@ -474,14 +496,14 @@
474 public_url=public_url, path=path)]496 public_url=public_url, path=path)]
475 self.public_files.syncdaemon_public_files.get_path(volume_id, node_id)497 self.public_files.syncdaemon_public_files.get_path(volume_id, node_id)
476 self.mocker.result(path)498 self.mocker.result(path)
477 self.signal_method(files)499 self.signal_method('on_public_files_list',files)
478 self.mocker.replay()500 self.mocker.replay()
479 self.public_files.emit_public_files_list(public_files)501 self.public_files.emit_public_files_list(public_files)
480502
481 def test_emit_public_files_list_error(self):503 def test_emit_public_files_list_error(self):
482 """Emit the PublicFilesListError signal."""504 """Emit the PublicFilesListError signal."""
483 error = 'error'505 error = 'error'
484 self.signal_method(error)506 self.signal_method('on_public_files_list_error', error)
485 self.mocker.replay()507 self.mocker.replay()
486 self.public_files.emit_public_files_list_error(error)508 self.public_files.emit_public_files_list_error(error)
487509
488510
=== modified file 'ubuntuone/platform/windows/dbus_interface.py'
--- ubuntuone/platform/windows/dbus_interface.py 2011-02-14 11:56:44 +0000
+++ ubuntuone/platform/windows/dbus_interface.py 2011-02-23 09:03:55 +0000
@@ -17,9 +17,811 @@
17# with this program. If not, see <http://www.gnu.org/licenses/>.17# with this program. If not, see <http://www.gnu.org/licenses/>.
18"""IPC implementation that replaces Dbus."""18"""IPC implementation that replaces Dbus."""
1919
20import logging
21
22from twisted.spread.pb import Referenceable
23from ubuntuone.syncdaemon.interaction_interfaces import (
24 bool_str,
25 get_share_dict,
26 get_udf_dict,
27 SyncdaemonConfig,
28 SyncdaemonEvents,
29 SyncdaemonFileSystem,
30 SyncdaemonFolders,
31 SyncdaemonPublicFiles,
32 SyncdaemonService,
33 SyncdaemonShares,
34 SyncdaemonStatus
35)
36
37logger = logging.getLogger("ubuntuone.SyncDaemon.Pb")
38
39
40def remote_handler(handler):
41 if handler:
42 handler = lambda x: handler.callRemote('execute', x)
43 return handler
44
45class RemoteMeta(type):
46 """Append remte_ to the remote methods.
47
48 Remote has to be appended to the remote method to work over pb but this
49 names cannot be used since the other platforms do not expect the remote
50 prefix. This metaclass create those prefix so that the methods can be
51 correctly called.
52 """
53
54 def __new__(cls, name, bases, attrs):
55 remote_calls = attrs.get('remote_calls', None)
56 if remote_calls:
57 for current in remote_calls:
58 attrs['remote_' + current] = attrs[current]
59 return super(RemoteMeta, cls).__new__(cls, name, bases, attrs)
60
61
62class SignalBroadcaster(object):
63 """Object that allows to emit signals to clients over the IPC."""
64
65 def __init__(self):
66 """Create a new instance."""
67 self.clients = []
68
69 def remote_register_to_signals(self, client):
70 """Allow a client to register to a signal."""
71 self.clients.append(client)
72
73 def emit_signal(self, signal_name, *args, **kwargs):
74 """Emit the given signal to the clients."""
75 for current_client in self.clients:
76 try:
77 current_client.callRemote(signal_name, *args, **kwargs)
78 except:
79 logger.warn('Could not emit signal to %s', current_client)
80
81class Status(Referenceable, SignalBroadcaster):
82 """ Represent the status of the syncdaemon """
83
84 __metaclass__ = RemoteMeta
85
86 # calls that will be accessible remotly
87 remote_calls = [
88 'current_status',
89 'current_downloads',
90 'waiting_metadata',
91 'waiting_content',
92 'schedule_next',
93 'current_uploads',
94 ]
95
96 def __init__(self, main, action_queue, fs_manager):
97 """ Creates the instance."""
98 super(Status, self).__init__()
99 self.syncdaemon_status = SyncdaemonStatus(main, action_queue,
100 fs_manager)
101
102 def current_status(self):
103 """ return the current status of the system, one of: local_rescan,
104 offline, trying_to_connect, server_rescan or online.
105 """
106 logger.debug('called current_status')
107 return self.syncdaemon_status.current_status()
108
109 def current_downloads(self):
110 """Return a list of files with a download in progress."""
111 logger.debug('called current_downloads')
112 return self.syncdaemon_status.current_downloads()
113
114 def waiting_metadata(self):
115 """Return a list of the operations in the meta-queue.
116
117 As we don't have meta-queue anymore, this is faked.
118 """
119 logger.debug('called waiting_metadata')
120 return self.syncdaemon_status.waiting_metadata()
121
122 def waiting_content(self):
123 """Return a list of files that are waiting to be up- or downloaded.
124
125 As we don't have content-queue anymore, this is faked.
126 """
127 logger.debug('called waiting_content')
128 return self.syncdaemon_status.waiting_content()
129
130 def schedule_next(self, share_id, node_id):
131 """
132 Make the command on the given share and node be next in the
133 queue of waiting commands.
134 """
135 logger.debug('called schedule_next')
136 self.syncdaemon_status.schedule_next(share_id, node_id)
137
138 def current_uploads(self):
139 """ return a list of files with a upload in progress """
140 logger.debug('called current_uploads')
141 return self.syncdaemon_status.current_uploads()
142
143 def emit_content_queue_changed(self):
144 """Emit ContentQueueChanged."""
145 self.emit_signal('on_content_queue_changed')
146
147 def emit_invalid_name(self, dirname, filename):
148 """Emit InvalidName."""
149 self.emit_signal('on_invalid_name', unicode(dirname), str(filename))
150
151 def emit_broken_node(self, volume_id, node_id, mdid, path):
152 """Emit BrokenNode."""
153 if mdid is None:
154 mdid = ''
155 if path is None:
156 path = ''
157 self.emit_signal('on_broken_node', volume_id, node_id, mdid,
158 path.decode('utf8'))
159
160 def emit_status_changed(self, state):
161 """Emit StatusChanged."""
162 self.emit_signal('on_status_changed',
163 self.syncdaemon_status.current_status())
164
165 def emit_download_started(self, download):
166 """Emit DownloadStarted."""
167 self.emit_signal('on_download_started', download)
168
169 def emit_download_file_progress(self, download, **info):
170 """Emit DownloadFileProgress."""
171 for k, v in info.copy().items():
172 info[str(k)] = str(v)
173 self.emit_signal('on_download_file_progress', download, info)
174
175 def emit_download_finished(self, download, **info):
176 """Emit DownloadFinished."""
177 for k, v in info.copy().items():
178 info[str(k)] = str(v)
179 self.emit_signal('on_download_finished', download, info)
180
181 def emit_upload_started(self, upload):
182 """Emit UploadStarted."""
183 self.emit_signal('on_upload_started', upload)
184
185 def emit_upload_file_progress(self, upload, **info):
186 """Emit UploadFileProgress."""
187 for k, v in info.copy().items():
188 info[str(k)] = str(v)
189 self.emit_signal('on_upload_file_progress', upload, info)
190
191 def emit_upload_finished(self, upload, **info):
192 """Emit UploadFinished."""
193 for k, v in info.copy().items():
194 info[str(k)] = str(v)
195 self.emit_signal('on_upload_finished', upload, info)
196
197 def emit_account_changed(self, account_info):
198 """Emit AccountChanged."""
199 info_dict = {'purchased_bytes': unicode(account_info.purchased_bytes)}
200 self.emit_signal('on_account_changed', info_dict)
201
202 def emit_metaqueue_changed(self):
203 """Emit MetaQueueChanged."""
204 self.emit_signal('on_metaqueue_changed')
205
20206
21class Status(object):
22 """ Represent the status of the syncdaemon """
23
24 def __init__(self, bus_name, dbus_iface, syncdaemon_status=None):
25 pass
26\ No newline at end of file207\ No newline at end of file
208class Events(Referenceable, SignalBroadcaster):
209 """The events of the system translated to ipc signals."""
210
211 __metaclass__ = RemoteMeta
212
213 # calls that will be accessible remotly
214 remote_calls = [
215 'push_event',
216 ]
217
218 def __init__(self, event_queue):
219 super(Events, self).__init__()
220 self.events = SyncdaemonEvents(event_queue)
221
222 def emit_event(self, event):
223 """Emit the signal."""
224 event_dict = {}
225 for key, value in event.iteritems():
226 event_dict[str(key)] = str(value)
227 self.emit_signal('on_event', event_dict)
228
229 def push_event(self, event_name, args):
230 """Push an event to the event queue."""
231 logger.debug('push_event: %r with %r', event_name, args)
232 self.events.push_event(event_name, args)
233
234
235class SyncDaemon(Referenceable, SignalBroadcaster):
236 """ The Daemon ipc interface. """
237
238 __metaclass__ = RemoteMeta
239
240 # calls that will be accessible remotly
241 remote_calls = [
242 'connect',
243 'disconnect',
244 'get_rootdir',
245 'get_sharesdir',
246 'get_sharesdir_link',
247 'wait_for_nirvana',
248 'quit',
249 'rescan_from_scratch',
250 ]
251
252 def __init__(self, root, main, volume_manager, action_queue):
253 """ Creates the instance."""
254 self.service = SyncdaemonService(root, main, volume_manager,
255 action_queue)
256 self.clients = []
257
258 def connect(self):
259 """ Connect to the server. """
260 logger.debug('connect requested')
261 self.service.connect()
262
263 def disconnect(self):
264 """ Disconnect from the server. """
265 logger.debug('disconnect requested')
266 self.service.disconnect()
267
268 def get_rootdir(self):
269 """ Returns the root dir/mount point. """
270 logger.debug('called get_rootdir')
271 return self.service.get_rootdir()
272
273 def get_sharesdir(self):
274 """ Returns the shares dir/mount point. """
275 logger.debug('called get_sharesdir')
276 return self.service.get_sharesdir()
277
278 def get_sharesdir_link(self):
279 """ Returns the shares dir/mount point. """
280 logger.debug('called get_sharesdir_link')
281 return self.service.get_sharesdir_link()
282
283 def wait_for_nirvana(self, last_event_interval,
284 reply_handler=None, error_handler=None):
285 """ call the reply handler when there are no more
286 events or transfers.
287 """
288 logger.debug('called wait_for_nirvana')
289 return self.service.wait_for_nirvana(last_event_interval,
290 remote_handler(reply_handler), remote_handler(error_handler))
291
292 def quit(self, reply_handler=None, error_handler=None):
293 """ shutdown the syncdaemon. """
294 logger.debug('Quit requested')
295 self.service.quit(remote_handler(reply_handler),
296 remote_handler(error_handler))
297
298 def rescan_from_scratch(self, volume_id):
299 """Request a rescan from scratch of the volume with volume_id."""
300 self.service.rescan_from_scratch(volume_id)
301
302 def emit_root_mismatch(self, root_id, new_root_id):
303 """Emit RootMismatch signal."""
304 self.emit_signal('on_root_mismatch', root_id, new_root_id)
305
306 def emit_quota_exceeded(self, volume_dict):
307 """Emit QuotaExceeded signal."""
308 self.emit_signal('on_quota_exceeded', volume_dict)
309
310
311class FileSystem(object, Referenceable):
312 """ An ipc interface to the FileSystem Manager. """
313
314 __metaclass__ = RemoteMeta
315
316 # calls that will be accessible remotly
317 remote_calls = [
318 'get_metadata',
319 'get_metadata_by_node',
320 'get_metadata_and_quick_tree_synced',
321 'get_dirty_nodes',
322 ]
323
324 def __init__(self, fs_manager, action_queue):
325 """ Creates the instance. """
326 super(FileSystem, self).__init__()
327 self.syncdaemon_filesystem = SyncdaemonFileSystem(fs_manager,
328 action_queue)
329
330 def get_metadata(self, path):
331 """Return the metadata (as a dict) for the specified path."""
332 logger.debug('get_metadata by path: %r', path)
333 return self.syncdaemon_filesystem.get_metadata(path)
334
335 def get_metadata_by_node(self, share_id, node_id):
336 """Return the metadata (as a dict) for the specified share/node."""
337 logger.debug('get_metadata by share: %r node: %r', share_id, node_id)
338 return self.syncdaemon_filesystem.get_metadata_by_node(share_id,
339 node_id)
340
341 def get_metadata_and_quick_tree_synced(self, path):
342 """ returns the dict with the attributes of the metadata for
343 the specified path, including the quick subtree status.
344 """
345 logger.debug('get_metadata_and_quick_tree_synced: %r', path)
346 return self.syncdaemon_filesystem.get_metadata_and_quick_tree_synced(
347 path)
348
349 def get_dirty_nodes(self):
350 """Rerturn a list of dirty nodes."""
351 return self.syncdaemon_filesystem.get_dirty_nodes()
352
353
354class Shares(Referenceable, SignalBroadcaster):
355 """A ipc interface to interact with shares."""
356
357 __metaclass__ = RemoteMeta
358
359 # calls that will be accessible remotly
360 remote_calls = [
361 'get_shares',
362 'accept_share',
363 'reject_share',
364 'delete_share',
365 'subscribe',
366 'unsubscribe',
367 'create_share',
368 'create_shares',
369 'refresh_shares',
370 'get_shared',
371 ]
372
373 def __init__(self, bus_name, fs_manager, volume_manager):
374 """Create the instance."""
375 self.syncdaemon_shares = SyncdaemonShares(fs_manager, volume_manager)
376
377 def get_shares(self):
378 """Return a list of dicts, each dict represents a share."""
379 logger.debug('called get_shares')
380 return self.syncdaemon_shares.get_shares()
381
382 def accept_share(self, share_id, reply_handler=None, error_handler=None):
383 """Accept a share.
384
385 A ShareAnswerOk|Error signal will be fired in the future as a
386 success/failure indicator.
387
388 """
389 logger.debug('accept_share: %r', share_id)
390 self.syncdaemon_shares.accept_share(share_id,
391 remote_handler(reply_handler), remote_handler(error_handler))
392
393 def reject_share(self, share_id, reply_handler=None, error_handler=None):
394 """Reject a share."""
395 logger.debug('reject_share: %r', share_id)
396 self.syncdaemon_shares.reject_share(share_id,
397 remote_handler(reply_handler),
398 remote_handler(error_handler))
399
400 def delete_share(self, share_id):
401 """Delete a Share, both kinds: "to me" and "from me"."""
402 logger.debug('delete_share: %r', share_id)
403 try:
404 self.syncdaemon_shares.delete_share(share_id)
405 except Exception, e:
406 logger.exception('Error while deleting share: %r', share_id)
407 self.emit_share_delete_error({'volume_id':share_id}, str(e))
408 # propagate the error
409 raise
410
411 def subscribe(self, share_id):
412 """Subscribe to the specified share."""
413 logger.debug('Shares.subscribe: %r', share_id)
414 self.syncdaemon_shares.subscribe(share_id)
415
416 def unsubscribe(self, share_id):
417 """Unsubscribe from the specified share."""
418 logger.debug('Shares.unsubscribe: %r', share_id)
419 self.syncdaemon_shares.unsubscribe(share_id)
420
421 def emit_share_changed(self, message, share):
422 """ emits ShareChanged or ShareDeleted signal for the share
423 notification.
424 """
425 logger.debug('emit_share_changed: message %r, share %r.',
426 message, share)
427 if message == 'deleted':
428 self.emit_signal('on_share_deleted', get_share_dict(share))
429 elif message == 'changed':
430 self.emit_signal('on_share_changed', get_share_dict(share))
431
432 def emit_share_delete_error(self, share, error):
433 """Emits ShareDeleteError signal."""
434 logger.info('emit_share_delete_error: share %r, error %r.',
435 share, error)
436 self.emit_signal('on_share_deleted_error',
437 get_share_dict(share), error)
438
439 def emit_free_space(self, share_id, free_bytes):
440 """ emits ShareChanged when free space changes """
441 if share_id in self.syncdaemon_shares.shares:
442 share = self.syncdaemon_shares.shares[share_id]
443 share_dict = get_share_dict(share)
444 share_dict['free_bytes'] = unicode(free_bytes)
445 self.emit_signal('on_share_changed',
446 share_dict)
447
448 def create_share(self, path, username, name, access_level):
449 """ Share a subtree to the user identified by username.
450
451 @param path: that path to share (the root of the subtree)
452 @param username: the username to offer the share to
453 @param name: the name of the share
454 @param access_level: 'View' or 'Modify'
455 """
456 logger.debug('create share: %r, %r, %r, %r',
457 path, username, name, access_level)
458 self.syncdaemon_shares.create_share(path, username, name, access_level)
459
460 def create_shares(self, path, usernames, name, access_level):
461 """Share a subtree with several users at once.
462
463 @param path: that path to share (the root of the subtree)
464 @param usernames: the user names to offer the share to
465 @param name: the name of the share
466 @param access_level: 'View' or 'Modify'
467 """
468 logger.debug('create shares: %r, %r, %r, %r',
469 path, usernames, name, access_level)
470 for user in usernames:
471 self.syncdaemon_shares.create_share(path, user, name,
472 access_level)
473
474 def emit_share_created(self, share_info):
475 """ emits ShareCreated signal """
476 logger.debug('emit_share_created: share_info %r.', share_info)
477 self.emit_signal('on_share_created',
478 share_info)
479
480 def emit_share_create_error(self, share_info, error):
481 """Emit ShareCreateError signal."""
482 info = self.syncdaemon_shares.get_create_error_share_info(share_info)
483 logger.info('emit_share_create_error: share_info %r, error %r.',
484 info, error)
485 self.emit_signal('on_share_create_error', info, error)
486
487 def refresh_shares(self):
488 """ Refresh the share list, requesting it to the server. """
489 self.syncdaemon_shares.refresh_shares()
490
491 def get_shared(self):
492 """ returns a list of dicts, each dict represents a shared share.
493 A share might not have the path set, as we might be still fetching the
494 nodes from the server. In this cases the path is ''
495 """
496 logger.debug('called get_shared')
497 return self.syncdaemon_shares.get_shared()
498
499 def emit_share_answer_response(self, share_id, answer, error=None):
500 """Emits ShareAnswerResponse signal."""
501 answer_info = dict(volume_id=share_id, answer=answer)
502 if error:
503 answer_info['error'] = error
504 logger.debug('emit_share_answer_response: answer_info %r.', answer_info)
505 self.emit_signal('on_share_answer_response', answer_info)
506
507 def emit_new_share(self, share_id):
508 """Emits NewShare signal."""
509 share = self.syncdaemon_shares.get_volume(share_id)
510 logger.debug('emit_new_share: share_id %r.', share_id)
511 self.emit_signal('on_new_share', get_share_dict(share))
512
513 def emit_share_subscribed(self, share):
514 """Emit the ShareSubscribed signal"""
515 self.emit_signal('on_share_subscribed', get_share_dict(share))
516
517 def emit_share_subscribe_error(self, share_id, error):
518 """Emit the ShareSubscribeError signal"""
519 self.emit_signal('on_share_subscribed_error', {'id': share_id},
520 str(error))
521
522 def emit_share_unsubscribed(self, share):
523 """Emit the ShareUnSubscribed signal"""
524 self.emit_signal('on_share_unsubscribed', get_share_dict(share))
525
526 def emit_share_unsubscribe_error(self, share_id, error):
527 """Emit the ShareUnSubscribeError signal"""
528 self.emit_signal('on_share_unsubscribed_error', {'id': share_id},
529 str(error))
530
531class Config(object, Referenceable):
532 """ The Syncdaemon config/settings ipc interface. """
533
534 __metaclass__ = RemoteMeta
535
536 # calls that will be accessible remotly
537 remote_calls = [
538 'get_throttling_limits',
539 'set_throttling_limits',
540 'enable_bandwidth_throttling',
541 'disable_bandwidth_throttling',
542 'bandwidth_throttling_enabled',
543 'udf_autosubscribe_enabled',
544 'enable_udf_autosubscribe',
545 'share_autosubscribe_enabled',
546 'enable_share_autosubscribe',
547 'disable_share_autosubscribe',
548 'set_files_sync_enabled',
549 'autoconnect_enabled',
550 'set_autoconnect_enabled',
551 'show_all_notifications_enabled',
552 'enable_show_all_notifications',
553 'disable_show_all_notifications'
554 ]
555
556 def __init__(self, main, action_queue):
557 """ Creates the instance."""
558 super(Config, self).__init__()
559 self.syncdaemon_config = SyncdaemonConfig(main, action_queue)
560
561 def get_throttling_limits(self, reply_handler=None, error_handler=None):
562 """Get the read/write limit from AQ and return a dict.
563 Returns a dict(download=int, upload=int), if int is -1 the value isn't
564 configured.
565 The values are bytes/second
566 """
567 logger.debug("called get_throttling_limits")
568 return self.syncdaemon_config.get_throttling_limits(
569 remote_handler(reply_handler), remote_handler(error_handler))
570
571 def set_throttling_limits(self, download, upload,
572 reply_handler=None, error_handler=None):
573 """Set the read and write limits. The expected values are bytes/sec."""
574 logger.debug("called set_throttling_limits")
575 self.syncdaemon_config.set_throttling_limits(download, upload,
576 remote_handler(reply_handler), remote_handler(error_handler))
577
578 def enable_bandwidth_throttling(self, reply_handler=None,
579 error_handler=None):
580 """Enable bandwidth throttling."""
581 self.syncdaemon_config.enable_bandwidth_throttling(
582 remote_handler(reply_handler), remote_handler(error_handler))
583
584 def disable_bandwidth_throttling(self, reply_handler=None,
585 error_handler=None):
586 """Disable bandwidth throttling."""
587 self.syncdaemon_config.disable_bandwidth_throttling(
588 remote_handler(reply_handler), remote_handler(error_handler))
589
590 def bandwidth_throttling_enabled(self, reply_handler=None,
591 error_handler=None):
592 """Returns True (actually 1) if bandwidth throttling is enabled and
593 False (0) otherwise.
594 """
595 return self.syncdaemon_config.bandwidth_throttling_enabled(
596 remote_handler(reply_handler), remote_handler(error_handler))
597
598 def udf_autosubscribe_enabled(self):
599 """Return the udf_autosubscribe config value."""
600 return self.syncdaemon_config.udf_autosubscribe_enabled()
601
602 def enable_udf_autosubscribe(self):
603 """Enable UDF autosubscribe."""
604 self.syncdaemon_config.enable_udf_autosubscribe()
605
606 def disable_udf_autosubscribe(self):
607 """Enable UDF autosubscribe."""
608 self.syncdaemon_config.disable_udf_autosubscribe()
609
610 def share_autosubscribe_enabled(self):
611 """Return the share_autosubscribe config value."""
612 return self.syncdaemon_config.share_autosubscribe_enabled()
613
614 def enable_share_autosubscribe(self):
615 """Enable UDF autosubscribe."""
616 self.syncdaemon_config.enable_share_autosubscribe()
617
618 def disable_share_autosubscribe(self):
619 """Enable UDF autosubscribe."""
620 self.syncdaemon_config.disable_share_autosubscribe()
621
622 def set_files_sync_enabled(self, enabled):
623 """Enable/disable file sync service."""
624 logger.debug('called set_files_sync_enabled %d', enabled)
625 self.syncdaemon_config.set_files_sync_enabled(enabled)
626
627 def files_sync_enabled(self):
628 """Return the files_sync_enabled config value."""
629 logger.debug('called files_sync_enabled')
630 return self.syncdaemon_config.files_sync_enabled()
631
632 def autoconnect_enabled(self):
633 """Return the autoconnect config value."""
634 return self.syncdaemon_config.autoconnect_enabled()
635
636 def set_autoconnect_enabled(self, enabled):
637 """Enable syncdaemon autoconnect."""
638 self.syncdaemon_config.set_autoconnect_enabled(enabled)
639
640 def show_all_notifications_enabled(self):
641 """Return the show_all_notifications config value."""
642 return self.syncdaemon_config.show_all_notifications_enabled()
643
644 def enable_show_all_notifications(self):
645 """Enable showing all notifications."""
646 self.syncdaemon_config.enable_show_all_notifications()
647
648 def disable_show_all_notifications(self):
649 """Disable showing all notifications."""
650 self.syncdaemon_config.disable_show_all_notifications()
651
652
653class Folders(Referenceable, SignalBroadcaster):
654 """An interface to interact with User Defined Folders"""
655
656 __metaclass__ = RemoteMeta
657
658 # calls that will be accessible remotly
659 remote_calls = [
660 'create',
661 'delete',
662 'get_folders',
663 'subscribe',
664 'unsubscribe',
665 'get_info',
666 'refresh_volumes',
667 ]
668
669 def __init__(self, volume_manager, fs_manager):
670 """Creates the instance."""
671 super(Folders, self).__init__()
672 self.syncdaemon_folders = SyncdaemonFolders(volume_manager, fs_manager)
673
674 def create(self, path):
675 """Create a user defined folder in the specified path."""
676 logger.debug('Folders.create: %r', path)
677 try:
678 self.syncdaemon_folders.create(path)
679 except Exception, e:
680 logger.exception('Error while creating udf: %r', path)
681 self.emit_folder_create_error(path, str(e))
682
683 def delete(self, folder_id):
684 """Delete the folder specified by folder_id"""
685 from ubuntuone.syncdaemon.volume_manager import VolumeDoesNotExist
686 logger.debug('Folders.delete: %r', folder_id)
687 try:
688 self.syncdaemon_folders.delete(folder_id)
689 except VolumeDoesNotExist, e:
690 self.emit_folder_delete_error(folder_id, e)
691 except Exception, e:
692 logger.exception('Error while deleting volume: %r', folder_id)
693 self.emit_folder_delete_error(folder_id, e)
694
695 def get_folders(self):
696 """Return the list of folders (a list of dicts)"""
697 logger.debug('Folders.get_folders')
698 return self.syncdaemon_folders.get_folders()
699
700 def subscribe(self, folder_id):
701 """Subscribe to the specified folder"""
702 logger.debug('Folders.subscribe: %r', folder_id)
703 self.syncdaemon_folders.subscribe(folder_id)
704
705 def unsubscribe(self, folder_id):
706 """Unsubscribe from the specified folder"""
707 logger.debug('Folders.unsubscribe: %r', folder_id)
708 self.syncdaemon_folders.unsubscribe(folder_id)
709
710 def get_info(self, path):
711 """Returns a dict containing the folder information."""
712 logger.debug('Folders.get_info: %r', path)
713 return self.syncdaemon_folders.get_info(path)
714
715 def refresh_volumes(self):
716 """Refresh the volumes list, requesting it to the server."""
717 self.syncdaemon_folders.refresh_volumes()
718
719 def emit_folder_created(self, folder):
720 """Emit the FolderCreated signal"""
721 udf_dict = get_udf_dict(folder)
722 self.emit_signal('on_folder_created', udf_dict)
723
724 def emit_folder_create_error(self, path, error):
725 """Emit the FolderCreateError signal"""
726 info = dict(path=path.decode('utf-8'))
727 self.emit_signal('on_folder_create_error', info, str(error))
728
729 def emit_folder_deleted(self, folder):
730 """Emit the FolderCreated signal"""
731 udf_dict = get_udf_dict(folder)
732 self.emit_signal('on_folder_deleted', udf_dict)
733
734 def emit_folder_delete_error(self, folder, error):
735 """Emit the FolderCreateError signal"""
736 udf_dict = get_udf_dict(folder)
737 self.emit_signal('on_folder_delete_error', udf_dict, str(error))
738
739 def emit_folder_subscribed(self, folder):
740 """Emit the FolderSubscribed signal"""
741 udf_dict = get_udf_dict(folder)
742 self.emit_signal('on_folder_subscribed', udf_dict)
743
744 def emit_folder_subscribe_error(self, folder_id, error):
745 """Emit the FolderSubscribeError signal"""
746 self.emit_signal('on_folder_subscribe_error', {'id':folder_id},
747 str(error))
748
749 def emit_folder_unsubscribed(self, folder):
750 """Emit the FolderUnSubscribed signal"""
751 udf_dict = get_udf_dict(folder)
752 self.emit_signal('on_folder_unsubscribed', udf_dict)
753
754 def emit_folder_unsubscribe_error(self, folder_id, error):
755 """Emit the FolderUnSubscribeError signal"""
756 self.emit_signal('on_folder_unsubscribe_error',
757 {'id':folder_id}, str(error))
758
759
760class PublicFiles(Referenceable, SignalBroadcaster):
761 """An IPC interface for handling public files."""
762
763 __metaclass__ = RemoteMeta
764
765 # calls that will be accessible remotly
766 remote_calls = [
767 'change_public_access',
768 'get_public_files',
769 ]
770
771 def __init__(self, fs_manager, action_queue):
772 super(PublicFiles, self).__init__()
773 self.syncdaemon_public_files = SyncdaemonPublicFiles(fs_manager,
774 action_queue)
775
776 def change_public_access(self, share_id, node_id, is_public):
777 """Change the public access of a file."""
778 logger.debug('PublicFiles.change_public_access: %r, %r, %r',
779 share_id, node_id, is_public)
780 self.syncdaemon_public_files.change_public_access(share_id, node_id,
781 is_public)
782
783 def get_public_files(self):
784 """Request the list of public files to the server.
785
786 The result will be send in a PublicFilesList signal.
787 """
788 self.syncdaemon_public_files.get_public_files()
789
790 def emit_public_access_changed(self, share_id, node_id, is_public,
791 public_url):
792 """Emit the PublicAccessChanged signal."""
793 share_id = str(share_id) if share_id else ''
794 node_id = str(node_id)
795 path = self.syncdaemon_public_files.get_path(share_id, node_id)
796 info = dict(
797 share_id=str(share_id) if share_id else '',
798 node_id=str(node_id),
799 is_public=bool_str(is_public),
800 public_url=public_url if public_url else '',
801 path=path)
802 self.emit_signal('on_public_access_changed', info)
803
804 def emit_public_access_change_error(self, share_id, node_id, error):
805 """Emit the PublicAccessChangeError signal."""
806 path = self.syncdaemon_public_files.get_path(share_id, node_id)
807 info = dict(
808 share_id=str(share_id) if share_id else '',
809 node_id=str(node_id),
810 path=path)
811 self.emit_signal('on_public_access_change_error', info, str(error))
812
813 def emit_public_files_list(self, public_files):
814 """Emit the PublicFilesList signal."""
815 files = []
816 for pf in public_files:
817 volume_id = str(pf['volume_id'])
818 node_id = str(pf['node_id'])
819 public_url = str(pf['public_url'])
820 path = self.syncdaemon_public_files.get_path(volume_id ,
821 node_id).decode('utf-8')
822 files.append(dict(volume_id=volume_id, node_id=node_id,
823 public_url=public_url, path=path))
824 self.emit_signal('on_public_files_list', files)
825
826 def emit_public_files_list_error(self, error):
827 """Emit the PublicFilesListError signal."""
828 self.emit_signal('on_public_files_list_error', str(error))
