Merge lp:~ubuntuone-control-tower/ubuntu/lucid/ubuntuone-client/trunk into lp:ubuntu/lucid/ubuntuone-client

Proposed by dobey
Status: Merged
Merged at revision: not available
Proposed branch: lp:~ubuntuone-control-tower/ubuntu/lucid/ubuntuone-client/trunk
Merge into: lp:ubuntu/lucid/ubuntuone-client
Diff against target: 3336 lines (+1510/-530)
20 files modified
bin/u1sdtool (+6/-0)
bin/ubuntuone-preferences (+378/-163)
configure (+10/-10)
configure.ac (+1/-1)
contrib/testing/testcase.py (+0/-2)
debian/changelog (+9/-0)
debian/control (+1/-0)
docs/man/u1sdtool.1 (+6/-0)
docs/syncdaemon_dbus_api.txt (+25/-10)
nautilus/ubuntuone-nautilus.c (+236/-46)
tests/syncdaemon/test_action_queue.py (+449/-203)
tests/syncdaemon/test_dbus.py (+104/-6)
tests/syncdaemon/test_tools.py (+19/-9)
tests/syncdaemon/test_u1sdtool.py (+18/-9)
tests/test_preferences.py (+141/-53)
ubuntuone/syncdaemon/action_queue.py (+26/-13)
ubuntuone/syncdaemon/dbus_interface.py (+60/-3)
ubuntuone/syncdaemon/local_rescan.py (+1/-1)
ubuntuone/syncdaemon/main.py (+3/-1)
ubuntuone/syncdaemon/tools.py (+17/-0)
To merge this branch: bzr merge lp:~ubuntuone-control-tower/ubuntu/lucid/ubuntuone-client/trunk
Reviewer: Ubuntu branches (status: Pending)
Review via email: mp+21100@code.launchpad.net
23. By Elliot Murphy

* New upstream release.
  - Bring in UI for public files handling in Nautilus. (LP: #525803)
  - Bring in UI for managing devices. (LP: #525803)
* Add dependency on python-httplib2 to packaging. (LP: #535207)
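
Along with the device and public-files UI, this release extends the syncdaemon Status D-Bus interface with a waiting_metadata() method, exposed through the new u1sdtool --waiting-metadata option (see the preview diff below). The following is a minimal sketch of calling the method directly with python-dbus; the object path and interface name come from the updated docs/syncdaemon_dbus_api.txt, while the well-known bus name com.ubuntuone.SyncDaemon is an assumption here.

    import dbus

    bus = dbus.SessionBus()
    # /status and com.ubuntuone.SyncDaemon.Status are documented in
    # docs/syncdaemon_dbus_api.txt; the bus name is assumed.
    status = bus.get_object('com.ubuntuone.SyncDaemon', '/status')
    iface = dbus.Interface(status, 'com.ubuntuone.SyncDaemon.Status')

    # Operations still waiting in the meta-queue, in order.
    for op in iface.waiting_metadata():
        print op

This is the same information that "u1sdtool --waiting-metadata" prints via SyncDaemonTool.waiting_metadata() in this branch.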

Preview Diff

1=== modified file 'bin/u1sdtool'
2--- bin/u1sdtool 2010-03-04 16:47:43 +0000
3+++ bin/u1sdtool 2010-03-10 23:53:15 +0000
4@@ -42,6 +42,7 @@
5 show_error,
6 show_state,
7 show_waiting_content,
8+ show_waiting_metadata,
9 show_public_file_info,
10 is_running,
11 )
12@@ -161,6 +162,9 @@
13 elif options.status:
14 d = sync_daemon_tool.get_status()
15 d.addCallback(lambda r: show_state(r, out))
16+ elif options.waiting_metadata:
17+ d = sync_daemon_tool.waiting_metadata()
18+ d.addCallback(lambda r: show_waiting_metadata(r, out))
19 elif options.waiting_content:
20 d = sync_daemon_tool.waiting_content()
21 d.addCallback(lambda r: show_waiting_content(r, out))
22@@ -237,6 +241,8 @@
23 help="Get the current status of syncdaemon")
24 parser.add_option("", "--waiting-content", dest="waiting_content",
25 action='store_true', help="Get the waiting content list")
26+ parser.add_option("", "--waiting-metadata", dest="waiting_metadata",
27+ action='store_true', help="Get the waiting metadata list")
28 parser.add_option("", "--schedule-next", dest="schedule_next",
29 metavar="SHARE_ID NODE_ID", nargs=2, type='string',
30 help="Move the node to be the next in the queue of "
31
32=== modified file 'bin/ubuntuone-preferences'
33--- bin/ubuntuone-preferences 2010-03-04 16:47:43 +0000
34+++ bin/ubuntuone-preferences 2010-03-10 23:53:15 +0000
35@@ -23,6 +23,7 @@
36 import pygtk
37 pygtk.require('2.0')
38 import gobject
39+import glib
40 import gtk
41 import os
42 import gettext
43@@ -32,6 +33,11 @@
44 from oauth import oauth
45 from ubuntuone import clientdefs
46 from ubuntuone.syncdaemon.tools import SyncDaemonTool
47+from ubuntuone.syncdaemon.logger import LOGFOLDER
48+
49+import logging
50+import sys
51+import httplib, urlparse, socket
52
53 import dbus.service
54 from ConfigParser import ConfigParser
55@@ -39,6 +45,11 @@
56 from dbus.mainloop.glib import DBusGMainLoop
57 from xdg.BaseDirectory import xdg_config_home
58
59+logging.basicConfig(
60+ filename=os.path.join(LOGFOLDER, 'u1-prefs.log'),
61+ level=logging.DEBUG,
62+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
63+logger = logging.getLogger("ubuntuone-preferences")
64 DBusGMainLoop(set_as_default=True)
65
66 _ = gettext.gettext
67@@ -65,10 +76,363 @@
68 pass
69
70
71+def get_access_token(keyring):
72+ items = []
73+ items = keyring.find_items_sync(
74+ keyring.ITEM_GENERIC_SECRET,
75+ {'ubuntuone-realm': "https://ubuntuone.com",
76+ 'oauth-consumer-key': 'ubuntuone'})
77+ secret = items[0].secret
78+ return oauth.OAuthToken.from_string(secret)
79+
80+
81+class DevicesWidget(gtk.Table):
82+ """
83+ the Devices tab.
84+ """
85+ def __init__(self,
86+ bus,
87+ keyring=gnomekeyring,
88+ realm='https://ubuntuone.com',
89+ consumer_key='ubuntuone',
90+ url='https://one.ubuntu.com/api/1.0/devices/'):
91+ super(DevicesWidget, self).__init__(rows=2, columns=3)
92+ self.bus = bus
93+ self.keyring = keyring
94+ self.sdtool = SyncDaemonTool(bus)
95+ self.set_border_width(6)
96+ self.set_row_spacings(6)
97+ self.set_col_spacings(6)
98+ self.devices = None
99+ self.realm = realm
100+ self.consumer_key = consumer_key
101+ self.base_url = url
102+ self.conn = None
103+ self.consumer = None
104+ self.table_widgets = []
105+
106+ self.connected = None # i.e. unknown
107+ self.conn_btn = None
108+ self.up_spin = None
109+ self.dn_spin = None
110+ self.bw_chk = None
111+ self.bw_limited = False
112+ self.up_limit = 2097152
113+ self.dn_limit = 2097152
114+
115+ self._update_id = 0
116+
117+ self.status_label = gtk.Label("")
118+ self.attach(self.status_label, 0, 3, 2, 3)
119+
120+ self.description = gtk.Label(_("The devices connected to with your"
121+ " personal cloud network"
122+ " are listed below"))
123+ self.description.set_alignment(0., .5)
124+ self.description.set_line_wrap(True)
125+ self.attach(self.description, 0, 3, 0, 1, xpadding=12, ypadding=12)
126+
127+ def update_bw_settings(self):
128+ """
129+ Push the bandwidth settings at syncdaemon.
130+ """
131+ try:
132+ client = self.bus.get_object(DBUS_IFACE_NAME, "/config",
133+ follow_name_owner_changes=True)
134+ iface = dbus.Interface(client, DBUS_IFACE_CONFIG_NAME)
135+ iface.set_throttling_limits(self.dn_limit, self.up_limit,
136+ reply_handler=dbus_async,
137+ error_handler=self.error)
138+ if self.bw_limited:
139+ iface.enable_bandwidth_throttling(reply_handler=dbus_async,
140+ error_handler=self.error)
141+ else:
142+ iface.disable_bandwidth_throttling(reply_handler=dbus_async,
143+ error_handler=self.error)
144+ except DBusException, e:
145+ self.error(str(e))
146+
147+ def handle_bw_controls_changed(self, *a):
148+ """
149+ Sync the bandwidth throttling model with the view.
150+
151+ Start a timer to sync with syncdaemon too.
152+ """
153+ # Remove the timeout ...
154+ if self._update_id != 0:
155+ gobject.source_remove(self._update_id)
156+
157+ # sync the model ...
158+ self.bw_limited = self.bw_chk.get_active()
159+ self.up_limit = self.up_spin.get_value_as_int() * 1024
160+ self.dn_limit = self.dn_spin.get_value_as_int() * 1024
161+
162+ # ... and add the timeout back
163+ self._update_id = gobject.timeout_add_seconds(
164+ 1, self.update_bw_settings)
165+
166+ def handle_bw_checkbox_toggled(self, checkbox, *widgets):
167+ """
168+ Callback for the bandwidth togglebutton.
169+ """
170+ active = checkbox.get_active()
171+ for widget in widgets:
172+ widget.set_sensitive(active)
173+ self.handle_bw_controls_changed()
174+
175+ def handle_limits(self, limits):
176+ """
177+ Callback for when syncdaemon tells us its throttling limits.
178+ """
179+ self.up_limit = int(limits['upload'])
180+ self.dn_limit = int(limits['download'])
181+ if self.up_spin is not None and self.dn_spin is not None:
182+ self.up_spin.set_value(self.up_limit / 1024)
183+ self.dn_spin.set_value(self.dn_limit / 1024)
184+
185+ def handle_throttling_enabled(self, enabled):
186+ """
187+ Callback for when syncdaemon tells us whether throttling is enabled.
188+ """
189+ self.bw_limited = enabled
190+ if self.bw_chk is not None:
191+ self.bw_chk.set_active(enabled)
192+
193+ def handle_state_change(self, new_state):
194+ """
195+ Callback for when syncdaemon's state changes.
196+ """
197+ if new_state['is_error']:
198+ # this syncdaemon isn't going to connect no more
199+ self.connected = None
200+ else:
201+ self.connected = new_state['is_connected']
202+ if self.conn_btn is not None:
203+ if self.connected:
204+ self.conn_btn.set_label(_("Disconnect"))
205+ else:
206+ self.conn_btn.set_label(_("Connect"))
207+ if self.connected is None:
208+ self.conn_btn.set_sensitive(False)
209+ else:
210+ self.conn_btn.set_sensitive(True)
211+
212+ def error(self, msg):
213+ """
214+ Clear the table and show the error message in its place.
215+
216+ This might be better as an error dialog.
217+ """
218+ self.clear_devices_view()
219+ self.status_label.set_markup("<b>Error:</b> %s" % msg)
220+ logger.error(msg)
221+
222+ def request(self, path='', method='GET'):
223+ """
224+ Helper that makes an oauth-wrapped rest request.
225+
226+ XXX duplication with request_REST_info (but this one should be async).
227+ """
228+ url = self.base_url + path
229+
230+ token = get_access_token(self.keyring)
231+
232+ oauth_request = oauth.OAuthRequest.from_consumer_and_token(
233+ http_url=url,
234+ http_method=method,
235+ oauth_consumer=self.consumer,
236+ token=token,
237+ parameters='')
238+ oauth_request.sign_request(oauth.OAuthSignatureMethod_HMAC_SHA1(),
239+ self.consumer, token)
240+
241+ scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
242+
243+ conn = httplib.HTTPSConnection(netloc)
244+ try:
245+ conn.request(method, path, headers=oauth_request.to_header())
246+ except socket.error:
247+ return None
248+ return conn
249+
250+ def get_devices(self):
251+ """
252+ Ask the server for a list of devices
253+
254+ Hook up parse_devices to run on the result (when it gets here).
255+ """
256+ try:
257+ token = get_access_token(self.keyring)
258+ except gnomekeyring.NoMatchError:
259+ self.error("No token in the keyring")
260+ self.devices = []
261+ else:
262+ self.consumer = oauth.OAuthConsumer("ubuntuone", "hammertime")
263+
264+ self.conn = self.request()
265+ if self.conn is None:
266+ self.clear_devices_view()
267+ self.error('Unable to connect')
268+ else:
269+ glib.io_add_watch(
270+ self.conn.sock,
271+ glib.IO_IN | glib.IO_PRI | glib.IO_ERR | glib.IO_HUP,
272+ self.parse_devices)
273+
274+ def parse_devices(self, *a):
275+ """
276+ Parse the list of devices, and hook up list_devices if it worked.
277+ """
278+ response = self.conn.getresponse() # shouldn't block
279+ if response.status == 200:
280+ response = response.read() # neither should this
281+ self.devices = simplejson.loads(response)
282+ gobject.idle_add(self.list_devices)
283+ else:
284+ self.clear_devices_view()
285+ self.error(response.reason)
286+ return False
287+
288+ def clear_devices_view(self):
289+ """
290+ Clear out almost all the widgets.
291+
292+ All except from the table, the description and the
293+ status_label get destroyed.
294+ """
295+ for i in self.get_children():
296+ if i not in (self.description, self.status_label):
297+ i.destroy()
298+ self.conn_btn = None
299+ self.up_spin = None
300+ self.dn_spin = None
301+ self.bw_chk = None
302+
303+ def list_devices(self):
304+ """
305+ Populate the table with the list of devices.
306+
307+ If the list of devices is empty, make a fake one that refers
308+ to the local machine (to get the connect/restart buttons).
309+ """
310+ self.resize(len(self.devices)+1, 3)
311+
312+ self.clear_devices_view()
313+
314+ token = get_access_token(self.keyring)
315+
316+ if not self.devices:
317+ # a stopgap device so you can at least try to connect
318+ self.devices = [{'kind': 'Computer',
319+ 'description': _("<LOCAL MACHINE>"),
320+ 'token': token.key,
321+ 'FAKE': 'YES'}]
322+
323+ self.status_label.set_label("")
324+
325+ i = 0
326+ for row in self.devices:
327+ i += 1
328+ img = gtk.Image()
329+ img.set_from_icon_name(row['kind'].lower(), gtk.ICON_SIZE_DND)
330+ desc = gtk.Label(row['description'])
331+ desc.set_alignment(0., .5)
332+ self.attach(img, 0, 1, i, i+1)
333+ self.attach(desc, 1, 2, i, i+1)
334+ if 'FAKE' not in row:
335+ # we don't include the "Remove" button for the fake entry :)
336+ butn = gtk.Button(_('Remove'))
337+ butn.connect('clicked', self.remove,
338+ row['kind'], row.get('token'))
339+ self.attach(butn, 2, 3, i, i+1, xoptions=0, yoptions=0)
340+ if row.get('token') == token.key:
341+ self.bw_chk = ck_btn = gtk.CheckButton(
342+ _("_Limit Bandwidth Usage"))
343+ ck_btn.set_active(self.bw_limited)
344+ up_lbl = gtk.Label(_("Maximum _upload speed (KB/s):"))
345+ up_lbl.set_alignment(0., .5)
346+ adj = gtk.Adjustment(value=self.up_limit/1024.,
347+ lower=0.0, upper=4096.0,
348+ step_incr=1.0, page_incr=16.0)
349+ self.up_spin = up_btn = gtk.SpinButton(adj)
350+ up_btn.connect("value-changed", self.handle_bw_controls_changed)
351+ up_lbl.set_mnemonic_widget(up_btn)
352+ dn_lbl = gtk.Label(_("Maximum _download speed (KB/s):"))
353+ dn_lbl.set_alignment(0., .5)
354+ adj = gtk.Adjustment(value=self.dn_limit/1024.,
355+ lower=0.0, upper=4096.0,
356+ step_incr=1.0, page_incr=16.0)
357+ self.dn_spin = dn_btn = gtk.SpinButton(adj)
358+ dn_btn.connect("value-changed", self.handle_bw_controls_changed)
359+ dn_lbl.set_mnemonic_widget(dn_btn)
360+ ck_btn.connect('toggled', self.handle_bw_checkbox_toggled,
361+ up_lbl, up_btn, dn_lbl, dn_btn)
362+ self.handle_bw_checkbox_toggled(ck_btn,
363+ up_lbl, up_btn, dn_lbl, dn_btn)
364+
365+ self.conn_btn = gtk.Button(_('Connect'))
366+ if self.connected is None:
367+ self.conn_btn.set_sensitive(False)
368+ elif self.connected:
369+ self.conn_btn.set_label(_('Disconnect'))
370+ self.conn_btn.connect('clicked', self.handle_connect_button)
371+ restart_btn = gtk.Button(_('Restart'))
372+ restart_btn.connect('clicked', self.handle_restart_button)
373+ btn_box = gtk.HButtonBox()
374+ btn_box.add(self.conn_btn)
375+ btn_box.add(restart_btn)
376+
377+ i += 1
378+ self.attach(ck_btn, 1, 3, i, i+1)
379+ i += 1
380+ self.attach(up_lbl, 1, 2, i, i+1)
381+ self.attach(up_btn, 2, 3, i, i+1)
382+ i += 1
383+ self.attach(dn_lbl, 1, 2, i, i+1)
384+ self.attach(dn_btn, 2, 3, i, i+1)
385+ i += 1
386+ self.attach(btn_box, 1, 3, i, i+1)
387+ i += 2
388+ self.show_all()
389+
390+ def handle_connect_button(self, *a):
391+ """
392+ Callback for the Connect/Disconnect button.
393+ """
394+ self.conn_btn.set_sensitive(False)
395+ if self.connected:
396+ d = self.sdtool.disconnect()
397+ else:
398+ d = self.sdtool.connect()
399+
400+ def handle_restart_button(self, *a):
401+ """
402+ Callback for the Restart button.
403+ """
404+ self.sdtool.quit().addCallbacks(lambda _: self.sdtool.start())
405+
406+ def remove(self, button, kind, token):
407+ """
408+ Callback for the Remove button.
409+
410+ Starts an async request to remove a device.
411+ """
412+ self.conn = self.request('remove/%s/%s' % (kind.lower(), token))
413+ if self.conn is None:
414+ self.clear_devices_view()
415+ self.error('Unable to connect')
416+ else:
417+ glib.io_add_watch(
418+ self.conn.sock,
419+ glib.IO_IN | glib.IO_PRI | glib.IO_ERR | glib.IO_HUP,
420+ self.parse_devices)
421+
422+
423+
424 class UbuntuOneDialog(gtk.Dialog):
425 """Preferences dialog."""
426
427- def __init__(self, config=None, *args, **kw):
428+ def __init__(self, config=None, keyring=gnomekeyring, *args, **kw):
429 """Initializes our config dialog."""
430 super(UbuntuOneDialog, self).__init__(*args, **kw)
431 self.set_title(_("Ubuntu One Preferences"))
432@@ -80,12 +444,8 @@
433 self.connect("close", self.__handle_response, gtk.RESPONSE_CLOSE)
434 self.connect("response", self.__handle_response)
435
436- self.bw_enabled = False
437- self.up_limit = 2097152
438- self.dn_limit = 2097152
439-
440 self.__bus = dbus.SessionBus()
441- self.keyring = gnomekeyring
442+ self.keyring = keyring
443
444 self.__bus.add_signal_receiver(
445 handler_function=self.__got_state,
446@@ -108,15 +468,13 @@
447 # Timeout ID to avoid spamming DBus from spinbutton changes
448 self.__update_id = 0
449
450- # Connectivity status
451- self.connected = False
452-
453 # SD Tool object
454 self.sdtool = SyncDaemonTool(self.__bus)
455 self.sdtool.get_status().addCallbacks(lambda _: self.__got_state,
456 self.__sd_error)
457 # Build the dialog
458 self.__construct()
459+ logger.debug("starting")
460
461 def quit(self):
462 """Exit the main loop."""
463@@ -132,97 +490,23 @@
464
465 def __got_state(self, state):
466 """Got the state of syncdaemon."""
467- self.connected = bool(state['is_connected'])
468- if self.connected:
469- self.conn_btn.set_label(_("Disconnect"))
470- else:
471- self.conn_btn.set_label(_("Connect"))
472- self.conn_btn.set_sensitive(True)
473+ self.devices.handle_state_change(state)
474
475 def __got_limits(self, limits):
476 """Got the throttling limits."""
477- self.up_limit = int(limits['upload'])
478- self.dn_limit = int(limits['download'])
479- self.up_spin.set_value(self.up_limit / 1024)
480- self.dn_spin.set_value(self.dn_limit / 1024)
481+ logger.debug("got limits: %s" % (limits,))
482+ self.devices.handle_limits(limits)
483
484 def __got_enabled(self, enabled):
485 """Got the throttling enabled config."""
486- self.bw_enabled = bool(enabled)
487- self.limit_check.set_active(self.bw_enabled)
488-
489- def __update_bw_settings(self):
490- """Update the bandwidth throttling config in syncdaemon."""
491- self.bw_enabled = self.limit_check.get_active()
492- self.up_limit = self.up_spin.get_value_as_int() * 1024
493- self.dn_limit = self.dn_spin.get_value_as_int() * 1024
494-
495- try:
496- client = self.__bus.get_object(DBUS_IFACE_NAME, "/config",
497- follow_name_owner_changes=True)
498- iface = dbus.Interface(client, DBUS_IFACE_CONFIG_NAME)
499- iface.set_throttling_limits(self.dn_limit, self.up_limit,
500- reply_handler=dbus_async,
501- error_handler=self.__dbus_error)
502- if self.bw_enabled:
503- iface.enable_bandwidth_throttling(
504- reply_handler=dbus_async,
505- error_handler=self.__dbus_error)
506- else:
507- iface.disable_bandwidth_throttling(
508- reply_handler=dbus_async,
509- error_handler=self.__dbus_error)
510- except DBusException, e:
511- self.__dbus_error(e)
512+ self.devices.handle_throttling_enabled(enabled)
513
514 def __handle_response(self, dialog, response):
515 """Handle the dialog's response."""
516 self.hide()
517- self.__update_bw_settings()
518+ self.devices.update_bw_settings()
519 gtk.main_quit()
520
521- def __bw_limit_toggled(self, button, data=None):
522- """Toggle the bw limit panel."""
523- self.bw_enabled = self.limit_check.get_active()
524- self.bw_table.set_sensitive(self.bw_enabled)
525- try:
526- client = self.__bus.get_object(DBUS_IFACE_NAME, "/config",
527- follow_name_owner_changes=True)
528- iface = dbus.Interface(client, DBUS_IFACE_CONFIG_NAME)
529- iface.set_throttling_limits(self.dn_limit, self.up_limit,
530- reply_handler=dbus_async,
531- error_handler=self.__dbus_error)
532- if self.bw_enabled:
533- iface.enable_bandwidth_throttling(
534- reply_handler=dbus_async,
535- error_handler=self.__dbus_error)
536- else:
537- iface.disable_bandwidth_throttling(
538- reply_handler=dbus_async,
539- error_handler=self.__dbus_error)
540- except DBusException, e:
541- self.__dbus_error(e)
542-
543- def __spinner_changed(self, button, data=None):
544- """Remove timeout and add anew."""
545- if self.__update_id != 0:
546- gobject.source_remove(self.__update_id)
547-
548- self.__update_id = gobject.timeout_add_seconds(
549- 1, self.__update_bw_settings)
550-
551- def __connect_toggled(self, button, data=None):
552- """Toggle the connection state..."""
553- self.conn_btn.set_sensitive(False)
554- if self.connected:
555- self.sdtool.start().addCallbacks(
556- lambda _: self.sdtool.disconnect(),
557- self.__sd_error)
558- else:
559- self.sdtool.start().addCallbacks(
560- lambda _: self.sdtool.connect(),
561- self.__sd_error)
562-
563 def _format_for_gb_display(self, bytes):
564 """Format bytes into reasonable gb display."""
565 gb = bytes / 1024 / 1024 / 1024
566@@ -247,12 +531,7 @@
567 def request_REST_info(self, url, method):
568 """Make a REST request and return the resulting dict, or None."""
569 consumer = oauth.OAuthConsumer("ubuntuone", "hammertime")
570- items = []
571- items = self.keyring.find_items_sync(
572- gnomekeyring.ITEM_GENERIC_SECRET,
573- {'ubuntuone-realm': "https://ubuntuone.com",
574- 'oauth-consumer-key': consumer.key})
575- token = oauth.OAuthToken.from_string(items[0].secret)
576+ token = get_access_token(self.keyring)
577 request = oauth.OAuthRequest.from_consumer_and_token(
578 http_url=url, http_method=method, oauth_consumer=consumer,
579 token=token)
580@@ -403,75 +682,11 @@
581 self.mail_label.show()
582
583 # Devices tab
584- devices = gtk.VBox(spacing=12)
585- devices.set_border_width(6)
586- self.notebook.append_page(devices)
587- self.notebook.set_tab_label_text(devices, _("Devices"))
588- devices.show()
589-
590- # Bandwidth limiting
591- self.limit_check = gtk.CheckButton(_("_Limit Bandwidth Usage"))
592- self.limit_check.connect("toggled", self.__bw_limit_toggled)
593- devices.pack_start(self.limit_check, False, False)
594- self.limit_check.show()
595-
596- hbox = gtk.HBox(spacing=12)
597- devices.pack_start(hbox, False, False)
598- hbox.show()
599-
600- label = gtk.Label()
601- hbox.pack_start(label, False, False)
602- label.show()
603-
604- rbox = gtk.VBox(spacing=12)
605- hbox.pack_start(rbox, False, False)
606- rbox.show()
607-
608- # Now put the bw limit bits in a table too
609- self.bw_table = gtk.Table(rows=2, columns=2)
610- self.bw_table.set_row_spacings(6)
611- self.bw_table.set_col_spacings(6)
612- self.bw_table.set_sensitive(False)
613- rbox.pack_start(self.bw_table, False, False)
614- self.bw_table.show()
615-
616- # Upload speed
617- label = gtk.Label(_("Maximum _upload speed (KB/s):"))
618- label.set_use_underline(True)
619- label.set_alignment(0, 0.5)
620- self.bw_table.attach(label, 0, 1, 0, 1)
621- label.show()
622-
623- adjustment = gtk.Adjustment(value=2048.0, lower=0.0, upper=4096.0,
624- step_incr=64.0, page_incr=128.0)
625- self.up_spin = gtk.SpinButton(adjustment)
626- self.up_spin.connect("value-changed", self.__spinner_changed)
627- label.set_mnemonic_widget(self.up_spin)
628- self.bw_table.attach(self.up_spin, 1, 2, 0, 1)
629- self.up_spin.show()
630-
631- # Download speed
632- label = gtk.Label(_("Maximum _download speed (KB/s):"))
633- label.set_use_underline(True)
634- label.set_alignment(0, 0.5)
635- self.bw_table.attach(label, 0, 1, 1, 2)
636- label.show()
637- adjustment = gtk.Adjustment(value=2048.0, lower=64.0, upper=8192.0,
638- step_incr=64.0, page_incr=128.0)
639- self.dn_spin = gtk.SpinButton(adjustment)
640- self.dn_spin.connect("value-changed", self.__spinner_changed)
641- label.set_mnemonic_widget(self.dn_spin)
642- self.bw_table.attach(self.dn_spin, 1, 2, 1, 2)
643- self.dn_spin.show()
644-
645- alignment = gtk.Alignment(1.0, 0.5)
646- rbox.pack_end(alignment, False, False)
647- alignment.show()
648-
649- self.conn_btn = gtk.Button(_("Connect"))
650- self.conn_btn.connect('clicked', self.__connect_toggled)
651- alignment.add(self.conn_btn)
652- self.conn_btn.show()
653+ self.devices = DevicesWidget(self.__bus, self.keyring)
654+ self.notebook.append_page(self.devices)
655+ self.notebook.set_tab_label_text(self.devices, _("Devices"))
656+ self.devices.show_all()
657+ self.devices.get_devices()
658
659 # Services tab
660 services = gtk.VBox(spacing=12)
661
662=== modified file 'configure'
663--- configure 2010-03-04 16:47:43 +0000
664+++ configure 2010-03-10 23:53:15 +0000
665@@ -1,6 +1,6 @@
666 #! /bin/sh
667 # Guess values for system-dependent variables and create Makefiles.
668-# Generated by GNU Autoconf 2.65 for ubuntuone-client 1.1.3.
669+# Generated by GNU Autoconf 2.65 for ubuntuone-client 1.1.4.
670 #
671 #
672 # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
673@@ -698,8 +698,8 @@
674 # Identity of this package.
675 PACKAGE_NAME='ubuntuone-client'
676 PACKAGE_TARNAME='ubuntuone-client'
677-PACKAGE_VERSION='1.1.3'
678-PACKAGE_STRING='ubuntuone-client 1.1.3'
679+PACKAGE_VERSION='1.1.4'
680+PACKAGE_STRING='ubuntuone-client 1.1.4'
681 PACKAGE_BUGREPORT=''
682 PACKAGE_URL=''
683
684@@ -1476,7 +1476,7 @@
685 # Omit some internal or obsolete options to make the list less imposing.
686 # This message is too long to be a string in the A/UX 3.1 sh.
687 cat <<_ACEOF
688-\`configure' configures ubuntuone-client 1.1.3 to adapt to many kinds of systems.
689+\`configure' configures ubuntuone-client 1.1.4 to adapt to many kinds of systems.
690
691 Usage: $0 [OPTION]... [VAR=VALUE]...
692
693@@ -1547,7 +1547,7 @@
694
695 if test -n "$ac_init_help"; then
696 case $ac_init_help in
697- short | recursive ) echo "Configuration of ubuntuone-client 1.1.3:";;
698+ short | recursive ) echo "Configuration of ubuntuone-client 1.1.4:";;
699 esac
700 cat <<\_ACEOF
701
702@@ -1660,7 +1660,7 @@
703 test -n "$ac_init_help" && exit $ac_status
704 if $ac_init_version; then
705 cat <<\_ACEOF
706-ubuntuone-client configure 1.1.3
707+ubuntuone-client configure 1.1.4
708 generated by GNU Autoconf 2.65
709
710 Copyright (C) 2009 Free Software Foundation, Inc.
711@@ -1938,7 +1938,7 @@
712 This file contains any messages produced by compilers while
713 running configure, to aid debugging if configure makes a mistake.
714
715-It was created by ubuntuone-client $as_me 1.1.3, which was
716+It was created by ubuntuone-client $as_me 1.1.4, which was
717 generated by GNU Autoconf 2.65. Invocation command line was
718
719 $ $0 $@
720@@ -2748,7 +2748,7 @@
721
722 # Define the identity of the package.
723 PACKAGE='ubuntuone-client'
724- VERSION='1.1.3'
725+ VERSION='1.1.4'
726
727
728 cat >>confdefs.h <<_ACEOF
729@@ -12972,7 +12972,7 @@
730 # report actual input values of CONFIG_FILES etc. instead of their
731 # values after options handling.
732 ac_log="
733-This file was extended by ubuntuone-client $as_me 1.1.3, which was
734+This file was extended by ubuntuone-client $as_me 1.1.4, which was
735 generated by GNU Autoconf 2.65. Invocation command line was
736
737 CONFIG_FILES = $CONFIG_FILES
738@@ -13038,7 +13038,7 @@
739 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
740 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
741 ac_cs_version="\\
742-ubuntuone-client config.status 1.1.3
743+ubuntuone-client config.status 1.1.4
744 configured by $0, generated by GNU Autoconf 2.65,
745 with options \\"\$ac_cs_config\\"
746
747
748=== modified file 'configure.ac'
749--- configure.ac 2010-03-04 16:47:43 +0000
750+++ configure.ac 2010-03-10 23:53:15 +0000
751@@ -1,7 +1,7 @@
752 dnl Process this file with autoconf to produce a configure script.
753 AC_PREREQ(2.53)
754
755-AC_INIT([ubuntuone-client], [1.1.3])
756+AC_INIT([ubuntuone-client], [1.1.4])
757 AC_CONFIG_SRCDIR([config.h.in])
758
759 AM_INIT_AUTOMAKE([1.10 foreign])
760
761=== modified file 'contrib/__init__.pyc'
762Binary files contrib/__init__.pyc 2010-03-04 16:47:43 +0000 and contrib/__init__.pyc 2010-03-10 23:53:15 +0000 differ
763=== modified file 'contrib/dbus_util.pyc'
764Binary files contrib/dbus_util.pyc 2010-03-04 16:47:43 +0000 and contrib/dbus_util.pyc 2010-03-10 23:53:15 +0000 differ
765=== modified file 'contrib/mocker.pyc'
766Binary files contrib/mocker.pyc 2010-03-04 16:47:43 +0000 and contrib/mocker.pyc 2010-03-10 23:53:15 +0000 differ
767=== modified file 'contrib/testing/__init__.pyc'
768Binary files contrib/testing/__init__.pyc 2010-03-04 16:47:43 +0000 and contrib/testing/__init__.pyc 2010-03-10 23:53:15 +0000 differ
769=== modified file 'contrib/testing/testcase.py'
770--- contrib/testing/testcase.py 2010-03-04 16:47:43 +0000
771+++ contrib/testing/testcase.py 2010-03-10 23:53:15 +0000
772@@ -348,8 +348,6 @@
773 def __init__(self, eq, *args, **kwargs):
774 """ Creates the instance """
775 self.eq = self.event_queue = eq
776- self.client = action_queue.ActionQueue.protocol()
777- self.client.disconnect = lambda: None
778 self.uploading = {}
779 self.downloading = {}
780 # pylint: disable-msg=C0103
781
782=== modified file 'contrib/testing/testcase.pyc'
783Binary files contrib/testing/testcase.pyc 2010-03-04 16:47:43 +0000 and contrib/testing/testcase.pyc 2010-03-10 23:53:15 +0000 differ
784=== modified file 'debian/changelog'
785--- debian/changelog 2010-03-05 15:47:48 +0000
786+++ debian/changelog 2010-03-10 23:53:15 +0000
787@@ -1,3 +1,12 @@
788+ubuntuone-client (1.1.4-0ubuntu1) UNRELEASED; urgency=low
789+
790+ * New upstream release.
791+ - Bring in UI for public files handling in Nautilus. (LP: #525803)
792+ - Bring in UI for managing devices. (LP: #525803)
793+ * Add dependency on python-httplib2 to packaging. (LP: #535207)
794+
795+ -- Rodney Dawes <rodney.dawes@canonical.com> Wed, 10 Mar 2010 18:36:57 -0500
796+
797 ubuntuone-client (1.1.3-0ubuntu1) lucid; urgency=low
798
799 * New upstream release.
800
801=== modified file 'debian/control'
802--- debian/control 2010-03-04 16:52:43 +0000
803+++ debian/control 2010-03-10 23:53:15 +0000
804@@ -41,6 +41,7 @@
805 Depends: ${shlibs:Depends}, ${misc:Depends}, ${python:Depends},
806 ubuntuone-client (= ${source:Version}),
807 python-gtk2 (>= 2.10),
808+ python-httplib2,
809 python-simplejson
810 Replaces: ubuntuone-client (<= 1.1.1)
811 Conflicts: ubuntuone-client (<= 1.1.1)
812
813=== modified file 'docs/man/u1sdtool.1'
814--- docs/man/u1sdtool.1 2010-02-17 23:51:29 +0000
815+++ docs/man/u1sdtool.1 2010-03-10 23:53:15 +0000
816@@ -63,6 +63,9 @@
817 \-\-status
818 .br
819 .B u1sdtool
820+\-\-waiting-metadata
821+.br
822+.B u1sdtool
823 \-\-waiting-content
824 .br
825 .B u1sdtool
826@@ -192,6 +195,9 @@
827 \fB\-\-status\fR
828 Get the current status of syncdaemon
829 .TP
830+\fB\-\-waiting-metadata\fR
831+Get the waiting metadata list
832+.TP
833 \fB\-\-waiting-content\fR
834 Get the waiting content list
835 .TP
836
837=== modified file 'docs/syncdaemon_dbus_api.txt'
838--- docs/syncdaemon_dbus_api.txt 2010-02-17 23:51:29 +0000
839+++ docs/syncdaemon_dbus_api.txt 2010-03-10 23:53:15 +0000
840@@ -19,20 +19,15 @@
841 Object path: /status
842 Interface: com.ubuntuone.SyncDaemon.Status
843 Methods:
844- waiting_content()
845-
846- returns a list of files that are waiting to be up- or downloaded
847-
848-
849- current_downloads()
850- return list of files with a download in progress.
851-
852 schedule_next(share_id=s,node_id=s)
853
854 Make the command on the given share and node be next in the
855 queue of waiting commands.
856
857
858+ waiting_metadata()
859+ Return a list of the operations in the meta-queue.
860+
861 current_status()
862 return the current status of the system, one of: local_rescan,
863 offline, trying_to_connect, server_rescan or online.
864@@ -41,6 +36,14 @@
865 current_uploads()
866 return a list of files with a upload in progress
867
868+ current_downloads()
869+ return list of files with a download in progress.
870+
871+ waiting_content()
872+
873+ returns a list of files that are waiting to be up- or downloaded
874+
875+
876 Signals:
877 UploadFinished(path=s,info=a{ss})
878 Fire a D-BUS signal, notifying an upload has finished.
879@@ -115,6 +118,9 @@
880 Returns the root dir/mount point.
881
882 Signals:
883+ RootMismatch(root_id=s,new_root_id=s)
884+ RootMismatch signal, the user connected with a different account.
885+
886 SignalError(signal=s,extra_args=a{ss})
887 An error ocurred while trying to emit a signal.
888
889@@ -170,8 +176,14 @@
890 reject_share(share_id=s)
891 Rejects a share.
892
893- refresh_shares()
894- Refresh the share list, requesting it to the server.
895+ create_shares(path=s,usernames=as,name=s,access_level=s)
896+ Share a subtree with several users at once.
897+
898+ @param path: that path to share (the root of the subtree)
899+ @param usernames: the user names to offer the share to
900+ @param name: the name of the share
901+ @param access_level: 'View' or 'Modify'
902+
903
904 create_share(path=s,username=s,name=s,access_level=s)
905 Share a subtree to the user identified by username.
906@@ -193,6 +205,9 @@
907 future as a success/failure indicator.
908
909
910+ refresh_shares()
911+ Refresh the share list, requesting it to the server.
912+
913 Signals:
914 ShareAnswerResponse(answer_info=a{ss})
915 The answer to share was succesfull
916
917=== modified file 'nautilus/ubuntuone-nautilus.c'
918--- nautilus/ubuntuone-nautilus.c 2010-03-04 16:47:43 +0000
919+++ nautilus/ubuntuone-nautilus.c 2010-03-10 23:53:15 +0000
920@@ -52,7 +52,7 @@
921 #define UBUNTUONE_TYPE_NAUTILUS (ubuntuone_nautilus_get_type ())
922 #define UBUNTUONE_NAUTILUS(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), UBUNTUONE_TYPE_NAUTILUS, UbuntuOneNautilus))
923
924-typedef struct _ShareCBData ShareCBData;
925+typedef struct _CBData CBData;
926
927 typedef struct {
928 GObject parent_slot;
929@@ -92,7 +92,7 @@
930 GHashTable * observed;
931
932 /* Extra data we need to free on finalization */
933- ShareCBData * share_cb_data;
934+ CBData * cb_data;
935 } UbuntuOneNautilus;
936
937 typedef struct {
938@@ -149,6 +149,13 @@
939 static void ubuntuone_nautilus_udf_deleted (DBusGProxy * proxy,
940 GHashTable * hash,
941 gpointer user_data);
942+static void ubuntuone_nautilus_file_published (DBusGProxy * proxy,
943+ GHashTable * hash,
944+ gpointer user_data);
945+static void ubuntuone_nautilus_publish_error (DBusGProxy * proxy,
946+ GHashTable * hash,
947+ gchar * error,
948+ gpointer user_data);
949 static void ubuntuone_nautilus_share_created (DBusGProxy * proxy,
950 GHashTable * hash,
951 gpointer user_data);
952@@ -289,10 +296,14 @@
953 } else if (g_hash_table_lookup (uon->needsupdating, path)) {
954 nautilus_file_info_add_emblem (file, "ubuntuone-unsynchronized");
955 ubuntuone_nautilus_add_observed (uon, path, file);
956- } else if (g_hash_table_lookup (uon->udfs, path)) {
957+ }
958+
959+ if (g_hash_table_lookup (uon->udfs, path)) {
960 nautilus_file_info_add_emblem (file, "ubuntuone-enabled");
961 ubuntuone_nautilus_add_observed (uon, path, file);
962- } else if (g_hash_table_lookup (uon->public, path)) {
963+ }
964+
965+ if (g_hash_table_lookup (uon->public, path)) {
966 nautilus_file_info_add_emblem (file, "ubuntuone-public");
967 ubuntuone_nautilus_add_observed (uon, path, file);
968 }
969@@ -354,8 +365,9 @@
970 iface->get_widget = ubuntuone_nautilus_get_location_widget;
971 }
972
973-/* Magical struct for passing data in sharing callbacks */
974-struct _ShareCBData {
975+
976+/* Magical struct for passing data in callbacks */
977+struct _CBData {
978 UbuntuOneNautilus * uon;
979 gchar * path;
980 GtkWidget * parent;
981@@ -364,9 +376,12 @@
982 GtkWidget * user_picker;
983 GtkWidget * name_entry;
984 GtkWidget * allow_mods;
985+
986+ /* Whether to make a file public or private */
987+ gboolean make_public;
988 };
989
990-static void __share_cb_data_free (struct _ShareCBData * data) {
991+static void __cb_data_free (struct _CBData * data) {
992 if (!data)
993 return;
994
995@@ -388,7 +403,7 @@
996 }
997
998 /* Share on Ubuntu One dialog constructor */
999-static GtkWidget * ubuntuone_nautilus_share_dialog_construct (struct _ShareCBData * data) {
1000+static GtkWidget * ubuntuone_nautilus_share_dialog_construct (struct _CBData * data) {
1001 GtkWidget * dialog;
1002 GtkWidget * area, * table, * label;
1003
1004@@ -445,6 +460,8 @@
1005 gtk_table_attach (GTK_TABLE (table), data->allow_mods, 0, 2, 3, 4, GTK_FILL, GTK_FILL, 3, 3);
1006 gtk_widget_show (data->allow_mods);
1007
1008+ gtk_widget_set_size_request (dialog, 500, 450);
1009+
1010 return dialog;
1011 }
1012
1013@@ -452,12 +469,14 @@
1014 static void ubuntuone_nautilus_share_dialog_response (GtkDialog * dialog,
1015 gint response,
1016 gpointer user_data) {
1017- struct _ShareCBData * data = (struct _ShareCBData *) user_data;
1018+ struct _CBData * data = (struct _CBData *) user_data;
1019
1020 switch (response) {
1021 case GTK_RESPONSE_ACCEPT: {
1022 gchar * name, * modify;
1023- GSList *emails;
1024+ GSList *emails, *l;
1025+ gchar **emails_array;
1026+ gint i;
1027 gboolean allow_mods = FALSE;
1028
1029 emails = u1_contacts_picker_get_selected_emails (U1_CONTACTS_PICKER (data->user_picker));
1030@@ -465,8 +484,7 @@
1031 GtkWidget *error_dialog;
1032
1033 error_dialog = gtk_message_dialog_new (GTK_WINDOW (dialog),
1034- GTK_DIALOG_DESTROY_WITH_PARENT |
1035- GTK_DIALOG_NO_SEPARATOR,
1036+ GTK_DIALOG_DESTROY_WITH_PARENT,
1037 GTK_MESSAGE_ERROR,
1038 GTK_BUTTONS_CLOSE,
1039 _("You need to select at least one contact to share this folder with"));
1040@@ -475,18 +493,25 @@
1041
1042 return;
1043 }
1044+
1045+ emails_array = g_new0 (gchar *, g_slist_length (emails));
1046+ for (l = emails, i = 0; l != NULL; l = l->next, i++)
1047+ emails_array[i] = g_strdup (l->data);
1048+
1049 name = g_strdup (gtk_entry_get_text (GTK_ENTRY (data->name_entry)));
1050 allow_mods = gtk_toggle_button_get_active (GTK_TOGGLE_BUTTON (data->allow_mods));
1051 modify = g_strdup_printf ("%d", allow_mods);
1052
1053- dbus_g_proxy_begin_call (data->uon->u1_shares, "create_share",
1054+ dbus_g_proxy_begin_call (data->uon->u1_shares, "create_shares",
1055 ubuntuone_nautilus_end_dbus_call, NULL,
1056 NULL,
1057 G_TYPE_STRING, data->path,
1058- G_TYPE_STRING, emails->data, /* FIXME: need to support more than 1 email */
1059+ G_TYPE_STRV, emails_array,
1060 G_TYPE_STRING, name,
1061 G_TYPE_STRING, modify,
1062 G_TYPE_INVALID);
1063+
1064+ g_strfreev (emails_array);
1065 u1_contacts_picker_free_selection_list (emails);
1066 g_free (name);
1067 g_free (modify);
1068@@ -503,9 +528,63 @@
1069 user_data);
1070 }
1071
1072+static void ubuntuone_nautilus_public_meta (DBusGProxy * proxy,
1073+ DBusGProxyCall * call_id,
1074+ gpointer user_data) {
1075+ struct _CBData * data = (struct _CBData *) user_data;
1076+ GHashTable * metadata;
1077+ GError * error = NULL;
1078+ gchar * share_id, * node_id;
1079+
1080+ g_return_if_fail (proxy != NULL);
1081+
1082+ if (!dbus_g_proxy_end_call (proxy, call_id, &error,
1083+ dbus_g_type_get_map ("GHashTable",
1084+ G_TYPE_STRING,
1085+ G_TYPE_STRING),
1086+ &metadata,
1087+ G_TYPE_INVALID)) {
1088+ g_warning ("ERROR: %s", error->message);
1089+ return;
1090+ }
1091+
1092+ share_id = g_hash_table_lookup (metadata, "share_id");
1093+ node_id = g_hash_table_lookup (metadata, "node_id");
1094+
1095+ dbus_g_proxy_begin_call (data->uon->u1_public, "change_public_access",
1096+ ubuntuone_nautilus_end_dbus_call, NULL,
1097+ NULL,
1098+ G_TYPE_STRING, share_id,
1099+ G_TYPE_STRING, node_id,
1100+ G_TYPE_BOOLEAN, data->make_public,
1101+ G_TYPE_INVALID);
1102+}
1103+
1104+static void ubuntuone_nautilus_copy_public_url (NautilusMenuItem * item,
1105+ gpointer user_data) {
1106+ struct _CBData * data = (struct _CBData *) user_data;
1107+ gchar * url;
1108+
1109+ url = g_hash_table_lookup (data->uon->public, data->path);
1110+ gtk_clipboard_set_text (gtk_clipboard_get(GDK_SELECTION_CLIPBOARD),
1111+ url, strlen (url));
1112+ gtk_clipboard_store (gtk_clipboard_get(GDK_SELECTION_CLIPBOARD));
1113+}
1114+
1115+static void ubuntuone_nautilus_toggle_publicity (NautilusMenuItem * item,
1116+ gpointer user_data) {
1117+ struct _CBData * data = (struct _CBData *) user_data;
1118+
1119+ dbus_g_proxy_begin_call (data->uon->u1_fs, "get_metadata",
1120+ ubuntuone_nautilus_public_meta, data,
1121+ NULL,
1122+ G_TYPE_STRING, data->path,
1123+ G_TYPE_INVALID);
1124+}
1125+
1126 static void ubuntuone_nautilus_share_folder (NautilusMenuItem * item,
1127 gpointer * user_data) {
1128- struct _ShareCBData * data = (struct _ShareCBData *) user_data;
1129+ struct _CBData * data = (struct _CBData *) user_data;
1130 GtkWidget * dialog;
1131
1132 dialog = ubuntuone_nautilus_share_dialog_construct (data);
1133@@ -517,10 +596,22 @@
1134 gtk_widget_show (dialog);
1135 }
1136
1137+static void ubuntuone_nautilus_unshare_folder (NautilusMenuItem * item,
1138+ gpointer * user_data) {
1139+ struct _CBData * data = (struct _CBData *) user_data;
1140+ gchar * share_id;
1141+
1142+ share_id = g_hash_table_lookup (data->uon->shares, data->path);
1143+ dbus_g_proxy_begin_call (data->uon->u1_shares, "delete_share",
1144+ ubuntuone_nautilus_end_dbus_call, NULL,
1145+ NULL,
1146+ G_TYPE_STRING, share_id,
1147+ G_TYPE_INVALID);
1148+}
1149
1150 static void ubuntuone_nautilus_synchronize_folder (NautilusMenuItem * item,
1151 gpointer * user_data) {
1152- struct _ShareCBData * data = (struct _ShareCBData *) user_data;
1153+ struct _CBData * data = (struct _CBData *) user_data;
1154
1155 dbus_g_proxy_begin_call (data->uon->u1_folders, "create",
1156 ubuntuone_nautilus_end_dbus_call,
1157@@ -531,7 +622,7 @@
1158
1159 static void ubuntuone_nautilus_delete_folder (NautilusMenuItem * item,
1160 gpointer * user_data) {
1161- struct _ShareCBData * data = (struct _ShareCBData *) user_data;
1162+ struct _CBData * data = (struct _CBData *) user_data;
1163 gchar * udf_id;
1164
1165 udf_id = g_hash_table_lookup (data->uon->udfs, data->path);
1166@@ -551,10 +642,10 @@
1167 NautilusFileInfo * file;
1168 GList * items = NULL;
1169 gchar * path;
1170- gboolean is_managed, is_root, is_udf, is_public;
1171- struct _ShareCBData * share_cb_data;
1172+ gboolean is_managed, is_root, is_udf, is_public, is_shared;
1173+ struct _CBData * cb_data;
1174
1175- is_managed = is_root = is_udf = is_public = FALSE;
1176+ is_managed = is_root = is_udf = is_public = is_shared = FALSE;
1177
1178 if (g_list_length (files) != 1)
1179 return NULL;
1180@@ -576,15 +667,21 @@
1181 if (g_hash_table_lookup (uon->udfs, path) != NULL)
1182 is_udf = TRUE;
1183
1184- if (uon->share_cb_data)
1185- __share_cb_data_free (uon->share_cb_data);
1186-
1187- share_cb_data = g_new0 (struct _ShareCBData, 1);
1188- share_cb_data->uon = uon;
1189- share_cb_data->parent = window;
1190- share_cb_data->path = g_strdup (path);
1191-
1192- uon->share_cb_data = share_cb_data;
1193+ if (g_hash_table_lookup (uon->public, path) != NULL)
1194+ is_public = TRUE;
1195+
1196+ if (g_hash_table_lookup (uon->shares, path) != NULL)
1197+ is_shared = TRUE;
1198+
1199+ if (uon->cb_data)
1200+ __cb_data_free (uon->cb_data);
1201+
1202+ cb_data = g_new0 (struct _CBData, 1);
1203+ cb_data->uon = uon;
1204+ cb_data->parent = window;
1205+ cb_data->path = g_strdup (path);
1206+
1207+ uon->cb_data = cb_data;
1208
1209 if ((is_managed || is_udf) && !is_root && nautilus_file_info_is_directory (file)) {
1210 NautilusMenuItem * item;
1211@@ -595,7 +692,20 @@
1212 "ubuntuone");
1213 g_signal_connect (item, "activate",
1214 G_CALLBACK (ubuntuone_nautilus_share_folder),
1215- share_cb_data);
1216+ cb_data);
1217+ items = g_list_prepend (items, item);
1218+ }
1219+
1220+ if ((is_managed && is_shared) && !is_root && nautilus_file_info_is_directory (file)) {
1221+ NautilusMenuItem * item;
1222+
1223+ item = nautilus_menu_item_new ("ubuntuone-unshare",
1224+ _("Stop sharing on Ubuntu One..."),
1225+ _("Stop sharing this folder on Ubuntu One"),
1226+ "ubuntuone");
1227+ g_signal_connect (item, "activate",
1228+ G_CALLBACK (ubuntuone_nautilus_unshare_folder),
1229+ cb_data);
1230 items = g_list_prepend (items, item);
1231 }
1232
1233@@ -604,27 +714,55 @@
1234
1235 item = nautilus_menu_item_new ("ubuntuone-udf-delete",
1236 _("Stop synchronizing on Ubuntu One"),
1237- _("Stop synchronizing this folder with Ubuntu One"),
1238- "");
1239+ _("Stop synchronizing this folder with Ubuntu One."),
1240+ "ubuntuone");
1241 g_signal_connect (item, "activate",
1242 G_CALLBACK (ubuntuone_nautilus_delete_folder),
1243- share_cb_data);
1244+ cb_data);
1245 items = g_list_append (items, item);
1246 }
1247
1248- if (!is_managed && !is_udf && nautilus_file_info_is_directory (file)) {
1249+ if (!is_managed && !is_udf && nautilus_file_info_is_directory (file) && (nautilus_file_info_get_file_type (file) != G_FILE_TYPE_SYMBOLIC_LINK)) {
1250 NautilusMenuItem * item;
1251
1252 item = nautilus_menu_item_new ("ubuntuone-udf-create",
1253 _("Synchronize on Ubuntu One"),
1254- _("Synchronize this folder with Ubuntu One"),
1255- "");
1256+ _("Synchronize this folder with Ubuntu One."),
1257+ "ubuntuone");
1258 g_signal_connect (item, "activate",
1259 G_CALLBACK (ubuntuone_nautilus_synchronize_folder),
1260- share_cb_data);
1261+ cb_data);
1262 items = g_list_append (items, item);
1263- } else if (is_udf) {
1264+ }
1265
1266+ if (is_managed && (nautilus_file_info_get_file_type (file) == G_FILE_TYPE_REGULAR)) {
1267+ NautilusMenuItem * item, * urlitem;
1268+
1269+ if (is_public) {
1270+ urlitem = nautilus_menu_item_new ("ubuntuone-geturl",
1271+ _("Copy Ubuntu One public URL"),
1272+ _("Copy the Ubuntu One public URL for this file to the clipboard."),
1273+ "ubuntuone");
1274+ g_signal_connect (urlitem, "activate",
1275+ G_CALLBACK (ubuntuone_nautilus_copy_public_url),
1276+ cb_data);
1277+ items = g_list_append (items, urlitem);
1278+ item = nautilus_menu_item_new ("ubuntuone-unpublish",
1279+ _("Stop publishing via Ubuntu One"),
1280+ _("No longer share this file with everyone."),
1281+ "ubuntuone");
1282+ cb_data->make_public = FALSE;
1283+ } else {
1284+ item = nautilus_menu_item_new ("ubuntuone-publish",
1285+ _("Publish via Ubuntu One"),
1286+ _("Make this file available to anyone."),
1287+ "ubuntuone");
1288+ cb_data->make_public = TRUE;
1289+ }
1290+ g_signal_connect (item, "activate",
1291+ G_CALLBACK (ubuntuone_nautilus_toggle_publicity),
1292+ cb_data);
1293+ items = g_list_append (items, item);
1294 }
1295
1296 done:
1297@@ -818,6 +956,25 @@
1298 dbus_g_proxy_connect_signal (uon->u1_folders, "FolderUnSubscribed",
1299 G_CALLBACK (ubuntuone_nautilus_udf_deleted),
1300 uon, NULL);
1301+
1302+
1303+ dbus_g_proxy_add_signal (uon->u1_public, "PublicAccessChanged",
1304+ dbus_g_type_get_map ("GHashTable",
1305+ G_TYPE_STRING,
1306+ G_TYPE_STRING),
1307+ G_TYPE_INVALID);
1308+ dbus_g_proxy_connect_signal (uon->u1_public, "PublicAccessChanged",
1309+ G_CALLBACK (ubuntuone_nautilus_file_published),
1310+ uon, NULL);
1311+ dbus_g_proxy_add_signal (uon->u1_public, "PublicAccessChangeError",
1312+ dbus_g_type_get_map ("GHashTable",
1313+ G_TYPE_STRING,
1314+ G_TYPE_STRING),
1315+ G_TYPE_STRING,
1316+ G_TYPE_INVALID);
1317+ dbus_g_proxy_connect_signal (uon->u1_public, "PublicAccessChangeError",
1318+ G_CALLBACK (ubuntuone_nautilus_publish_error),
1319+ uon, NULL);
1320 }
1321
1322 static void ubuntuone_nautilus_class_init (UbuntuOneNautilusClass * klass) {
1323@@ -829,7 +986,7 @@
1324 static void ubuntuone_nautilus_finalize(GObject * object) {
1325 UbuntuOneNautilus * uon = UBUNTUONE_NAUTILUS(object);
1326
1327- __share_cb_data_free (uon->share_cb_data);
1328+ __cb_data_free (uon->cb_data);
1329
1330 if (uon->u1_proxy)
1331 g_object_unref (uon->u1_proxy);
1332@@ -1185,17 +1342,51 @@
1333 ubuntuone_nautilus_reset_emblem (uon, path);
1334 }
1335
1336+static void ubuntuone_nautilus_file_published (DBusGProxy * proxy,
1337+ GHashTable * hash,
1338+ gpointer user_data) {
1339+ UbuntuOneNautilus * uon = UBUNTUONE_NAUTILUS (user_data);
1340+ gchar * path, * url, * public;
1341+ gboolean is_public;
1342+
1343+ path = g_hash_table_lookup (hash, "path");
1344+ url = g_hash_table_lookup (hash, "public_url");
1345+ public = g_hash_table_lookup (hash, "is_public");
1346+ is_public = (public != NULL && strlen (public) != 0);
1347+
1348+ if (!is_public && g_hash_table_lookup (uon->public, path))
1349+ g_hash_table_remove (uon->public, path);
1350+
1351+ if (is_public)
1352+ g_hash_table_replace (uon->public, g_strdup (path), g_strdup (url));
1353+
1354+ ubuntuone_nautilus_reset_emblem (uon, path);
1355+}
1356+
1357+static void ubuntuone_nautilus_publish_error (DBusGProxy * proxy,
1358+ GHashTable * hash,
1359+ gchar * error,
1360+ gpointer user_data) {
1361+ UbuntuOneNautilus * uon = UBUNTUONE_NAUTILUS (user_data);
1362+ gchar * path;
1363+
1364+ path = g_hash_table_lookup (hash, "path");
1365+ g_hash_table_remove (uon->public, path);
1366+ ubuntuone_nautilus_reset_emblem (uon, path);
1367+}
1368+
1369 static void ubuntuone_nautilus_share_created (DBusGProxy * proxy,
1370 GHashTable * hash,
1371 gpointer user_data) {
1372 UbuntuOneNautilus * uon = UBUNTUONE_NAUTILUS (user_data);
1373- gchar * path;
1374+ gchar * path, * id;
1375
1376 path = g_hash_table_lookup (hash, "path");
1377+ id = g_hash_table_lookup (hash, "volume_id");
1378+
1379 if (!g_hash_table_lookup (uon->shares, path)) {
1380- gchar *new_share = g_strdup (path);
1381- g_hash_table_insert (uon->shares, new_share, new_share);
1382- ubuntuone_nautilus_reset_emblem (uon, new_share);
1383+ g_hash_table_insert (uon->shares, g_strdup (path), g_strdup (id));
1384+ ubuntuone_nautilus_reset_emblem (uon, path);
1385 }
1386 }
1387
1388@@ -1209,9 +1400,8 @@
1389
1390 path = g_hash_table_lookup (hash, "path");
1391
1392- dialog = gtk_message_dialog_new (GTK_WINDOW (uon->share_cb_data->parent),
1393- GTK_DIALOG_DESTROY_WITH_PARENT |
1394- GTK_DIALOG_NO_SEPARATOR,
1395+ dialog = gtk_message_dialog_new (GTK_WINDOW (uon->cb_data->parent),
1396+ GTK_DIALOG_DESTROY_WITH_PARENT,
1397 GTK_MESSAGE_ERROR,
1398 GTK_BUTTONS_CLOSE,
1399 _("Error creating share."));
1400
1401=== modified file 'tests/__init__.pyc'
1402Binary files tests/__init__.pyc 2010-03-04 16:47:43 +0000 and tests/__init__.pyc 2010-03-10 23:53:15 +0000 differ
1403=== modified file 'tests/oauthdesktop/__init__.pyc'
1404Binary files tests/oauthdesktop/__init__.pyc 2010-03-04 16:47:43 +0000 and tests/oauthdesktop/__init__.pyc 2010-03-10 23:53:15 +0000 differ
1405=== modified file 'tests/oauthdesktop/test_auth.pyc'
1406Binary files tests/oauthdesktop/test_auth.pyc 2010-03-04 16:47:43 +0000 and tests/oauthdesktop/test_auth.pyc 2010-03-10 23:53:15 +0000 differ
1407=== modified file 'tests/oauthdesktop/test_config.pyc'
1408Binary files tests/oauthdesktop/test_config.pyc 2010-03-04 16:47:43 +0000 and tests/oauthdesktop/test_config.pyc 2010-03-10 23:53:15 +0000 differ
1409=== modified file 'tests/oauthdesktop/test_key_acls.pyc'
1410Binary files tests/oauthdesktop/test_key_acls.pyc 2010-03-04 16:47:43 +0000 and tests/oauthdesktop/test_key_acls.pyc 2010-03-10 23:53:15 +0000 differ
1411=== modified file 'tests/oauthdesktop/test_main.pyc'
1412Binary files tests/oauthdesktop/test_main.pyc 2010-03-04 16:47:43 +0000 and tests/oauthdesktop/test_main.pyc 2010-03-10 23:53:15 +0000 differ
1413=== modified file 'tests/syncdaemon/__init__.pyc'
1414Binary files tests/syncdaemon/__init__.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/__init__.pyc 2010-03-10 23:53:15 +0000 differ
1415=== modified file 'tests/syncdaemon/fsm/__init__.pyc'
1416Binary files tests/syncdaemon/fsm/__init__.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/fsm/__init__.pyc 2010-03-10 23:53:15 +0000 differ
1417=== modified file 'tests/syncdaemon/fsm/test_fsm.pyc'
1418Binary files tests/syncdaemon/fsm/test_fsm.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/fsm/test_fsm.pyc 2010-03-10 23:53:15 +0000 differ
1419=== modified file 'tests/syncdaemon/fsm/test_fsm_run.pyc'
1420Binary files tests/syncdaemon/fsm/test_fsm_run.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/fsm/test_fsm_run.pyc 2010-03-10 23:53:15 +0000 differ
1421=== modified file 'tests/syncdaemon/fsm/test_run_hello.pyc'
1422Binary files tests/syncdaemon/fsm/test_run_hello.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/fsm/test_run_hello.pyc 2010-03-10 23:53:15 +0000 differ
1423=== modified file 'tests/syncdaemon/test_action_predicates.pyc'
1424Binary files tests/syncdaemon/test_action_predicates.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_action_predicates.pyc 2010-03-10 23:53:15 +0000 differ
1425=== modified file 'tests/syncdaemon/test_action_queue.py'
1426--- tests/syncdaemon/test_action_queue.py 2010-03-04 16:47:43 +0000
1427+++ tests/syncdaemon/test_action_queue.py 2010-03-10 23:53:15 +0000
1428@@ -24,29 +24,30 @@
1429 import logging
1430 import os
1431 import shutil
1432-from StringIO import StringIO
1433 import unittest
1434 import urllib2
1435 import uuid
1436
1437 import dbus
1438 from dbus.mainloop.glib import DBusGMainLoop
1439+from functools import wraps
1440+from StringIO import StringIO
1441 from twisted.internet import defer, threads, reactor
1442 from twisted.python.failure import DefaultException, Failure
1443-from twisted.trial.unittest import TestCase as TwistedTestCase
1444 from twisted.web import server
1445
1446 from contrib.testing.testcase import (
1447- BaseTwistedTestCase, FakeMain,
1448- MementoHandler, DummyClass
1449+ BaseTwistedTestCase, MementoHandler
1450 )
1451
1452 from ubuntuone.storageprotocol import client, volumes
1453+from ubuntuone.syncdaemon import states
1454 from ubuntuone.syncdaemon.dbus_interface import DBusInterface
1455 from ubuntuone.syncdaemon.main import Main
1456 from ubuntuone.syncdaemon.action_queue import (
1457 ActionQueue, ActionQueueCommand, ChangePublicAccess, CreateUDF,
1458- DeleteVolume, ListDir, ListVolumes, NoisyRequestQueue, RequestQueue
1459+ DeleteVolume, ListDir, ListVolumes, NoisyRequestQueue, RequestQueue,
1460+ Upload
1461 )
1462 from ubuntuone.syncdaemon.event_queue import EventQueue, EVENTS
1463 from ubuntuone.syncdaemon.volume_manager import UDF
1464@@ -60,6 +61,19 @@
1465 NODE = uuid.UUID('FEDCBA98-7654-3211-2345-6789ABCDEF12')
1466 USER = u'Dude'
1467
1468+def fire_and_check(f, deferred, check):
1469+ """Callback a deferred."""
1470+ @wraps(f)
1471+ def inner(*args, **kwargs):
1472+ """Execute f and fire the deferred."""
1473+ result = f(*args, **kwargs)
1474+ error = check()
1475+ if not error:
1476+ deferred.callback(True)
1477+ else:
1478+ deferred.errback(error)
1479+ return result
1480+ return inner
1481
1482 class FakeCommand(object):
1483 """Yet another fake action queue command."""
1484@@ -73,31 +87,83 @@
1485 return True
1486
1487
1488+class FakedEventQueue(EventQueue):
1489+ """Faked event queue."""
1490+
1491+ def __init__(self, fs=None):
1492+ """Initialize a faked event queue."""
1493+ super(FakedEventQueue, self).__init__(fs=fs)
1494+ self.events = []
1495+
1496+ def push(self, event_name, *args, **kwargs):
1497+ """Faked event pushing."""
1498+ self.events.append((event_name, args, kwargs))
1499+ super(FakedEventQueue, self).push(event_name, *args, **kwargs)
1500+
1501+
1502+class FakedVolume(object):
1503+ """Faked volume."""
1504+
1505+
1506+class TestingProtocol(ActionQueue.protocol):
1507+ """Protocol for testing."""
1508+
1509+ def connectionMade(self):
1510+ """connectionMade."""
1511+ ActionQueue.protocol.connectionMade(self)
1512+ # proper event is pushed
1513+ expected = [('SYS_CONNECTION_MADE', (), {})]
1514+ actual = self.factory.event_queue.events
1515+ assert expected == actual, \
1516+ 'events must be %s not %s' % (expected, actual)
1517+
1518+ self.factory.event_queue.events = [] # reset events
1519+ self.testing_deferred.callback(True)
1520+
1521+
1522 class BasicTestCase(BaseTwistedTestCase):
1523 """Basic tests to check ActionQueue."""
1524
1525 def setUp(self):
1526- """
1527- prepare to run the test
1528- """
1529+ """Init."""
1530 BaseTwistedTestCase.setUp(self)
1531+
1532 self.root = self.mktemp('root')
1533+ self.home = self.mktemp('home')
1534+ self.data = self.mktemp('data')
1535 self.shares = self.mktemp('shares')
1536- self.data = self.mktemp('data')
1537 self.partials = self.mktemp('partials')
1538- self.handler = MementoHandler()
1539- self.handler.setLevel(logging.ERROR)
1540+
1541 self.main = Main(root_dir=self.root,
1542 shares_dir=self.shares,
1543 data_dir=self.data,
1544 partials_dir=self.partials,
1545- host='localhost', port=0,
1546+ host='127.0.0.1', port=55555,
1547 dns_srv=False, ssl=False,
1548 disable_ssl_verify=True,
1549 realm='fake.realm',
1550 mark_interval=60,
1551 handshake_timeout=2,
1552 glib_loop=DBusGMainLoop(set_as_default=True))
1553+
1554+ self.action_queue = self.main.action_q
1555+ self.action_queue.connection_timeout=3
1556+ self.action_queue.event_queue.events = []
1557+
1558+ def keep_a_copy(f):
1559+ """Keep a copy of the pushed events."""
1560+ @wraps(f)
1561+ def recording(event_name, *args, **kwargs):
1562+ """Keep a copy of the pushed events."""
1563+ value = (event_name, args, kwargs)
1564+ self.action_queue.event_queue.events.append(value)
1565+ return f(event_name, *args, **kwargs)
1566+ return recording
1567+
1568+ self.main.event_q.push = keep_a_copy(self.main.event_q.push)
1569+
1570+ self.handler = MementoHandler()
1571+ self.handler.setLevel(logging.ERROR)
1572 logging.getLogger('ubuntuone.SyncDaemon').addHandler(self.handler)
1573
1574 # fake local rescan call to not be executed
1575@@ -106,25 +172,24 @@
1576 dbus.service.BusName.__del__ = lambda _: None
1577
1578 def tearDown(self):
1579- """
1580- cleanup after the test
1581- """
1582+ """Cleanup."""
1583 self.main.shutdown()
1584+
1585 shutil.rmtree(self.root)
1586 shutil.rmtree(self.shares)
1587 shutil.rmtree(self.data)
1588 shutil.rmtree(self.partials)
1589+
1590 for record in self.handler.records:
1591 exc_info = getattr(record, 'exc_info', None)
1592 if exc_info is not None:
1593 raise exc_info[0], exc_info[1], exc_info[2]
1594+
1595 BaseTwistedTestCase.tearDown(self)
1596
1597 def test_creation_requires_main(self):
1598 """Main instance is needed at creation time."""
1599- aq = ActionQueue(event_queue=FakedEventQueue(), main=self.main,
1600- host='localhost', port=0, dns_srv=False)
1601- self.assertEquals(self.main, aq.main)
1602+ self.assertEquals(self.main, self.action_queue.main)
1603
1604 def test_content_queue_has_only_one_op_per_node(self):
1605 """
1606@@ -198,82 +263,32 @@
1607 ])
1608
1609
1610-class FactoryBaseTestCase(unittest.TestCase):
1611- """Helper for by-pass Twisted."""
1612-
1613- def setUp(self):
1614- """Init."""
1615- self.factory = ActionQueue(event_queue=FakedEventQueue(), main=None,
1616- host='localhost', port=0, dns_srv=False)
1617- self.factory.connector = DummyClass()
1618-
1619- self.client = self.factory.protocol()
1620- self.client.factory = self.factory
1621- self.client.transport = DummyClass()
1622- self.client.connectionMade() # simulate connection
1623-
1624- self.factory.event_queue.events = [] # reset events
1625-
1626- def tearDown(self):
1627- """Clean up."""
1628- self.factory = None
1629-
1630-
1631-class FakedEventQueue(EventQueue):
1632- """Faked event queue."""
1633-
1634- def __init__(self):
1635- """Initialize a faked event queue."""
1636- # pylint: disable-msg=W0231
1637- self._listeners = []
1638- self.events = []
1639-
1640- def push(self, event_name, *args, **kwargs):
1641- """Faked event pushing."""
1642- self.events.append((event_name, args, kwargs))
1643-
1644-
1645-class FakedVolume(object):
1646- """Faked volume."""
1647-
1648-
1649-class TestingProtocol(ActionQueue.protocol):
1650- """Protocol for testing."""
1651-
1652- def connectionMade(self):
1653- """connectionMade."""
1654- ActionQueue.protocol.connectionMade(self)
1655- # proper event is pushed
1656- assert [('SYS_CONNECTION_MADE', (), {})] == \
1657- self.factory.event_queue.events
1658-
1659- self.factory.event_queue.events = [] # reset events
1660- self.testing_deferred.callback(True)
1661-
1662- def connectionLost(self, reason):
1663- """Handle to avoid Failures as test results."""
1664-
1665-
1666-class FactoryBaseTestCase(TwistedTestCase): #unittest.TestCase):
1667- """Helper for by-pass Twisted."""
1668-
1669- timeout = 2
1670+class FactoryBaseTestCase(BaseTwistedTestCase):
1671+ """Helper for by-pass Twisted."""
1672+
1673+ timeout = 5
1674
1675 def _start_sample_webserver(self):
1676 """Start a web server serving content at its root"""
1677 website = server.Site(None)
1678 webport = reactor.listenTCP(55555, website)
1679+
1680+ transport_class = webport.transport
1681+ def save_an_instance(skt, protocol, addr, sself, s, sreactor):
1682+ self.server_transport = transport_class(skt, protocol, addr, sself,
1683+ s, sreactor)
1684+ return self.server_transport
1685+ webport.transport = save_an_instance
1686+
1687 self.addCleanup(webport.stopListening)
1688- ##server_str = "http://localhost:%d/" % (webport.getHost().port)
1689- ##print '\n===== (fake) server listening at %s' % server_str
1690 return webport
1691
1692 def _connect_factory(self):
1693 """Connect the instance factory."""
1694 self.server = self._start_sample_webserver()
1695
1696- self.factory.protocol = TestingProtocol
1697- orig = self.factory.buildProtocol
1698+ self.action_queue.protocol = TestingProtocol
1699+ orig = self.action_queue.buildProtocol
1700
1701 d = defer.Deferred()
1702 def faked_buildProtocol(*args, **kwargs):
1703@@ -282,15 +297,15 @@
1704 protocol.testing_deferred = d
1705 return protocol
1706
1707- self.factory.buildProtocol = faked_buildProtocol
1708- self.factory.connect()
1709+ self.action_queue.buildProtocol = faked_buildProtocol
1710+ self.action_queue.connect()
1711
1712 return d
1713
1714 def _disconnect_factory(self):
1715 """Disconnect the instance factory."""
1716- if self.factory.client is not None:
1717- orig = self.factory.client.connectionLost
1718+ if self.action_queue.client is not None:
1719+ orig = self.action_queue.client.connectionLost
1720
1721 d = defer.Deferred()
1722 def faked_connectionLost(reason):
1723@@ -298,39 +313,39 @@
1724 orig(reason)
1725 d.callback(True)
1726
1727- self.factory.client.connectionLost = faked_connectionLost
1728+ self.action_queue.client.connectionLost = faked_connectionLost
1729 else:
1730 d = defer.succeed(True)
1731
1732- if self.factory.connect_in_progress:
1733- self.factory.disconnect()
1734+ if self.action_queue.connect_in_progress:
1735+ self.action_queue.disconnect()
1736
1737 return d
1738
1739 def setUp(self):
1740 """Init."""
1741- TwistedTestCase.setUp(self)
1742+ BaseTwistedTestCase.setUp(self)
1743
1744 kwargs = dict(event_queue=FakedEventQueue(), main=None,
1745- host='127.0.0.1', port=55555, dns_srv=False)
1746- self.factory = ActionQueue(**kwargs)
1747+ host='127.0.0.1', port=55555, dns_srv=False,
1748+ connection_timeout=3)
1749+ self.action_queue = ActionQueue(**kwargs)
1750
1751 def tearDown(self):
1752 """Clean up."""
1753- self.factory = None
1754- TwistedTestCase.tearDown(self)
1755+ self.action_queue.event_queue.shutdown()
1756+ self.action_queue = None
1757+ BaseTwistedTestCase.tearDown(self)
1758
1759
1760 class ConnectionTestCase(FactoryBaseTestCase):
1761 """Test TCP/SSL connection mechanism for ActionQueue."""
1762
1763- timeout = 3
1764-
1765 def assert_connection_state_reset(self):
1766 """Test connection state is properly reset."""
1767- self.assertTrue(self.factory.client is None)
1768- self.assertTrue(self.factory.connector is None)
1769- self.assertEquals(False, self.factory.connect_in_progress)
1770+ self.assertTrue(self.action_queue.client is None)
1771+ self.assertTrue(self.action_queue.connector is None)
1772+ self.assertEquals(False, self.action_queue.connect_in_progress)
1773
1774 def test_init(self):
1775 """Test connection init state."""
1776@@ -341,25 +356,25 @@
1777 """Test that double connections are avoided."""
1778 yield self._connect_factory()
1779
1780- assert self.factory.connector is not None
1781- assert self.factory.connect_in_progress == True
1782+ assert self.action_queue.connector is not None
1783+ assert self.action_queue.connect_in_progress == True
1784 # double connect, it returns None instead of a Deferred
1785- result = self.factory.connect()
1786+ result = self.action_queue.connect()
1787 self.assertTrue(result is None, 'not connecting again')
1788
1789 yield self._disconnect_factory()
1790
1791 @defer.inlineCallbacks
1792 def test_disconnect_if_connected(self):
1793- """self.factory.connector.disconnect was called."""
1794+ """self.action_queue.connector.disconnect was called."""
1795 yield self._connect_factory()
1796
1797- self.factory.event_queue.events = [] # cleanup events
1798- assert self.factory.connector.state == 'connected'
1799- self.factory.disconnect()
1800+ self.action_queue.event_queue.events = [] # cleanup events
1801+ assert self.action_queue.connector.state == 'connected'
1802+ self.action_queue.disconnect()
1803
1804 self.assert_connection_state_reset()
1805- self.assertEquals([], self.factory.event_queue.events)
1806+ self.assertEquals([], self.action_queue.event_queue.events)
1807
1808 yield self._disconnect_factory()
1809
1810@@ -368,32 +383,31 @@
1811 """Test clientConnectionFailed.
1812
1813 The connection will not be completed since the server will be down.
1814- So, self.factory.connector will never leave the 'connecting' state.
1815+ So, self.action_queue.connector will never leave the 'connecting' state.
1816 When interrupting the connection attempt, twisted automatically calls
1817- self.factory.clientConnectionFailed.
1818+ self.action_queue.clientConnectionFailed.
1819
1820 """
1821-
1822- self.factory.event_queue.events = []
1823- orig = self.factory.clientConnectionFailed
1824+ self.action_queue.event_queue.events = []
1825+ orig = self.action_queue.clientConnectionFailed
1826
1827 d = defer.Deferred()
1828 def faked_clientConnectionFailed(connector, reason):
1829 """Receive connection failed and check."""
1830- self.factory.deferred.errback = lambda _: None
1831+ self.action_queue.deferred.errback = lambda _: None
1832 orig(connector, reason)
1833 self.assert_connection_state_reset()
1834 self.assertEquals([('SYS_CONNECTION_FAILED', (), {})],
1835- self.factory.event_queue.events)
1836- self.factory.clientConnectionFailed = orig
1837+ self.action_queue.event_queue.events)
1838+ self.action_queue.clientConnectionFailed = orig
1839 d.callback(True)
1840
1841- self.factory.clientConnectionFailed = faked_clientConnectionFailed
1842+ self.action_queue.clientConnectionFailed = faked_clientConnectionFailed
1843 # factory will never finish the connection, server was never started
1844- self.factory.connect()
1845+ self.action_queue.connect()
1846 # stopConnecting() will be called since the connection is in progress
1847- assert self.factory.connector.state == 'connecting'
1848- self.factory.connector.disconnect()
1849+ assert self.action_queue.connector.state == 'connecting'
1850+ self.action_queue.connector.disconnect()
1851
1852 yield d
1853
1854@@ -402,16 +416,15 @@
1855 """Test clientConnectionLost
1856
1857 The connection will be completed successfully.
1858- So, self.factory.connector will be in the 'connected' state.
1859+ So, self.action_queue.connector will be in the 'connected' state.
1860 When disconnecting the connector, twisted automatically calls
1861- self.factory.clientConnectionLost.
1862+ self.action_queue.clientConnectionLost.
1863
1864 """
1865-
1866 yield self._connect_factory()
1867
1868- self.factory.event_queue.events = []
1869- orig = self.factory.clientConnectionLost
1870+ self.action_queue.event_queue.events = []
1871+ orig = self.action_queue.clientConnectionLost
1872
1873 d = defer.Deferred()
1874 def faked_clientConnectionLost(connector, reason):
1875@@ -419,65 +432,231 @@
1876 orig(connector, reason)
1877 self.assert_connection_state_reset()
1878 self.assertEquals([('SYS_CONNECTION_LOST', (), {})],
1879- self.factory.event_queue.events)
1880- self.factory.clientConnectionLost = orig
1881+ self.action_queue.event_queue.events)
1882+ self.action_queue.clientConnectionLost = orig
1883 d.callback(True)
1884
1885- self.factory.clientConnectionLost = faked_clientConnectionLost
1886+ self.action_queue.clientConnectionLost = faked_clientConnectionLost
1887 # loseConnection() will be called since the connection was completed
1888- assert self.factory.connector.state == 'connected'
1889- self.factory.connector.disconnect()
1890- yield d
1891-
1892+ assert self.action_queue.connector.state == 'connected'
1893+ self.action_queue.connector.disconnect()
1894+ yield d
1895+
1896+ yield self._disconnect_factory()
1897+
1898+ @defer.inlineCallbacks
1899+ def test_server_disconnect(self):
1900+ """Test factory's connection when the server goes down."""
1901+
1902+ yield self._connect_factory()
1903+
1904+ self.action_queue.event_queue.events = []
1905+ orig = self.action_queue.clientConnectionLost
1906+
1907+ d = defer.Deferred()
1908+ def faked_connectionLost(*args, **kwargs):
1909+ """Receive connection lost and check."""
1910+ orig(*args, **kwargs)
1911+ self.assert_connection_state_reset()
1912+ self.assertEquals([('SYS_CONNECTION_LOST', (), {})],
1913+ self.action_queue.event_queue.events)
1914+ self.action_queue.clientConnectionLost = orig
1915+ d.callback(True)
1916+
1917+ self.action_queue.clientConnectionLost = faked_connectionLost
1918+ # simulate a server failure!
1919+ yield self.server_transport.loseConnection()
1920+ yield d
1921 yield self._disconnect_factory()
1922
1923 def test_buildProtocol(self):
1924 """Test buildProtocol."""
1925- protocol = self.factory.buildProtocol(addr=None)
1926- self.assertTrue(protocol is self.factory.client)
1927- self.assertTrue(self.factory is self.factory.client.factory)
1928+ protocol = self.action_queue.buildProtocol(addr=None)
1929+ self.assertTrue(protocol is self.action_queue.client)
1930+ self.assertTrue(self.action_queue is self.action_queue.client.factory)
1931
1932 # callbacks are connected
1933 # pylint: disable-msg=W0212
1934- self.assertEquals(self.factory.client._node_state_callback,
1935- self.factory._node_state_callback)
1936- self.assertEquals(self.factory.client._share_change_callback,
1937- self.factory._share_change_callback)
1938- self.assertEquals(self.factory.client._share_answer_callback,
1939- self.factory._share_answer_callback)
1940- self.assertEquals(self.factory.client._free_space_callback,
1941- self.factory._free_space_callback)
1942- self.assertEquals(self.factory.client._account_info_callback,
1943- self.factory._account_info_callback)
1944- self.assertEquals(self.factory.client._volume_created_callback,
1945- self.factory._volume_created_callback)
1946- self.assertEquals(self.factory.client._volume_deleted_callback,
1947- self.factory._volume_deleted_callback)
1948+ self.assertEquals(self.action_queue.client._node_state_callback,
1949+ self.action_queue._node_state_callback)
1950+ self.assertEquals(self.action_queue.client._share_change_callback,
1951+ self.action_queue._share_change_callback)
1952+ self.assertEquals(self.action_queue.client._share_answer_callback,
1953+ self.action_queue._share_answer_callback)
1954+ self.assertEquals(self.action_queue.client._free_space_callback,
1955+ self.action_queue._free_space_callback)
1956+ self.assertEquals(self.action_queue.client._account_info_callback,
1957+ self.action_queue._account_info_callback)
1958+ self.assertEquals(self.action_queue.client._volume_created_callback,
1959+ self.action_queue._volume_created_callback)
1960+ self.assertEquals(self.action_queue.client._volume_deleted_callback,
1961+ self.action_queue._volume_deleted_callback)
1962
1963 @defer.inlineCallbacks
1964 def test_connector_gets_assigned_on_connect(self):
1965 """Test factory's connector gets assigned on connect."""
1966 yield self._connect_factory()
1967
1968- self.assertTrue(self.factory.connector is not None)
1969-
1970- yield self._disconnect_factory()
1971+ self.assertTrue(self.action_queue.connector is not None)
1972+
1973+ yield self._disconnect_factory()
1974+
1975+ @defer.inlineCallbacks
1976+ def test_cleanup_doesnt_disconnect(self):
1977+ """cleanup() doesn't disconnect the factory."""
1978+ yield self._connect_factory()
1979+
1980+ self.action_queue.cleanup()
1981+ self.assertTrue(self.action_queue.connector is not None)
1982+ self.assertEquals(self.action_queue.connector.state, 'connected')
1983+
1984+ yield self._disconnect_factory()
1985+
1986+
1987+class NetworkmanagerTestCase(BasicTestCase, FactoryBaseTestCase):
1988+ """Base test case generating a connected factory."""
1989+
1990+ timeout = 15
1991+
1992+ def fake_answer(self, answer):
1993+ """Push an event faking a server answer."""
1994+ return (lambda *_: self.action_queue.event_queue.push(answer))
1995+
1996+ def setUp(self):
1997+ """Init."""
1998+ BasicTestCase.setUp(self)
1999+
2000+ self.action_queue.local_rescan = \
2001+ self.fake_answer('SYS_LOCAL_RESCAN_DONE')
2002+ self.action_queue.check_version = \
2003+ self.fake_answer('SYS_PROTOCOL_VERSION_OK')
2004+ self.action_queue.set_capabilities = \
2005+ self.fake_answer('SYS_SET_CAPABILITIES_OK')
2006+ self.action_queue.authenticate = \
2007+ self.fake_answer('SYS_AUTH_OK')
2008+ self.action_queue.server_rescan = \
2009+ self.fake_answer('SYS_SERVER_RESCAN_DONE')
2010+
2011+ self.main.start()
2012+
2013+ def tearDown(self):
2014+ """Clean up."""
2015+ BasicTestCase.tearDown(self)
2016+
2017+ @defer.inlineCallbacks
2018+ def test_wrong_disconnect(self):
2019+ """Test factory's connection when SYS_NET_DISCONNECTED."""
2020+
2021+ d1 = self.main.wait_for('SYS_CONNECTION_MADE')
2022+ d2 = self.main.wait_for('SYS_CONNECTION_LOST')
2023+
2024+ self.server = self._start_sample_webserver()
2025+ self.action_queue.event_queue.push('SYS_USER_CONNECT',
2026+ access_token='ble')
2027+ yield d1
2028+
2029+ self.action_queue.event_queue.push('SYS_NET_DISCONNECTED')
2030+ yield d2
2031+
2032+ @defer.inlineCallbacks
2033+ def test_disconnect_twice(self):
2034+ """Test connection when SYS_NET_DISCONNECTED is received twice."""
2035+
2036+ d1 = self.main.wait_for('SYS_CONNECTION_MADE')
2037+ d2 = self.main.wait_for('SYS_CONNECTION_LOST')
2038+
2039+ self.server = self._start_sample_webserver()
2040+
2041+ self.action_queue.event_queue.push('SYS_USER_CONNECT',
2042+ access_token='ble')
2043+ yield d1
2044+
2045+ self.action_queue.event_queue.push('SYS_NET_DISCONNECTED')
2046+ yield d2
2047+
2048+ self.action_queue.event_queue.events = []
2049+ self.action_queue.event_queue.push('SYS_NET_DISCONNECTED')
2050+ self.assertEquals([('SYS_NET_DISCONNECTED', (), {})],
2051+ self.action_queue.event_queue.events,
2052+ 'No new events after a misplaced SYS_NET_DISCONNECTED')
2053+
2054+
2055+ @defer.inlineCallbacks
2056+ def test_net_connected_if_already_connected(self):
2057+ """Test connection when SYS_NET_CONNECTED is received twice."""
2058+
2059+ d1 = self.main.wait_for('SYS_CONNECTION_MADE')
2060+
2061+ self.server = self._start_sample_webserver()
2062+
2063+ self.action_queue.event_queue.push('SYS_USER_CONNECT',
2064+ access_token='ble')
2065+ yield d1
2066+
2067+ self.action_queue.event_queue.events = []
2068+ self.action_queue.event_queue.push('SYS_NET_CONNECTED')
2069+ self.assertEquals([('SYS_NET_CONNECTED', (), {})],
2070+ self.action_queue.event_queue.events,
2071+ 'No new events after a misplaced SYS_NET_CONNECTED')
2072+
2073+ @defer.inlineCallbacks
2074+ def test_messy_mix(self):
2075+ """Test connection when a messy mix of events is received."""
2076+ orig_waiting = states.MAX_WAITING
2077+ states.MAX_WAITING = 1
2078+
2079+ self.action_queue.event_queue.events = []
2080+ self.server = self._start_sample_webserver()
2081+
2082+ conn_made = self.main.wait_for('SYS_CONNECTION_MADE')
2083+ self.action_queue.event_queue.push('SYS_USER_CONNECT',
2084+ access_token='ble')
2085+ yield conn_made
2086+
2087+ events = ['SYS_NET_CONNECTED', 'SYS_NET_DISCONNECTED',
2088+ 'SYS_NET_CONNECTED', 'SYS_NET_CONNECTED',
2089+ 'SYS_NET_DISCONNECTED', 'SYS_NET_DISCONNECTED',
2090+ 'SYS_NET_CONNECTED']
2091+
2092+ for i in events:
2093+ self.action_queue.event_queue.push(i)
2094+
2095+ yield self.main.wait_for_nirvana()
2096+
2097+ expected = ['SYS_NET_CONNECTED', # from the DBus fake NetworkManager
2098+ 'SYS_USER_CONNECT', 'SYS_CONNECTION_MADE',
2099+ 'SYS_NET_CONNECTED', 'SYS_NET_DISCONNECTED',
2100+ 'SYS_CONNECTION_LOST', 'SYS_CONNECTION_RETRY',
2101+ 'SYS_NET_CONNECTED', 'SYS_NET_CONNECTED',
2102+ 'SYS_CONNECTION_MADE', 'SYS_NET_DISCONNECTED',
2103+ 'SYS_NET_DISCONNECTED']
2104+
2105+ avoid = ('SYS_STATE_CHANGED', 'SYS_LOCAL_RESCAN_DONE',
2106+ 'SYS_PROTOCOL_VERSION_OK', 'SYS_SET_CAPABILITIES_OK',
2107+ 'SYS_AUTH_OK', 'SYS_SERVER_RESCAN_DONE')
2108+ actual = [event for (event, args, kwargs) in
2109+ self.action_queue.event_queue.events
2110+ if event not in avoid]
2111+ self.assertEquals(sorted(expected), sorted(actual))
2112+
2113+ states.MAX_WAITING = orig_waiting
2114
2115
2116 class ConnectedBaseTestCase(FactoryBaseTestCase):
2117 """Base test case generating a connected factory."""
2118
2119+ @defer.inlineCallbacks
2120 def setUp(self):
2121 """Init."""
2122 FactoryBaseTestCase.setUp(self)
2123- d = self._connect_factory()
2124- return d
2125+ yield self._connect_factory()
2126+ assert self.action_queue.connector.state == 'connected'
2127
2128+ @defer.inlineCallbacks
2129 def tearDown(self):
2130 """Clean up."""
2131- d = self._disconnect_factory()
2132+ yield self._disconnect_factory()
2133 FactoryBaseTestCase.tearDown(self)
2134- return d
2135
2136
2137 class VolumeManagementTestCase(ConnectedBaseTestCase):
2138@@ -486,16 +665,16 @@
2139 def test_volume_created_push_event(self):
2140 """Volume created callback push proper event."""
2141 volume = FakedVolume()
2142- self.factory._volume_created_callback(volume)
2143+ self.action_queue._volume_created_callback(volume)
2144 self.assertEquals([('SV_VOLUME_CREATED', (), {'volume': volume})],
2145- self.factory.event_queue.events)
2146+ self.action_queue.event_queue.events)
2147
2148 def test_volume_deleted_push_event(self):
2149 """Volume deleted callback push proper event."""
2150 volume_id = VOLUME
2151- self.factory._volume_deleted_callback(volume_id)
2152+ self.action_queue._volume_deleted_callback(volume_id)
2153 self.assertEquals([('SV_VOLUME_DELETED', (), {'volume_id': volume_id})],
2154- self.factory.event_queue.events)
2155+ self.action_queue.event_queue.events)
2156
2157 def test_valid_events(self):
2158 """Volume events are valid in EventQueue."""
2159@@ -520,18 +699,18 @@
2160 """Test volume creation."""
2161 path = PATH
2162 name = NAME
2163- res = self.factory.create_udf(path, name, marker=None)
2164+ res = self.action_queue.create_udf(path, name, marker=None)
2165 self.assertTrue(res is None) # this is what start returns
2166
2167 def test_list_volumes(self):
2168 """Test volume listing."""
2169- res = self.factory.list_volumes()
2170+ res = self.action_queue.list_volumes()
2171 self.assertTrue(res is None) # this is what start returns
2172
2173 def test_delete_volume(self):
2174 """Test volume deletion."""
2175 volume_id = VOLUME
2176- res = self.factory.delete_volume(volume_id)
2177+ res = self.action_queue.delete_volume(volume_id)
2178 self.assertTrue(res is None) # this is what start returns
2179
2180
2181@@ -542,7 +721,7 @@
2182 """Init."""
2183 res = super(CreateUDFTestCase, self).setUp()
2184
2185- request_queue = RequestQueue(name='foo', action_queue=self.factory)
2186+ request_queue = RequestQueue(name='foo', action_queue=self.action_queue)
2187 self.marker = VOLUME
2188 self.command = CreateUDF(request_queue, PATH, NAME, marker=self.marker)
2189
2190@@ -584,7 +763,7 @@
2191
2192 def test_handle_success_push_event(self):
2193 """Test AQ_CREATE_UDF_OK is pushed on success."""
2194- request = client.CreateUDF(self.factory.client, PATH, NAME)
2195+ request = client.CreateUDF(self.action_queue.client, PATH, NAME)
2196 request.volume_id = VOLUME
2197 request.node_id = NODE
2198 res = self.command.handle_success(success=request)
2199@@ -612,7 +791,7 @@
2200 """Init."""
2201 res = super(ListVolumesTestCase, self).setUp()
2202
2203- request_queue = RequestQueue(name='foo', action_queue=self.factory)
2204+ request_queue = RequestQueue(name='foo', action_queue=self.action_queue)
2205 self.command = ListVolumes(request_queue)
2206
2207 return res
2208@@ -645,7 +824,7 @@
2209
2210 def test_handle_success_push_event(self):
2211 """Test AQ_LIST_VOLUMES is pushed on success."""
2212- request = client.ListVolumes(self.factory.client)
2213+ request = client.ListVolumes(self.action_queue.client)
2214 request.volumes = [FakedVolume(), FakedVolume()]
2215 res = self.command.handle_success(success=request)
2216 events = [('AQ_LIST_VOLUMES', (), {'volumes': request.volumes})]
2217@@ -669,7 +848,7 @@
2218 """Init."""
2219 res = super(DeleteVolumeTestCase, self).setUp()
2220
2221- request_queue = RequestQueue(name='foo', action_queue=self.factory)
2222+ request_queue = RequestQueue(name='foo', action_queue=self.action_queue)
2223 self.command = DeleteVolume(request_queue, VOLUME)
2224
2225 return res
2226@@ -707,7 +886,7 @@
2227
2228 def test_handle_success_push_event(self):
2229 """Test AQ_DELETE_VOLUME_OK is pushed on success."""
2230- request = client.DeleteVolume(self.factory.client, volume_id=VOLUME)
2231+ request = client.DeleteVolume(self.action_queue.client, volume_id=VOLUME)
2232 res = self.command.handle_success(success=request)
2233 events = [('AQ_DELETE_VOLUME_OK', (), {'volume_id': VOLUME})]
2234 self.assertEquals(events, self.command.action_queue.event_queue.events)
2235@@ -723,47 +902,24 @@
2236 self.assertTrue(res is None)
2237
2238
2239-class FilterEventsTestCase(BaseTwistedTestCase):
2240+class FilterEventsTestCase(BasicTestCase):
2241 """Tests for event filtering when a volume is not of our interest."""
2242
2243 def setUp(self):
2244 """Init."""
2245- BaseTwistedTestCase.setUp(self)
2246- self.log = logging.getLogger("ubuntuone.SyncDaemon.TEST")
2247- self.log.info("starting test %s.%s", self.__class__.__name__,
2248- self._testMethodName)
2249- self.root_dir = self.mktemp('root_dir')
2250- self.home_dir = self.mktemp('home_dir')
2251- self.data_dir = self.mktemp('data_dir')
2252- self.shares_dir = self.mktemp('shares_dir')
2253- self.partials_dir = self.mktemp('partials_dir')
2254- self.main = FakeMain(self.root_dir, self.shares_dir,
2255- self.data_dir, self.partials_dir)
2256+ BasicTestCase.setUp(self)
2257 self.vm = self.main.vm
2258-
2259- self.action_queue = ActionQueue(FakedEventQueue(), self.main,
2260- host='localhost', port=0,
2261- dns_srv=False)
2262 self.old_home = os.environ.get('HOME', None)
2263- os.environ['HOME'] = self.home_dir
2264-
2265+ os.environ['HOME'] = self.home
2266
2267 def tearDown(self):
2268 """Clean up."""
2269- self.action_queue = None
2270-
2271- self.main.shutdown()
2272- self.rmtree(self.root_dir)
2273- self.rmtree(self.data_dir)
2274- self.rmtree(self.shares_dir)
2275- self.log.info("finished test %s.%s", self.__class__.__name__,
2276- self._testMethodName)
2277 if self.old_home is None:
2278 os.environ.pop('HOME')
2279 else:
2280 os.environ['HOME'] = self.old_home
2281
2282- return BaseTwistedTestCase.tearDown(self)
2283+ BasicTestCase.tearDown(self)
2284
2285 @defer.inlineCallbacks
2286 def test_SV_HASH_NEW_is_pushed_for_subscrined_volume(self):
2287@@ -776,8 +932,9 @@
2288 yield self.vm.add_udf(udf)
2289 yield self.vm.subscribe_udf(udf_id)
2290 assert self.vm.udfs[udf_id].subscribed
2291+ self.action_queue.event_queue.events = [] # reset events
2292
2293- kwargs = dict(share_id=udf_id, node_id=None, hash=None)
2294+ kwargs = dict(share_id=udf_id, node_id=NODE, hash=None)
2295 self.action_queue._node_state_callback(**kwargs)
2296 self.assertEquals([('SV_HASH_NEW', (), kwargs)],
2297 self.action_queue.event_queue.events)
2298@@ -794,6 +951,7 @@
2299 yield self.vm.add_udf(udf)
2300 yield self.vm.unsubscribe_udf(udf_id)
2301 assert not self.vm.udfs[udf_id].subscribed
2302+ self.action_queue.event_queue.events = [] # reset events
2303
2304 self.action_queue._node_state_callback(share_id=udf_id,
2305 node_id=None, hash=None)
2306@@ -803,8 +961,9 @@
2307 """SV_HASH_NEW keeps working like before for non-udfs."""
2308 other_id = 'not in udfs'
2309 assert other_id not in self.vm.udfs
2310+ self.action_queue.event_queue.events = [] # reset events
2311
2312- kwargs = dict(share_id=other_id, node_id=None, hash=None)
2313+ kwargs = dict(share_id=other_id, node_id=NODE, hash=None)
2314 self.action_queue._node_state_callback(**kwargs)
2315 self.assertEquals([('SV_HASH_NEW', (), kwargs)],
2316 self.action_queue.event_queue.events)
2317@@ -815,12 +974,12 @@
2318
2319 def setUp(self):
2320 super(ChangePublicAccessTests, self).setUp()
2321- request_queue = RequestQueue(name='foo', action_queue=self.factory)
2322+ request_queue = RequestQueue(name='foo', action_queue=self.action_queue)
2323 self.command = ChangePublicAccess(request_queue, VOLUME, NODE, True)
2324
2325 def test_change_public_access(self):
2326 """Test the change_public_access method.."""
2327- res = self.factory.change_public_access(VOLUME, NODE, True)
2328+ res = self.action_queue.change_public_access(VOLUME, NODE, True)
2329 self.assertTrue(res is None) # this is what start returns
2330
2331 def test_is_action_queue_command(self):
2332@@ -905,11 +1064,11 @@
2333 """Init."""
2334 res = super(ListDirTestCase, self).setUp()
2335
2336- request_queue = RequestQueue(name='FOO', action_queue=self.factory)
2337+ request_queue = RequestQueue(name='FOO', action_queue=self.action_queue)
2338 self.command = ListDir(request_queue, share_id='a_share_id',
2339 node_id='a_node_id', server_hash='a_server_hash',
2340 fileobj_factory=lambda: None)
2341- self.command.start_unqueued()
2342+ self.command.start_unqueued() # create the logger
2343
2344 return res
2345
2346@@ -963,18 +1122,79 @@
2347 self.assertEquals(('share_id', 'node_id'), EVENTS[event])
2348
2349
2350+class UploadTestCase(ConnectedBaseTestCase):
2351+ """Test for Upload ActionQueueCommand."""
2352+
2353+ def setUp(self):
2354+ """Init."""
2355+ res = super(UploadTestCase, self).setUp()
2356+
2357+ request_queue = RequestQueue(name='FOO', action_queue=self.action_queue)
2358+ self.command = Upload(request_queue, share_id='a_share_id',
2359+ node_id='a_node_id', previous_hash='prev_hash',
2360+ hash='yadda', crc32=0, size=0,
2361+ fileobj_factory=lambda: None,
2362+ tempfile_factory=lambda: None)
2363+ self.command.start_unqueued() # create the logger
2364+
2365+ return res
2366+
2367+ def test_handle_success_push_event(self):
2368+ """Test AQ_UPLOAD_FINISHED is pushed on success."""
2369+ self.command.handle_success(None)
2370+ kwargs = dict(share_id='a_share_id', node_id='a_node_id', hash='yadda')
2371+ events = [('AQ_UPLOAD_FINISHED', (), kwargs)]
2372+ self.assertEquals(events, self.command.action_queue.event_queue.events)
2373+
2374+ def test_handle_failure_push_event(self):
2375+ """Test AQ_UPLOAD_ERROR is pushed on failure."""
2376+ msg = 'Something went wrong'
2377+ failure = Failure(DefaultException(msg))
2378+ res = self.command.handle_failure(failure=failure)
2379+ kwargs = dict(share_id='a_share_id', node_id='a_node_id',
2380+ hash='yadda', error=msg)
2381+ events = [('AQ_UPLOAD_ERROR', (), kwargs)]
2382+ self.assertEquals(events, self.command.action_queue.event_queue.events)
2383+ self.assertTrue(res is None)
2384+
2385+ def test_handle_failure_removes_temp_file(self):
2386+ """Test temp file is removed on failure."""
2387+ class TempFile(object): pass
2388+ self.command.tempfile = TempFile()
2389+ self.command.tempfile.name = os.path.join(self.tmpdir, 'remove-me.zip')
2390+ open(self.command.tempfile.name, 'w').close()
2391+ assert os.path.exists(self.command.tempfile.name)
2392+
2393+ msg = 'Something went wrong'
2394+ failure = Failure(DefaultException(msg))
2395+ self.command.handle_failure(failure=failure)
2396+
2397+ self.assertFalse(os.path.exists(self.command.tempfile.name))
2398+
2399+ @defer.inlineCallbacks
2400+ def test_compress_failed_pushes_upload_error(self):
2401+ msg = 'Zip can not be accomplished.'
2402+ error = DefaultException(msg)
2403+ self.action_queue.zip_queue.zip = lambda upload: defer.fail(error)
2404+ yield self.command.run()
2405+ kwargs = dict(share_id='a_share_id', node_id='a_node_id',
2406+ hash='yadda', error=msg)
2407+ events = [('AQ_UPLOAD_ERROR', (), kwargs)]
2408+ self.assertEquals(events, self.command.action_queue.event_queue.events)
2409+
2410+
2411 class RequestQueueManager(FactoryBaseTestCase):
2412 """Test how RequestQueue manages the queues."""
2413
2414 def setUp(self):
2415 FactoryBaseTestCase.setUp(self)
2416
2417- self.queue = self.factory.meta_queue
2418+ self.queue = self.action_queue.meta_queue
2419 self.cmd = FakeCommand()
2420
2421 def _events(self):
2422 """Helper method to see only the events."""
2423- return [x[0] for x in self.factory.event_queue.events]
2424+ return [x[0] for x in self.action_queue.event_queue.events]
2425
2426 def test_empty_gets_one(self):
2427 """Queue and get the event."""
2428@@ -1050,3 +1270,29 @@
2429 self.assertEqual(len(self.queue), 1)
2430 yield self.queue.run()
2431 self.assertEqual(len(self.queue), 0)
2432+
2433+
2434+class SimpleAQTestCase(BasicTestCase):
2435+ """Simple tests for AQ API."""
2436+
2437+ def test_aq_server_rescan(self):
2438+ """Check the API of AQ.server_rescan."""
2439+ self.main.start()
2440+ d = defer.Deferred()
2441+ def get_root(mdid):
2442+ """Fake get_root."""
2443+ d.callback(mdid)
2444+
2445+ class FakeClient(object):
2446+ """Fake Client"""
2447+ def __getattribute__(self, attr):
2448+ """Return a noop."""
2449+ return lambda *args, **kwargs: None
2450+
2451+ self.action_queue.client = FakeClient()
2452+ self.action_queue.get_root = get_root
2453+ self.action_queue.server_rescan('foo', lambda: list())
2454+ def check(result):
2455+ self.assertEquals('foo', result)
2456+ d.addCallback(check)
2457+ return d
2458
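
The reworked BasicTestCase above no longer swaps in a FakedEventQueue for Main; instead it wraps the real queue's push() so every pushed event is also recorded and can be asserted on after the code under test has run. Reduced to a standalone helper (the names here are illustrative, not part of this branch), the pattern is roughly:

    import functools

    def record_pushes(event_queue, log):
        """Wrap event_queue.push so each pushed event is also appended to log."""
        original_push = event_queue.push

        @functools.wraps(original_push)
        def recording(event_name, *args, **kwargs):
            log.append((event_name, args, kwargs))  # keep a copy for assertions
            return original_push(event_name, *args, **kwargs)

        event_queue.push = recording

The connection and NetworkManager tests above rely on exactly this: they exercise the real event flow and then compare the recorded list against the expected sequence of SYS_* events.
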
2459=== modified file 'tests/syncdaemon/test_action_queue.pyc'
2460Binary files tests/syncdaemon/test_action_queue.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_action_queue.pyc 2010-03-10 23:53:15 +0000 differ
2461=== modified file 'tests/syncdaemon/test_config.pyc'
2462Binary files tests/syncdaemon/test_config.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_config.pyc 2010-03-10 23:53:15 +0000 differ
2463=== modified file 'tests/syncdaemon/test_dbus.py'
2464--- tests/syncdaemon/test_dbus.py 2010-03-04 16:47:43 +0000
2465+++ tests/syncdaemon/test_dbus.py 2010-03-10 23:53:15 +0000
2466@@ -63,6 +63,12 @@
2467 """Just succeed."""
2468 return defer.succeed(None)
2469
2470+ def __str__(self):
2471+ name = self.__class__.__name__
2472+ tmpl = name + "(share_id=%s, node_id=%s)"
2473+ return tmpl % (self.share_id,
2474+ self.node_id)
2475+
2476
2477 class DBusInterfaceTests(DBusTwistedTestCase):
2478 """ Basic tests to the objects exposed with D-Bus"""
2479@@ -253,6 +259,31 @@
2480 error_handler=self.error_handler)
2481 return d
2482
2483+ def test_waiting_metadata(self):
2484+ """Test Status.waiting_metadata with fake data in the AQ."""
2485+ # inject the fake data
2486+ self.action_q.meta_queue.waiting.extend([
2487+ FakeCommand("node_a_foo", "node_a_bar"),
2488+ FakeCommand("node_b_foo", "node_b_bar")])
2489+ # OK, testing time
2490+ client = DBusClient(self.bus, '/status', DBUS_IFACE_STATUS_NAME)
2491+ d = defer.Deferred()
2492+ def waiting_handler(result):
2493+ """waiting_metadata reply handler."""
2494+ self.assertEquals(2, len(result))
2495+ # the two injected commands come back in the order they were queued
2496+ node_a, node_b = result
2497+ self.assertEquals(str(FakeCommand("node_a_foo", "node_a_bar")),
2498+ node_a)
2499+ self.assertEquals(str(FakeCommand("node_b_foo", "node_b_bar")),
2500+ node_b)
2501+ d.callback(True)
2502+ client.call_method('waiting_metadata',
2503+ reply_handler=waiting_handler,
2504+ error_handler=self.error_handler)
2505+ return d
2506+
2507+
2508 def test_contq_changed(self):
2509 """Test the Status.ContentQueueChanged signal."""
2510 # prepare the VM so it lies for us
2511@@ -608,6 +639,37 @@
2512 error_handler=self.error_handler)
2513 return d
2514
2515+ def test_create_shares(self):
2516+ """Test share offering to multiple users at once"""
2517+ a_dir = os.path.join(self.root_dir, "a_dir")
2518+ self.fs_manager.create(a_dir, "", is_dir=True)
2519+ self.fs_manager.set_node_id(a_dir, "node_id")
2520+ client = DBusClient(self.bus, '/shares', DBUS_IFACE_SHARES_NAME)
2521+ d = defer.Deferred()
2522+ # helper functions, pylint: disable-msg=C0111
2523+ def fake_create_share(*result):
2524+ # pylint: disable-msg=W0612
2525+ node_id, username, name, access_level, marker = result
2526+ self.assertEquals('node_id', node_id)
2527+ mdobj = self.fs_manager.get_by_path(a_dir)
2528+ self.assertEquals(self.fs_manager.get_abspath("", mdobj.path),
2529+ os.path.join(self.main.root_dir, a_dir))
2530+ self.assertEquals(u'test_user', username[:-1])
2531+ self.assertEquals(u'share_a_dir', name)
2532+ self.assertEquals('View', access_level)
2533+
2534+ self.action_q.create_share = fake_create_share
2535+
2536+ def reply_handler(result):
2537+ d.callback(result)
2538+
2539+ client.call_method('create_shares',
2540+ a_dir, ['test_user1', 'test_user2', 'test_user3'],
2541+ 'share_a_dir', 'View',
2542+ reply_handler=reply_handler,
2543+ error_handler=self.error_handler)
2544+ return d
2545+
2546 def test_query_by_path(self):
2547 """ test that query_by_path method work as expected. """
2548 a_dir = os.path.join(self.root_dir, "a_dir")
2549@@ -1249,16 +1311,24 @@
2550 signal_name='PublicAccessChanged')
2551 self.signal_receivers.add(match)
2552
2553- share_id = uuid.uuid4()
2554- node_id = uuid.uuid4()
2555+ share_id = "share"
2556+ node_id = "node_id"
2557 is_public = True
2558 public_url = 'http://example.com'
2559+
2560+ share_path = os.path.join(self.shares_dir, 'share')
2561+ self.main.vm.add_share(Share(path=share_path, volume_id='share'))
2562+ path = os.path.join(share_path, "foo")
2563+ self.fs_manager.create(path, str(share_id))
2564+ self.fs_manager.set_node_id(path, str(node_id))
2565+
2566 def check(file_info):
2567 """Check the result."""
2568 expected_dict = dict(share_id=str(share_id),
2569 node_id=str(node_id),
2570 is_public=str(is_public),
2571- public_url=public_url)
2572+ public_url=public_url,
2573+ path=path)
2574 self.assertEquals(expected_dict, file_info)
2575 d.addCallback(check)
2576 self.event_q.push('AQ_CHANGE_PUBLIC_ACCESS_OK',
2577@@ -1277,13 +1347,21 @@
2578 access_change_error_handler, signal_name='PublicAccessChangeError')
2579 self.signal_receivers.add(match)
2580
2581- share_id = uuid.uuid4()
2582- node_id = uuid.uuid4()
2583+ share_id = "share"
2584+ node_id = "node_id"
2585 expected_error = 'error message'
2586+
2587+ share_path = os.path.join(self.shares_dir, 'share')
2588+ self.main.vm.add_share(Share(path=share_path, volume_id='share'))
2589+ path = os.path.join(share_path, "foo")
2590+ self.fs_manager.create(path, str(share_id))
2591+ self.fs_manager.set_node_id(path, str(node_id))
2592+
2593 def check((file_info, error)):
2594 """Check the result."""
2595 expected_dict = dict(share_id=str(share_id),
2596- node_id=str(node_id))
2597+ node_id=str(node_id),
2598+ path=path)
2599 self.assertEquals(expected_dict, file_info)
2600 self.assertEquals(expected_error, error)
2601 d.addCallback(check)
2602@@ -1292,6 +1370,26 @@
2603 error=expected_error)
2604 return d
2605
2606+ def test_root_mismatch(self):
2607+ """Test RootMismatch signal."""
2608+ d = defer.Deferred()
2609+ def root_mismatch_handler(root_id, new_root_id):
2610+ """Handler for RootMismatch signal."""
2611+ d.callback((root_id, new_root_id))
2612+
2613+ match = self.bus.add_signal_receiver(root_mismatch_handler,
2614+ signal_name='RootMismatch')
2615+ self.signal_receivers.add(match)
2616+
2617+ def check((root_id, new_root_id)):
2618+ """Check the result."""
2619+ self.assertEquals('root_id', root_id)
2620+ self.assertEquals('another_root_id', new_root_id)
2621+ d.addCallback(check)
2622+ self.event_q.push('SYS_ROOT_RECEIVED', 'root_id')
2623+ self.event_q.push('SYS_ROOT_RECEIVED', 'another_root_id')
2624+ return d
2625+
2626
2627 class TestDBusRestart(DBusTwistedTestCase):
2628 """
2629
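
The D-Bus tests above cover three additions to the syncdaemon interface: Status.waiting_metadata, Shares.create_shares and the RootMismatch signal. As a rough client-side sketch of the first one (the bus and interface names below are assumed from the usual syncdaemon layout; docs/syncdaemon_dbus_api.txt is the authoritative reference):

    import dbus

    # Assumption: syncdaemon is running and exported on the session bus
    # under the 'com.ubuntuone.SyncDaemon' name.
    bus = dbus.SessionBus()
    status = bus.get_object('com.ubuntuone.SyncDaemon', '/status')
    iface = dbus.Interface(status, 'com.ubuntuone.SyncDaemon.Status')
    for op in iface.waiting_metadata():
        # one string per queued command, e.g. "ListDir(share_id=..., node_id=...)"
        print op

Each entry is simply str() of a queued ActionQueueCommand, so the exact text depends on the command's logged_attrs (see the action_queue.py changes further down).
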
2630=== modified file 'tests/syncdaemon/test_dbus.pyc'
2631Binary files tests/syncdaemon/test_dbus.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_dbus.pyc 2010-03-10 23:53:15 +0000 differ
2632=== modified file 'tests/syncdaemon/test_eq_inotify.pyc'
2633Binary files tests/syncdaemon/test_eq_inotify.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_eq_inotify.pyc 2010-03-10 23:53:15 +0000 differ
2634=== modified file 'tests/syncdaemon/test_eventqueue.pyc'
2635Binary files tests/syncdaemon/test_eventqueue.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_eventqueue.pyc 2010-03-10 23:53:15 +0000 differ
2636=== modified file 'tests/syncdaemon/test_eventsnanny.pyc'
2637Binary files tests/syncdaemon/test_eventsnanny.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_eventsnanny.pyc 2010-03-10 23:53:15 +0000 differ
2638=== modified file 'tests/syncdaemon/test_fileshelf.pyc'
2639Binary files tests/syncdaemon/test_fileshelf.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_fileshelf.pyc 2010-03-10 23:53:15 +0000 differ
2640=== modified file 'tests/syncdaemon/test_fsm.pyc'
2641Binary files tests/syncdaemon/test_fsm.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_fsm.pyc 2010-03-10 23:53:15 +0000 differ
2642=== modified file 'tests/syncdaemon/test_hashqueue.pyc'
2643Binary files tests/syncdaemon/test_hashqueue.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_hashqueue.pyc 2010-03-10 23:53:15 +0000 differ
2644=== modified file 'tests/syncdaemon/test_localrescan.pyc'
2645Binary files tests/syncdaemon/test_localrescan.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_localrescan.pyc 2010-03-10 23:53:15 +0000 differ
2646=== modified file 'tests/syncdaemon/test_logger.pyc'
2647Binary files tests/syncdaemon/test_logger.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_logger.pyc 2010-03-10 23:53:15 +0000 differ
2648=== modified file 'tests/syncdaemon/test_main.pyc'
2649Binary files tests/syncdaemon/test_main.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_main.pyc 2010-03-10 23:53:15 +0000 differ
2650=== modified file 'tests/syncdaemon/test_states.pyc'
2651Binary files tests/syncdaemon/test_states.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_states.pyc 2010-03-10 23:53:15 +0000 differ
2652=== modified file 'tests/syncdaemon/test_sync.pyc'
2653Binary files tests/syncdaemon/test_sync.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_sync.pyc 2010-03-10 23:53:15 +0000 differ
2654=== modified file 'tests/syncdaemon/test_tools.py'
2655--- tests/syncdaemon/test_tools.py 2010-03-04 16:47:43 +0000
2656+++ tests/syncdaemon/test_tools.py 2010-03-10 23:53:15 +0000
2657@@ -26,6 +26,7 @@
2658 volume_manager,
2659 states,
2660 )
2661+from tests.syncdaemon.test_dbus import FakeCommand
2662 from contrib.testing.testcase import (
2663 DBusTwistedTestCase,
2664 )
2665@@ -298,17 +299,26 @@
2666 d.addCallbacks(handler, self.fail)
2667 return d
2668
2669+ def test_waiting_metadata(self):
2670+ """Test SyncDaemonTool.waiting_metadata."""
2671+ # inject the fake data
2672+ self.action_q.meta_queue.waiting.extend([
2673+ FakeCommand("node_a_foo", "node_a_bar"),
2674+ FakeCommand("node_b_foo", "node_b_bar")])
2675+ d = self.tool.waiting_metadata()
2676+ def check(result):
2677+ """waiting_metadata reply handler."""
2678+ self.assertEquals(2, len(result))
2679+ # the two injected commands come back in the order they were queued
2680+ node_a, node_b = result
2681+ self.assertEquals(str(FakeCommand("node_a_foo", "node_a_bar")),
2682+ node_a)
2683+ self.assertEquals(str(FakeCommand("node_b_foo", "node_b_bar")),
2684+ node_b)
2685+ return d.addCallback(check)
2686+
2687 def test_waiting_content_schedule_next(self):
2688 """Test waiting_content and schedule_next"""
2689- class FakeCommand(object):
2690- """A fake command"""
2691- def __init__(self, share_id, node_id):
2692- """create it"""
2693- self.share_id = share_id
2694- self.node_id = node_id
2695- def is_runnable(self):
2696- """is runnable"""
2697- return True
2698 path = os.path.join(self.root_dir, "foo")
2699 self.fs_manager.create(path, "")
2700 self.fs_manager.set_node_id(path, "node_id")
2701
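
SyncDaemonTool.waiting_metadata, exercised above, is a Deferred-returning wrapper around that same D-Bus method. A minimal sketch, assuming the tool is built from a connected session bus and run under a main loop integrated with D-Bus (both assumptions, not code from this branch):

    import dbus
    from dbus.mainloop.glib import DBusGMainLoop
    from ubuntuone.syncdaemon.tools import SyncDaemonTool

    DBusGMainLoop(set_as_default=True)
    tool = SyncDaemonTool(dbus.SessionBus())

    def show(commands):
        """Print each queued metadata command, already str()-ed by syncdaemon."""
        for cmd in commands:
            print cmd
        return commands

    d = tool.waiting_metadata()
    d.addCallback(show)
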
2702=== modified file 'tests/syncdaemon/test_tools.pyc'
2703Binary files tests/syncdaemon/test_tools.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_tools.pyc 2010-03-10 23:53:15 +0000 differ
2704=== modified file 'tests/syncdaemon/test_u1fsfsm.pyc'
2705Binary files tests/syncdaemon/test_u1fsfsm.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_u1fsfsm.pyc 2010-03-10 23:53:15 +0000 differ
2706=== modified file 'tests/syncdaemon/test_u1sdtool.py'
2707--- tests/syncdaemon/test_u1sdtool.py 2010-03-04 16:47:43 +0000
2708+++ tests/syncdaemon/test_u1sdtool.py 2010-03-10 23:53:15 +0000
2709@@ -29,7 +29,9 @@
2710 show_folders,
2711 show_state,
2712 show_waiting_content,
2713+ show_waiting_metadata,
2714 )
2715+from tests.syncdaemon.test_dbus import FakeCommand
2716 from twisted.internet import defer
2717
2718
2719@@ -216,17 +218,24 @@
2720 d.addCallback(check)
2721 return d
2722
2723+ def test_show_waiting_metadata(self):
2724+ """Test the output of --waiting-metadata"""
2725+ # inject the fake data
2726+ cmd = FakeCommand("share_id", "node_id")
2727+ cmd1 = FakeCommand("share_id_1", "node_id_1")
2728+ self.action_q.meta_queue.waiting.extend([cmd, cmd1])
2729+ out = StringIO()
2730+ expected = " " + str(cmd) + "\n "+ str(cmd1) + "\n"
2731+ d = self.tool.waiting_metadata()
2732+ d.addCallback(lambda result: show_waiting_metadata(result, out))
2733+ def check(result):
2734+ """check the output"""
2735+ self.assertEquals(out.getvalue(), expected)
2736+ d.addCallback(check)
2737+ return d
2738+
2739 def test_show_waiting_content(self):
2740 """Test the output of --waiting-content"""
2741- class FakeCommand(object):
2742- """A fake command"""
2743- def __init__(self, share_id, node_id):
2744- """create it"""
2745- self.share_id = share_id
2746- self.node_id = node_id
2747- def is_runnable(self):
2748- """is runnable"""
2749- return True
2750 class FakeCommand2(FakeCommand):
2751 pass
2752 path = os.path.join(self.root_dir, "foo")
2753
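
On the command line the same list is exposed through the new --waiting-metadata switch, and show_waiting_metadata() prints one indented str(command) per queued operation. With the two fakes injected in the test above, the expected output is:

      FakeCommand(share_id=share_id, node_id=node_id)
      FakeCommand(share_id=share_id_1, node_id=node_id_1)

Against a live syncdaemon, u1sdtool --waiting-metadata would list the real pending commands instead, for example ListDir entries rendered with their share_id, node_id and server_hash.
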
2754=== modified file 'tests/syncdaemon/test_u1sdtool.pyc'
2755Binary files tests/syncdaemon/test_u1sdtool.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_u1sdtool.pyc 2010-03-10 23:53:15 +0000 differ
2756=== modified file 'tests/syncdaemon/test_vm.pyc'
2757Binary files tests/syncdaemon/test_vm.pyc 2010-03-04 16:47:43 +0000 and tests/syncdaemon/test_vm.pyc 2010-03-10 23:53:15 +0000 differ
2758=== modified file 'tests/test_login.pyc'
2759Binary files tests/test_login.pyc 2010-03-04 16:47:43 +0000 and tests/test_login.pyc 2010-03-10 23:53:15 +0000 differ
2760=== modified file 'tests/test_preferences.py'
2761--- tests/test_preferences.py 2010-03-04 16:47:43 +0000
2762+++ tests/test_preferences.py 2010-03-10 23:53:15 +0000
2763@@ -53,26 +53,24 @@
2764
2765 self.item_id = 999
2766
2767- ex = self.expect(self.item.item_id)
2768- ex.result(self.item_id)
2769- ex.count(0, None)
2770-
2771- ex = self.expect(self.item.secret)
2772- ex.result('oauth_token=access_key&oauth_token_secret=access_secret')
2773- ex.count(0, None)
2774-
2775- def expect_token_query(self):
2776- """Expects the keyring to be queried for a token."""
2777- return self.expect(
2778- self.keyring.find_items_sync(
2779- gnomekeyring.ITEM_GENERIC_SECRET,
2780- {'ubuntuone-realm': 'https://ubuntuone.com',
2781- 'oauth-consumer-key': 'ubuntuone'})
2782- )
2783-
2784- def mock_has_token(self):
2785- """Mocks a cached token in the keyring."""
2786- self.expect_token_query().result([self.item])
2787+ self.item.item_id
2788+ self.mocker.result(self.item_id)
2789+ self.mocker.count(0, None)
2790+
2791+ self.item.secret
2792+ self.mocker.result('oauth_token=access_key'
2793+ '&oauth_token_secret=access_secret')
2794+ self.mocker.count(0, None)
2795+
2796+ self.keyring.find_items_sync(
2797+ None,
2798+ {'ubuntuone-realm': 'https://ubuntuone.com',
2799+ 'oauth-consumer-key': 'ubuntuone'})
2800+ self.mocker.count(0, None)
2801+ self.mocker.result([self.item])
2802+ self.keyring.ITEM_GENERIC_SECRET
2803+ self.mocker.count(0, None)
2804+ self.mocker.result(None)
2805
2806 def tearDown(self):
2807 # collect all signal receivers registered during the test
2808@@ -93,18 +91,110 @@
2809 def test_bw_throttling(self):
2810 """Test that toggling bw throttling works correctly."""
2811 self.mocker.replay()
2812- dialog = self.u1prefs.UbuntuOneDialog()
2813- self.assertTrue(dialog is not None)
2814- dialog.notebook.set_current_page(1)
2815- self.assertFalse(dialog.bw_table.get_property('sensitive'))
2816- dialog.limit_check.set_active(True)
2817- self.assertTrue(dialog.bw_table.get_property('sensitive'))
2818- dialog.destroy()
2819+ widget = self.u1prefs.DevicesWidget(None, keyring=self.keyring)
2820+ try:
2821+ widget.devices = []
2822+ widget.list_devices()
2823+ self.assertFalse(widget.bw_limited,
2824+ "the bandwidth should start out not limited")
2825+ self.assertTrue(widget.bw_chk,
2826+ "the checkbox should be present")
2827+ self.assertFalse(widget.bw_chk.get_active(),
2828+ "the checkbox should start out unchecked")
2829+ self.assertFalse(widget.up_spin.get_property('sensitive') or
2830+ widget.dn_spin.get_property('sensitive'),
2831+ "the spinbuttons should start out unsensitive")
2832+ widget.bw_chk.set_active(True)
2833+ self.assertTrue(widget.bw_chk.get_active(),
2834+ "the checkbox should now be checked")
2835+ self.assertTrue(widget.up_spin.get_property('sensitive') and
2836+ widget.dn_spin.get_property('sensitive'),
2837+ "the spinbuttons should now be sensitive")
2838+ finally:
2839+ widget.destroy()
2840+
2841+ def test_list_devices_fills_devices_list_with_fake_result_when_empty(self):
2842+ self.mocker.replay()
2843+ widget = self.u1prefs.DevicesWidget(None, keyring=self.keyring)
2844+ try:
2845+ widget.devices = []
2846+ widget.list_devices()
2847+ # the devices list is no longer empty
2848+ self.assertTrue(widget.devices)
2849+ # it has 'fake' data (referring to the local machine)
2850+ self.assertTrue('FAKE' in widget.devices[0])
2851+ finally:
2852+ widget.destroy()
2853+
2854+ def test_list_devices_shows_devices_list(self):
2855+ self.mocker.replay()
2856+ widget = self.u1prefs.DevicesWidget(None, keyring=self.keyring)
2857+ try:
2858+ widget.devices = []
2859+ widget.list_devices()
2860+ # fake data now in devices
2861+ interesting = []
2862+ for i in widget.get_children():
2863+ clsname = i.__class__.__name__
2864+ if clsname == 'Image':
2865+ interesting.append((clsname, i.get_icon_name()[0]))
2866+ if clsname in ('Label', 'Button', 'CheckButton'):
2867+ interesting.append((clsname, i.get_label()))
2868+ # check there is an image of a computer in there
2869+ self.assertTrue(('Image', 'computer') in interesting)
2870+ # check a placeholder for the local machine description is there
2871+ self.assertTrue(('Label', '<LOCAL MACHINE>') in interesting)
2872+ # check the bw limitation stuff is there
2873+ self.assertTrue(('CheckButton', '_Limit Bandwidth Usage')
2874+ in interesting)
2875+ self.assertTrue(('Label', 'Maximum _download speed (KB/s):')
2876+ in interesting)
2877+ self.assertTrue(('Label', 'Maximum _upload speed (KB/s):')
2878+ in interesting)
2879+ # check the 'Remove' button is *not* there
2880+ self.assertTrue(('Button', 'Remove') not in interesting)
2881+ finally:
2882+ widget.destroy()
2883+
2884+ def test_list_devices_shows_real_devices_list(self):
2885+ self.mocker.replay()
2886+ widget = self.u1prefs.DevicesWidget(None, keyring=self.keyring)
2887+ try:
2888+ widget.devices = [{'kind': 'Computer',
2889+ 'description': 'xyzzy',
2890+ 'token': 'blah'},
2891+ {'kind': 'Phone',
2892+ 'description': 'quux',
2893+ 'token': '1234'}]
2894+ widget.list_devices()
2895+ # fake data now in devices
2896+ interesting = []
2897+ for i in widget.get_children():
2898+ clsname = i.__class__.__name__
2899+ if clsname == 'Image':
2900+ interesting.append((clsname, i.get_icon_name()[0]))
2901+ if clsname in ('Label', 'Button', 'CheckButton'):
2902+ interesting.append((clsname, i.get_label()))
2903+ # check there is an image of a computer in there
2904+ self.assertTrue(('Image', 'computer') in interesting)
2905+ # and of a phone
2906+ self.assertTrue(('Image', 'phone') in interesting)
2907+ # check a label of the local machine description is there
2908+ self.assertTrue(('Label', 'xyzzy') in interesting)
2909+ # check the bw limitation stuff is not there (no local machine)
2910+ self.assertTrue(('CheckButton', '_Limit Bandwidth Usage')
2911+ not in interesting)
2912+ self.assertTrue(('Label', 'Download (kB/s):') not in interesting)
2913+ self.assertTrue(('Label', 'Upload (kB/s):') not in interesting)
2914+ # check the 'Remove' button is there
2915+ self.assertTrue(('Button', 'Remove') in interesting)
2916+ finally:
2917+ widget.destroy()
2918
2919 def test_quota_display(self):
2920 """Test that quota display works correctly."""
2921 self.mocker.replay()
2922- dialog = self.u1prefs.UbuntuOneDialog()
2923+ dialog = self.u1prefs.UbuntuOneDialog(keyring=self.keyring)
2924 self.assertTrue(dialog is not None)
2925 self.assertEqual(dialog.usage_graph.get_fraction(), 0.0)
2926 dialog.update_quota_display(1024, 2048)
2927@@ -113,11 +203,6 @@
2928
2929 def test_request_quota_info(self):
2930 """Test that we can request the quota info properly."""
2931- self.mock_has_token()
2932- dialog = self.u1prefs.UbuntuOneDialog()
2933- self.assertTrue(dialog is not None)
2934- dialog.keyring = self.keyring
2935- self.assertEqual(dialog.usage_graph.get_fraction(), 0.0)
2936 response = { 'status' : '200' }
2937 content = '{"total":2048, "used":1024}'
2938 client = self.mocker.mock()
2939@@ -125,16 +210,15 @@
2940 self.expect(client.request('https://one.ubuntu.com/api/quota/',
2941 'GET', KWARGS)).result((response, content))
2942 self.mocker.replay()
2943+ dialog = self.u1prefs.UbuntuOneDialog(keyring=self.keyring)
2944+ self.assertTrue(dialog is not None)
2945+ self.assertEqual(dialog.usage_graph.get_fraction(), 0.0)
2946 dialog.request_quota_info()
2947 self.assertEqual(dialog.usage_graph.get_fraction(), 0.5)
2948 dialog.destroy()
2949
2950 def test_request_account_info(self):
2951 """Test that we can request the account info properly."""
2952- self.mock_has_token()
2953- dialog = self.u1prefs.UbuntuOneDialog()
2954- self.assertTrue(dialog is not None)
2955- dialog.keyring = self.keyring
2956 response = { 'status' : '200' }
2957 content = '''{"username": "ubuntuone", "nickname": "Ubuntu One",
2958 "email": "uone@example.com"}
2959@@ -144,6 +228,8 @@
2960 self.expect(client.request('https://one.ubuntu.com/api/account/',
2961 'GET', KWARGS)).result((response, content))
2962 self.mocker.replay()
2963+ dialog = self.u1prefs.UbuntuOneDialog(keyring=self.keyring)
2964+ self.assertTrue(dialog is not None)
2965 dialog.request_account_info()
2966 self.assertEqual(dialog.name_label.get_text(), 'Ubuntu One')
2967 self.assertEqual(dialog.user_label.get_text(), 'ubuntuone')
2968@@ -152,13 +238,14 @@
2969
2970 def test_toggle_bookmarks(self):
2971 """Test toggling the bookmarks service on/off."""
2972- dialog = self.u1prefs.UbuntuOneDialog()
2973+ toggle_db_sync = self.mocker.mock()
2974+ self.expect(toggle_db_sync('bookmarks', False))
2975+ self.expect(toggle_db_sync('bookmarks', True))
2976+ self.expect(toggle_db_sync('bookmarks', False))
2977+ self.mocker.replay()
2978+ dialog = self.u1prefs.UbuntuOneDialog(keyring=self.keyring)
2979 self.assertTrue(dialog is not None)
2980- dialog.toggle_db_sync = self.mocker.mock()
2981- self.expect(dialog.toggle_db_sync('bookmarks', False))
2982- self.expect(dialog.toggle_db_sync('bookmarks', True))
2983- self.expect(dialog.toggle_db_sync('bookmarks', False))
2984- self.mocker.replay()
2985+ dialog.toggle_db_sync = toggle_db_sync
2986 dialog.bookmarks_check.set_active(True)
2987 self.assertTrue(dialog.bookmarks_check.get_active())
2988 dialog.bookmarks_check.set_active(False)
2989@@ -168,13 +255,14 @@
2990
2991 def test_toggle_contacts(self):
2992 """Test toggling the contacts service on/off."""
2993- dialog = self.u1prefs.UbuntuOneDialog()
2994+ toggle_db_sync = self.mocker.mock()
2995+ self.expect(toggle_db_sync('contacts', False))
2996+ self.expect(toggle_db_sync('contacts', True))
2997+ self.expect(toggle_db_sync('contacts', False))
2998+ self.mocker.replay()
2999+ dialog = self.u1prefs.UbuntuOneDialog(keyring=self.keyring)
3000 self.assertTrue(dialog is not None)
3001- dialog.toggle_db_sync = self.mocker.mock()
3002- self.expect(dialog.toggle_db_sync('contacts', False))
3003- self.expect(dialog.toggle_db_sync('contacts', True))
3004- self.expect(dialog.toggle_db_sync('contacts', False))
3005- self.mocker.replay()
3006+ dialog.toggle_db_sync = toggle_db_sync
3007 dialog.abook_check.set_active(True)
3008 self.assertTrue(dialog.abook_check.get_active())
3009 dialog.abook_check.set_active(False)
3010@@ -184,9 +272,9 @@
3011
3012 def test_toggle_files(self):
3013 """Test toggling the files service on/off."""
3014- dialog = self.u1prefs.UbuntuOneDialog()
3015+ self.mocker.replay()
3016+ dialog = self.u1prefs.UbuntuOneDialog(keyring=self.keyring)
3017 self.assertTrue(dialog is not None)
3018- self.mocker.replay()
3019 dialog.files_check.set_active(True)
3020 self.assertTrue(dialog.files_check.get_active())
3021 dialog.files_check.set_active(False)
3022@@ -196,9 +284,9 @@
3023
3024 def test_toggle_files_and_music(self):
3025 """Test toggling the files and music services on/off."""
3026- dialog = self.u1prefs.UbuntuOneDialog()
3027+ self.mocker.replay()
3028+ dialog = self.u1prefs.UbuntuOneDialog(keyring=self.keyring)
3029 self.assertTrue(dialog is not None)
3030- self.mocker.replay()
3031 dialog.files_check.set_active(False)
3032 self.assertFalse(dialog.files_check.get_active())
3033 self.assertFalse(dialog.music_check.props.sensitive)
3034
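
The reworked preferences tests lean on mocker's record/replay cycle: expectations are recorded on the mocked keyring before mocker.replay(), and only then is the dialog or DevicesWidget built. Stripped of the GTK details, the idiom looks like this (a generic sketch, not code from this branch):

    from mocker import Mocker

    mocker = Mocker()
    keyring = mocker.mock()

    # record phase: declare the call, its canned result, and how often it
    # may happen (0 to unlimited times)
    keyring.find_items_sync(None, {'oauth-consumer-key': 'ubuntuone'})
    mocker.result(['a-fake-item'])
    mocker.count(0, None)

    mocker.replay()   # from here on the mock only replays what was recorded
    items = keyring.find_items_sync(None, {'oauth-consumer-key': 'ubuntuone'})
    assert items == ['a-fake-item']

    mocker.restore()
    mocker.verify()

Creating the widget only after replay() is what lets the shared setUp expectations (item_id, secret, find_items_sync) serve every test in the class.
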
3035=== modified file 'tests/test_preferences.pyc'
3036Binary files tests/test_preferences.pyc 2010-03-04 16:47:43 +0000 and tests/test_preferences.pyc 2010-03-10 23:53:15 +0000 differ
3037=== modified file 'tests/u1sync/__init__.pyc'
3038Binary files tests/u1sync/__init__.pyc 2010-03-04 16:47:43 +0000 and tests/u1sync/__init__.pyc 2010-03-10 23:53:15 +0000 differ
3039=== modified file 'tests/u1sync/test_init.pyc'
3040Binary files tests/u1sync/test_init.pyc 2010-03-04 16:47:43 +0000 and tests/u1sync/test_init.pyc 2010-03-10 23:53:15 +0000 differ
3041=== modified file 'tests/u1sync/test_merge.pyc'
3042Binary files tests/u1sync/test_merge.pyc 2010-03-04 16:47:43 +0000 and tests/u1sync/test_merge.pyc 2010-03-10 23:53:15 +0000 differ
3043=== modified file 'ubuntuone/syncdaemon/action_queue.py'
3044--- ubuntuone/syncdaemon/action_queue.py 2010-03-04 16:47:43 +0000
3045+++ ubuntuone/syncdaemon/action_queue.py 2010-03-10 23:53:15 +0000
3046@@ -187,13 +187,13 @@
3047 factory = None
3048
3049 def connectionMade(self):
3050- """Called when a new connection is made."""
3051+ """A new connection was made."""
3052+ self.log.info('Connection made.')
3053 self.factory.event_queue.push('SYS_CONNECTION_MADE')
3054
3055- def disconnect(self):
3056- """Close down the sockets."""
3057- if self.transport is not None:
3058- self.transport.loseConnection()
3059+ def connectionLost(self, reason):
3060+ """The connection was lost."""
3061+ self.log.info('Connection lost, reason: %s.', reason)
3062
3063
3064 class Marker(str):
3065@@ -694,11 +694,7 @@
3066 self.token = access_token
3067
3068 def cleanup(self):
3069- """
3070- Cancel, clean up, and reschedule things that were in progress
3071- when a disconnection happened
3072- """
3073- self.disconnect()
3074+ """Cancel, clean up, and reschedule things that were in progress."""
3075 self.meta_queue.cleanup_and_retry()
3076 self.content_queue.cleanup_and_retry()
3077
3078@@ -848,7 +844,8 @@
3079
3080 def startedConnecting(self, connector):
3081 """Called when a connection has been started."""
3082- logger.info('Connection started.')
3083+ logger.info('Connection started to host %s, port %s.',
3084+ self.host, self.port)
3085
3086 def disconnect(self):
3087 """Disconnect the client.
3088@@ -986,10 +983,10 @@
3089 self.deferred.callback(client)
3090
3091 @defer.inlineCallbacks
3092- def server_rescan(self, data_gen):
3093+ def server_rescan(self, root_mdid, data_gen):
3094 """Do the server rescan."""
3095 client = self.client
3096- yield self.get_root(object())
3097+ yield self.get_root(root_mdid)
3098 if client is not self.client:
3099 return
3100 data = data_gen()
3101@@ -1422,6 +1419,17 @@
3102 """Returns the action queue."""
3103 return self._queue.action_queue
3104
3105+ def __str__(self, str_attrs=None):
3106+ """Return a str representation of the instance."""
3107+ if str_attrs is None:
3108+ str_attrs = self.logged_attrs
3109+ name = self.__class__.__name__
3110+ if len(str_attrs) == 0:
3111+ return name
3112+ attrs = [str(attr) + '=' + str(getattr(self, attr, None) or 'None') \
3113+ for attr in str_attrs]
3114+ return ''.join([name, '(', ', '.join([attr for attr in attrs]), ')'])
3115+
3116
3117 class WaitForCondition(ActionQueueCommand):
3118 """A command which waits for some condition to be satisfied."""
3119@@ -2260,6 +2268,11 @@
3120
3121 is_dir = True
3122
3123+ def __str__(self):
3124+ """Return a str representation of the instance."""
3125+ to_show = ('share_id', 'node_id', 'server_hash')
3126+ return super(ListDir, self).__str__(str_attrs=to_show)
3127+
3128
3129 class Download(GetContentMixin, ActionQueueCommand):
3130 """Get the contents of a file."""
3131
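The new ActionQueueCommand.__str__ (plus the ListDir override) is what the meta-queue ultimately exposes through waiting_metadata: each queued command renders as ClassName(attr=value, ...). A stand-alone sketch of that formatting, using an invented FakeCommand in place of the real command classes:

    class FakeCommand(object):
        # Illustrative stand-in for an ActionQueueCommand subclass.
        logged_attrs = ('share_id', 'node_id')

        def __init__(self, share_id, node_id):
            self.share_id = share_id
            self.node_id = node_id

        def __str__(self, str_attrs=None):
            if str_attrs is None:
                str_attrs = self.logged_attrs
            name = self.__class__.__name__
            if not str_attrs:
                return name
            attrs = ['%s=%s' % (a, getattr(self, a, None) or 'None')
                     for a in str_attrs]
            return '%s(%s)' % (name, ', '.join(attrs))

    print(FakeCommand('a_share', 'a_node'))   # FakeCommand(share_id=a_share, node_id=a_node)
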
3132=== modified file 'ubuntuone/syncdaemon/dbus_interface.py'
3133--- ubuntuone/syncdaemon/dbus_interface.py 2010-03-04 16:47:43 +0000
3134+++ ubuntuone/syncdaemon/dbus_interface.py 2010-03-10 23:53:15 +0000
3135@@ -191,6 +191,15 @@
3136 current_downloads.append(entry)
3137 return current_downloads
3138
3139+ @dbus.service.method(DBUS_IFACE_STATUS_NAME, out_signature='as')
3140+ def waiting_metadata(self):
3141+ """Return a list of the operations in the meta-queue."""
3142+ logger.debug('called waiting_metadata')
3143+ waiting_metadata = []
3144+ for cmd in self.action_queue.meta_queue.waiting:
3145+ waiting_metadata.append(str(cmd))
3146+ return waiting_metadata
3147+
3148 @dbus.service.method(DBUS_IFACE_STATUS_NAME, out_signature='aa{ss}')
3149 def waiting_content(self):
3150 """
3151@@ -327,7 +336,7 @@
3152 def emit_status_changed(self, state):
3153 """Emits the signal."""
3154 state_dict = {'name':state.name,
3155- 'description':self.bool_str(state.description),
3156+ 'description':state.description,
3157 'is_error':self.bool_str(state.is_error),
3158 'is_connected':self.bool_str(state.is_connected),
3159 'is_online':self.bool_str(state.is_online)}
3160@@ -678,6 +687,11 @@
3161 self.dbus_iface.public_files.emit_public_access_change_error(
3162 share_id, node_id, error)
3163
3164+ def handle_SYS_ROOT_MISMATCH(self, root_id, new_root_id):
3165+ """Handle the SYS_ROOT_MISMATCH event."""
3166+ self.handle_default('SYS_ROOT_MISMATCH', root_id, new_root_id)
3167+ self.dbus_iface.sync.emit_root_mismatch(root_id, new_root_id)
3168+
3169
3170 class SyncDaemon(DBusExposedObject):
3171 """ The Daemon dbus interface. """
3172@@ -745,6 +759,16 @@
3173 reply_handler()
3174 self.dbus_iface.quit()
3175
3176+ @dbus.service.signal(DBUS_IFACE_SYNC_NAME,
3177+ signature='ss')
3178+ def RootMismatch(self, root_id, new_root_id):
3179+ """RootMismatch signal, the user connected with a different account."""
3180+ pass
3181+
3182+ def emit_root_mismatch(self, root_id, new_root_id):
3183+ """Emit RootMismatch signal."""
3184+ self.RootMismatch(root_id, new_root_id)
3185+
3186
3187 class FileSystem(DBusExposedObject):
3188 """ A dbus interface to the FileSystem Manager. """
3189@@ -919,6 +943,21 @@
3190 raise ValueError("path '%r' does not exist" % path)
3191 self.vm.create_share(path, username, name, access_level)
3192
3193+ @dbus.service.method(DBUS_IFACE_SHARES_NAME,
3194+ in_signature='sasss', out_signature='')
3195+ def create_shares(self, path, usernames, name, access_level):
3196+ """Share a subtree with several users at once.
3197+
3198+ @param path: that path to share (the root of the subtree)
3199+ @param usernames: the user names to offer the share to
3200+ @param name: the name of the share
3201+ @param access_level: 'View' or 'Modify'
3202+ """
3203+ logger.debug('create shares: %r, %r, %r, %r',
3204+ path, usernames, name, access_level)
3205+ for user in usernames:
3206+ self.create_share(path, user, name, access_level)
3207+
3208 @dbus.service.signal(DBUS_IFACE_SHARES_NAME,
3209 signature='a{ss}')
3210 def ShareCreated(self, share_info):
3211@@ -1010,6 +1049,7 @@
3212 configured.
3213 The values are bytes/second
3214 """
3215+ logger.debug("called get_throttling_limits")
3216 try:
3217 aq = self.dbus_iface.action_queue
3218 download = -1
3219@@ -1037,6 +1077,7 @@
3220 def set_throttling_limits(self, download, upload,
3221 reply_handler=None, error_handler=None):
3222 """Set the read and write limits. The expected values are bytes/sec."""
3223+ logger.debug("called set_throttling_limits")
3224 try:
3225 # modify and save the config file
3226 user_config = config.get_user_config()
3227@@ -1349,11 +1390,19 @@
3228 """Emit the PublicAccessChanged signal."""
3229 share_id = str(share_id) if share_id else ''
3230 node_id = str(node_id)
3231+ try:
3232+ relpath = self.fs.get_by_node_id(share_id,
3233+ node_id).path
3234+ except KeyError:
3235+ pass
3236+ else:
3237+ path=self.fs.get_abspath(share_id, relpath)
3238 self.PublicAccessChanged(dict(
3239 share_id=str(share_id) if share_id else '',
3240 node_id=str(node_id),
3241 is_public=self.bool_str(is_public),
3242- public_url=public_url if public_url else ''))
3243+ public_url=public_url if public_url else '',
3244+ path=path))
3245
3246 @dbus.service.signal(DBUS_IFACE_PUBLIC_FILES_NAME,
3247 signature='a{ss}')
3248@@ -1362,9 +1411,17 @@
3249
3250 def emit_public_access_change_error(self, share_id, node_id, error):
3251 """Emit the PublicAccessChangeError signal."""
3252+ try:
3253+ relpath = self.fs.get_by_node_id(share_id,
3254+ node_id).path
3255+ except KeyError:
3256+ pass
3257+ else:
3258+ path=self.fs.get_abspath(share_id, relpath)
3259 self.PublicAccessChangeError(dict(
3260 share_id=str(share_id) if share_id else '',
3261- node_id=str(node_id)), str(error))
3262+ node_id=str(node_id),
3263+ path=path), str(error))
3264
3265 @dbus.service.signal(DBUS_IFACE_PUBLIC_FILES_NAME,
3266 signature='a{ss}s')
3267
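Taken together, these hunks add three things to the D-Bus surface: waiting_metadata on the Status object, create_shares on the Shares object (which simply fans out to create_share per username), and the RootMismatch signal emitted from the new SYS_ROOT_MISMATCH handler. One small caveat: in emit_public_access_changed and emit_public_access_change_error, path is only bound in the else branch, so a lookup that raises KeyError would leave it undefined when the signal dict is built. A rough client-side sketch of the new methods; the bus and interface names are assumed from the DBUS_IFACE_* constants and should be checked against dbus_interface.py:

    import dbus

    bus = dbus.SessionBus()

    # waiting_metadata(): the str() of every command sitting in the meta-queue.
    status = bus.get_object('com.ubuntuone.SyncDaemon', '/status')
    for op in status.waiting_metadata(
            dbus_interface='com.ubuntuone.SyncDaemon.Status'):
        print(op)

    # create_shares(): one call, several usernames, one share offer each.
    shares = bus.get_object('com.ubuntuone.SyncDaemon', '/shares')
    shares.create_shares('/home/user/Ubuntu One/photos',
                         ['alice', 'bob'], 'photos', 'View',
                         dbus_interface='com.ubuntuone.SyncDaemon.Shares')
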
3268=== modified file 'ubuntuone/syncdaemon/local_rescan.py'
3269--- ubuntuone/syncdaemon/local_rescan.py 2010-03-04 16:47:43 +0000
3270+++ ubuntuone/syncdaemon/local_rescan.py 2010-03-10 23:53:15 +0000
3271@@ -129,7 +129,7 @@
3272 elif not stat.S_ISDIR(stat_result.st_mode):
3273 m = "The path is in disk but it's not a dir: %r" % direct
3274 log_error(m)
3275- raise ValueError("m")
3276+ raise ValueError(m)
3277
3278 # No, 'share' is surely defined; pylint: disable-msg=W0631
3279 self._queue.appendleft((share, direct, False, mdid, udfmode))
3280
3281=== modified file 'ubuntuone/syncdaemon/main.py'
3282--- ubuntuone/syncdaemon/main.py 2010-03-04 16:47:43 +0000
3283+++ ubuntuone/syncdaemon/main.py 2010-03-10 23:53:15 +0000
3284@@ -232,7 +232,9 @@
3285
3286 def server_rescan(self):
3287 """Do the server rescan."""
3288- self.action_q.server_rescan(self.fs.get_data_for_server_rescan)
3289+ mdobj = self.fs.get_by_path(self.root_dir)
3290+ self.action_q.server_rescan(mdobj.mdid,
3291+ self.fs.get_data_for_server_rescan)
3292
3293 def set_oauth_token(self, key, secret):
3294 """ Sets the oauth token """
3295
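main.server_rescan now looks up the root's metadata and passes its mdid down, and ActionQueue.server_rescan uses it as the marker for get_root() instead of a throw-away object(); presumably this is what lets a server-reported root that differs from the stored one surface as SYS_ROOT_MISMATCH and the new RootMismatch signal. Illustrative only, with fs and action_q standing in for the real Main attributes:

    mdobj = fs.get_by_path(root_dir)          # metadata object for the root dir
    action_q.server_rescan(mdobj.mdid,        # real root marker, not object()
                           fs.get_data_for_server_rescan)
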
3296=== modified file 'ubuntuone/syncdaemon/tools.py'
3297--- ubuntuone/syncdaemon/tools.py 2010-03-04 16:47:43 +0000
3298+++ ubuntuone/syncdaemon/tools.py 2010-03-10 23:53:15 +0000
3299@@ -593,6 +593,16 @@
3300 error_handler=d.errback)
3301 return d
3302
3303+ def waiting_metadata(self):
3304+ """Return a description of the waiting metadata queue elements."""
3305+ d = defer.Deferred()
3306+ status_client = DBusClient(self.bus, '/status',
3307+ DBUS_IFACE_STATUS_NAME)
3308+ status_client.call_method('waiting_metadata',
3309+ reply_handler=d.callback,
3310+ error_handler=d.errback)
3311+ return d
3312+
3313 def waiting_content(self):
3314 """Returns the waiting content queue elements."""
3315 d = defer.Deferred()
3316@@ -777,6 +787,12 @@
3317 out.write("\n")
3318
3319
3320+def show_waiting_metadata(waiting_ops, out):
3321+ """Print the waiting_metadata result"""
3322+ for value in waiting_ops:
3323+ out.write(" %s\n" % value)
3324+
3325+
3326 def show_waiting_content(waiting_ops, out):
3327 """Print the waiting_content result"""
3328 value_tpl = "operation='%(operation)s' node_id='%(node)s' " + \
3329@@ -785,6 +801,7 @@
3330 str_value = value_tpl % value
3331 out.write("%s\n" % str_value)
3332
3333+
3334 def show_public_file_info(file_info, out):
3335 """Print the public access information for a file."""
3336 if file_info['is_public']:
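
On the client side the new pieces chain together as: u1sdtool --waiting-metadata -> SyncDaemonTool.waiting_metadata() -> the waiting_metadata D-Bus method -> show_waiting_metadata() for printing. A sketch of driving that path directly, assuming roughly the same glib/D-Bus/Twisted setup that bin/u1sdtool performs:

    import sys
    import dbus
    from dbus.mainloop.glib import DBusGMainLoop
    from twisted.internet import glib2reactor
    glib2reactor.install()
    from twisted.internet import reactor

    from ubuntuone.syncdaemon.tools import SyncDaemonTool, show_waiting_metadata

    DBusGMainLoop(set_as_default=True)
    tool = SyncDaemonTool(dbus.SessionBus())
    d = tool.waiting_metadata()                 # Deferred firing with a list of str(cmd)
    d.addCallback(lambda ops: show_waiting_metadata(ops, sys.stdout))
    d.addBoth(lambda _: reactor.stop())
    reactor.run()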

Subscribers

People subscribed via source and target branches

to all changes: