Merge lp:~ubuntu-branches/ubuntu/precise/ubuntuone-client/precise-201112142106 into lp:ubuntu/precise/ubuntuone-client
- Precise (12.04)
- precise-201112142106
- Merge into precise
Status: | Rejected |
---|---|
Rejected by: | James Westby |
Proposed branch: | lp:~ubuntu-branches/ubuntu/precise/ubuntuone-client/precise-201112142106 |
Merge into: | lp:ubuntu/precise/ubuntuone-client |
Diff against target: |
2539 lines (+2516/-0) (has conflicts) 3 files modified
.pc/03_reset_notify_name.patch/tests/status/test_aggregator.py (+1560/-0) .pc/03_reset_notify_name.patch/ubuntuone/status/aggregator.py (+882/-0) debian/patches/03_reset_notify_name.patch (+74/-0) Conflict adding file .pc/03_reset_notify_name.patch. Moved existing file to .pc/03_reset_notify_name.patch.moved. Conflict adding file debian/patches/03_reset_notify_name.patch. Moved existing file to debian/patches/03_reset_notify_name.patch.moved. |
To merge this branch: | bzr merge lp:~ubuntu-branches/ubuntu/precise/ubuntuone-client/precise-201112142106 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Ubuntu branches | Pending | ||
Review via email: mp+85748@code.launchpad.net |
Commit message
Description of the change
The package importer has detected a possible inconsistency between the package history in the archive and the history in bzr. As the archive is authoritative the importer has made lp:ubuntu/precise/ubuntuone-client reflect what is in the archive and the old bzr branch has been pushed to lp:~ubuntu-branches/ubuntu/precise/ubuntuone-client/precise-201112142106. This merge proposal was created so that an Ubuntu developer can review the situation and perform a merge/upload if necessary. There are three typical cases where this can happen.
1. Where someone pushes a change to bzr and someone else uploads the package without that change. This is the reason that this check is done by the importer. If this appears to be the case then a merge/upload should be done if the changes that were in bzr are still desirable.
2. The importer incorrectly detected the above situation when someone made a change in bzr and then uploaded it.
3. The importer incorrectly detected the above situation when someone just uploaded a package and didn't touch bzr.
If this case doesn't appear to be the first situation then set the status of the merge proposal to "Rejected" and help avoid the problem in future by filing a bug at https://bugs.launchpad.net/udd/+filebug
(this is an automatically generated message)
Unmerged revisions
- 92. By Ken VanDine
-
releasing version 2.0.0-0ubuntu4
Preview Diff
1 | === added directory '.pc/03_reset_notify_name.patch' | |||
2 | === renamed directory '.pc/03_reset_notify_name.patch' => '.pc/03_reset_notify_name.patch.moved' | |||
3 | === added file '.pc/03_reset_notify_name.patch/.timestamp' | |||
4 | === added directory '.pc/03_reset_notify_name.patch/tests' | |||
5 | === added directory '.pc/03_reset_notify_name.patch/tests/status' | |||
6 | === added file '.pc/03_reset_notify_name.patch/tests/status/test_aggregator.py' | |||
7 | --- .pc/03_reset_notify_name.patch/tests/status/test_aggregator.py 1970-01-01 00:00:00 +0000 | |||
8 | +++ .pc/03_reset_notify_name.patch/tests/status/test_aggregator.py 2011-12-14 21:11:28 +0000 | |||
9 | @@ -0,0 +1,1560 @@ | |||
10 | 1 | # tests.status.test_aggregator | ||
11 | 2 | # | ||
12 | 3 | # Author: Alejandro J. Cura <alecu@canonical.com> | ||
13 | 4 | # | ||
14 | 5 | # Copyright 2011 Canonical Ltd. | ||
15 | 6 | # | ||
16 | 7 | # This program is free software: you can redistribute it and/or modify it | ||
17 | 8 | # under the terms of the GNU General Public License version 3, as published | ||
18 | 9 | # by the Free Software Foundation. | ||
19 | 10 | # | ||
20 | 11 | # This program is distributed in the hope that it will be useful, but | ||
21 | 12 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
22 | 13 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
23 | 14 | # PURPOSE. See the GNU General Public License for more details. | ||
24 | 15 | # | ||
25 | 16 | # You should have received a copy of the GNU General Public License along | ||
26 | 17 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
27 | 18 | """Tests for the status events aggregator.""" | ||
28 | 19 | |||
29 | 20 | import logging | ||
30 | 21 | |||
31 | 22 | from twisted.internet import defer | ||
32 | 23 | from twisted.internet.task import Clock | ||
33 | 24 | from twisted.trial.unittest import TestCase | ||
34 | 25 | from mocker import Mocker | ||
35 | 26 | |||
36 | 27 | from contrib.testing.testcase import BaseTwistedTestCase | ||
37 | 28 | from ubuntuone.devtools.handlers import MementoHandler | ||
38 | 29 | from ubuntuone.status import aggregator | ||
39 | 30 | from ubuntuone.status.notification import AbstractNotification | ||
40 | 31 | from ubuntuone.status.messaging import AbstractMessaging | ||
41 | 32 | from ubuntuone.syncdaemon import status_listener | ||
42 | 33 | from ubuntuone.syncdaemon.volume_manager import Share, UDF, Root | ||
43 | 34 | |||
# Sample file names used as payloads throughout these tests.
FILENAME = 'example.txt'
FILENAME2 = 'another_example.mp3'
46 | 37 | |||
47 | 38 | |||
class PatchedClock(Clock):
    """Patch the clock to fix twisted bug #4823.

    Calls scheduled for the same advance() step may otherwise run out of
    order; sorting by scheduled time restores deterministic ordering.
    """

    def advance(self, amount):
        """Sort the pending calls by time before advancing the clock."""
        # key= replaces the Python2-only cmp-based sort; ordering is the
        # same, since cmp(a.getTime(), b.getTime()) sorted by getTime().
        self.calls.sort(key=lambda call: call.getTime())
        Clock.advance(self, amount)
56 | 47 | |||
class TimerTestCase(TestCase):
    """Tests for the Timer class."""

    TIMEOUT = 3.0

    @defer.inlineCallbacks
    def setUp(self):
        """Set up a fresh timer driven by a deterministic clock."""
        yield super(TimerTestCase, self).setUp()
        self.clock = PatchedClock()
        self.timer = aggregator.Timer(delay=3.0, clock=self.clock)

    def test_not_fired_initially(self):
        """Right after creation the timer has not fired."""
        self.assertFalse(self.timer.called)

    def test_fired_after_delay(self):
        """Advancing the clock past the delay fires the timer."""
        self.clock.advance(self.timer.delay)
        self.assertTrue(self.timer.called)

    def test_cleanup_cancels_delay_call(self):
        """cleanup() cancels the pending delayed call."""
        self.timer.cleanup()
        self.assertTrue(self.timer.delay_call.cancelled)

    def test_not_fired_immediately(self):
        """A reset alone does not fire the timer."""
        self.timer.reset()
        self.assertFalse(self.timer.called)

    def test_fired_after_initial_wait(self):
        """After a reset, waiting out the delay fires the timer."""
        self.timer.reset()
        self.clock.advance(self.timer.delay)
        self.assertTrue(self.timer.called)

    def test_not_fired_if_reset_within_delay(self):
        """The timer is not fired if it is reset within the delay."""
        # NOTE(review): delay / 0.8 is *longer* than the delay, so each
        # advance crosses the deadline; presumably delay * 0.8 was meant.
        # Kept as-is to preserve the original behavior -- confirm upstream.
        step = self.timer.delay / 0.8
        self.timer.reset()
        self.clock.advance(step)
        self.timer.reset()
        self.clock.advance(step)
        self.assertTrue(self.timer.called)

    def test_active(self):
        """The timer stays active only until the delay elapses."""
        self.timer.reset()
        self.assertTrue(self.timer.active)
        self.clock.advance(self.timer.delay + 1)
        self.assertFalse(self.timer.active)
108 | 99 | |||
109 | 100 | |||
class DeadlineTimerTestCase(TimerTestCase):
    """Tests for the DeadlineTimer class."""

    DELAY = 0.5

    @defer.inlineCallbacks
    def setUp(self):
        """Replace the plain timer with a deadline timer."""
        yield super(DeadlineTimerTestCase, self).setUp()
        self.clock = PatchedClock()
        self.timer = aggregator.DeadlineTimer(delay=0.5, timeout=3.0,
                                              clock=self.clock)

    def test_fired_if_initial_timeout_exceeded(self):
        """Constant resets cannot postpone the timer past its timeout."""
        step = self.timer.delay * 0.8
        repetitions = int(self.timer.timeout / step) + 1
        for _ in range(repetitions):
            self.timer.reset()
            self.clock.advance(step)
        self.assertTrue(self.timer.called)

    def test_not_fired_twice_if_delay_exceeded(self):
        """Letting the delay lapse repeatedly fires the timer only once."""
        step = self.timer.delay * 1.2
        repetitions = int(self.timer.timeout / step) + 1
        for _ in range(repetitions):
            self.timer.reset()
            self.clock.advance(step)
        self.clock.advance(self.timer.delay)
        self.assertTrue(self.timer.called)

    def test_not_fired_twice_if_timeout_exceeded(self):
        """Exceeding the timeout fires the timer only once."""
        step = self.timer.delay * 0.8
        repetitions = int(self.timer.timeout / step) + 1
        for _ in range(repetitions):
            self.timer.reset()
            self.clock.advance(step)
        self.clock.advance(self.timer.delay)
        self.assertTrue(self.timer.called)

    def test_cleanup_cancels_timeout_call(self):
        """cleanup() also cancels the pending timeout call."""
        self.timer.cleanup()
        self.assertTrue(self.timer.timeout_call.cancelled)
153 | 144 | |||
154 | 145 | |||
class FakeNotification(AbstractNotification):
    """An in-memory notification double that records what it shows."""

    def __init__(self, application_name="fake app"):
        """Initialize this instance."""
        self.notifications_shown = []
        self.notification_switch = None
        self.application_name = application_name
        self.notification = None

    def send_notification(self, title, message, icon=None, append=False):
        """Record the notification unless the switch disables them."""
        switch = self.notification_switch
        if switch is not None and not switch.enabled:
            return
        shown = (title, message, icon, append)
        self.notification = shown
        self.notifications_shown.append(shown)
        # Mimic a notification id: the index of the entry just recorded.
        return len(self.notifications_shown) - 1
173 | 164 | |||
174 | 165 | |||
def FakeNotificationSingleton():
    """Build a notification singleton that logs all notifications shown."""
    shared = FakeNotification()

    def get_instance(notification_switch):
        """Attach the switch and hand back the one shared instance."""
        shared.notification_switch = notification_switch
        return shared

    return get_instance
185 | 176 | |||
186 | 177 | |||
class FakeMessaging(AbstractMessaging):
    """A fake messaging class that records shown and updated messages."""

    def __init__(self):  # pylint: disable=W0231
        self.messages_shown = {}
        self.messages_updated = {}
        self.callbacks = []

    # pylint: disable=R0913
    def show_message(self, sender, callback=None, message_time=None,
                     message_count=None, icon=None):
        """Record a message shown to the user, updating counts on repeats."""
        already_known = sender in self.messages_shown
        if message_count and already_known:
            self.update_count(sender, message_count)
        self.messages_shown[sender] = (
            callback, message_time, message_count, icon)
    # pylint: enable=R0913

    def update_count(self, sender, add_count):
        """Record a count update for an existing indicator."""
        self.messages_updated[sender] = (sender, add_count)

    def _callback(self, indicator, message_time=None):
        """Record a callback invocation."""
        self.callbacks.append((indicator, message_time))

    def create_callback(self):
        """Return the recording callback."""
        return self._callback
216 | 207 | |||
217 | 208 | |||
class FakeStatusAggregator(object):
    """A fake status aggregator.

    Counts discovery/progress queries so tests can check the exact
    message text the aggregator API contract requires.
    """

    def __init__(self, clock):  # pylint: disable=W0613
        """Initialize this instance; the clock is accepted but unused."""
        self.discovered = 0
        self.completed = 0
        self.notification_switch = aggregator.NotificationSwitch()

    def get_discovery_message(self):
        """Count one more discovered file and return the discovery message."""
        self.discovered += 1
        return self.build_discovery_message()

    def build_discovery_message(self):
        """Build the file discovery message."""
        # The original had a stray '""' literal concatenated onto this
        # string; the resulting value is unchanged by removing it.
        return "a lot of files found (%d)." % self.discovered

    def get_progress_message(self):
        """Count one more completed file and return the progress message."""
        self.completed += 1
        return self.build_progress_message()

    def build_progress_message(self):
        """Build the progress message."""
        params = (self.discovered, self.completed)
        return "a lot of files transferring (%d/%d)." % params

    def get_final_status_message(self):
        """Return the final status message."""
        return "a lot of files completed."

    def get_notification(self):
        """Create a new toggleable notification object."""
        return self.notification_switch.get_notification()
253 | 244 | |||
254 | 245 | |||
class ToggleableNotificationTestCase(TestCase):
    """Tests for the ToggleableNotification class."""

    @defer.inlineCallbacks
    def setUp(self):
        """Wire a toggleable notification to a fake backend."""
        yield super(ToggleableNotificationTestCase, self).setUp()
        self.patch(aggregator, "Notification", FakeNotification)
        self.notification_switch = aggregator.NotificationSwitch()
        self.toggleable = self.notification_switch.get_notification()

    def assertShown(self, notification):
        """Assert that the notification was shown."""
        shown = self.toggleable.notification.notifications_shown
        self.assertIn(notification, shown)

    def assertNotShown(self, notification):
        """Assert that the notification was not shown."""
        shown = self.toggleable.notification.notifications_shown
        self.assertNotIn(notification, shown)

    def test_send_notification_passes_thru(self):
        """The send_notification method passes thru."""
        args = (1, 2, 3, 4)
        self.toggleable.send_notification(*args)
        self.assertShown(args)

    def test_send_notification_honored_when_enabled(self):
        """With notifications enabled, sending shows them."""
        self.notification_switch.enable_notifications()
        args = (aggregator.UBUNTUONE_TITLE, "hello", None, False)
        self.toggleable.send_notification(*args)
        self.assertShown(args)

    def test_send_notification_ignored_when_disabled(self):
        """With notifications disabled, sending is a no-op."""
        self.notification_switch.disable_notifications()
        args = (aggregator.UBUNTUONE_TITLE, "hello", None, False)
        self.toggleable.send_notification(*args)
        self.assertNotShown(args)
295 | 286 | |||
296 | 287 | |||
class NotificationSwitchTestCase(TestCase):
    """Tests for the NotificationSwitch class."""

    @defer.inlineCallbacks
    def setUp(self):
        """Create the switch under test."""
        yield super(NotificationSwitchTestCase, self).setUp()
        self.notification_switch = aggregator.NotificationSwitch()

    def test_get_notification(self):
        """A new notification instance points back at the switch."""
        notification = self.notification_switch.get_notification()
        self.assertEqual(self.notification_switch,
                         notification.notification_switch)

    def test_enable_notifications(self):
        """Enabling turns the switch on."""
        self.notification_switch.enable_notifications()
        self.assertTrue(self.notification_switch.enabled)

    def test_disable_notifications(self):
        """Disabling turns the switch off."""
        self.notification_switch.disable_notifications()
        self.assertFalse(self.notification_switch.enabled)
321 | 312 | |||
322 | 313 | |||
class FileDiscoveryBubbleTestCase(TestCase):
    """Test the FileDiscoveryBubble class.

    Drives the bubble's internal state machine with a deterministic
    clock and a fake notification backend.
    """

    @defer.inlineCallbacks
    def setUp(self):
        """Initialize this test instance."""
        yield super(FileDiscoveryBubbleTestCase, self).setUp()
        self.patch(aggregator, "ToggleableNotification",
                   FakeNotificationSingleton())
        self.clock = PatchedClock()
        self.aggregator = FakeStatusAggregator(clock=self.clock)
        self.bubble = aggregator.FileDiscoveryBubble(self.aggregator,
                                                     clock=self.clock)
        self.addCleanup(self.bubble.cleanup)
        # Cache the state machine's timing constants used by the tests.
        fdis = aggregator.FileDiscoveryGatheringState
        self.initial_delay = fdis.initial_delay
        # A step shorter than the initial delay, to stay within it.
        self.smaller_delay = self.initial_delay * 0.8
        self.initial_timeout = fdis.initial_timeout
        fdus = aggregator.FileDiscoveryUpdateState
        self.updates_delay = fdus.updates_delay
        self.updates_timeout = fdus.updates_timeout
        fdss = aggregator.FileDiscoverySleepState
        self.sleep_delay = fdss.sleep_delay

        # Capture the aggregator module's debug log for assertions.
        self.handler = MementoHandler()
        self.handler.setLevel(logging.DEBUG)
        aggregator.logger.addHandler(self.handler)
        aggregator.logger.setLevel(logging.DEBUG)
        self.addCleanup(aggregator.logger.removeHandler, self.handler)

    def get_notifications_shown(self):
        """The list of notifications shown."""
        return self.bubble.notification.notifications_shown

    def test_popup_shows_notification_when_connected(self):
        """The popup callback shows notifications."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        # _popup is private; called directly to bypass the timers.
        self.bubble._popup()
        message = self.aggregator.build_discovery_message()
        notification = (aggregator.UBUNTUONE_TITLE, message, None, False)
        self.assertIn(notification, self.get_notifications_shown())

    def test_popup_shows_notification_after_connected(self):
        """The popup callback shows notifications."""
        # File found first, then connection: no explicit _popup needed.
        self.bubble.new_file_found()
        self.bubble.connection_made()
        message = self.aggregator.build_discovery_message()
        notification = (aggregator.UBUNTUONE_TITLE, message, None, False)
        self.assertIn(notification, self.get_notifications_shown())

    def test_popup_shows_no_notification_before_connection_made(self):
        """The popup shows no notification before the connection is made."""
        self.bubble.new_file_found()
        self.bubble._popup()
        message = self.aggregator.build_discovery_message()
        notification = (aggregator.UBUNTUONE_TITLE, message, None, False)
        self.assertNotIn(notification, self.get_notifications_shown())

    def test_popup_shows_no_notification_after_connection_lost(self):
        """The popup shows no notification after the connection is lost."""
        self.bubble.connection_made()
        self.bubble.connection_lost()
        self.bubble.new_file_found()
        self.bubble._popup()
        message = self.aggregator.build_discovery_message()
        notification = (aggregator.UBUNTUONE_TITLE, message, None, False)
        self.assertNotIn(notification, self.get_notifications_shown())

    def test_notification_is_logged_in_debug(self):
        """The notification is printed in the debug log."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        self.bubble._popup()
        msg = "notification shown: %s" % self.get_notifications_shown()[0][1]
        self.assertTrue(self.handler.check_debug(msg))

    def test_bubble_is_not_shown_initially(self):
        """The bubble is not shown initially."""
        self.bubble.new_file_found()
        self.assertEqual(0, len(self.get_notifications_shown()))

    def test_bubble_is_shown_after_delay(self):
        """The bubble is shown after a delay."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        self.clock.advance(self.initial_delay)
        self.assertEqual(1, len(self.get_notifications_shown()))

    def test_bubble_not_shown_if_more_files_found(self):
        """The bubble is not shown if more files found within delay."""
        self.clock.advance(self.smaller_delay)
        self.bubble.new_file_found()
        self.clock.advance(self.smaller_delay)
        self.assertEqual(0, len(self.get_notifications_shown()))

    def test_bubble_shown_if_timeout_exceeded(self):
        """The bubble is shown if the timeout is exceeded."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        # Keep resetting within the delay until the hard timeout trips.
        count = int(self.initial_timeout / self.smaller_delay) + 1
        for n in range(count):
            self.clock.advance(self.smaller_delay)
            self.bubble.new_file_found()
        self.assertEqual(1, len(self.get_notifications_shown()))

    def test_idle_state(self):
        """The idle state is verified."""
        self.assertEqual(type(self.bubble.state),
                         aggregator.FileDiscoveryIdleState)

    def test_gathering_state(self):
        """The gathering state is set after the first file is found."""
        self.bubble.new_file_found()
        self.assertEqual(type(self.bubble.state),
                         aggregator.FileDiscoveryGatheringState)

    def test_update_state(self):
        """When the gathering state finishes, the update state is started."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        self.clock.advance(self.initial_delay)
        self.assertEqual(type(self.bubble.state),
                         aggregator.FileDiscoveryUpdateState)

    def test_sleeping_state(self):
        """When the update state finishes, the sleeping state is started."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        self.clock.advance(self.initial_delay)
        self.clock.advance(self.updates_timeout)
        self.assertEqual(type(self.bubble.state),
                         aggregator.FileDiscoverySleepState)

    def test_back_to_initial_state(self):
        """When the last state finishes, we return to the idle state."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        # Walk the full cycle: gathering -> updating -> sleeping -> idle.
        self.clock.advance(self.initial_delay)
        self.clock.advance(self.updates_timeout)
        self.clock.advance(self.sleep_delay)
        self.assertEqual(type(self.bubble.state),
                         aggregator.FileDiscoveryIdleState)

    def test_new_files_found_while_updating_not_shown_immediately(self):
        """New files found in the updating state are not shown immediately."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        self.clock.advance(self.initial_delay)
        self.bubble.new_file_found()
        self.assertEqual(1, len(self.get_notifications_shown()))

    def test_new_files_found_while_updating_are_shown_after_a_delay(self):
        """New files found in the updating state are shown after a delay."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        self.clock.advance(self.initial_delay)
        self.bubble.new_file_found()
        self.clock.advance(self.updates_delay)
        self.assertEqual(2, len(self.get_notifications_shown()))

    def test_update_modifies_notification(self):
        """The update callback updates notifications."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        self.bubble._popup()
        self.bubble.new_file_found()
        self.bubble._update()
        message = self.aggregator.build_discovery_message()
        notification = (aggregator.UBUNTUONE_TITLE, message, None, False)
        self.assertIn(notification, self.get_notifications_shown())

    def test_update_is_logged_in_debug(self):
        """The notification is logged when _update is called."""
        self.bubble.connection_made()
        self.bubble.new_file_found()
        self.bubble._popup()
        self.bubble.new_file_found()
        self.bubble._update()
        msg = "notification updated: %s" % self.get_notifications_shown()[1][1]
        self.assertTrue(self.handler.check_debug(msg))
504 | 495 | |||
505 | 496 | |||
class FinalBubbleTestCase(TestCase):
    """Tests for the final status notification bubble."""

    @defer.inlineCallbacks
    def setUp(self):
        """Build a final-status bubble over fake notifications."""
        yield super(FinalBubbleTestCase, self).setUp()
        self.patch(aggregator, "ToggleableNotification",
                   FakeNotificationSingleton())
        self.clock = PatchedClock()
        self.aggregator = FakeStatusAggregator(clock=self.clock)
        self.bubble = aggregator.FinalStatusBubble(self.aggregator)
        self.addCleanup(self.bubble.cleanup)

    def test_notification_not_shown_initially(self):
        """No notification exists before show() is called."""
        self.assertEqual(None, self.bubble.notification)

    def test_show_pops_bubble(self):
        """The show method pops the bubble immediately."""
        self.bubble.show()
        shown = self.bubble.notification.notifications_shown
        self.assertEqual(1, len(shown))
528 | 519 | |||
529 | 520 | |||
class FakeLauncher(object):
    """A stand-in for UbuntuOneLauncher that records progress state."""

    # Class-level defaults: hidden bar, zero progress.
    progress_visible = False
    progress = 0.0

    def show_progressbar(self):
        """Mark the progressbar as visible."""
        self.progress_visible = True

    def hide_progressbar(self):
        """Mark the progressbar as hidden."""
        self.progress_visible = False

    def set_progress(self, value):
        """Remember the latest progressbar value."""
        self.progress = value
547 | 538 | |||
548 | 539 | |||
class FakeInhibitor(object):
    """A fake session inhibitor."""

    def inhibit(self, flags, reason):
        """Inhibit some events with a given reason.

        Only the flags are recorded (the reason is accepted but not
        stored); fires a deferred with this instance, mimicking the
        real inhibitor's asynchronous API.
        """
        self.flags = flags
        return defer.succeed(self)

    def cancel(self):
        """Cancel the inhibition for the current cookie."""
        # Cancellation is modeled by clearing the recorded flags.
        self.flags = 0
        return defer.succeed(self)
561 | 552 | |||
562 | 553 | |||
class ProgressBarTestCase(TestCase):
    """Tests for the progress bar."""

    @defer.inlineCallbacks
    def setUp(self):
        """Set up a ProgressBar wired to fake launcher and inhibitor."""
        yield super(ProgressBarTestCase, self).setUp()
        self.patch(aggregator, "UbuntuOneLauncher", FakeLauncher)
        self.patch(aggregator.session, "Inhibitor", FakeInhibitor)
        self.clock = PatchedClock()
        self.bar = aggregator.ProgressBar(clock=self.clock)
        self.addCleanup(self.bar.cleanup)
        self.timeout_calls = []
        real_timeout = self.bar._timeout

        def recording_timeout(result):
            """Record the current progress, then run the real _timeout."""
            self.timeout_calls.append(self.bar.progress)
            real_timeout(result)

        self.patch(self.bar, "_timeout", recording_timeout)

    def test_launcher_typeerror_nonfatal(self):
        """A TypeError raised by the launcher is not fatal."""
        def raise_typeerror(*args, **kwargs):
            raise TypeError

        self.patch(aggregator, "UbuntuOneLauncher", raise_typeerror)
        aggregator.ProgressBar(clock=self.clock)

    def test_shown_when_progress_made(self):
        """The progress bar becomes visible when progress is made."""
        self.bar.set_progress(0.5)
        self.assertTrue(self.bar.visible)
        self.assertTrue(self.bar.launcher.progress_visible)

    def test_progress_made_updates_counter(self):
        """Making progress updates the internal counter."""
        self.bar.set_progress(0.5)
        self.assertEqual(self.bar.progress, 0.5)

    def test_no_timer_set_initially(self):
        """No timer is set before any progress is made."""
        self.assertEqual(self.bar.timer, None)

    def test_progress_made_sets_timer(self):
        """Making progress sets up a timer."""
        self.bar.set_progress(0.5)
        self.assertNotEqual(self.bar.timer, None)

    def test_cleanup_resets_timer(self):
        """The cleanup method resets the timer."""
        self.bar.set_progress(0.5)
        self.bar.cleanup()
        self.assertEqual(self.bar.timer, None)

    def test_progress_made_not_updated_initially(self):
        """The launcher is not updated before the delay elapses."""
        self.bar.set_progress(0.5)
        self.assertEqual(0, len(self.timeout_calls))
        self.assertEqual(0.0, self.bar.launcher.progress)

    def test_progress_made_updated_after_a_delay(self):
        """The launcher is updated once the delay elapses."""
        self.bar.set_progress(0.5)
        self.clock.advance(aggregator.ProgressBar.updates_delay)
        self.assertIn(0.5, self.timeout_calls)
        self.assertEqual(0.5, self.bar.launcher.progress)

    def test_progress_updates_are_aggregated(self):
        """Updates within a single delay window are coalesced."""
        self.bar.set_progress(0.5)
        self.clock.advance(aggregator.ProgressBar.updates_delay / 2)
        self.bar.set_progress(0.6)
        self.clock.advance(aggregator.ProgressBar.updates_delay / 2)
        self.assertEqual(1, len(self.timeout_calls))

    def test_progress_updates_are_continuous(self):
        """Each full delay window produces its own launcher update."""
        self.bar.set_progress(0.5)
        self.clock.advance(aggregator.ProgressBar.updates_delay)
        self.assertEqual(0.5, self.bar.launcher.progress)
        self.bar.set_progress(0.6)
        self.clock.advance(aggregator.ProgressBar.updates_delay)
        self.assertEqual(0.6, self.bar.launcher.progress)
        self.assertEqual(2, len(self.timeout_calls))

    def test_hidden_when_completed(self):
        """The progress bar is hidden when everything completes."""
        self.bar.set_progress(0.5)
        self.bar.completed()
        self.assertFalse(self.bar.visible)
        self.assertFalse(self.bar.launcher.progress_visible)

    @defer.inlineCallbacks
    def test_progress_made_inhibits_logout_suspend(self):
        """Suspend and logout are inhibited while progress is shown."""
        self.bar.set_progress(0.5)
        expected = aggregator.session.INHIBIT_LOGOUT_SUSPEND
        inhibitor = yield self.bar.inhibitor_defer
        self.assertEqual(inhibitor.flags, expected)

    @defer.inlineCallbacks
    def test_completed_uninhibits_logout_suspend(self):
        """Suspend and logout are allowed again when all has completed."""
        self.bar.set_progress(0.5)
        d = self.bar.inhibitor_defer
        self.bar.completed()
        inhibitor = yield d
        self.assertEqual(inhibitor.flags, 0)
673 | 664 | |||
674 | 665 | |||
class FakeDelayedBuffer(object):
    """A test double that records every status event pushed into it."""

    # Flags flipped when the corresponding notifications arrive.
    timer_reset = False
    processed = False

    def __init__(self, *args, **kwargs):
        """Accept (and ignore) any arguments; start with no events."""
        self.events = []

    def push_event(self, event):
        """Remember the given event."""
        self.events.append(event)

    def reset_threshold_timer(self):
        """Record that the status has changed."""
        self.timer_reset = True

    def process_accumulated(self):
        """Record that the accumulated events were processed."""
        self.processed = True
695 | 686 | |||
696 | 687 | |||
class FakeCommand(object):
    """A fake action-queue command.

    The path doubles as both share_id and node_id so tests can derive
    a predictable (share_id, node_id) key from a single string.
    """

    def __init__(self, path='', deflated_size=10000):
        """Initialize with the given path and optional deflated size.

        deflated_size defaults to 10000 (the original fixture value);
        pass None to emulate a transfer of unknown size instead of
        mutating the attribute after construction.
        """
        self.path = path
        self.share_id = path
        self.node_id = path
        self.deflated_size = deflated_size
705 | 696 | |||
706 | 697 | |||
class FakeVolumeManager(object):
    """A minimal stand-in for the volume manager."""

    def __init__(self):
        """Start with no registered volumes and no root."""
        self.volumes = {}
        self.root = None

    def get_volume(self, volume_id):
        """Return the volume registered under volume_id."""
        return self.volumes[volume_id]
718 | 709 | |||
719 | 710 | |||
class FakeAggregator(object):
    """A fake status aggregator."""

    def __init__(self, clock):
        """Initialize this fake instance.

        NOTE(review): the clock argument is ignored and a fresh
        PatchedClock is used instead -- confirm this is intentional.
        """
        self.queued_commands = set()
        self.notification_switch = aggregator.NotificationSwitch()
        self.connected = False
        self.clock = PatchedClock()
        self.files_uploading = []
        self.files_downloading = []
        self.progress_events = []

    def queue_done(self):
        """The queue completed all operations."""
        self.queued_commands.clear()

    def get_notification(self):
        """Return a new toggleable notification object."""
        return self.notification_switch.get_notification()

    def download_started(self, command):
        """Track a download that just started."""
        self.files_downloading.append(command)
        self.queued_commands.add(command)

    def download_finished(self, command):
        """Stop tracking a download that just finished."""
        if command in self.files_downloading:
            self.files_downloading.remove(command)
        self.queued_commands.discard(command)

    def upload_started(self, command):
        """Track an upload that just started."""
        self.files_uploading.append(command)
        self.queued_commands.add(command)

    def upload_finished(self, command):
        """Stop tracking an upload that just finished."""
        if command in self.files_uploading:
            self.files_uploading.remove(command)
        self.queued_commands.discard(command)

    def progress_made(self, share_id, node_id, n_bytes, deflated_size):
        """Record a progress event for an up- or download."""
        self.progress_events.append(
            (share_id, node_id, n_bytes, deflated_size))

    def connection_made(self):
        """The client made the connection to the server."""
        self.connected = True

    def connection_lost(self):
        """The client lost the connection to the server."""
        self.connected = False
775 | 766 | |||
776 | 767 | |||
class StatusFrontendTestCase(BaseTwistedTestCase):
    """Test the status frontend."""

    @defer.inlineCallbacks
    def setUp(self):
        """Wire a status frontend to fake aggregator, messaging and vm."""
        yield super(StatusFrontendTestCase, self).setUp()
        self.patch(aggregator, "StatusAggregator", FakeAggregator)
        self.patch(aggregator, "ToggleableNotification",
                   FakeNotificationSingleton())
        self.patch(aggregator, "Messaging", FakeMessaging)
        self.fakefsm = None
        self.fakevm = FakeVolumeManager()
        self.status_frontend = aggregator.StatusFrontend()
        self.listener = status_listener.StatusListener(self.fakefsm,
                                                       self.fakevm,
                                                       self.status_frontend)

    def test_file_published(self):
        """A file published event shows exactly one notification."""
        share_id = "fake share id"
        node_id = "fake node id"
        is_public = True
        public_url = "http://fake_public/url"
        self.listener.handle_AQ_CHANGE_PUBLIC_ACCESS_OK(share_id, node_id,
                                                        is_public, public_url)
        self.assertEqual(
            1, len(self.status_frontend.notification.notifications_shown))

    def test_file_unpublished(self):
        """A file unpublished event shows exactly one notification."""
        share_id = "fake share id"
        node_id = "fake node id"
        is_public = False
        public_url = None  # SD sends None when unpublishing
        self.listener.handle_AQ_CHANGE_PUBLIC_ACCESS_OK(share_id, node_id,
                                                        is_public, public_url)
        self.assertEqual(
            1, len(self.status_frontend.notification.notifications_shown))

    def test_download_started(self):
        """A download was added to the queue."""
        self.patch(status_listener.action_queue, "Download", FakeCommand)
        fake_command = FakeCommand()
        self.listener.handle_SYS_QUEUE_ADDED(fake_command)
        queued = self.status_frontend.aggregator.queued_commands
        self.assertIn(fake_command, queued)

    def test_download_started_with_no_deflated_size(self):
        """A download of unknown size was added to the queue."""
        self.patch(status_listener.action_queue, "Download", FakeCommand)
        fake_command = FakeCommand()
        fake_command.deflated_size = None
        self.listener.handle_SYS_QUEUE_ADDED(fake_command)
        queued = self.status_frontend.aggregator.queued_commands
        self.assertIn(fake_command, queued)

    def test_download_finished(self):
        """A download was removed from the queue."""
        self.patch(status_listener.action_queue, "Download", FakeCommand)
        fake_command = FakeCommand()
        self.listener.handle_SYS_QUEUE_ADDED(fake_command)
        self.listener.handle_SYS_QUEUE_REMOVED(fake_command)
        queued = self.status_frontend.aggregator.queued_commands
        self.assertNotIn(fake_command, queued)

    def test_upload_started(self):
        """An upload was added to the queue."""
        self.patch(status_listener.action_queue, "Upload", FakeCommand)
        fake_command = FakeCommand()
        self.listener.handle_SYS_QUEUE_ADDED(fake_command)
        queued = self.status_frontend.aggregator.queued_commands
        self.assertIn(fake_command, queued)

    def test_upload_started_with_no_deflated_size(self):
        """An upload of unknown size was added to the queue."""
        self.patch(status_listener.action_queue, "Upload", FakeCommand)
        fake_command = FakeCommand()
        fake_command.deflated_size = None
        self.listener.handle_SYS_QUEUE_ADDED(fake_command)
        queued = self.status_frontend.aggregator.queued_commands
        self.assertIn(fake_command, queued)

    def test_upload_finished(self):
        """An upload was removed from the queue."""
        self.patch(status_listener.action_queue, "Upload", FakeCommand)
        fake_command = FakeCommand()
        self.listener.handle_SYS_QUEUE_ADDED(fake_command)
        self.listener.handle_SYS_QUEUE_REMOVED(fake_command)
        queued = self.status_frontend.aggregator.queued_commands
        self.assertNotIn(fake_command, queued)

    def test_progress_made_on_upload(self):
        """Progress on an uploading file reaches the aggregator."""
        share_id = 'fake_share'
        node_id = 'fake_node'
        n_bytes_written = 100
        deflated_size = 10000
        self.listener.handle_AQ_UPLOAD_FILE_PROGRESS(
            share_id, node_id, n_bytes_written, deflated_size)
        events = self.status_frontend.aggregator.progress_events
        self.assertEqual(
            [(share_id, node_id, n_bytes_written, deflated_size)], events,
            "progress_made was not called (exactly once) on aggregator.")

    def test_progress_made_on_download(self):
        """Progress on a downloading file reaches the aggregator."""
        share_id = 'fake_share'
        node_id = 'fake_node'
        n_bytes_written = 200
        deflated_size = 20000
        self.listener.handle_AQ_DOWNLOAD_FILE_PROGRESS(
            share_id, node_id, n_bytes_written, deflated_size)
        events = self.status_frontend.aggregator.progress_events
        self.assertEqual(
            [(share_id, node_id, n_bytes_written, deflated_size)], events,
            "progress_made was not called (exactly once) on aggregator.")

    def test_queue_done(self):
        """All queued commands are cleared when the queue is done."""
        fake_command = FakeCommand()
        queued = self.status_frontend.aggregator.queued_commands
        queued.add(fake_command)
        self.listener.handle_SYS_QUEUE_DONE()
        self.assertEqual(0, len(queued))

    def test_new_share_available(self):
        """A new share is available for subscription."""
        SHARE_ID = "fake share id"
        FAKE_SENDER = 'Mom'
        share = Share(volume_id=SHARE_ID, other_visible_name=FAKE_SENDER)
        self.fakevm.volumes[SHARE_ID] = share
        self.listener.handle_VM_SHARE_CREATED(SHARE_ID)
        self.assertEqual(
            1, len(self.status_frontend.notification.notifications_shown))
        msg = self.status_frontend.messaging.messages_shown[FAKE_SENDER]
        # msg did not receive a time argument
        self.assertEqual(None, msg[1])
        # msg did not receive a count argument
        self.assertEqual(None, msg[2])

    def test_already_subscribed_new_udf_available(self):
        """An already-subscribed udf produces no notifications at all."""
        udf = UDF()
        udf.subscribed = True
        self.listener.handle_VM_UDF_CREATED(udf)
        self.assertEqual(
            0, len(self.status_frontend.notification.notifications_shown))
        self.assertEqual(
            0, len(self.status_frontend.messaging.messages_shown))
        self.assertEqual(
            0, len(self.status_frontend.messaging.messages_updated))

    def test_new_udf_available(self):
        """A new udf is available for subscription."""
        udf = UDF()
        self.listener.handle_VM_UDF_CREATED(udf)
        self.assertEqual(
            1, len(self.status_frontend.notification.notifications_shown))
        self.assertEqual(
            0, len(self.status_frontend.messaging.messages_shown))
        self.assertEqual(
            0, len(self.status_frontend.messaging.messages_updated))
        self.assertEqual(0, len(self.status_frontend.messaging.callbacks))

    def test_two_new_udfs_available(self):
        """Two new udfs are available for subscription."""
        udf1 = UDF()
        self.listener.handle_VM_UDF_CREATED(udf1)
        udf2 = UDF()
        self.listener.handle_VM_UDF_CREATED(udf2)
        self.assertEqual(
            2, len(self.status_frontend.notification.notifications_shown))
        self.assertEqual(
            0, len(self.status_frontend.messaging.messages_shown))
        self.assertEqual(
            0, len(self.status_frontend.messaging.messages_updated))

    def test_server_connection_lost(self):
        """The client lost the connection to the server."""
        self.status_frontend.aggregator.connected = True
        self.listener.handle_SYS_CONNECTION_LOST()
        self.assertEqual(
            0, len(self.status_frontend.notification.notifications_shown))
        self.assertFalse(self.status_frontend.aggregator.connected)

    def test_server_connection_made(self):
        """The client made the connection to the server."""
        self.status_frontend.aggregator.connected = False
        self.listener.handle_SYS_CONNECTION_MADE()
        self.assertEqual(
            0, len(self.status_frontend.notification.notifications_shown))
        self.assertTrue(self.status_frontend.aggregator.connected)

    def test_set_show_all_notifications(self):
        """Disabling all notifications turns the switch off."""
        self.status_frontend.set_show_all_notifications(False)
        switch = self.status_frontend.aggregator.notification_switch
        self.assertFalse(switch.enabled)

    def test_udf_quota_exceeded(self):  # pylint: disable=R0201
        """Quota exceeded in udf: no bubble, launcher goes urgent."""
        mocker = Mocker()
        launcher = mocker.replace(
            "ubuntuone.platform.launcher.UbuntuOneLauncher")
        launcher()
        mock_launcher = mocker.mock()
        mocker.result(mock_launcher)
        mock_launcher.set_urgent()
        mocker.replay()
        UDF_ID = 'fake udf id'
        udf = UDF(volume_id=UDF_ID)
        self.fakevm.volumes[UDF_ID] = udf
        self.listener.handle_SYS_QUOTA_EXCEEDED(
            volume_id=UDF_ID, free_bytes=0)
        self.assertEqual(
            0, len(self.status_frontend.notification.notifications_shown))
        mocker.restore()
        mocker.verify()

    def test_root_quota_exceeded(self):  # pylint: disable=R0201
        """Quota exceeded in root: no bubble, launcher goes urgent."""
        mocker = Mocker()
        launcher = mocker.replace(
            "ubuntuone.platform.launcher.UbuntuOneLauncher")
        launcher()
        mock_launcher = mocker.mock()
        mocker.result(mock_launcher)
        mock_launcher.set_urgent()
        mocker.replay()
        ROOT_ID = 'fake root id'
        root = Root(volume_id=ROOT_ID)
        self.fakevm.volumes[ROOT_ID] = root
        self.fakevm.root = root
        self.listener.handle_SYS_QUOTA_EXCEEDED(
            volume_id=ROOT_ID, free_bytes=0)
        self.assertEqual(
            0, len(self.status_frontend.notification.notifications_shown))
        mocker.restore()
        mocker.verify()

    def test_share_quota_exceeded(self):
        """Quota exceeded in share notifies at most once per day."""
        mocker = Mocker()
        launcher = mocker.replace(
            "ubuntuone.platform.launcher.UbuntuOneLauncher")
        launcher()
        mock_launcher = mocker.mock()
        mocker.result(mock_launcher)
        mock_launcher.set_urgent()
        launcher()
        mock_launcher = mocker.mock()
        mocker.result(mock_launcher)
        mock_launcher.set_urgent()
        mocker.replay()
        SHARE_ID = 'fake share id'
        BYTES = 0
        share = Share(volume_id=SHARE_ID)
        self.fakevm.volumes[SHARE_ID] = share
        self.listener.handle_SYS_QUOTA_EXCEEDED(SHARE_ID, BYTES)
        self.assertEqual(
            1, len(self.status_frontend.notification.notifications_shown))
        # Repeats within the same day are throttled.
        self.listener.handle_SYS_QUOTA_EXCEEDED(SHARE_ID, BYTES)
        self.listener.handle_SYS_QUOTA_EXCEEDED(SHARE_ID, BYTES)
        self.assertEqual(
            1, len(self.status_frontend.notification.notifications_shown))
        self.status_frontend.aggregator.clock.advance(aggregator.ONE_DAY + 1)
        self.listener.handle_SYS_QUOTA_EXCEEDED(SHARE_ID, BYTES)
        self.assertEqual(
            2, len(self.status_frontend.notification.notifications_shown))
        mocker.restore()
        mocker.verify()
1050 | 1041 | |||
1051 | 1042 | |||
class StatusEventTestCase(TestCase):
    """Test the status event class and children."""

    CLASS = aggregator.StatusEvent
    CLASS_KWARGS = {}
    status = None

    @defer.inlineCallbacks
    def setUp(self):
        """Instantiate CLASS, except for the abstract base case itself."""
        yield super(StatusEventTestCase, self).setUp()
        if type(self) == StatusEventTestCase:
            # The base StatusEvent is abstract and must refuse creation.
            self.assertRaises(AssertionError, self.CLASS, **self.CLASS_KWARGS)
        else:
            self.status = self.CLASS(**self.CLASS_KWARGS)

    def test_one_message_defined(self):
        """The singular message is defined as MESSAGE_ONE."""
        if self.status:
            self.assertNotEqual(None, self.CLASS.MESSAGE_ONE)

    def test_one_message_built_correctly(self):
        """The message returned by one() is returned ok."""
        if self.status:
            self.assertEqual(self.status.one(), self.CLASS.MESSAGE_ONE)
1077 | 1068 | |||
1078 | 1069 | |||
class FilePublishingStatusTestCase(StatusEventTestCase):
    """Test the file publishing status class."""

    CLASS = aggregator.FilePublishingStatus
    CLASS_KWARGS = {"new_public_url": "http://fake_public/url"}

    def test_one_message_built_correctly(self):
        """The message returned by one() includes the new url."""
        expected = self.CLASS.MESSAGE_ONE % self.status.kwargs
        self.assertEqual(self.status.one(), expected)
1089 | 1080 | |||
1090 | 1081 | |||
class FileUnpublishingStatusTestCase(StatusEventTestCase):
    """Test the file unpublishing status class."""

    CLASS = aggregator.FileUnpublishingStatus
    CLASS_KWARGS = {"old_public_url": None}
1096 | 1087 | |||
1097 | 1088 | |||
class ShareAvailableEventTestCase(StatusEventTestCase):
    """Test the folder available status class with a Share."""

    FOLDER_NAME = "folder name"
    OTHER_USER_NAME = "person name"
    SAMPLE_SHARE = Share(accepted=False, name=FOLDER_NAME,
                         other_visible_name=OTHER_USER_NAME)
    CLASS = aggregator.ShareAvailableStatus
    CLASS_KWARGS = {"share": SAMPLE_SHARE}

    def test_one_message_built_correctly(self):
        """one() must include the folder name and the sharer's name."""
        format_args = {
            "folder_name": self.FOLDER_NAME,
            "other_user_name": self.OTHER_USER_NAME,
        }
        expected = self.CLASS.MESSAGE_ONE % format_args
        self.assertEqual(self.status.one(), expected)
1116 | 1107 | |||
1117 | 1108 | |||
class UDFAvailableEventTestCase(StatusEventTestCase):
    """Test the folder available status class with a UDF."""

    FOLDER_NAME = "folder name"
    SAMPLE_UDF = UDF(subscribed=False, suggested_path=FOLDER_NAME)
    CLASS = aggregator.UDFAvailableStatus
    CLASS_KWARGS = {'udf': SAMPLE_UDF}

    def test_one_message_built_correctly(self):
        """one() must include the folder name."""
        format_args = {"folder_name": self.FOLDER_NAME}
        expected = self.CLASS.MESSAGE_ONE % format_args
        self.assertEqual(self.status.one(), expected)
1131 | 1122 | |||
1132 | 1123 | |||
class ConnectionLostEventTestCase(StatusEventTestCase):
    """Test the event fired when the connection is lost."""

    CLASS = aggregator.ConnectionLostStatus

    def test_many_message_built_correctly(self):
        """many() collapses a mixed pile of events into MESSAGE_ONE."""
        if self.status:
            count = 99
            test_events = [FakeStatus(88)] * count + [self.CLASS()]
            expected = self.CLASS.MESSAGE_ONE
            self.assertEqual(self.status.many(test_events), expected)
1145 | 1136 | |||
1146 | 1137 | |||
class ConnectionMadeEventTestCase(ConnectionLostEventTestCase):
    """Test the event fired when the connection is made."""

    CLASS = aggregator.ConnectionMadeStatus
1151 | 1142 | |||
1152 | 1143 | |||
class FakeStatus(aggregator.StatusEvent):
    """A fake status event with a configurable weight for comparisons."""

    def __init__(self, weight):
        """Store the fake weight."""
        super(FakeStatus, self).__init__()
        self.WEIGHT = weight
1160 | 1151 | |||
1161 | 1152 | |||
class FakeFileDiscoveryBubble(object):
    """A fake FileDiscoveryBubble that counts discovered files."""

    count = 0

    def __init__(self, status_aggregator, clock=None):
        """Keep a reference to the aggregator; the clock is unused."""
        self.status_aggregator = status_aggregator

    def new_file_found(self):
        """Count one more discovered file."""
        self.count += 1

    def cleanup(self):
        """Cleanup this instance (no-op)."""

    def connection_made(self):
        """Connection made (no-op)."""

    def connection_lost(self):
        """Connection lost (no-op)."""
1183 | 1174 | |||
1184 | 1175 | |||
class FakeFinalBubble(object):
    """A fake FinalStatusBubble that records whether it was shown."""

    shown = False

    def __init__(self, status_aggregator):
        """Keep a reference to the aggregator."""
        self.status_aggregator = status_aggregator

    def cleanup(self):
        """Cleanup this instance (no-op)."""

    def show(self):
        """Mark this bubble as shown."""
        self.shown = True
1200 | 1191 | |||
1201 | 1192 | |||
1202 | 1193 | class StatusAggregatorTestCase(TestCase): | ||
1203 | 1194 | """Test the backend of the status aggregator.""" | ||
1204 | 1195 | |||
1205 | 1196 | @defer.inlineCallbacks | ||
1206 | 1197 | def setUp(self): | ||
1207 | 1198 | """Initialize this test instance.""" | ||
1208 | 1199 | yield super(StatusAggregatorTestCase, self).setUp() | ||
1209 | 1200 | self.patch(aggregator, "FileDiscoveryBubble", | ||
1210 | 1201 | FakeFileDiscoveryBubble) | ||
1211 | 1202 | self.patch(aggregator, "FinalStatusBubble", | ||
1212 | 1203 | FakeFinalBubble) | ||
1213 | 1204 | self.patch(aggregator, "ToggleableNotification", | ||
1214 | 1205 | FakeNotificationSingleton()) | ||
1215 | 1206 | self.patch(aggregator, "UbuntuOneLauncher", FakeLauncher) | ||
1216 | 1207 | self.patch(aggregator.session, "Inhibitor", FakeInhibitor) | ||
1217 | 1208 | clock = PatchedClock() | ||
1218 | 1209 | self.status_frontend = aggregator.StatusFrontend(clock=clock) | ||
1219 | 1210 | self.aggregator = self.status_frontend.aggregator | ||
1220 | 1211 | self.fake_bubble = self.aggregator.file_discovery_bubble | ||
1221 | 1212 | |||
1222 | 1213 | self.handler = MementoHandler() | ||
1223 | 1214 | self.handler.setLevel(logging.DEBUG) | ||
1224 | 1215 | aggregator.logger.addHandler(self.handler) | ||
1225 | 1216 | aggregator.logger.setLevel(logging.DEBUG) | ||
1226 | 1217 | self.addCleanup(aggregator.logger.removeHandler, self.handler) | ||
1227 | 1218 | self.addCleanup(self.aggregator.progress_bar.cleanup) | ||
1228 | 1219 | |||
1229 | 1220 | def assertStatusReset(self): | ||
1230 | 1221 | """Assert that the status is at zero.""" | ||
1231 | 1222 | self.assertEqual(0, self.aggregator.download_done) | ||
1232 | 1223 | self.assertEqual(0, self.aggregator.upload_done) | ||
1233 | 1224 | self.assertEqual(0, len(self.aggregator.files_uploading)) | ||
1234 | 1225 | self.assertEqual(0, len(self.aggregator.files_downloading)) | ||
1235 | 1226 | self.assertEqual({}, self.aggregator.progress) | ||
1236 | 1227 | self.assertEqual({}, self.aggregator.to_do) | ||
1237 | 1228 | self.assertIdentical(None, self.aggregator.queue_done_timer) | ||
1238 | 1229 | |||
1239 | 1230 | def assertMiscCommandQueued(self, fc): | ||
1240 | 1231 | """Assert that some command was queued.""" | ||
1241 | 1232 | self.assertEqual(len(self.aggregator.to_do), 1) | ||
1242 | 1233 | message = "queueing command (total: 1): %s" % fc.__class__.__name__ | ||
1243 | 1234 | self.assertEqual(fc.deflated_size, sum(self.aggregator.to_do.values())) | ||
1244 | 1235 | self.assertTrue(self.handler.check_debug(message)) | ||
1245 | 1236 | self.assertTrue(self.aggregator.progress_bar.visible) | ||
1246 | 1237 | |||
1247 | 1238 | def assertMiscCommandUnqueued(self, fc): | ||
1248 | 1239 | """Assert that some command was unqueued.""" | ||
1249 | 1240 | self.assertEqual( | ||
1250 | 1241 | 1, self.aggregator.download_done + self.aggregator.upload_done) | ||
1251 | 1242 | message = "unqueueing command: %s" % fc.__class__.__name__ | ||
1252 | 1243 | self.assertTrue(self.handler.check_debug(message)) | ||
1253 | 1244 | |||
1254 | 1245 | def test_counters_start_at_zero(self): | ||
1255 | 1246 | """Test that the counters start at zero.""" | ||
1256 | 1247 | self.assertStatusReset() | ||
1257 | 1248 | |||
1258 | 1249 | def test_file_download_started(self): | ||
1259 | 1250 | """Test that a file has started download.""" | ||
1260 | 1251 | fc = FakeCommand(path='testfile.txt') | ||
1261 | 1252 | self.assertEqual('', self.aggregator.downloading_filename) | ||
1262 | 1253 | self.status_frontend.download_started(fc) | ||
1263 | 1254 | self.assertEqual(1, len(self.aggregator.files_downloading)) | ||
1264 | 1255 | self.assertEqual('testfile.txt', self.aggregator.downloading_filename) | ||
1265 | 1256 | self.assertMiscCommandQueued(fc) | ||
1266 | 1257 | self.assertEqual(1, self.fake_bubble.count) | ||
1267 | 1258 | self.assertEqual( | ||
1268 | 1259 | {(fc.share_id, fc.node_id): (fc.deflated_size)}, | ||
1269 | 1260 | self.aggregator.to_do) | ||
1270 | 1261 | |||
1271 | 1262 | def test_file_download_finished(self): | ||
1272 | 1263 | """Test that a file has finished downloading.""" | ||
1273 | 1264 | fc = FakeCommand() | ||
1274 | 1265 | self.status_frontend.download_started(fc) | ||
1275 | 1266 | self.status_frontend.download_finished(fc) | ||
1276 | 1267 | self.assertEqual(self.aggregator.download_done, 1) | ||
1277 | 1268 | self.assertMiscCommandUnqueued(fc) | ||
1278 | 1269 | self.assertEqual( | ||
1279 | 1270 | {(fc.share_id, fc.node_id): (fc.deflated_size)}, | ||
1280 | 1271 | self.aggregator.progress) | ||
1281 | 1272 | |||
1282 | 1273 | def test_file_upload_started(self): | ||
1283 | 1274 | """Test that a file has started upload.""" | ||
1284 | 1275 | fc = FakeCommand(path='testfile.txt') | ||
1285 | 1276 | self.assertEqual('', self.aggregator.uploading_filename) | ||
1286 | 1277 | self.status_frontend.upload_started(fc) | ||
1287 | 1278 | self.assertEqual(1, len(self.aggregator.files_uploading)) | ||
1288 | 1279 | self.assertEqual('testfile.txt', self.aggregator.uploading_filename) | ||
1289 | 1280 | self.assertMiscCommandQueued(fc) | ||
1290 | 1281 | self.assertEqual(1, self.fake_bubble.count) | ||
1291 | 1282 | self.assertEqual( | ||
1292 | 1283 | {(fc.share_id, fc.node_id): (fc.deflated_size)}, | ||
1293 | 1284 | self.aggregator.to_do) | ||
1294 | 1285 | |||
1295 | 1286 | def test_file_upload_finished(self): | ||
1296 | 1287 | """Test that a file has finished uploading.""" | ||
1297 | 1288 | fc = FakeCommand() | ||
1298 | 1289 | self.status_frontend.upload_started(fc) | ||
1299 | 1290 | self.status_frontend.upload_finished(fc) | ||
1300 | 1291 | self.assertEqual(self.aggregator.upload_done, 1) | ||
1301 | 1292 | self.assertMiscCommandUnqueued(fc) | ||
1302 | 1293 | self.assertEqual( | ||
1303 | 1294 | {(fc.share_id, fc.node_id): (fc.deflated_size)}, | ||
1304 | 1295 | self.aggregator.progress) | ||
1305 | 1296 | |||
1306 | 1297 | def test_progress_made(self): | ||
1307 | 1298 | """Progress on up and downloads is tracked.""" | ||
1308 | 1299 | share_id = 'fake_share' | ||
1309 | 1300 | node_id = 'fake_node' | ||
1310 | 1301 | n_bytes_written = 200 | ||
1311 | 1302 | deflated_size = 100000 | ||
1312 | 1303 | self.aggregator.progress_made( | ||
1313 | 1304 | share_id, node_id, n_bytes_written, deflated_size) | ||
1314 | 1305 | self.assertEqual( | ||
1315 | 1306 | {(share_id, node_id): (n_bytes_written)}, | ||
1316 | 1307 | self.aggregator.progress) | ||
1317 | 1308 | |||
1318 | 1309 | def test_get_discovery_message(self): | ||
1319 | 1310 | """Test the message that's shown on the discovery bubble.""" | ||
1320 | 1311 | uploading = 10 | ||
1321 | 1312 | downloading = 8 | ||
1322 | 1313 | filename = 'upfile0.ext' | ||
1323 | 1314 | filename2 = 'downfile0.ext' | ||
1324 | 1315 | self.aggregator.files_uploading.extend([ | ||
1325 | 1316 | FakeCommand(path='upfile%d.ext' % n) for n in range(uploading)]) | ||
1326 | 1317 | self.aggregator.uploading_filename = filename | ||
1327 | 1318 | self.aggregator.files_downloading.extend([ | ||
1328 | 1319 | FakeCommand(path='downfile%d.ext' % n) for n in | ||
1329 | 1320 | range(downloading)]) | ||
1330 | 1321 | self.aggregator.downloading_filename = filename2 | ||
1331 | 1322 | expected = ( | ||
1332 | 1323 | aggregator.files_being_uploaded(filename, uploading) + "\n" + | ||
1333 | 1324 | aggregator.files_being_downloaded(filename2, downloading)) | ||
1334 | 1325 | result = self.aggregator.get_discovery_message() | ||
1335 | 1326 | self.assertEqual(expected, result) | ||
1336 | 1327 | |||
1337 | 1328 | def test_get_final_status_message(self): | ||
1338 | 1329 | """The final status message.""" | ||
1339 | 1330 | done = (5, 10) | ||
1340 | 1331 | self.aggregator.uploading_filename = FILENAME | ||
1341 | 1332 | self.aggregator.downloading_filename = FILENAME2 | ||
1342 | 1333 | self.aggregator.upload_done, self.aggregator.download_done = done | ||
1343 | 1334 | |||
1344 | 1335 | expected = ( | ||
1345 | 1336 | aggregator.FINAL_COMPLETED + "\n" + | ||
1346 | 1337 | aggregator.files_were_uploaded( | ||
1347 | 1338 | FILENAME, self.aggregator.upload_done) + "\n" + | ||
1348 | 1339 | aggregator.files_were_downloaded( | ||
1349 | 1340 | FILENAME2, self.aggregator.download_done)) | ||
1350 | 1341 | |||
1351 | 1342 | result = self.aggregator.get_final_status_message() | ||
1352 | 1343 | self.assertEqual(expected, result) | ||
1353 | 1344 | |||
1354 | 1345 | def test_get_final_status_message_no_uploads(self): | ||
1355 | 1346 | """The final status message when there were no uploads.""" | ||
1356 | 1347 | done = (0, 12) | ||
1357 | 1348 | self.aggregator.upload_done, self.aggregator.download_done = done | ||
1358 | 1349 | self.aggregator.downloading_filename = FILENAME2 | ||
1359 | 1350 | |||
1360 | 1351 | expected = ( | ||
1361 | 1352 | aggregator.FINAL_COMPLETED + "\n" + | ||
1362 | 1353 | aggregator.files_were_downloaded( | ||
1363 | 1354 | FILENAME2, self.aggregator.download_done)) | ||
1364 | 1355 | |||
1365 | 1356 | result = self.aggregator.get_final_status_message() | ||
1366 | 1357 | self.assertEqual(expected, result) | ||
1367 | 1358 | |||
1368 | 1359 | def test_get_final_status_message_no_downloads(self): | ||
1369 | 1360 | """The final status message when there were no downloads.""" | ||
1370 | 1361 | done = (8, 0) | ||
1371 | 1362 | self.aggregator.upload_done, self.aggregator.download_done = done | ||
1372 | 1363 | self.aggregator.uploading_filename = FILENAME | ||
1373 | 1364 | |||
1374 | 1365 | expected = ( | ||
1375 | 1366 | aggregator.FINAL_COMPLETED + "\n" + | ||
1376 | 1367 | aggregator.files_were_uploaded( | ||
1377 | 1368 | FILENAME, self.aggregator.upload_done)) | ||
1378 | 1369 | |||
1379 | 1370 | result = self.aggregator.get_final_status_message() | ||
1380 | 1371 | self.assertEqual(expected, result) | ||
1381 | 1372 | |||
1382 | 1373 | def test_queue_done_shows_bubble_when_downloads_happened(self): | ||
1383 | 1374 | """On queue done, show final bubble if downloads happened.""" | ||
1384 | 1375 | fc = FakeCommand() | ||
1385 | 1376 | self.status_frontend.download_started(fc) | ||
1386 | 1377 | self.status_frontend.download_finished(fc) | ||
1387 | 1378 | old_final_bubble = self.aggregator.final_status_bubble | ||
1388 | 1379 | self.aggregator.queue_done() | ||
1389 | 1380 | self.aggregator.clock.advance(self.aggregator.finished_delay + 1) | ||
1390 | 1381 | self.assertTrue(old_final_bubble.shown) | ||
1391 | 1382 | |||
1392 | 1383 | def test_queue_done_shows_bubble_when_uploads_happened(self): | ||
1393 | 1384 | """On queue done, show final bubble if uploads happened.""" | ||
1394 | 1385 | fc = FakeCommand() | ||
1395 | 1386 | self.status_frontend.upload_started(fc) | ||
1396 | 1387 | self.status_frontend.upload_finished(fc) | ||
1397 | 1388 | old_final_bubble = self.aggregator.final_status_bubble | ||
1398 | 1389 | self.aggregator.queue_done() | ||
1399 | 1390 | self.aggregator.clock.advance(self.aggregator.finished_delay + 1) | ||
1400 | 1391 | self.assertTrue(old_final_bubble.shown) | ||
1401 | 1392 | |||
    def test_queue_done_shows_bubble_only_after_delay(self):
        """On queue_done, show final bubble only after a delay.

        Also verifies that a second queue_done restarts the wait rather
        than showing the bubble immediately.
        """
        fc = FakeCommand()
        self.status_frontend.upload_started(fc)
        self.status_frontend.upload_finished(fc)
        old_final_bubble = self.aggregator.final_status_bubble
        self.aggregator.queue_done()
        # Not shown right away...
        self.assertFalse(old_final_bubble.shown)
        # ...nor just before the delay expires.
        self.aggregator.clock.advance(self.aggregator.finished_delay - 1)
        self.assertFalse(old_final_bubble.shown)
        # A second queue_done restarts the delay from scratch.
        self.aggregator.queue_done()
        self.assertFalse(old_final_bubble.shown)
        # Past the original deadline, but not the restarted one.
        self.aggregator.clock.advance(2)
        self.assertFalse(old_final_bubble.shown)
        # Only once the restarted delay fully elapses is the bubble shown.
        self.aggregator.clock.advance(self.aggregator.finished_delay + 1)
        self.assertTrue(old_final_bubble.shown)
1418 | 1409 | |||
1419 | 1410 | def test_queue_done_does_not_show_bubble_when_no_transfers_happened(self): | ||
1420 | 1411 | """On queue done, don't show final bubble if no transfers happened.""" | ||
1421 | 1412 | fc = FakeCommand() | ||
1422 | 1413 | self.status_frontend.upload_started(fc) | ||
1423 | 1414 | old_final_bubble = self.aggregator.final_status_bubble | ||
1424 | 1415 | self.aggregator.queue_done() | ||
1425 | 1416 | self.assertFalse(old_final_bubble.shown) | ||
1426 | 1417 | |||
1427 | 1418 | def test_queue_done_resets_status_and_hides_progressbar(self): | ||
1428 | 1419 | """On queue done, reset counters and hide progressbar.""" | ||
1429 | 1420 | fc = FakeCommand() | ||
1430 | 1421 | self.status_frontend.upload_started(fc) | ||
1431 | 1422 | self.aggregator.queue_done() | ||
1432 | 1423 | self.aggregator.clock.advance(self.aggregator.finished_delay + 1) | ||
1433 | 1424 | self.assertStatusReset() | ||
1434 | 1425 | self.assertEqual(0.0, self.aggregator.progress_bar.progress) | ||
1435 | 1426 | self.assertFalse(self.aggregator.progress_bar.visible) | ||
1436 | 1427 | |||
    def test_download_started_cancels_timer(self):
        """Starting a download cancels the queue_done timer."""
        fc = FakeCommand()
        self.status_frontend.download_started(fc)
        self.aggregator.clock.advance(self.aggregator.finished_delay)
        self.status_frontend.download_finished(fc)
        # queue_done arms the timer that will show the final bubble.
        self.aggregator.queue_done()
        self.aggregator.clock.advance(self.aggregator.finished_delay / 2)
        # A new download arriving mid-delay must cancel that timer.
        fc2 = FakeCommand()
        self.status_frontend.download_started(fc2)
        self.assertIdentical(self.aggregator.queue_done_timer, None)
        # Advancing past the old deadline must not fire anything.
        self.aggregator.clock.advance(self.aggregator.finished_delay)
        self.status_frontend.download_finished(fc2)
1450 | 1441 | |||
    def test_upload_started_cancels_timer(self):
        """Starting an upload cancels the queue_done timer."""
        fc = FakeCommand()
        self.status_frontend.upload_started(fc)
        self.aggregator.clock.advance(self.aggregator.finished_delay)
        self.status_frontend.upload_finished(fc)
        # queue_done arms the timer that will show the final bubble.
        self.aggregator.queue_done()
        self.aggregator.clock.advance(self.aggregator.finished_delay / 2)
        # A new upload arriving mid-delay must cancel that timer.
        fc2 = FakeCommand()
        self.status_frontend.upload_started(fc2)
        self.assertIdentical(self.aggregator.queue_done_timer, None)
        # Advancing past the old deadline must not fire anything.
        self.aggregator.clock.advance(self.aggregator.finished_delay)
        self.status_frontend.upload_finished(fc2)
1464 | 1455 | |||
1465 | 1456 | |||
class StatusGrouperTestCase(TestCase):
    """Tests for the group_statuses function."""

    def test_group_status(self):
        """The status grouper sorts and groups by weight."""
        heavy = FakeStatus(99)
        statuses = [heavy, heavy, FakeStatus(12), FakeStatus(1)]

        grouped = [
            list(group) for _, group in aggregator.group_statuses(statuses)]

        # Groups come back ordered by ascending weight; equal weights
        # stay together in one group.
        expected = [[statuses[3]], [statuses[2]], [heavy, heavy]]
        self.assertEqual(grouped, expected)
1485 | 1476 | |||
1486 | 1477 | |||
class HundredFeetTestCase(TestCase):
    """Integration check: all aggregator parts working together.

    Both tests drive the full StatusFrontend through a transfer cycle
    with a fake clock, fake notifications/launcher/inhibitor, and check
    the notification and progress-bar side effects.
    """

    def test_all_together_now(self):
        """Make all parts work together with notifications enabled."""
        self.patch(aggregator, "ToggleableNotification",
                   FakeNotificationSingleton())
        self.patch(aggregator, "UbuntuOneLauncher", FakeLauncher)
        self.patch(aggregator.session, "Inhibitor", FakeInhibitor)
        clock = PatchedClock()
        upload = FakeCommand(path='upload.foo')
        sf = aggregator.StatusFrontend(clock=clock)
        sf.server_connection_made()
        sf.set_show_all_notifications(True)

        # the progress bar is not visible yet
        self.assertFalse(sf.aggregator.progress_bar.visible)
        sf.upload_started(upload)
        # the progress bar is shown as soon as a transfer starts
        self.assertTrue(sf.aggregator.progress_bar.visible)
        notifications_shown = (sf.aggregator.file_discovery_bubble.
                               notification.notifications_shown)
        # no notifications shown yet: the gathering delay has not elapsed
        self.assertEqual(0, len(notifications_shown))
        clock.advance(aggregator.FileDiscoveryGatheringState.initial_delay)
        # first notification: files found
        self.assertEqual(1, len(notifications_shown))
        # NOTE(review): passed positionally here, but as path= above --
        # presumably the first positional arg of FakeCommand is the path;
        # confirm against the FakeCommand definition.
        download = FakeCommand('download.bar')
        sf.download_started(download)
        self.assertEqual(1, len(notifications_shown))
        # the progress is still zero
        self.assertEqual(0.0, sf.aggregator.progress_bar.progress)
        clock.advance(aggregator.FileDiscoveryUpdateState.updates_delay)
        # second notification: files count update
        self.assertEqual(2, len(notifications_shown))
        clock.advance(aggregator.FileDiscoveryUpdateState.updates_timeout -
                      aggregator.FileDiscoveryUpdateState.updates_delay)
        sf.upload_finished(upload)
        sf.download_finished(download)
        # the progress has now reached 100%
        self.assertEqual(1.0, sf.aggregator.progress_bar.progress)
        sf.queue_done()
        clock.advance(sf.aggregator.finished_delay + 1)
        # third notification: the final status bubble
        self.assertEqual(3, len(notifications_shown))

    def test_all_together_now_off(self):
        """Make all parts work together, but with notifications off."""
        self.patch(aggregator, "ToggleableNotification",
                   FakeNotificationSingleton())
        self.patch(aggregator, "UbuntuOneLauncher", FakeLauncher)
        self.patch(aggregator.session, "Inhibitor", FakeInhibitor)
        clock = PatchedClock()
        upload = FakeCommand('upload.foo')
        sf = aggregator.StatusFrontend(clock=clock)
        sf.set_show_all_notifications(False)

        # the progress bar is not visible yet
        self.assertFalse(sf.aggregator.progress_bar.visible)
        sf.upload_started(upload)
        # the progress bar is shown even with notifications disabled
        self.assertTrue(sf.aggregator.progress_bar.visible)
        notifications_shown = (sf.aggregator.file_discovery_bubble.
                               notification.notifications_shown)
        # no notifications are ever shown in this test
        self.assertEqual(0, len(notifications_shown))
        clock.advance(aggregator.FileDiscoveryGatheringState.initial_delay)
        self.assertEqual(0, len(notifications_shown))
        download = FakeCommand('download.bar')
        sf.download_started(download)
        self.assertEqual(0, len(notifications_shown))
        # the progress is still zero
        self.assertEqual(0.0, sf.aggregator.progress_bar.progress)
        clock.advance(aggregator.FileDiscoveryUpdateState.updates_delay)
        self.assertEqual(0, len(notifications_shown))
        clock.advance(aggregator.FileDiscoveryUpdateState.updates_timeout -
                      aggregator.FileDiscoveryUpdateState.updates_delay)
        sf.upload_finished(upload)
        sf.download_finished(download)
        # the progress has now reached 100%
        self.assertEqual(1.0, sf.aggregator.progress_bar.progress)
        self.assertEqual(0, len(notifications_shown))
        sf.queue_done()
        self.assertEqual(0, len(notifications_shown))
1570 | 0 | 1561 | ||
1571 | === added directory '.pc/03_reset_notify_name.patch/ubuntuone' | |||
1572 | === added directory '.pc/03_reset_notify_name.patch/ubuntuone/status' | |||
1573 | === added file '.pc/03_reset_notify_name.patch/ubuntuone/status/aggregator.py' | |||
1574 | --- .pc/03_reset_notify_name.patch/ubuntuone/status/aggregator.py 1970-01-01 00:00:00 +0000 | |||
1575 | +++ .pc/03_reset_notify_name.patch/ubuntuone/status/aggregator.py 2011-12-14 21:11:28 +0000 | |||
1576 | @@ -0,0 +1,882 @@ | |||
1577 | 1 | # ubuntuone.status.aggregator | ||
1578 | 2 | # | ||
1579 | 3 | # Author: Alejandro J. Cura <alecu@canonical.com> | ||
1580 | 4 | # | ||
1581 | 5 | # Copyright 2011 Canonical Ltd. | ||
1582 | 6 | # | ||
1583 | 7 | # This program is free software: you can redistribute it and/or modify it | ||
1584 | 8 | # under the terms of the GNU General Public License version 3, as published | ||
1585 | 9 | # by the Free Software Foundation. | ||
1586 | 10 | # | ||
1587 | 11 | # This program is distributed in the hope that it will be useful, but | ||
1588 | 12 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
1589 | 13 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
1590 | 14 | # PURPOSE. See the GNU General Public License for more details. | ||
1591 | 15 | # | ||
1592 | 16 | # You should have received a copy of the GNU General Public License along | ||
1593 | 17 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
1594 | 18 | """Aggregate status events.""" | ||
1595 | 19 | |||
1596 | 20 | import itertools | ||
1597 | 21 | import operator | ||
1598 | 22 | import os | ||
1599 | 23 | |||
1600 | 24 | |||
1601 | 25 | import gettext | ||
1602 | 26 | |||
1603 | 27 | from twisted.internet import reactor, defer | ||
1604 | 28 | |||
1605 | 29 | from ubuntuone.clientdefs import GETTEXT_PACKAGE | ||
1606 | 30 | from ubuntuone.status.logger import logger | ||
1607 | 31 | from ubuntuone.platform import session | ||
1608 | 32 | from ubuntuone.platform.notification import Notification | ||
1609 | 33 | from ubuntuone.platform.messaging import Messaging | ||
1610 | 34 | from ubuntuone.platform.launcher import UbuntuOneLauncher, DummyLauncher | ||
1611 | 35 | |||
ONE_DAY = 24 * 60 * 60


def Q_(string):
    """Translate *string* via this package's gettext domain.

    Defined as a function rather than a lambda assignment (PEP 8 E731)
    so it has a proper name in tracebacks; behavior is unchanged.
    """
    return gettext.dgettext(GETTEXT_PACKAGE, string)


UBUNTUONE_TITLE = Q_("Ubuntu One")
NEW_UDFS_SENDER = Q_("New cloud folder(s) available")
FINAL_COMPLETED = Q_("File synchronization completed.")

PROGRESS_COMPLETED = Q_("%(percentage_completed)d%% completed.")
FILE_SYNC_IN_PROGRESS = Q_("File synchronization in progress")

SHARE_QUOTA_EXCEEDED = Q_(
    'There is no available space on the folder:\n"%s" shared by %s')
1625 | 49 | |||
def alert_user():
    """Flag the launcher as urgent so the user takes notice."""
    UbuntuOneLauncher().set_urgent()
1630 | 54 | |||
1631 | 55 | |||
def files_being_uploaded(filename, files_uploading):
    """Get the i18n string for files being uploaded."""
    remaining = files_uploading - 1
    if remaining < 1:
        # A single file gets the simpler singular message.
        return Q_(
            "'%(filename)s' is being uploaded to your personal cloud.") % {
            'filename': filename}
    return gettext.dngettext(
        GETTEXT_PACKAGE,
        "'%(filename)s' and %(other_files)d other file are being "
        "uploaded to your personal cloud.",
        "'%(filename)s' and %(other_files)d other files are being "
        "uploaded to your personal cloud.", remaining) % {
        "filename": filename, "other_files": remaining}
1647 | 71 | |||
1648 | 72 | |||
def files_being_downloaded(filename, files_downloading):
    """Get the i18n string for files being downloaded."""
    remaining = files_downloading - 1
    if remaining < 1:
        # A single file gets the simpler singular message.
        return Q_(
            "'%(filename)s' is being downloaded to your computer.") % {
            'filename': filename}
    return gettext.dngettext(
        GETTEXT_PACKAGE,
        "'%(filename)s' and %(other_files)d other file are being "
        "downloaded to your computer.",
        "'%(filename)s' and %(other_files)d other files are being "
        "downloaded to your computer.", remaining) % {
        "filename": filename, "other_files": remaining}
1664 | 88 | |||
1665 | 89 | |||
def files_were_uploaded(filename, upload_done):
    """Get the i18n string for files that were uploaded."""
    remaining = upload_done - 1
    if remaining < 1:
        # A single file gets the simpler singular message.
        return Q_(
            "'%(filename)s' was uploaded to your personal cloud.") % {
            'filename': filename}
    return gettext.dngettext(
        GETTEXT_PACKAGE,
        "'%(filename)s' and %(other_files)d other file were uploaded to "
        "your personal cloud.",
        "'%(filename)s' and %(other_files)d other files were uploaded "
        "to your personal cloud.", remaining) % {
        'filename': filename, 'other_files': remaining}
1681 | 105 | |||
1682 | 106 | |||
def files_were_downloaded(filename, download_done):
    """Get the i18n string for files that were downloaded."""
    remaining = download_done - 1
    if remaining < 1:
        # A single file gets the simpler singular message.
        return Q_(
            "'%(filename)s' was downloaded to your computer.") % {
            'filename': filename}
    return gettext.dngettext(
        GETTEXT_PACKAGE,
        "'%(filename)s' and %(other_files)d other file were "
        "downloaded to your computer.",
        "'%(filename)s' and %(other_files)d other files were "
        "downloaded to your computer.", remaining) % {
        'filename': filename, 'other_files': remaining}
1698 | 122 | |||
1699 | 123 | |||
class ToggleableNotification(object):
    """A notification wrapper honouring an on/off switch."""

    def __init__(self, notification_switch):
        """Keep the switch and wrap a real Notification."""
        self.notification_switch = notification_switch
        self.notification = Notification()

    def send_notification(self, *args):
        """Forward to the real notification only while enabled."""
        if not self.notification_switch.enabled:
            return None
        return self.notification.send_notification(*args)
1712 | 136 | |||
1713 | 137 | |||
class NotificationSwitch(object):
    """A switch that turns notifications on and off."""

    # Notifications start out allowed.
    enabled = True

    def __init__(self):
        """Create the shared toggleable notification controller."""
        self.toggleable_notification = ToggleableNotification(self)

    def get_notification(self):
        """Return the shared toggleable notification instance."""
        return self.toggleable_notification

    def enable_notifications(self):
        """Turn the switch on."""
        self.enabled = True

    def disable_notifications(self):
        """Turn the switch off."""
        self.enabled = False
1733 | 157 | |||
1734 | 158 | |||
class StatusEvent(object):
    """An event representing a status change.

    Abstract base: children define MESSAGE_ONE (and usually many()).
    """

    MESSAGE_ONE = None  # to be defined in child classes
    WEIGHT = 99
    # Fixed: the original literal had a stray "" concatenated after the
    # closing quote; the string value itself is unchanged.
    DO_NOT_INSTANCE = "Do not instance this class, only children."

    def __init__(self, **kwargs):
        """Initialize this instance with the event's keyword data."""
        # Abstract guard: only concrete subclasses may be instantiated.
        assert type(self) != StatusEvent, self.DO_NOT_INSTANCE
        self.kwargs = kwargs

    def one(self):
        """A message if this is the only event of this type."""
        return self.MESSAGE_ONE
1750 | 174 | |||
1751 | 175 | |||
class FilePublishingStatus(StatusEvent):
    """Files that are made public with a url."""

    MESSAGE_ONE = Q_("A file was just made public at %(new_public_url)s")

    WEIGHT = 50

    def one(self):
        """Show the url if only one event of this type."""
        return self.MESSAGE_ONE % self.kwargs

    def many(self, events):
        """Show the number of files if many events of this type."""
        no_of_files = len(events)
        # Fixed: the translated message was computed but never returned,
        # so many() yielded None (siblings like ConnectionStatusEvent
        # return from many()).
        return gettext.dngettext(
            GETTEXT_PACKAGE,
            "%(event_count)d file was just made public.",
            "%(event_count)d files were just made public.",
            no_of_files) % {'event_count': no_of_files}
1771 | 195 | |||
1772 | 196 | |||
class FileUnpublishingStatus(StatusEvent):
    """Files that have stopped being published."""

    MESSAGE_ONE = Q_("A file is no longer published")
    WEIGHT = 51

    def many(self, events):
        """Show the number of files if many events of this type."""
        no_of_files = len(events)
        # Fixed: the translated message was computed but never returned,
        # so many() yielded None.
        return gettext.dngettext(
            GETTEXT_PACKAGE,
            "%(event_count)d file is no longer published.",
            "%(event_count)d files are no longer published.",
            no_of_files) % {'event_count': no_of_files}
1787 | 211 | |||
1788 | 212 | |||
class FolderAvailableStatus(StatusEvent):
    """Folders available for subscription."""

    WEIGHT = 60

    def many(self, events):
        """Show the number of folders if many events of this type."""
        no_of_files = len(events)
        # Fixed: the translated message was computed but never returned,
        # so many() yielded None.
        return gettext.dngettext(
            GETTEXT_PACKAGE,
            "Found %(event_count)d new cloud folder.",
            "Found %(event_count)d new cloud folders.",
            no_of_files) % {'event_count': no_of_files}
1802 | 226 | |||
1803 | 227 | |||
class ShareAvailableStatus(FolderAvailableStatus):
    """A Share is available for subscription."""

    MESSAGE_ONE = Q_("New cloud folder available: '%(folder_name)s' "
                     "shared by %(other_user_name)s")

    def one(self):
        """Show the shared folder's name and who shared it."""
        share = self.kwargs["share"]
        return self.MESSAGE_ONE % {
            "folder_name": share.name,
            "other_user_name": share.other_visible_name,
        }
1818 | 242 | |||
1819 | 243 | |||
class UDFAvailableStatus(FolderAvailableStatus):
    """An UDF is available for subscription."""

    MESSAGE_ONE = Q_("New cloud folder available: '%(folder_name)s'")

    def one(self):
        """Show the folder's suggested path."""
        udf = self.kwargs["udf"]
        return self.MESSAGE_ONE % {"folder_name": udf.suggested_path}
1830 | 254 | |||
1831 | 255 | |||
class ConnectionStatusEvent(StatusEvent):
    """The connection to the server changed status."""

    WEIGHT = 30

    def many(self, events):
        """Collapse many events into the message of the latest one."""
        latest = events[-1]
        return latest.one()
1840 | 264 | |||
1841 | 265 | |||
class ConnectionLostStatus(ConnectionStatusEvent):
    """The connection to the server was lost."""

    # Grouping with other connection events keeps only the latest message.
    MESSAGE_ONE = Q_("The connection to the server was lost.")
1846 | 270 | |||
1847 | 271 | |||
class ConnectionMadeStatus(ConnectionStatusEvent):
    """The connection to the server was made."""

    # Grouping with other connection events keeps only the latest message.
    MESSAGE_ONE = Q_("The connection to the server was restored.")
1852 | 276 | |||
1853 | 277 | |||
class Timer(defer.Deferred):
    """A deferred that fires past a given delay.

    Fires with None after ``delay`` seconds on ``clock``, unless
    callback() is invoked earlier; reset() pushes the deadline back.
    """

    def __init__(self, delay, clock=reactor):
        """Initialize this instance.

        ``delay`` is in seconds; ``clock`` must provide callLater
        (twisted's reactor interface), which allows tests to use a
        fake clock.
        """
        defer.Deferred.__init__(self)
        self.clock = clock
        self.delay = delay
        # Schedule our own firing; callback() cancels this call first.
        self.delay_call = self.clock.callLater(delay, self.callback)

    def cancel_if_active(self, call):
        """Cancel a call if it is active."""
        if call.active():
            call.cancel()

    def cleanup(self):
        """Cancel all active calls."""
        self.cancel_if_active(self.delay_call)

    def callback(self, result=None):
        """Make sure the timers are stopped when firing the callback."""
        # Cancel pending calls first so a manual fire cannot be followed
        # by a second (illegal) callback when the delay later expires.
        self.cleanup()
        defer.Deferred.callback(self, result)

    def reset(self):
        """Reset the delay (no-op once the deferred has fired)."""
        if not self.called:
            self.delay_call.reset(self.delay)

    @property
    def active(self):
        """Is the delay still active."""
        return self.delay_call.active()
1887 | 311 | |||
1888 | 312 | |||
class DeadlineTimer(Timer):
    """A Timer that also fires when a hard deadline (timeout) passes."""

    def __init__(self, delay, timeout=None, clock=reactor):
        """Schedule both the resettable delay and the deadline call."""
        super(DeadlineTimer, self).__init__(delay, clock)
        self.timeout = timeout
        self.timeout_call = self.clock.callLater(timeout, self.callback)

    def cleanup(self):
        """Cancel all pending calls, including the deadline call."""
        super(DeadlineTimer, self).cleanup()
        self.cancel_if_active(self.timeout_call)
1902 | 326 | |||
1903 | 327 | |||
class FileDiscoveryBaseState(object):
    """Base class for the file discovery bubble's state machine."""

    def __init__(self, bubble):
        """Remember the owning bubble and share its clock."""
        self.bubble = bubble
        self.clock = bubble.clock

    def new_file_found(self):
        """Handle discovery of a new file (no-op by default)."""

    def cleanup(self):
        """Release any resources held by this state (no-op by default)."""
1917 | 341 | |||
1918 | 342 | |||
class FileDiscoveryIdleState(FileDiscoveryBaseState):
    """Initial state: waiting for the first file to appear."""

    def new_file_found(self):
        """The first file showed up: kick the bubble out of idle."""
        self.bubble._start()
1925 | 349 | |||
1926 | 350 | |||
class FileDiscoveryGatheringState(FileDiscoveryBaseState):
    """Gather files for a short while, then show a notification."""

    initial_delay = 0.5
    initial_timeout = 3.0

    def __init__(self, *args):
        """Start the deadline timer that bounds the gathering phase."""
        super(FileDiscoveryGatheringState, self).__init__(*args)
        timer = DeadlineTimer(
            self.initial_delay, self.initial_timeout, clock=self.clock)
        timer.addCallback(self._timeout)
        self.timer = timer

    def _timeout(self, result):
        """Gathering is over: show the notification bubble."""
        self.cleanup()
        self.bubble._popup()

    def new_file_found(self):
        """Another file arrived: push the popup a little further out."""
        self.timer.reset()

    def cleanup(self):
        """Stop the pending timer."""
        self.timer.cleanup()
1953 | 377 | |||
1954 | 378 | |||
class FileDiscoveryUpdateState(FileDiscoveryBaseState):
    """Keep the visible bubble updated while new files are found.

    Updates are rate-limited to at most one every ``updates_delay``
    seconds, and the whole update phase lasts ``updates_timeout``
    seconds at most.
    """

    updates_delay = 0.5
    updates_timeout = 10.0

    def __init__(self, *args):
        """Start the timer that bounds the whole update phase."""
        super(FileDiscoveryUpdateState, self).__init__(*args)
        self.main_timer = Timer(self.updates_timeout, clock=self.clock)
        self.main_timer.addCallback(self._timeout)
        self.updates_timer = None

    def _timeout(self, result):
        """The update phase is over: stop updating and go to sleep."""
        self.cleanup()
        self.bubble.start_sleeping()

    def _update(self, result):
        """The rate-limit timer fired: refresh the bubble.

        Clear the timer reference first so a later new_file_found can
        schedule another refresh; without this, only the very first
        update would ever be shown during the update phase.
        """
        self.updates_timer = None
        self.bubble._update()

    def new_file_found(self):
        """Schedule a bubble refresh unless one is already pending."""
        if self.updates_timer is None:
            self.updates_timer = Timer(self.updates_delay, clock=self.clock)
            self.updates_timer.addCallback(self._update)

    def cleanup(self):
        """Stop all pending timers."""
        self.main_timer.cleanup()
        if self.updates_timer is not None:
            self.updates_timer.cleanup()
1988 | 412 | |||
1989 | 413 | |||
class FileDiscoverySleepState(FileDiscoveryBaseState):
    """Dormant state: the bubble is not updated while sleeping."""

    # NOTE(review): 300 s is 5 minutes, but comments elsewhere in this
    # file say the bubble sleeps for 10 minutes -- confirm intended value.
    sleep_delay = 300.0

    def __init__(self, *args):
        """Start the timer that ends the sleep period."""
        super(FileDiscoverySleepState, self).__init__(*args)
        self.main_timer = Timer(self.sleep_delay, clock=self.clock)
        self.main_timer.addCallback(self._timeout)

    def _timeout(self, result):
        """Sleep is over: return the bubble to the idle state."""
        self.bubble._set_idle()

    def cleanup(self):
        """Stop the sleep timer."""
        self.main_timer.cleanup()
2008 | 432 | |||
2009 | 433 | |||
class FileDiscoveryBubble(object):
    """Show a notification for file discovery.

    Waits 3 seconds for the file count to coalesce, then pops up a
    notification.  While new files keep being found the notification is
    refreshed, but at a limited rate and only for a bounded time, and
    finally the bubble sleeps for a while so it does not get annoying.
    """

    state = None

    def __init__(self, status_aggregator, clock=reactor):
        """Initialize this instance in the idle state."""
        self.connected = False
        self.files_found = False
        self.clock = clock
        self.status_aggregator = status_aggregator
        self._set_idle()
        self.notification = None

    def _change_state(self, new_state_class):
        """Clean up the current state and switch to a fresh one."""
        current = self.state
        if current is not None:
            current.cleanup()
        self.state = new_state_class(self)

    def _set_idle(self):
        """Return this bubble to its initial (idle) state."""
        self._change_state(FileDiscoveryIdleState)

    def _start(self):
        """First file found: grab a notification and start gathering."""
        self.notification = self.status_aggregator.get_notification()
        self._change_state(FileDiscoveryGatheringState)

    def _popup(self):
        """Display the notification, then move to the update state."""
        if not self.connected:
            return
        text = self.status_aggregator.get_discovery_message()
        if text:
            self.notification.send_notification(UBUNTUONE_TITLE, text)
            logger.debug("notification shown: %s", text)
        self._change_state(FileDiscoveryUpdateState)

    def _update(self):
        """Refresh the text of the visible notification."""
        if not self.connected:
            return
        text = self.status_aggregator.get_discovery_message()
        if text:
            logger.debug("notification updated: %s", text)
            self.notification.send_notification(UBUNTUONE_TITLE, text)

    def start_sleeping(self):
        """Go quiet for a while so the bubble does not get annoying."""
        self._change_state(FileDiscoverySleepState)

    def cleanup(self):
        """Clean up the current state."""
        self.state.cleanup()

    def connection_made(self):
        """The server connection is up: show anything already gathered."""
        self.connected = True
        if self.files_found:
            self._popup()

    def connection_lost(self):
        """The server connection went down: stop popping up."""
        self.connected = False

    def new_file_found(self):
        """Record the discovery and forward it to the current state."""
        self.files_found = True
        self.state.new_file_found()
2087 | 511 | |||
2088 | 512 | |||
class ProgressBar(object):
    """Update a launcher progressbar, at most ten times per second."""

    pulsating = True
    visible = False
    progress = 0.0
    updates_delay = 0.1
    timer = None
    inhibitor_defer = None

    def __init__(self, clock=reactor):
        """Initialize this instance, falling back to a dummy launcher."""
        self.clock = clock
        try:
            self.launcher = UbuntuOneLauncher()
        except TypeError:
            # Unity GIR can cause a TypeError here so we should not fail
            self.launcher = DummyLauncher()

    def cleanup(self):
        """Drop the pending update timer, if any."""
        pending, self.timer = self.timer, None
        if pending is not None:
            pending.cleanup()

    def _timeout(self, result):
        """The aggregating timer expired: push the progress to the UI."""
        self.timer = None
        self.launcher.set_progress(self.progress)
        logger.debug("progressbar updated: %f", self.progress)

    def set_progress(self, progress):
        """Record new progress and make sure an update timer is ticking."""
        self.progress = progress
        if not self.visible:
            self.visible = True
            self.launcher.show_progressbar()
            logger.debug("progressbar shown")
        if self.inhibitor_defer is None:
            self.inhibitor_defer = session.inhibit_logout_suspend(
                FILE_SYNC_IN_PROGRESS)
        if self.timer is None:
            self.timer = Timer(self.updates_delay, clock=self.clock)
            self.timer.addCallback(self._timeout)

    def completed(self):
        """Everything finished: hide the bar and lift the inhibitor."""
        self.cleanup()
        self.visible = False
        self.launcher.hide_progressbar()
        logger.debug("progressbar hidden")
        if self.inhibitor_defer is not None:

            def inhibitor_callback(inhibitor):
                """The inhibitor was found, so cancel it."""
                self.inhibitor_defer = None
                return inhibitor.cancel()

            self.inhibitor_defer.addCallback(inhibitor_callback)
2147 | 571 | |||
2148 | 572 | |||
class FinalStatusBubble(object):
    """Final bubble summarizing the status of completed transfers."""

    notification = None

    def __init__(self, status_aggregator):
        """Keep a reference to the aggregator that holds the stats."""
        self.status_aggregator = status_aggregator

    def cleanup(self):
        """Clean up this instance (nothing to release)."""

    def show(self):
        """Pop up the final status notification."""
        aggregator = self.status_aggregator
        self.notification = aggregator.get_notification()
        text = aggregator.get_final_status_message()
        self.notification.send_notification(UBUNTUONE_TITLE, text)
2166 | 590 | |||
2167 | 591 | |||
def group_statuses(status_events):
    """Group the status events by their WEIGHT attribute, ascending."""
    by_weight = operator.attrgetter("WEIGHT")
    ordered = sorted(status_events, key=by_weight)
    return itertools.groupby(ordered, by_weight)
2173 | 597 | |||
2174 | 598 | |||
class StatusAggregator(object):
    """The status aggregator backend.

    Tracks queued uploads/downloads, drives the discovery bubble, the
    launcher progressbar and the final status bubble.
    """

    file_discovery_bubble = None
    final_status_bubble = None

    def __init__(self, clock=reactor):
        """Initialize this instance."""
        self.clock = clock
        self.notification_switch = NotificationSwitch()
        self.queue_done_timer = None
        self.reset()
        self.progress_bar = ProgressBar(clock=self.clock)
        self.finished_delay = 10
        self.progress = {}
        self.to_do = {}

    def get_notification(self):
        """Create a new toggleable notification object."""
        return self.notification_switch.get_notification()

    # pylint: disable=W0201
    def reset(self):
        """Reset all counters, timers and bubbles for a new batch."""
        self.download_done = 0
        self.upload_done = 0
        self.files_uploading = []
        self.uploading_filename = ''
        self.files_downloading = []
        self.downloading_filename = ''
        if self.queue_done_timer is not None:
            self.queue_done_timer.cleanup()
            self.queue_done_timer = None

        if self.file_discovery_bubble:
            self.file_discovery_bubble.cleanup()
        self.file_discovery_bubble = FileDiscoveryBubble(self,
                                                         clock=self.clock)

        if self.final_status_bubble:
            self.final_status_bubble.cleanup()
        self.final_status_bubble = FinalStatusBubble(self)
        self.progress = {}
        self.to_do = {}
    # pylint: enable=W0201

    def get_discovery_message(self):
        """Get the text for the discovery bubble.

        Both filenames are refreshed from the head of the live queues,
        so a stale name from a command that already finished is never
        displayed.  (Previously only the downloading filename was
        refreshed; the uploading one could go stale.)
        """
        lines = []
        files_uploading = len(self.files_uploading)
        if files_uploading > 0:
            self.uploading_filename = os.path.basename(
                self.files_uploading[0].path)
            lines.append(files_being_uploaded(
                self.uploading_filename, files_uploading))
        files_downloading = len(self.files_downloading)
        if files_downloading > 0:
            self.downloading_filename = os.path.basename(
                self.files_downloading[0].path)
            lines.append(files_being_downloaded(
                self.downloading_filename, files_downloading))
        return "\n".join(lines)

    def get_final_status_message(self):
        """Get some lines describing all we did."""
        parts = [FINAL_COMPLETED]
        upload_done = self.upload_done
        if upload_done:
            parts.append(files_were_uploaded(
                self.uploading_filename, upload_done))

        download_done = self.download_done
        if download_done:
            parts.append(files_were_downloaded(
                self.downloading_filename, download_done))
        return "\n".join(parts)

    def _queue_done(self, _):
        """Show the final bubble and reset counters."""
        self.queue_done_timer.cleanup()
        self.queue_done_timer = None
        logger.debug("queue done callback fired")
        # Only show the summary when something was actually transferred.
        if self.upload_done + self.download_done > 0:
            self.final_status_bubble.show()
        self.progress_bar.completed()
        self.reset()

    def queue_done(self):
        """The queue is finished; (re)arm the debounce timer."""
        if not self.to_do:
            return
        if self.queue_done_timer is None:
            logger.debug("queue done callback added")
            self.queue_done_timer = Timer(
                self.finished_delay, clock=self.clock)
            self.queue_done_timer.addCallback(self._queue_done)
            return
        logger.debug("queue done callback reset")
        self.queue_done_timer.reset()

    def update_progressbar(self):
        """Update the counters of the progressbar."""
        if len(self.to_do) > 0:
            progress = float(
                sum(self.progress.values())) / sum(self.to_do.values())
            self.progress_bar.set_progress(progress)

    def download_started(self, command):
        """A download just started."""
        if self.queue_done_timer is not None:
            self.queue_done_timer.cleanup()
            self.queue_done_timer = None
        self.files_downloading.append(command)
        if command.deflated_size is not None:
            self.to_do[
                (command.share_id, command.node_id)] = command.deflated_size
        # pylint: disable=W0201
        if not self.downloading_filename:
            self.downloading_filename = os.path.basename(
                self.files_downloading[0].path)
        # pylint: enable=W0201
        self.update_progressbar()
        logger.debug(
            "queueing command (total: %d): %s",
            len(self.to_do), command.__class__.__name__)
        self.file_discovery_bubble.new_file_found()

    def download_finished(self, command):
        """A download just finished."""
        if command in self.files_downloading:
            self.files_downloading.remove(command)
        self.download_done += 1
        if command.deflated_size is not None:
            self.progress[
                (command.share_id, command.node_id)] = command.deflated_size
        logger.debug("unqueueing command: %s", command.__class__.__name__)
        self.update_progressbar()

    def upload_started(self, command):
        """An upload just started."""
        if self.queue_done_timer is not None:
            self.queue_done_timer.cleanup()
            self.queue_done_timer = None
        self.files_uploading.append(command)
        if command.deflated_size is not None:
            self.to_do[
                (command.share_id, command.node_id)] = command.deflated_size
        # pylint: disable=W0201
        if not self.uploading_filename:
            self.uploading_filename = os.path.basename(
                self.files_uploading[0].path)
        # pylint: enable=W0201
        self.update_progressbar()
        logger.debug(
            "queueing command (total: %d): %s", len(self.to_do),
            command.__class__.__name__)
        self.file_discovery_bubble.new_file_found()

    def upload_finished(self, command):
        """An upload just finished."""
        if command in self.files_uploading:
            self.files_uploading.remove(command)
        self.upload_done += 1
        if command.deflated_size is not None:
            self.progress[
                (command.share_id, command.node_id)] = command.deflated_size
        logger.debug("unqueueing command: %s", command.__class__.__name__)
        self.update_progressbar()

    def progress_made(self, share_id, node_id, n_bytes_written, deflated_size):
        """Progress made on up- or download."""
        if n_bytes_written is not None:
            # if we haven't gotten the total size yet, set it now
            if deflated_size and (share_id, node_id) not in self.to_do:
                self.to_do[(share_id, node_id)] = deflated_size
            self.progress[(share_id, node_id)] = n_bytes_written
        self.update_progressbar()

    def connection_lost(self):
        """The connection to the server was lost."""
        self.file_discovery_bubble.connection_lost()

    def connection_made(self):
        """The connection to the server was made."""
        self.file_discovery_bubble.connection_made()
2359 | 783 | |||
2360 | 784 | |||
class StatusFrontend(object):
    """Frontend for the status aggregator, used by the StatusListener."""

    def __init__(self, clock=reactor):
        """Initialize this instance."""
        self.aggregator = StatusAggregator(clock=clock)
        self.notification = self.aggregator.get_notification()
        self.messaging = Messaging()
        self.quota_timer = None

    def file_published(self, public_url):
        """A file was published."""
        event = FilePublishingStatus(new_public_url=public_url)
        self.notification.send_notification(UBUNTUONE_TITLE, event.one())

    def file_unpublished(self, public_url):  # pylint: disable=W0613
        """A file was unpublished."""
        message = FileUnpublishingStatus().one()
        self.notification.send_notification(UBUNTUONE_TITLE, message)

    def download_started(self, command):
        """A file was queued for download."""
        self.aggregator.download_started(command)

    def download_finished(self, command):
        """A file download was unqueued."""
        self.aggregator.download_finished(command)

    def upload_started(self, command):
        """A file was queued for upload."""
        self.aggregator.upload_started(command)

    def upload_finished(self, command):
        """A file upload was unqueued."""
        self.aggregator.upload_finished(command)

    def progress_made(self, share_id, node_id, n_bytes_written, deflated_size):
        """Progress made on up- or download."""
        self.aggregator.progress_made(
            share_id, node_id, n_bytes_written, deflated_size)

    def queue_done(self):
        """The queue is empty."""
        self.aggregator.queue_done()

    def new_share_available(self, share):
        """A new share is available for subscription."""
        self.messaging.show_message(share.other_visible_name)
        self.notification.send_notification(
            UBUNTUONE_TITLE, ShareAvailableStatus(share=share).one())

    def new_udf_available(self, udf):
        """A new udf is available for subscription."""
        if udf.subscribed:
            return
        self.notification.send_notification(
            UBUNTUONE_TITLE, UDFAvailableStatus(udf=udf).one())

    def server_connection_lost(self):
        """The client lost the connection to the server."""
        logger.debug("server connection lost")
        self.aggregator.connection_lost()

    def server_connection_made(self):
        """The client made the connection to the server."""
        logger.debug("server connection made")
        self.aggregator.connection_made()

    def udf_quota_exceeded(self, volume_dict):
        """Quota exceeded in UDF."""
        logger.debug("UDF quota exceeded for volume %r." % volume_dict)
        alert_user()

    def share_quota_exceeded(self, volume_dict):
        """Sharing user's quota exceeded in share."""
        logger.debug("Share quota exceeded for volume %r." % volume_dict)
        # NOTE(review): once the one-day timer fires it is never
        # recreated (quota_timer stays non-None but inactive), so this
        # throttling only suppresses repeats during the first day --
        # confirm whether that is the intended behavior.
        if self.quota_timer is not None:
            if self.quota_timer.active:
                return
        else:
            self.quota_timer = Timer(ONE_DAY, clock=self.aggregator.clock)
        self.notification.send_notification(
            UBUNTUONE_TITLE, SHARE_QUOTA_EXCEEDED % (
                volume_dict['path'], volume_dict['other_visible_name']))
        alert_user()

    def root_quota_exceeded(self, volume_dict):
        """Quota exceeded in root."""
        logger.debug("Root quota exceeded for volume %r." % volume_dict)
        alert_user()

    def set_show_all_notifications(self, value):
        """Set the flag to show all notifications."""
        switch = self.aggregator.notification_switch
        if value:
            switch.enable_notifications()
        else:
            switch.disable_notifications()
2459 | 0 | 883 | ||
2460 | === added file 'debian/patches/03_reset_notify_name.patch' | |||
2461 | --- debian/patches/03_reset_notify_name.patch 1970-01-01 00:00:00 +0000 | |||
2462 | +++ debian/patches/03_reset_notify_name.patch 2011-12-14 21:11:28 +0000 | |||
2463 | @@ -0,0 +1,74 @@ | |||
2464 | 1 | === modified file 'tests/status/test_aggregator.py' | ||
2465 | 2 | --- old/tests/status/test_aggregator.py 2011-10-27 13:47:09 +0000 | ||
2466 | 3 | +++ new/tests/status/test_aggregator.py 2011-12-07 20:41:48 +0000 | ||
2467 | 4 | @@ -1327,6 +1327,26 @@ | ||
2468 | 5 | result = self.aggregator.get_discovery_message() | ||
2469 | 6 | self.assertEqual(expected, result) | ||
2470 | 7 | |||
2471 | 8 | + def test_get_discovery_message_clears_filenames(self): | ||
2472 | 9 | + """Test the message that's shown on the discovery bubble.""" | ||
2473 | 10 | + uploading = 10 | ||
2474 | 11 | + downloading = 8 | ||
2475 | 12 | + filename = 'upfile0.ext' | ||
2476 | 13 | + filename2 = 'downfile0.ext' | ||
2477 | 14 | + self.aggregator.files_uploading.extend([ | ||
2478 | 15 | + FakeCommand(path='upfile%d.ext' % n) for n in range(uploading)]) | ||
2479 | 16 | + self.aggregator.uploading_filename = filename | ||
2480 | 17 | + self.aggregator.files_downloading.extend([ | ||
2481 | 18 | + FakeCommand(path='downfile%d.ext' % n) for n in | ||
2482 | 19 | + range(downloading)]) | ||
2483 | 20 | + self.aggregator.downloading_filename = 'STALE FILENAME' | ||
2484 | 21 | + self.aggregator.uploading_filename = 'STALE FILENAME' | ||
2485 | 22 | + expected = ( | ||
2486 | 23 | + aggregator.files_being_uploaded(filename, uploading) + "\n" + | ||
2487 | 24 | + aggregator.files_being_downloaded(filename2, downloading)) | ||
2488 | 25 | + result = self.aggregator.get_discovery_message() | ||
2489 | 26 | + self.assertEqual(expected, result) | ||
2490 | 27 | + | ||
2491 | 28 | def test_get_final_status_message(self): | ||
2492 | 29 | """The final status message.""" | ||
2493 | 30 | done = (5, 10) | ||
2494 | 31 | |||
2495 | 32 | === modified file 'ubuntuone/status/aggregator.py' | ||
2496 | 33 | --- old/ubuntuone/status/aggregator.py 2011-10-21 15:49:18 +0000 | ||
2497 | 34 | +++ new/ubuntuone/status/aggregator.py 2011-12-12 22:50:55 +0000 | ||
2498 | 35 | @@ -646,12 +646,14 @@ | ||
2499 | 36 | lines = [] | ||
2500 | 37 | files_uploading = len(self.files_uploading) | ||
2501 | 38 | if files_uploading > 0: | ||
2502 | 39 | + self.uploading_filename = os.path.basename( | ||
2503 | 40 | + self.files_uploading[0].path) | ||
2504 | 41 | lines.append(files_being_uploaded( | ||
2505 | 42 | self.uploading_filename, files_uploading)) | ||
2506 | 43 | files_downloading = len(self.files_downloading) | ||
2507 | 44 | if files_downloading > 0: | ||
2508 | 45 | - self.downloading_filename = self.files_downloading[0].path.split( | ||
2509 | 46 | - os.path.sep)[-1] | ||
2510 | 47 | + self.downloading_filename = os.path.basename( | ||
2511 | 48 | + self.files_downloading[0].path) | ||
2512 | 49 | lines.append(files_being_downloaded( | ||
2513 | 50 | self.downloading_filename, files_downloading)) | ||
2514 | 51 | return "\n".join(lines) | ||
2515 | 52 | @@ -712,8 +714,8 @@ | ||
2516 | 53 | (command.share_id, command.node_id)] = command.deflated_size | ||
2517 | 54 | # pylint: disable=W0201 | ||
2518 | 55 | if not self.downloading_filename: | ||
2519 | 56 | - self.downloading_filename = self.files_downloading[0].path.split( | ||
2520 | 57 | - os.path.sep)[-1] | ||
2521 | 58 | + self.downloading_filename = os.path.basename( | ||
2522 | 59 | + self.files_downloading[0].path) | ||
2523 | 60 | # pylint: enable=W0201 | ||
2524 | 61 | self.update_progressbar() | ||
2525 | 62 | logger.debug( | ||
2526 | 63 | @@ -743,8 +745,8 @@ | ||
2527 | 64 | (command.share_id, command.node_id)] = command.deflated_size | ||
2528 | 65 | # pylint: disable=W0201 | ||
2529 | 66 | if not self.uploading_filename: | ||
2530 | 67 | - self.uploading_filename = self.files_uploading[0].path.split( | ||
2531 | 68 | - os.path.sep)[-1] | ||
2532 | 69 | + self.uploading_filename = os.path.basename( | ||
2533 | 70 | + self.files_uploading[0].path) | ||
2534 | 71 | # pylint: enable=W0201 | ||
2535 | 72 | self.update_progressbar() | ||
2536 | 73 | logger.debug( | ||
2537 | 74 | |||
2538 | 0 | 75 | ||
2539 | === renamed file 'debian/patches/03_reset_notify_name.patch' => 'debian/patches/03_reset_notify_name.patch.moved' |