Merge lp:~nataliabidart/magicicada-client/fix-lint-errors into lp:magicicada-client
- fix-lint-errors
- Merge into trunk
Proposed by
Natalia Bidart
Status: Merged
Approved by: Natalia Bidart
Approved revision: 1431
Merged at revision: 1423
Proposed branch: lp:~nataliabidart/magicicada-client/fix-lint-errors
Merge into: lp:magicicada-client
Diff against target: |
11214 lines (+2228/-2227) 90 files modified
Makefile (+1/-1) ubuntuone/clientdefs.py.in (+4/-3) ubuntuone/keyring.py (+2/-2) ubuntuone/networkstate/darwin.py (+12/-12) ubuntuone/networkstate/linux.py (+1/-2) ubuntuone/networkstate/tests/test_darwin.py (+3/-4) ubuntuone/networkstate/tests/test_linux.py (+11/-11) ubuntuone/networkstate/windows.py (+2/-2) ubuntuone/platform/__init__.py (+5/-4) ubuntuone/platform/filesystem_notifications/monitor/common.py (+2/-2) ubuntuone/platform/filesystem_notifications/monitor/linux.py (+1/-0) ubuntuone/platform/filesystem_notifications/notify_processor/linux.py (+3/-4) ubuntuone/platform/ipc/linux.py (+3/-3) ubuntuone/platform/ipc/perspective_broker.py (+4/-2) ubuntuone/platform/ipc/windows.py (+1/-1) ubuntuone/platform/notification/linux.py (+4/-5) ubuntuone/platform/sync_menu/linux.py (+4/-1) ubuntuone/platform/tests/filesystem_notifications/common.py (+55/-50) ubuntuone/platform/tests/filesystem_notifications/test_darwin.py (+78/-63) ubuntuone/platform/tests/filesystem_notifications/test_filesystem_notifications.py (+7/-3) ubuntuone/platform/tests/filesystem_notifications/test_fsevents_daemon.py (+9/-6) ubuntuone/platform/tests/filesystem_notifications/test_linux.py (+9/-8) ubuntuone/platform/tests/filesystem_notifications/test_windows.py (+7/-8) ubuntuone/platform/tests/ipc/test_external_interface.py (+47/-45) ubuntuone/platform/tests/ipc/test_linux.py (+7/-9) ubuntuone/platform/tests/ipc/test_perspective_broker.py (+30/-22) ubuntuone/platform/tests/ipc/test_unix.py (+4/-5) ubuntuone/platform/tests/ipc/test_windows.py (+5/-9) ubuntuone/platform/tests/linux/test_vm.py (+106/-98) ubuntuone/platform/tests/os_helper/test_darwin.py (+2/-2) ubuntuone/platform/tests/os_helper/test_os_helper.py (+13/-7) ubuntuone/platform/tests/os_helper/test_windows.py (+11/-8) ubuntuone/platform/tests/session/test_common.py (+0/-1) ubuntuone/platform/tests/session/test_linux.py (+3/-5) ubuntuone/platform/tests/sync_menu/test_linux.py (+58/-45) ubuntuone/platform/tests/test_tools.py (+23/-13) 
ubuntuone/platform/tests/test_u1sdtool.py (+22/-22) ubuntuone/platform/tests/tools/test_tools.py (+28/-22) ubuntuone/platform/tests/windows/run_sdtool.py (+5/-4) ubuntuone/platform/tools/perspective_broker.py (+5/-4) ubuntuone/proxy/tests/__init__.py (+0/-4) ubuntuone/proxy/tests/test_tunnel_client.py (+6/-6) ubuntuone/proxy/tests/test_tunnel_server.py (+11/-9) ubuntuone/proxy/tunnel_server.py (+6/-4) ubuntuone/status/aggregator.py (+6/-6) ubuntuone/status/tests/test_aggregator.py (+30/-25) ubuntuone/syncdaemon/action_queue.py (+2/-2) ubuntuone/syncdaemon/config.py (+3/-1) ubuntuone/syncdaemon/filesystem_manager.py (+2/-2) ubuntuone/syncdaemon/fsm/fsm.py (+1/-17) ubuntuone/syncdaemon/fsm/fsm_parser.py (+45/-35) ubuntuone/syncdaemon/main.py (+9/-9) ubuntuone/syncdaemon/tests/fsm/test_fsm.py (+18/-20) ubuntuone/syncdaemon/tests/fsm/test_fsm_run.py (+12/-13) ubuntuone/syncdaemon/tests/test_action_queue.py (+24/-14) ubuntuone/syncdaemon/tests/test_config.py (+76/-82) ubuntuone/syncdaemon/tests/test_eq_inotify.py (+68/-68) ubuntuone/syncdaemon/tests/test_eventqueue.py (+55/-35) ubuntuone/syncdaemon/tests/test_eventsnanny.py (+2/-1) ubuntuone/syncdaemon/tests/test_fileshelf.py (+84/-84) ubuntuone/syncdaemon/tests/test_fsm.py (+171/-155) ubuntuone/syncdaemon/tests/test_hashqueue.py (+160/-260) ubuntuone/syncdaemon/tests/test_interaction_interfaces.py (+6/-4) ubuntuone/syncdaemon/tests/test_localrescan.py (+90/-129) ubuntuone/syncdaemon/tests/test_logger.py (+48/-48) ubuntuone/syncdaemon/tests/test_main.py (+2/-2) ubuntuone/syncdaemon/tests/test_mutefilter.py (+9/-11) ubuntuone/syncdaemon/tests/test_pathlockingtree.py (+0/-1) ubuntuone/syncdaemon/tests/test_states.py (+15/-10) ubuntuone/syncdaemon/tests/test_sync.py (+113/-107) ubuntuone/syncdaemon/tests/test_tritcask.py (+143/-123) ubuntuone/syncdaemon/tests/test_tunnel_runner.py (+2/-2) ubuntuone/syncdaemon/tests/test_u1fsfsm.py (+2/-0) ubuntuone/syncdaemon/tests/test_vm.py (+302/-298) 
ubuntuone/syncdaemon/tests/test_vm_helper.py (+2/-2) ubuntuone/syncdaemon/volume_manager.py (+34/-17) ubuntuone/utils/__init__.py (+1/-1) ubuntuone/utils/ipc.py (+4/-4) ubuntuone/utils/tests/test_ipc.py (+11/-13) ubuntuone/utils/tests/test_tcpactivation.py (+18/-25) ubuntuone/utils/tests/test_translation.py (+13/-12) ubuntuone/utils/tests/test_txsecrets.py (+0/-5) ubuntuone/utils/txsecrets.py (+4/-4) ubuntuone/utils/webclient/__init__.py (+0/-2) ubuntuone/utils/webclient/common.py (+1/-1) ubuntuone/utils/webclient/libsoup.py (+2/-2) ubuntuone/utils/webclient/tests/test_timestamp.py (+5/-4) ubuntuone/utils/webclient/tests/test_webclient.py (+9/-25) ubuntuone/utils/webclient/timestamp.py (+1/-1) ubuntuone/utils/webclient/txweb.py (+3/-4) |
To merge this branch: | bzr merge lp:~nataliabidart/magicicada-client/fix-lint-errors |
Related bugs: |
Reviewer: Natalia Bidart — Review type: Approve — Status: Approved
Review via email: mp+301581@code.launchpad.net
Commit message
- Fixed all lint errors as per latest flake8 checks.
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'Makefile' |
2 | --- Makefile 2015-09-19 23:15:50 +0000 |
3 | +++ Makefile 2016-07-30 22:04:10 +0000 |
4 | @@ -49,7 +49,7 @@ |
5 | lint: |
6 | virtualenv $(ENV) |
7 | $(ENV)/bin/pip install flake8 |
8 | - $(ENV)/bin/flake8 --filename='*.py' --exclude='u1fsfsm.py' ubuntuone |
9 | + $(ENV)/bin/flake8 --filename='*.py' --exclude='u1fsfsm.py,test_run_hello.py' ubuntuone |
10 | |
11 | test: lint |
12 | ./run-tests |
13 | |
14 | === modified file 'ubuntuone/clientdefs.py.in' |
15 | --- ubuntuone/clientdefs.py.in 2016-05-29 00:50:05 +0000 |
16 | +++ ubuntuone/clientdefs.py.in 2016-07-30 22:04:10 +0000 |
17 | @@ -35,10 +35,7 @@ |
18 | import platform |
19 | import urllib |
20 | |
21 | -Q_ = lambda string: gettext.dgettext(GETTEXT_PACKAGE, string) |
22 | - |
23 | NAME = 'Magicicada' |
24 | - |
25 | VERSION = "@VERSION@" |
26 | LOCALEDIR = "@localedir@" |
27 | LIBEXECDIR = "@libexecdir@" |
28 | @@ -48,6 +45,10 @@ |
29 | BIN_DIR = os.path.join('@prefix@', 'lib', PROJECT_NAME) |
30 | |
31 | |
32 | +def Q_(string): |
33 | + return gettext.dgettext(GETTEXT_PACKAGE, string) |
34 | + |
35 | + |
36 | def platform_data(): |
37 | result = {'platform': platform.system(), |
38 | 'platform_version': platform.release(), |
39 | |
40 | === modified file 'ubuntuone/keyring.py' |
41 | --- ubuntuone/keyring.py 2016-05-29 20:12:25 +0000 |
42 | +++ ubuntuone/keyring.py 2016-07-30 22:04:10 +0000 |
43 | @@ -37,9 +37,9 @@ |
44 | import sys |
45 | |
46 | try: |
47 | - from urllib.parse import parse_qsl, quote, urlencode |
48 | + from urllib.parse import parse_qsl, urlencode |
49 | except ImportError: |
50 | - from urllib import quote, urlencode |
51 | + from urllib import urlencode |
52 | from urlparse import parse_qsl |
53 | |
54 | from twisted.internet.defer import inlineCallbacks, returnValue |
55 | |
56 | === modified file 'ubuntuone/networkstate/darwin.py' |
57 | --- ubuntuone/networkstate/darwin.py 2016-05-29 00:50:05 +0000 |
58 | +++ ubuntuone/networkstate/darwin.py 2016-07-30 22:04:10 +0000 |
59 | @@ -24,17 +24,6 @@ |
60 | |
61 | import logging |
62 | |
63 | -from threading import Thread |
64 | - |
65 | -from twisted.internet import defer |
66 | - |
67 | -from ubuntuone.networkstate import NetworkFailException |
68 | -from ubuntuone.networkstate.networkstates import (ONLINE, OFFLINE, UNKNOWN) |
69 | - |
70 | -logger = logging.getLogger(__name__) |
71 | - |
72 | -HOSTNAME_TO_CHECK = 'login.ubuntu.com' |
73 | - |
74 | from ctypes import ( |
75 | CDLL, |
76 | POINTER, |
77 | @@ -44,11 +33,22 @@ |
78 | c_bool, |
79 | c_long, |
80 | c_void_p, |
81 | - c_uint32) |
82 | + c_uint32, |
83 | +) |
84 | +from threading import Thread |
85 | + |
86 | +from twisted.internet import defer |
87 | + |
88 | +from ubuntuone.networkstate import NetworkFailException |
89 | +from ubuntuone.networkstate.networkstates import (ONLINE, OFFLINE, UNKNOWN) |
90 | + |
91 | |
92 | from ctypes.util import find_library |
93 | |
94 | |
95 | +logger = logging.getLogger(__name__) |
96 | +HOSTNAME_TO_CHECK = 'login.ubuntu.com' |
97 | + |
98 | # Functions and constants below are from |
99 | # /System/Library/CoreFoundation.framework/ |
100 | CoreFoundationPath = find_library("CoreFoundation") |
101 | |
102 | === modified file 'ubuntuone/networkstate/linux.py' |
103 | --- ubuntuone/networkstate/linux.py 2016-05-29 00:50:05 +0000 |
104 | +++ ubuntuone/networkstate/linux.py 2016-07-30 22:04:10 +0000 |
105 | @@ -76,8 +76,7 @@ |
106 | """Called by DBus when the state is retrieved from NM.""" |
107 | # Assuming since Network Manager is not running, |
108 | # the user has connected in some other way |
109 | - logger.error("Error contacting NetworkManager: %s" % |
110 | - str(error)) |
111 | + logger.error("Error contacting NetworkManager: %s", error) |
112 | self.call_result_cb(ONLINE) |
113 | |
114 | def state_changed(self, state): |
115 | |
116 | === modified file 'ubuntuone/networkstate/tests/test_darwin.py' |
117 | --- ubuntuone/networkstate/tests/test_darwin.py 2016-05-29 00:50:05 +0000 |
118 | +++ ubuntuone/networkstate/tests/test_darwin.py 2016-07-30 22:04:10 +0000 |
119 | @@ -81,7 +81,7 @@ |
120 | |
121 | def expect_unknown(self, state): |
122 | """A convenience callback that fails unless it sees UNKNOWN.""" |
123 | - self.assertEquals(state, UNKNOWN) |
124 | + self.assertEqual(state, UNKNOWN) |
125 | |
126 | def test_exc_in_find_online_state(self): |
127 | """Expect UNKNOWN from find_online_state in case of exception.""" |
128 | @@ -159,11 +159,10 @@ |
129 | """Test the changes in the network connection.""" |
130 | nms = NetworkManagerState(self._listen_network_changes) |
131 | nms._state_changed(2) |
132 | - nms._state_changed(0) # 0 or anything other than 2. |
133 | + nms._state_changed(0) # 0 or anything other than 2 |
134 | nms._state_changed(2) |
135 | |
136 | - self.assertEqual(self.network_changes, |
137 | - [ONLINE, OFFLINE, ONLINE]) |
138 | + self.assertEqual(self.network_changes, [ONLINE, OFFLINE, ONLINE]) |
139 | |
140 | |
141 | class TestIsMachineConnectedFunc(TestCase): |
142 | |
143 | === modified file 'ubuntuone/networkstate/tests/test_linux.py' |
144 | --- ubuntuone/networkstate/tests/test_linux.py 2016-05-29 00:50:05 +0000 |
145 | +++ ubuntuone/networkstate/tests/test_linux.py 2016-07-30 22:04:10 +0000 |
146 | @@ -161,18 +161,18 @@ |
147 | |
148 | nms.find_online_state() |
149 | |
150 | - self.nm_interface.emit_signal('StateChanged', |
151 | - NM_STATE_CONNECTED_GLOBAL) |
152 | + self.nm_interface.emit_signal( |
153 | + 'StateChanged', NM_STATE_CONNECTED_GLOBAL) |
154 | self.nm_interface.emit_signal('StateChanged', NM_STATE_DISCONNECTED) |
155 | - self.nm_interface.emit_signal('StateChanged', |
156 | - NM_STATE_CONNECTED_GLOBAL) |
157 | + self.nm_interface.emit_signal( |
158 | + 'StateChanged', NM_STATE_CONNECTED_GLOBAL) |
159 | |
160 | self.assertEqual(nms.state_signal.name, "StateChanged") |
161 | self.assertEqual(nms.state_signal.callback, nms.state_changed) |
162 | - self.assertEqual(nms.state_signal.interface, |
163 | - "org.freedesktop.NetworkManager") |
164 | - self.assertEqual(self.network_changes, |
165 | - [ONLINE, ONLINE, OFFLINE, ONLINE]) |
166 | + self.assertEqual( |
167 | + nms.state_signal.interface, "org.freedesktop.NetworkManager") |
168 | + self.assertEqual( |
169 | + self.network_changes, [ONLINE, ONLINE, OFFLINE, ONLINE]) |
170 | self.assertFalse(nms.state_signal.removed) |
171 | |
172 | @inlineCallbacks |
173 | @@ -245,15 +245,15 @@ |
174 | |
175 | def assertOnline(self, state): |
176 | """Check that the state given is ONLINE.""" |
177 | - self.assertEquals(state, ONLINE) |
178 | + self.assertEqual(state, ONLINE) |
179 | |
180 | def assertOffline(self, state): |
181 | """Check that the state given is OFFLINE.""" |
182 | - self.assertEquals(state, OFFLINE) |
183 | + self.assertEqual(state, OFFLINE) |
184 | |
185 | def assertUnknown(self, state): |
186 | """Check that the state was UNKNOWN.""" |
187 | - self.assertEquals(state, UNKNOWN) |
188 | + self.assertEqual(state, UNKNOWN) |
189 | |
190 | def get_nms(self, callback): |
191 | """Get the NetworkManagerState object.""" |
192 | |
193 | === modified file 'ubuntuone/networkstate/windows.py' |
194 | --- ubuntuone/networkstate/windows.py 2016-05-29 00:50:05 +0000 |
195 | +++ ubuntuone/networkstate/windows.py 2016-07-30 22:04:10 +0000 |
196 | @@ -75,8 +75,8 @@ |
197 | 'ConnectionLost') |
198 | |
199 | SUBSCRIPTION_REACH = ('{4c6b2afa-3235-4185-8558-57a7a922ac7b}', |
200 | - 'UbuntuOne Network Reach', |
201 | - 'ConnectionMade') |
202 | + 'UbuntuOne Network Reach', |
203 | + 'ConnectionMade') |
204 | |
205 | SUBSCRIPTION_REACH_NOQOC = ('{db62fa23-4c3e-47a3-aef2-b843016177cf}', |
206 | 'UbuntuOne Network Reach No Info', |
207 | |
208 | === modified file 'ubuntuone/platform/__init__.py' |
209 | --- ubuntuone/platform/__init__.py 2015-09-19 23:15:50 +0000 |
210 | +++ ubuntuone/platform/__init__.py 2016-07-30 22:04:10 +0000 |
211 | @@ -33,6 +33,11 @@ |
212 | |
213 | from dirspec.utils import user_home |
214 | |
215 | +from ubuntuone.platform import ipc |
216 | +from ubuntuone.platform import logger |
217 | +from ubuntuone.platform import os_helper |
218 | + |
219 | + |
220 | # define a platform string separate from sys.platform to be sent to |
221 | # the server for metrics in ActionQueue.authenticate(). |
222 | if sys.platform == "win32": |
223 | @@ -42,10 +47,6 @@ |
224 | else: |
225 | platform = "linux" |
226 | |
227 | -from ubuntuone.platform import ipc |
228 | -from ubuntuone.platform import logger |
229 | -from ubuntuone.platform import os_helper |
230 | - |
231 | |
232 | def expand_user(path): |
233 | """Fix Python expanduser for weird chars in windows.""" |
234 | |
235 | === modified file 'ubuntuone/platform/filesystem_notifications/monitor/common.py' |
236 | --- ubuntuone/platform/filesystem_notifications/monitor/common.py 2015-09-19 23:15:50 +0000 |
237 | +++ ubuntuone/platform/filesystem_notifications/monitor/common.py 2016-07-30 22:04:10 +0000 |
238 | @@ -305,8 +305,8 @@ |
239 | path += os.path.sep |
240 | for current_wd in self._wdm: |
241 | watch_path = self._wdm[current_wd].path |
242 | - if ((watch_path == path or watch_path in path) |
243 | - and path not in self._ignored_paths): |
244 | + if ((watch_path == path or watch_path in path) and |
245 | + path not in self._ignored_paths): |
246 | return current_wd |
247 | |
248 | def get_path(self, wd): |
249 | |
250 | === modified file 'ubuntuone/platform/filesystem_notifications/monitor/linux.py' |
251 | --- ubuntuone/platform/filesystem_notifications/monitor/linux.py 2015-09-19 23:15:50 +0000 |
252 | +++ ubuntuone/platform/filesystem_notifications/monitor/linux.py 2016-07-30 22:04:10 +0000 |
253 | @@ -64,6 +64,7 @@ |
254 | pyinotify.IN_MOVED_FROM | |
255 | pyinotify.IN_MOVED_TO | |
256 | pyinotify.IN_MOVE_SELF) |
257 | + |
258 | INOTIFY_EVENTS_ANCESTORS = ( |
259 | pyinotify.IN_DELETE | |
260 | pyinotify.IN_MOVED_FROM | |
261 | |
262 | === modified file 'ubuntuone/platform/filesystem_notifications/notify_processor/linux.py' |
263 | --- ubuntuone/platform/filesystem_notifications/notify_processor/linux.py 2015-09-19 23:15:50 +0000 |
264 | +++ ubuntuone/platform/filesystem_notifications/notify_processor/linux.py 2016-07-30 22:04:10 +0000 |
265 | @@ -33,6 +33,9 @@ |
266 | import pyinotify |
267 | from twisted.internet import reactor, error |
268 | |
269 | +from ubuntuone.syncdaemon.filesystem_notifications import ( |
270 | + GeneralINotifyProcessor, |
271 | +) |
272 | |
273 | # translates quickly the event and it's is_dir state to our standard events |
274 | NAME_TRANSLATIONS = { |
275 | @@ -65,10 +68,6 @@ |
276 | pyinotify.IN_MOVED_TO | |
277 | pyinotify.IN_MOVE_SELF) |
278 | |
279 | -from ubuntuone.syncdaemon.filesystem_notifications import ( |
280 | - GeneralINotifyProcessor, |
281 | -) |
282 | - |
283 | |
284 | def validate_filename(real_func): |
285 | """Decorator that validates the filename.""" |
286 | |
287 | === modified file 'ubuntuone/platform/ipc/linux.py' |
288 | --- ubuntuone/platform/ipc/linux.py 2016-05-29 14:29:29 +0000 |
289 | +++ ubuntuone/platform/ipc/linux.py 2016-07-30 22:04:10 +0000 |
290 | @@ -37,7 +37,7 @@ |
291 | from twisted.internet import defer |
292 | from xml.etree import ElementTree |
293 | |
294 | -from ubuntuone.platform.launcher import Launcher |
295 | +from ubuntuone.platform import launcher |
296 | from ubuntuone.syncdaemon import ( |
297 | RECENT_TRANSFERS, |
298 | UPLOADING, |
299 | @@ -814,8 +814,8 @@ |
300 | @dbus.service.method(DBUS_IFACE_LAUNCHER_NAME) |
301 | def unset_urgency(self): |
302 | """Unset urgency on the launcher.""" |
303 | - launcher = Launcher() |
304 | - launcher.set_urgent(False) |
305 | + result = launcher.Launcher() |
306 | + result.set_urgent(False) |
307 | |
308 | |
309 | class PublicFiles(DBusExposedObject): |
310 | |
311 | === modified file 'ubuntuone/platform/ipc/perspective_broker.py' |
312 | --- ubuntuone/platform/ipc/perspective_broker.py 2016-05-29 00:50:05 +0000 |
313 | +++ ubuntuone/platform/ipc/perspective_broker.py 2016-07-30 22:04:10 +0000 |
314 | @@ -119,9 +119,11 @@ |
315 | |
316 | |
317 | def remote_handler(handler): |
318 | + result = handler |
319 | if handler: |
320 | - handler = lambda x: handler.callRemote('execute', x) |
321 | - return handler |
322 | + def result(x): |
323 | + return handler.callRemote('execute', x) |
324 | + return result |
325 | |
326 | |
327 | class RemoteMeta(type): |
328 | |
329 | === modified file 'ubuntuone/platform/ipc/windows.py' |
330 | --- ubuntuone/platform/ipc/windows.py 2016-05-29 00:50:05 +0000 |
331 | +++ ubuntuone/platform/ipc/windows.py 2016-07-30 22:04:10 +0000 |
332 | @@ -33,7 +33,7 @@ |
333 | |
334 | def get_sd_pb_port(): |
335 | """Returns the host and port for this user.""" |
336 | - return SD_PORT_OFFSET |
337 | + return SD_PORT |
338 | |
339 | |
340 | class DescriptionFactory(object): |
341 | |
342 | === modified file 'ubuntuone/platform/notification/linux.py' |
343 | --- ubuntuone/platform/notification/linux.py 2016-05-29 00:50:05 +0000 |
344 | +++ ubuntuone/platform/notification/linux.py 2016-07-30 22:04:10 +0000 |
345 | @@ -26,16 +26,15 @@ |
346 | # files in the program, then also delete it here. |
347 | """Module that implements notification of the end user.""" |
348 | |
349 | -# TODO: We may want to enable different notifiers. When none of them |
350 | -# are available, we should fall back to silently discarding |
351 | -# notifications. |
352 | -USE_NOTIFY = False |
353 | |
354 | try: |
355 | from gi.repository import Notify |
356 | USE_NOTIFY = True |
357 | except ImportError: |
358 | - pass |
359 | + # TODO: We may want to enable different notifiers. When none of them |
360 | + # are available, we should fall back to silently discarding |
361 | + # notifications. |
362 | + USE_NOTIFY = False |
363 | |
364 | from ubuntuone.clientdefs import NAME |
365 | from ubuntuone.status.notification import AbstractNotification |
366 | |
367 | === modified file 'ubuntuone/platform/sync_menu/linux.py' |
368 | --- ubuntuone/platform/sync_menu/linux.py 2016-05-29 00:50:05 +0000 |
369 | +++ ubuntuone/platform/sync_menu/linux.py 2016-07-30 22:04:10 +0000 |
370 | @@ -55,7 +55,10 @@ |
371 | |
372 | logger = logging.getLogger("ubuntuone.platform.SyncMenu") |
373 | |
374 | -Q_ = lambda string: gettext.dgettext(GETTEXT_PACKAGE, string) |
375 | + |
376 | +def Q_(string): |
377 | + return gettext.dgettext(GETTEXT_PACKAGE, string) |
378 | + |
379 | |
380 | GET_HELP = Q_("Get Help on the Web") |
381 | GO_TO_WEB = Q_("Go to the main website") |
382 | |
383 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/common.py' |
384 | --- ubuntuone/platform/tests/filesystem_notifications/common.py 2013-01-14 21:45:06 +0000 |
385 | +++ ubuntuone/platform/tests/filesystem_notifications/common.py 2016-07-30 22:04:10 +0000 |
386 | @@ -60,7 +60,7 @@ |
387 | OP_FLAGS = EventsCodes.FLAG_COLLECTIONS['OP_FLAGS'] |
388 | IS_DIR = EventsCodes.FLAG_COLLECTIONS['SPECIAL_FLAGS']['IN_ISDIR'] |
389 | |
390 | -#create a rever mapping to use it in the tests. |
391 | +# create a rever mapping to use it in the tests. |
392 | REVERSE_OS_ACTIONS = {} |
393 | for key, value in ACTIONS.items(): |
394 | REVERSE_OS_ACTIONS[value] = key |
395 | @@ -180,8 +180,8 @@ |
396 | os.fsync(fd) |
397 | fd.close() |
398 | |
399 | - events = yield self._perform_operations(self.basedir, self.mask, |
400 | - create_file, 1) |
401 | + events = yield self._perform_operations( |
402 | + self.basedir, self.mask, create_file, 1) |
403 | event = events[0] |
404 | self.assertFalse(event.dir) |
405 | self.assertEqual(OP_FLAGS['IN_CREATE'], event.mask) |
406 | @@ -200,8 +200,8 @@ |
407 | """Action for the test.""" |
408 | os.mkdir(dir_name) |
409 | |
410 | - events = yield self._perform_operations(self.basedir, self.mask, |
411 | - create_dir, 1) |
412 | + events = yield self._perform_operations( |
413 | + self.basedir, self.mask, create_dir, 1) |
414 | event = events[0] |
415 | self.assertTrue(event.dir) |
416 | self.assertEqual(OP_FLAGS['IN_CREATE'] | IS_DIR, event.mask) |
417 | @@ -261,12 +261,13 @@ |
418 | @defer.inlineCallbacks |
419 | def test_file_moved_to_watched_dir_same_watcher(self): |
420 | """Test that the correct event is raised when a file is moved.""" |
421 | - from_file_name = os.path.join(self.basedir, |
422 | - 'test_file_moved_to_watched_dir_same_watcher') |
423 | - to_file_name = os.path.join(self.basedir, |
424 | - 'test_file_moved_to_watched_dir_same_watcher_2') |
425 | + from_file_name = os.path.join( |
426 | + self.basedir, 'test_file_moved_to_watched_dir_same_watcher') |
427 | + to_file_name = os.path.join( |
428 | + self.basedir, 'test_file_moved_to_watched_dir_same_watcher_2') |
429 | open(from_file_name, 'w').close() |
430 | - #create file before recording |
431 | + |
432 | + # create file before recording |
433 | |
434 | def move_file(): |
435 | """Action for the test.""" |
436 | @@ -283,7 +284,8 @@ |
437 | self.assertEqual(os.path.split(from_file_name)[1], |
438 | move_from_event.name) |
439 | self.assertEqual('.', move_from_event.path) |
440 | - self.assertEqual(os.path.join(self.basedir, from_file_name), |
441 | + self.assertEqual( |
442 | + os.path.join(self.basedir, from_file_name), |
443 | move_from_event.pathname) |
444 | self.assertEqual(0, move_from_event.wd) |
445 | # test the move to |
446 | @@ -292,10 +294,10 @@ |
447 | self.assertEqual('IN_MOVED_TO', move_to_event.maskname) |
448 | self.assertEqual(os.path.split(to_file_name)[1], move_to_event.name) |
449 | self.assertEqual('.', move_to_event.path) |
450 | - self.assertEqual(os.path.join(self.basedir, to_file_name), |
451 | - move_to_event.pathname) |
452 | - self.assertEqual(os.path.split(from_file_name)[1], |
453 | - move_to_event.src_pathname) |
454 | + self.assertEqual( |
455 | + os.path.join(self.basedir, to_file_name), move_to_event.pathname) |
456 | + self.assertEqual( |
457 | + os.path.split(from_file_name)[1], move_to_event.src_pathname) |
458 | self.assertEqual(0, move_to_event.wd) |
459 | # assert that both cookies are the same |
460 | self.assertEqual(move_from_event.cookie, move_to_event.cookie) |
461 | @@ -303,14 +305,15 @@ |
462 | @defer.inlineCallbacks |
463 | def test_file_moved_to_not_watched_dir(self): |
464 | """Test that the correct event is raised when a file is moved.""" |
465 | - from_file_name = os.path.join(self.basedir, |
466 | - 'test_file_moved_to_not_watched_dir') |
467 | + from_file_name = os.path.join( |
468 | + self.basedir, 'test_file_moved_to_not_watched_dir') |
469 | open(from_file_name, 'w').close() |
470 | |
471 | def move_file(): |
472 | """Action for the test.""" |
473 | - os.rename(from_file_name, os.path.join(tempfile.mkdtemp(), |
474 | - 'test_file_moved_to_not_watched_dir')) |
475 | + target = os.path.join( |
476 | + tempfile.mkdtemp(), 'test_file_moved_to_not_watched_dir') |
477 | + os.rename(from_file_name, target) |
478 | |
479 | # while on linux we will have to do some sort of magic like facundo |
480 | # did, on windows we will get a deleted event which is much more |
481 | @@ -330,10 +333,10 @@ |
482 | @defer.inlineCallbacks |
483 | def test_file_move_from_not_watched_dir(self): |
484 | """Test that the correct event is raised when a file is moved.""" |
485 | - from_file_name = os.path.join(tempfile.mkdtemp(), |
486 | - 'test_file_move_from_not_watched_dir') |
487 | - to_file_name = os.path.join(self.basedir, |
488 | - 'test_file_move_from_not_watched_dir') |
489 | + from_file_name = os.path.join( |
490 | + tempfile.mkdtemp(), 'test_file_move_from_not_watched_dir') |
491 | + to_file_name = os.path.join( |
492 | + self.basedir, 'test_file_move_from_not_watched_dir') |
493 | # create file before we record |
494 | open(from_file_name, 'w').close() |
495 | |
496 | @@ -351,35 +354,36 @@ |
497 | self.assertEqual('IN_CREATE', event.maskname) |
498 | self.assertEqual(os.path.split(to_file_name)[1], event.name) |
499 | self.assertEqual('.', event.path) |
500 | - self.assertEqual(os.path.join(self.basedir, to_file_name), |
501 | - event.pathname) |
502 | + self.assertEqual( |
503 | + os.path.join(self.basedir, to_file_name), event.pathname) |
504 | self.assertEqual(0, event.wd) |
505 | |
506 | @defer.inlineCallbacks |
507 | def test_dir_moved_to_watched_dir_same_watcher(self): |
508 | """Test that the correct event is raised when a dir is moved.""" |
509 | - from_dir_name = os.path.join(self.basedir, |
510 | - 'test_dir_moved_to_watched_dir_same_watcher') |
511 | - to_dir_name = os.path.join(self.basedir, |
512 | - 'test_dir_moved_to_watched_dir_same_watcher_2') |
513 | + from_dir_name = os.path.join( |
514 | + self.basedir, 'test_dir_moved_to_watched_dir_same_watcher') |
515 | + to_dir_name = os.path.join( |
516 | + self.basedir, 'test_dir_moved_to_watched_dir_same_watcher_2') |
517 | os.mkdir(from_dir_name) |
518 | |
519 | def move_file(): |
520 | """Action for the test.""" |
521 | os.rename(from_dir_name, to_dir_name) |
522 | |
523 | - events = yield self._perform_operations(self.basedir, |
524 | - self.mask, move_file, 2) |
525 | + events = yield self._perform_operations( |
526 | + self.basedir, self.mask, move_file, 2) |
527 | move_from_event = events[0] |
528 | move_to_event = events[1] |
529 | # first test the move from |
530 | self.assertTrue(move_from_event.dir) |
531 | - self.assertEqual(OP_FLAGS['IN_MOVED_FROM'] | IS_DIR, |
532 | - move_from_event.mask) |
533 | + self.assertEqual( |
534 | + OP_FLAGS['IN_MOVED_FROM'] | IS_DIR, move_from_event.mask) |
535 | self.assertEqual('IN_MOVED_FROM|IN_ISDIR', move_from_event.maskname) |
536 | self.assertEqual(os.path.split(from_dir_name)[1], move_from_event.name) |
537 | self.assertEqual('.', move_from_event.path) |
538 | - self.assertEqual(os.path.join(self.basedir, from_dir_name), |
539 | + self.assertEqual( |
540 | + os.path.join(self.basedir, from_dir_name), |
541 | move_from_event.pathname) |
542 | self.assertEqual(0, move_from_event.wd) |
543 | # test the move to |
544 | @@ -388,10 +392,10 @@ |
545 | self.assertEqual('IN_MOVED_TO|IN_ISDIR', move_to_event.maskname) |
546 | self.assertEqual(os.path.split(to_dir_name)[1], move_to_event.name) |
547 | self.assertEqual('.', move_to_event.path) |
548 | - self.assertEqual(os.path.join(self.basedir, to_dir_name), |
549 | - move_to_event.pathname) |
550 | - self.assertEqual(os.path.split(from_dir_name)[1], |
551 | - move_to_event.src_pathname) |
552 | + self.assertEqual( |
553 | + os.path.join(self.basedir, to_dir_name), move_to_event.pathname) |
554 | + self.assertEqual( |
555 | + os.path.split(from_dir_name)[1], move_to_event.src_pathname) |
556 | self.assertEqual(0, move_to_event.wd) |
557 | # assert that both cookies are the same |
558 | self.assertEqual(move_from_event.cookie, move_to_event.cookie) |
559 | @@ -399,14 +403,15 @@ |
560 | @defer.inlineCallbacks |
561 | def test_dir_moved_to_not_watched_dir(self): |
562 | """Test that the correct event is raised when a file is moved.""" |
563 | - dir_name = os.path.join(self.basedir, |
564 | - 'test_dir_moved_to_not_watched_dir') |
565 | + dir_name = os.path.join( |
566 | + self.basedir, 'test_dir_moved_to_not_watched_dir') |
567 | os.mkdir(dir_name) |
568 | |
569 | def move_dir(): |
570 | """Action for the test.""" |
571 | - os.rename(dir_name, os.path.join(tempfile.mkdtemp(), |
572 | - 'test_dir_moved_to_not_watched_dir')) |
573 | + target = os.path.join( |
574 | + tempfile.mkdtemp(), 'test_dir_moved_to_not_watched_dir') |
575 | + os.rename(dir_name, target) |
576 | |
577 | # on windows a move to outside a watched dir translates to a remove |
578 | events = yield self._perform_operations(self.basedir, self.mask, |
579 | @@ -422,10 +427,10 @@ |
580 | @defer.inlineCallbacks |
581 | def test_dir_move_from_not_watched_dir(self): |
582 | """Test that the correct event is raised when a file is moved.""" |
583 | - from_dir_name = os.path.join(tempfile.mkdtemp(), |
584 | - 'test_dir_move_from_not_watched_dir') |
585 | - to_dir_name = os.path.join(self.basedir, |
586 | - 'test_dir_move_from_not_watched_dir') |
587 | + from_dir_name = os.path.join( |
588 | + tempfile.mkdtemp(), 'test_dir_move_from_not_watched_dir') |
589 | + to_dir_name = os.path.join( |
590 | + self.basedir, 'test_dir_move_from_not_watched_dir') |
591 | # create file before we record |
592 | os.mkdir(from_dir_name) |
593 | |
594 | @@ -474,9 +479,9 @@ |
595 | watch.ignore_path(os.path.join(self.path, child)) |
596 | paths_to_ignore = [] |
597 | for file_name in 'abcdef': |
598 | - paths_to_ignore.append( |
599 | - self.fake_events_processor.create_fake_event( |
600 | - os.path.join(child, file_name))) |
601 | + fake_event = self.fake_events_processor.create_fake_event( |
602 | + os.path.join(child, file_name)) |
603 | + paths_to_ignore.append(fake_event) |
604 | # ensure that the watch is watching |
605 | watch.platform_watch.watching = True |
606 | self.fake_events_processor.custom_process_events( |
607 | @@ -589,7 +594,7 @@ |
608 | watch, paths_not_to_ignore) |
609 | self.assertEqual(len(paths_not_to_ignore), len(events), |
610 | 'All events should have been accepted.') |
611 | - self.assertTrue(all([event in expected_events for event in events]), |
612 | + self.assertTrue(all([e in expected_events for e in events]), |
613 | 'Paths ignored that should have not been ignored.') |
614 | |
615 | def random_error(self, *args): |
616 | |
617 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_darwin.py' |
618 | --- ubuntuone/platform/tests/filesystem_notifications/test_darwin.py 2016-05-29 19:15:01 +0000 |
619 | +++ ubuntuone/platform/tests/filesystem_notifications/test_darwin.py 2016-07-30 22:04:10 +0000 |
620 | @@ -187,25 +187,29 @@ |
621 | |
622 | def test_not_ignore_path(self): |
623 | """Test that we do get the events when they do not match.""" |
624 | - self.patch(filesystem_notifications.reactor, 'callFromThread', |
625 | + self.patch( |
626 | + filesystem_notifications.reactor, 'callFromThread', |
627 | lambda x, e: x(e)) |
628 | super(TestWatch, self).test_not_ignore_path() |
629 | |
630 | def test_undo_ignore_path_ignored(self): |
631 | """Test that we do deal with events from and old ignored path.""" |
632 | - self.patch(filesystem_notifications.reactor, 'callFromThread', |
633 | + self.patch( |
634 | + filesystem_notifications.reactor, 'callFromThread', |
635 | lambda x, e: x(e)) |
636 | super(TestWatch, self).test_not_ignore_path() |
637 | |
638 | def test_undo_ignore_path_other_ignored(self): |
639 | """Test that we can undo and the other path is ignored.""" |
640 | - self.patch(filesystem_notifications.reactor, 'callFromThread', |
641 | + self.patch( |
642 | + filesystem_notifications.reactor, 'callFromThread', |
643 | lambda x, e: x(e)) |
644 | super(TestWatch, self).test_not_ignore_path() |
645 | |
646 | def test_mixed_ignore_path(self): |
647 | """Test that we do get the correct events.""" |
648 | - self.patch(filesystem_notifications.reactor, 'callFromThread', |
649 | + self.patch( |
650 | + filesystem_notifications.reactor, 'callFromThread', |
651 | lambda x, e: x(e)) |
652 | super(TestWatch, self).test_mixed_ignore_path() |
653 | |
654 | @@ -221,8 +225,8 @@ |
655 | os.fsync(fd) |
656 | fd.close() |
657 | |
658 | - events = yield self._perform_operations(self.basedir, self.mask, |
659 | - create_file, 1) |
660 | + events = yield self._perform_operations( |
661 | + self.basedir, self.mask, create_file, 1) |
662 | event = events[0] |
663 | self.assertFalse(event.dir) |
664 | self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'], event.mask) |
665 | @@ -241,11 +245,12 @@ |
666 | """Action for the test.""" |
667 | os.mkdir(dir_name) |
668 | |
669 | - events = yield self._perform_operations(self.basedir, self.mask, |
670 | - create_dir, 1) |
671 | + events = yield self._perform_operations( |
672 | + self.basedir, self.mask, create_dir, 1) |
673 | event = events[0] |
674 | self.assertTrue(event.dir) |
675 | - self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'] | |
676 | + self.assertEqual( |
677 | + common_tests.OP_FLAGS['IN_CREATE'] | |
678 | common_tests.IS_DIR, event.mask) |
679 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) |
680 | self.assertEqual(os.path.split(dir_name)[1], event.name) |
681 | @@ -290,7 +295,8 @@ |
682 | remove_dir, 1) |
683 | event = events[0] |
684 | self.assertTrue(event.dir) |
685 | - self.assertEqual(common_tests.OP_FLAGS['IN_DELETE'] | |
686 | + self.assertEqual( |
687 | + common_tests.OP_FLAGS['IN_DELETE'] | |
688 | common_tests.IS_DIR, event.mask) |
689 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) |
690 | self.assertEqual('.', event.path) |
691 | @@ -325,12 +331,12 @@ |
692 | @defer.inlineCallbacks |
693 | def test_file_moved_to_watched_dir_same_watcher(self): |
694 | """Test that the correct event is raised when a file is moved.""" |
695 | - from_file_name = os.path.join(self.basedir, |
696 | - 'test_file_moved_to_watched_dir_same_watcher') |
697 | - to_file_name = os.path.join(self.basedir, |
698 | - 'test_file_moved_to_watched_dir_same_watcher_2') |
699 | + from_file_name = os.path.join( |
700 | + self.basedir, 'test_file_moved_to_watched_dir_same_watcher') |
701 | + to_file_name = os.path.join( |
702 | + self.basedir, 'test_file_moved_to_watched_dir_same_watcher_2') |
703 | open(from_file_name, 'w').close() |
704 | - #create file before recording |
705 | + # create file before recording |
706 | |
707 | def move_file(): |
708 | """Action for the test.""" |
709 | @@ -342,26 +348,27 @@ |
710 | move_to_event = events[1] |
711 | # first test the move from |
712 | self.assertFalse(move_from_event.dir) |
713 | - self.assertEqual(common_tests.OP_FLAGS['IN_MOVED_FROM'], |
714 | - move_from_event.mask) |
715 | + self.assertEqual( |
716 | + common_tests.OP_FLAGS['IN_MOVED_FROM'], move_from_event.mask) |
717 | self.assertEqual('IN_MOVED_FROM', move_from_event.maskname) |
718 | self.assertEqual(os.path.split(from_file_name)[1], |
719 | move_from_event.name) |
720 | self.assertEqual('.', move_from_event.path) |
721 | - self.assertEqual(os.path.join(self.basedir, from_file_name), |
722 | + self.assertEqual( |
723 | + os.path.join(self.basedir, from_file_name), |
724 | move_from_event.pathname) |
725 | self.assertEqual(0, move_from_event.wd) |
726 | # test the move to |
727 | self.assertFalse(move_to_event.dir) |
728 | - self.assertEqual(common_tests.OP_FLAGS['IN_MOVED_TO'], |
729 | - move_to_event.mask) |
730 | + self.assertEqual( |
731 | + common_tests.OP_FLAGS['IN_MOVED_TO'], move_to_event.mask) |
732 | self.assertEqual('IN_MOVED_TO', move_to_event.maskname) |
733 | self.assertEqual(os.path.split(to_file_name)[1], move_to_event.name) |
734 | self.assertEqual('.', move_to_event.path) |
735 | - self.assertEqual(os.path.join(self.basedir, to_file_name), |
736 | - move_to_event.pathname) |
737 | - self.assertEqual(os.path.split(from_file_name)[1], |
738 | - move_to_event.src_pathname) |
739 | + self.assertEqual( |
740 | + os.path.join(self.basedir, to_file_name), move_to_event.pathname) |
741 | + self.assertEqual( |
742 | + os.path.split(from_file_name)[1], move_to_event.src_pathname) |
743 | self.assertEqual(0, move_to_event.wd) |
744 | # assert that both cookies are the same |
745 | self.assertEqual(move_from_event.cookie, move_to_event.cookie) |
746 | @@ -369,14 +376,15 @@ |
747 | @defer.inlineCallbacks |
748 | def test_file_moved_to_not_watched_dir(self): |
749 | """Test that the correct event is raised when a file is moved.""" |
750 | - from_file_name = os.path.join(self.basedir, |
751 | - 'test_file_moved_to_not_watched_dir') |
752 | + from_file_name = os.path.join( |
753 | + self.basedir, 'test_file_moved_to_not_watched_dir') |
754 | open(from_file_name, 'w').close() |
755 | |
756 | def move_file(): |
757 | """Action for the test.""" |
758 | - os.rename(from_file_name, os.path.join(tempfile.mkdtemp(), |
759 | - 'test_file_moved_to_not_watched_dir')) |
760 | + target = os.path.join( |
761 | + tempfile.mkdtemp(), 'test_file_moved_to_not_watched_dir') |
762 | + os.rename(from_file_name, target) |
763 | |
764 | # We need to test that we get a delete operation when moving |
765 | # a file to an unwatched folder |
766 | @@ -395,10 +403,10 @@ |
767 | @defer.inlineCallbacks |
768 | def test_file_move_from_not_watched_dir(self): |
769 | """Test that the correct event is raised when a file is moved.""" |
770 | - from_file_name = os.path.join(tempfile.mkdtemp(), |
771 | - 'test_file_move_from_not_watched_dir') |
772 | - to_file_name = os.path.join(self.basedir, |
773 | - 'test_file_move_from_not_watched_dir') |
774 | + from_file_name = os.path.join( |
775 | + tempfile.mkdtemp(), 'test_file_move_from_not_watched_dir') |
776 | + to_file_name = os.path.join( |
777 | + self.basedir, 'test_file_move_from_not_watched_dir') |
778 | # create file before we record |
779 | open(from_file_name, 'w').close() |
780 | |
781 | @@ -416,47 +424,49 @@ |
782 | self.assertEqual('IN_CREATE', event.maskname) |
783 | self.assertEqual(os.path.split(to_file_name)[1], event.name) |
784 | self.assertEqual('.', event.path) |
785 | - self.assertEqual(os.path.join(self.basedir, to_file_name), |
786 | - event.pathname) |
787 | + self.assertEqual( |
788 | + os.path.join(self.basedir, to_file_name), event.pathname) |
789 | self.assertEqual(0, event.wd) |
790 | |
791 | @defer.inlineCallbacks |
792 | def test_dir_moved_to_watched_dir_same_watcher(self): |
793 | """Test that the correct event is raised when a dir is moved.""" |
794 | - from_dir_name = os.path.join(self.basedir, |
795 | - 'test_dir_moved_to_watched_dir_same_watcher') |
796 | - to_dir_name = os.path.join(self.basedir, |
797 | - 'test_dir_moved_to_watched_dir_same_watcher_2') |
798 | + from_dir_name = os.path.join( |
799 | + self.basedir, 'test_dir_moved_to_watched_dir_same_watcher') |
800 | + to_dir_name = os.path.join( |
801 | + self.basedir, 'test_dir_moved_to_watched_dir_same_watcher_2') |
802 | os.mkdir(from_dir_name) |
803 | |
804 | def move_file(): |
805 | """Action for the test.""" |
806 | os.rename(from_dir_name, to_dir_name) |
807 | |
808 | - events = yield self._perform_operations(self.basedir, |
809 | - self.mask, move_file, 2) |
810 | + events = yield self._perform_operations( |
811 | + self.basedir, self.mask, move_file, 2) |
812 | move_from_event = events[0] |
813 | move_to_event = events[1] |
814 | # first test the move from |
815 | self.assertTrue(move_from_event.dir) |
816 | - self.assertEqual(common_tests.OP_FLAGS['IN_MOVED_FROM'] | |
817 | - common_tests.IS_DIR, |
818 | + self.assertEqual( |
819 | + common_tests.OP_FLAGS['IN_MOVED_FROM'] | common_tests.IS_DIR, |
820 | move_from_event.mask) |
821 | self.assertEqual('IN_MOVED_FROM|IN_ISDIR', move_from_event.maskname) |
822 | self.assertEqual(os.path.split(from_dir_name)[1], move_from_event.name) |
823 | self.assertEqual('.', move_from_event.path) |
824 | - self.assertEqual(os.path.join(self.basedir, from_dir_name), |
825 | + self.assertEqual( |
826 | + os.path.join(self.basedir, from_dir_name), |
827 | move_from_event.pathname) |
828 | self.assertEqual(0, move_from_event.wd) |
829 | # test the move to |
830 | self.assertTrue(move_to_event.dir) |
831 | - self.assertEqual(common_tests.OP_FLAGS['IN_MOVED_TO'] | |
832 | - common_tests.IS_DIR, move_to_event.mask) |
833 | + self.assertEqual( |
834 | + common_tests.OP_FLAGS['IN_MOVED_TO'] | common_tests.IS_DIR, |
835 | + move_to_event.mask) |
836 | self.assertEqual('IN_MOVED_TO|IN_ISDIR', move_to_event.maskname) |
837 | self.assertEqual(os.path.split(to_dir_name)[1], move_to_event.name) |
838 | self.assertEqual('.', move_to_event.path) |
839 | - self.assertEqual(os.path.join(self.basedir, to_dir_name), |
840 | - move_to_event.pathname) |
841 | + self.assertEqual( |
842 | + os.path.join(self.basedir, to_dir_name), move_to_event.pathname) |
843 | self.assertEqual(os.path.split(from_dir_name)[1], |
844 | move_to_event.src_pathname) |
845 | self.assertEqual(0, move_to_event.wd) |
846 | @@ -466,14 +476,15 @@ |
847 | @defer.inlineCallbacks |
848 | def test_dir_moved_to_not_watched_dir(self): |
849 | """Test that the correct event is raised when a file is moved.""" |
850 | - dir_name = os.path.join(self.basedir, |
851 | - 'test_dir_moved_to_not_watched_dir') |
852 | + dir_name = os.path.join( |
853 | + self.basedir, 'test_dir_moved_to_not_watched_dir') |
854 | os.mkdir(dir_name) |
855 | |
856 | def move_dir(): |
857 | """Action for the test.""" |
858 | - os.rename(dir_name, os.path.join(tempfile.mkdtemp(), |
859 | - 'test_dir_moved_to_not_watched_dir')) |
860 | + target = os.path.join( |
861 | + tempfile.mkdtemp(), 'test_dir_moved_to_not_watched_dir') |
862 | + os.rename(dir_name, target) |
863 | |
864 | # We need to test that we get a delete operation when moving |
865 | # a file to an unwatched folder |
866 | @@ -481,8 +492,9 @@ |
867 | move_dir, 1) |
868 | event = events[0] |
869 | self.assertTrue(event.dir) |
870 | - self.assertEqual(common_tests.OP_FLAGS['IN_DELETE'] | |
871 | - common_tests.IS_DIR, event.mask) |
872 | + self.assertEqual( |
873 | + common_tests.OP_FLAGS['IN_DELETE'] | common_tests.IS_DIR, |
874 | + event.mask) |
875 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) |
876 | self.assertEqual('.', event.path) |
877 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) |
878 | @@ -491,10 +503,10 @@ |
879 | @defer.inlineCallbacks |
880 | def test_dir_move_from_not_watched_dir(self): |
881 | """Test that the correct event is raised when a file is moved.""" |
882 | - from_dir_name = os.path.join(tempfile.mkdtemp(), |
883 | - 'test_dir_move_from_not_watched_dir') |
884 | - to_dir_name = os.path.join(self.basedir, |
885 | - 'test_dir_move_from_not_watched_dir') |
886 | + from_dir_name = os.path.join( |
887 | + tempfile.mkdtemp(), 'test_dir_move_from_not_watched_dir') |
888 | + to_dir_name = os.path.join( |
889 | + self.basedir, 'test_dir_move_from_not_watched_dir') |
890 | # create file before we record |
891 | os.mkdir(from_dir_name) |
892 | |
893 | @@ -506,8 +518,9 @@ |
894 | move_dir, 1) |
895 | event = events[0] |
896 | self.assertTrue(event.dir) |
897 | - self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'] | |
898 | - common_tests.IS_DIR, event.mask) |
899 | + self.assertEqual( |
900 | + common_tests.OP_FLAGS['IN_CREATE'] | common_tests.IS_DIR, |
901 | + event.mask) |
902 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) |
903 | self.assertEqual(os.path.split(from_dir_name)[1], event.name) |
904 | self.assertEqual('.', event.path) |
905 | @@ -521,8 +534,8 @@ |
906 | manager = WatchManager(handler) |
907 | self.addCleanup(manager.stop) |
908 | # add a watch that will always exclude all actions |
909 | - manager.add_watch(self.basedir, self.mask, |
910 | - exclude_filter=lambda x: True) |
911 | + manager.add_watch( |
912 | + self.basedir, self.mask, exclude_filter=lambda x: True) |
913 | # execution the actions |
914 | file_name = os.path.join(self.basedir, 'test_file_create') |
915 | open(file_name, 'w').close() |
916 | @@ -537,7 +550,8 @@ |
917 | |
918 | path = '/Users/username/folder/' |
919 | watch = Watch(1, path, None) |
920 | - self.assertEqual(watch.platform_watch._process_events, |
921 | + self.assertEqual( |
922 | + watch.platform_watch._process_events, |
923 | watch.platform_watch.stream.callback) |
924 | self.assertEqual(watch.platform_watch.stream.paths, [path]) |
925 | self.assertEqual(watch.platform_watch.stream.file_events, True) |
926 | @@ -732,7 +746,8 @@ |
927 | |
928 | def test_rm_child_path(self): |
929 | """Test the removal of a child path.""" |
930 | - self.patch(filesystem_notifications.reactor, 'callFromThread', |
931 | + self.patch( |
932 | + filesystem_notifications.reactor, 'callFromThread', |
933 | lambda x, e: x(e)) |
934 | super(TestWatchManager, self).test_rm_child_path() |
935 | |
936 | |
937 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_filesystem_notifications.py' |
938 | --- ubuntuone/platform/tests/filesystem_notifications/test_filesystem_notifications.py 2012-08-01 11:34:37 +0000 |
939 | +++ ubuntuone/platform/tests/filesystem_notifications/test_filesystem_notifications.py 2016-07-30 22:04:10 +0000 |
940 | @@ -81,8 +81,8 @@ |
941 | |
942 | def test_filter_two_complex(self): |
943 | """Filters stuff that matches (or not) these complex regexes.""" |
944 | - p = notify_processor.NotifyProcessor(None, |
945 | - ['\A.*foo\Z|\Afoo.*\Z', '\A.*bar\Z']) |
946 | + p = notify_processor.NotifyProcessor( |
947 | + None, ['\A.*foo\Z|\Afoo.*\Z', '\A.*bar\Z']) |
948 | self.assertTrue(p.is_ignored("blah_foo")) |
949 | self.assertTrue(p.is_ignored("blah_bar")) |
950 | self.assertTrue(p.is_ignored("foo_xxx")) |
951 | @@ -93,7 +93,10 @@ |
952 | """Test that the right access function is called.""" |
953 | sample_path = "sample path" |
954 | calls = [] |
955 | - store_call = lambda *args: calls.append(args) |
956 | + |
957 | + def store_call(*args): |
958 | + return calls.append(args) |
959 | + |
960 | self.patch(filesystem_notifications, "access", store_call) |
961 | self.patch(filesystem_notifications, "path_exists", lambda _: True) |
962 | p = notify_processor.NotifyProcessor(None) |
963 | @@ -221,6 +224,7 @@ |
964 | self.finished_error(exception) |
965 | raise exception |
966 | return first |
967 | + |
968 | assertEqual = assertEquals = failUnlessEquals = failUnlessEqual |
969 | |
970 | |
971 | |
972 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_fsevents_daemon.py' |
973 | --- ubuntuone/platform/tests/filesystem_notifications/test_fsevents_daemon.py 2015-09-20 20:52:48 +0000 |
974 | +++ ubuntuone/platform/tests/filesystem_notifications/test_fsevents_daemon.py 2016-07-30 22:04:10 +0000 |
975 | @@ -37,7 +37,6 @@ |
976 | try: |
977 | from ubuntuone.devtools.testcases import skipIf |
978 | from ubuntuone.devtools.testcases.txsocketserver import TidyUnixServer |
979 | - TidyUnixServer = None |
980 | except ImportError: |
981 | from ubuntuone.devtools.testcase import skipIf |
982 | TidyUnixServer = None |
983 | @@ -52,6 +51,7 @@ |
984 | IN_MOVED_TO, |
985 | ) |
986 | |
987 | + |
988 | class FakeServerProtocol(protocol.Protocol): |
989 | """A test protocol.""" |
990 | |
991 | @@ -132,6 +132,7 @@ |
992 | """Add a path.""" |
993 | self.called.extend(['add_path', path]) |
994 | |
995 | + |
996 | class PyInotifyEventsFactoryTestCase(BaseTwistedTestCase): |
997 | """Test the factory used to receive events.""" |
998 | |
999 | @@ -268,7 +269,8 @@ |
1000 | pyinotify_event = converted_events[0] |
1001 | self.assertEqual(0, pyinotify_event.wd) |
1002 | self.assertEqual(event.is_directory, pyinotify_event.dir) |
1003 | - self.assertEqual(fsevents_daemon.DARWIN_ACTIONS[action], |
1004 | + self.assertEqual( |
1005 | + fsevents_daemon.DARWIN_ACTIONS[action], |
1006 | pyinotify_event.mask) |
1007 | self.assertEqual(event_path, pyinotify_event.pathname) |
1008 | |
1009 | @@ -381,8 +383,8 @@ |
1010 | event.event_type = fseventsd.FSE_CREATE_FILE |
1011 | self.factory.process_event(event) |
1012 | self.assertEqual(1, len(self.processor.processed_events)) |
1013 | - self.assertEqual(event_path, |
1014 | - self.processor.processed_events[0].pathname) |
1015 | + self.assertEqual( |
1016 | + event_path, self.processor.processed_events[0].pathname) |
1017 | |
1018 | |
1019 | class FilesystemMonitorTestCase(BaseTwistedTestCase): |
1020 | @@ -407,8 +409,9 @@ |
1021 | self.monitor._factory = self.factory |
1022 | |
1023 | # patch the connect |
1024 | - self.patch(fsevents_daemon.FilesystemMonitor, '_connect_to_daemon', |
1025 | - self.fake_connect_to_daemon) |
1026 | + self.patch( |
1027 | + fsevents_daemon.FilesystemMonitor, '_connect_to_daemon', |
1028 | + self.fake_connect_to_daemon) |
1029 | |
1030 | @defer.inlineCallbacks |
1031 | def test_shutdown_protocol(self): |
1032 | |
1033 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_linux.py' |
1034 | --- ubuntuone/platform/tests/filesystem_notifications/test_linux.py 2016-05-29 19:15:01 +0000 |
1035 | +++ ubuntuone/platform/tests/filesystem_notifications/test_linux.py 2016-07-30 22:04:10 +0000 |
1036 | @@ -39,9 +39,11 @@ |
1037 | from ubuntuone.syncdaemon import volume_manager |
1038 | from ubuntuone.platform.filesystem_notifications import notify_processor |
1039 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
1040 | - linux as filesystem_notifications |
1041 | -) |
1042 | -from ubuntuone.platform.tests.filesystem_notifications import BaseFSMonitorTestCase |
1043 | + linux as filesystem_notifications, |
1044 | +) |
1045 | +from ubuntuone.platform.tests.filesystem_notifications import ( |
1046 | + BaseFSMonitorTestCase, |
1047 | +) |
1048 | |
1049 | |
1050 | class FakeVolume(object): |
1051 | @@ -170,8 +172,8 @@ |
1052 | self.monitor._general_watchs = {'/path1/foo': 1, '/other': 2} |
1053 | self.monitor._ancestors_watchs = {'/foo': 3} |
1054 | self.monitor.inotify_watch_fix('/path1/foo', '/path1/new') |
1055 | - self.assertEqual(self.monitor._general_watchs, |
1056 | - {'/path1/new': 1, '/other': 2}) |
1057 | + self.assertEqual( |
1058 | + self.monitor._general_watchs, {'/path1/new': 1, '/other': 2}) |
1059 | self.assertEqual(self.monitor._ancestors_watchs, {'/foo': 3}) |
1060 | |
1061 | def test_fix_path_ancestors(self): |
1062 | @@ -180,8 +182,8 @@ |
1063 | self.monitor._ancestors_watchs = {'/oth': 1, '/other': 2} |
1064 | self.monitor.inotify_watch_fix('/oth', '/baz') |
1065 | self.assertEqual(self.monitor._general_watchs, {'/bar': 3}) |
1066 | - self.assertEqual(self.monitor._ancestors_watchs, |
1067 | - {'/baz': 1, '/other': 2}) |
1068 | + self.assertEqual( |
1069 | + self.monitor._ancestors_watchs, {'/baz': 1, '/other': 2}) |
1070 | |
1071 | |
1072 | class DynamicHitMe(object): |
1073 | @@ -421,7 +423,6 @@ |
1074 | |
1075 | self.assertNotIn(fake_event, result) |
1076 | |
1077 | - |
1078 | def test_close_write_on_files_is_handled(self): |
1079 | """When anything sends CLOSE_WRITE on files, handle it.""" |
1080 | result = [] |
1081 | |
1082 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_windows.py' |
1083 | --- ubuntuone/platform/tests/filesystem_notifications/test_windows.py 2016-05-29 19:15:01 +0000 |
1084 | +++ ubuntuone/platform/tests/filesystem_notifications/test_windows.py 2016-07-30 22:04:10 +0000 |
1085 | @@ -101,8 +101,7 @@ |
1086 | # group all events in a single lists which is not what the COM API |
1087 | # does. |
1088 | str_events = [ |
1089 | - (common.ACTIONS_NAMES[action], path) for action, path in |
1090 | - events] |
1091 | + (common.ACTIONS_NAMES[action], p) for action, p in events] |
1092 | self.raw_events.append(str_events) |
1093 | return events |
1094 | |
1095 | @@ -163,14 +162,14 @@ |
1096 | def test_started_property(self): |
1097 | """Test that the started property returns the started deferred.""" |
1098 | watch = Watch(1, self.path, None) |
1099 | - self.assertEqual(watch.started, |
1100 | - watch.platform_watch._watch_started_deferred) |
1101 | + self.assertEqual( |
1102 | + watch.started, watch.platform_watch._watch_started_deferred) |
1103 | |
1104 | def test_stopped_property(self): |
1105 | """Test that the stopped property returns the stopped deferred.""" |
1106 | watch = Watch(1, self.path, None) |
1107 | - self.assertEqual(watch.stopped, |
1108 | - watch.platform_watch._watch_stopped_deferred) |
1109 | + self.assertEqual( |
1110 | + watch.stopped, watch.platform_watch._watch_stopped_deferred) |
1111 | |
1112 | @defer.inlineCallbacks |
1113 | def test_start_watching_fails_early_in_thread(self): |
1114 | @@ -249,7 +248,8 @@ |
1115 | self.assertEqual(1, len(self.manager._wdm)) |
1116 | self.assertTrue(self.was_called, 'The watch start was not called.') |
1117 | self.assertEqual(self.path + os.path.sep, self.manager._wdm[0].path) |
1118 | - self.assertEqual(filesystem_notifications.FILESYSTEM_MONITOR_MASK, |
1119 | + self.assertEqual( |
1120 | + filesystem_notifications.FILESYSTEM_MONITOR_MASK, |
1121 | self.manager._wdm[0].platform_watch._mask) |
1122 | self.manager._wdm[0].stopped.callback(None) |
1123 | |
1124 | @@ -265,7 +265,6 @@ |
1125 | |
1126 | first_watch = Watch(1, self.path, None) |
1127 | self.manager._wdm = {1: first_watch} |
1128 | - |
1129 | second_path = self.parent_path + u"second_path" |
1130 | second_watch = Watch(2, second_path, None) |
1131 | self.manager._wdm[2] = second_watch |
1132 | |
1133 | === modified file 'ubuntuone/platform/tests/ipc/test_external_interface.py' |
1134 | --- ubuntuone/platform/tests/ipc/test_external_interface.py 2016-05-29 19:15:01 +0000 |
1135 | +++ ubuntuone/platform/tests/ipc/test_external_interface.py 2016-07-30 22:04:10 +0000 |
1136 | @@ -87,8 +87,8 @@ |
1137 | result = [{}, STR_STR_DICT] |
1138 | yield self.assert_method_called(self.service.status, |
1139 | 'current_uploads', result) |
1140 | - self.assert_remote_method('current_uploads', |
1141 | - in_signature=None, out_signature='aa{ss}') |
1142 | + self.assert_remote_method( |
1143 | + 'current_uploads', in_signature=None, out_signature='aa{ss}') |
1144 | |
1145 | @defer.inlineCallbacks |
1146 | def test_current_downloads(self): |
1147 | @@ -96,8 +96,8 @@ |
1148 | result = [STR_STR_DICT, {}] |
1149 | yield self.assert_method_called(self.service.status, |
1150 | 'current_downloads', result) |
1151 | - self.assert_remote_method('current_downloads', |
1152 | - in_signature=None, out_signature='aa{ss}') |
1153 | + self.assert_remote_method( |
1154 | + 'current_downloads', in_signature=None, out_signature='aa{ss}') |
1155 | |
1156 | @defer.inlineCallbacks |
1157 | def test_free_space(self): |
1158 | @@ -106,8 +106,8 @@ |
1159 | volume_id = '123-456-789' |
1160 | yield self.assert_method_called(self.service.status, |
1161 | 'free_space', result, volume_id) |
1162 | - self.assert_remote_method('free_space', |
1163 | - in_signature='s', out_signature='t') |
1164 | + self.assert_remote_method( |
1165 | + 'free_space', in_signature='s', out_signature='t') |
1166 | |
1167 | @defer.inlineCallbacks |
1168 | def test_waiting(self): |
1169 | @@ -115,8 +115,8 @@ |
1170 | result = [('foo', 'bar', {'command': 'test'})] |
1171 | yield self.assert_method_called(self.service.status, |
1172 | 'waiting', result) |
1173 | - self.assert_remote_method('waiting', |
1174 | - in_signature=None, out_signature='a(ssa{ss})') |
1175 | + self.assert_remote_method( |
1176 | + 'waiting', in_signature=None, out_signature='a(ssa{ss})') |
1177 | |
1178 | @defer.inlineCallbacks |
1179 | def test_waiting_metadata(self): |
1180 | @@ -124,8 +124,8 @@ |
1181 | result = [] |
1182 | yield self.assert_method_called(self.service.status, |
1183 | 'waiting_metadata', result) |
1184 | - self.assert_remote_method('waiting_metadata', |
1185 | - in_signature=None, out_signature='a(sa{ss})') |
1186 | + self.assert_remote_method( |
1187 | + 'waiting_metadata', in_signature=None, out_signature='a(sa{ss})') |
1188 | |
1189 | @defer.inlineCallbacks |
1190 | def test_waiting_content(self): |
1191 | @@ -133,8 +133,8 @@ |
1192 | result = [] |
1193 | yield self.assert_method_called(self.service.status, |
1194 | 'waiting_content', result) |
1195 | - self.assert_remote_method('waiting_metadata', |
1196 | - in_signature=None, out_signature='a(sa{ss})') |
1197 | + self.assert_remote_method( |
1198 | + 'waiting_metadata', in_signature=None, out_signature='a(sa{ss})') |
1199 | |
1200 | @defer.inlineCallbacks |
1201 | def test_sync_menu(self): |
1202 | @@ -143,8 +143,8 @@ |
1203 | method = 'sync_menu' |
1204 | yield self.assert_method_called(self.service.status, |
1205 | method, result) |
1206 | - self.assert_remote_method(method, |
1207 | - in_signature=None, out_signature='a{sv}') |
1208 | + self.assert_remote_method( |
1209 | + method, in_signature=None, out_signature='a{sv}') |
1210 | |
1211 | |
1212 | class EventsTests(EventsTestCase): |
1213 | @@ -162,8 +162,8 @@ |
1214 | yield self.assert_method_called(self.service.events, |
1215 | 'push_event', result, |
1216 | event_name, event_args) |
1217 | - self.assert_remote_method('push_event', |
1218 | - in_signature='sa{ss}', out_signature=None) |
1219 | + self.assert_remote_method( |
1220 | + 'push_event', in_signature='sa{ss}', out_signature=None) |
1221 | |
1222 | |
1223 | class SyncDaemonTests(SyncDaemonTestCase): |
1224 | @@ -233,8 +233,9 @@ |
1225 | 'wait_for_nirvana', result, |
1226 | last_event_interval) |
1227 | async_cb = ('reply_handler', 'error_handler') |
1228 | - self.assert_remote_method('wait_for_nirvana', |
1229 | - in_signature='d', out_signature='b', async_callbacks=async_cb) |
1230 | + self.assert_remote_method( |
1231 | + 'wait_for_nirvana', in_signature='d', out_signature='b', |
1232 | + async_callbacks=async_cb) |
1233 | |
1234 | @defer.inlineCallbacks |
1235 | def test_quit(self): |
1236 | @@ -253,8 +254,8 @@ |
1237 | yield self.assert_method_called(self.service.sync, |
1238 | 'rescan_from_scratch', result, |
1239 | volume_id) |
1240 | - self.assert_remote_method('rescan_from_scratch', |
1241 | - in_signature='s', out_signature='') |
1242 | + self.assert_remote_method( |
1243 | + 'rescan_from_scratch', in_signature='s', out_signature='') |
1244 | |
1245 | |
1246 | class FileSystemTests(FileSystemTestCase): |
1247 | @@ -271,8 +272,8 @@ |
1248 | yield self.assert_method_called(self.service.file_system, |
1249 | 'get_metadata', result, |
1250 | path) |
1251 | - self.assert_remote_method('get_metadata', |
1252 | - in_signature='s', out_signature='a{ss}') |
1253 | + self.assert_remote_method( |
1254 | + 'get_metadata', in_signature='s', out_signature='a{ss}') |
1255 | |
1256 | @defer.inlineCallbacks |
1257 | def test_search_files(self): |
1258 | @@ -280,8 +281,8 @@ |
1259 | result = ['path'] |
1260 | yield self.assert_method_called(self.service.file_system, |
1261 | 'search_files', result, 'file') |
1262 | - self.assert_remote_method('search_files', |
1263 | - in_signature='s', out_signature='as') |
1264 | + self.assert_remote_method( |
1265 | + 'search_files', in_signature='s', out_signature='as') |
1266 | |
1267 | @defer.inlineCallbacks |
1268 | def test_get_metadata_by_node(self): |
1269 | @@ -291,8 +292,8 @@ |
1270 | yield self.assert_method_called(self.service.file_system, |
1271 | 'get_metadata_by_node', result, |
1272 | share_id, node_id) |
1273 | - self.assert_remote_method('get_metadata_by_node', |
1274 | - in_signature='ss', out_signature='a{ss}') |
1275 | + self.assert_remote_method( |
1276 | + 'get_metadata_by_node', in_signature='ss', out_signature='a{ss}') |
1277 | |
1278 | @defer.inlineCallbacks |
1279 | def test_get_metadata_and_quick_tree_synced(self): |
1280 | @@ -302,7 +303,8 @@ |
1281 | yield self.assert_method_called(self.service.file_system, |
1282 | 'get_metadata_and_quick_tree_synced', |
1283 | result, path) |
1284 | - self.assert_remote_method('get_metadata_and_quick_tree_synced', |
1285 | + self.assert_remote_method( |
1286 | + 'get_metadata_and_quick_tree_synced', |
1287 | in_signature='s', out_signature='a{ss}') |
1288 | |
1289 | @defer.inlineCallbacks |
1290 | @@ -311,8 +313,8 @@ |
1291 | result = [{'node_id': 'test'}, {'node_id': 'toast'}] |
1292 | yield self.assert_method_called(self.service.file_system, |
1293 | 'get_dirty_nodes', result) |
1294 | - self.assert_remote_method('get_dirty_nodes', |
1295 | - in_signature='', out_signature='aa{ss}') |
1296 | + self.assert_remote_method( |
1297 | + 'get_dirty_nodes', in_signature='', out_signature='aa{ss}') |
1298 | |
1299 | |
1300 | class SharesTests(SharesTestCase): |
1301 | @@ -375,8 +377,8 @@ |
1302 | share_id = '1234' |
1303 | yield self.assert_method_called(self.service.shares, |
1304 | 'subscribe', result, share_id) |
1305 | - self.assert_remote_method('subscribe', |
1306 | - in_signature='s', out_signature=None) |
1307 | + self.assert_remote_method( |
1308 | + 'subscribe', in_signature='s', out_signature=None) |
1309 | |
1310 | @defer.inlineCallbacks |
1311 | def test_unsubscribe(self): |
1312 | @@ -385,8 +387,8 @@ |
1313 | share_id = '1234' |
1314 | yield self.assert_method_called(self.service.shares, |
1315 | 'unsubscribe', result, share_id) |
1316 | - self.assert_remote_method('unsubscribe', |
1317 | - in_signature='s', out_signature=None) |
1318 | + self.assert_remote_method( |
1319 | + 'unsubscribe', in_signature='s', out_signature=None) |
1320 | |
1321 | @defer.inlineCallbacks |
1322 | def test_create_share(self): |
1323 | @@ -575,8 +577,8 @@ |
1324 | path = 'foo' |
1325 | yield self.assert_method_called(self.service.folders, |
1326 | 'create', result, path) |
1327 | - self.assert_remote_method('create', |
1328 | - in_signature='s', out_signature=None) |
1329 | + self.assert_remote_method( |
1330 | + 'create', in_signature='s', out_signature=None) |
1331 | |
1332 | @defer.inlineCallbacks |
1333 | def test_delete(self): |
1334 | @@ -585,8 +587,8 @@ |
1335 | folder_id = '1234' |
1336 | yield self.assert_method_called(self.service.folders, |
1337 | 'delete', result, folder_id) |
1338 | - self.assert_remote_method('delete', |
1339 | - in_signature='s', out_signature=None) |
1340 | + self.assert_remote_method( |
1341 | + 'delete', in_signature='s', out_signature=None) |
1342 | |
1343 | @defer.inlineCallbacks |
1344 | def test_validate_path(self): |
1345 | @@ -595,8 +597,8 @@ |
1346 | path = 'test' |
1347 | yield self.assert_method_called(self.service.folders, |
1348 | 'validate_path', result, path) |
1349 | - self.assert_remote_method('validate_path', |
1350 | - in_signature='s', out_signature='b') |
1351 | + self.assert_remote_method( |
1352 | + 'validate_path', in_signature='s', out_signature='b') |
1353 | |
1354 | @defer.inlineCallbacks |
1355 | def test_get_folders(self): |
1356 | @@ -604,8 +606,8 @@ |
1357 | result = [{'folder_id': '1'}, {'folder_id': '2'}] |
1358 | yield self.assert_method_called(self.service.folders, |
1359 | 'get_folders', result) |
1360 | - self.assert_remote_method('get_folders', |
1361 | - in_signature=None, out_signature='aa{ss}') |
1362 | + self.assert_remote_method( |
1363 | + 'get_folders', in_signature=None, out_signature='aa{ss}') |
1364 | |
1365 | @defer.inlineCallbacks |
1366 | def test_subscribe(self): |
1367 | @@ -632,8 +634,8 @@ |
1368 | path = 'yadda' |
1369 | yield self.assert_method_called(self.service.folders, |
1370 | 'get_info', result, path) |
1371 | - self.assert_remote_method('get_info', |
1372 | - in_signature='s', out_signature='a{ss}') |
1373 | + self.assert_remote_method( |
1374 | + 'get_info', in_signature='s', out_signature='a{ss}') |
1375 | |
1376 | @defer.inlineCallbacks |
1377 | def test_refresh_volumes(self): |
1378 | @@ -672,5 +674,5 @@ |
1379 | result = None |
1380 | yield self.assert_method_called(self.service.public_files, |
1381 | 'get_public_files', result) |
1382 | - self.assert_remote_method('get_public_files', |
1383 | - in_signature=None, out_signature=None) |
1384 | + self.assert_remote_method( |
1385 | + 'get_public_files', in_signature=None, out_signature=None) |
1386 | |
1387 | === modified file 'ubuntuone/platform/tests/ipc/test_linux.py' |
1388 | --- ubuntuone/platform/tests/ipc/test_linux.py 2016-05-29 16:05:27 +0000 |
1389 | +++ ubuntuone/platform/tests/ipc/test_linux.py 2016-07-30 22:04:10 +0000 |
1390 | @@ -55,8 +55,6 @@ |
1391 | DBUS_IFACE_FOLDERS_NAME, |
1392 | DBUS_IFACE_PUBLIC_FILES_NAME, |
1393 | DBUS_IFACE_LAUNCHER_NAME, |
1394 | -# NM_STATE_CONNECTED_GLOBAL, |
1395 | -# NM_STATE_DISCONNECTED, |
1396 | ) |
1397 | from ubuntuone.platform.tools.linux import DBusClient |
1398 | |
1399 | @@ -71,8 +69,8 @@ |
1400 | """ Creates the instance. """ |
1401 | self.bus = bus |
1402 | self.bus.request_name('org.freedesktop.NetworkManager', |
1403 | - flags=dbus.bus.NAME_FLAG_REPLACE_EXISTING | \ |
1404 | - dbus.bus.NAME_FLAG_DO_NOT_QUEUE | \ |
1405 | + flags=dbus.bus.NAME_FLAG_REPLACE_EXISTING | |
1406 | + dbus.bus.NAME_FLAG_DO_NOT_QUEUE | |
1407 | dbus.bus.NAME_FLAG_ALLOW_REPLACEMENT) |
1408 | self.busName = dbus.service.BusName('org.freedesktop.NetworkManager', |
1409 | bus=self.bus) |
1410 | @@ -120,8 +118,8 @@ |
1411 | path = None |
1412 | iface = None |
1413 | client_name = None # parity with other platform's tests |
1414 | - signal_mapping = [] # a list of tuples (signal_name, signal_signature) to |
1415 | - # be used in test_remote_signals |
1416 | + # a list of tuples (name, signature) to be used in test_remote_signals |
1417 | + signal_mapping = [] |
1418 | |
1419 | @defer.inlineCallbacks |
1420 | def setUp(self): |
1421 | @@ -292,12 +290,12 @@ |
1422 | def launcher_factory(): |
1423 | return service |
1424 | |
1425 | - self.patch(dbus_interface, 'Launcher', launcher_factory) |
1426 | + self.patch(dbus_interface.launcher, 'Launcher', launcher_factory) |
1427 | client = self.get_client() |
1428 | yield client.call_method('unset_urgency') |
1429 | self.assertEqual(service._called, {'set_urgent': [((False,), {})]}) |
1430 | - self.assert_remote_method('unset_urgency', |
1431 | - in_signature=None, out_signature=None) |
1432 | + self.assert_remote_method( |
1433 | + 'unset_urgency', in_signature=None, out_signature=None) |
1434 | |
1435 | |
1436 | class TestDBusRestart(DBusTwistedTestCase): |
1437 | |
1438 | === modified file 'ubuntuone/platform/tests/ipc/test_perspective_broker.py' |
1439 | --- ubuntuone/platform/tests/ipc/test_perspective_broker.py 2016-05-29 14:29:29 +0000 |
1440 | +++ ubuntuone/platform/tests/ipc/test_perspective_broker.py 2016-07-30 22:04:10 +0000 |
1441 | @@ -46,9 +46,9 @@ |
1442 | FakeMainTestCase, |
1443 | ) |
1444 | try: |
1445 | - from ubuntuone.devtools.testcases import skipTest, skipIf, skipIfOS |
1446 | + from ubuntuone.devtools.testcases import skipIf, skipIfOS |
1447 | except ImportError: |
1448 | - from ubuntuone.devtools.testcase import skipTest, skipIf, skipIfOS |
1449 | + from ubuntuone.devtools.testcase import skipIf, skipIfOS |
1450 | from ubuntuone.platform.ipc import perspective_broker as ipc |
1451 | from ubuntuone.platform.ipc.perspective_broker import ( |
1452 | Config, |
1453 | @@ -238,8 +238,8 @@ |
1454 | self.mocker.replay() |
1455 | signals = ["demo_signal1", "demo_signal2"] |
1456 | self.broad_caster.remote_register_to_signals(self.client, signals) |
1457 | - for signal in signals: |
1458 | - clients = self.broad_caster.clients_per_signal[signal] |
1459 | + for sig in signals: |
1460 | + clients = self.broad_caster.clients_per_signal[sig] |
1461 | self.assertTrue(self.client in clients) |
1462 | |
1463 | def test_emit_signal(self): |
1464 | @@ -469,8 +469,11 @@ |
1465 | yield client.register_to_signals() |
1466 | # addCleanup support having deferreds as cleanup calls |
1467 | self.addCleanup(client.unregister_to_signals) |
1468 | - client.call_method = lambda method, *a, **kw: \ |
1469 | - getattr(result, method)(*a, **kw) |
1470 | + |
1471 | + def helper(method, *a, **kw): |
1472 | + return getattr(result, method)(*a, **kw) |
1473 | + |
1474 | + client.call_method = helper |
1475 | result = client |
1476 | else: |
1477 | result = root |
1478 | @@ -709,8 +712,8 @@ |
1479 | return test_token |
1480 | |
1481 | tc = TestClass() |
1482 | - self.assertEquals(tc.test_method(), test_token) |
1483 | - self.assertEquals(tc.remote_test_method(), test_token) |
1484 | + self.assertEqual(tc.test_method(), test_token) |
1485 | + self.assertEqual(tc.remote_test_method(), test_token) |
1486 | |
1487 | def test_signal_handlers_renamed(self): |
1488 | """The signal_handlers are renamed.""" |
1489 | @@ -728,8 +731,8 @@ |
1490 | return test_token |
1491 | |
1492 | tc = TestClass() |
1493 | - self.assertEquals(tc.test_signal_handler(), test_token) |
1494 | - self.assertEquals(tc.remote_test_signal_handler(), test_token) |
1495 | + self.assertEqual(tc.test_signal_handler(), test_token) |
1496 | + self.assertEqual(tc.remote_test_signal_handler(), test_token) |
1497 | |
1498 | |
1499 | class IPCInterfaceTestCase(IPCTestCase): |
1500 | @@ -758,8 +761,8 @@ |
1501 | """Ensure that a reference object is returned.""" |
1502 | client = yield self.get_client() |
1503 | remote = yield client.callRemote('get_sync_daemon') |
1504 | - self.assertNotEqual(remote, None, |
1505 | - 'Remote object should not be None') |
1506 | + self.assertNotEqual( |
1507 | + remote, None, 'Remote object should not be None') |
1508 | self.assertIsInstance(remote, RemoteReference) |
1509 | |
1510 | @defer.inlineCallbacks |
1511 | @@ -883,8 +886,9 @@ |
1512 | self.called.append('_request_remote_objects') |
1513 | defer.returnValue(True) |
1514 | |
1515 | - self.patch(ipc_client.UbuntuOneClient, '_request_remote_objects', |
1516 | - fake_request_remote_objects) |
1517 | + self.patch( |
1518 | + ipc_client.UbuntuOneClient, '_request_remote_objects', |
1519 | + fake_request_remote_objects) |
1520 | |
1521 | @defer.inlineCallbacks |
1522 | def fake_register_to_signals(my_self): |
1523 | @@ -893,8 +897,9 @@ |
1524 | self.called.append('register_to_signals') |
1525 | defer.returnValue(True) |
1526 | |
1527 | - self.patch(ipc_client.UbuntuOneClient, 'register_to_signals', |
1528 | - fake_register_to_signals) |
1529 | + self.patch( |
1530 | + ipc_client.UbuntuOneClient, 'register_to_signals', |
1531 | + fake_register_to_signals) |
1532 | |
1533 | def grouper(self, n, iterable, fillvalue=None): |
1534 | "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx" |
1535 | @@ -908,8 +913,9 @@ |
1536 | self.remote_obj_d, self.register_to_signals_d] |
1537 | |
1538 | # the order in which the calls are expected |
1539 | - expected_calls = ('ipc_client_connect','getRootObject', |
1540 | - '_request_remote_objects', 'register_to_signals') |
1541 | + expected_calls = ( |
1542 | + 'ipc_client_connect', 'getRootObject', |
1543 | + '_request_remote_objects', 'register_to_signals') |
1544 | |
1545 | clients = [] |
1546 | while len(clients) < self.num_clients: |
1547 | @@ -936,13 +942,15 @@ |
1548 | step_d.callback(True) |
1549 | |
1550 | yield defer.gatherResults(connected_d) |
1551 | + |
1552 | # reset all the deferreds for the next round of testing |
1553 | - for index, d_name in enumerate(('client_connect_d', |
1554 | - 'client_root_obj_d', 'remote_obj_d', |
1555 | - 'register_to_signals_d')): |
1556 | + methods = enumerate( |
1557 | + ('client_connect_d', 'client_root_obj_d', 'remote_obj_d', |
1558 | + 'register_to_signals_d')) |
1559 | + for i, d_name in methods: |
1560 | new_d = defer.Deferred() |
1561 | setattr(self, d_name, new_d) |
1562 | - deferreds[index] = new_d |
1563 | + deferreds[i] = new_d |
1564 | |
1565 | # assert that all connect calls have been done in the correct |
1566 | # order |
1567 | |
1568 | === modified file 'ubuntuone/platform/tests/ipc/test_unix.py' |
1569 | --- ubuntuone/platform/tests/ipc/test_unix.py 2016-05-29 16:05:27 +0000 |
1570 | +++ ubuntuone/platform/tests/ipc/test_unix.py 2016-07-30 22:04:10 +0000 |
1571 | @@ -43,8 +43,7 @@ |
1572 | self.patch(unix, 'get_domain_socket', lambda: path) |
1573 | |
1574 | factory = unix.DescriptionFactory() |
1575 | - self.assertEqual(factory.server_description_pattern % path, |
1576 | - factory.server) |
1577 | - self.assertEqual(factory.client_description_pattern % path, |
1578 | - factory.client) |
1579 | - |
1580 | + self.assertEqual( |
1581 | + factory.server_description_pattern % path, factory.server) |
1582 | + self.assertEqual( |
1583 | + factory.client_description_pattern % path, factory.client) |
1584 | |
1585 | === modified file 'ubuntuone/platform/tests/ipc/test_windows.py' |
1586 | --- ubuntuone/platform/tests/ipc/test_windows.py 2012-05-22 14:07:55 +0000 |
1587 | +++ ubuntuone/platform/tests/ipc/test_windows.py 2016-07-30 22:04:10 +0000 |
1588 | @@ -38,11 +38,8 @@ |
1589 | |
1590 | def test_get_sd_pb_port(self): |
1591 | """A test for the get_sd_pb_port function.""" |
1592 | - sso_port = 50001 |
1593 | - self.patch(windows, "get_sso_pb_port", lambda: sso_port) |
1594 | result = windows.get_sd_pb_port() |
1595 | - expected = sso_port + windows.SD_SSO_PORT_OFFSET |
1596 | - self.assertEqual(result, expected) |
1597 | + self.assertEqual(result, windows.SD_PORT) |
1598 | |
1599 | |
1600 | class DescriptionFactoryTestCase(TestCase): |
1601 | @@ -54,8 +51,7 @@ |
1602 | self.patch(windows, 'get_sd_pb_port', lambda: port) |
1603 | |
1604 | factory = windows.DescriptionFactory() |
1605 | - self.assertEqual(factory.server_description_pattern % port, |
1606 | - factory.server) |
1607 | - self.assertEqual(factory.client_description_pattern % port, |
1608 | - factory.client) |
1609 | - |
1610 | + self.assertEqual( |
1611 | + factory.server_description_pattern % port, factory.server) |
1612 | + self.assertEqual( |
1613 | + factory.client_description_pattern % port, factory.client) |
1614 | |
1615 | === modified file 'ubuntuone/platform/tests/linux/test_vm.py' |
1616 | --- ubuntuone/platform/tests/linux/test_vm.py 2016-05-29 19:15:01 +0000 |
1617 | +++ ubuntuone/platform/tests/linux/test_vm.py 2016-07-30 22:04:10 +0000 |
1618 | @@ -57,9 +57,9 @@ |
1619 | """Test for get_udf_path.""" |
1620 | suggested_path = u"suggested_path" |
1621 | udf_path = get_udf_path(u"~/" + suggested_path) |
1622 | - self.assertEquals(os.path.join(self.home_dir, |
1623 | - suggested_path.encode('utf-8')), |
1624 | - udf_path) |
1625 | + self.assertEqual( |
1626 | + os.path.join(self.home_dir, suggested_path.encode('utf-8')), |
1627 | + udf_path) |
1628 | |
1629 | |
1630 | class MetadataOldLayoutTests(MetadataTestCase): |
1631 | @@ -108,7 +108,7 @@ |
1632 | path=os.path.join(self.shares_dir, str(idx)), share_id=sid) |
1633 | # ShareFileShelf.keys returns a generator |
1634 | old_keys = [key for key in old_shelf.keys()] |
1635 | - self.assertEquals(10, len(old_keys)) |
1636 | + self.assertEqual(10, len(old_keys)) |
1637 | if self.md_version_None: |
1638 | self.set_md_version('') |
1639 | # set the ro permissions |
1640 | @@ -116,7 +116,7 @@ |
1641 | self.main = FakeMain(self.new_root_dir, self.new_shares_dir, |
1642 | self.data_dir, self.partials_dir) |
1643 | new_keys = [new_key for new_key in self.main.vm.shares.keys()] |
1644 | - self.assertEquals(10, len(new_keys)) |
1645 | + self.assertEqual(10, len(new_keys)) |
1646 | for new_key in new_keys: |
1647 | self.assertIn(new_key, old_keys) |
1648 | # check the old data is still there (in the backup) |
1649 | @@ -140,15 +140,16 @@ |
1650 | '0/6/6/0664f050-9254-45c5-9f31-3482858709e4') |
1651 | os.makedirs(os.path.dirname(share_file)) |
1652 | # this is the str of a version 2 pickle |
1653 | - share_value = "\x80\x02ccanonical.ubuntuone.storage.syncdaemon." + \ |
1654 | - "volume_manager\nShare\nq\x01)\x81q\x02}q\x03(U\x04nameq" + \ |
1655 | - "\x04U\tfakeshareq\x05U\x0eother_usernameq\x06U\x08fakeu" + \ |
1656 | - "serq\x07U\x07subtreeq\x08U$beb0c48c-6755-4fbd-938f-3d20" + \ |
1657 | - "fa7b102bq\tU\x12other_visible_nameq\nU\tfake userq\x0bU" + \ |
1658 | - "\x0caccess_levelq\x0cU\x04Viewq\rU\x04pathq\x0eU=/home/" + \ |
1659 | - "auser/Magicicada/Shared With Me/fakeshare from fakeuser" + \ |
1660 | - "q\x0fU\x08acceptedq\x10\x88U\x02idq\x11U$0664f050-9254-" + \ |
1661 | - "45c5-9f31-3482858709e4q\x12ub." |
1662 | + share_value = ( |
1663 | + "\x80\x02ccanonical.ubuntuone.storage.syncdaemon." |
1664 | + "volume_manager\nShare\nq\x01)\x81q\x02}q\x03(U\x04nameq" |
1665 | + "\x04U\tfakeshareq\x05U\x0eother_usernameq\x06U\x08fakeu" |
1666 | + "serq\x07U\x07subtreeq\x08U$beb0c48c-6755-4fbd-938f-3d20" |
1667 | + "fa7b102bq\tU\x12other_visible_nameq\nU\tfake userq\x0bU" |
1668 | + "\x0caccess_levelq\x0cU\x04Viewq\rU\x04pathq\x0eU=/home/" |
1669 | + "auser/Magicicada/Shared With Me/fakeshare from fakeuser" |
1670 | + "q\x0fU\x08acceptedq\x10\x88U\x02idq\x11U$0664f050-9254-" |
1671 | + "45c5-9f31-3482858709e4q\x12ub.") |
1672 | with open(share_file, 'w') as fd: |
1673 | fd.write(share_value) |
1674 | |
1675 | @@ -164,7 +165,7 @@ |
1676 | self.main = FakeMain(self.new_root_dir, self.new_shares_dir, |
1677 | self.data_dir, self.partials_dir) |
1678 | new_keys = [new_key for new_key in self.main.vm.shares.keys()] |
1679 | - self.assertEquals(2, len(new_keys)) # the fake share plus root |
1680 | + self.assertEqual(2, len(new_keys)) # the fake share plus root |
1681 | for key in [request.ROOT, share.id]: |
1682 | self.assertIn(key, new_keys) |
1683 | self.check_version() |
1684 | @@ -185,12 +186,12 @@ |
1685 | self.main = FakeMain(self.new_root_dir, self.new_shares_dir, |
1686 | self.data_dir, self.partials_dir) |
1687 | self.assertTrue(os.path.exists(self.new_root_dir + '/foo.u1conflict')) |
1688 | - self.assertTrue(os.path.exists(self.new_root_dir + \ |
1689 | - '/foo.u1conflict.23')) |
1690 | - self.assertTrue(os.path.exists(self.new_shares_dir + \ |
1691 | - '/.u1partial.bar')) |
1692 | - self.assertTrue(os.path.exists(self.new_shares_dir + \ |
1693 | - '/baz/baz.u1conflict')) |
1694 | + self.assertTrue( |
1695 | + os.path.exists(self.new_root_dir + '/foo.u1conflict.23')) |
1696 | + self.assertTrue( |
1697 | + os.path.exists(self.new_shares_dir + '/.u1partial.bar')) |
1698 | + self.assertTrue( |
1699 | + os.path.exists(self.new_shares_dir + '/baz/baz.u1conflict')) |
1700 | self.check_version() |
1701 | |
1702 | def test_upgrade_2_more(self): |
1703 | @@ -364,18 +365,18 @@ |
1704 | maybe_old_shelf[request.ROOT] = root_share |
1705 | for idx in range(1, 10): |
1706 | share_id = str(uuid.uuid4()) |
1707 | - maybe_old_shelf[share_id] = \ |
1708 | - _Share(share_id=share_id, |
1709 | - path=os.path.join(self.shares_dir, str(idx))) |
1710 | + maybe_old_shelf[share_id] = _Share( |
1711 | + share_id=share_id, path=os.path.join(self.shares_dir, str(idx)) |
1712 | + ) |
1713 | # ShareFileShelf.keys returns a generator |
1714 | maybe_old_keys = [key for key in maybe_old_shelf.keys()] |
1715 | - self.assertEquals(10, len(maybe_old_keys)) |
1716 | + self.assertEqual(10, len(maybe_old_keys)) |
1717 | if self.md_version_None: |
1718 | self.set_md_version('') |
1719 | self.main = FakeMain(self.new_root_dir, self.new_shares_dir, |
1720 | self.data_dir, self.partials_dir) |
1721 | new_keys = [new_key for new_key in self.main.vm.shares.keys()] |
1722 | - self.assertEquals(10, len(new_keys)) |
1723 | + self.assertEqual(10, len(new_keys)) |
1724 | for new_key in new_keys: |
1725 | self.assertIn(new_key, maybe_old_keys) |
1726 | # as we didn't actually upgrade the shelf, just the .version file |
1727 | @@ -384,7 +385,7 @@ |
1728 | backup_shelf = LegacyShareFileShelf(os.path.join(self.vm_data_dir, |
1729 | '0.bkp')) |
1730 | backup_keys = [key for key in backup_shelf.keys()] |
1731 | - self.assertEquals(0, len(backup_keys)) |
1732 | + self.assertEqual(0, len(backup_keys)) |
1733 | self.check_version() |
1734 | |
1735 | def test_upgrade_3(self): |
1736 | @@ -411,7 +412,7 @@ |
1737 | self.assertFalse(os.path.exists(self.root_dir)) |
1738 | self.assertTrue(os.path.exists(self.shares_dir)) |
1739 | self.assertTrue(os.path.islink(self.shares_dir), self.shares_dir) |
1740 | - self.assertEquals(self.shares_dir, self.main.shares_dir_link) |
1741 | + self.assertEqual(self.shares_dir, self.main.shares_dir_link) |
1742 | self.assertTrue(os.path.exists(os.path.join(self.new_root_dir, |
1743 | 'test_dir'))) |
1744 | self.assertTrue(os.path.exists(os.path.join(self.new_root_dir, |
1745 | @@ -449,7 +450,7 @@ |
1746 | self.assertFalse(os.path.exists(self.root_dir)) |
1747 | self.assertTrue(os.path.exists(self.shares_dir)) |
1748 | self.assertTrue(os.path.islink(self.shares_dir)) |
1749 | - self.assertEquals(self.shares_dir, self.main.shares_dir_link) |
1750 | + self.assertEqual(self.shares_dir, self.main.shares_dir_link) |
1751 | self.assertTrue(os.path.exists(os.path.join(self.new_root_dir, |
1752 | 'test_dir'))) |
1753 | self.assertTrue(os.path.exists(os.path.join(self.new_root_dir, |
1754 | @@ -459,8 +460,8 @@ |
1755 | self.assertTrue(os.path.exists(share_path)) |
1756 | self.assertTrue(os.path.exists(os.path.join(share_path, 'test_dir'))) |
1757 | self.assertTrue(os.path.exists(os.path.join(share_path, 'test_file'))) |
1758 | - self.assertEquals(self.main.shares_dir, |
1759 | - os.readlink(self.main.shares_dir_link)) |
1760 | + self.assertEqual( |
1761 | + self.main.shares_dir, os.readlink(self.main.shares_dir_link)) |
1762 | self.check_version() |
1763 | |
1764 | |
1765 | @@ -475,7 +476,8 @@ |
1766 | self.home_dir = os.path.join(self.tmpdir, 'home', 'ubuntuonehacker') |
1767 | self.share_md_dir = os.path.join(self.vm_data_dir, 'shares') |
1768 | self.shared_md_dir = os.path.join(self.vm_data_dir, 'shared') |
1769 | - self.u1_dir = os.path.join(self.home_dir, os.path.split(self.u1_dir)[1]) |
1770 | + self.u1_dir = os.path.join( |
1771 | + self.home_dir, os.path.split(self.u1_dir)[1]) |
1772 | self.root_dir = self.u1_dir |
1773 | self.shares_dir = os.path.join(self.tmpdir, 'shares') |
1774 | self.shares_dir_link = os.path.join(self.u1_dir, 'Shared With Me') |
1775 | @@ -503,7 +505,7 @@ |
1776 | self.assertTrue(os.path.exists(self.root_dir)) |
1777 | self.assertTrue(os.path.exists(old_shares)) |
1778 | self.assertTrue(os.path.islink(old_shares)) |
1779 | - self.assertEquals(old_shares, self.main.shares_dir_link) |
1780 | + self.assertEqual(old_shares, self.main.shares_dir_link) |
1781 | self.check_version() |
1782 | |
1783 | def test_upgrade_None_to_last_phantom_share_path(self): |
1784 | @@ -535,7 +537,7 @@ |
1785 | self.assertTrue(os.path.exists(self.root_dir)) |
1786 | self.assertTrue(os.path.exists(self.shares_dir)) |
1787 | self.assertTrue(os.path.islink(old_shares)) |
1788 | - self.assertEquals(old_shares, self.main.shares_dir_link) |
1789 | + self.assertEqual(old_shares, self.main.shares_dir_link) |
1790 | self.check_version() |
1791 | |
1792 | def test_upgrade_4(self): |
1793 | @@ -551,8 +553,8 @@ |
1794 | self.set_md_version('') |
1795 | self.main = FakeMain(self.root_dir, self.shares_dir, |
1796 | self.data_dir, self.partials_dir) |
1797 | - self.assertEquals(self.main.shares_dir, |
1798 | - os.readlink(self.main.shares_dir_link)) |
1799 | + self.assertEqual( |
1800 | + self.main.shares_dir, os.readlink(self.main.shares_dir_link)) |
1801 | self.check_version() |
1802 | |
1803 | def test_upgrade_5(self): |
1804 | @@ -608,22 +610,22 @@ |
1805 | |
1806 | def compare_share(share, old_share): |
1807 | """Compare two shares, new and old""" |
1808 | - self.assertEquals(share.volume_id, old_share.id) |
1809 | - self.assertEquals(share.path, old_share.path) |
1810 | - self.assertEquals(share.node_id, old_share.subtree) |
1811 | + self.assertEqual(share.volume_id, old_share.id) |
1812 | + self.assertEqual(share.path, old_share.path) |
1813 | + self.assertEqual(share.node_id, old_share.subtree) |
1814 | if not isinstance(share, Root): |
1815 | - self.assertEquals(share.name, old_share.name) |
1816 | - self.assertEquals(share.other_username, |
1817 | - old_share.other_username) |
1818 | - self.assertEquals(share.other_visible_name, |
1819 | - old_share.other_visible_name) |
1820 | - self.assertEquals(share.access_level, old_share.access_level) |
1821 | + self.assertEqual(share.name, old_share.name) |
1822 | + self.assertEqual( |
1823 | + share.other_username, old_share.other_username) |
1824 | + self.assertEqual( |
1825 | + share.other_visible_name, old_share.other_visible_name) |
1826 | + self.assertEqual(share.access_level, old_share.access_level) |
1827 | |
1828 | for sid in vm.shares: |
1829 | old_share = legacy_shares[sid] |
1830 | share = vm.shares[sid] |
1831 | - self.assertTrue(isinstance(share, Share) or \ |
1832 | - isinstance(share, Root)) |
1833 | + self.assertTrue( |
1834 | + isinstance(share, Share) or isinstance(share, Root)) |
1835 | compare_share(share, old_share) |
1836 | |
1837 | for sid in vm.shared: |
1838 | @@ -677,9 +679,9 @@ |
1839 | for idx, name in enumerate(['dir'] * 5): |
1840 | udf_id = str(uuid.uuid4()) |
1841 | udf_name = name + '_' + str(idx) |
1842 | - udf = _UDF(udf_id, str(uuid.uuid4()), u'~/' + \ |
1843 | - udf_name.decode('utf-8'), |
1844 | - os.path.join(self.home_dir, udf_name)) |
1845 | + udf = _UDF( |
1846 | + udf_id, str(uuid.uuid4()), u'~/' + udf_name.decode('utf-8'), |
1847 | + os.path.join(self.home_dir, udf_name)) |
1848 | if idx % 2: |
1849 | udf.subscribed = True |
1850 | else: |
1851 | @@ -701,21 +703,22 @@ |
1852 | |
1853 | def compare_share(share, old_share): |
1854 | """Compare two shares, new and old""" |
1855 | - self.assertEquals(share.volume_id, old_share.id) |
1856 | - self.assertEquals(share.path, old_share.path) |
1857 | - self.assertEquals(share.node_id, old_share.subtree) |
1858 | + self.assertEqual(share.volume_id, old_share.id) |
1859 | + self.assertEqual(share.path, old_share.path) |
1860 | + self.assertEqual(share.node_id, old_share.subtree) |
1861 | if not isinstance(share, Root): |
1862 | - self.assertEquals(share.name, old_share.name) |
1863 | - self.assertEquals(share.other_username, |
1864 | - old_share.other_username) |
1865 | - self.assertEquals(share.other_visible_name, |
1866 | - old_share.other_visible_name) |
1867 | - self.assertEquals(share.access_level, old_share.access_level) |
1868 | + self.assertEqual(share.name, old_share.name) |
1869 | + self.assertEqual( |
1870 | + share.other_username, old_share.other_username) |
1871 | + self.assertEqual( |
1872 | + share.other_visible_name, old_share.other_visible_name) |
1873 | + self.assertEqual(share.access_level, old_share.access_level) |
1874 | |
1875 | for sid in vm.shares: |
1876 | old_share = legacy_shares[sid] |
1877 | share = vm.shares[sid] |
1878 | - self.assertTrue(isinstance(share, Share) or isinstance(share, Root)) |
1879 | + self.assertTrue( |
1880 | + isinstance(share, Share) or isinstance(share, Root)) |
1881 | compare_share(share, old_share) |
1882 | |
1883 | for sid in vm.shared: |
1884 | @@ -728,13 +731,13 @@ |
1885 | old_udf = legacy_udfs[udf_id] |
1886 | udf = vm.udfs[udf_id] |
1887 | self.assertTrue(isinstance(udf, UDF)) |
1888 | - self.assertEquals(udf.volume_id, old_udf.id) |
1889 | - self.assertEquals(udf.path, old_udf.path) |
1890 | - self.assertEquals(udf.node_id, old_udf.node_id) |
1891 | - self.assertEquals(udf.suggested_path, old_udf.suggested_path) |
1892 | - self.assertEquals(type(udf.suggested_path), |
1893 | - type(old_udf.suggested_path)) |
1894 | - self.assertEquals(udf.subscribed, old_udf.subscribed) |
1895 | + self.assertEqual(udf.volume_id, old_udf.id) |
1896 | + self.assertEqual(udf.path, old_udf.path) |
1897 | + self.assertEqual(udf.node_id, old_udf.node_id) |
1898 | + self.assertEqual(udf.suggested_path, old_udf.suggested_path) |
1899 | + self.assertEqual( |
1900 | + type(udf.suggested_path), type(old_udf.suggested_path)) |
1901 | + self.assertEqual(udf.subscribed, old_udf.subscribed) |
1902 | |
1903 | def test_upgrade_5_partial_upgrade(self): |
1904 | """Test migration from version 5 with upgrade to 6 unfinished.""" |
1905 | @@ -765,12 +768,12 @@ |
1906 | else: |
1907 | # add a 'new' Share dict to the shelf |
1908 | share.access_level = ACCESS_LEVEL_RW |
1909 | - share = Share(path=share.path, |
1910 | - volume_id=share.id, name=share.name, |
1911 | - access_level=share.access_level, |
1912 | - other_username=share.other_username, |
1913 | - other_visible_name=share.other_visible_name, |
1914 | - node_id=share.subtree) |
1915 | + share = Share( |
1916 | + path=share.path, volume_id=share.id, name=share.name, |
1917 | + access_level=share.access_level, |
1918 | + other_username=share.other_username, |
1919 | + other_visible_name=share.other_visible_name, |
1920 | + node_id=share.subtree) |
1921 | legacy_shares[sid] = share.__dict__ |
1922 | |
1923 | # create shared shares |
1924 | @@ -816,28 +819,32 @@ |
1925 | old_id = getattr(old_share, 'id', None) |
1926 | if old_id is None: |
1927 | old_id = old_share['volume_id'] |
1928 | - self.assertEquals(share.volume_id, old_id) |
1929 | - self.assertEquals(share.path, |
1930 | + self.assertEqual(share.volume_id, old_id) |
1931 | + self.assertEqual( |
1932 | + share.path, |
1933 | getattr(old_share, 'path', None) or old_share['path']) |
1934 | - self.assertEquals(share.node_id, |
1935 | + self.assertEqual( |
1936 | + share.node_id, |
1937 | getattr(old_share, 'subtree', None) or old_share['node_id']) |
1938 | if not isinstance(share, Root): |
1939 | - self.assertEquals(share.name, |
1940 | + self.assertEqual( |
1941 | + share.name, |
1942 | getattr(old_share, 'name', None) or old_share['name']) |
1943 | - self.assertEquals(share.other_username, |
1944 | - getattr(old_share, 'other_username', None) \ |
1945 | - or old_share['other_username']) |
1946 | - self.assertEquals(share.other_visible_name, |
1947 | - getattr(old_share, 'other_visible_name', None) \ |
1948 | - or old_share['other_visible_name']) |
1949 | - self.assertEquals(share.access_level, |
1950 | - getattr(old_share, 'access_level', None) \ |
1951 | - or old_share['access_level']) |
1952 | + username = (getattr(old_share, 'other_username', None) or |
1953 | + old_share['other_username']) |
1954 | + self.assertEqual(share.other_username, username) |
1955 | + name = (getattr(old_share, 'other_visible_name', None) or |
1956 | + old_share['other_visible_name']) |
1957 | + self.assertEqual(share.other_visible_name, name) |
1958 | + level = (getattr(old_share, 'access_level', None) or |
1959 | + old_share['access_level']) |
1960 | + self.assertEqual(share.access_level, level) |
1961 | |
1962 | for sid in vm.shares: |
1963 | old_share = legacy_shares[sid] |
1964 | share = vm.shares[sid] |
1965 | - self.assertTrue(isinstance(share, Share) or isinstance(share, Root)) |
1966 | + self.assertTrue( |
1967 | + isinstance(share, Share) or isinstance(share, Root)) |
1968 | compare_share(share, old_share) |
1969 | |
1970 | for sid in vm.shared: |
1971 | @@ -906,13 +913,13 @@ |
1972 | old_upgrade_share_to_volume |
1973 | |
1974 | shares = LegacyShareFileShelf(self.share_md_dir) |
1975 | - self.assertEquals(len(list(shares.keys())), len(legacy_shares.keys())) |
1976 | + self.assertEqual(len(list(shares.keys())), len(legacy_shares.keys())) |
1977 | for sid, share in shares.iteritems(): |
1978 | old_share = legacy_shares[sid] |
1979 | self.assertTrue(isinstance(share, _Share)) |
1980 | self.assertTrue(isinstance(old_share, _Share)) |
1981 | shared = LegacyShareFileShelf(self.shared_md_dir) |
1982 | - self.assertEquals(len(list(shared.keys())), len(legacy_shared.keys())) |
1983 | + self.assertEqual(len(list(shared.keys())), len(legacy_shared.keys())) |
1984 | for sid, share in shared.iteritems(): |
1985 | old_share = legacy_shared[sid] |
1986 | self.assertTrue(isinstance(share, _Share)) |
1987 | @@ -941,11 +948,11 @@ |
1988 | for idx, name in enumerate(['share'] * 10): |
1989 | sid = str(uuid.uuid4()) |
1990 | share_name = name + '_' + str(idx) |
1991 | - share = Share(path=os.path.join(self.shares_dir, share_name), |
1992 | - volume_id=sid, name=share_name, |
1993 | - node_id=str(uuid.uuid4()), |
1994 | - other_username='username' + str(idx), |
1995 | - other_visible_name='visible name ' + str(idx)) |
1996 | + share = Share( |
1997 | + path=os.path.join(self.shares_dir, share_name), |
1998 | + volume_id=sid, name=share_name, node_id=str(uuid.uuid4()), |
1999 | + other_username='username' + str(idx), |
2000 | + other_visible_name='visible name ' + str(idx)) |
2001 | if idx % 2: |
2002 | share.access_level = ACCESS_LEVEL_RW |
2003 | else: |
2004 | @@ -972,9 +979,9 @@ |
2005 | for idx, name in enumerate(['dir'] * 5): |
2006 | udf_id = str(uuid.uuid4()) |
2007 | udf_name = name + '_' + str(idx) |
2008 | - udf = UDF(udf_id, str(uuid.uuid4()), u'~/' + \ |
2009 | - udf_name.decode('utf-8'), |
2010 | - os.path.join(self.home_dir, udf_name)) |
2011 | + udf = UDF( |
2012 | + udf_id, str(uuid.uuid4()), u'~/' + udf_name.decode('utf-8'), |
2013 | + os.path.join(self.home_dir, udf_name)) |
2014 | if idx % 2: |
2015 | udf.subscribed = True |
2016 | else: |
2017 | @@ -997,7 +1004,8 @@ |
2018 | for sid in vm.shares: |
2019 | old_share = legacy_shares[sid] |
2020 | share = vm.shares[sid] |
2021 | - self.assertTrue(isinstance(share, Share) or isinstance(share, Root)) |
2022 | + self.assertTrue( |
2023 | + isinstance(share, Share) or isinstance(share, Root)) |
2024 | self.assertEqual(share.__dict__, old_share.__dict__) |
2025 | |
2026 | for sid in vm.shared: |
2027 | |
2028 | === modified file 'ubuntuone/platform/tests/os_helper/test_darwin.py' |
2029 | --- ubuntuone/platform/tests/os_helper/test_darwin.py 2016-05-29 19:15:01 +0000 |
2030 | +++ ubuntuone/platform/tests/os_helper/test_darwin.py 2016-07-30 22:04:10 +0000 |
2031 | @@ -154,8 +154,8 @@ |
2032 | def test_top_down(self, topdown=True, expected=None): |
2033 | """Walk the tree top-down.""" |
2034 | result = os.walk(self.basedir, topdown) |
2035 | - expected = self._build_dict_from_walk(result, |
2036 | - path_transformer=darwin.get_syncdaemon_valid_path, |
2037 | + expected = self._build_dict_from_walk( |
2038 | + result, path_transformer=darwin.get_syncdaemon_valid_path, |
2039 | name_transformer=darwin.get_syncdaemon_valid_path) |
2040 | super(TestIllegalPathsWalk, self).test_top_down(topdown=topdown, |
2041 | expected=expected) |
2042 | |
2043 | === modified file 'ubuntuone/platform/tests/os_helper/test_os_helper.py' |
2044 | --- ubuntuone/platform/tests/os_helper/test_os_helper.py 2015-09-20 20:52:48 +0000 |
2045 | +++ ubuntuone/platform/tests/os_helper/test_os_helper.py 2016-07-30 22:04:10 +0000 |
2046 | @@ -86,7 +86,9 @@ |
2047 | self.testfile = os.path.join(self.basedir, test_file_name) |
2048 | |
2049 | if valid_file_path_builder is None: |
2050 | - valid_file_path_builder = lambda x: x # skip |
2051 | + |
2052 | + def valid_file_path_builder(x): |
2053 | + return x # skip |
2054 | |
2055 | self.valid_file_path_builder = valid_file_path_builder |
2056 | self.valid_path = self.valid_file_path_builder(self.testfile) |
2057 | @@ -228,8 +230,8 @@ |
2058 | |
2059 | def test_rename_not_there(self): |
2060 | """Rename something that does not exist.""" |
2061 | - exc = self.assertRaises(OSError, rename, |
2062 | - os.path.join(self.basedir, 'no'), 'foo') |
2063 | + exc = self.assertRaises( |
2064 | + OSError, rename, os.path.join(self.basedir, 'no'), 'foo') |
2065 | self.assertEqual(exc.errno, errno.ENOENT) |
2066 | |
2067 | def test_rename_file(self, target=None): |
2068 | @@ -240,9 +242,11 @@ |
2069 | assert path_exists(self.testfile) |
2070 | rename(self.testfile, target) |
2071 | |
2072 | - self.assertFalse(path_exists(self.testfile), |
2073 | + self.assertFalse( |
2074 | + path_exists(self.testfile), |
2075 | 'Path %r should not exist after rename.' % self.testfile) |
2076 | - self.assertTrue(path_exists(target), |
2077 | + self.assertTrue( |
2078 | + path_exists(target), |
2079 | 'Path %r should exist after rename.' % target) |
2080 | |
2081 | def test_rename_dir(self, source=None, target=None): |
2082 | @@ -255,9 +259,11 @@ |
2083 | |
2084 | rename(source, target) |
2085 | |
2086 | - self.assertFalse(path_exists(source), |
2087 | + self.assertFalse( |
2088 | + path_exists(source), |
2089 | 'Path %r should not exist after rename.' % source) |
2090 | - self.assertTrue(path_exists(target), |
2091 | + self.assertTrue( |
2092 | + path_exists(target), |
2093 | 'Path %r should exist after rename.' % target) |
2094 | |
2095 | def test_listdir(self, expected_result=None): |
2096 | |
2097 | === modified file 'ubuntuone/platform/tests/os_helper/test_windows.py' |
2098 | --- ubuntuone/platform/tests/os_helper/test_windows.py 2016-05-29 19:15:01 +0000 |
2099 | +++ ubuntuone/platform/tests/os_helper/test_windows.py 2016-07-30 22:04:10 +0000 |
2100 | @@ -69,7 +69,10 @@ |
2101 | set_no_rights, |
2102 | WINDOWS_ILLEGAL_CHARS_MAP, |
2103 | ) |
2104 | -from ubuntuone.platform.tests.os_helper.test_os_helper import OSWrapperTests, WalkTests |
2105 | +from ubuntuone.platform.tests.os_helper.test_os_helper import ( |
2106 | + OSWrapperTests, |
2107 | + WalkTests, |
2108 | +) |
2109 | |
2110 | |
2111 | # ugly trick to stop pylint for complaining about |
2112 | @@ -309,7 +312,7 @@ |
2113 | exc = self.assertRaises(WindowsError, _set_file_attributes, |
2114 | self.valid_path, groups) |
2115 | self.assertEqual(errno.ENOENT, exc.errno, |
2116 | - 'Errno should be file not found.') |
2117 | + 'Errno should be file not found.') |
2118 | |
2119 | |
2120 | class DecoratorsTestCase(TestCase): |
2121 | @@ -319,8 +322,8 @@ |
2122 | if method_name is None: |
2123 | self.assertRaises(AssertionError, assert_windows_path, path) |
2124 | else: |
2125 | - exc = self.assertRaises(AssertionError, assert_windows_path, path, |
2126 | - method_name) |
2127 | + exc = self.assertRaises( |
2128 | + AssertionError, assert_windows_path, path, method_name) |
2129 | self.assertTrue(method_name in exc.message) |
2130 | |
2131 | def test_assert_windows_path_slash(self): |
2132 | @@ -395,8 +398,8 @@ |
2133 | """Walk the tree top-down.""" |
2134 | valid_base_dir = get_windows_valid_path(self.basedir) |
2135 | result = os.walk(valid_base_dir, topdown) |
2136 | - expected = self._build_dict_from_walk(result, |
2137 | - path_transformer=get_syncdaemon_valid_path, |
2138 | + expected = self._build_dict_from_walk( |
2139 | + result, path_transformer=get_syncdaemon_valid_path, |
2140 | name_transformer=_unicode_to_bytes) |
2141 | super(TestIllegalPathsWalk, self).test_top_down(topdown=topdown, |
2142 | expected=expected) |
2143 | @@ -442,8 +445,8 @@ |
2144 | def test_os_listdir(self): |
2145 | """Test the list dir.""" |
2146 | expected_result = self.dirs + self.files |
2147 | - self.assertEqual(sorted(expected_result), |
2148 | - sorted(os_helper.listdir(self.temp))) |
2149 | + self.assertEqual( |
2150 | + sorted(expected_result), sorted(os_helper.listdir(self.temp))) |
2151 | |
2152 | def test_os_walk(self): |
2153 | """Test the walk.""" |
2154 | |
2155 | === modified file 'ubuntuone/platform/tests/session/test_common.py' |
2156 | --- ubuntuone/platform/tests/session/test_common.py 2012-06-26 15:02:12 +0000 |
2157 | +++ ubuntuone/platform/tests/session/test_common.py 2016-07-30 22:04:10 +0000 |
2158 | @@ -43,4 +43,3 @@ |
2159 | inhibitor = Inhibitor() |
2160 | result = yield inhibitor.inhibit(INHIBIT_LOGOUT_SUSPEND, 'test') |
2161 | self.assertEqual(inhibitor, result) |
2162 | - |
2163 | |
2164 | === modified file 'ubuntuone/platform/tests/session/test_linux.py' |
2165 | --- ubuntuone/platform/tests/session/test_linux.py 2015-09-20 20:52:48 +0000 |
2166 | +++ ubuntuone/platform/tests/session/test_linux.py 2016-07-30 22:04:10 +0000 |
2167 | @@ -114,8 +114,7 @@ |
2168 | dbus.bus.NAME_FLAG_ALLOW_REPLACEMENT) |
2169 | name = self.bus.request_name(bus_name, flags=flags) |
2170 | self.assertNotEqual(name, dbus.bus.REQUEST_NAME_REPLY_EXISTS) |
2171 | - fake = object_class(object_path=object_path, conn=self.bus, |
2172 | - **kwargs) |
2173 | + fake = object_class(object_path=object_path, conn=self.bus, **kwargs) |
2174 | self.addCleanup(fake.remove_from_connection) |
2175 | self.addCleanup(self.bus.release_name, bus_name) |
2176 | |
2177 | @@ -125,9 +124,8 @@ |
2178 | def test_inhibit_call(self): |
2179 | """Test the inhibit call.""" |
2180 | fakeinhibitor = self.register_fakeserver( |
2181 | - session.SESSION_MANAGER_BUSNAME, |
2182 | - session.SESSION_MANAGER_PATH, |
2183 | - FakeGnomeSessionManagerInhibitor) |
2184 | + session.SESSION_MANAGER_BUSNAME, session.SESSION_MANAGER_PATH, |
2185 | + FakeGnomeSessionManagerInhibitor) |
2186 | inhibit_result = yield session.inhibit_logout_suspend("fake reason") |
2187 | self.assertNotEqual(None, inhibit_result) |
2188 | result = fakeinhibitor.IsInhibited(session.INHIBIT_LOGGING_OUT) |
2189 | |
2190 | === modified file 'ubuntuone/platform/tests/sync_menu/test_linux.py' |
2191 | --- ubuntuone/platform/tests/sync_menu/test_linux.py 2015-09-17 02:20:40 +0000 |
2192 | +++ ubuntuone/platform/tests/sync_menu/test_linux.py 2016-07-30 22:04:10 +0000 |
2193 | @@ -214,46 +214,50 @@ |
2194 | self.syncdaemon_service = FakeSyncdaemonService() |
2195 | self.status_frontend = FakeStatusFrontend() |
2196 | self._paused = False |
2197 | - self.sync_menu = sync_menu.UbuntuOneSyncMenu(self.status_frontend, |
2198 | - self.syncdaemon_service) |
2199 | + self.sync_menu = sync_menu.UbuntuOneSyncMenu( |
2200 | + self.status_frontend, self.syncdaemon_service) |
2201 | |
2202 | def test_init(self): |
2203 | """Check that the menu is properly initialized.""" |
2204 | - self.assertIsInstance(FakeSyncMenuApp.data['server'], |
2205 | - linux.Dbusmenu.Server) |
2206 | - self.assertEqual(self.sync_menu.open_u1.get_parent(), |
2207 | - self.sync_menu.root_menu) |
2208 | - self.assertEqual(self.sync_menu.go_to_web.get_parent(), |
2209 | - self.sync_menu.root_menu) |
2210 | - self.assertEqual(self.sync_menu.more_storage.get_parent(), |
2211 | - self.sync_menu.root_menu) |
2212 | - self.assertEqual(self.sync_menu.get_help.get_parent(), |
2213 | - self.sync_menu.root_menu) |
2214 | - self.assertEqual(self.sync_menu.transfers.get_parent(), |
2215 | - self.sync_menu.root_menu) |
2216 | - self.assertEqual(self.sync_menu.open_u1_folder.get_parent(), |
2217 | - self.sync_menu.root_menu) |
2218 | - self.assertEqual(self.sync_menu.share_file.get_parent(), |
2219 | - self.sync_menu.root_menu) |
2220 | - |
2221 | - self.assertEqual(self.sync_menu.open_u1.property_get( |
2222 | - linux.Dbusmenu.MENUITEM_PROP_LABEL), linux.OPEN_U1) |
2223 | - self.assertEqual(self.sync_menu.open_u1_folder.property_get( |
2224 | - linux.Dbusmenu.MENUITEM_PROP_LABEL), linux.OPEN_U1_FOLDER) |
2225 | - self.assertEqual(self.sync_menu.share_file.property_get( |
2226 | - linux.Dbusmenu.MENUITEM_PROP_LABEL), linux.SHARE_A_FILE) |
2227 | - self.assertEqual(self.sync_menu.go_to_web.property_get( |
2228 | - linux.Dbusmenu.MENUITEM_PROP_LABEL), linux.GO_TO_WEB) |
2229 | - self.assertEqual(self.sync_menu.transfers.property_get( |
2230 | - linux.Dbusmenu.MENUITEM_PROP_LABEL), linux.TRANSFERS) |
2231 | - self.assertEqual(self.sync_menu.more_storage.property_get( |
2232 | - linux.Dbusmenu.MENUITEM_PROP_LABEL), linux.MORE_STORAGE) |
2233 | - self.assertEqual(self.sync_menu.get_help.property_get( |
2234 | - linux.Dbusmenu.MENUITEM_PROP_LABEL), linux.GET_HELP) |
2235 | + self.assertIsInstance( |
2236 | + FakeSyncMenuApp.data['server'], linux.Dbusmenu.Server) |
2237 | + self.assertEqual( |
2238 | + self.sync_menu.open_u1.get_parent(), self.sync_menu.root_menu) |
2239 | + self.assertEqual( |
2240 | + self.sync_menu.go_to_web.get_parent(), self.sync_menu.root_menu) |
2241 | + self.assertEqual( |
2242 | + self.sync_menu.more_storage.get_parent(), self.sync_menu.root_menu) |
2243 | + self.assertEqual( |
2244 | + self.sync_menu.get_help.get_parent(), self.sync_menu.root_menu) |
2245 | + self.assertEqual( |
2246 | + self.sync_menu.transfers.get_parent(), self.sync_menu.root_menu) |
2247 | + self.assertEqual( |
2248 | + self.sync_menu.open_u1_folder.get_parent(), |
2249 | + self.sync_menu.root_menu) |
2250 | + self.assertEqual( |
2251 | + self.sync_menu.share_file.get_parent(), self.sync_menu.root_menu) |
2252 | + |
2253 | + def get_prop(item): |
2254 | + return item.property_get(linux.Dbusmenu.MENUITEM_PROP_LABEL) |
2255 | + |
2256 | + self.assertEqual( |
2257 | + get_prop(self.sync_menu.open_u1), linux.OPEN_U1) |
2258 | + self.assertEqual( |
2259 | + get_prop(self.sync_menu.open_u1_folder), linux.OPEN_U1_FOLDER) |
2260 | + self.assertEqual( |
2261 | + get_prop(self.sync_menu.share_file), linux.SHARE_A_FILE) |
2262 | + self.assertEqual( |
2263 | + get_prop(self.sync_menu.go_to_web), linux.GO_TO_WEB) |
2264 | + self.assertEqual( |
2265 | + get_prop(self.sync_menu.transfers), linux.TRANSFERS) |
2266 | + self.assertEqual( |
2267 | + get_prop(self.sync_menu.more_storage), linux.MORE_STORAGE) |
2268 | + self.assertEqual( |
2269 | + get_prop(self.sync_menu.get_help), linux.GET_HELP) |
2270 | |
2271 | self.sync_menu.transfers.update_progress() |
2272 | - self.assertIsInstance(self.sync_menu.transfers.separator, |
2273 | - linux.Dbusmenu.Menuitem) |
2274 | + self.assertIsInstance( |
2275 | + self.sync_menu.transfers.separator, linux.Dbusmenu.Menuitem) |
2276 | |
2277 | def test_get_launch_context_with_display(self): |
2278 | """Check that the proper context is returned.""" |
2279 | @@ -285,7 +289,8 @@ |
2280 | arg = "--test-arg" |
2281 | self.sync_menu._open_control_panel_by_command_line(timestamp, arg) |
2282 | |
2283 | - self.assertEqual(appinfo.command_line, "%s %s" % (linux.CLIENT_COMMAND_LINE, arg)) |
2284 | + self.assertEqual( |
2285 | + appinfo.command_line, "%s %s" % (linux.CLIENT_COMMAND_LINE, arg)) |
2286 | self.assertEqual(appinfo.context.timestamp, timestamp) |
2287 | |
2288 | def test_open_uri(self): |
2289 | @@ -315,7 +320,9 @@ |
2290 | timestamp = time.time() |
2291 | data = [] |
2292 | |
2293 | - self.patch(self.sync_menu, "_open_control_panel_by_command_line", lambda t, a: data.append((t, a))) |
2294 | + self.patch( |
2295 | + self.sync_menu, "_open_control_panel_by_command_line", |
2296 | + lambda t, a: data.append((t, a))) |
2297 | self.sync_menu.open_share_file_tab(timestamp=timestamp) |
2298 | self.assertEqual(data, [(timestamp, "--switch-to share_links")]) |
2299 | |
2300 | @@ -324,7 +331,8 @@ |
2301 | timestamp = time.time() |
2302 | data = [] |
2303 | |
2304 | - self.patch(self.sync_menu, "_open_uri", lambda u, t: data.append((t, u))) |
2305 | + self.patch( |
2306 | + self.sync_menu, "_open_uri", lambda u, t: data.append((t, u))) |
2307 | self.sync_menu.open_go_to_web(timestamp=timestamp) |
2308 | self.assertEqual(data, [(timestamp, linux.DASHBOARD)]) |
2309 | |
2310 | @@ -333,16 +341,20 @@ |
2311 | timestamp = time.time() |
2312 | data = [] |
2313 | |
2314 | - self.patch(self.sync_menu, "_open_uri", lambda u, t: data.append((t, u))) |
2315 | + self.patch( |
2316 | + self.sync_menu, "_open_uri", lambda u, t: data.append((t, u))) |
2317 | self.sync_menu.open_ubuntu_one_folder(timestamp=timestamp) |
2318 | - self.assertEqual(data, [(timestamp, "file://" + self.syncdaemon_service.fake_root_path)]) |
2319 | + self.assertEqual( |
2320 | + data, |
2321 | + [(timestamp, "file://" + self.syncdaemon_service.fake_root_path)]) |
2322 | |
2323 | def test_get_help(self): |
2324 | """Check that the proper action is executed.""" |
2325 | timestamp = time.time() |
2326 | data = [] |
2327 | |
2328 | - self.patch(self.sync_menu, "_open_uri", lambda u, t: data.append((t, u))) |
2329 | + self.patch( |
2330 | + self.sync_menu, "_open_uri", lambda u, t: data.append((t, u))) |
2331 | self.sync_menu.open_web_help(timestamp=timestamp) |
2332 | self.assertEqual(data, [(timestamp, linux.HELP_LINK)]) |
2333 | |
2334 | @@ -351,7 +363,8 @@ |
2335 | timestamp = time.time() |
2336 | data = [] |
2337 | |
2338 | - self.patch(self.sync_menu, "_open_uri", lambda u, t: data.append((t, u))) |
2339 | + self.patch( |
2340 | + self.sync_menu, "_open_uri", lambda u, t: data.append((t, u))) |
2341 | self.sync_menu.open_get_more_storage(timestamp=timestamp) |
2342 | self.assertEqual(data, [(timestamp, linux.GET_STORAGE_LINK)]) |
2343 | |
2344 | @@ -368,8 +381,8 @@ |
2345 | self.assertEqual(len(children), 4) |
2346 | data.reverse() |
2347 | for itemM, itemD in zip(children, data): |
2348 | - self.assertEqual(itemM.property_get( |
2349 | - linux.Dbusmenu.MENUITEM_PROP_LABEL), itemD) |
2350 | + self.assertEqual( |
2351 | + itemM.property_get(linux.Dbusmenu.MENUITEM_PROP_LABEL), itemD) |
2352 | |
2353 | def test_only_progress(self): |
2354 | """Check that only progress items are loaded.""" |
2355 | @@ -583,4 +596,4 @@ |
2356 | self.sync_menu.sync_status_changed(False) |
2357 | self.assertFalse(self.sync_menu._connected) |
2358 | self.assertTrue(self.sync_menu._ignore_status_event) |
2359 | - self.assertTrue(self.sync_menu.app.data['paused']) |
2360 | \ No newline at end of file |
2361 | + self.assertTrue(self.sync_menu.app.data['paused']) |
2362 | |
2363 | === modified file 'ubuntuone/platform/tests/test_tools.py' |
2364 | --- ubuntuone/platform/tests/test_tools.py 2016-05-29 19:15:01 +0000 |
2365 | +++ ubuntuone/platform/tests/test_tools.py 2016-07-30 22:04:10 +0000 |
2366 | @@ -204,7 +204,8 @@ |
2367 | self.fs.fs = {mdid: mdobj, mdid2: mdobj2, mdid3: mdobj3} |
2368 | |
2369 | result = yield self.tool.search_files('file') |
2370 | - expected = [os.path.join(self.root_dir, 'path/to/file_test'), |
2371 | + expected = [ |
2372 | + os.path.join(self.root_dir, 'path/to/file_test'), |
2373 | os.path.join(self.root_dir, 'path/to/my_files')] |
2374 | self.assertEqual(result, expected) |
2375 | |
2376 | @@ -265,8 +266,6 @@ |
2377 | |
2378 | signal_ok = 'Foo' |
2379 | signal_error = 'Bar' |
2380 | - success_filter = lambda *a: True |
2381 | - error_filter = lambda *a: True |
2382 | |
2383 | target_signal = signal_ok |
2384 | target_filter = 'success_filter' |
2385 | @@ -274,10 +273,15 @@ |
2386 | @defer.inlineCallbacks |
2387 | def setUp(self): |
2388 | yield super(TestWaitForSignals, self).setUp() |
2389 | + self.success_filter = self.return_true |
2390 | + self.error_filter = self.return_true |
2391 | self.signals = defaultdict(list) |
2392 | self.patch(self.tool, 'connect_signal', self.connect_signal) |
2393 | self.patch(self.tool, 'disconnect_signal', self.disconnect_signal) |
2394 | |
2395 | + def return_true(self, *a): |
2396 | + return True |
2397 | + |
2398 | def connect_signal(self, signal_name, handler): |
2399 | """Fake signal connection.""" |
2400 | self.signals[signal_name].append(handler) |
2401 | @@ -295,8 +299,9 @@ |
2402 | @defer.inlineCallbacks |
2403 | def test_filter_yes(self): |
2404 | """Test emitting signal with filter returning True.""" |
2405 | - d = self.tool.wait_for_signals(self.signal_ok, self.signal_error, |
2406 | - **{self.target_filter: lambda *a: True}) |
2407 | + d = self.tool.wait_for_signals( |
2408 | + self.signal_ok, self.signal_error, |
2409 | + **{self.target_filter: lambda *a: True}) |
2410 | expected = object() |
2411 | self.emit_signal(self.target_signal, expected) |
2412 | |
2413 | @@ -309,8 +314,9 @@ |
2414 | |
2415 | def test_filter_no(self): |
2416 | """Test emitting signal with filter returning False.""" |
2417 | - d = self.tool.wait_for_signals(self.signal_ok, self.signal_error, |
2418 | - **{self.target_filter: lambda *a: False}) |
2419 | + d = self.tool.wait_for_signals( |
2420 | + self.signal_ok, self.signal_error, |
2421 | + **{self.target_filter: lambda *a: False}) |
2422 | expected = object() |
2423 | self.emit_signal(self.target_signal, expected) |
2424 | |
2425 | @@ -328,8 +334,9 @@ |
2426 | """Broken filter""" |
2427 | raise ValueError('DIE!!!!') |
2428 | |
2429 | - d = self.tool.wait_for_signals(self.signal_ok, self.signal_error, |
2430 | - **{self.target_filter: some_filter}) |
2431 | + d = self.tool.wait_for_signals( |
2432 | + self.signal_ok, self.signal_error, |
2433 | + **{self.target_filter: some_filter}) |
2434 | args = (object(), 123456789) |
2435 | self.emit_signal(self.target_signal, *args) |
2436 | |
2437 | @@ -358,8 +365,9 @@ |
2438 | @defer.inlineCallbacks |
2439 | def test_filter_yes(self): |
2440 | """Test emitting signal with filter returning True.""" |
2441 | - d = self.tool.wait_for_signals(self.signal_ok, self.signal_error, |
2442 | - **{self.target_filter: lambda *a: True}) |
2443 | + d = self.tool.wait_for_signals( |
2444 | + self.signal_ok, self.signal_error, |
2445 | + **{self.target_filter: lambda *a: True}) |
2446 | expected = object() |
2447 | self.emit_signal(self.target_signal, expected) |
2448 | |
2449 | @@ -418,21 +426,23 @@ |
2450 | def test_connect(self): |
2451 | """Test the connect method.""" |
2452 | self.assertEqual(self.main.state_manager.state, |
2453 | - states.StateManager.QUEUE_MANAGER) |
2454 | + states.StateManager.QUEUE_MANAGER) |
2455 | d = self.main.wait_for('SYS_USER_DISCONNECT') |
2456 | self.tool.disconnect() |
2457 | + |
2458 | def connect(r): |
2459 | d = self.main.wait_for('SYS_USER_CONNECT') |
2460 | self.tool.connect() |
2461 | d.addCallbacks(lambda x: x, self.fail) |
2462 | return d |
2463 | + |
2464 | d.addCallbacks(connect, self.fail) |
2465 | return d |
2466 | |
2467 | def test_disconnect(self): |
2468 | """Test the disconnect method.""" |
2469 | self.assertEqual(self.main.state_manager.state, |
2470 | - states.StateManager.QUEUE_MANAGER) |
2471 | + states.StateManager.QUEUE_MANAGER) |
2472 | d = self.main.wait_for('SYS_USER_DISCONNECT') |
2473 | self.tool.disconnect() |
2474 | d.addCallbacks(self.assertFalse, self.fail) |
2475 | |
2476 | === modified file 'ubuntuone/platform/tests/test_u1sdtool.py' |
2477 | --- ubuntuone/platform/tests/test_u1sdtool.py 2016-05-29 19:15:01 +0000 |
2478 | +++ ubuntuone/platform/tests/test_u1sdtool.py 2016-07-30 22:04:10 +0000 |
2479 | @@ -88,9 +88,9 @@ |
2480 | other_visible_name=u"ñoño", accepted=False, |
2481 | subscribed=False) |
2482 | yield self.main.vm.add_share(share) |
2483 | - expected = u"Shares list:\n id=share_id name=\xf1o\xf1o " + \ |
2484 | - "accepted=False subscribed=False access_level=View " + \ |
2485 | - "from=fake_user\n" |
2486 | + expected = ( |
2487 | + u"Shares list:\n id=share_id name=\xf1o\xf1o accepted=False " |
2488 | + u"subscribed=False access_level=View from=fake_user\n") |
2489 | result = yield self.tool.get_shares() |
2490 | show_shares(result, out) |
2491 | self.assertEqual(out.getvalue(), expected) |
2492 | @@ -115,15 +115,15 @@ |
2493 | # helper function, pylint: disable-msg=C0111 |
2494 | |
2495 | def fake_create_share(node_id, user, name, access_level, marker, path): |
2496 | - self.main.vm.handle_AQ_CREATE_SHARE_OK(share_id='share_id', |
2497 | - marker=marker) |
2498 | + self.main.vm.handle_AQ_CREATE_SHARE_OK( |
2499 | + share_id='share_id', marker=marker) |
2500 | self.main.action_q.create_share = fake_create_share |
2501 | self.main.vm.create_share(path, 'fake_user', 'shared_name', |
2502 | ACCESS_LEVEL_RO) |
2503 | out = StringIO() |
2504 | - expected = u"Shared list:\n id=share_id name=shared_name " + \ |
2505 | - "accepted=False access_level=View to=fake_user " + \ |
2506 | - "path=%s\n" % path.decode('utf-8') |
2507 | + expected = ( |
2508 | + u"Shared list:\n id=share_id name=shared_name accepted=False " |
2509 | + u"access_level=View to=fake_user path=%s\n" % path.decode('utf-8')) |
2510 | d = self.tool.list_shared() |
2511 | d.addCallback(lambda result: show_shared(result, out)) |
2512 | |
2513 | @@ -148,8 +148,7 @@ |
2514 | self.fs.set_node_id(path, "uuid1") |
2515 | mdobj = self.fs.get_by_mdid(mdid) |
2516 | self.fs.create_partial(mdobj.node_id, mdobj.share_id) |
2517 | - fh = self.fs.get_partial_for_writing(mdobj.node_id, |
2518 | - mdobj.share_id) |
2519 | + fh = self.fs.get_partial_for_writing(mdobj.node_id, mdobj.share_id) |
2520 | fh.write("foobar") |
2521 | fh.close() |
2522 | self.fs.commit_partial(mdobj.node_id, mdobj.share_id, "localhash") |
2523 | @@ -235,10 +234,10 @@ |
2524 | self.action_q.queue.waiting.append(fake_upload) |
2525 | |
2526 | out = StringIO() |
2527 | - expected = u"Current uploads:\n path: up_path\n " + \ |
2528 | - "deflated size: 100\n bytes written: 10\nCurrent " + \ |
2529 | - "downloads:\n path: down_path\n deflated size: " + \ |
2530 | - "10\n bytes read: 1\n" |
2531 | + expected = ( |
2532 | + u"Current uploads:\n path: up_path\n deflated size: 100\n " |
2533 | + u"bytes written: 10\nCurrent downloads:\n path: down_path\n " |
2534 | + u"deflated size: 10\n bytes read: 1\n") |
2535 | result = yield self.tool.get_current_uploads() |
2536 | show_uploads(result, out) |
2537 | |
2538 | @@ -296,10 +295,10 @@ |
2539 | |
2540 | out = StringIO() |
2541 | expected = ( |
2542 | - " FakeCommand(running=True, share_id='', " |
2543 | - "node_id='node1', path='foo', other='')\n" |
2544 | - " FakeCommand(running=False, share_id='', " |
2545 | - "node_id='node2', other='')\n" |
2546 | + " FakeCommand(running=True, share_id='', node_id='node1', " |
2547 | + "path='foo', other='')\n" |
2548 | + " FakeCommand(running=False, share_id='', node_id='node2', " |
2549 | + "other='')\n" |
2550 | ) |
2551 | |
2552 | result = yield self.tool.waiting() |
2553 | @@ -318,9 +317,9 @@ |
2554 | expected = ( |
2555 | "Warning: this option is deprecated! Use '--waiting' instead\n" |
2556 | " FakeCommand(running=True, share_id='', node_id='node1', " |
2557 | - "path='p', other='')\n" |
2558 | + "path='p', other='')\n" |
2559 | " FakeCommand(running=True, share_id='', node_id='node2', " |
2560 | - "other='')\n" |
2561 | + "other='')\n" |
2562 | ) |
2563 | |
2564 | result = yield self.tool.waiting_metadata() |
2565 | @@ -428,8 +427,9 @@ |
2566 | # sort the list |
2567 | dirty_nodes.sort(key=itemgetter('mdid')) |
2568 | show_dirty_nodes(dirty_nodes, out) |
2569 | - node_line_tpl = "mdid: %(mdid)s volume_id: %(share_id)s " + \ |
2570 | - "node_id: %(node_id)s is_dir: %(is_dir)s path: %(path)s\n" |
2571 | + node_line_tpl = ( |
2572 | + "mdid: %(mdid)s volume_id: %(share_id)s node_id: %(node_id)s " |
2573 | + "is_dir: %(is_dir)s path: %(path)s\n") |
2574 | if not empty: |
2575 | expected = " Dirty nodes:\n%s" |
2576 | lines = [] |
2577 | |
2578 | === modified file 'ubuntuone/platform/tests/tools/test_tools.py' |
2579 | --- ubuntuone/platform/tests/tools/test_tools.py 2012-10-09 15:34:55 +0000 |
2580 | +++ ubuntuone/platform/tests/tools/test_tools.py 2016-07-30 22:04:10 +0000 |
2581 | @@ -50,16 +50,17 @@ |
2582 | @defer.inlineCallbacks |
2583 | def setUp(self): |
2584 | yield super(TestSyncDaemonTool, self).setUp() |
2585 | - self.patch(perspective_broker.UbuntuOneClient, "connect", |
2586 | - lambda _: defer.Deferred()) |
2587 | + self.patch( |
2588 | + perspective_broker.UbuntuOneClient, "connect", |
2589 | + lambda _: defer.Deferred()) |
2590 | self.sdtool = perspective_broker.SyncDaemonToolProxy() |
2591 | self.calls = {} |
2592 | |
2593 | def test_call_after_connection(self): |
2594 | """Test the _call_after_connection method.""" |
2595 | collected_calls = [] |
2596 | - test_method = lambda *args: collected_calls.append(args) |
2597 | - test_method = self.sdtool._call_after_connection(test_method) |
2598 | + test_method = self.sdtool._call_after_connection( |
2599 | + lambda *args: collected_calls.append(args)) |
2600 | test_method(123) |
2601 | self.assertEqual(len(collected_calls), 0) |
2602 | self.sdtool.connected.callback("got connected!") |
2603 | @@ -68,12 +69,12 @@ |
2604 | def test_call_after_connection_connect(self): |
2605 | """Test execute connect in _call_after_connection method.""" |
2606 | self.patch(self.sdtool.client, "is_connected", lambda: False) |
2607 | - my_connect = lambda *args, **kwargs: operator.setitem( |
2608 | - self.calls, "connect", (args, kwargs)) |
2609 | - self.patch(self.sdtool.client, "connect", my_connect) |
2610 | + self.patch( |
2611 | + self.sdtool.client, "connect", |
2612 | + lambda *a, **kw: operator.setitem(self.calls, "connect", (a, kw))) |
2613 | collected_calls = [] |
2614 | - test_method = lambda *args: collected_calls.append(args) |
2615 | - test_method = self.sdtool._call_after_connection(test_method) |
2616 | + test_method = self.sdtool._call_after_connection( |
2617 | + lambda *args: collected_calls.append(args)) |
2618 | test_method(123) |
2619 | self.assertIn("connect", self.calls) |
2620 | self.assertEqual(self.calls["connect"], ((), {})) |
2621 | @@ -81,12 +82,12 @@ |
2622 | def test_call_after_connection_not_connect(self): |
2623 | """Test execute connect in _call_after_connection method.""" |
2624 | self.patch(self.sdtool.client, "is_connected", lambda: True) |
2625 | - my_connect = lambda *args, **kwargs: operator.setitem( |
2626 | - self.calls, "connect", (args, kwargs)) |
2627 | - self.patch(self.sdtool.client, "connect", my_connect) |
2628 | + self.patch( |
2629 | + self.sdtool.client, "connect", |
2630 | + lambda *a, **kw: operator.setitem(self.calls, "connect", (a, kw))) |
2631 | collected_calls = [] |
2632 | - test_method = lambda *args: collected_calls.append(args) |
2633 | - test_method = self.sdtool._call_after_connection(test_method) |
2634 | + test_method = self.sdtool._call_after_connection( |
2635 | + lambda *args: collected_calls.append(args)) |
2636 | test_method(123) |
2637 | self.assertNotIn("connect", self.calls) |
2638 | |
2639 | @@ -162,17 +163,19 @@ |
2640 | """Fake connect_signal call.""" |
2641 | self.connected_signals.append(('connect_signal', args, kwargs)) |
2642 | |
2643 | - self.patch(perspective_broker.SyncDaemonToolProxy, 'connect_signal', |
2644 | - connect_signal) |
2645 | + self.patch( |
2646 | + perspective_broker.SyncDaemonToolProxy, 'connect_signal', |
2647 | + connect_signal) |
2648 | |
2649 | def fake_reconnect(_): |
2650 | """Fake the reconnection of the client.""" |
2651 | self.reconnected = True |
2652 | |
2653 | - self.patch(perspective_broker.UbuntuOneClient, 'reconnect', |
2654 | - fake_reconnect) |
2655 | - self.patch(perspective_broker.UbuntuOneClient, 'connect', |
2656 | - lambda _: defer.succeed(True)) |
2657 | + self.patch( |
2658 | + perspective_broker.UbuntuOneClient, 'reconnect', fake_reconnect) |
2659 | + self.patch( |
2660 | + perspective_broker.UbuntuOneClient, 'connect', |
2661 | + lambda _: defer.succeed(True)) |
2662 | |
2663 | @defer.inlineCallbacks |
2664 | def test_reconnect_no_signals(self): |
2665 | @@ -252,7 +255,10 @@ |
2666 | data = [] |
2667 | |
2668 | signal_name = "PublicFilesList" |
2669 | - func = lambda *a: data.append(a) |
2670 | + |
2671 | + def func(*a): |
2672 | + return data.append(a) |
2673 | + |
2674 | self.sdtool.connect_signal(signal_name, func) |
2675 | self.sdtool.connect_signal(signal_name, func) |
2676 | |
2677 | @@ -280,4 +286,4 @@ |
2678 | data = [] |
2679 | self.sdtool.client.shares.on_share_changed_cb() |
2680 | self.assertEqual(data, []) |
2681 | - self.assertEqual(data2, expected) |
2682 | \ No newline at end of file |
2683 | + self.assertEqual(data2, expected) |
2684 | |
2685 | === modified file 'ubuntuone/platform/tests/windows/run_sdtool.py' |
2686 | --- ubuntuone/platform/tests/windows/run_sdtool.py 2012-04-09 20:07:05 +0000 |
2687 | +++ ubuntuone/platform/tests/windows/run_sdtool.py 2016-07-30 22:04:10 +0000 |
2688 | @@ -57,13 +57,14 @@ |
2689 | print '\tRoot dir: %s' % root_dir |
2690 | is_udf_autosubscribe_enabled = yield sdtool.is_udf_autosubscribe_enabled() |
2691 | print '\tAutosubscribe enabled: %s' % is_udf_autosubscribe_enabled |
2692 | - is_share_autosubscribe_enabled = yield sdtool.is_share_autosubscribe_enabled() |
2693 | + is_share_autosubscribe_enabled = ( |
2694 | + yield sdtool.is_share_autosubscribe_enabled()) |
2695 | print '\tAutosubscribe enabled: %s' % is_share_autosubscribe_enabled |
2696 | - is_show_all_notifications_enabled =\ |
2697 | - yield sdtool.is_show_all_notifications_enabled() |
2698 | + is_show_all_notifications_enabled = ( |
2699 | + yield sdtool.is_show_all_notifications_enabled()) |
2700 | print '\tShow all notifications: %s' % is_show_all_notifications_enabled |
2701 | reactor.stop() |
2702 | - |
2703 | + |
2704 | |
2705 | if __name__ == '__main__': |
2706 | reactor.callLater(0, print_test) |
2707 | |
2708 | === modified file 'ubuntuone/platform/tools/perspective_broker.py' |
2709 | --- ubuntuone/platform/tools/perspective_broker.py 2015-09-19 23:15:50 +0000 |
2710 | +++ ubuntuone/platform/tools/perspective_broker.py 2016-07-30 22:04:10 +0000 |
2711 | @@ -107,8 +107,8 @@ |
2712 | |
2713 | def _should_wrap(self, attr_name): |
2714 | """Check if this attribute should be wrapped.""" |
2715 | - return not (attr_name in SyncDaemonToolProxy._DONT_VERIFY_CONNECTED |
2716 | - or attr_name.startswith("_")) |
2717 | + return not (attr_name in SyncDaemonToolProxy._DONT_VERIFY_CONNECTED or |
2718 | + attr_name.startswith("_")) |
2719 | |
2720 | def __getattribute__(self, attr_name): |
2721 | """If the attribute is not special, verify the ipc connection.""" |
2722 | @@ -191,8 +191,9 @@ |
2723 | client_kind, callback = self._SIGNAL_MAPPING[signal_name] |
2724 | client = getattr(self.client, client_kind) |
2725 | if len(self.connected_signals[signal_name]) == 0: |
2726 | - f = lambda *args, **kw: self._handler(signal_name, *args, **kw) |
2727 | - setattr(client, callback, f) |
2728 | + setattr( |
2729 | + client, callback, |
2730 | + lambda *args, **kw: self._handler(signal_name, *args, **kw)) |
2731 | # do remember the connected signal in case we need to reconnect |
2732 | self.connected_signals[signal_name].add(handler) |
2733 | return handler |
2734 | |
2735 | === modified file 'ubuntuone/proxy/tests/__init__.py' |
2736 | --- ubuntuone/proxy/tests/__init__.py 2016-05-27 23:49:19 +0000 |
2737 | +++ ubuntuone/proxy/tests/__init__.py 2016-07-30 22:04:10 +0000 |
2738 | @@ -71,7 +71,6 @@ |
2739 | self.site = SaveSite(self.root) |
2740 | application = service.Application('web') |
2741 | self.service_collection = service.IServiceCollection(application) |
2742 | - #pylint: disable=E1101 |
2743 | self.tcpserver = internet.TCPServer(0, self.site) |
2744 | self.tcpserver.setServiceParent(self.service_collection) |
2745 | self.sslserver = internet.SSLServer(0, self.site, self.get_context()) |
2746 | @@ -95,19 +94,16 @@ |
2747 | |
2748 | def get_iri(self): |
2749 | """Build the iri for this mock server.""" |
2750 | - #pylint: disable=W0212 |
2751 | port_num = self.tcpserver._port.getHost().port |
2752 | return u"http://0.0.0.0:%d/" % port_num |
2753 | |
2754 | def get_ssl_iri(self): |
2755 | """Build the iri for the ssl mock server.""" |
2756 | - #pylint: disable=W0212 |
2757 | port_num = self.sslserver._port.getHost().port |
2758 | return u"https://0.0.0.0:%d/" % port_num |
2759 | |
2760 | def stop(self): |
2761 | """Shut it down.""" |
2762 | - #pylint: disable=E1101 |
2763 | if self.site.protocol.protocolInstance: |
2764 | self.site.protocol.protocolInstance.timeoutConnection() |
2765 | return self.service_collection.stopService() |
2766 | |
2767 | === modified file 'ubuntuone/proxy/tests/test_tunnel_client.py' |
2768 | --- ubuntuone/proxy/tests/test_tunnel_client.py 2016-05-29 19:15:01 +0000 |
2769 | +++ ubuntuone/proxy/tests/test_tunnel_client.py 2016-07-30 22:04:10 +0000 |
2770 | @@ -90,8 +90,8 @@ |
2771 | self.other_proto = SavingProtocol() |
2772 | other_factory = protocol.ClientFactory() |
2773 | other_factory.buildProtocol = lambda _addr: self.other_proto |
2774 | - tunnel_client_factory = tunnel_client.TunnelClientFactory(self.host, |
2775 | - self.port, other_factory, self.cookie) |
2776 | + tunnel_client_factory = tunnel_client.TunnelClientFactory( |
2777 | + self.host, self.port, other_factory, self.cookie) |
2778 | tunnel_client_proto = tunnel_client_factory.buildProtocol(fake_addr) |
2779 | tunnel_client_proto.transport = FakeTransport() |
2780 | tunnel_client_proto.connectionMade() |
2781 | @@ -195,8 +195,8 @@ |
2782 | self.ws = MockWebServer() |
2783 | self.addCleanup(self.ws.stop) |
2784 | self.dest_url = self.ws.get_iri().encode("utf-8") + SIMPLERESOURCE |
2785 | - self.dest_ssl_url = (self.ws.get_ssl_iri().encode("utf-8") + |
2786 | - SIMPLERESOURCE) |
2787 | + self.dest_ssl_url = ( |
2788 | + self.ws.get_ssl_iri().encode("utf-8") + SIMPLERESOURCE) |
2789 | self.cookie = FAKE_COOKIE |
2790 | self.tunnel_server = TunnelServer(self.cookie) |
2791 | self.addCleanup(self.tunnel_server.shutdown) |
2792 | @@ -215,8 +215,8 @@ |
2793 | @defer.inlineCallbacks |
2794 | def test_starts_tls_connection(self): |
2795 | """TLS is started after connecting; control passed to the client.""" |
2796 | - tunnel_client = TunnelClient("0.0.0.0", self.tunnel_server.port, |
2797 | - self.cookie) |
2798 | + tunnel_client = TunnelClient( |
2799 | + "0.0.0.0", self.tunnel_server.port, self.cookie) |
2800 | factory = client.HTTPClientFactory(self.dest_ssl_url) |
2801 | scheme, host, port, path = client._parse(self.dest_ssl_url) |
2802 | context_factory = ssl.ClientContextFactory() |
2803 | |
2804 | === modified file 'ubuntuone/proxy/tests/test_tunnel_server.py' |
2805 | --- ubuntuone/proxy/tests/test_tunnel_server.py 2016-05-29 19:15:01 +0000 |
2806 | +++ ubuntuone/proxy/tests/test_tunnel_server.py 2016-07-30 22:04:10 +0000 |
2807 | @@ -194,7 +194,7 @@ |
2808 | def connect(self, hostport): |
2809 | """Establish a connection with the other end.""" |
2810 | if (self.check_credentials and |
2811 | - self.protocol.proxy_credentials != FAKE_CREDS): |
2812 | + self.protocol.proxy_credentials != FAKE_CREDS): |
2813 | self.proxy_domain = "fake domain" |
2814 | return defer.fail(tunnel_server.ProxyAuthenticationError()) |
2815 | return self.connection_result |
2816 | @@ -211,7 +211,6 @@ |
2817 | """Reset this client.""" |
2818 | |
2819 | |
2820 | - |
2821 | class ServerTunnelProtocolTestCase(SquidTestCase): |
2822 | """Tests for the ServerTunnelProtocol.""" |
2823 | |
2824 | @@ -411,7 +410,9 @@ |
2825 | """Tests for the client that connects to the other side.""" |
2826 | |
2827 | timeout = 3 |
2828 | - get_proxy_settings = lambda _: {} |
2829 | + |
2830 | + def get_proxy_settings(self): |
2831 | + return {} |
2832 | |
2833 | @defer.inlineCallbacks |
2834 | def setUp(self): |
2835 | @@ -716,8 +717,8 @@ |
2836 | """The QtDbus mainloop is installed.""" |
2837 | self.patch(tunnel_server.sys, "platform", "linux123") |
2838 | installed = [] |
2839 | - self.patch(tunnel_server, "install_qt_dbus", |
2840 | - lambda: installed.append(None)) |
2841 | + self.patch( |
2842 | + tunnel_server, "install_qt_dbus", lambda: installed.append(None)) |
2843 | self.proxies_enabled = True |
2844 | tunnel_server.main(["example.com", "443"]) |
2845 | self.assertEqual(len(installed), 1) |
2846 | @@ -726,8 +727,8 @@ |
2847 | """The QtDbus mainloop is installed.""" |
2848 | self.patch(tunnel_server.sys, "platform", "win98") |
2849 | installed = [] |
2850 | - self.patch(tunnel_server, "install_qt_dbus", |
2851 | - lambda: installed.append(None)) |
2852 | + self.patch( |
2853 | + tunnel_server, "install_qt_dbus", lambda: installed.append(None)) |
2854 | self.proxies_enabled = True |
2855 | tunnel_server.main(["example.com", "443"]) |
2856 | self.assertEqual(len(installed), 0) |
2857 | @@ -735,7 +736,8 @@ |
2858 | def test_fix_turkish_locale_called(self): |
2859 | """The fix_turkish_locale function is called, always.""" |
2860 | called = [] |
2861 | - self.patch(tunnel_server, "fix_turkish_locale", |
2862 | - lambda *args, **kwargs: called.append((args, kwargs))) |
2863 | + self.patch( |
2864 | + tunnel_server, "fix_turkish_locale", |
2865 | + lambda *args, **kwargs: called.append((args, kwargs))) |
2866 | tunnel_server.main(["localhost", "443"]) |
2867 | self.assertEqual(called, [((), {})]) |
2868 | |
2869 | === modified file 'ubuntuone/proxy/tunnel_server.py' |
2870 | --- ubuntuone/proxy/tunnel_server.py 2016-05-29 00:50:05 +0000 |
2871 | +++ ubuntuone/proxy/tunnel_server.py 2016-07-30 22:04:10 +0000 |
2872 | @@ -60,10 +60,6 @@ |
2873 | |
2874 | from ubuntuone.clientdefs import NAME |
2875 | from ubuntuone.keyring import Keyring |
2876 | -try: |
2877 | - from ubuntuone.utils.locale import fix_turkish_locale |
2878 | -except ImportError: |
2879 | - fix_turkish_locale = lambda: None |
2880 | from ubuntuone.utils.webclient import gsettings |
2881 | from ubuntuone.proxy.common import ( |
2882 | BaseTunnelProtocol, |
2883 | @@ -73,6 +69,12 @@ |
2884 | TUNNEL_PORT_LABEL, |
2885 | ) |
2886 | from ubuntuone.proxy.logger import logger |
2887 | +try: |
2888 | + from ubuntuone.utils.locale import fix_turkish_locale |
2889 | +except ImportError: |
2890 | + def fix_turkish_locale(): |
2891 | + return None |
2892 | + |
2893 | |
2894 | DEFAULT_CODE = 500 |
2895 | DEFAULT_DESCRIPTION = "Connection error" |
2896 | |
2897 | === modified file 'ubuntuone/status/aggregator.py' |
2898 | --- ubuntuone/status/aggregator.py 2016-05-29 16:05:27 +0000 |
2899 | +++ ubuntuone/status/aggregator.py 2016-07-30 22:04:10 +0000 |
2900 | @@ -47,15 +47,15 @@ |
2901 | ) |
2902 | from ubuntuone.platform.launcher import Launcher, DummyLauncher |
2903 | |
2904 | + |
2905 | +def Q_(string): |
2906 | + return gettext.dgettext(GETTEXT_PACKAGE, string) |
2907 | + |
2908 | +FINAL_COMPLETED = Q_("File synchronization completed.") |
2909 | +NEW_UDFS_SENDER = Q_("New cloud folder(s) available") |
2910 | ONE_DAY = 24 * 60 * 60 |
2911 | -Q_ = lambda string: gettext.dgettext(GETTEXT_PACKAGE, string) |
2912 | - |
2913 | -NEW_UDFS_SENDER = Q_("New cloud folder(s) available") |
2914 | -FINAL_COMPLETED = Q_("File synchronization completed.") |
2915 | - |
2916 | PROGRESS_COMPLETED = Q_("%(percentage_completed)d%% completed.") |
2917 | FILE_SYNC_IN_PROGRESS = Q_("File synchronization in progress") |
2918 | - |
2919 | SHARE_QUOTA_EXCEEDED = Q_( |
2920 | 'There is no available space on the folder:\n"%s" shared by %s') |
2921 | |
2922 | |
2923 | === modified file 'ubuntuone/status/tests/test_aggregator.py' |
2924 | --- ubuntuone/status/tests/test_aggregator.py 2016-05-29 16:05:27 +0000 |
2925 | +++ ubuntuone/status/tests/test_aggregator.py 2016-07-30 22:04:10 +0000 |
2926 | @@ -126,8 +126,8 @@ |
2927 | """Initialize this test instance.""" |
2928 | yield super(DeadlineTimerTestCase, self).setUp() |
2929 | self.clock = PatchedClock() |
2930 | - self.timer = aggregator.DeadlineTimer(delay=0.5, timeout=3.0, |
2931 | - clock=self.clock) |
2932 | + self.timer = aggregator.DeadlineTimer( |
2933 | + delay=0.5, timeout=3.0, clock=self.clock) |
2934 | |
2935 | def test_fired_if_initial_timeout_exceeded(self): |
2936 | """Timer is fired if the initial timeout is exceeded.""" |
2937 | @@ -173,8 +173,8 @@ |
2938 | |
2939 | def send_notification(self, title, message, icon=None, append=False): |
2940 | """Show a notification to the user.""" |
2941 | - if (self.notification_switch is not None |
2942 | - and not self.notification_switch.enabled): |
2943 | + if (self.notification_switch is not None and |
2944 | + not self.notification_switch.enabled): |
2945 | return |
2946 | self.notification = (title, message, icon, append) |
2947 | self.notifications_shown.append((title, message, icon, append)) |
2948 | @@ -406,22 +406,22 @@ |
2949 | |
2950 | def test_idle_state(self): |
2951 | """The idle state is verified.""" |
2952 | - self.assertEqual(type(self.bubble.state), |
2953 | - aggregator.FileDiscoveryIdleState) |
2954 | + self.assertEqual( |
2955 | + type(self.bubble.state), aggregator.FileDiscoveryIdleState) |
2956 | |
2957 | def test_gathering_state(self): |
2958 | """The gathering state is set after the first file is found.""" |
2959 | self.bubble.new_file_found() |
2960 | - self.assertEqual(type(self.bubble.state), |
2961 | - aggregator.FileDiscoveryGatheringState) |
2962 | + self.assertEqual( |
2963 | + type(self.bubble.state), aggregator.FileDiscoveryGatheringState) |
2964 | |
2965 | def test_update_state(self): |
2966 | """When the gathering state finishes, the update state is started.""" |
2967 | self.bubble.connection_made() |
2968 | self.bubble.new_file_found() |
2969 | self.clock.advance(self.initial_delay) |
2970 | - self.assertEqual(type(self.bubble.state), |
2971 | - aggregator.FileDiscoveryUpdateState) |
2972 | + self.assertEqual( |
2973 | + type(self.bubble.state), aggregator.FileDiscoveryUpdateState) |
2974 | |
2975 | def test_sleeping_state(self): |
2976 | """When the update state finishes, the sleeping state is started.""" |
2977 | @@ -429,8 +429,8 @@ |
2978 | self.bubble.new_file_found() |
2979 | self.clock.advance(self.initial_delay) |
2980 | self.clock.advance(self.updates_timeout) |
2981 | - self.assertEqual(type(self.bubble.state), |
2982 | - aggregator.FileDiscoverySleepState) |
2983 | + self.assertEqual( |
2984 | + type(self.bubble.state), aggregator.FileDiscoverySleepState) |
2985 | |
2986 | def test_back_to_initial_state(self): |
2987 | """When the last state finishes, we return to the idle state.""" |
2988 | @@ -439,8 +439,8 @@ |
2989 | self.clock.advance(self.initial_delay) |
2990 | self.clock.advance(self.updates_timeout) |
2991 | self.clock.advance(self.sleep_delay) |
2992 | - self.assertEqual(type(self.bubble.state), |
2993 | - aggregator.FileDiscoveryIdleState) |
2994 | + self.assertEqual( |
2995 | + type(self.bubble.state), aggregator.FileDiscoveryIdleState) |
2996 | |
2997 | def test_new_files_found_while_updating_not_shown_immediately(self): |
2998 | """New files found in the updating state are not shown immediately.""" |
2999 | @@ -808,7 +808,8 @@ |
3000 | self.assertEqual(transfers, expected) |
3001 | |
3002 | menu_data = self.listener.menu_data() |
3003 | - self.assertEqual(menu_data, |
3004 | + self.assertEqual( |
3005 | + menu_data, |
3006 | {UPLOADING: [], |
3007 | DOWNLOADING: [], |
3008 | RECENT_TRANSFERS: expected}) |
3009 | @@ -822,7 +823,8 @@ |
3010 | expected = [('testfile.txt', 200, 0)] |
3011 | self.assertEqual(uploading, expected) |
3012 | menu_data = self.listener.menu_data() |
3013 | - self.assertEqual(menu_data, |
3014 | + self.assertEqual( |
3015 | + menu_data, |
3016 | {UPLOADING: expected, |
3017 | DOWNLOADING: [], |
3018 | RECENT_TRANSFERS: []}) |
3019 | @@ -838,7 +840,8 @@ |
3020 | self.assertEqual(uploading, expected) |
3021 | |
3022 | menu_data = self.listener.menu_data() |
3023 | - self.assertEqual(menu_data, |
3024 | + self.assertEqual( |
3025 | + menu_data, |
3026 | {UPLOADING: expected, |
3027 | DOWNLOADING: [], |
3028 | RECENT_TRANSFERS: []}) |
3029 | @@ -852,7 +855,8 @@ |
3030 | expected = [('testfile.txt', 200, 0)] |
3031 | self.assertEqual(downloading, expected) |
3032 | menu_data = self.listener.menu_data() |
3033 | - self.assertEqual(menu_data, |
3034 | + self.assertEqual( |
3035 | + menu_data, |
3036 | {DOWNLOADING: expected, |
3037 | UPLOADING: [], |
3038 | RECENT_TRANSFERS: []}) |
3039 | @@ -868,7 +872,8 @@ |
3040 | self.assertEqual(downloading, expected) |
3041 | |
3042 | menu_data = self.listener.menu_data() |
3043 | - self.assertEqual(menu_data, |
3044 | + self.assertEqual( |
3045 | + menu_data, |
3046 | {DOWNLOADING: expected, |
3047 | UPLOADING: [], |
3048 | RECENT_TRANSFERS: []}) |
3049 | @@ -1372,8 +1377,8 @@ |
3050 | |
3051 | def test_register_progress_listener_fail(self): |
3052 | """Check that register listener fails with not Callable objects.""" |
3053 | - self.assertRaises(TypeError, |
3054 | - self.aggregator.register_progress_listener, []) |
3055 | + self.assertRaises( |
3056 | + TypeError, self.aggregator.register_progress_listener, []) |
3057 | self.assertEqual(len(self.aggregator.progress_listeners), 0) |
3058 | |
3059 | def test_register_connection_listener(self): |
3060 | @@ -1387,8 +1392,8 @@ |
3061 | |
3062 | def test_register_connection_listener_fail(self): |
3063 | """Check that register listener fails with not Callable objects.""" |
3064 | - self.assertRaises(TypeError, |
3065 | - self.aggregator.register_connection_listener, []) |
3066 | + self.assertRaises( |
3067 | + TypeError, self.aggregator.register_connection_listener, []) |
3068 | self.assertEqual(len(self.aggregator.connection_listeners), 0) |
3069 | |
3070 | def test_connection_notifications(self): |
3071 | @@ -1735,7 +1740,7 @@ |
3072 | # the progress bar is now shown |
3073 | self.assertTrue(sf.aggregator.progress_bar.visible) |
3074 | notifications_shown = (sf.aggregator.file_discovery_bubble. |
3075 | - notification.notifications_shown) |
3076 | + notification.notifications_shown) |
3077 | # no notifications shown yet |
3078 | self.assertEqual(0, len(notifications_shown)) |
3079 | clock.advance(aggregator.FileDiscoveryGatheringState.initial_delay) |
3080 | @@ -1776,7 +1781,7 @@ |
3081 | # the progress bar is now shown |
3082 | self.assertTrue(sf.aggregator.progress_bar.visible) |
3083 | notifications_shown = (sf.aggregator.file_discovery_bubble. |
3084 | - notification.notifications_shown) |
3085 | + notification.notifications_shown) |
3086 | # no notifications shown, never |
3087 | self.assertEqual(0, len(notifications_shown)) |
3088 | clock.advance(aggregator.FileDiscoveryGatheringState.initial_delay) |
3089 | |
3090 | === modified file 'ubuntuone/syncdaemon/action_queue.py' |
3091 | --- ubuntuone/syncdaemon/action_queue.py 2016-05-29 00:50:05 +0000 |
3092 | +++ ubuntuone/syncdaemon/action_queue.py 2016-07-30 22:04:10 +0000 |
3093 | @@ -2625,8 +2625,8 @@ |
3094 | self.log.debug('semaphore acquired') |
3095 | |
3096 | fsm = self.action_queue.main.fs |
3097 | - fileobj_factory = lambda: fsm.open_file(self.mdid) |
3098 | - yield self.action_queue.zip_queue.zip(self, fileobj_factory) |
3099 | + yield self.action_queue.zip_queue.zip( |
3100 | + self, lambda: fsm.open_file(self.mdid)) |
3101 | |
3102 | def finish(self): |
3103 | """Release the semaphore if already acquired.""" |
3104 | |
3105 | === modified file 'ubuntuone/syncdaemon/config.py' |
3106 | --- ubuntuone/syncdaemon/config.py 2015-09-19 23:15:50 +0000 |
3107 | +++ ubuntuone/syncdaemon/config.py 2016-07-30 22:04:10 +0000 |
3108 | @@ -90,7 +90,9 @@ |
3109 | # this object is the shared config |
3110 | _user_config = None |
3111 | |
3112 | -path_from_unix = lambda path: path.replace('/', os.path.sep) |
3113 | + |
3114 | +def path_from_unix(path): |
3115 | + return path.replace('/', os.path.sep) |
3116 | |
3117 | |
3118 | def home_dir_parser(value): |
3119 | |
3120 | === modified file 'ubuntuone/syncdaemon/filesystem_manager.py' |
3121 | --- ubuntuone/syncdaemon/filesystem_manager.py 2016-05-29 00:50:05 +0000 |
3122 | +++ ubuntuone/syncdaemon/filesystem_manager.py 2016-07-30 22:04:10 +0000 |
3123 | @@ -416,8 +416,8 @@ |
3124 | base_path.endswith('%s/Shared With Me' % NAME): |
3125 | realpath = os.path.realpath(mdobj['path']) |
3126 | mdobj['path'] = realpath |
3127 | - if (base_path.startswith('/') and base_path.endswith(NAME) |
3128 | - and name == 'My Files'): |
3129 | + if (base_path.startswith('/') and base_path.endswith(NAME) and |
3130 | + name == 'My Files'): |
3131 | mdobj['path'] = base_path |
3132 | |
3133 | def _migrate_trash_to_tritcask(self): |
3134 | |
3135 | === modified file 'ubuntuone/syncdaemon/fsm/fsm.py' |
3136 | --- ubuntuone/syncdaemon/fsm/fsm.py 2015-09-19 23:15:50 +0000 |
3137 | +++ ubuntuone/syncdaemon/fsm/fsm.py 2016-07-30 22:04:10 +0000 |
3138 | @@ -37,22 +37,6 @@ |
3139 | |
3140 | from ubuntuone.syncdaemon import logger |
3141 | |
3142 | -try: |
3143 | - product = itertools.product |
3144 | -except AttributeError: |
3145 | - # taken from python docs for 2.6 |
3146 | - |
3147 | - def product(*args, **kwds): |
3148 | - "cartesian product" |
3149 | - # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy |
3150 | - # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111 |
3151 | - pools = map(tuple, args) * kwds.get('repeat', 1) |
3152 | - result = [[]] |
3153 | - for pool in pools: |
3154 | - result = [x + [y] for x in result for y in pool] |
3155 | - for prod in result: |
3156 | - yield tuple(prod) |
3157 | - |
3158 | |
3159 | def hash_dict(d): |
3160 | "return a hashable representation of the dict" |
3161 | @@ -86,7 +70,7 @@ |
3162 | values = [x[1] for x in items] |
3163 | |
3164 | possible_states = [dict(zip(keys, state)) |
3165 | - for state in product(*values)] |
3166 | + for state in itertools.product(*values)] |
3167 | return possible_states |
3168 | |
3169 | |
3170 | |
3171 | === modified file 'ubuntuone/syncdaemon/fsm/fsm_parser.py' |
3172 | --- ubuntuone/syncdaemon/fsm/fsm_parser.py 2015-09-19 23:15:50 +0000 |
3173 | +++ ubuntuone/syncdaemon/fsm/fsm_parser.py 2016-07-30 22:04:10 +0000 |
3174 | @@ -75,15 +75,21 @@ |
3175 | import optparse |
3176 | import pprint |
3177 | |
3178 | -if "HAS_OOFFICE" in os.environ: |
3179 | - # we have to do this because python-uno breaks mocker |
3180 | +try: |
3181 | import uno |
3182 | from com.sun.star.connection import NoConnectException |
3183 | from com.sun.star.lang import IndexOutOfBoundsException |
3184 | from com.sun.star.container import NoSuchElementException |
3185 | from com.sun.star.beans import PropertyValue |
3186 | from unohelper import systemPathToFileUrl, absolutize |
3187 | - |
3188 | +except ImportError: |
3189 | + has_oo_bindings = False |
3190 | +else: |
3191 | + has_oo_bindings = True |
3192 | + |
3193 | + |
3194 | +if has_oo_bindings: |
3195 | + # we have to do this because python-uno breaks mocker |
3196 | CONNECT_MSG = """ |
3197 | Need to start OpenOffice! Use a command like: |
3198 | ooffice -accept="socket,host=localhost,port=2002;urp;" |
3199 | @@ -249,12 +255,14 @@ |
3200 | vars[rows[2][i]] = value.strip() |
3201 | return vars |
3202 | |
3203 | - build_state_from_row = lambda row: get_var_value_from_row_part( |
3204 | - row, state_idx, param_idx) |
3205 | - build_params_from_row = lambda row: get_var_value_from_row_part( |
3206 | - row, param_idx, action_idx) |
3207 | - build_state_out_from_row = lambda row: get_var_value_from_row_part( |
3208 | - row, state_out_idx, row_size) |
3209 | + def build_state_from_row(row): |
3210 | + return get_var_value_from_row_part(row, state_idx, param_idx) |
3211 | + |
3212 | + def build_params_from_row(row): |
3213 | + return get_var_value_from_row_part(row, param_idx, action_idx) |
3214 | + |
3215 | + def build_state_out_from_row(row): |
3216 | + return get_var_value_from_row_part(row, state_out_idx, row_size) |
3217 | |
3218 | # generate states_vars |
3219 | descs = rows[1][state_idx:param_idx] |
3220 | @@ -292,29 +300,31 @@ |
3221 | return dict(events=events, state_vars=state_vars, |
3222 | parameters=parameters, invalid=invalid) |
3223 | |
3224 | - def main(): |
3225 | - 'a simple interface to test the parser' |
3226 | - usage = "usage: %prog [options] SPREADSHEET" |
3227 | - |
3228 | - parser = optparse.OptionParser(usage=usage) |
3229 | - parser.add_option("-o", "--output", dest="output", |
3230 | - help="write result to FILE", metavar="FILE") |
3231 | - |
3232 | - (options, args) = parser.parse_args() |
3233 | - if len(args) != 1: |
3234 | - parser.print_help() |
3235 | - print "SPREADSHEET required" |
3236 | - return |
3237 | - |
3238 | - result = parse(args[0]) |
3239 | - if options.output: |
3240 | - f = open(options.output, "w") |
3241 | - data = pprint.pformat(result) |
3242 | - f.write("\"\"\"This is a generated python file.\"\"\"\n" |
3243 | - "state_machine = %s""" % data) |
3244 | - f.close() |
3245 | - else: |
3246 | - pprint.pprint(result) |
3247 | - |
3248 | - if __name__ == "__main__": |
3249 | - main() |
3250 | + |
3251 | +def main(): |
3252 | + """A simple interface to test the parser.""" |
3253 | + usage = "usage: %prog [options] SPREADSHEET" |
3254 | + |
3255 | + parser = optparse.OptionParser(usage=usage) |
3256 | + parser.add_option("-o", "--output", dest="output", |
3257 | + help="write result to FILE", metavar="FILE") |
3258 | + |
3259 | + (options, args) = parser.parse_args() |
3260 | + if len(args) != 1: |
3261 | + parser.print_help() |
3262 | + print "SPREADSHEET required" |
3263 | + return |
3264 | + |
3265 | + result = parse(args[0]) |
3266 | + if options.output: |
3267 | + f = open(options.output, "w") |
3268 | + data = pprint.pformat(result) |
3269 | + f.write("\"\"\"This is a generated python file.\"\"\"\n" |
3270 | + "state_machine = %s""" % data) |
3271 | + f.close() |
3272 | + else: |
3273 | + pprint.pprint(result) |
3274 | + |
3275 | + |
3276 | +if __name__ == "__main__": |
3277 | + main() |
3278 | |
3279 | === modified file 'ubuntuone/syncdaemon/main.py' |
3280 | --- ubuntuone/syncdaemon/main.py 2016-05-29 00:50:05 +0000 |
3281 | +++ ubuntuone/syncdaemon/main.py 2016-07-30 22:04:10 +0000 |
3282 | @@ -183,16 +183,16 @@ |
3283 | |
3284 | def callback(): |
3285 | """Event queue is empty.""" |
3286 | - if not (self.state_manager.state == StateManager.QUEUE_MANAGER |
3287 | - and self.state_manager.queues.state == QueueManager.IDLE |
3288 | - and not self.action_q.queue |
3289 | - and self.hash_q.empty()): |
3290 | - self.logger.debug("I can't attain Nirvana yet. [state: %s; " |
3291 | - "queue: %d; hash: %d]", |
3292 | - self.state_manager, |
3293 | - len(self.action_q.queue), |
3294 | - len(self.hash_q)) |
3295 | + state = self.state_manager.state |
3296 | + if not (self.state_manager.queues.state == QueueManager.IDLE and |
3297 | + state == StateManager.QUEUE_MANAGER and |
3298 | + not self.action_q.queue and self.hash_q.empty()): |
3299 | + self.logger.debug( |
3300 | + "I can't reach Nirvana yet [state: %s queue: %d hash: %d]", |
3301 | + self.state_manager, len(self.action_q.queue), |
3302 | + len(self.hash_q)) |
3303 | return |
3304 | + |
3305 | self.logger.debug("Nirvana reached!! I'm a Buddha") |
3306 | self.event_q.remove_empty_event_queue_callback(callback) |
3307 | d.callback(True) |
3308 | |
3309 | === modified file 'ubuntuone/syncdaemon/tests/fsm/test_fsm.py' |
3310 | --- ubuntuone/syncdaemon/tests/fsm/test_fsm.py 2016-05-27 23:49:19 +0000 |
3311 | +++ ubuntuone/syncdaemon/tests/fsm/test_fsm.py 2016-07-30 22:04:10 +0000 |
3312 | @@ -35,10 +35,18 @@ |
3313 | |
3314 | from ubuntuone.syncdaemon.fsm import fsm |
3315 | |
3316 | +try: |
3317 | + import uno |
3318 | +except ImportError: |
3319 | + uno = None |
3320 | + |
3321 | + |
3322 | def p(name): |
3323 | """Make a full path from here.""" |
3324 | return os.path.join(os.path.dirname(__file__), name) |
3325 | |
3326 | + |
3327 | +@unittest.skipIf(uno is None, 'python-uno not available') |
3328 | class TestParse(unittest.TestCase): |
3329 | """Test fsm validation.""" |
3330 | |
3331 | @@ -61,26 +69,25 @@ |
3332 | """Test error on duplicate transition.""" |
3333 | f = fsm.StateMachine(p("test_transition_twice.ods")) |
3334 | self.assertRaises(fsm.ValidationFailed, f.validate) |
3335 | - self.assertEquals(len(f.errors), 1) |
3336 | + self.assertEqual(len(f.errors), 1) |
3337 | |
3338 | def test_missing_source_state(self): |
3339 | """Test incomplete state transition coverage.""" |
3340 | f = fsm.StateMachine(p("test_missing_source_state.ods")) |
3341 | self.assertRaises(fsm.ValidationFailed, f.validate) |
3342 | - self.assertEquals(len(f.errors), 1) |
3343 | + self.assertEqual(len(f.errors), 1) |
3344 | |
3345 | def test_missing_param_values(self): |
3346 | """Test incomplete param transition coverage.""" |
3347 | f = fsm.StateMachine(p("test_missing_param_values.ods")) |
3348 | self.assertRaises(fsm.ValidationFailed, f.validate) |
3349 | - self.assertEquals(len(f.errors), 4) |
3350 | - |
3351 | + self.assertEqual(len(f.errors), 4) |
3352 | |
3353 | def test_two_missing_source_state(self): |
3354 | """Test incomplete state transition coverage.""" |
3355 | f = fsm.StateMachine(p("test_two_missing_source_state.ods")) |
3356 | self.assertRaises(fsm.ValidationFailed, f.validate) |
3357 | - self.assertEquals(len(f.errors), 2) |
3358 | + self.assertEqual(len(f.errors), 2) |
3359 | |
3360 | def test_star_event(self): |
3361 | """Test expansion of one star in event columns.""" |
3362 | @@ -116,7 +123,7 @@ |
3363 | """Test expansion of stars that cover too much.""" |
3364 | f = fsm.StateMachine(p("test_star_event_repeat.ods")) |
3365 | self.assertRaises(fsm.ValidationFailed, f.validate) |
3366 | - self.assertEquals(len(f.errors), 1) |
3367 | + self.assertEqual(len(f.errors), 1) |
3368 | |
3369 | def test_out_equal(self): |
3370 | """Test expansion of "=" in state out.""" |
3371 | @@ -125,7 +132,7 @@ |
3372 | for s in f.states.values(): |
3373 | for t in s.transitions.values(): |
3374 | for k in t.source: |
3375 | - self.assertEquals(t.source[k], t.target[k]) |
3376 | + self.assertEqual(t.source[k], t.target[k]) |
3377 | |
3378 | def test_out_equal_star(self): |
3379 | """Test expansion of "=" in state out.""" |
3380 | @@ -134,8 +141,9 @@ |
3381 | for s in f.states.values(): |
3382 | for t in s.transitions.values(): |
3383 | for k in t.source: |
3384 | - self.assertEquals(t.source[k], t.target[k], |
3385 | - "on transition %s target is %s"%(t, t.target)) |
3386 | + self.assertEqual( |
3387 | + t.source[k], t.target[k], |
3388 | + "on transition %s target is %s" % (t, t.target)) |
3389 | |
3390 | def test_equal_wrong_places(self): |
3391 | """make sure "=" are not allowed on state or params.""" |
3392 | @@ -144,7 +152,7 @@ |
3393 | # this should be two errors |
3394 | # but more errors happen as there is no clear interpretation of |
3395 | # the table in this case |
3396 | - self.assertEquals(len(f.errors), 5) |
3397 | + self.assertEqual(len(f.errors), 5) |
3398 | |
3399 | def test_param_na(self): |
3400 | """Test that na param columns are ignored.""" |
3401 | @@ -162,13 +170,3 @@ |
3402 | # the transition |
3403 | t = "EVENT_1", fsm.hash_dict(dict(MV1="T", MV2="T")) |
3404 | self.assertFalse(t in s.transitions) |
3405 | - |
3406 | - |
3407 | -def test_suite(): |
3408 | - if "HAS_OOFFICE" in os.environ: |
3409 | - return unittest.TestLoader().loadTestsFromName(__name__) |
3410 | - else: |
3411 | - return unittest.TestSuite() |
3412 | - |
3413 | -if __name__ == "__main__": |
3414 | - unittest.main() |
3415 | |
3416 | === modified file 'ubuntuone/syncdaemon/tests/fsm/test_fsm_run.py' |
3417 | --- ubuntuone/syncdaemon/tests/fsm/test_fsm_run.py 2012-04-09 20:07:05 +0000 |
3418 | +++ ubuntuone/syncdaemon/tests/fsm/test_fsm_run.py 2016-07-30 22:04:10 +0000 |
3419 | @@ -35,32 +35,36 @@ |
3420 | |
3421 | from ubuntuone.syncdaemon.fsm import fsm |
3422 | |
3423 | + |
3424 | def p(name): |
3425 | - """make a full path from here.""" |
3426 | + """Make a full path from here.""" |
3427 | if "HAS_OOFICE" in os.environ: |
3428 | return os.path.join(os.path.dirname(__file__), name+".ods") |
3429 | else: |
3430 | return os.path.join(os.path.dirname(__file__), name+".py") |
3431 | |
3432 | + |
3433 | class TestRun(unittest.TestCase): |
3434 | - 'Test fsm running' |
3435 | + """Test fsm running.""" |
3436 | |
3437 | def test_hello(self): |
3438 | - 'test running a hello world machine' |
3439 | + """Test running a hello world machine.""" |
3440 | f = fsm.StateMachine(p("test_run_hello")) |
3441 | f.validate() |
3442 | - |
3443 | result = [] |
3444 | + |
3445 | def make(out, outstates): |
3446 | - "make action_func functions" |
3447 | + """Make action_func functions.""" |
3448 | + |
3449 | def maker(self, event, params): |
3450 | "inner" |
3451 | result.append(out) |
3452 | self.state = outstates[int(params["MV1"])-1] |
3453 | + |
3454 | return maker |
3455 | |
3456 | class HelloRunner(fsm.StateMachineRunner): |
3457 | - "our implementation of the runner" |
3458 | + """Our implementation of the runner.""" |
3459 | state = "H" |
3460 | H = make("h", "EEE") |
3461 | E = make("e", "LLL") |
3462 | @@ -72,15 +76,10 @@ |
3463 | newline = make("\n", "HHH") |
3464 | |
3465 | def get_state_values(self): |
3466 | - "return the stateval of this fsm." |
3467 | + """Return the stateval of this fsm.""" |
3468 | return dict(SV1=self.state) |
3469 | |
3470 | - |
3471 | runner = HelloRunner(f) |
3472 | for i in [1, 1, 1, 2, 1, 2, 2, 3, 3, 1, 1]: |
3473 | runner.on_event("EVENT_1", dict(MV1=str(i))) |
3474 | - self.assertEquals("helloworld\n", "".join(result)) |
3475 | - |
3476 | - |
3477 | -if __name__ == "__main__": |
3478 | - unittest.main() |
3479 | + self.assertEqual("helloworld\n", "".join(result)) |
3480 | |
3481 | === modified file 'ubuntuone/syncdaemon/tests/test_action_queue.py' |
3482 | --- ubuntuone/syncdaemon/tests/test_action_queue.py 2016-05-27 23:49:19 +0000 |
3483 | +++ ubuntuone/syncdaemon/tests/test_action_queue.py 2016-07-30 22:04:10 +0000 |
3484 | @@ -1,4 +1,4 @@ |
3485 | -#-*- coding: utf-8 -*- |
3486 | +# -*- coding: utf-8 -*- |
3487 | # |
3488 | # Copyright 2009-2015 Canonical Ltd. |
3489 | # |
3490 | @@ -324,7 +324,8 @@ |
3491 | def user_connect(self): |
3492 | """User requested to connect to server.""" |
3493 | auth_info = dict(username='test_username', password='test_password') |
3494 | - self.action_queue.event_queue.push('SYS_USER_CONNECT', access_token=auth_info) |
3495 | + self.action_queue.event_queue.push( |
3496 | + 'SYS_USER_CONNECT', access_token=auth_info) |
3497 | |
3498 | |
3499 | class BasicTests(BasicTestCase): |
3500 | @@ -2830,7 +2831,10 @@ |
3501 | |
3502 | class MyDownload(Download): |
3503 | """Just to allow monkeypatching.""" |
3504 | - sync = lambda s: None |
3505 | + |
3506 | + def sync(s): |
3507 | + return None |
3508 | + |
3509 | self.command = MyDownload(self.rq, share_id='a_share_id', |
3510 | node_id='a_node_id', |
3511 | server_hash='server_hash', mdid=self.mdid) |
3512 | @@ -3691,11 +3695,9 @@ |
3513 | self.command.pause() |
3514 | # make it run again |
3515 | self.command._run() |
3516 | - try: |
3517 | - upload_id = self.command.action_queue.client.called[1][2]['upload_id'] |
3518 | - self.assertEqual(upload_id, 'hola') |
3519 | - finally: |
3520 | - self.command.action_queue.client = None |
3521 | + upload_id = self.command.action_queue.client.called[1][2]['upload_id'] |
3522 | + self.assertEqual(upload_id, 'hola') |
3523 | + self.addCleanup(setattr, self.command.action_queue, 'client', None) |
3524 | |
3525 | def test_uses_rb_flags_when_creating_temp_file(self): |
3526 | """Check that the 'b' flag is used for the temporary file.""" |
3527 | @@ -3961,8 +3963,9 @@ |
3528 | """handle_SYS_USER_CONNECT stores credentials.""" |
3529 | self.assertEqual(self.action_queue.credentials, {}) |
3530 | self.user_connect() |
3531 | - self.assertEqual(self.action_queue.credentials, |
3532 | - {'password': 'test_password', 'username': 'test_username'}) |
3533 | + self.assertEqual( |
3534 | + self.action_queue.credentials, |
3535 | + {'password': 'test_password', 'username': 'test_username'}) |
3536 | |
3537 | |
3538 | class SpecificException(Exception): |
3539 | @@ -4820,7 +4823,10 @@ |
3540 | def test_path_locking(self): |
3541 | """Test that it acquires correctly the path lock.""" |
3542 | t = [] |
3543 | - fake_acquire = lambda s, *a, **k: t.extend((a, k)) or defer.succeed(None) |
3544 | + |
3545 | + def fake_acquire(s, *a, **k): |
3546 | + return t.extend((a, k)) or defer.succeed(None) |
3547 | + |
3548 | self.patch(PathLockingTree, 'acquire', fake_acquire) |
3549 | cmd = Move(self.rq, VOLUME, 'node', 'o_parent', 'n_parent', 'n_name', |
3550 | os.path.join(os.path.sep, 'path', 'from'), |
3551 | @@ -5060,7 +5066,8 @@ |
3552 | request = client.Authenticate(self.action_queue.client, |
3553 | {'dummy_token': 'credentials'}) |
3554 | request.session_id = str(uuid.uuid4()) |
3555 | - self.action_queue.client.simple_authenticate = lambda *args: defer.succeed(request) |
3556 | + self.action_queue.client.simple_authenticate = ( |
3557 | + lambda *args: defer.succeed(request)) |
3558 | |
3559 | yield self.action_queue.authenticate() |
3560 | |
3561 | @@ -5083,7 +5090,8 @@ |
3562 | yield self.action_queue.authenticate() |
3563 | self.assertEqual(len(called), 1) |
3564 | metadata = called[0][0][2] |
3565 | - expected_metadata = {'platform': platform, 'version': clientdefs.VERSION} |
3566 | + expected_metadata = { |
3567 | + 'platform': platform, 'version': clientdefs.VERSION} |
3568 | self.assertEqual(metadata, expected_metadata) |
3569 | |
3570 | |
3571 | @@ -5855,7 +5863,9 @@ |
3572 | """Fake object for the tests.""" |
3573 | log = logging.getLogger("ubuntuone.SyncDaemon.ActionQueue") |
3574 | log.setLevel(logger.TRACE) |
3575 | - ping = lambda self: defer.Deferred() |
3576 | + |
3577 | + def ping(self): |
3578 | + return defer.Deferred() |
3579 | |
3580 | self.fake_aqp = FakeActionQueueProtocol() |
3581 | self.handler = MementoHandler() |
3582 | |
3583 | === modified file 'ubuntuone/syncdaemon/tests/test_config.py' |
3584 | --- ubuntuone/syncdaemon/tests/test_config.py 2012-10-17 06:13:52 +0000 |
3585 | +++ ubuntuone/syncdaemon/tests/test_config.py 2016-07-30 22:04:10 +0000 |
3586 | @@ -48,7 +48,7 @@ |
3587 | |
3588 | |
3589 | class TestConfigBasic(BaseTwistedTestCase): |
3590 | - """Basic _Config object tests""" |
3591 | + """Basic _Config object tests.""" |
3592 | |
3593 | @defer.inlineCallbacks |
3594 | def setUp(self): |
3595 | @@ -56,32 +56,30 @@ |
3596 | self.test_root = self.mktemp() |
3597 | |
3598 | def assertThrottlingSection(self, expected, current, on, read, write): |
3599 | - """Assert for equality two ConfigParser and against the on, read and |
3600 | - write args |
3601 | - """ |
3602 | - self.assertEquals(expected.getboolean(config.THROTTLING, 'on'), on) |
3603 | - self.assertEquals(expected.getint(config.THROTTLING, 'read_limit'), |
3604 | - read) |
3605 | - self.assertEquals(expected.getint(config.THROTTLING, 'write_limit'), |
3606 | - write) |
3607 | - self.assertEquals(expected.getboolean(config.THROTTLING, 'on'), |
3608 | - current.get_throttling()) |
3609 | - self.assertEquals(expected.getint(config.THROTTLING, 'read_limit'), |
3610 | - current.get_throttling_read_limit()) |
3611 | - self.assertEquals(expected.getint(config.THROTTLING, 'write_limit'), |
3612 | - current.get_throttling_write_limit()) |
3613 | + """Assert equality for two ConfigParser.""" |
3614 | + self.assertEqual(expected.getboolean(config.THROTTLING, 'on'), on) |
3615 | + self.assertEqual( |
3616 | + expected.getint(config.THROTTLING, 'read_limit'), read) |
3617 | + self.assertEqual( |
3618 | + expected.getint(config.THROTTLING, 'write_limit'), write) |
3619 | + self.assertEqual(expected.getboolean(config.THROTTLING, 'on'), |
3620 | + current.get_throttling()) |
3621 | + self.assertEqual(expected.getint(config.THROTTLING, 'read_limit'), |
3622 | + current.get_throttling_read_limit()) |
3623 | + self.assertEqual(expected.getint(config.THROTTLING, 'write_limit'), |
3624 | + current.get_throttling_write_limit()) |
3625 | |
3626 | def test_load_empty(self): |
3627 | - """test loading the a non-existent config file""" |
3628 | + """Test loading a non-existent config file.""" |
3629 | conf_file = os.path.join(self.test_root, 'test_missing_config.conf') |
3630 | # create the config object with an empty config file |
3631 | conf = config._Config(conf_file) |
3632 | - self.assertEquals(False, conf.get_throttling()) |
3633 | - self.assertEquals(2097152, conf.get_throttling_read_limit()) |
3634 | - self.assertEquals(2097152, conf.get_throttling_write_limit()) |
3635 | + self.assertEqual(False, conf.get_throttling()) |
3636 | + self.assertEqual(2097152, conf.get_throttling_read_limit()) |
3637 | + self.assertEqual(2097152, conf.get_throttling_write_limit()) |
3638 | |
3639 | def test_load_basic(self): |
3640 | - """test loading the config file containing only the throttling values""" |
3641 | + """Test loading the config file with only the throttling values.""" |
3642 | conf_file = os.path.join(self.test_root, 'test_load_config.conf') |
3643 | # write some throttling values to the config file |
3644 | with open_file(conf_file, 'w') as fp: |
3645 | @@ -90,12 +88,12 @@ |
3646 | fp.write('read_limit = 1000\n') |
3647 | fp.write('write_limit = 200\n') |
3648 | conf = config._Config(conf_file) |
3649 | - self.assertEquals(True, conf.get_throttling()) |
3650 | - self.assertEquals(1000, conf.get_throttling_read_limit()) |
3651 | - self.assertEquals(200, conf.get_throttling_write_limit()) |
3652 | + self.assertEqual(True, conf.get_throttling()) |
3653 | + self.assertEqual(1000, conf.get_throttling_read_limit()) |
3654 | + self.assertEqual(200, conf.get_throttling_write_limit()) |
3655 | |
3656 | def test_load_extra_data(self): |
3657 | - """test loading the a config file with other sections too""" |
3658 | + """Test loading a config file with other sections too.""" |
3659 | conf_file = os.path.join(self.test_root, 'test_load_extra_config.conf') |
3660 | # write some throttling values to the config file |
3661 | with open_file(conf_file, 'w') as fp: |
3662 | @@ -108,12 +106,12 @@ |
3663 | fp.write('read_limit = 1000\n') |
3664 | fp.write('write_limit = 200\n') |
3665 | conf = config._Config(conf_file) |
3666 | - self.assertEquals(True, conf.get_throttling()) |
3667 | - self.assertEquals(1000, conf.get_throttling_read_limit()) |
3668 | - self.assertEquals(200, conf.get_throttling_write_limit()) |
3669 | + self.assertEqual(True, conf.get_throttling()) |
3670 | + self.assertEqual(1000, conf.get_throttling_read_limit()) |
3671 | + self.assertEqual(200, conf.get_throttling_write_limit()) |
3672 | |
3673 | def test_write_new(self): |
3674 | - """test writing the throttling section to a new config file""" |
3675 | + """Test writing the throttling section to a new config file.""" |
3676 | conf_file = os.path.join(self.test_root, 'test_write_new_config.conf') |
3677 | self.assertFalse(path_exists(conf_file)) |
3678 | conf = config._Config(conf_file) |
3679 | @@ -127,7 +125,7 @@ |
3680 | self.assertThrottlingSection(conf_1, conf, True, 1000, 100) |
3681 | |
3682 | def test_write_existing(self): |
3683 | - """test writing the throttling section to a existing config file""" |
3684 | + """Test writing the throttling section to an existing config file.""" |
3685 | conf_file = os.path.join(self.test_root, |
3686 | 'test_write_existing_config.conf') |
3687 | # write some throttling values to the config file |
3688 | @@ -148,10 +146,9 @@ |
3689 | self.assertThrottlingSection(conf_1, conf, True, 2000, 200) |
3690 | |
3691 | def test_write_extra(self): |
3692 | - """test writing the throttling section back to the config file, |
3693 | - including extra sections |
3694 | - """ |
3695 | - conf_file = os.path.join(self.test_root, 'test_write_extra_config.conf') |
3696 | + """Writing the throttling back to the file, with extra sections.""" |
3697 | + conf_file = os.path.join( |
3698 | + self.test_root, 'test_write_extra_config.conf') |
3699 | # write some throttling values to the config file |
3700 | with open_file(conf_file, 'w') as fp: |
3701 | fp.write('[__main__]\n') |
3702 | @@ -172,15 +169,13 @@ |
3703 | conf_1 = ConfigParser() |
3704 | conf_1.read(conf_file) |
3705 | self.assertThrottlingSection(conf_1, conf, True, 3000, 300) |
3706 | - self.assertEquals(conf_1.get('__main__', 'log_level'), |
3707 | - conf.get('__main__', 'log_level')) |
3708 | - self.assertEquals(conf_1.getboolean('__main__', 'disable_ssl_verify'), |
3709 | - conf.getboolean('__main__', 'disable_ssl_verify')) |
3710 | + self.assertEqual(conf_1.get('__main__', 'log_level'), |
3711 | + conf.get('__main__', 'log_level')) |
3712 | + self.assertEqual(conf_1.getboolean('__main__', 'disable_ssl_verify'), |
3713 | + conf.getboolean('__main__', 'disable_ssl_verify')) |
3714 | |
3715 | def test_write_existing_partial(self): |
3716 | - """test writing a partially updated throttling section |
3717 | - to a existing config file |
3718 | - """ |
3719 | + """Writing a partially updated throttling section to an existing file.""" |
3720 | conf_file = os.path.join(self.test_root, |
3721 | 'test_write_existing_config.conf') |
3722 | # write some throttling values to the config file |
3723 | @@ -199,7 +194,7 @@ |
3724 | self.assertThrottlingSection(conf_1, conf, False, 1000, 100) |
3725 | |
3726 | def test_load_negative_limits(self): |
3727 | - """test loading the config file with negative read/write limits""" |
3728 | + """Test loading the config file with negative read/write limits.""" |
3729 | conf_file = os.path.join(self.test_root, 'test_load_config.conf') |
3730 | # write some throttling values to the config file |
3731 | with open_file(conf_file, 'w') as fp: |
3732 | @@ -208,12 +203,12 @@ |
3733 | fp.write('read_limit = -1\n') |
3734 | fp.write('write_limit = -1\n') |
3735 | conf = config._Config(conf_file) |
3736 | - self.assertEquals(True, conf.get_throttling()) |
3737 | - self.assertEquals(None, conf.get_throttling_read_limit()) |
3738 | - self.assertEquals(None, conf.get_throttling_write_limit()) |
3739 | + self.assertEqual(True, conf.get_throttling()) |
3740 | + self.assertEqual(None, conf.get_throttling_read_limit()) |
3741 | + self.assertEqual(None, conf.get_throttling_write_limit()) |
3742 | |
3743 | def test_load_partial_config(self): |
3744 | - """test loading a partial config file and fallback to defaults""" |
3745 | + """Test loading a partial config file and fallback to defaults.""" |
3746 | conf_file = os.path.join(self.test_root, 'test_load_config.conf') |
3747 | # write some throttling values to the config file |
3748 | with open_file(conf_file, 'w') as fp: |
3749 | @@ -221,12 +216,12 @@ |
3750 | fp.write('on = True\n') |
3751 | fp.write('read_limit = 1\n') |
3752 | conf = config._Config(conf_file) |
3753 | - self.assertEquals(True, conf.get_throttling()) |
3754 | - self.assertEquals(1, conf.get_throttling_read_limit()) |
3755 | - self.assertEquals(2097152, conf.get_throttling_write_limit()) |
3756 | + self.assertEqual(True, conf.get_throttling()) |
3757 | + self.assertEqual(1, conf.get_throttling_read_limit()) |
3758 | + self.assertEqual(2097152, conf.get_throttling_write_limit()) |
3759 | |
3760 | def test_override(self): |
3761 | - """test loading the config file containing only the throttling values""" |
3762 | + """Test loading the config file with only the throttling values.""" |
3763 | conf_file = os.path.join(self.test_root, 'test_load_config.conf') |
3764 | # write some throttling values to the config file |
3765 | with open_file(conf_file, 'w') as fp: |
3766 | @@ -238,10 +233,10 @@ |
3767 | conf_orig = config._Config(conf_file) |
3768 | overridden_opts = [('bandwidth_throttling', 'on', False)] |
3769 | conf.override_options(overridden_opts) |
3770 | - self.assertEquals(False, conf.get_throttling()) |
3771 | + self.assertEqual(False, conf.get_throttling()) |
3772 | self.assertFalse(conf.get_throttling() == conf_orig.get_throttling()) |
3773 | - self.assertEquals(1000, conf.get_throttling_read_limit()) |
3774 | - self.assertEquals(200, conf.get_throttling_write_limit()) |
3775 | + self.assertEqual(1000, conf.get_throttling_read_limit()) |
3776 | + self.assertEqual(200, conf.get_throttling_write_limit()) |
3777 | conf.save() |
3778 | # load the config in a barebone ConfigParser and check |
3779 | conf_1 = ConfigParser() |
3780 | @@ -295,7 +290,7 @@ |
3781 | conf_orig.get_udf_autosubscribe()) |
3782 | conf.save() |
3783 | conf_1 = config._Config(conf_file) |
3784 | - self.assertEquals(True, conf_1.get_udf_autosubscribe()) |
3785 | + self.assertEqual(True, conf_1.get_udf_autosubscribe()) |
3786 | |
3787 | def test_load_share_autosubscribe(self): |
3788 | """Test load/set/override of share_autosubscribe config value.""" |
3789 | @@ -337,7 +332,7 @@ |
3790 | conf_orig.get_share_autosubscribe()) |
3791 | conf.save() |
3792 | conf_1 = config._Config(conf_file) |
3793 | - self.assertEquals(True, conf_1.get_share_autosubscribe()) |
3794 | + self.assertEqual(True, conf_1.get_share_autosubscribe()) |
3795 | |
3796 | def test_load_autoconnect(self): |
3797 | """Test load/set/override of autoconnect config value.""" |
3798 | @@ -385,7 +380,7 @@ |
3799 | conf_orig.get_autoconnect()) |
3800 | conf.save() |
3801 | conf_1 = config._Config(conf_file) |
3802 | - self.assertEquals(True, conf_1.get_autoconnect()) |
3803 | + self.assertEqual(True, conf_1.get_autoconnect()) |
3804 | |
3805 | def test_load_show_all_notifications(self): |
3806 | """Test load/set/override of show_all_notifications config value.""" |
3807 | @@ -433,8 +428,7 @@ |
3808 | conf_orig.get_show_all_notifications()) |
3809 | conf.save() |
3810 | conf_1 = config._Config(conf_file) |
3811 | - self.assertEquals(True, conf_1.get_show_all_notifications()) |
3812 | - |
3813 | + self.assertEqual(True, conf_1.get_show_all_notifications()) |
3814 | |
3815 | def test_get_simult_transfers(self): |
3816 | """Get simult transfers.""" |
3817 | @@ -504,8 +498,8 @@ |
3818 | os.makedirs(fake_path) |
3819 | with open(os.path.join(fake_path, config.CONFIG_FILE), "w") as f: |
3820 | f.write("this is a fake config file") |
3821 | - fake_load_config_paths = lambda _: [fake_path.encode("utf8")] |
3822 | - self.patch(config, "load_config_paths", fake_load_config_paths) |
3823 | + self.patch( |
3824 | + config, "load_config_paths", lambda _: [fake_path.encode("utf8")]) |
3825 | config_files = config.get_config_files() |
3826 | branch_config = os.path.join(fake_path, config.CONFIG_FILE) |
3827 | self.assertIn(branch_config, config_files) |
3828 | @@ -515,30 +509,31 @@ |
3829 | config_files = [os.path.normpath(p) for p in config.get_config_files()] |
3830 | rootdir = os.environ['ROOTDIR'] |
3831 | branch_config = os.path.join(rootdir, "data", config.CONFIG_FILE) |
3832 | - branch_logging_config = os.path.join(rootdir, "data", config.CONFIG_LOGS) |
3833 | + branch_logging_config = os.path.join( |
3834 | + rootdir, "data", config.CONFIG_LOGS) |
3835 | self.assertIn(branch_config, config_files) |
3836 | - self.assertIn(branch_logging_config, config_files) |
3837 | - |
3838 | - |
3839 | + self.assertIn(branch_logging_config, config_files) |
3840 | + |
3841 | + |
3842 | class ConfigglueParsersTests(BaseTwistedTestCase): |
3843 | """Tests for our custom configglue parsers.""" |
3844 | |
3845 | def test_throttling_limit_parser(self): |
3846 | - """Test throttling_limit_parser""" |
3847 | + """Test throttling_limit_parser.""" |
3848 | good_value = '20480' |
3849 | unset_value = '-1' |
3850 | bad_value = 'hola' |
3851 | invalid_value = None |
3852 | zero_value = '0' |
3853 | parser = config.throttling_limit_parser |
3854 | - self.assertEquals(20480, parser(good_value)) |
3855 | - self.assertEquals(None, parser(unset_value)) |
3856 | + self.assertEqual(20480, parser(good_value)) |
3857 | + self.assertEqual(None, parser(unset_value)) |
3858 | self.assertRaises(ValueError, parser, bad_value) |
3859 | self.assertRaises(TypeError, parser, invalid_value) |
3860 | - self.assertEquals(None, parser(zero_value)) |
3861 | + self.assertEqual(None, parser(zero_value)) |
3862 | |
3863 | def test_log_level_parser(self): |
3864 | - """Test log_level_parser""" |
3865 | + """Test log_level_parser.""" |
3866 | good_value = 'INFO' |
3867 | bad_value = 'hola' |
3868 | invalid_value = None |
3869 | @@ -599,7 +594,7 @@ |
3870 | |
3871 | |
3872 | class SyncDaemonConfigParserTests(BaseTwistedTestCase): |
3873 | - """Tests for SyncDaemonConfigParser""" |
3874 | + """Tests for SyncDaemonConfigParser.""" |
3875 | |
3876 | @defer.inlineCallbacks |
3877 | def setUp(self): |
3878 | @@ -623,10 +618,10 @@ |
3879 | self.assertTrue(path_exists(conf_file)) |
3880 | self.cp.read([conf_file]) |
3881 | self.cp.parse_all() |
3882 | - self.assertEquals(self.cp.get('logging', 'level').value, 10) |
3883 | + self.assertEqual(self.cp.get('logging', 'level').value, 10) |
3884 | |
3885 | def test_log_level_new_config(self): |
3886 | - """Test log_level upgrade hook with new config""" |
3887 | + """Test log_level upgrade hook with new config.""" |
3888 | conf_file = os.path.join(self.test_root, 'test_new_config.conf') |
3889 | # write some throttling values to the config file |
3890 | with open_file(conf_file, 'w') as fp: |
3891 | @@ -635,7 +630,7 @@ |
3892 | self.assertTrue(path_exists(conf_file)) |
3893 | self.cp.read([conf_file]) |
3894 | self.cp.parse_all() |
3895 | - self.assertEquals(self.cp.get('logging', 'level').value, 10) |
3896 | + self.assertEqual(self.cp.get('logging', 'level').value, 10) |
3897 | |
3898 | def test_log_level_old_and_new_config(self): |
3899 | """Test log_level upgrade hook with a mixed config.""" |
3900 | @@ -650,10 +645,10 @@ |
3901 | self.assertTrue(path_exists(conf_file)) |
3902 | self.cp.read([conf_file]) |
3903 | self.cp.parse_all() |
3904 | - self.assertEquals(self.cp.get('logging', 'level').value, logging.ERROR) |
3905 | + self.assertEqual(self.cp.get('logging', 'level').value, logging.ERROR) |
3906 | |
3907 | def test_old_default_config(self): |
3908 | - """Test log_level upgrade hook with an old default config""" |
3909 | + """Test log_level upgrade hook with an old default config.""" |
3910 | self.cp.read(config.get_config_files()[0]) |
3911 | # fake an old config |
3912 | value = self.cp.get('logging', 'level.default') |
3913 | @@ -668,10 +663,10 @@ |
3914 | # parse it |
3915 | self.cp.parse_all() |
3916 | new_value = self.cp.get('logging', 'level') |
3917 | - self.assertEquals(new_value.value, new_value.parser(value)) |
3918 | + self.assertEqual(new_value.value, new_value.parser(value)) |
3919 | |
3920 | def test_add_upgrade_hook(self): |
3921 | - """Test add_upgrade_hook method""" |
3922 | + """Test add_upgrade_hook method.""" |
3923 | self.cp.add_upgrade_hook('foo', 'bar', lambda x: None) |
3924 | self.assertIn(('foo', 'bar'), self.cp.upgrade_hooks) |
3925 | # try to add the same upgrade_hook |
3926 | @@ -687,8 +682,7 @@ |
3927 | self.assertTrue(path_exists(conf_file)) |
3928 | self.cp.read([conf_file]) |
3929 | self.cp.parse_all() |
3930 | - self.assertEquals(self.cp.get('__main__', 'ignore').value, |
3931 | - [r'.*\.pyc']) |
3932 | + self.assertEqual(self.cp.get('__main__', 'ignore').value, [r'.*\.pyc']) |
3933 | |
3934 | def test_ignore_two(self): |
3935 | """Test ignore files config, two regexes.""" |
3936 | @@ -700,8 +694,8 @@ |
3937 | self.assertTrue(path_exists(conf_file)) |
3938 | self.cp.read([conf_file]) |
3939 | self.cp.parse_all() |
3940 | - self.assertEquals(self.cp.get('__main__', 'ignore').value, |
3941 | - ['.*\\.pyc', '.*\\.sw[opnx]']) |
3942 | + self.assertEqual(self.cp.get('__main__', 'ignore').value, |
3943 | + ['.*\\.pyc', '.*\\.sw[opnx]']) |
3944 | |
3945 | def test_fs_monitor_not_default(self): |
3946 | """Test get monitor.""" |
3947 | @@ -713,8 +707,8 @@ |
3948 | self.assertTrue(path_exists(conf_file)) |
3949 | self.cp.read([conf_file]) |
3950 | self.cp.parse_all() |
3951 | - self.assertEquals(self.cp.get('__main__', 'fs_monitor').value, |
3952 | - monitor_id) |
3953 | + self.assertEqual( |
3954 | + self.cp.get('__main__', 'fs_monitor').value, monitor_id) |
3955 | |
3956 | def test_use_trash_default(self): |
3957 | """Test default configuration for use_trash.""" |
3958 | |
3959 | === modified file 'ubuntuone/syncdaemon/tests/test_eq_inotify.py' |
3960 | --- ubuntuone/syncdaemon/tests/test_eq_inotify.py 2016-05-29 19:15:01 +0000 |
3961 | +++ ubuntuone/syncdaemon/tests/test_eq_inotify.py 2016-07-30 22:04:10 +0000 |
3962 | @@ -312,9 +312,8 @@ |
3963 | testdir = os.path.join(self.root_dir, "foo") |
3964 | make_dir(testdir) |
3965 | |
3966 | - # helper class, pylint: disable-msg=C0111 |
3967 | class HitMe(object): |
3968 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
3969 | + |
3970 | def handle_FS_DIR_DELETE(innerself, path): |
3971 | if path != "foobar": |
3972 | self.finished_error("received a wrong path") |
3973 | @@ -353,6 +352,7 @@ |
3974 | def freeze_commit(): |
3975 | """Release and check result.""" |
3976 | d = self.eq.freeze_commit([("FS_DIR_DELETE", "foobar")]) |
3977 | + |
3978 | def check(dirty): |
3979 | """check dirty""" |
3980 | if not dirty: |
3981 | @@ -379,22 +379,20 @@ |
3982 | testfile = os.path.join(testdir, "bar") |
3983 | make_dir(testdir) |
3984 | |
3985 | - # helper class, pylint: disable-msg=C0111 |
3986 | class HitMe(object): |
3987 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
3988 | + |
3989 | def handle_FS_DIR_DELETE(innerself, path): |
3990 | if path != "foobar": |
3991 | self.finished_error("received a wrong path") |
3992 | else: |
3993 | self.finished_ok() |
3994 | |
3995 | - |
3996 | def freeze_rollback(): |
3997 | """release with handcrafted event and check result.""" |
3998 | self.eq.freeze_rollback() |
3999 | self.eq.freeze_begin(testdir) |
4000 | - reactor.callLater(.1, |
4001 | - self.eq.freeze_commit, [("FS_DIR_DELETE", "foobar")]) |
4002 | + reactor.callLater( |
4003 | + .1, self.eq.freeze_commit, [("FS_DIR_DELETE", "foobar")]) |
4004 | |
4005 | # set up everything and freeze |
4006 | yield self.eq.add_watch(testdir) |
4007 | @@ -417,9 +415,8 @@ |
4008 | make_dir(testdir) |
4009 | testfile = os.path.join(self.root_dir, "bar") |
4010 | |
4011 | - # helper class, pylint: disable-msg=C0111 |
4012 | class HitMe(object): |
4013 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4014 | + |
4015 | def __init__(innerself): |
4016 | innerself.hist = [] |
4017 | |
4018 | @@ -833,7 +830,7 @@ |
4019 | |
4020 | def test_move_udf_ancestor(self): |
4021 | """UDF is unsubscribed on ancestor move.""" |
4022 | - path = self.udf.ancestors[-2] # an ancestor common to both UDFs |
4023 | + path = self.udf.ancestors[-2] # an ancestor common to both UDFs |
4024 | # generate IN_MOVED_FROM and IN_MOVED_TO |
4025 | newpath = path + '.old' # no unicode, paths are always a byte sequence |
4026 | |
4027 | @@ -845,6 +842,7 @@ |
4028 | assert path_exists(newpath) |
4029 | |
4030 | unsubscribed = [] |
4031 | + |
4032 | def check(): |
4033 | """Check.""" |
4034 | self.assertEqual(len(unsubscribed), 2) |
4035 | @@ -861,9 +859,11 @@ |
4036 | self._deferred.callback(True) |
4037 | |
4038 | original = self.eq.fs.vm.unsubscribe_udf |
4039 | + |
4040 | def unsubsc(uid): |
4041 | original(uid) |
4042 | unsubscribed.append(uid) |
4043 | + |
4044 | self.patch(self.eq.fs.vm, 'unsubscribe_udf', unsubsc) |
4045 | |
4046 | reactor.callLater(.1, check) |
4047 | @@ -878,18 +878,21 @@ |
4048 | assert path_exists(newpath) |
4049 | |
4050 | unsubscribed = [] |
4051 | + |
4052 | def check(): |
4053 | """Check.""" |
4054 | self.assertEqual(len(unsubscribed), 1) |
4055 | uid = unsubscribed[0] |
4056 | self.assertEqual(uid, self.udf.id, "wrong UDF removed!") |
4057 | - self.assertNotIn(self.udf.path, self.eq.monitor._ancestors_watchs, |
4058 | - 'watch must be removed') |
4059 | - self.assertEqual(False, |
4060 | - self.eq.fs.vm.udfs[self.udf.id].subscribed) |
4061 | + self.assertNotIn( |
4062 | + self.udf.path, self.eq.monitor._ancestors_watchs, |
4063 | + 'watch must be removed') |
4064 | + self.assertEqual( |
4065 | + False, self.eq.fs.vm.udfs[self.udf.id].subscribed) |
4066 | self._deferred.callback(True) |
4067 | |
4068 | original = self.eq.fs.vm.unsubscribe_udf |
4069 | + |
4070 | def unsubsc(uid): |
4071 | original(uid) |
4072 | unsubscribed.append(uid) |
4073 | @@ -945,6 +948,7 @@ |
4074 | # only the parent, as the other ancestors are shared with other UDFs |
4075 | # and should not be removed |
4076 | expected = os.path.dirname(self.udf.path) |
4077 | + |
4078 | def check(): |
4079 | self.assertEqual([expected], removed_watches, |
4080 | "Removed watches don't match the expected") |
4081 | @@ -958,6 +962,7 @@ |
4082 | """Remove the watches of the ancestors in an unsubscription.""" |
4083 | removed_watches = [] |
4084 | original = self.eq.monitor.rm_watch |
4085 | + |
4086 | def remove_watch(path): |
4087 | """Store the path.""" |
4088 | original(path) |
4089 | @@ -968,6 +973,7 @@ |
4090 | # only the parent, as the other ancestors are shared with other UDFs |
4091 | # and should not be removed, and the path of the udf itself |
4092 | expected = [os.path.dirname(self.udf.path), self.udf.path] |
4093 | + |
4094 | def check(): |
4095 | self.assertEqual(sorted(expected), sorted(removed_watches), |
4096 | "Removed watches don't match the expected") |
4097 | @@ -981,6 +987,7 @@ |
4098 | """Mix of unsubscription and further renaming.""" |
4099 | removed_watches = [] |
4100 | original = self.eq.monitor.rm_watch |
4101 | + |
4102 | def remove_watch(path): |
4103 | """Store the path.""" |
4104 | original(path) |
4105 | @@ -989,14 +996,15 @@ |
4106 | self.patch(self.eq.monitor, 'rm_watch', remove_watch) |
4107 | |
4108 | # all should be removed |
4109 | - expected = list(set(self.udf.ancestors) | set(self.udf2.ancestors)) + [ |
4110 | - self.udf.path, self.udf2.path] |
4111 | + expected = list(set(self.udf.ancestors) | set(self.udf2.ancestors)) |
4112 | + expected += [self.udf.path, self.udf2.path] |
4113 | + |
4114 | def check(): |
4115 | self.assertEqual(sorted(expected), sorted(removed_watches), |
4116 | "Removed watches don't match the expected") |
4117 | self._deferred.callback(True) |
4118 | |
4119 | - path = self.udf.ancestors[-2] # an ancestor common to both UDFs |
4120 | + path = self.udf.ancestors[-2] # an ancestor common to both UDFs |
4121 | |
4122 | rename(self.udf.path, self.udf.path + ".old") |
4123 | rename(path, path + ".old") |
4124 | @@ -1024,10 +1032,10 @@ |
4125 | |
4126 | yield self.eq.add_watch(self.root_dir) |
4127 | should_events = [ |
4128 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4129 | - filename=self.invalid_name)), # open |
4130 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4131 | - filename=self.invalid_name)), # close no w |
4132 | + ('FS_INVALID_NAME', |
4133 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # open |
4134 | + ('FS_INVALID_NAME', |
4135 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # close |
4136 | ] |
4137 | listener = DynamicHitMe(should_events, self) |
4138 | self.eq.subscribe(listener) |
4139 | @@ -1046,8 +1054,8 @@ |
4140 | |
4141 | yield self.eq.add_watch(self.root_dir) |
4142 | should_events = [ |
4143 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4144 | - filename=self.invalid_name)), # close no w |
4145 | + ('FS_INVALID_NAME', |
4146 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # close |
4147 | ] |
4148 | listener = DynamicHitMe(should_events, self) |
4149 | self.eq.subscribe(listener) |
4150 | @@ -1062,12 +1070,12 @@ |
4151 | """Test invalid_filename after a create, open and close write.""" |
4152 | yield self.eq.add_watch(self.root_dir) |
4153 | should_events = [ |
4154 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4155 | - filename=self.invalid_name)), # create |
4156 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4157 | - filename=self.invalid_name)), # open |
4158 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4159 | - filename=self.invalid_name)), # close w |
4160 | + ('FS_INVALID_NAME', |
4161 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # new |
4162 | + ('FS_INVALID_NAME', |
4163 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # open |
4164 | + ('FS_INVALID_NAME', |
4165 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # close |
4166 | ] |
4167 | listener = DynamicHitMe(should_events, self) |
4168 | self.eq.subscribe(listener) |
4169 | @@ -1083,8 +1091,8 @@ |
4170 | """Test invalid_filename after a dir create.""" |
4171 | yield self.eq.add_watch(self.root_dir) |
4172 | should_events = [ |
4173 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4174 | - filename=self.invalid_name)), # create |
4175 | + ('FS_INVALID_NAME', |
4176 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # new |
4177 | ] |
4178 | listener = DynamicHitMe(should_events, self) |
4179 | self.eq.subscribe(listener) |
4180 | @@ -1102,8 +1110,8 @@ |
4181 | |
4182 | yield self.eq.add_watch(self.root_dir) |
4183 | should_events = [ |
4184 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4185 | - filename=self.invalid_name)), # delete |
4186 | + ('FS_INVALID_NAME', |
4187 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # del |
4188 | ] |
4189 | listener = DynamicHitMe(should_events, self) |
4190 | self.eq.subscribe(listener) |
4191 | @@ -1120,8 +1128,8 @@ |
4192 | |
4193 | yield self.eq.add_watch(self.root_dir) |
4194 | should_events = [ |
4195 | - ('FS_INVALID_NAME', dict(dirname=self.root_dir, |
4196 | - filename=self.invalid_name)), # delete |
4197 | + ('FS_INVALID_NAME', |
4198 | + dict(dirname=self.root_dir, filename=self.invalid_name)), # del |
4199 | ] |
4200 | listener = DynamicHitMe(should_events, self) |
4201 | self.eq.subscribe(listener) |
4202 | @@ -1141,8 +1149,8 @@ |
4203 | |
4204 | yield self.eq.add_watch(destdir) |
4205 | should_events = [ |
4206 | - ('FS_INVALID_NAME', dict(dirname=destdir, |
4207 | - filename=self.invalid_name)), # move to |
4208 | + ('FS_INVALID_NAME', |
4209 | + dict(dirname=destdir, filename=self.invalid_name)), # move to |
4210 | ] |
4211 | listener = DynamicHitMe(should_events, self) |
4212 | self.eq.subscribe(listener) |
4213 | @@ -1164,8 +1172,8 @@ |
4214 | |
4215 | yield self.eq.add_watch(fromdir) |
4216 | should_events = [ |
4217 | - ('FS_INVALID_NAME', dict(dirname=fromdir, |
4218 | - filename=self.invalid_name)), # move from |
4219 | + ('FS_INVALID_NAME', |
4220 | + dict(dirname=fromdir, filename=self.invalid_name)), # move from |
4221 | ] |
4222 | listener = DynamicHitMe(should_events, self) |
4223 | self.eq.subscribe(listener) |
4224 | @@ -1187,9 +1195,8 @@ |
4225 | """Test receiving the open signal on files.""" |
4226 | testfile = os.path.join(self.root_dir, "foo") |
4227 | |
4228 | - # helper class, pylint: disable-msg=C0111 |
4229 | class HitMe(object): |
4230 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4231 | + |
4232 | def handle_FS_FILE_OPEN(innerself, path): |
4233 | if path != testfile: |
4234 | self.finished_error("received a wrong path") |
4235 | @@ -1211,9 +1218,8 @@ |
4236 | open_file(testfile, "w").close() |
4237 | fh = open_file(testfile) |
4238 | |
4239 | - # helper class, pylint: disable-msg=C0111 |
4240 | class HitMe(object): |
4241 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4242 | + |
4243 | def handle_FS_FILE_CLOSE_NOWRITE(innerself, path): |
4244 | if path != testfile: |
4245 | self.finished_error("received a wrong path") |
4246 | @@ -1233,9 +1239,8 @@ |
4247 | """Test receiving the create and close_write signals on files.""" |
4248 | testfile = os.path.join(self.root_dir, "foo") |
4249 | |
4250 | - # helper class, pylint: disable-msg=C0111 |
4251 | class HitMe(object): |
4252 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4253 | + |
4254 | def __init__(innerself): |
4255 | innerself.hist = [] |
4256 | |
4257 | @@ -1268,9 +1273,8 @@ |
4258 | """Test receiving the create signal on dirs.""" |
4259 | testdir = os.path.join(self.root_dir, "foo") |
4260 | |
4261 | - # helper class, pylint: disable-msg=C0111 |
4262 | class HitMe(object): |
4263 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4264 | + |
4265 | def handle_FS_DIR_CREATE(innerself, path): |
4266 | if path != testdir: |
4267 | self.finished_error("received a wrong path") |
4268 | @@ -1291,9 +1295,8 @@ |
4269 | testfile = os.path.join(self.root_dir, "foo") |
4270 | open_file(testfile, "w").close() |
4271 | |
4272 | - # helper class, pylint: disable-msg=C0111 |
4273 | class HitMe(object): |
4274 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4275 | + |
4276 | def handle_FS_FILE_DELETE(innerself, path): |
4277 | if path != testfile: |
4278 | self.finished_error("received a wrong path") |
4279 | @@ -1318,9 +1321,8 @@ |
4280 | testdir = os.path.join(self.root_dir, "foo") |
4281 | make_dir(testdir) |
4282 | |
4283 | - # helper class, pylint: disable-msg=C0111 |
4284 | class HitMe(object): |
4285 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4286 | + |
4287 | def handle_FS_DIR_DELETE(innerself, path): |
4288 | if path != testdir: |
4289 | self.finished_error("received a wrong path") |
4290 | @@ -1372,9 +1374,8 @@ |
4291 | open_file(fromfile, "w").close() |
4292 | make_dir(helpdir) |
4293 | |
4294 | - # helper class, pylint: disable-msg=C0111 |
4295 | class HitMe(object): |
4296 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4297 | + |
4298 | def handle_FS_FILE_DELETE(innerself, path): |
4299 | if path != fromfile: |
4300 | self.finished_error("received a wrong path") |
4301 | @@ -1399,9 +1400,8 @@ |
4302 | make_dir(fromdir) |
4303 | make_dir(helpdir) |
4304 | |
4305 | - # helper class, pylint: disable-msg=C0111 |
4306 | class HitMe(object): |
4307 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4308 | + |
4309 | def handle_FS_DIR_DELETE(innerself, path): |
4310 | if path != fromdir: |
4311 | self.finished_error("received a wrong path") |
4312 | @@ -1429,9 +1429,8 @@ |
4313 | make_dir(helpdir) |
4314 | open_file(fromfile, "w").close() |
4315 | |
4316 | - # helper class, pylint: disable-msg=C0111 |
4317 | class HitMe(object): |
4318 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4319 | + |
4320 | def handle_FS_FILE_CREATE(innerself, path): |
4321 | if path != tofile: |
4322 | self.finished_error("received a wrong path") |
4323 | @@ -1456,9 +1455,8 @@ |
4324 | make_dir(helpdir) |
4325 | make_dir(fromdir) |
4326 | |
4327 | - # helper class, pylint: disable-msg=C0111 |
4328 | class HitMe(object): |
4329 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4330 | + |
4331 | def handle_FS_DIR_CREATE(innerself, path): |
4332 | if path != todir: |
4333 | self.finished_error("received a wrong path") |
4334 | @@ -1611,9 +1609,8 @@ |
4335 | self.fs.set_node_id(tofile, "to_node_id") |
4336 | open_file(fromfile, "w").close() |
4337 | |
4338 | - # helper class, pylint: disable-msg=C0111 |
4339 | class HitMe(object): |
4340 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4341 | + |
4342 | def handle_FS_FILE_MOVE(innerself, path_from, path_to): |
4343 | if path_from != fromfile: |
4344 | self.finished_error("received a wrong path in from") |
4345 | @@ -1641,9 +1638,8 @@ |
4346 | self.fs.set_node_id(todir, "to_node_id") |
4347 | make_dir(fromdir) |
4348 | |
4349 | - # helper class, pylint: disable-msg=C0111 |
4350 | class HitMe(object): |
4351 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4352 | + |
4353 | def handle_FS_DIR_MOVE(innerself, path_from, path_to): |
4354 | if path_from != fromdir: |
4355 | self.finished_error("received a wrong path in from") |
4356 | @@ -1671,7 +1667,6 @@ |
4357 | self.fs.create(mypath('bar'), "") |
4358 | self.fs.set_node_id(mypath('bar'), "bar_node_id") |
4359 | |
4360 | - |
4361 | yield self.eq.add_watch(self.root_dir) |
4362 | |
4363 | should_events = [ |
4364 | @@ -1718,9 +1713,9 @@ |
4365 | open_file(testfile, 'w').close() |
4366 | |
4367 | paths = [testdir, testfile] |
4368 | - # helper class, pylint: disable-msg=C0111 |
4369 | + |
4370 | class HitMe(object): |
4371 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4372 | + |
4373 | def handle_FS_DIR_DELETE(innerself, path): |
4374 | expected = paths.pop() |
4375 | if path != expected: |
4376 | @@ -1748,9 +1743,8 @@ |
4377 | make_dir(testdir) |
4378 | newdirname = os.path.join(self.root_dir, "newdir") |
4379 | |
4380 | - # helper class, pylint: disable-msg=C0111 |
4381 | class HitMe(object): |
4382 | - # class-closure, cannot use self, pylint: disable-msg=E0213 |
4383 | + |
4384 | def handle_FS_FILE_CREATE(innerself, path): |
4385 | if path != newfilepath: |
4386 | self.finished_error("received a wrong path") |
4387 | @@ -1932,13 +1926,16 @@ |
4388 | |
4389 | d = self._deferred |
4390 | log = self.eq.monitor._processor.log |
4391 | + |
4392 | class Handler(logging.Handler): |
4393 | """Handler that trigger the deferred callback.""" |
4394 | + |
4395 | def emit(self, record): |
4396 | """Dummy emit.""" |
4397 | # cleanup, remove the handler |
4398 | log.removeHandler(self) |
4399 | d.callback(record) |
4400 | + |
4401 | hdlr = Handler() |
4402 | hdlr.setLevel(logging.WARNING) |
4403 | log.addHandler(hdlr) |
4404 | @@ -1967,13 +1964,16 @@ |
4405 | |
4406 | d = self._deferred |
4407 | log = self.eq.monitor._processor.log |
4408 | + |
4409 | class Handler(logging.Handler): |
4410 | """Handler that trigger the deferred callback.""" |
4411 | + |
4412 | def emit(self, record): |
4413 | """Dummy emit.""" |
4414 | # cleanup, remove the handler |
4415 | log.removeHandler(self) |
4416 | d.callback(record) |
4417 | + |
4418 | hdlr = Handler() |
4419 | hdlr.setLevel(logging.WARNING) |
4420 | log.addHandler(hdlr) |
4421 | |
4422 | === modified file 'ubuntuone/syncdaemon/tests/test_eventqueue.py' |
4423 | --- ubuntuone/syncdaemon/tests/test_eventqueue.py 2016-05-27 23:49:19 +0000 |
4424 | +++ ubuntuone/syncdaemon/tests/test_eventqueue.py 2016-07-30 22:04:10 +0000 |
4425 | @@ -36,10 +36,14 @@ |
4426 | from twisted.internet import defer |
4427 | from twisted.trial.unittest import TestCase |
4428 | |
4429 | -from contrib.testing.testcase import (BaseTwistedTestCase, |
4430 | - FakeMonitor, |
4431 | - FakeVolumeManager) |
4432 | -from ubuntuone.platform.filesystem_notifications.monitor import FilesystemMonitor |
4433 | +from contrib.testing.testcase import ( |
4434 | + BaseTwistedTestCase, |
4435 | + FakeMonitor, |
4436 | + FakeVolumeManager, |
4437 | +) |
4438 | +from ubuntuone.platform.filesystem_notifications.monitor import ( |
4439 | + FilesystemMonitor, |
4440 | +) |
4441 | from ubuntuone.syncdaemon import ( |
4442 | event_queue, |
4443 | filesystem_manager, |
4444 | @@ -49,7 +53,7 @@ |
4445 | |
4446 | |
4447 | class BaseEQTestCase(BaseTwistedTestCase): |
4448 | - """ Setup an EQ for test. """ |
4449 | + """Setup an EQ for test.""" |
4450 | |
4451 | _monitor_class = FakeMonitor |
4452 | |
4453 | @@ -63,15 +67,14 @@ |
4454 | self.vm = FakeVolumeManager(self.root_dir) |
4455 | self.db = tritcask.Tritcask(self.mktemp('tritcask')) |
4456 | self.addCleanup(self.db.shutdown) |
4457 | - self.fs = filesystem_manager.FileSystemManager(self.fsmdir, |
4458 | - self.partials_dir, |
4459 | - self.vm, self.db) |
4460 | - self.fs.create(path=self.root_dir, |
4461 | - share_id='', is_dir=True) |
4462 | - self.fs.set_by_path(path=self.root_dir, |
4463 | - local_hash=None, server_hash=None) |
4464 | - self.eq = event_queue.EventQueue(self.fs, |
4465 | - monitor_class=self._monitor_class) |
4466 | + self.fs = filesystem_manager.FileSystemManager( |
4467 | + self.fsmdir, self.partials_dir, self.vm, self.db) |
4468 | + self.fs.create( |
4469 | + path=self.root_dir, share_id='', is_dir=True) |
4470 | + self.fs.set_by_path( |
4471 | + path=self.root_dir, local_hash=None, server_hash=None) |
4472 | + self.eq = event_queue.EventQueue( |
4473 | + self.fs, monitor_class=self._monitor_class) |
4474 | self.eq.listener_map = {} |
4475 | self.addCleanup(self.eq.shutdown) |
4476 | self.fs.register_eq(self.eq) |
4477 | @@ -110,6 +113,7 @@ |
4478 | |
4479 | def test_subscription_nodefault(self): |
4480 | """Don't subscribe if there's no default.""" |
4481 | + |
4482 | class Listener(object): |
4483 | """Listener.""" |
4484 | |
4485 | @@ -129,6 +133,7 @@ |
4486 | |
4487 | def test_subscription_two_listeners(self): |
4488 | """Subscribe several listeners.""" |
4489 | + |
4490 | class Listener1(object): |
4491 | """Listener 1.""" |
4492 | |
4493 | @@ -212,8 +217,8 @@ |
4494 | |
4495 | # incorrect args, only kwargs supported |
4496 | self.assertRaises(TypeError, self.eq.push, "FS_FILE_MOVE", 1) |
4497 | - self.assertRaises(TypeError, |
4498 | - self.eq.push, "FS_FILE_MOVE", 1, path_to=2) |
4499 | + self.assertRaises( |
4500 | + TypeError, self.eq.push, "FS_FILE_MOVE", 1, path_to=2) |
4501 | |
4502 | # ok: just kwargs |
4503 | self.eq.push("FS_FILE_MOVE", path_from=1, path_to=2) |
4504 | @@ -231,10 +236,12 @@ |
4505 | def test_listened_pushs(self): |
4506 | """Push events and listen to them.""" |
4507 | |
4508 | - # helper class, pylint: disable-msg=C0111 |
4509 | + # helper class |
4510 | class Create(object): |
4511 | + |
4512 | def __init__(self): |
4513 | self.a = None |
4514 | + |
4515 | def handle_FS_FILE_CREATE(self, path): |
4516 | self.a = path |
4517 | |
4518 | @@ -255,9 +262,10 @@ |
4519 | def test_signatures(self): |
4520 | """Check that the handle signatures are forced when passing.""" |
4521 | |
4522 | - # helper class, pylint: disable-msg=C0111 |
4523 | + # helper class |
4524 | class Create(object): |
4525 | - def handle_FS_FILE_CREATE(self, notpath): # it should be path here |
4526 | + |
4527 | + def handle_FS_FILE_CREATE(self, notpath): # it should be path here |
4528 | pass |
4529 | |
4530 | # it get passed! |
4531 | @@ -296,16 +304,22 @@ |
4532 | """Test the error handling in the event distribution machinery.""" |
4533 | |
4534 | def test_keep_going(self): |
4535 | - """ Check that if a listener raises an Exception or have a |
4536 | - wrong signature, the next listeners are called. |
4537 | + """Checks. |
4538 | + |
4539 | + If a listener raises an Exception or has a wrong signature, the next |
4540 | + listeners are called. |
4541 | + |
4542 | """ |
4543 | d = defer.Deferred() |
4544 | - # helper class, pylint: disable-msg=C0111 |
4545 | + |
4546 | + # helper class |
4547 | class BadListener(object): |
4548 | - def handle_FS_FILE_CREATE(self, notpath): # it should be path here |
4549 | + |
4550 | + def handle_FS_FILE_CREATE(self, notpath): # it should be path here |
4551 | d.callback(False) |
4552 | |
4553 | class GoodListener(object): |
4554 | + |
4555 | def handle_FS_FILE_CREATE(self, path): |
4556 | d.callback(path) |
4557 | |
4558 | @@ -315,26 +329,30 @@ |
4559 | self.eq.subscribe(gl) |
4560 | |
4561 | def cleanup(): |
4562 | - """ unsubscribe the listeners """ |
4563 | + """unsubscribe the listeners """ |
4564 | self.eq.unsubscribe(bl) |
4565 | self.eq.unsubscribe(gl) |
4566 | + |
4567 | self.addCleanup(cleanup) |
4568 | |
4569 | # one listener has a wrong signature |
4570 | self.eq.push("FS_FILE_CREATE", path=1) |
4571 | + |
4572 | def callback(result): |
4573 | - """ asserts that GoodListener was called. """ |
4574 | + """Assert that GoodListener was called.""" |
4575 | self.assertTrue(result) |
4576 | - self.assertEquals(1, result) |
4577 | + self.assertEqual(1, result) |
4578 | |
4579 | d.addCallback(callback) |
4580 | return d |
4581 | |
4582 | def test_default_handler(self): |
4583 | - """ Check that handler_default is called. """ |
4584 | + """Check that handler_default is called.""" |
4585 | d = defer.Deferred() |
4586 | - # helper class, pylint: disable-msg=C0111 |
4587 | + |
4588 | + # helper class |
4589 | class Listener(object): |
4590 | + |
4591 | def handle_default(self, event, **kwargs): |
4592 | d.callback((event, kwargs)) |
4593 | |
4594 | @@ -342,17 +360,18 @@ |
4595 | self.eq.subscribe(l) |
4596 | |
4597 | def cleanup(): |
4598 | - """ unsubscribe the listeners """ |
4599 | + """Unsubscribe the listeners.""" |
4600 | self.eq.unsubscribe(l) |
4601 | self.addCleanup(cleanup) |
4602 | |
4603 | # push some event and expect it'll be handled by handle_default |
4604 | self.eq.push("FS_FILE_CREATE", path=1) |
4605 | + |
4606 | def callback(result): |
4607 | - """ asserts that GoodListener was called. """ |
4608 | - self.assertEquals(2, len(result)) |
4609 | - self.assertEquals('FS_FILE_CREATE', result[0]) |
4610 | - self.assertEquals({'path': 1}, result[1]) |
4611 | + """Assert that GoodListener was called.""" |
4612 | + self.assertEqual(2, len(result)) |
4613 | + self.assertEqual('FS_FILE_CREATE', result[0]) |
4614 | + self.assertEqual({'path': 1}, result[1]) |
4615 | |
4616 | d.addCallback(callback) |
4617 | return d |
4618 | @@ -360,8 +379,10 @@ |
4619 | def test_ordered_dispatch(self): |
4620 | """Check that the events are pushed to all listeners in order.""" |
4621 | d = defer.Deferred() |
4622 | - # helper class, pylint: disable-msg=C0111 |
4623 | + |
4624 | + # helper class |
4625 | class Listener(object): |
4626 | + |
4627 | def __init__(self, eq): |
4628 | self.eq = eq |
4629 | self.events = [] |
4630 | @@ -426,7 +447,6 @@ |
4631 | self.assertIsInstance(eq.monitor, FakeMonitor) |
4632 | |
4633 | |
4634 | - |
4635 | class EventQueueShutdownTestCase(TestCase): |
4636 | """Test the shutdown method in EQ.""" |
4637 | |
4638 | |
4639 | === modified file 'ubuntuone/syncdaemon/tests/test_eventsnanny.py' |
4640 | --- ubuntuone/syncdaemon/tests/test_eventsnanny.py 2012-04-09 20:07:05 +0000 |
4641 | +++ ubuntuone/syncdaemon/tests/test_eventsnanny.py 2016-07-30 22:04:10 +0000 |
4642 | @@ -117,6 +117,7 @@ |
4643 | """Inserts something in HQ and waits that thread.""" |
4644 | d = defer.Deferred() |
4645 | self.hq.insert(path, node_id) |
4646 | + |
4647 | def wait(): |
4648 | """waits for the var to get set""" |
4649 | if self.hq.hasher.hashing is None: |
4650 | @@ -130,6 +131,7 @@ |
4651 | """Releases HQ as it finished.""" |
4652 | d = defer.Deferred() |
4653 | self.hq.insert(None, None) |
4654 | + |
4655 | def wait(): |
4656 | """waits for the var to get set""" |
4657 | if self.hq.hasher.hashing is not None: |
4658 | @@ -139,7 +141,6 @@ |
4659 | reactor.callLater(.1, wait) |
4660 | return d |
4661 | |
4662 | - |
4663 | def test_forward(self): |
4664 | """Forwards the event when file is not blocked.""" |
4665 | self.eq.push("AQ_DOWNLOAD_COMMIT", |
4666 | |
4667 | === modified file 'ubuntuone/syncdaemon/tests/test_fileshelf.py' |
4668 | --- ubuntuone/syncdaemon/tests/test_fileshelf.py 2016-05-27 23:49:19 +0000 |
4669 | +++ ubuntuone/syncdaemon/tests/test_fileshelf.py 2016-07-30 22:04:10 +0000 |
4670 | @@ -56,22 +56,23 @@ |
4671 | |
4672 | |
4673 | class TestFileShelf(BaseTwistedTestCase): |
4674 | - """ Test the FileShelf """ |
4675 | + """Test the FileShelf """ |
4676 | fileshelf_class = FileShelf |
4677 | |
4678 | @defer.inlineCallbacks |
4679 | def setUp(self): |
4680 | - """ Sets up a test. """ |
4681 | + """Set up a test.""" |
4682 | yield super(TestFileShelf, self).setUp() |
4683 | self.path = self.mktemp('shelf') |
4684 | self.shelf = self.fileshelf_class(self.path) |
4685 | |
4686 | def test_bad_depth(self): |
4687 | - """ test that the shelf reject invalid depth at creation time """ |
4688 | - self.assertRaises(ValueError, self.fileshelf_class, self.path, depth=-1) |
4689 | + """Test that the shelf rejects invalid depth at creation time """ |
4690 | + self.assertRaises( |
4691 | + ValueError, self.fileshelf_class, self.path, depth=-1) |
4692 | |
4693 | def test_bad_path(self): |
4694 | - """ test that the shelf removes the previous shelve file and create a |
4695 | + """Test that the shelf removes the previous shelve file and create a |
4696 | directory for the new file based shelf at creation time. |
4697 | """ |
4698 | path = os.path.join(self.path, 'shelf_file') |
4699 | @@ -80,7 +81,7 @@ |
4700 | self.assertTrue(os.path.isdir(path)) |
4701 | |
4702 | def test_different_depth_sizes(self): |
4703 | - """ test the basic operations (delitem, getitem, setitem) with |
4704 | + """Test the basic operations (delitem, getitem, setitem) with |
4705 | depths between 0 and len(hashlib.sha1().hexdigest()) |
4706 | """ |
4707 | base_path = os.path.join(self.path, 'shelf_depth-') |
4708 | @@ -94,29 +95,29 @@ |
4709 | key_path = os.path.join(path, *[key[i] for i in xrange(0, idx)]) |
4710 | self.assertTrue(path_exists(os.path.join(key_path, key))) |
4711 | # test __getitem__ |
4712 | - self.assertEquals('foo', shelf[key]) |
4713 | + self.assertEqual('foo', shelf[key]) |
4714 | # test __delitem__ |
4715 | del shelf[key] |
4716 | self.assertRaises(KeyError, shelf.__getitem__, key) |
4717 | self.assertFalse(path_exists(os.path.join(key_path, key))) |
4718 | |
4719 | def test_invalid_keys(self): |
4720 | - """ test the exception raised when invalid keys are eused ('', None)""" |
4721 | + """Test the exception raised when invalid keys are used ('', None)""" |
4722 | self.assertRaises(ValueError, self.shelf.__setitem__, None, 'foo') |
4723 | self.assertRaises(ValueError, self.shelf.__setitem__, '', 'foo') |
4724 | |
4725 | def test_contains(self): |
4726 | - """ test that it behaves with the 'in' """ |
4727 | + """Test that it behaves with the 'in' """ |
4728 | path = os.path.join(self.path, 'shelf_depth') |
4729 | shelf = self.fileshelf_class(path) |
4730 | shelf["foo"] = "bar" |
4731 | self.assertTrue("foo" in shelf) |
4732 | self.assertFalse("baz" in shelf) |
4733 | - self.assertEquals('bar', shelf.get('foo')) |
4734 | - self.assertEquals(None, shelf.get('baz', None)) |
4735 | + self.assertEqual('bar', shelf.get('foo')) |
4736 | + self.assertEqual(None, shelf.get('baz', None)) |
4737 | |
4738 | def test_pop(self): |
4739 | - """ test that it behaves with the .pop() """ |
4740 | + """Test that it behaves with the .pop() """ |
4741 | path = os.path.join(self.path, 'shelf_depth') |
4742 | shelf = self.fileshelf_class(path) |
4743 | shelf["foo"] = "bar" |
4744 | @@ -127,26 +128,26 @@ |
4745 | self.assertRaises(KeyError, shelf.pop, "no-key") |
4746 | |
4747 | def test_get(self): |
4748 | - """ test that it behaves with the .get(key, default) """ |
4749 | + """Test that it behaves with the .get(key, default) """ |
4750 | path = os.path.join(self.path, 'shelf_get') |
4751 | shelf = self.fileshelf_class(path) |
4752 | shelf["foo"] = "bar" |
4753 | - self.assertEquals('bar', shelf.get('foo')) |
4754 | - self.assertEquals('bar', shelf.get('foo', None)) |
4755 | - self.assertEquals(None, shelf.get('baz')) |
4756 | + self.assertEqual('bar', shelf.get('foo')) |
4757 | + self.assertEqual('bar', shelf.get('foo', None)) |
4758 | + self.assertEqual(None, shelf.get('baz')) |
4759 | self.assertFalse(shelf.get('baz', False)) |
4760 | |
4761 | def test_items(self): |
4762 | - """ test that it behaves with the .items() """ |
4763 | + """Test that it behaves with the .items() """ |
4764 | path = os.path.join(self.path, 'shelf_get') |
4765 | shelf = self.fileshelf_class(path) |
4766 | shelf["foo"] = "bar" |
4767 | - # k, v are temp variables, pylint: disable-msg=W0631 |
4768 | - self.assertEquals([('foo', 'bar')], |
4769 | - [(k, v) for k, v in shelf.items()]) |
4770 | + self.assertEqual([('foo', 'bar')], |
4771 | + [(k, v) for k, v in shelf.items()]) |
4772 | shelf["foo1"] = "bar1" |
4773 | - self.assertTrue(('foo', 'bar') and ('foo1', 'bar1') in \ |
4774 | - [(k, v) for k, v in shelf.items()]) |
4775 | + items = [(k, v) for k, v in shelf.items()] |
4776 | + self.assertIn(('foo', 'bar'), items) |
4777 | + self.assertIn(('foo1', 'bar1'), items) |
4778 | |
4779 | def test_broken_metadata_without_backup(self): |
4780 | """test the shelf behavior when it hit a broken metadata file without |
4781 | @@ -165,35 +166,35 @@ |
4782 | |
4783 | def test_broken_metadata_with_backup(self): |
4784 | """test that each time a metadata file is updated a .old is kept""" |
4785 | - self.shelf['bad_file'] = {'value':'old'} |
4786 | + self.shelf['bad_file'] = {'value': 'old'} |
4787 | path = self.shelf.key_file('bad_file') |
4788 | self.assertFalse(path_exists(path+'.old')) |
4789 | - self.assertEquals({'value':'old'}, self.shelf['bad_file']) |
4790 | + self.assertEqual({'value': 'old'}, self.shelf['bad_file']) |
4791 | # force the creation of the .old file |
4792 | - self.shelf['bad_file'] = {'value':'new'} |
4793 | + self.shelf['bad_file'] = {'value': 'new'} |
4794 | self.assertTrue(path_exists(path+'.old')) |
4795 | # check that the new value is there |
4796 | - self.assertEquals({'value':'new'}, self.shelf['bad_file']) |
4797 | + self.assertEqual({'value': 'new'}, self.shelf['bad_file']) |
4798 | # write the current md file with 0 bytes |
4799 | open_file(path, 'w').close() |
4800 | # test that the old value is retrieved |
4801 | - self.assertEquals({'value':'old'}, self.shelf['bad_file']) |
4802 | + self.assertEqual({'value': 'old'}, self.shelf['bad_file']) |
4803 | |
4804 | - self.shelf['broken_pickle'] = {'value':'old'} |
4805 | + self.shelf['broken_pickle'] = {'value': 'old'} |
4806 | path = self.shelf.key_file('broken_pickle') |
4807 | # check that .old don't exist |
4808 | self.assertFalse(path_exists(path+'.old')) |
4809 | # force the creation of the .old file |
4810 | - self.shelf['broken_pickle'] = {'value':'new'} |
4811 | + self.shelf['broken_pickle'] = {'value': 'new'} |
4812 | # check that .old exists |
4813 | self.assertTrue(path_exists(path+'.old')) |
4814 | # check that the new value is there |
4815 | - self.assertEquals({'value':'new'}, self.shelf['broken_pickle']) |
4816 | + self.assertEqual({'value': 'new'}, self.shelf['broken_pickle']) |
4817 | # write random bytes to the md file |
4818 | with open_file(path, 'w') as f: |
4819 | f.write(BROKEN_PICKLE) |
4820 | # check that the old value is retrieved |
4821 | - self.assertEquals({'value':'old'}, self.shelf['broken_pickle']) |
4822 | + self.assertEqual({'value': 'old'}, self.shelf['broken_pickle']) |
4823 | |
4824 | def test_keys_with_old_and_new(self): |
4825 | """test keys() with .old and .new files around""" |
4826 | @@ -203,7 +204,7 @@ |
4827 | open_file(self.shelf.key_file('foo1')+'.old', 'w').close() |
4828 | open_file(self.shelf.key_file('foo')+'.new', 'w').close() |
4829 | open_file(self.shelf.key_file('foo1')+'.new', 'w').close() |
4830 | - self.assertEquals(set(['foo', 'foo1']), set(self.shelf.keys())) |
4831 | + self.assertEqual(set(['foo', 'foo1']), set(self.shelf.keys())) |
4832 | |
4833 | def test_corrupted_backup(self): |
4834 | """test getitem if also the .old file is corrupted""" |
4835 | @@ -221,8 +222,8 @@ |
4836 | path = self.shelf.key_file('foo') |
4837 | open_file(self.shelf.key_file('foo'), 'w').close() |
4838 | for _ in xrange(20): |
4839 | - open_file(path+'.old', 'w').close() |
4840 | - path=path+'.old' |
4841 | + open_file(path + '.old', 'w').close() |
4842 | + path += '.old' |
4843 | self.assertRaises(KeyError, self.shelf.__getitem__, 'foo') |
4844 | |
4845 | def test_delete_backups_too(self): |
4846 | @@ -243,8 +244,10 @@ |
4847 | def test_custom_unpickle(self): |
4848 | """Test the _pickle and _unpikle methods.""" |
4849 | self.mktemp('my_shelf') |
4850 | + |
4851 | class InMemoryFileShelf(FileShelf): |
4852 | """A in-memory FileShelf.""" |
4853 | + |
4854 | values = {} |
4855 | |
4856 | def key_file(self, key): |
4857 | @@ -267,17 +270,16 @@ |
4858 | shelf = InMemoryFileShelf(self.path) |
4859 | shelf['foo'] = 'bar' |
4860 | self.assertIn('foo', shelf.values) |
4861 | - self.assertEquals(shelf.values['foo'], |
4862 | - cPickle.dumps('bar', protocol=2)) |
4863 | + self.assertEqual(shelf.values['foo'], cPickle.dumps('bar', protocol=2)) |
4864 | |
4865 | def test_broken_metadata_iteritems(self): |
4866 | """Test that broken metadata is ignored during iteritems.""" |
4867 | - self.shelf['ok_key'] = {'status':'this is valid metadata'} |
4868 | + self.shelf['ok_key'] = {'status': 'this is valid metadata'} |
4869 | self.shelf['bad_file'] = {} |
4870 | path = self.shelf.key_file('bad_file') |
4871 | open_file(path, 'w').close() |
4872 | self.assertRaises(KeyError, self.shelf.__getitem__, 'bad_file') |
4873 | - self.assertEquals(1, len(list(self.shelf.iteritems()))) |
4874 | + self.assertEqual(1, len(list(self.shelf.iteritems()))) |
4875 | self.assertFalse(path_exists(path)) |
4876 | |
4877 | self.shelf['broken_pickle'] = {} |
4878 | @@ -285,17 +287,17 @@ |
4879 | with open_file(path, 'w') as f: |
4880 | f.write(BROKEN_PICKLE) |
4881 | self.assertRaises(KeyError, self.shelf.__getitem__, 'broken_pickle') |
4882 | - self.assertEquals(1, len(list(self.shelf.iteritems()))) |
4883 | + self.assertEqual(1, len(list(self.shelf.iteritems()))) |
4884 | self.assertFalse(path_exists(path)) |
4885 | |
4886 | def test_broken_metadata_items(self): |
4887 | """Test that broken metadata is ignored during iteritems.""" |
4888 | - self.shelf['ok_key'] = {'status':'this is valid metadata'} |
4889 | + self.shelf['ok_key'] = {'status': 'this is valid metadata'} |
4890 | self.shelf['bad_file'] = {} |
4891 | path = self.shelf.key_file('bad_file') |
4892 | open_file(path, 'w').close() |
4893 | self.assertRaises(KeyError, self.shelf.__getitem__, 'bad_file') |
4894 | - self.assertEquals(1, len(list(self.shelf.items()))) |
4895 | + self.assertEqual(1, len(list(self.shelf.items()))) |
4896 | self.assertFalse(path_exists(path)) |
4897 | |
4898 | self.shelf['broken_pickle'] = {} |
4899 | @@ -303,7 +305,7 @@ |
4900 | with open_file(path, 'w') as f: |
4901 | f.write(BROKEN_PICKLE) |
4902 | self.assertRaises(KeyError, self.shelf.__getitem__, 'broken_pickle') |
4903 | - self.assertEquals(1, len(list(self.shelf.items()))) |
4904 | + self.assertEqual(1, len(list(self.shelf.items()))) |
4905 | self.assertFalse(path_exists(path)) |
4906 | |
4907 | |
4908 | @@ -313,51 +315,50 @@ |
4909 | |
4910 | def test_hit_miss_properties(self): |
4911 | """test the cache hits/misses properties""" |
4912 | - # yes, the statement has some effect, pylint: disable-msg=W0104 |
4913 | try: |
4914 | self.shelf['missingkey'] |
4915 | except KeyError: |
4916 | - self.assertEquals(self.shelf.cache_misses, 1) |
4917 | + self.assertEqual(self.shelf.cache_misses, 1) |
4918 | else: |
4919 | self.fail('We have a key in the shelf, but it should be empty!!') |
4920 | self.shelf['realkey'] = 'realvalue' |
4921 | self.shelf['realkey'] |
4922 | self.shelf['realkey'] |
4923 | - self.assertEquals(self.shelf.cache_hits, 1) |
4924 | + self.assertEqual(self.shelf.cache_hits, 1) |
4925 | |
4926 | def test_broken_metadata_with_backup(self): |
4927 | """overrides parent test as we have the value in the cache.""" |
4928 | - self.shelf['bad_file'] = {'value':'old'} |
4929 | + self.shelf['bad_file'] = {'value': 'old'} |
4930 | path = self.shelf.key_file('bad_file') |
4931 | self.assertFalse(path_exists(path+'.old')) |
4932 | - self.assertEquals({'value':'old'}, self.shelf['bad_file']) |
4933 | + self.assertEqual({'value': 'old'}, self.shelf['bad_file']) |
4934 | # force the creation of the .old file |
4935 | - self.shelf['bad_file'] = {'value':'new'} |
4936 | + self.shelf['bad_file'] = {'value': 'new'} |
4937 | self.assertTrue(path_exists(path+'.old')) |
4938 | # check that the new value is there |
4939 | - self.assertEquals({'value':'new'}, self.shelf['bad_file']) |
4940 | + self.assertEqual({'value': 'new'}, self.shelf['bad_file']) |
4941 | # write the current md file with 0 bytes |
4942 | open_file(path, 'w').close() |
4943 | # HERE IS THE DIFFERENCE with the parent tests |
4944 | # test that the new value is retrieved from the cache! |
4945 | - self.assertEquals({'value':'new'}, self.shelf['bad_file']) |
4946 | + self.assertEqual({'value': 'new'}, self.shelf['bad_file']) |
4947 | |
4948 | - self.shelf['broken_pickle'] = {'value':'old'} |
4949 | + self.shelf['broken_pickle'] = {'value': 'old'} |
4950 | path = self.shelf.key_file('broken_pickle') |
4951 | # check that .old don't exist |
4952 | self.assertFalse(path_exists(path+'.old')) |
4953 | # force the creation of the .old file |
4954 | - self.shelf['broken_pickle'] = {'value':'new'} |
4955 | + self.shelf['broken_pickle'] = {'value': 'new'} |
4956 | # check that .old exists |
4957 | self.assertTrue(path_exists(path+'.old')) |
4958 | # check that the new value is there |
4959 | - self.assertEquals({'value':'new'}, self.shelf['broken_pickle']) |
4960 | + self.assertEqual({'value': 'new'}, self.shelf['broken_pickle']) |
4961 | # write random bytes to the md file |
4962 | with open_file(path, 'w') as f: |
4963 | f.write(BROKEN_PICKLE) |
4964 | # HERE IS THE DIFFERENCE with the parent tests |
4965 | # test that the new value is retrieved from the cache! |
4966 | - self.assertEquals({'value':'new'}, self.shelf['broken_pickle']) |
4967 | + self.assertEqual({'value': 'new'}, self.shelf['broken_pickle']) |
4968 | |
4969 | |
4970 | class LRUCacheTests(unittest.TestCase): |
4971 | @@ -370,8 +371,8 @@ |
4972 | values = [('key'+str(i), i) for i in range(100)] |
4973 | for i, j in values: |
4974 | cache[i] = j |
4975 | - self.assertEquals(len(cache._queue), len(values)) |
4976 | - self.assertEquals(len(cache._cache), len(values)) |
4977 | + self.assertEqual(len(cache._queue), len(values)) |
4978 | + self.assertEqual(len(cache._cache), len(values)) |
4979 | |
4980 | def test_getitem(self): |
4981 | """test __delitem__ method""" |
4982 | @@ -380,11 +381,11 @@ |
4983 | values = [('key'+str(i), i) for i in range(100)] |
4984 | for i, j in values: |
4985 | cache[i] = j |
4986 | - self.assertEquals(len(cache._queue), len(values)) |
4987 | - self.assertEquals(len(cache._cache), len(values)) |
4988 | + self.assertEqual(len(cache._queue), len(values)) |
4989 | + self.assertEqual(len(cache._cache), len(values)) |
4990 | # compare all the items with the values |
4991 | for i, j in values: |
4992 | - self.assertEquals(cache[i], j) |
4993 | + self.assertEqual(cache[i], j) |
4994 | |
4995 | def test_delitem(self): |
4996 | """test __delitem__ method""" |
4997 | @@ -392,24 +393,24 @@ |
4998 | values = [('key'+str(i), i) for i in range(100)] |
4999 | for i, j in values: |
5000 | cache[i] = j |
The diff has been truncated for viewing.
All tests passing.