Merge lp:~dobey/ubuntuone-client/update-4-0 into lp:ubuntuone-client/stable-4-0
- update-4-0
- Merge into stable-4-0
Status: | Merged |
---|---|
Merged at revision: | 1256 |
Proposed branch: | lp:~dobey/ubuntuone-client/update-4-0 |
Merge into: | lp:ubuntuone-client/stable-4-0 |
Diff against target: |
2941 lines (+894/-756) 31 files modified
contrib/testing/testcase.py (+11/-3) run-mac-tests (+1/-1) tests/platform/filesystem_notifications/common.py (+122/-303) tests/platform/filesystem_notifications/test_darwin.py (+84/-388) tests/platform/filesystem_notifications/test_fsevents_daemon.py (+8/-8) tests/platform/filesystem_notifications/test_windows.py (+344/-0) tests/platform/ipc/test_external_interface.py (+14/-0) tests/platform/test_tools.py (+10/-0) tests/status/test_aggregator.py (+82/-1) tests/syncdaemon/test_fsm.py (+27/-0) tests/syncdaemon/test_interaction_interfaces.py (+28/-20) tests/syncdaemon/test_vm.py (+13/-2) ubuntuone/platform/filesystem_notifications/monitor/__init__.py (+3/-0) ubuntuone/platform/filesystem_notifications/monitor/common.py (+1/-0) ubuntuone/platform/filesystem_notifications/monitor/darwin/fsevents_daemon.py (+20/-20) ubuntuone/platform/ipc/ipc_client.py (+17/-1) ubuntuone/platform/ipc/linux.py (+33/-0) ubuntuone/platform/ipc/perspective_broker.py (+7/-2) ubuntuone/platform/os_helper/__init__.py (+1/-0) ubuntuone/platform/os_helper/darwin.py (+5/-2) ubuntuone/platform/os_helper/linux.py (+1/-0) ubuntuone/platform/os_helper/unix.py (+5/-0) ubuntuone/platform/os_helper/windows.py (+2/-0) ubuntuone/platform/tools/__init__.py (+7/-0) ubuntuone/status/aggregator.py (+16/-1) ubuntuone/syncdaemon/__init__.py (+5/-0) ubuntuone/syncdaemon/event_queue.py (+1/-0) ubuntuone/syncdaemon/filesystem_manager.py (+3/-1) ubuntuone/syncdaemon/interaction_interfaces.py (+6/-2) ubuntuone/syncdaemon/status_listener.py (+13/-1) ubuntuone/syncdaemon/volume_manager.py (+4/-0) |
To merge this branch: | bzr merge lp:~dobey/ubuntuone-client/update-4-0 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Roberto Alsina (community) | Approve | ||
Review via email: mp+120847@code.launchpad.net |
Commit message
[Diego Sarmentero]
- Fixing ipc signals in windows and changing deque size to 5.
- Adding ipc support to share the menu data (LP: #1032659).
- Collect and return the data for the menu from aggregator (LP: #1032659).
- Refactoring tests for filesystem notifications.
- Use the correct namespace (LP: #1026209).
[Rodney Dawes]
- Move the patching of user_home to before where it is used elsewhere in setUp.
[Alejandro J. Cura]
- DownloadFinished ipc signal is now thrown after the partial is committed. (LP: #1031197)
[Roberto Alsina]
- Added check for UDF path not being a file (LP: #1033582).
Description of the change
Roberto Alsina (ralsina) : | # |
Ubuntu One Auto Pilot (otto-pilot) wrote : | # |
- 1256. By dobey
-
[Diego Sarmentero]
- Fixing ipc signals in windows and changing deque size to 5.
- Adding ipc support to share the menu data (LP: #1032659).
- Collect and return the data for the menu from aggregator (LP: #1032659).
- Refactoring tests for filesystem notifications.
- Use the correct namespace (LP: #1026209).
[Rodney Dawes]
- Move the patching of user_home to before where it is used elsewhere in setUp.
[Alejandro J. Cura]
- DownloadFinished ipc signal is now thrown after the partial is committed. (LP: #1031197)
[Roberto Alsina]
- Added check for UDF path not being a file (LP: #1033582).
Preview Diff
1 | === modified file 'contrib/testing/testcase.py' | |||
2 | --- contrib/testing/testcase.py 2012-06-22 09:59:14 +0000 | |||
3 | +++ contrib/testing/testcase.py 2012-08-22 18:22:29 +0000 | |||
4 | @@ -57,6 +57,8 @@ | |||
5 | 57 | main, | 57 | main, |
6 | 58 | local_rescan, | 58 | local_rescan, |
7 | 59 | tritcask, | 59 | tritcask, |
8 | 60 | RECENT_TRANSFERS, | ||
9 | 61 | UPLOADING, | ||
10 | 60 | ) | 62 | ) |
11 | 61 | from ubuntuone.syncdaemon import logger | 63 | from ubuntuone.syncdaemon import logger |
12 | 62 | from ubuntuone import platform | 64 | from ubuntuone import platform |
13 | @@ -208,6 +210,10 @@ | |||
14 | 208 | 210 | ||
15 | 209 | show_all_notifications = True | 211 | show_all_notifications = True |
16 | 210 | 212 | ||
17 | 213 | def menu_data(self): | ||
18 | 214 | """Fake menu_data.""" | ||
19 | 215 | return {RECENT_TRANSFERS: [], UPLOADING: []} | ||
20 | 216 | |||
21 | 211 | 217 | ||
22 | 212 | class FakeMain(main.Main): | 218 | class FakeMain(main.Main): |
23 | 213 | """ A fake Main class to setup the tests """ | 219 | """ A fake Main class to setup the tests """ |
24 | @@ -385,6 +391,11 @@ | |||
25 | 385 | def setUp(self): | 391 | def setUp(self): |
26 | 386 | yield super(BaseTwistedTestCase, self).setUp() | 392 | yield super(BaseTwistedTestCase, self).setUp() |
27 | 387 | self.__root = None | 393 | self.__root = None |
28 | 394 | |||
29 | 395 | # Patch the user home | ||
30 | 396 | self.home_dir = self.mktemp('ubuntuonehacker') | ||
31 | 397 | self.patch(platform, "user_home", self.home_dir) | ||
32 | 398 | |||
33 | 388 | # use the config from the branch | 399 | # use the config from the branch |
34 | 389 | new_get_config_files = lambda: [os.path.join(os.environ['ROOTDIR'], | 400 | new_get_config_files = lambda: [os.path.join(os.environ['ROOTDIR'], |
35 | 390 | 'data', 'syncdaemon.conf')] | 401 | 'data', 'syncdaemon.conf')] |
36 | @@ -405,9 +416,6 @@ | |||
37 | 405 | self.log = logging.getLogger("ubuntuone.SyncDaemon.TEST") | 416 | self.log = logging.getLogger("ubuntuone.SyncDaemon.TEST") |
38 | 406 | self.log.info("starting test %s.%s", self.__class__.__name__, | 417 | self.log.info("starting test %s.%s", self.__class__.__name__, |
39 | 407 | self._testMethodName) | 418 | self._testMethodName) |
40 | 408 | # Patch the user home | ||
41 | 409 | self.home_dir = self.mktemp('ubuntuonehacker') | ||
42 | 410 | self.patch(platform, "user_home", self.home_dir) | ||
43 | 411 | self.patch(action_queue.tunnel_runner, "TunnelRunner", | 419 | self.patch(action_queue.tunnel_runner, "TunnelRunner", |
44 | 412 | self.tunnel_runner_class) | 420 | self.tunnel_runner_class) |
45 | 413 | 421 | ||
46 | 414 | 422 | ||
47 | === modified file 'run-mac-tests' | |||
48 | --- run-mac-tests 2012-07-30 19:31:32 +0000 | |||
49 | +++ run-mac-tests 2012-08-22 18:22:29 +0000 | |||
50 | @@ -27,7 +27,7 @@ | |||
51 | 27 | # version. If you delete this exception statement from all source | 27 | # version. If you delete this exception statement from all source |
52 | 28 | # files in the program, then also delete it here. | 28 | # files in the program, then also delete it here. |
53 | 29 | 29 | ||
55 | 30 | PYTHONPATH=../ubuntu-sso-client/:../ubuntuone-storage-protocol:../ubuntuone-dev-tools:$PYTHONPATH | 30 | PYTHONPATH=../ubuntu-sso-client/:../ubuntuone-storage-protocol:../ubuntuone-dev-tools:../ubuntuone-fsevents-daemon/python:$PYTHONPATH |
56 | 31 | 31 | ||
57 | 32 | set -e | 32 | set -e |
58 | 33 | if [ $# -ne 0 ]; then | 33 | if [ $# -ne 0 ]; then |
59 | 34 | 34 | ||
60 | === renamed file 'tests/platform/filesystem_notifications/test_windows.py' => 'tests/platform/filesystem_notifications/common.py' | |||
61 | --- tests/platform/filesystem_notifications/test_windows.py 2012-07-18 15:18:04 +0000 | |||
62 | +++ tests/platform/filesystem_notifications/common.py 2012-08-22 18:22:29 +0000 | |||
63 | @@ -38,42 +38,45 @@ | |||
64 | 38 | import itertools | 38 | import itertools |
65 | 39 | 39 | ||
66 | 40 | from twisted.internet import defer | 40 | from twisted.internet import defer |
67 | 41 | from win32file import FILE_NOTIFY_INFORMATION | ||
68 | 42 | |||
69 | 43 | from contrib.testing.testcase import BaseTwistedTestCase | 41 | from contrib.testing.testcase import BaseTwistedTestCase |
70 | 44 | from ubuntuone.devtools.handlers import MementoHandler | 42 | from ubuntuone.devtools.handlers import MementoHandler |
71 | 45 | from ubuntuone.platform.os_helper import windows as os_helper | ||
72 | 46 | from ubuntuone.platform.filesystem_notifications.pyinotify_agnostic import ( | 43 | from ubuntuone.platform.filesystem_notifications.pyinotify_agnostic import ( |
73 | 44 | EventsCodes, | ||
74 | 47 | ProcessEvent, | 45 | ProcessEvent, |
75 | 48 | IN_CLOSE_WRITE, | 46 | IN_CLOSE_WRITE, |
76 | 49 | IN_CREATE, | 47 | IN_CREATE, |
77 | 50 | IN_DELETE, | 48 | IN_DELETE, |
78 | 51 | IN_OPEN, | 49 | IN_OPEN, |
79 | 52 | ) | 50 | ) |
80 | 53 | from ubuntuone.platform.filesystem_notifications.monitor import ( | ||
81 | 54 | windows as filesystem_notifications, | ||
82 | 55 | ) | ||
83 | 56 | from ubuntuone.platform.filesystem_notifications import notify_processor | 51 | from ubuntuone.platform.filesystem_notifications import notify_processor |
84 | 57 | from ubuntuone.platform.filesystem_notifications.monitor.common import ( | 52 | from ubuntuone.platform.filesystem_notifications.monitor.common import ( |
85 | 58 | FilesystemMonitor, | 53 | FilesystemMonitor, |
86 | 59 | Watch, | 54 | Watch, |
87 | 60 | WatchManager, | 55 | WatchManager, |
88 | 61 | ) | 56 | ) |
99 | 62 | from ubuntuone.platform.filesystem_notifications.monitor.windows import ( | 57 | from ubuntuone.platform.filesystem_notifications.monitor import ACTIONS |
100 | 63 | ACTIONS, | 58 | from ubuntuone.platform.os_helper import get_os_valid_path |
101 | 64 | FILE_NOTIFY_CHANGE_FILE_NAME, | 59 | |
102 | 65 | FILE_NOTIFY_CHANGE_DIR_NAME, | 60 | OP_FLAGS = EventsCodes.FLAG_COLLECTIONS['OP_FLAGS'] |
103 | 66 | FILE_NOTIFY_CHANGE_ATTRIBUTES, | 61 | IS_DIR = EventsCodes.FLAG_COLLECTIONS['SPECIAL_FLAGS']['IN_ISDIR'] |
94 | 67 | FILE_NOTIFY_CHANGE_SIZE, | ||
95 | 68 | FILE_NOTIFY_CHANGE_LAST_WRITE, | ||
96 | 69 | FILE_NOTIFY_CHANGE_SECURITY, | ||
97 | 70 | FILE_NOTIFY_CHANGE_LAST_ACCESS, | ||
98 | 71 | ) | ||
104 | 72 | 62 | ||
105 | 73 | #create a rever mapping to use it in the tests. | 63 | #create a rever mapping to use it in the tests. |
109 | 74 | REVERSE_WINDOWS_ACTIONS = {} | 64 | REVERSE_OS_ACTIONS = {} |
110 | 75 | for key, value in ACTIONS.iteritems(): | 65 | for key, value in ACTIONS.items(): |
111 | 76 | REVERSE_WINDOWS_ACTIONS[value] = key | 66 | REVERSE_OS_ACTIONS[value] = key |
112 | 67 | |||
113 | 68 | |||
114 | 69 | class FakeEventsProcessor(object): | ||
115 | 70 | |||
116 | 71 | """Handle fake events creation and processing.""" | ||
117 | 72 | |||
118 | 73 | def create_fake_event(self): | ||
119 | 74 | """Create a fake filesystem event.""" | ||
120 | 75 | raise NotImplementedError | ||
121 | 76 | |||
122 | 77 | def custom_process_events(self): | ||
123 | 78 | """Process a fake event.""" | ||
124 | 79 | raise NotImplementedError | ||
125 | 77 | 80 | ||
126 | 78 | 81 | ||
127 | 79 | class FakeException(Exception): | 82 | class FakeException(Exception): |
128 | @@ -127,32 +130,17 @@ | |||
129 | 127 | @defer.inlineCallbacks | 130 | @defer.inlineCallbacks |
130 | 128 | def setUp(self): | 131 | def setUp(self): |
131 | 129 | yield super(TestWatch, self).setUp() | 132 | yield super(TestWatch, self).setUp() |
132 | 133 | self.path = '' | ||
133 | 134 | self.invalid_path = '' | ||
134 | 135 | self.common_path = '' | ||
135 | 130 | self.basedir = self.mktemp('test_root') | 136 | self.basedir = self.mktemp('test_root') |
143 | 131 | self.mask = FILE_NOTIFY_CHANGE_FILE_NAME | \ | 137 | self.mask = None |
137 | 132 | FILE_NOTIFY_CHANGE_DIR_NAME | \ | ||
138 | 133 | FILE_NOTIFY_CHANGE_ATTRIBUTES | \ | ||
139 | 134 | FILE_NOTIFY_CHANGE_SIZE | \ | ||
140 | 135 | FILE_NOTIFY_CHANGE_LAST_WRITE | \ | ||
141 | 136 | FILE_NOTIFY_CHANGE_SECURITY | \ | ||
142 | 137 | FILE_NOTIFY_CHANGE_LAST_ACCESS | ||
144 | 138 | self.memento = MementoHandler() | 138 | self.memento = MementoHandler() |
145 | 139 | self.memento.setLevel(logging.DEBUG) | 139 | self.memento.setLevel(logging.DEBUG) |
146 | 140 | self.raw_events = [] | 140 | self.raw_events = [] |
147 | 141 | self.paths_checked = [] | 141 | self.paths_checked = [] |
148 | 142 | old_is_dir = Watch._path_is_dir | 142 | old_is_dir = Watch._path_is_dir |
162 | 143 | 143 | self.fake_events_processor = FakeEventsProcessor() | |
150 | 144 | def file_notify_information_wrapper(buf, data): | ||
151 | 145 | """Wrapper that gets the events and adds them to the list.""" | ||
152 | 146 | events = FILE_NOTIFY_INFORMATION(buf, data) | ||
153 | 147 | # we want to append the list because that is what will be logged. | ||
154 | 148 | # If we use extend we wont have the same logging because it will | ||
155 | 149 | # group all events in a single lists which is not what the COM API | ||
156 | 150 | # does. | ||
157 | 151 | str_events = [ | ||
158 | 152 | (filesystem_notifications.ACTIONS_NAMES[action], path) for action, path in | ||
159 | 153 | events] | ||
160 | 154 | self.raw_events.append(str_events) | ||
161 | 155 | return events | ||
163 | 156 | 144 | ||
164 | 157 | def path_is_dir_wrapper(watch, path): | 145 | def path_is_dir_wrapper(watch, path): |
165 | 158 | """Wrapper that gets the checked paths.""" | 146 | """Wrapper that gets the checked paths.""" |
166 | @@ -160,8 +148,6 @@ | |||
167 | 160 | self.paths_checked.append((path, result)) | 148 | self.paths_checked.append((path, result)) |
168 | 161 | return result | 149 | return result |
169 | 162 | 150 | ||
170 | 163 | self.patch(filesystem_notifications, 'FILE_NOTIFY_INFORMATION', | ||
171 | 164 | file_notify_information_wrapper) | ||
172 | 165 | self.patch(Watch, '_path_is_dir', path_is_dir_wrapper) | 151 | self.patch(Watch, '_path_is_dir', path_is_dir_wrapper) |
173 | 166 | 152 | ||
174 | 167 | @defer.inlineCallbacks | 153 | @defer.inlineCallbacks |
175 | @@ -169,7 +155,7 @@ | |||
176 | 169 | """Perform the file operations and returns the recorded events.""" | 155 | """Perform the file operations and returns the recorded events.""" |
177 | 170 | handler = TestCaseHandler(number_events=number_events) | 156 | handler = TestCaseHandler(number_events=number_events) |
178 | 171 | manager = WatchManager(handler) | 157 | manager = WatchManager(handler) |
180 | 172 | yield manager.add_watch(os_helper.get_windows_valid_path(path), mask) | 158 | yield manager.add_watch(get_os_valid_path(path), mask) |
181 | 173 | # change the logger so that we can check the logs if we wanted | 159 | # change the logger so that we can check the logs if we wanted |
182 | 174 | manager._wdm[0].log.addHandler(self.memento) | 160 | manager._wdm[0].log.addHandler(self.memento) |
183 | 175 | # clean logger later | 161 | # clean logger later |
184 | @@ -198,7 +184,7 @@ | |||
185 | 198 | create_file, 1) | 184 | create_file, 1) |
186 | 199 | event = events[0] | 185 | event = events[0] |
187 | 200 | self.assertFalse(event.dir) | 186 | self.assertFalse(event.dir) |
189 | 201 | self.assertEqual(0x100, event.mask) | 187 | self.assertEqual(OP_FLAGS['IN_CREATE'], event.mask) |
190 | 202 | self.assertEqual('IN_CREATE', event.maskname) | 188 | self.assertEqual('IN_CREATE', event.maskname) |
191 | 203 | self.assertEqual(os.path.split(file_name)[1], event.name) | 189 | self.assertEqual(os.path.split(file_name)[1], event.name) |
192 | 204 | self.assertEqual('.', event.path) | 190 | self.assertEqual('.', event.path) |
193 | @@ -218,7 +204,7 @@ | |||
194 | 218 | create_dir, 1) | 204 | create_dir, 1) |
195 | 219 | event = events[0] | 205 | event = events[0] |
196 | 220 | self.assertTrue(event.dir) | 206 | self.assertTrue(event.dir) |
198 | 221 | self.assertEqual(0x40000100, event.mask) | 207 | self.assertEqual(OP_FLAGS['IN_CREATE'] | IS_DIR, event.mask) |
199 | 222 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) | 208 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) |
200 | 223 | self.assertEqual(os.path.split(dir_name)[1], event.name) | 209 | self.assertEqual(os.path.split(dir_name)[1], event.name) |
201 | 224 | self.assertEqual('.', event.path) | 210 | self.assertEqual('.', event.path) |
202 | @@ -240,7 +226,7 @@ | |||
203 | 240 | remove_file, 1) | 226 | remove_file, 1) |
204 | 241 | event = events[0] | 227 | event = events[0] |
205 | 242 | self.assertFalse(event.dir) | 228 | self.assertFalse(event.dir) |
207 | 243 | self.assertEqual(0x200, event.mask) | 229 | self.assertEqual(OP_FLAGS['IN_DELETE'], event.mask) |
208 | 244 | self.assertEqual('IN_DELETE', event.maskname) | 230 | self.assertEqual('IN_DELETE', event.maskname) |
209 | 245 | self.assertEqual(os.path.split(file_name)[1], event.name) | 231 | self.assertEqual(os.path.split(file_name)[1], event.name) |
210 | 246 | self.assertEqual('.', event.path) | 232 | self.assertEqual('.', event.path) |
211 | @@ -262,36 +248,15 @@ | |||
212 | 262 | remove_dir, 1) | 248 | remove_dir, 1) |
213 | 263 | event = events[0] | 249 | event = events[0] |
214 | 264 | self.assertTrue(event.dir) | 250 | self.assertTrue(event.dir) |
216 | 265 | self.assertEqual(0x40000200, event.mask) | 251 | self.assertEqual(OP_FLAGS['IN_DELETE'] | IS_DIR, event.mask) |
217 | 266 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) | 252 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) |
218 | 267 | self.assertEqual('.', event.path) | 253 | self.assertEqual('.', event.path) |
219 | 268 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) | 254 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) |
220 | 269 | self.assertEqual(0, event.wd) | 255 | self.assertEqual(0, event.wd) |
221 | 270 | 256 | ||
222 | 271 | @defer.inlineCallbacks | ||
223 | 272 | def test_file_write(self): | 257 | def test_file_write(self): |
224 | 273 | """Test that the correct event is raised when a file is written.""" | 258 | """Test that the correct event is raised when a file is written.""" |
246 | 274 | file_name = os.path.join(self.basedir, 'test_file_write') | 259 | raise NotImplementedError |
226 | 275 | # create the file before recording | ||
227 | 276 | fd = open(file_name, 'w') | ||
228 | 277 | # clean behind us by removing the file | ||
229 | 278 | self.addCleanup(os.remove, file_name) | ||
230 | 279 | |||
231 | 280 | def write_file(): | ||
232 | 281 | """Action for the test.""" | ||
233 | 282 | fd.write('test') | ||
234 | 283 | fd.close() | ||
235 | 284 | |||
236 | 285 | events = yield self._perform_operations(self.basedir, self.mask, | ||
237 | 286 | write_file, 1) | ||
238 | 287 | event = events[0] | ||
239 | 288 | self.assertFalse(event.dir) | ||
240 | 289 | self.assertEqual(0x2, event.mask) | ||
241 | 290 | self.assertEqual('IN_MODIFY', event.maskname) | ||
242 | 291 | self.assertEqual(os.path.split(file_name)[1], event.name) | ||
243 | 292 | self.assertEqual('.', event.path) | ||
244 | 293 | self.assertEqual(os.path.join(self.basedir, file_name), event.pathname) | ||
245 | 294 | self.assertEqual(0, event.wd) | ||
247 | 295 | 260 | ||
248 | 296 | @defer.inlineCallbacks | 261 | @defer.inlineCallbacks |
249 | 297 | def test_file_moved_to_watched_dir_same_watcher(self): | 262 | def test_file_moved_to_watched_dir_same_watcher(self): |
250 | @@ -313,7 +278,7 @@ | |||
251 | 313 | move_to_event = events[1] | 278 | move_to_event = events[1] |
252 | 314 | # first test the move from | 279 | # first test the move from |
253 | 315 | self.assertFalse(move_from_event.dir) | 280 | self.assertFalse(move_from_event.dir) |
255 | 316 | self.assertEqual(0x40, move_from_event.mask) | 281 | self.assertEqual(OP_FLAGS['IN_MOVED_FROM'], move_from_event.mask) |
256 | 317 | self.assertEqual('IN_MOVED_FROM', move_from_event.maskname) | 282 | self.assertEqual('IN_MOVED_FROM', move_from_event.maskname) |
257 | 318 | self.assertEqual(os.path.split(from_file_name)[1], | 283 | self.assertEqual(os.path.split(from_file_name)[1], |
258 | 319 | move_from_event.name) | 284 | move_from_event.name) |
259 | @@ -323,7 +288,7 @@ | |||
260 | 323 | self.assertEqual(0, move_from_event.wd) | 288 | self.assertEqual(0, move_from_event.wd) |
261 | 324 | # test the move to | 289 | # test the move to |
262 | 325 | self.assertFalse(move_to_event.dir) | 290 | self.assertFalse(move_to_event.dir) |
264 | 326 | self.assertEqual(0x80, move_to_event.mask) | 291 | self.assertEqual(OP_FLAGS['IN_MOVED_TO'], move_to_event.mask) |
265 | 327 | self.assertEqual('IN_MOVED_TO', move_to_event.maskname) | 292 | self.assertEqual('IN_MOVED_TO', move_to_event.maskname) |
266 | 328 | self.assertEqual(os.path.split(to_file_name)[1], move_to_event.name) | 293 | self.assertEqual(os.path.split(to_file_name)[1], move_to_event.name) |
267 | 329 | self.assertEqual('.', move_to_event.path) | 294 | self.assertEqual('.', move_to_event.path) |
268 | @@ -354,7 +319,7 @@ | |||
269 | 354 | move_file, 1) | 319 | move_file, 1) |
270 | 355 | event = events[0] | 320 | event = events[0] |
271 | 356 | self.assertFalse(event.dir) | 321 | self.assertFalse(event.dir) |
273 | 357 | self.assertEqual(0x200, event.mask) | 322 | self.assertEqual(OP_FLAGS['IN_DELETE'], event.mask) |
274 | 358 | self.assertEqual('IN_DELETE', event.maskname) | 323 | self.assertEqual('IN_DELETE', event.maskname) |
275 | 359 | self.assertEqual(os.path.split(from_file_name)[1], event.name) | 324 | self.assertEqual(os.path.split(from_file_name)[1], event.name) |
276 | 360 | self.assertEqual('.', event.path) | 325 | self.assertEqual('.', event.path) |
277 | @@ -382,7 +347,7 @@ | |||
278 | 382 | move_files, 1) | 347 | move_files, 1) |
279 | 383 | event = events[0] | 348 | event = events[0] |
280 | 384 | self.assertFalse(event.dir) | 349 | self.assertFalse(event.dir) |
282 | 385 | self.assertEqual(0x100, event.mask) | 350 | self.assertEqual(OP_FLAGS['IN_CREATE'], event.mask) |
283 | 386 | self.assertEqual('IN_CREATE', event.maskname) | 351 | self.assertEqual('IN_CREATE', event.maskname) |
284 | 387 | self.assertEqual(os.path.split(to_file_name)[1], event.name) | 352 | self.assertEqual(os.path.split(to_file_name)[1], event.name) |
285 | 388 | self.assertEqual('.', event.path) | 353 | self.assertEqual('.', event.path) |
286 | @@ -409,7 +374,8 @@ | |||
287 | 409 | move_to_event = events[1] | 374 | move_to_event = events[1] |
288 | 410 | # first test the move from | 375 | # first test the move from |
289 | 411 | self.assertTrue(move_from_event.dir) | 376 | self.assertTrue(move_from_event.dir) |
291 | 412 | self.assertEqual(0x40000040, move_from_event.mask) | 377 | self.assertEqual(OP_FLAGS['IN_MOVED_FROM'] | IS_DIR, |
292 | 378 | move_from_event.mask) | ||
293 | 413 | self.assertEqual('IN_MOVED_FROM|IN_ISDIR', move_from_event.maskname) | 379 | self.assertEqual('IN_MOVED_FROM|IN_ISDIR', move_from_event.maskname) |
294 | 414 | self.assertEqual(os.path.split(from_dir_name)[1], move_from_event.name) | 380 | self.assertEqual(os.path.split(from_dir_name)[1], move_from_event.name) |
295 | 415 | self.assertEqual('.', move_from_event.path) | 381 | self.assertEqual('.', move_from_event.path) |
296 | @@ -418,7 +384,7 @@ | |||
297 | 418 | self.assertEqual(0, move_from_event.wd) | 384 | self.assertEqual(0, move_from_event.wd) |
298 | 419 | # test the move to | 385 | # test the move to |
299 | 420 | self.assertTrue(move_to_event.dir) | 386 | self.assertTrue(move_to_event.dir) |
301 | 421 | self.assertEqual(0x40000080, move_to_event.mask) | 387 | self.assertEqual(OP_FLAGS['IN_MOVED_TO'] | IS_DIR, move_to_event.mask) |
302 | 422 | self.assertEqual('IN_MOVED_TO|IN_ISDIR', move_to_event.maskname) | 388 | self.assertEqual('IN_MOVED_TO|IN_ISDIR', move_to_event.maskname) |
303 | 423 | self.assertEqual(os.path.split(to_dir_name)[1], move_to_event.name) | 389 | self.assertEqual(os.path.split(to_dir_name)[1], move_to_event.name) |
304 | 424 | self.assertEqual('.', move_to_event.path) | 390 | self.assertEqual('.', move_to_event.path) |
305 | @@ -447,7 +413,7 @@ | |||
306 | 447 | move_dir, 1) | 413 | move_dir, 1) |
307 | 448 | event = events[0] | 414 | event = events[0] |
308 | 449 | self.assertTrue(event.dir) | 415 | self.assertTrue(event.dir) |
310 | 450 | self.assertEqual(0x40000200, event.mask) | 416 | self.assertEqual(OP_FLAGS['IN_DELETE'] | IS_DIR, event.mask) |
311 | 451 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) | 417 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) |
312 | 452 | self.assertEqual('.', event.path) | 418 | self.assertEqual('.', event.path) |
313 | 453 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) | 419 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) |
314 | @@ -471,7 +437,7 @@ | |||
315 | 471 | move_dir, 1) | 437 | move_dir, 1) |
316 | 472 | event = events[0] | 438 | event = events[0] |
317 | 473 | self.assertTrue(event.dir) | 439 | self.assertTrue(event.dir) |
319 | 474 | self.assertEqual(0x40000100, event.mask) | 440 | self.assertEqual(OP_FLAGS['IN_CREATE'] | IS_DIR, event.mask) |
320 | 475 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) | 441 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) |
321 | 476 | self.assertEqual(os.path.split(from_dir_name)[1], event.name) | 442 | self.assertEqual(os.path.split(from_dir_name)[1], event.name) |
322 | 477 | self.assertEqual('.', event.path) | 443 | self.assertEqual('.', event.path) |
323 | @@ -484,8 +450,9 @@ | |||
324 | 484 | handler = TestCaseHandler(number_events=0) | 450 | handler = TestCaseHandler(number_events=0) |
325 | 485 | manager = WatchManager(handler) | 451 | manager = WatchManager(handler) |
326 | 486 | # add a watch that will always exclude all actions | 452 | # add a watch that will always exclude all actions |
329 | 487 | manager.add_watch(os_helper.get_windows_valid_path(self.basedir), | 453 | manager.add_watch(get_os_valid_path(self.basedir), |
330 | 488 | self.mask, exclude_filter=lambda x: True) | 454 | self.mask, auto_add=True, |
331 | 455 | exclude_filter=lambda x: True) | ||
332 | 489 | # execution the actions | 456 | # execution the actions |
333 | 490 | file_name = os.path.join(self.basedir, 'test_file_create') | 457 | file_name = os.path.join(self.basedir, 'test_file_create') |
334 | 491 | open(file_name, 'w').close() | 458 | open(file_name, 'w').close() |
335 | @@ -502,16 +469,18 @@ | |||
336 | 502 | """Memorize the processed events.""" | 469 | """Memorize the processed events.""" |
337 | 503 | events.append(event) | 470 | events.append(event) |
338 | 504 | 471 | ||
343 | 505 | path = u'\\\\?\\C:\\path' # a valid windows path | 472 | child = 'child' |
344 | 506 | child = u'child' | 473 | watch = Watch(1, self.path, None) |
345 | 507 | watch = Watch(1, path, fake_processor) | 474 | watch.ignore_path(os.path.join(self.path, child)) |
342 | 508 | watch.ignore_path(os.path.join(path, child)) | ||
346 | 509 | paths_to_ignore = [] | 475 | paths_to_ignore = [] |
347 | 510 | for file_name in 'abcdef': | 476 | for file_name in 'abcdef': |
349 | 511 | paths_to_ignore.append((1, os.path.join(child, file_name))) | 477 | paths_to_ignore.append( |
350 | 478 | self.fake_events_processor.create_fake_event( | ||
351 | 479 | os.path.join(child, file_name))) | ||
352 | 512 | # ensure that the watch is watching | 480 | # ensure that the watch is watching |
355 | 513 | watch._watching = True | 481 | watch.platform_watch.watching = True |
356 | 514 | watch.platform_watch._process_events(paths_to_ignore) | 482 | self.fake_events_processor.custom_process_events( |
357 | 483 | watch, paths_to_ignore) | ||
358 | 515 | self.assertEqual(0, len(events), | 484 | self.assertEqual(0, len(events), |
359 | 516 | 'All events should have been ignored.') | 485 | 'All events should have been ignored.') |
360 | 517 | 486 | ||
361 | @@ -523,17 +492,18 @@ | |||
362 | 523 | """Memorize the processed events.""" | 492 | """Memorize the processed events.""" |
363 | 524 | events.append(event) | 493 | events.append(event) |
364 | 525 | 494 | ||
369 | 526 | path = u'\\\\?\\C:\\path' # a valid windows path | 495 | child = 'child' |
370 | 527 | child = u'child' | 496 | watch = Watch(1, self.path, fake_processor) |
371 | 528 | watch = Watch(1, path, fake_processor) | 497 | watch.ignore_path(os.path.join(self.path, child)) |
368 | 529 | watch.ignore_path(os.path.join(path, child)) | ||
372 | 530 | paths_not_to_ignore = [] | 498 | paths_not_to_ignore = [] |
373 | 531 | for file_name in 'abcdef': | 499 | for file_name in 'abcdef': |
376 | 532 | paths_not_to_ignore.append((1, os.path.join( | 500 | event = self.fake_events_processor.create_fake_event( |
377 | 533 | child + file_name, file_name))) | 501 | os.path.join(child + file_name, file_name)) |
378 | 502 | paths_not_to_ignore.append(event) | ||
379 | 534 | # ensure that the watch is watching | 503 | # ensure that the watch is watching |
380 | 535 | watch.platform_watch.watching = True | 504 | watch.platform_watch.watching = True |
382 | 536 | watch.platform_watch._process_events(paths_not_to_ignore) | 505 | self.fake_events_processor.custom_process_events( |
383 | 506 | watch, paths_not_to_ignore) | ||
384 | 537 | self.assertEqual(len(paths_not_to_ignore), len(events), | 507 | self.assertEqual(len(paths_not_to_ignore), len(events), |
385 | 538 | 'No events should have been ignored.') | 508 | 'No events should have been ignored.') |
386 | 539 | 509 | ||
387 | @@ -545,21 +515,22 @@ | |||
388 | 545 | """Memorize the processed events.""" | 515 | """Memorize the processed events.""" |
389 | 546 | events.append(event.pathname) | 516 | events.append(event.pathname) |
390 | 547 | 517 | ||
395 | 548 | child = u'child' | 518 | child = 'child' |
396 | 549 | path = u'\\\\?\\C:\\path\\' # a valid windows path | 519 | watch = Watch(1, self.path, fake_processor) |
397 | 550 | watch = Watch(1, path, fake_processor) | 520 | watch.ignore_path(os.path.join(self.path, child)) |
394 | 551 | watch.ignore_path(os.path.join(path, child)) | ||
398 | 552 | paths_not_to_ignore = [] | 521 | paths_not_to_ignore = [] |
399 | 553 | paths_to_ignore = [] | 522 | paths_to_ignore = [] |
400 | 554 | expected_events = [] | 523 | expected_events = [] |
401 | 555 | for file_name in 'abcdef': | 524 | for file_name in 'abcdef': |
402 | 556 | valid = os.path.join(child + file_name, file_name) | 525 | valid = os.path.join(child + file_name, file_name) |
403 | 557 | paths_to_ignore.append((1, os.path.join(child, file_name))) | 526 | paths_to_ignore.append((1, os.path.join(child, file_name))) |
406 | 558 | paths_not_to_ignore.append((1, valid)) | 527 | paths_not_to_ignore.append( |
407 | 559 | expected_events.append(os.path.join('C:\\path', valid)) | 528 | self.fake_events_processor.create_fake_event(valid)) |
408 | 529 | expected_events.append(os.path.join(self.common_path, valid)) | ||
409 | 560 | # ensure that the watch is watching | 530 | # ensure that the watch is watching |
410 | 561 | watch.platform_watch.watching = True | 531 | watch.platform_watch.watching = True |
412 | 562 | watch.platform_watch._process_events(paths_not_to_ignore) | 532 | self.fake_events_processor.custom_process_events( |
413 | 533 | watch, paths_not_to_ignore) | ||
414 | 563 | self.assertEqual(len(paths_not_to_ignore), len(events), | 534 | self.assertEqual(len(paths_not_to_ignore), len(events), |
415 | 564 | 'Wrong number of events ignored.') | 535 | 'Wrong number of events ignored.') |
416 | 565 | self.assertTrue(all([event in expected_events for event in events]), | 536 | self.assertTrue(all([event in expected_events for event in events]), |
417 | @@ -573,17 +544,19 @@ | |||
418 | 573 | """Memorize the processed events.""" | 544 | """Memorize the processed events.""" |
419 | 574 | events.append(event) | 545 | events.append(event) |
420 | 575 | 546 | ||
426 | 576 | path = u'\\\\?\\C:\\path' # a valid windows path | 547 | child = 'child' |
427 | 577 | child = u'child' | 548 | watch = Watch(1, self.path, fake_processor) |
428 | 578 | watch = Watch(1, path, fake_processor) | 549 | watch.ignore_path(os.path.join(self.path, child)) |
429 | 579 | watch.ignore_path(os.path.join(path, child)) | 550 | watch.remove_ignored_path(os.path.join(self.path, child)) |
425 | 580 | watch.remove_ignored_path(os.path.join(path, child)) | ||
430 | 581 | paths_not_to_ignore = [] | 551 | paths_not_to_ignore = [] |
431 | 582 | for file_name in 'abcdef': | 552 | for file_name in 'abcdef': |
433 | 583 | paths_not_to_ignore.append((1, os.path.join(child, file_name))) | 553 | event = self.fake_events_processor.create_fake_event( |
434 | 554 | os.path.join(child, file_name)) | ||
435 | 555 | paths_not_to_ignore.append(event) | ||
436 | 584 | # ensure that the watch is watching | 556 | # ensure that the watch is watching |
437 | 585 | watch.platform_watch.watching = True | 557 | watch.platform_watch.watching = True |
439 | 586 | watch.platform_watch._process_events(paths_not_to_ignore) | 558 | self.fake_events_processor.custom_process_events( |
440 | 559 | watch, paths_not_to_ignore) | ||
441 | 587 | self.assertEqual(len(paths_not_to_ignore), len(events), | 560 | self.assertEqual(len(paths_not_to_ignore), len(events), |
442 | 588 | 'All events should have been accepted.') | 561 | 'All events should have been accepted.') |
443 | 589 | 562 | ||
444 | @@ -595,190 +568,98 @@ | |||
445 | 595 | """Memorize the processed events.""" | 568 | """Memorize the processed events.""" |
446 | 596 | events.append(event.pathname) | 569 | events.append(event.pathname) |
447 | 597 | 570 | ||
455 | 598 | path = u'\\\\?\\C:\\path' # a valid windows path | 571 | child_a = 'childa' |
456 | 599 | child_a = u'childa' | 572 | child_b = 'childb' |
457 | 600 | child_b = u'childb' | 573 | watch = Watch(1, self.path, fake_processor) |
458 | 601 | watch = Watch(1, path, fake_processor) | 574 | watch.ignore_path(os.path.join(self.path, child_a)) |
459 | 602 | watch.ignore_path(os.path.join(path, child_a)) | 575 | watch.ignore_path(os.path.join(self.path, child_b)) |
460 | 603 | watch.ignore_path(os.path.join(path, child_b)) | 576 | watch.remove_ignored_path(os.path.join(self.path, child_a)) |
454 | 604 | watch.remove_ignored_path(os.path.join(path, child_a)) | ||
461 | 605 | paths_to_ignore = [] | 577 | paths_to_ignore = [] |
462 | 606 | paths_not_to_ignore = [] | 578 | paths_not_to_ignore = [] |
463 | 607 | expected_events = [] | 579 | expected_events = [] |
464 | 608 | for file_name in 'abcdef': | 580 | for file_name in 'abcdef': |
465 | 609 | paths_to_ignore.append((1, os.path.join(child_b, file_name))) | 581 | paths_to_ignore.append((1, os.path.join(child_b, file_name))) |
466 | 610 | valid = os.path.join(child_a, file_name) | 582 | valid = os.path.join(child_a, file_name) |
469 | 611 | paths_not_to_ignore.append((1, valid)) | 583 | event = self.fake_events_processor.create_fake_event(valid) |
470 | 612 | expected_events.append(os.path.join('C:\\path', valid)) | 584 | paths_not_to_ignore.append(event) |
471 | 585 | expected_events.append(os.path.join(self.common_path, valid)) | ||
472 | 613 | # ensure that the watch is watching | 586 | # ensure that the watch is watching |
473 | 614 | watch.platform_watch.watching = True | 587 | watch.platform_watch.watching = True |
475 | 615 | watch.platform_watch._process_events(paths_not_to_ignore) | 588 | self.fake_events_processor.custom_process_events( |
476 | 589 | watch, paths_not_to_ignore) | ||
477 | 616 | self.assertEqual(len(paths_not_to_ignore), len(events), | 590 | self.assertEqual(len(paths_not_to_ignore), len(events), |
478 | 617 | 'All events should have been accepted.') | 591 | 'All events should have been accepted.') |
479 | 618 | self.assertTrue(all([event in expected_events for event in events]), | 592 | self.assertTrue(all([event in expected_events for event in events]), |
480 | 619 | 'Paths ignored that should have not been ignored.') | 593 | 'Paths ignored that should have not been ignored.') |
481 | 620 | 594 | ||
482 | 621 | @defer.inlineCallbacks | ||
483 | 622 | def test_call_deferred_already_called(self): | ||
484 | 623 | """Test that the function is not called.""" | ||
485 | 624 | method_args = [] | ||
486 | 625 | |||
487 | 626 | def fake_call(*args, **kwargs): | ||
488 | 627 | """Execute the call.""" | ||
489 | 628 | method_args.append((args, kwargs),) | ||
490 | 629 | |||
491 | 630 | path = u'\\\\?\\C:\\path' # a valid windows path | ||
492 | 631 | watch = Watch(1, path, None) | ||
493 | 632 | yield watch.platform_watch._watch_started_deferred.callback(True) | ||
494 | 633 | watch.platform_watch._call_deferred(fake_call, None) | ||
495 | 634 | self.assertEqual(0, len(method_args)) | ||
496 | 635 | |||
497 | 636 | def test_call_deferred_not_called(self): | ||
498 | 637 | """Test that is indeed called.""" | ||
499 | 638 | method_args = [] | ||
500 | 639 | |||
501 | 640 | def fake_call(*args, **kwargs): | ||
502 | 641 | """Execute the call.""" | ||
503 | 642 | method_args.append((args, kwargs),) | ||
504 | 643 | |||
505 | 644 | path = u'\\\\?\\C:\\path' # a valid windows path | ||
506 | 645 | watch = Watch(1, path, None) | ||
507 | 646 | watch.platform_watch._call_deferred(fake_call, None) | ||
508 | 647 | self.assertEqual(1, len(method_args)) | ||
509 | 648 | |||
510 | 649 | def test_started_property(self): | ||
511 | 650 | """Test that the started property returns the started deferred.""" | ||
512 | 651 | path = u'\\\\?\\C:\\path' # a valid windows path | ||
513 | 652 | watch = Watch(1, path, None) | ||
514 | 653 | self.assertEqual(watch.started, watch.platform_watch._watch_started_deferred) | ||
515 | 654 | |||
516 | 655 | def test_stopped_property(self): | ||
517 | 656 | """Test that the stopped property returns the stopped deferred.""" | ||
518 | 657 | path = u'\\\\?\\C:\\path' # a valid windows path | ||
519 | 658 | watch = Watch(1, path, None) | ||
520 | 659 | self.assertEqual(watch.stopped, watch.platform_watch._watch_stopped_deferred) | ||
521 | 660 | |||
522 | 661 | def random_error(self, *args): | 595 | def random_error(self, *args): |
523 | 662 | """Throw a fake exception.""" | 596 | """Throw a fake exception.""" |
524 | 663 | raise FakeException() | 597 | raise FakeException() |
525 | 664 | 598 | ||
526 | 665 | @defer.inlineCallbacks | ||
527 | 666 | def test_start_watching_fails_early_in_thread(self): | ||
528 | 667 | """An early failure inside the thread should errback the deferred.""" | ||
529 | 668 | test_path = self.mktemp("test_directory") | ||
530 | 669 | self.patch(filesystem_notifications, "CreateFileW", self.random_error) | ||
531 | 670 | watch = Watch(1, test_path, None) | ||
532 | 671 | d = watch.start_watching() | ||
533 | 672 | yield self.assertFailure(d, FakeException) | ||
534 | 673 | |||
535 | 674 | @defer.inlineCallbacks | ||
536 | 675 | def test_start_watching_fails_late_in_thread(self): | ||
537 | 676 | """A late failure inside the thread should errback the deferred.""" | ||
538 | 677 | test_path = self.mktemp("test_directory") | ||
539 | 678 | self.patch(filesystem_notifications, "ReadDirectoryChangesW", | ||
540 | 679 | self.random_error) | ||
541 | 680 | watch = Watch(1, test_path, None) | ||
542 | 681 | d = watch.start_watching() | ||
543 | 682 | yield self.assertFailure(d, FakeException) | ||
544 | 683 | |||
545 | 684 | @defer.inlineCallbacks | ||
546 | 685 | def test_close_handle_is_called_on_error(self): | ||
547 | 686 | """CloseHandle is called when there's an error in the watch thread.""" | ||
548 | 687 | test_path = self.mktemp("test_directory") | ||
549 | 688 | close_called = [] | ||
550 | 689 | self.patch(filesystem_notifications, "CreateFileW", lambda *_: None) | ||
551 | 690 | self.patch(filesystem_notifications, "CloseHandle", | ||
552 | 691 | close_called.append) | ||
553 | 692 | self.patch(filesystem_notifications, "ReadDirectoryChangesW", | ||
554 | 693 | self.random_error) | ||
555 | 694 | watch = Watch(1, test_path, None) | ||
556 | 695 | d = watch.start_watching() | ||
557 | 696 | yield self.assertFailure(d, FakeException) | ||
558 | 697 | self.assertEqual(len(close_called), 1) | ||
559 | 698 | yield watch.stop_watching() | ||
560 | 699 | |||
561 | 700 | @defer.inlineCallbacks | ||
562 | 701 | def test_stop_watching_fired_when_watch_thread_finishes(self): | ||
563 | 702 | """The deferred returned is fired when the watch thread finishes.""" | ||
564 | 703 | test_path = self.mktemp("another_test_directory") | ||
565 | 704 | watch = Watch(1, test_path, None) | ||
566 | 705 | yield watch.start_watching() | ||
567 | 706 | self.assertNotEqual(watch.platform_watch._watch_handle, None) | ||
568 | 707 | yield watch.stop_watching() | ||
569 | 708 | self.assertEqual(watch.platform_watch._watch_handle, None) | ||
570 | 709 | |||
571 | 710 | def test_is_path_dir_missing_no_subdir(self): | 599 | def test_is_path_dir_missing_no_subdir(self): |
572 | 711 | """Test when the path does not exist and is no a subdir.""" | 600 | """Test when the path does not exist and is no a subdir.""" |
573 | 712 | path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
574 | 713 | test_path = self.mktemp("test_directory") | 601 | test_path = self.mktemp("test_directory") |
575 | 714 | self.patch(os.path, 'exists', lambda path: False) | 602 | self.patch(os.path, 'exists', lambda path: False) |
576 | 715 | watch = Watch(1, test_path, None) | 603 | watch = Watch(1, test_path, None) |
578 | 716 | self.assertFalse(watch._path_is_dir(path)) | 604 | self.assertFalse(watch._path_is_dir(self.invalid_path)) |
579 | 717 | 605 | ||
580 | 718 | def test_is_path_dir_missing_in_subdir(self): | 606 | def test_is_path_dir_missing_in_subdir(self): |
581 | 719 | """Test when the path does not exist and is a subdir.""" | 607 | """Test when the path does not exist and is a subdir.""" |
582 | 720 | path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
583 | 721 | test_path = self.mktemp("test_directory") | 608 | test_path = self.mktemp("test_directory") |
584 | 722 | self.patch(os.path, 'exists', lambda path: False) | 609 | self.patch(os.path, 'exists', lambda path: False) |
585 | 723 | watch = Watch(1, test_path, None) | 610 | watch = Watch(1, test_path, None) |
588 | 724 | watch._subdirs.add(path) | 611 | watch._subdirs.add(self.invalid_path) |
589 | 725 | self.assertTrue(watch._path_is_dir(path)) | 612 | self.assertTrue(watch._path_is_dir(self.invalid_path)) |
590 | 726 | 613 | ||
591 | 727 | def test_is_path_dir_present_is_dir(self): | 614 | def test_is_path_dir_present_is_dir(self): |
592 | 728 | """Test when the path is present and is dir.""" | 615 | """Test when the path is present and is dir.""" |
593 | 729 | path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
594 | 730 | test_path = self.mktemp("test_directory") | 616 | test_path = self.mktemp("test_directory") |
595 | 731 | self.patch(os.path, 'exists', lambda path: True) | 617 | self.patch(os.path, 'exists', lambda path: True) |
596 | 732 | self.patch(os.path, 'isdir', lambda path: True) | 618 | self.patch(os.path, 'isdir', lambda path: True) |
597 | 733 | watch = Watch(1, test_path, None) | 619 | watch = Watch(1, test_path, None) |
600 | 734 | watch._subdirs.add(path) | 620 | watch._subdirs.add(self.invalid_path) |
601 | 735 | self.assertTrue(watch._path_is_dir(path)) | 621 | self.assertTrue(watch._path_is_dir(self.invalid_path)) |
602 | 736 | 622 | ||
603 | 737 | def test_is_path_dir_present_no_dir(self): | 623 | def test_is_path_dir_present_no_dir(self): |
604 | 738 | """Test when the path is present but not a dir.""" | 624 | """Test when the path is present but not a dir.""" |
605 | 739 | path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
606 | 740 | test_path = self.mktemp("test_directory") | 625 | test_path = self.mktemp("test_directory") |
607 | 741 | self.patch(os.path, 'exists', lambda path: True) | 626 | self.patch(os.path, 'exists', lambda path: True) |
608 | 742 | self.patch(os.path, 'isdir', lambda path: False) | 627 | self.patch(os.path, 'isdir', lambda path: False) |
609 | 743 | watch = Watch(1, test_path, None) | 628 | watch = Watch(1, test_path, None) |
612 | 744 | watch._subdirs.add(path) | 629 | watch._subdirs.add(self.invalid_path) |
613 | 745 | self.assertFalse(watch._path_is_dir(path)) | 630 | self.assertFalse(watch._path_is_dir(self.invalid_path)) |
614 | 746 | 631 | ||
615 | 747 | def test_update_subdirs_create_not_present(self): | 632 | def test_update_subdirs_create_not_present(self): |
616 | 748 | """Test when we update on a create event and not present.""" | 633 | """Test when we update on a create event and not present.""" |
617 | 749 | path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
618 | 750 | test_path = self.mktemp("test_directory") | 634 | test_path = self.mktemp("test_directory") |
619 | 751 | watch = Watch(1, test_path, None) | 635 | watch = Watch(1, test_path, None) |
622 | 752 | watch._update_subdirs(path, REVERSE_WINDOWS_ACTIONS[IN_CREATE]) | 636 | watch._update_subdirs(self.invalid_path, REVERSE_OS_ACTIONS[IN_CREATE]) |
623 | 753 | self.assertTrue(path in watch._subdirs) | 637 | self.assertTrue(self.invalid_path in watch._subdirs) |
624 | 754 | 638 | ||
625 | 755 | def test_update_subdirs_create_present(self): | 639 | def test_update_subdirs_create_present(self): |
626 | 756 | """Test when we update on a create event and is present.""" | 640 | """Test when we update on a create event and is present.""" |
627 | 757 | path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
628 | 758 | test_path = self.mktemp("test_directory") | 641 | test_path = self.mktemp("test_directory") |
629 | 759 | watch = Watch(1, test_path, None) | 642 | watch = Watch(1, test_path, None) |
631 | 760 | watch._subdirs.add(path) | 643 | watch._subdirs.add(self.invalid_path) |
632 | 761 | old_length = len(watch._subdirs) | 644 | old_length = len(watch._subdirs) |
635 | 762 | watch._update_subdirs(path, REVERSE_WINDOWS_ACTIONS[IN_CREATE]) | 645 | watch._update_subdirs(self.invalid_path, REVERSE_OS_ACTIONS[IN_CREATE]) |
636 | 763 | self.assertTrue(path in watch._subdirs) | 646 | self.assertTrue(self.invalid_path in watch._subdirs) |
637 | 764 | self.assertEqual(old_length, len(watch._subdirs)) | 647 | self.assertEqual(old_length, len(watch._subdirs)) |
638 | 765 | 648 | ||
639 | 766 | def test_update_subdirs_delete_not_present(self): | 649 | def test_update_subdirs_delete_not_present(self): |
640 | 767 | """Test when we delete and is not present.""" | 650 | """Test when we delete and is not present.""" |
641 | 768 | path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
642 | 769 | test_path = self.mktemp("test_directory") | 651 | test_path = self.mktemp("test_directory") |
643 | 770 | watch = Watch(1, test_path, None) | 652 | watch = Watch(1, test_path, None) |
646 | 771 | watch._update_subdirs(path, REVERSE_WINDOWS_ACTIONS[IN_DELETE]) | 653 | watch._update_subdirs(self.invalid_path, REVERSE_OS_ACTIONS[IN_DELETE]) |
647 | 772 | self.assertTrue(path not in watch._subdirs) | 654 | self.assertTrue(self.invalid_path not in watch._subdirs) |
648 | 773 | 655 | ||
649 | 774 | def test_update_subdirs_delete_present(self): | 656 | def test_update_subdirs_delete_present(self): |
650 | 775 | """Test when we delete and is present.""" | 657 | """Test when we delete and is present.""" |
651 | 776 | path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
652 | 777 | test_path = self.mktemp("test_directory") | 658 | test_path = self.mktemp("test_directory") |
653 | 778 | watch = Watch(1, test_path, None) | 659 | watch = Watch(1, test_path, None) |
657 | 779 | watch._subdirs.add(path) | 660 | watch._subdirs.add(self.invalid_path) |
658 | 780 | watch._update_subdirs(path, REVERSE_WINDOWS_ACTIONS[IN_DELETE]) | 661 | watch._update_subdirs(self.invalid_path, REVERSE_OS_ACTIONS[IN_DELETE]) |
659 | 781 | self.assertTrue(path not in watch._subdirs) | 662 | self.assertTrue(self.invalid_path not in watch._subdirs) |
660 | 782 | 663 | ||
661 | 783 | 664 | ||
662 | 784 | class TestWatchManager(BaseTwistedTestCase): | 665 | class TestWatchManager(BaseTwistedTestCase): |
663 | @@ -788,11 +669,8 @@ | |||
664 | 788 | def setUp(self): | 669 | def setUp(self): |
665 | 789 | """Set each of the tests.""" | 670 | """Set each of the tests.""" |
666 | 790 | yield super(TestWatchManager, self).setUp() | 671 | yield super(TestWatchManager, self).setUp() |
667 | 791 | self.parent_path = u'\\\\?\\C:\\' # a valid windows path | ||
668 | 792 | self.path = self.parent_path + u'path' | ||
669 | 793 | self.watch = Watch(1, self.path, None) | ||
670 | 794 | self.manager = WatchManager(None) | 672 | self.manager = WatchManager(None) |
672 | 795 | self.manager._wdm = {1: self.watch} | 673 | self.fake_events_processor = FakeEventsProcessor() |
673 | 796 | 674 | ||
674 | 797 | @defer.inlineCallbacks | 675 | @defer.inlineCallbacks |
675 | 798 | def test_stop(self): | 676 | def test_stop(self): |
676 | @@ -808,25 +686,9 @@ | |||
677 | 808 | yield self.manager.stop() | 686 | yield self.manager.stop() |
678 | 809 | self.assertTrue(self.was_called, 'The watch stop should be called.') | 687 | self.assertTrue(self.was_called, 'The watch stop should be called.') |
679 | 810 | 688 | ||
680 | 811 | @defer.inlineCallbacks | ||
681 | 812 | def test_stop_multiple(self): | 689 | def test_stop_multiple(self): |
682 | 813 | """Test that stop is fired when *all* watches have stopped.""" | 690 | """Test that stop is fired when *all* watches have stopped.""" |
699 | 814 | 691 | raise NotImplementedError | |
684 | 815 | def fake_stop_watching(watch): | ||
685 | 816 | """Another fake stop watch.""" | ||
686 | 817 | return watch.stopped | ||
687 | 818 | |||
688 | 819 | self.patch(Watch, "stop_watching", fake_stop_watching) | ||
689 | 820 | second_path = self.parent_path + u"second_path" | ||
690 | 821 | second_watch = Watch(2, second_path, None) | ||
691 | 822 | self.manager._wdm[2] = second_watch | ||
692 | 823 | d = self.manager.stop() | ||
693 | 824 | self.assertFalse(d.called, "Not fired before all watches end") | ||
694 | 825 | self.watch.stopped.callback(None) | ||
695 | 826 | self.assertFalse(d.called, "Not fired before all watches end") | ||
696 | 827 | second_watch.stopped.callback(None) | ||
697 | 828 | yield d | ||
698 | 829 | self.assertTrue(d.called, "Fired after the watches ended") | ||
700 | 830 | 692 | ||
701 | 831 | def test_get_present_watch(self): | 693 | def test_get_present_watch(self): |
702 | 832 | """Test that we can get a Watch using is wd.""" | 694 | """Test that we can get a Watch using is wd.""" |
703 | @@ -836,7 +698,6 @@ | |||
704 | 836 | """Test that we get an error when trying to get a missing wd.""" | 698 | """Test that we get an error when trying to get a missing wd.""" |
705 | 837 | self.assertRaises(KeyError, self.manager.get_watch, (1,)) | 699 | self.assertRaises(KeyError, self.manager.get_watch, (1,)) |
706 | 838 | 700 | ||
707 | 839 | @defer.inlineCallbacks | ||
708 | 840 | def test_delete_present_watch(self): | 701 | def test_delete_present_watch(self): |
709 | 841 | """Test that we can remove a present watch.""" | 702 | """Test that we can remove a present watch.""" |
710 | 842 | self.was_called = False | 703 | self.was_called = False |
711 | @@ -865,7 +726,6 @@ | |||
712 | 865 | self.manager.add_watch(self.path, mask) | 726 | self.manager.add_watch(self.path, mask) |
713 | 866 | self.assertEqual(1, len(self.manager._wdm)) | 727 | self.assertEqual(1, len(self.manager._wdm)) |
714 | 867 | self.assertTrue(self.was_called, 'The watch start was not called.') | 728 | self.assertTrue(self.was_called, 'The watch start was not called.') |
715 | 868 | self.assertEqual(self.path + os.path.sep, self.manager._wdm[0].path) | ||
716 | 869 | 729 | ||
717 | 870 | def test_get_watch_present_wd(self): | 730 | def test_get_watch_present_wd(self): |
718 | 871 | """Test that the correct path is returned.""" | 731 | """Test that the correct path is returned.""" |
719 | @@ -889,10 +749,8 @@ | |||
720 | 889 | """A watch on an unwatched path returns None.""" | 749 | """A watch on an unwatched path returns None.""" |
721 | 890 | self.assertEqual(None, self.manager.get_wd(self.parent_path)) | 750 | self.assertEqual(None, self.manager.get_wd(self.parent_path)) |
722 | 891 | 751 | ||
723 | 892 | @defer.inlineCallbacks | ||
724 | 893 | def test_rm_present_wd(self): | 752 | def test_rm_present_wd(self): |
725 | 894 | """Test the removal of a present watch.""" | 753 | """Test the removal of a present watch.""" |
726 | 895 | |||
727 | 896 | def fake_stop_watching(): | 754 | def fake_stop_watching(): |
728 | 897 | """Fake stop watch.""" | 755 | """Fake stop watch.""" |
729 | 898 | return defer.succeed(True) | 756 | return defer.succeed(True) |
730 | @@ -913,8 +771,9 @@ | |||
731 | 913 | self.manager.rm_path(self.path) | 771 | self.manager.rm_path(self.path) |
732 | 914 | self.assertEqual(self.watch, self.manager._wdm.get(1)) | 772 | self.assertEqual(self.watch, self.manager._wdm.get(1)) |
733 | 915 | self.watch._watching = True | 773 | self.watch._watching = True |
736 | 916 | self.watch.platform_watch._process_events( | 774 | event = self.fake_events_processor.create_fake_event( |
737 | 917 | [(1, os.path.join(self.path, 'test'))]) | 775 | os.path.join(self.path, 'test')) |
738 | 776 | self.fake_events_processor.custom_process_events(self.watch, [event]) | ||
739 | 918 | self.assertEqual(0, len(events)) | 777 | self.assertEqual(0, len(events)) |
740 | 919 | 778 | ||
741 | 920 | def test_rm_child_path(self): | 779 | def test_rm_child_path(self): |
742 | @@ -926,41 +785,27 @@ | |||
743 | 926 | events.append(event.pathname) | 785 | events.append(event.pathname) |
744 | 927 | 786 | ||
745 | 928 | self.watch._processor = fake_processor | 787 | self.watch._processor = fake_processor |
747 | 929 | child = os.path.join(self.path, u'child') | 788 | child = os.path.join(self.path, 'child') |
748 | 930 | self.manager.rm_path(child) | 789 | self.manager.rm_path(child) |
749 | 931 | self.assertEqual(self.watch, self.manager._wdm[1]) | 790 | self.assertEqual(self.watch, self.manager._wdm[1]) |
750 | 932 | # assert that the correct event is ignored | 791 | # assert that the correct event is ignored |
751 | 933 | self.watch.platform_watch.watching = True | 792 | self.watch.platform_watch.watching = True |
754 | 934 | self.watch.platform_watch._process_events( | 793 | event = self.fake_events_processor.create_fake_event( |
755 | 935 | [(1, os.path.join('child', 'test'))]) | 794 | os.path.join('child', 'test')) |
756 | 795 | self.fake_events_processor.custom_process_events(self.watch, [event]) | ||
757 | 936 | self.assertEqual(0, len(events)) | 796 | self.assertEqual(0, len(events)) |
758 | 937 | # assert that other events are not ignored | 797 | # assert that other events are not ignored |
760 | 938 | self.watch.platform_watch._process_events([(1, 'test')]) | 798 | event2 = self.fake_events_processor.create_fake_event('test') |
761 | 799 | self.fake_events_processor.custom_process_events(self.watch, [event2]) | ||
762 | 939 | self.assertEqual(1, len(events)) | 800 | self.assertEqual(1, len(events)) |
763 | 940 | 801 | ||
764 | 941 | 802 | ||
765 | 942 | class TestWatchManagerAddWatches(BaseTwistedTestCase): | 803 | class TestWatchManagerAddWatches(BaseTwistedTestCase): |
766 | 943 | """Test the watch manager.""" | 804 | """Test the watch manager.""" |
767 | 944 | timeout = 5 | ||
768 | 945 | 805 | ||
769 | 946 | def test_add_watch_twice(self): | 806 | def test_add_watch_twice(self): |
770 | 947 | """Adding a watch twice succeeds when the watch is running.""" | 807 | """Adding a watch twice succeeds when the watch is running.""" |
787 | 948 | self.patch(Watch, "start_watching", lambda self: self.started) | 808 | raise NotImplementedError |
772 | 949 | manager = WatchManager(None) | ||
773 | 950 | # no need to stop watching because start_watching is fake | ||
774 | 951 | |||
775 | 952 | path = u'\\\\?\\C:\\test' # a valid windows path | ||
776 | 953 | mask = 'fake bit mask' | ||
777 | 954 | d1 = manager.add_watch(path, mask) | ||
778 | 955 | d2 = manager.add_watch(path, mask) | ||
779 | 956 | |||
780 | 957 | self.assertFalse(d1.called, "Should not be called yet.") | ||
781 | 958 | self.assertFalse(d2.called, "Should not be called yet.") | ||
782 | 959 | |||
783 | 960 | manager._wdm.values()[0].started.callback(True) | ||
784 | 961 | |||
785 | 962 | self.assertTrue(d1.called, "Should already be called.") | ||
786 | 963 | self.assertTrue(d2.called, "Should already be called.") | ||
788 | 964 | 809 | ||
789 | 965 | 810 | ||
790 | 966 | class FakeEvent(object): | 811 | class FakeEvent(object): |
791 | @@ -1083,16 +928,6 @@ | |||
792 | 1083 | self.assertEqual(event, self.general.called_methods[0][1]) | 928 | self.assertEqual(event, self.general.called_methods[0][1]) |
793 | 1084 | self.assertEqual(paths, self.general.called_methods[0][2]) | 929 | self.assertEqual(paths, self.general.called_methods[0][2]) |
794 | 1085 | 930 | ||
795 | 1086 | def test_platform_is_ignored(self): | ||
796 | 1087 | """Test that we do indeed ignore the correct paths.""" | ||
797 | 1088 | not_ignored = 'test' | ||
798 | 1089 | ignored = not_ignored + '.lnk' | ||
799 | 1090 | path_is_ignored = notify_processor.common.path_is_ignored | ||
800 | 1091 | self.assertFalse(path_is_ignored(not_ignored), | ||
801 | 1092 | 'Only links should be ignored.') | ||
802 | 1093 | self.assertTrue(path_is_ignored(ignored), | ||
803 | 1094 | 'Links should be ignored.') | ||
804 | 1095 | |||
805 | 1096 | def test_is_ignored(self): | 931 | def test_is_ignored(self): |
806 | 1097 | """Test that we do ensure that the path is ignored.""" | 932 | """Test that we do ensure that the path is ignored.""" |
807 | 1098 | path = 'path' | 933 | path = 'path' |
808 | @@ -1406,31 +1241,15 @@ | |||
809 | 1406 | 1241 | ||
810 | 1407 | class FilesystemMonitorTestCase(BaseTwistedTestCase): | 1242 | class FilesystemMonitorTestCase(BaseTwistedTestCase): |
811 | 1408 | """Tests for the FilesystemMonitor.""" | 1243 | """Tests for the FilesystemMonitor.""" |
812 | 1244 | |||
813 | 1409 | timeout = 5 | 1245 | timeout = 5 |
814 | 1410 | 1246 | ||
815 | 1411 | def test_add_watch_twice(self): | 1247 | def test_add_watch_twice(self): |
816 | 1412 | """Check the deferred returned by a second add_watch.""" | 1248 | """Check the deferred returned by a second add_watch.""" |
835 | 1413 | self.patch(Watch, "start_watching", lambda self: self.started) | 1249 | raise NotImplementedError |
836 | 1414 | monitor = FilesystemMonitor(None, None) | 1250 | |
819 | 1415 | # no need to stop watching because start_watching is fake | ||
820 | 1416 | |||
821 | 1417 | parent_path = 'C:\\test' # a valid windows path in utf-8 bytes | ||
822 | 1418 | child_path = parent_path + "\\child" | ||
823 | 1419 | d1 = monitor.add_watch(parent_path) | ||
824 | 1420 | d2 = monitor.add_watch(child_path) | ||
825 | 1421 | |||
826 | 1422 | self.assertFalse(d1.called, "Should not be called yet.") | ||
827 | 1423 | self.assertFalse(d2.called, "Should not be called yet.") | ||
828 | 1424 | |||
829 | 1425 | monitor._watch_manager._wdm.values()[0].started.callback(True) | ||
830 | 1426 | |||
831 | 1427 | self.assertTrue(d1.called, "Should already be called.") | ||
832 | 1428 | self.assertTrue(d2.called, "Should already be called.") | ||
833 | 1429 | |||
834 | 1430 | @defer.inlineCallbacks | ||
837 | 1431 | def test_add_watches_to_udf_ancestors(self): | 1251 | def test_add_watches_to_udf_ancestors(self): |
838 | 1432 | """Test that the ancestor watches are not added.""" | 1252 | """Test that the ancestor watches are not added.""" |
839 | 1433 | |||
840 | 1434 | class FakeVolume(object): | 1253 | class FakeVolume(object): |
841 | 1435 | """A fake UDF.""" | 1254 | """A fake UDF.""" |
842 | 1436 | 1255 | ||
843 | 1437 | 1256 | ||
844 | === modified file 'tests/platform/filesystem_notifications/test_darwin.py' | |||
845 | --- tests/platform/filesystem_notifications/test_darwin.py 2012-07-18 15:18:04 +0000 | |||
846 | +++ tests/platform/filesystem_notifications/test_darwin.py 2012-08-22 18:22:29 +0000 | |||
847 | @@ -28,16 +28,13 @@ | |||
848 | 28 | # files in the program, then also delete it here. | 28 | # files in the program, then also delete it here. |
849 | 29 | """Test the filesystem notifications on MAC OS.""" | 29 | """Test the filesystem notifications on MAC OS.""" |
850 | 30 | 30 | ||
851 | 31 | import itertools | ||
852 | 31 | import logging | 32 | import logging |
853 | 32 | import os | 33 | import os |
854 | 33 | import tempfile | 34 | import tempfile |
855 | 34 | import thread | 35 | import thread |
856 | 35 | import itertools | ||
857 | 36 | 36 | ||
858 | 37 | import fsevents | ||
859 | 38 | from twisted.internet import defer | 37 | from twisted.internet import defer |
860 | 39 | |||
861 | 40 | from contrib.testing.testcase import BaseTwistedTestCase | ||
862 | 41 | from ubuntuone.devtools.handlers import MementoHandler | 38 | from ubuntuone.devtools.handlers import MementoHandler |
863 | 42 | from ubuntuone.platform.filesystem_notifications.monitor import ( | 39 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
864 | 43 | common, | 40 | common, |
865 | @@ -51,7 +48,6 @@ | |||
866 | 51 | WatchManager, | 48 | WatchManager, |
867 | 52 | ) | 49 | ) |
868 | 53 | from ubuntuone.platform.filesystem_notifications.pyinotify_agnostic import ( | 50 | from ubuntuone.platform.filesystem_notifications.pyinotify_agnostic import ( |
869 | 54 | EventsCodes, | ||
870 | 55 | ProcessEvent, | 51 | ProcessEvent, |
871 | 56 | IN_CLOSE_WRITE, | 52 | IN_CLOSE_WRITE, |
872 | 57 | IN_CREATE, | 53 | IN_CREATE, |
873 | @@ -59,6 +55,7 @@ | |||
874 | 59 | IN_OPEN, | 55 | IN_OPEN, |
875 | 60 | ) | 56 | ) |
876 | 61 | from tests.platform.filesystem_notifications import BaseFSMonitorTestCase | 57 | from tests.platform.filesystem_notifications import BaseFSMonitorTestCase |
877 | 58 | from tests.platform.filesystem_notifications import common as common_tests | ||
878 | 62 | 59 | ||
879 | 63 | 60 | ||
880 | 64 | # A reverse mapping for the tests | 61 | # A reverse mapping for the tests |
881 | @@ -67,23 +64,18 @@ | |||
882 | 67 | REVERSE_MACOS_ACTIONS[value] = key | 64 | REVERSE_MACOS_ACTIONS[value] = key |
883 | 68 | 65 | ||
884 | 69 | 66 | ||
902 | 70 | OP_FLAGS = EventsCodes.FLAG_COLLECTIONS['OP_FLAGS'] | 67 | class FakeEventsProcessor(object): |
903 | 71 | IS_DIR = EventsCodes.FLAG_COLLECTIONS['SPECIAL_FLAGS']['IN_ISDIR'] | 68 | |
904 | 72 | 69 | """Handle fake events creation and processing.""" | |
905 | 73 | 70 | ||
906 | 74 | class FakeException(Exception): | 71 | def create_fake_event(self, filename): |
907 | 75 | """A fake Exception used in tests.""" | 72 | """Create a fake file event.""" |
908 | 76 | 73 | return FakeFileEvent(256, None, filename) | |
909 | 77 | 74 | ||
910 | 78 | class FakeVolume(object): | 75 | def custom_process_events(self, watch, events): |
911 | 79 | """A fake volume.""" | 76 | """Adapt to each platform way to process events.""" |
912 | 80 | 77 | for event in events: | |
913 | 81 | def __init__(self, path, ancestors): | 78 | watch.platform_watch._process_events(event) |
897 | 82 | """Create a new instance.""" | ||
898 | 83 | super(FakeVolume, self).__init__() | ||
899 | 84 | self.volume_id = path | ||
900 | 85 | self.path = path | ||
901 | 86 | self.ancestors = ancestors | ||
914 | 87 | 79 | ||
915 | 88 | 80 | ||
916 | 89 | class FakeFileEvent(object): | 81 | class FakeFileEvent(object): |
917 | @@ -135,7 +127,7 @@ | |||
918 | 135 | assert self.main_thread_id == thread.get_ident() | 127 | assert self.main_thread_id == thread.get_ident() |
919 | 136 | 128 | ||
920 | 137 | 129 | ||
922 | 138 | class TestWatch(BaseTwistedTestCase): | 130 | class TestWatch(common_tests.TestWatch): |
923 | 139 | """Test the watch so that it returns the same events as pyinotify.""" | 131 | """Test the watch so that it returns the same events as pyinotify.""" |
924 | 140 | 132 | ||
925 | 141 | timeout = 5 | 133 | timeout = 5 |
926 | @@ -143,6 +135,9 @@ | |||
927 | 143 | @defer.inlineCallbacks | 135 | @defer.inlineCallbacks |
928 | 144 | def setUp(self): | 136 | def setUp(self): |
929 | 145 | yield super(TestWatch, self).setUp() | 137 | yield super(TestWatch, self).setUp() |
930 | 138 | self.path = '/Users/username/folder' | ||
931 | 139 | self.common_path = '/Users/username/folder' | ||
932 | 140 | self.invalid_path = '/Users/username/path/to/not/dir' | ||
933 | 146 | self.basedir = self.mktemp('test_root') | 141 | self.basedir = self.mktemp('test_root') |
934 | 147 | self.mask = None | 142 | self.mask = None |
935 | 148 | self.stream = None | 143 | self.stream = None |
936 | @@ -151,6 +146,7 @@ | |||
937 | 151 | self.raw_events = [] | 146 | self.raw_events = [] |
938 | 152 | self.paths_checked = [] | 147 | self.paths_checked = [] |
939 | 153 | old_is_dir = Watch._path_is_dir | 148 | old_is_dir = Watch._path_is_dir |
940 | 149 | self.fake_events_processor = FakeEventsProcessor() | ||
941 | 154 | 150 | ||
942 | 155 | def path_is_dir_wrapper(watch, path): | 151 | def path_is_dir_wrapper(watch, path): |
943 | 156 | """Wrapper that gets the checked paths.""" | 152 | """Wrapper that gets the checked paths.""" |
944 | @@ -158,37 +154,32 @@ | |||
945 | 158 | self.paths_checked.append((path, result)) | 154 | self.paths_checked.append((path, result)) |
946 | 159 | return result | 155 | return result |
947 | 160 | 156 | ||
979 | 161 | self.patch(Watch, '_path_is_dir', | 157 | self.patch(Watch, '_path_is_dir', path_is_dir_wrapper) |
980 | 162 | path_is_dir_wrapper) | 158 | |
981 | 163 | 159 | def test_not_ignore_path(self): | |
982 | 164 | @defer.inlineCallbacks | 160 | """Test that we do get the events when they do not match.""" |
983 | 165 | def _perform_operations(self, path, mask, actions, number_events): | 161 | self.patch(filesystem_notifications.reactor, 'callFromThread', |
984 | 166 | """Perform the file operations and returns the recorded events.""" | 162 | lambda x, e: x(e)) |
985 | 167 | handler = TestCaseHandler(number_events=number_events) | 163 | super(TestWatch, self).test_not_ignore_path() |
986 | 168 | manager = WatchManager(handler) | 164 | |
987 | 169 | yield manager.add_watch(path, mask) | 165 | def test_undo_ignore_path_ignored(self): |
988 | 170 | # change the logger so that we can check the logs if we wanted | 166 | """Test that we do deal with events from and old ignored path.""" |
989 | 171 | manager._wdm[0].log.addHandler(self.memento) | 167 | self.patch(filesystem_notifications.reactor, 'callFromThread', |
990 | 172 | # clean logger later | 168 | lambda x, e: x(e)) |
991 | 173 | self.addCleanup(manager._wdm[0].log.removeHandler, self.memento) | 169 | super(TestWatch, self).test_not_ignore_path() |
992 | 174 | # execution the actions | 170 | |
993 | 175 | actions() | 171 | def test_undo_ignore_path_other_ignored(self): |
994 | 176 | # process the recorded events | 172 | """Test that we can undo and the other path is ignored.""" |
995 | 177 | ret = yield handler.deferred | 173 | self.patch(filesystem_notifications.reactor, 'callFromThread', |
996 | 178 | self.addCleanup(manager.stop) | 174 | lambda x, e: x(e)) |
997 | 179 | defer.returnValue(ret) | 175 | super(TestWatch, self).test_not_ignore_path() |
998 | 180 | 176 | ||
999 | 181 | def _assert_logs(self, events): | 177 | def test_mixed_ignore_path(self): |
1000 | 182 | """Assert the debug logs.""" | 178 | """Test that we do get the correct events.""" |
1001 | 183 | logs = [] | 179 | self.patch(filesystem_notifications.reactor, 'callFromThread', |
1002 | 184 | msg = 'Is path %r a dir? %s' | 180 | lambda x, e: x(e)) |
1003 | 185 | logs.extend([msg % data for data in self.paths_checked]) | 181 | super(TestWatch, self).test_mixed_ignore_path() |
1004 | 186 | msg = 'Pushing event %r to processor.' | 182 | |
974 | 187 | logs.extend([msg % e for e in events]) | ||
975 | 188 | for msg in logs: | ||
976 | 189 | self.assertTrue(self.memento.check_debug(msg)) | ||
977 | 190 | |||
978 | 191 | @defer.inlineCallbacks | ||
1005 | 192 | def test_file_create(self): | 183 | def test_file_create(self): |
1006 | 193 | """Test that the correct event is returned on a file create.""" | 184 | """Test that the correct event is returned on a file create.""" |
1007 | 194 | file_name = os.path.join(self.basedir, 'test_file_create') | 185 | file_name = os.path.join(self.basedir, 'test_file_create') |
1008 | @@ -205,14 +196,12 @@ | |||
1009 | 205 | create_file, 1) | 196 | create_file, 1) |
1010 | 206 | event = events[0] | 197 | event = events[0] |
1011 | 207 | self.assertFalse(event.dir) | 198 | self.assertFalse(event.dir) |
1013 | 208 | self.assertEqual(OP_FLAGS['IN_CREATE'], event.mask) | 199 | self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'], event.mask) |
1014 | 209 | self.assertEqual('IN_CREATE', event.maskname) | 200 | self.assertEqual('IN_CREATE', event.maskname) |
1015 | 210 | self.assertEqual(os.path.split(file_name)[1], event.name) | 201 | self.assertEqual(os.path.split(file_name)[1], event.name) |
1016 | 211 | self.assertEqual('.', event.path) | 202 | self.assertEqual('.', event.path) |
1017 | 212 | self.assertEqual(os.path.join(self.basedir, file_name), event.pathname) | 203 | self.assertEqual(os.path.join(self.basedir, file_name), event.pathname) |
1018 | 213 | self.assertEqual(0, event.wd) | 204 | self.assertEqual(0, event.wd) |
1019 | 214 | # assert the logging | ||
1020 | 215 | self._assert_logs(events) | ||
1021 | 216 | 205 | ||
1022 | 217 | @defer.inlineCallbacks | 206 | @defer.inlineCallbacks |
1023 | 218 | def test_dir_create(self): | 207 | def test_dir_create(self): |
1024 | @@ -227,14 +216,13 @@ | |||
1025 | 227 | create_dir, 1) | 216 | create_dir, 1) |
1026 | 228 | event = events[0] | 217 | event = events[0] |
1027 | 229 | self.assertTrue(event.dir) | 218 | self.assertTrue(event.dir) |
1029 | 230 | self.assertEqual(OP_FLAGS['IN_CREATE'] | IS_DIR, event.mask) | 219 | self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'] | |
1030 | 220 | common_tests.IS_DIR, event.mask) | ||
1031 | 231 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) | 221 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) |
1032 | 232 | self.assertEqual(os.path.split(dir_name)[1], event.name) | 222 | self.assertEqual(os.path.split(dir_name)[1], event.name) |
1033 | 233 | self.assertEqual('.', event.path) | 223 | self.assertEqual('.', event.path) |
1034 | 234 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) | 224 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) |
1035 | 235 | self.assertEqual(0, event.wd) | 225 | self.assertEqual(0, event.wd) |
1036 | 236 | # assert the logging | ||
1037 | 237 | self._assert_logs(events) | ||
1038 | 238 | 226 | ||
1039 | 239 | @defer.inlineCallbacks | 227 | @defer.inlineCallbacks |
1040 | 240 | def test_file_remove(self): | 228 | def test_file_remove(self): |
1041 | @@ -251,14 +239,12 @@ | |||
1042 | 251 | remove_file, 1) | 239 | remove_file, 1) |
1043 | 252 | event = events[0] | 240 | event = events[0] |
1044 | 253 | self.assertFalse(event.dir) | 241 | self.assertFalse(event.dir) |
1046 | 254 | self.assertEqual(OP_FLAGS['IN_DELETE'], event.mask) | 242 | self.assertEqual(common_tests.OP_FLAGS['IN_DELETE'], event.mask) |
1047 | 255 | self.assertEqual('IN_DELETE', event.maskname) | 243 | self.assertEqual('IN_DELETE', event.maskname) |
1048 | 256 | self.assertEqual(os.path.split(file_name)[1], event.name) | 244 | self.assertEqual(os.path.split(file_name)[1], event.name) |
1049 | 257 | self.assertEqual('.', event.path) | 245 | self.assertEqual('.', event.path) |
1050 | 258 | self.assertEqual(os.path.join(self.basedir, file_name), event.pathname) | 246 | self.assertEqual(os.path.join(self.basedir, file_name), event.pathname) |
1051 | 259 | self.assertEqual(0, event.wd) | 247 | self.assertEqual(0, event.wd) |
1052 | 260 | # assert the logging | ||
1053 | 261 | self._assert_logs(events) | ||
1054 | 262 | 248 | ||
1055 | 263 | @defer.inlineCallbacks | 249 | @defer.inlineCallbacks |
1056 | 264 | def test_dir_remove(self): | 250 | def test_dir_remove(self): |
1057 | @@ -275,13 +261,12 @@ | |||
1058 | 275 | remove_dir, 1) | 261 | remove_dir, 1) |
1059 | 276 | event = events[0] | 262 | event = events[0] |
1060 | 277 | self.assertTrue(event.dir) | 263 | self.assertTrue(event.dir) |
1062 | 278 | self.assertEqual(OP_FLAGS['IN_DELETE'] | IS_DIR, event.mask) | 264 | self.assertEqual(common_tests.OP_FLAGS['IN_DELETE'] | |
1063 | 265 | common_tests.IS_DIR, event.mask) | ||
1064 | 279 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) | 266 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) |
1065 | 280 | self.assertEqual('.', event.path) | 267 | self.assertEqual('.', event.path) |
1066 | 281 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) | 268 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) |
1067 | 282 | self.assertEqual(0, event.wd) | 269 | self.assertEqual(0, event.wd) |
1068 | 283 | # assert the logging | ||
1069 | 284 | self._assert_logs(events) | ||
1070 | 285 | 270 | ||
1071 | 286 | @defer.inlineCallbacks | 271 | @defer.inlineCallbacks |
1072 | 287 | def test_file_write(self): | 272 | def test_file_write(self): |
1073 | @@ -301,14 +286,12 @@ | |||
1074 | 301 | write_file, 1) | 286 | write_file, 1) |
1075 | 302 | event = events[0] | 287 | event = events[0] |
1076 | 303 | self.assertFalse(event.dir) | 288 | self.assertFalse(event.dir) |
1078 | 304 | self.assertEqual(OP_FLAGS['IN_CREATE'], event.mask) | 289 | self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'], event.mask) |
1079 | 305 | self.assertEqual('IN_CREATE', event.maskname) | 290 | self.assertEqual('IN_CREATE', event.maskname) |
1080 | 306 | self.assertEqual(os.path.split(file_name)[1], event.name) | 291 | self.assertEqual(os.path.split(file_name)[1], event.name) |
1081 | 307 | self.assertEqual('.', event.path) | 292 | self.assertEqual('.', event.path) |
1082 | 308 | self.assertEqual(os.path.join(self.basedir, file_name), event.pathname) | 293 | self.assertEqual(os.path.join(self.basedir, file_name), event.pathname) |
1083 | 309 | self.assertEqual(0, event.wd) | 294 | self.assertEqual(0, event.wd) |
1084 | 310 | # assert the logging | ||
1085 | 311 | self._assert_logs(events) | ||
1086 | 312 | 295 | ||
1087 | 313 | @defer.inlineCallbacks | 296 | @defer.inlineCallbacks |
1088 | 314 | def test_file_moved_to_watched_dir_same_watcher(self): | 297 | def test_file_moved_to_watched_dir_same_watcher(self): |
1089 | @@ -330,7 +313,8 @@ | |||
1090 | 330 | move_to_event = events[1] | 313 | move_to_event = events[1] |
1091 | 331 | # first test the move from | 314 | # first test the move from |
1092 | 332 | self.assertFalse(move_from_event.dir) | 315 | self.assertFalse(move_from_event.dir) |
1094 | 333 | self.assertEqual(OP_FLAGS['IN_MOVED_FROM'], move_from_event.mask) | 316 | self.assertEqual(common_tests.OP_FLAGS['IN_MOVED_FROM'], |
1095 | 317 | move_from_event.mask) | ||
1096 | 334 | self.assertEqual('IN_MOVED_FROM', move_from_event.maskname) | 318 | self.assertEqual('IN_MOVED_FROM', move_from_event.maskname) |
1097 | 335 | self.assertEqual(os.path.split(from_file_name)[1], | 319 | self.assertEqual(os.path.split(from_file_name)[1], |
1098 | 336 | move_from_event.name) | 320 | move_from_event.name) |
1099 | @@ -340,7 +324,8 @@ | |||
1100 | 340 | self.assertEqual(0, move_from_event.wd) | 324 | self.assertEqual(0, move_from_event.wd) |
1101 | 341 | # test the move to | 325 | # test the move to |
1102 | 342 | self.assertFalse(move_to_event.dir) | 326 | self.assertFalse(move_to_event.dir) |
1104 | 343 | self.assertEqual(OP_FLAGS['IN_MOVED_TO'], move_to_event.mask) | 327 | self.assertEqual(common_tests.OP_FLAGS['IN_MOVED_TO'], |
1105 | 328 | move_to_event.mask) | ||
1106 | 344 | self.assertEqual('IN_MOVED_TO', move_to_event.maskname) | 329 | self.assertEqual('IN_MOVED_TO', move_to_event.maskname) |
1107 | 345 | self.assertEqual(os.path.split(to_file_name)[1], move_to_event.name) | 330 | self.assertEqual(os.path.split(to_file_name)[1], move_to_event.name) |
1108 | 346 | self.assertEqual('.', move_to_event.path) | 331 | self.assertEqual('.', move_to_event.path) |
1109 | @@ -351,8 +336,6 @@ | |||
1110 | 351 | self.assertEqual(0, move_to_event.wd) | 336 | self.assertEqual(0, move_to_event.wd) |
1111 | 352 | # assert that both cookies are the same | 337 | # assert that both cookies are the same |
1112 | 353 | self.assertEqual(move_from_event.cookie, move_to_event.cookie) | 338 | self.assertEqual(move_from_event.cookie, move_to_event.cookie) |
1113 | 354 | # assert the logging | ||
1114 | 355 | self._assert_logs(events) | ||
1115 | 356 | 339 | ||
1116 | 357 | @defer.inlineCallbacks | 340 | @defer.inlineCallbacks |
1117 | 358 | def test_file_moved_to_not_watched_dir(self): | 341 | def test_file_moved_to_not_watched_dir(self): |
1118 | @@ -372,15 +355,13 @@ | |||
1119 | 372 | move_file, 1) | 355 | move_file, 1) |
1120 | 373 | event = events[0] | 356 | event = events[0] |
1121 | 374 | self.assertFalse(event.dir) | 357 | self.assertFalse(event.dir) |
1123 | 375 | self.assertEqual(OP_FLAGS['IN_DELETE'], event.mask) | 358 | self.assertEqual(common_tests.OP_FLAGS['IN_DELETE'], event.mask) |
1124 | 376 | self.assertEqual('IN_DELETE', event.maskname) | 359 | self.assertEqual('IN_DELETE', event.maskname) |
1125 | 377 | self.assertEqual(os.path.split(from_file_name)[1], event.name) | 360 | self.assertEqual(os.path.split(from_file_name)[1], event.name) |
1126 | 378 | self.assertEqual('.', event.path) | 361 | self.assertEqual('.', event.path) |
1127 | 379 | self.assertEqual(os.path.join(self.basedir, from_file_name), | 362 | self.assertEqual(os.path.join(self.basedir, from_file_name), |
1128 | 380 | event.pathname) | 363 | event.pathname) |
1129 | 381 | self.assertEqual(0, event.wd) | 364 | self.assertEqual(0, event.wd) |
1130 | 382 | # assert the logging | ||
1131 | 383 | self._assert_logs(events) | ||
1132 | 384 | 365 | ||
1133 | 385 | @defer.inlineCallbacks | 366 | @defer.inlineCallbacks |
1134 | 386 | def test_file_move_from_not_watched_dir(self): | 367 | def test_file_move_from_not_watched_dir(self): |
1135 | @@ -402,15 +383,13 @@ | |||
1136 | 402 | move_files, 1) | 383 | move_files, 1) |
1137 | 403 | event = events[0] | 384 | event = events[0] |
1138 | 404 | self.assertFalse(event.dir) | 385 | self.assertFalse(event.dir) |
1140 | 405 | self.assertEqual(OP_FLAGS['IN_CREATE'], event.mask) | 386 | self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'], event.mask) |
1141 | 406 | self.assertEqual('IN_CREATE', event.maskname) | 387 | self.assertEqual('IN_CREATE', event.maskname) |
1142 | 407 | self.assertEqual(os.path.split(to_file_name)[1], event.name) | 388 | self.assertEqual(os.path.split(to_file_name)[1], event.name) |
1143 | 408 | self.assertEqual('.', event.path) | 389 | self.assertEqual('.', event.path) |
1144 | 409 | self.assertEqual(os.path.join(self.basedir, to_file_name), | 390 | self.assertEqual(os.path.join(self.basedir, to_file_name), |
1145 | 410 | event.pathname) | 391 | event.pathname) |
1146 | 411 | self.assertEqual(0, event.wd) | 392 | self.assertEqual(0, event.wd) |
1147 | 412 | # assert the logging | ||
1148 | 413 | self._assert_logs(events) | ||
1149 | 414 | 393 | ||
1150 | 415 | @defer.inlineCallbacks | 394 | @defer.inlineCallbacks |
1151 | 416 | def test_dir_moved_to_watched_dir_same_watcher(self): | 395 | def test_dir_moved_to_watched_dir_same_watcher(self): |
1152 | @@ -431,7 +410,8 @@ | |||
1153 | 431 | move_to_event = events[1] | 410 | move_to_event = events[1] |
1154 | 432 | # first test the move from | 411 | # first test the move from |
1155 | 433 | self.assertTrue(move_from_event.dir) | 412 | self.assertTrue(move_from_event.dir) |
1157 | 434 | self.assertEqual(OP_FLAGS['IN_MOVED_FROM'] | IS_DIR, | 413 | self.assertEqual(common_tests.OP_FLAGS['IN_MOVED_FROM'] | |
1158 | 414 | common_tests.IS_DIR, | ||
1159 | 435 | move_from_event.mask) | 415 | move_from_event.mask) |
1160 | 436 | self.assertEqual('IN_MOVED_FROM|IN_ISDIR', move_from_event.maskname) | 416 | self.assertEqual('IN_MOVED_FROM|IN_ISDIR', move_from_event.maskname) |
1161 | 437 | self.assertEqual(os.path.split(from_dir_name)[1], move_from_event.name) | 417 | self.assertEqual(os.path.split(from_dir_name)[1], move_from_event.name) |
1162 | @@ -441,7 +421,8 @@ | |||
1163 | 441 | self.assertEqual(0, move_from_event.wd) | 421 | self.assertEqual(0, move_from_event.wd) |
1164 | 442 | # test the move to | 422 | # test the move to |
1165 | 443 | self.assertTrue(move_to_event.dir) | 423 | self.assertTrue(move_to_event.dir) |
1167 | 444 | self.assertEqual(OP_FLAGS['IN_MOVED_TO'] | IS_DIR, move_to_event.mask) | 424 | self.assertEqual(common_tests.OP_FLAGS['IN_MOVED_TO'] | |
1168 | 425 | common_tests.IS_DIR, move_to_event.mask) | ||
1169 | 445 | self.assertEqual('IN_MOVED_TO|IN_ISDIR', move_to_event.maskname) | 426 | self.assertEqual('IN_MOVED_TO|IN_ISDIR', move_to_event.maskname) |
1170 | 446 | self.assertEqual(os.path.split(to_dir_name)[1], move_to_event.name) | 427 | self.assertEqual(os.path.split(to_dir_name)[1], move_to_event.name) |
1171 | 447 | self.assertEqual('.', move_to_event.path) | 428 | self.assertEqual('.', move_to_event.path) |
1172 | @@ -452,8 +433,6 @@ | |||
1173 | 452 | self.assertEqual(0, move_to_event.wd) | 433 | self.assertEqual(0, move_to_event.wd) |
1174 | 453 | # assert that both cookies are the same | 434 | # assert that both cookies are the same |
1175 | 454 | self.assertEqual(move_from_event.cookie, move_to_event.cookie) | 435 | self.assertEqual(move_from_event.cookie, move_to_event.cookie) |
1176 | 455 | # assert the logging | ||
1177 | 456 | self._assert_logs(events) | ||
1178 | 457 | 436 | ||
1179 | 458 | @defer.inlineCallbacks | 437 | @defer.inlineCallbacks |
1180 | 459 | def test_dir_moved_to_not_watched_dir(self): | 438 | def test_dir_moved_to_not_watched_dir(self): |
1181 | @@ -473,13 +452,12 @@ | |||
1182 | 473 | move_dir, 1) | 452 | move_dir, 1) |
1183 | 474 | event = events[0] | 453 | event = events[0] |
1184 | 475 | self.assertTrue(event.dir) | 454 | self.assertTrue(event.dir) |
1186 | 476 | self.assertEqual(OP_FLAGS['IN_DELETE'] | IS_DIR, event.mask) | 455 | self.assertEqual(common_tests.OP_FLAGS['IN_DELETE'] | |
1187 | 456 | common_tests.IS_DIR, event.mask) | ||
1188 | 477 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) | 457 | self.assertEqual('IN_DELETE|IN_ISDIR', event.maskname) |
1189 | 478 | self.assertEqual('.', event.path) | 458 | self.assertEqual('.', event.path) |
1190 | 479 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) | 459 | self.assertEqual(os.path.join(self.basedir, dir_name), event.pathname) |
1191 | 480 | self.assertEqual(0, event.wd) | 460 | self.assertEqual(0, event.wd) |
1192 | 481 | # assert the logging | ||
1193 | 482 | self._assert_logs(events) | ||
1194 | 483 | 461 | ||
1195 | 484 | @defer.inlineCallbacks | 462 | @defer.inlineCallbacks |
1196 | 485 | def test_dir_move_from_not_watched_dir(self): | 463 | def test_dir_move_from_not_watched_dir(self): |
1197 | @@ -499,7 +477,8 @@ | |||
1198 | 499 | move_dir, 1) | 477 | move_dir, 1) |
1199 | 500 | event = events[0] | 478 | event = events[0] |
1200 | 501 | self.assertTrue(event.dir) | 479 | self.assertTrue(event.dir) |
1202 | 502 | self.assertEqual(OP_FLAGS['IN_CREATE'] | IS_DIR, event.mask) | 480 | self.assertEqual(common_tests.OP_FLAGS['IN_CREATE'] | |
1203 | 481 | common_tests.IS_DIR, event.mask) | ||
1204 | 503 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) | 482 | self.assertEqual('IN_CREATE|IN_ISDIR', event.maskname) |
1205 | 504 | self.assertEqual(os.path.split(from_dir_name)[1], event.name) | 483 | self.assertEqual(os.path.split(from_dir_name)[1], event.name) |
1206 | 505 | self.assertEqual('.', event.path) | 484 | self.assertEqual('.', event.path) |
1207 | @@ -521,149 +500,6 @@ | |||
1208 | 521 | self.assertEqual(0, len(handler.processed_events)) | 500 | self.assertEqual(0, len(handler.processed_events)) |
1209 | 522 | test_exclude_filter.skip = "we must rethink this test." | 501 | test_exclude_filter.skip = "we must rethink this test." |
1210 | 523 | 502 | ||
1211 | 524 | def test_ignore_path(self): | ||
1212 | 525 | """Test that events from a path are ignored.""" | ||
1213 | 526 | events = [] | ||
1214 | 527 | |||
1215 | 528 | def fake_processor(event): | ||
1216 | 529 | """Memorize the processed events.""" | ||
1217 | 530 | events.append(event) | ||
1218 | 531 | |||
1219 | 532 | path = '/Users/username/folder' | ||
1220 | 533 | child = 'child' | ||
1221 | 534 | watch = Watch(1, path, fake_processor) | ||
1222 | 535 | watch.ignore_path(os.path.join(path, child)) | ||
1223 | 536 | # ensure that the watch is watching | ||
1224 | 537 | watch.platform_watch.watching = True | ||
1225 | 538 | for file_name in 'abcdef': | ||
1226 | 539 | event = FakeFileEvent(256, None, os.path.join(child, file_name)) | ||
1227 | 540 | watch.platform_watch._process_events(event) | ||
1228 | 541 | self.assertEqual(0, len(events), | ||
1229 | 542 | 'All events should have been ignored.') | ||
1230 | 543 | |||
1231 | 544 | def test_not_ignore_path(self): | ||
1232 | 545 | """Test that we do get the events when they do not match.""" | ||
1233 | 546 | events = [] | ||
1234 | 547 | |||
1235 | 548 | def fake_processor(event): | ||
1236 | 549 | """Memorize the processed events.""" | ||
1237 | 550 | events.append(event) | ||
1238 | 551 | |||
1239 | 552 | self.patch(filesystem_notifications.reactor, 'callFromThread', | ||
1240 | 553 | lambda x, e: x(e)) | ||
1241 | 554 | |||
1242 | 555 | path = '/Users/username/folder' | ||
1243 | 556 | child = 'child' | ||
1244 | 557 | watch = Watch(1, path, fake_processor) | ||
1245 | 558 | watch.ignore_path(os.path.join(path, child)) | ||
1246 | 559 | paths_not_to_ignore = [] | ||
1247 | 560 | for file_name in 'abcdef': | ||
1248 | 561 | event = FakeFileEvent(256, None, os.path.join(child + file_name, | ||
1249 | 562 | file_name)) | ||
1250 | 563 | paths_not_to_ignore.append(event) | ||
1251 | 564 | # ensure that the watch is watching | ||
1252 | 565 | watch.platform_watch.watching = True | ||
1253 | 566 | for event in paths_not_to_ignore: | ||
1254 | 567 | watch.platform_watch._process_events(event) | ||
1255 | 568 | self.assertEqual(len(paths_not_to_ignore), len(events), | ||
1256 | 569 | 'No events should have been ignored.') | ||
1257 | 570 | |||
1258 | 571 | def test_mixed_ignore_path(self): | ||
1259 | 572 | """Test that we do get the correct events.""" | ||
1260 | 573 | events = [] | ||
1261 | 574 | |||
1262 | 575 | def fake_processor(event): | ||
1263 | 576 | """Memorize the processed events.""" | ||
1264 | 577 | events.append(event.pathname) | ||
1265 | 578 | |||
1266 | 579 | self.patch(filesystem_notifications.reactor, 'callFromThread', | ||
1267 | 580 | lambda x, e: x(e)) | ||
1268 | 581 | |||
1269 | 582 | child = 'child' | ||
1270 | 583 | path = '/Users/username/folder' | ||
1271 | 584 | watch = Watch(1, path, fake_processor) | ||
1272 | 585 | watch.ignore_path(os.path.join(path, child)) | ||
1273 | 586 | paths_not_to_ignore = [] | ||
1274 | 587 | paths_to_ignore = [] | ||
1275 | 588 | expected_events = [] | ||
1276 | 589 | for file_name in 'abcdef': | ||
1277 | 590 | valid = os.path.join(child + file_name, file_name) | ||
1278 | 591 | paths_to_ignore.append((1, os.path.join(child, file_name))) | ||
1279 | 592 | event = FakeFileEvent(256, None, valid) | ||
1280 | 593 | paths_not_to_ignore.append(event) | ||
1281 | 594 | expected_events.append(os.path.join(path, valid)) | ||
1282 | 595 | # ensure that the watch is watching | ||
1283 | 596 | watch.platform_watch.watching = True | ||
1284 | 597 | for event in paths_not_to_ignore: | ||
1285 | 598 | watch.platform_watch._process_events(event) | ||
1286 | 599 | self.assertEqual(len(paths_not_to_ignore), len(events), | ||
1287 | 600 | 'Wrong number of events ignored.') | ||
1288 | 601 | self.assertTrue(all([event in expected_events for event in events]), | ||
1289 | 602 | 'Paths ignored that should have not been ignored.') | ||
1290 | 603 | |||
1291 | 604 | def test_undo_ignore_path_ignored(self): | ||
1292 | 605 | """Test that we do deal with events from and old ignored path.""" | ||
1293 | 606 | events = [] | ||
1294 | 607 | |||
1295 | 608 | def fake_processor(event): | ||
1296 | 609 | """Memorize the processed events.""" | ||
1297 | 610 | events.append(event) | ||
1298 | 611 | |||
1299 | 612 | self.patch(filesystem_notifications.reactor, 'callFromThread', | ||
1300 | 613 | lambda x, e: x(e)) | ||
1301 | 614 | |||
1302 | 615 | path = '/Users/username/folder' | ||
1303 | 616 | child = 'child' | ||
1304 | 617 | watch = Watch(1, path, fake_processor) | ||
1305 | 618 | watch.ignore_path(os.path.join(path, child)) | ||
1306 | 619 | watch.remove_ignored_path(os.path.join(path, child)) | ||
1307 | 620 | paths_not_to_ignore = [] | ||
1308 | 621 | for file_name in 'abcdef': | ||
1309 | 622 | event = FakeFileEvent(256, None, os.path.join(child, file_name)) | ||
1310 | 623 | paths_not_to_ignore.append(event) | ||
1311 | 624 | # ensure that the watch is watching | ||
1312 | 625 | watch.platform_watch.watching = True | ||
1313 | 626 | for event in paths_not_to_ignore: | ||
1314 | 627 | watch.platform_watch._process_events(event) | ||
1315 | 628 | self.assertEqual(len(paths_not_to_ignore), len(events), | ||
1316 | 629 | 'All events should have been accepted.') | ||
1317 | 630 | |||
1318 | 631 | def test_undo_ignore_path_other_ignored(self): | ||
1319 | 632 | """Test that we can undo and the other path is ignored.""" | ||
1320 | 633 | events = [] | ||
1321 | 634 | |||
1322 | 635 | def fake_processor(event): | ||
1323 | 636 | """Memorize the processed events.""" | ||
1324 | 637 | events.append(event.pathname) | ||
1325 | 638 | |||
1326 | 639 | self.patch(filesystem_notifications.reactor, 'callFromThread', | ||
1327 | 640 | lambda x, e: x(e)) | ||
1328 | 641 | |||
1329 | 642 | path = '/Users/username/folder' | ||
1330 | 643 | child_a = 'childa' | ||
1331 | 644 | child_b = 'childb' | ||
1332 | 645 | watch = Watch(1, path, fake_processor) | ||
1333 | 646 | watch.ignore_path(os.path.join(path, child_a)) | ||
1334 | 647 | watch.ignore_path(os.path.join(path, child_b)) | ||
1335 | 648 | watch.remove_ignored_path(os.path.join(path, child_a)) | ||
1336 | 649 | paths_to_ignore = [] | ||
1337 | 650 | paths_not_to_ignore = [] | ||
1338 | 651 | expected_events = [] | ||
1339 | 652 | for file_name in 'abcdef': | ||
1340 | 653 | paths_to_ignore.append((1, os.path.join(child_b, file_name))) | ||
1341 | 654 | valid = os.path.join(child_a, file_name) | ||
1342 | 655 | event = FakeFileEvent(256, None, valid) | ||
1343 | 656 | paths_not_to_ignore.append(event) | ||
1344 | 657 | expected_events.append(os.path.join(path, valid)) | ||
1345 | 658 | # ensure that the watch is watching | ||
1346 | 659 | watch.platform_watch.watching = True | ||
1347 | 660 | for event in paths_not_to_ignore: | ||
1348 | 661 | watch.platform_watch._process_events(event) | ||
1349 | 662 | self.assertEqual(len(paths_not_to_ignore), len(events), | ||
1350 | 663 | 'All events should have been accepted.') | ||
1351 | 664 | self.assertTrue(all([event in expected_events for event in events]), | ||
1352 | 665 | 'Paths ignored that should have not been ignored.') | ||
1353 | 666 | |||
1354 | 667 | def test_stream_created(self): | 503 | def test_stream_created(self): |
1355 | 668 | """Test that the stream is created.""" | 504 | """Test that the stream is created.""" |
1356 | 669 | def fake_call(*args, **kwargs): | 505 | def fake_call(*args, **kwargs): |
1357 | @@ -684,7 +520,7 @@ | |||
1358 | 684 | 520 | ||
1359 | 685 | def random_error(self, *args): | 521 | def random_error(self, *args): |
1360 | 686 | """Throw a fake exception.""" | 522 | """Throw a fake exception.""" |
1362 | 687 | raise FakeException() | 523 | raise common_tests.FakeException() |
1363 | 688 | 524 | ||
1364 | 689 | def test_is_path_dir_missing_no_subdir(self): | 525 | def test_is_path_dir_missing_no_subdir(self): |
1365 | 690 | """Test when the path does not exist and is no a subdir.""" | 526 | """Test when the path does not exist and is no a subdir.""" |
1366 | @@ -760,7 +596,7 @@ | |||
1367 | 760 | self.assertTrue(path not in watch._subdirs) | 596 | self.assertTrue(path not in watch._subdirs) |
1368 | 761 | 597 | ||
1369 | 762 | 598 | ||
1371 | 763 | class TestWatchManager(BaseTwistedTestCase): | 599 | class TestWatchManager(common_tests.TestWatchManager): |
1372 | 764 | """Test the watch manager.""" | 600 | """Test the watch manager.""" |
1373 | 765 | 601 | ||
1374 | 766 | @defer.inlineCallbacks | 602 | @defer.inlineCallbacks |
1375 | @@ -772,24 +608,19 @@ | |||
1376 | 772 | self.watch = Watch(1, self.path, None) | 608 | self.watch = Watch(1, self.path, None) |
1377 | 773 | self.manager = WatchManager(None) | 609 | self.manager = WatchManager(None) |
1378 | 774 | self.manager._wdm = {1: self.watch} | 610 | self.manager._wdm = {1: self.watch} |
1379 | 611 | self.stream = None | ||
1380 | 612 | self.fake_events_processor = FakeEventsProcessor() | ||
1381 | 775 | 613 | ||
1382 | 776 | @defer.inlineCallbacks | 614 | @defer.inlineCallbacks |
1383 | 777 | def test_stop(self): | 615 | def test_stop(self): |
1384 | 778 | """Test that the different watches are stopped.""" | 616 | """Test that the different watches are stopped.""" |
1393 | 779 | self.was_called = False | 617 | self.patch(self.manager.manager.observer, "unschedule", |
1394 | 780 | 618 | lambda x: None) | |
1387 | 781 | def fake_stop_watching(watch): | ||
1388 | 782 | """Fake stop watch.""" | ||
1389 | 783 | self.was_called = True | ||
1390 | 784 | return defer.succeed(True) | ||
1391 | 785 | |||
1392 | 786 | self.patch(Watch, "stop_watching", fake_stop_watching) | ||
1395 | 787 | self.patch(self.manager.manager.observer, "unschedule", lambda x: None) | 619 | self.patch(self.manager.manager.observer, "unschedule", lambda x: None) |
1398 | 788 | yield self.manager.stop() | 620 | yield super(TestWatchManager, self).test_stop() |
1397 | 789 | self.assertTrue(self.was_called, 'The watch stop should be called.') | ||
1399 | 790 | 621 | ||
1400 | 791 | def test_stop_multiple(self): | 622 | def test_stop_multiple(self): |
1402 | 792 | """The watches should became watching=False and the observer stopped.""" | 623 | """Watches should became watching=False and the observer stopped.""" |
1403 | 793 | self.patch(self.manager.manager.observer, "unschedule", lambda x: None) | 624 | self.patch(self.manager.manager.observer, "unschedule", lambda x: None) |
1404 | 794 | second_path = self.parent_path + "second_path" | 625 | second_path = self.parent_path + "second_path" |
1405 | 795 | second_watch = Watch(2, second_path, None) | 626 | second_watch = Watch(2, second_path, None) |
1406 | @@ -810,28 +641,6 @@ | |||
1407 | 810 | """Test that we get an error when trying to get a missing wd.""" | 641 | """Test that we get an error when trying to get a missing wd.""" |
1408 | 811 | self.assertRaises(KeyError, self.manager.get_watch, (1,)) | 642 | self.assertRaises(KeyError, self.manager.get_watch, (1,)) |
1409 | 812 | 643 | ||
1410 | 813 | @defer.inlineCallbacks | ||
1411 | 814 | def test_delete_present_watch(self): | ||
1412 | 815 | """Test that we can remove a present watch.""" | ||
1413 | 816 | self.was_called = False | ||
1414 | 817 | |||
1415 | 818 | def stop_watching(): | ||
1416 | 819 | """Fake stop watch.""" | ||
1417 | 820 | self.was_called = True | ||
1418 | 821 | return defer.succeed(True) | ||
1419 | 822 | |||
1420 | 823 | def fake_unschedule(s): | ||
1421 | 824 | """Fake function that should receive a Stream object.""" | ||
1422 | 825 | self.stream = s | ||
1423 | 826 | |||
1424 | 827 | self.patch(self.manager.manager.observer, "unschedule", | ||
1425 | 828 | fake_unschedule) | ||
1426 | 829 | |||
1427 | 830 | self.watch.stop_watching = stop_watching | ||
1428 | 831 | yield self.manager.del_watch(1) | ||
1429 | 832 | self.assertIsInstance(self.stream, fsevents.Stream) | ||
1430 | 833 | self.assertRaises(KeyError, self.manager.get_watch, (1,)) | ||
1431 | 834 | |||
1432 | 835 | def test_add_single_watch(self): | 644 | def test_add_single_watch(self): |
1433 | 836 | """Test the addition of a new single watch.""" | 645 | """Test the addition of a new single watch.""" |
1434 | 837 | self.was_called = False | 646 | self.was_called = False |
1435 | @@ -849,10 +658,6 @@ | |||
1436 | 849 | self.assertTrue(self.was_called, 'The watch start was not called.') | 658 | self.assertTrue(self.was_called, 'The watch start was not called.') |
1437 | 850 | self.assertEqual(self.path + os.path.sep, self.manager._wdm[0].path) | 659 | self.assertEqual(self.path + os.path.sep, self.manager._wdm[0].path) |
1438 | 851 | 660 | ||
1439 | 852 | def test_get_watch_present_wd(self): | ||
1440 | 853 | """Test that the correct path is returned.""" | ||
1441 | 854 | self.assertEqual(self.path + os.path.sep, self.manager.get_path(1)) | ||
1442 | 855 | |||
1443 | 856 | def test_get_watch_missing_wd(self): | 661 | def test_get_watch_missing_wd(self): |
1444 | 857 | """Test that the correct path is returned.""" | 662 | """Test that the correct path is returned.""" |
1445 | 858 | self.manager._wdm = {} | 663 | self.manager._wdm = {} |
1446 | @@ -873,60 +678,24 @@ | |||
1447 | 873 | 678 | ||
1448 | 874 | def test_rm_present_wd(self): | 679 | def test_rm_present_wd(self): |
1449 | 875 | """Test the removal of a present watch.""" | 680 | """Test the removal of a present watch.""" |
1450 | 876 | self.patch(self.watch, "stop_watching", lambda: None) | ||
1451 | 877 | self.patch(self.manager.manager.observer, "unschedule", lambda x: None) | 681 | self.patch(self.manager.manager.observer, "unschedule", lambda x: None) |
1470 | 878 | self.manager.rm_watch(1) | 682 | super(TestWatchManager, self).test_rm_present_wd() |
1453 | 879 | self.assertEqual(None, self.manager._wdm.get(1)) | ||
1454 | 880 | |||
1455 | 881 | def test_rm_root_path(self): | ||
1456 | 882 | """Test the removal of a root path.""" | ||
1457 | 883 | events = [] | ||
1458 | 884 | |||
1459 | 885 | def fake_processor(event): | ||
1460 | 886 | """Memorize the processed events.""" | ||
1461 | 887 | events.append(event.pathname) | ||
1462 | 888 | |||
1463 | 889 | self.watch._processor = fake_processor | ||
1464 | 890 | self.manager.rm_path(self.path) | ||
1465 | 891 | self.assertEqual(self.watch, self.manager._wdm.get(1)) | ||
1466 | 892 | self.watch._watching = True | ||
1467 | 893 | event = FakeFileEvent(256, None, os.path.join(self.path, 'test')) | ||
1468 | 894 | self.watch.platform_watch._process_events(event) | ||
1469 | 895 | self.assertEqual(0, len(events)) | ||
1471 | 896 | 683 | ||
1472 | 897 | def test_rm_child_path(self): | 684 | def test_rm_child_path(self): |
1473 | 898 | """Test the removal of a child path.""" | 685 | """Test the removal of a child path.""" |
1474 | 899 | events = [] | ||
1475 | 900 | |||
1476 | 901 | def fake_processor(event): | ||
1477 | 902 | """Memorize the processed events.""" | ||
1478 | 903 | events.append(event.pathname) | ||
1479 | 904 | |||
1480 | 905 | self.patch(filesystem_notifications.reactor, 'callFromThread', | 686 | self.patch(filesystem_notifications.reactor, 'callFromThread', |
1481 | 906 | lambda x, e: x(e)) | 687 | lambda x, e: x(e)) |
1499 | 907 | 688 | super(TestWatchManager, self).test_rm_child_path() | |
1500 | 908 | self.watch._processor = fake_processor | 689 | |
1501 | 909 | child = os.path.join(self.path, 'child') | 690 | |
1502 | 910 | self.manager.rm_path(child) | 691 | class TestWatchManagerAddWatches(common_tests.TestWatchManagerAddWatches): |
1486 | 911 | self.assertEqual(self.watch, self.manager._wdm[1]) | ||
1487 | 912 | # assert that the correct event is ignored | ||
1488 | 913 | self.watch.platform_watch.watching = True | ||
1489 | 914 | event = FakeFileEvent(256, None, os.path.join('child', 'test')) | ||
1490 | 915 | self.watch.platform_watch._process_events(event) | ||
1491 | 916 | self.assertEqual(0, len(events)) | ||
1492 | 917 | # assert that other events are not ignored | ||
1493 | 918 | event2 = FakeFileEvent(256, None, 'test') | ||
1494 | 919 | self.watch.platform_watch._process_events(event2) | ||
1495 | 920 | self.assertEqual(1, len(events)) | ||
1496 | 921 | |||
1497 | 922 | |||
1498 | 923 | class TestWatchManagerAddWatches(BaseTwistedTestCase): | ||
1503 | 924 | """Test the watch manager.""" | 692 | """Test the watch manager.""" |
1504 | 925 | timeout = 5 | 693 | timeout = 5 |
1505 | 926 | 694 | ||
1506 | 927 | def test_add_watch_twice(self): | 695 | def test_add_watch_twice(self): |
1507 | 928 | """Adding a watch twice succeeds when the watch is running.""" | 696 | """Adding a watch twice succeeds when the watch is running.""" |
1508 | 929 | self.patch(Watch, "start_watching", lambda self: None) | 697 | self.patch(Watch, "start_watching", lambda self: None) |
1509 | 698 | self.patch(Watch, "started", lambda self: True) | ||
1510 | 930 | manager = WatchManager(None) | 699 | manager = WatchManager(None) |
1511 | 931 | # no need to stop watching because start_watching is fake | 700 | # no need to stop watching because start_watching is fake |
1512 | 932 | 701 | ||
1513 | @@ -936,7 +705,7 @@ | |||
1514 | 936 | d2 = manager.add_watch(path, mask) | 705 | d2 = manager.add_watch(path, mask) |
1515 | 937 | 706 | ||
1516 | 938 | self.assertTrue(d1.result, "Should not be called yet.") | 707 | self.assertTrue(d1.result, "Should not be called yet.") |
1518 | 939 | self.assertFalse(d2.result, "Should not be called yet.") | 708 | self.assertTrue(d2, "Should not be called yet.") |
1519 | 940 | 709 | ||
1520 | 941 | 710 | ||
1521 | 942 | class FakeEvent(object): | 711 | class FakeEvent(object): |
1522 | @@ -953,88 +722,15 @@ | |||
1523 | 953 | self.cookie = cookie | 722 | self.cookie = cookie |
1524 | 954 | 723 | ||
1525 | 955 | 724 | ||
1600 | 956 | class FakeLog(object): | 725 | class TestNotifyProcessor(common_tests.TestNotifyProcessor): |
1527 | 957 | """A fake log that is used by the general processor.""" | ||
1528 | 958 | |||
1529 | 959 | def __init__(self): | ||
1530 | 960 | """Create the fake.""" | ||
1531 | 961 | self.called_methods = [] | ||
1532 | 962 | |||
1533 | 963 | def info(self, *args): | ||
1534 | 964 | """Fake the info call.""" | ||
1535 | 965 | self.called_methods.append(('info', args)) | ||
1536 | 966 | |||
1537 | 967 | |||
1538 | 968 | class FakeGeneralProcessor(object): | ||
1539 | 969 | """Fake implementation of the general processor.""" | ||
1540 | 970 | |||
1541 | 971 | def __init__(self): | ||
1542 | 972 | """Create the fake.""" | ||
1543 | 973 | self.called_methods = [] | ||
1544 | 974 | self.paths_to_return = [] | ||
1545 | 975 | self.log = FakeLog() | ||
1546 | 976 | self.share_id = None | ||
1547 | 977 | self.ignore = False | ||
1548 | 978 | |||
1549 | 979 | def rm_from_mute_filter(self, event, paths): | ||
1550 | 980 | """Fake rm_from_mute_filter.""" | ||
1551 | 981 | self.called_methods.append(('rm_from_mute_filter', event, paths)) | ||
1552 | 982 | |||
1553 | 983 | def add_to_mute_filter(self, event, paths): | ||
1554 | 984 | """Fake add_to_move_filter.""" | ||
1555 | 985 | self.called_methods.append(('add_to_mute_filter', event, paths)) | ||
1556 | 986 | |||
1557 | 987 | def is_ignored(self, path): | ||
1558 | 988 | """Fake is_ignored.""" | ||
1559 | 989 | self.called_methods.append(('is_ignored', path)) | ||
1560 | 990 | return self.ignore | ||
1561 | 991 | |||
1562 | 992 | def push_event(self, event): | ||
1563 | 993 | """Fake push event.""" | ||
1564 | 994 | self.called_methods.append(('push_event', event)) | ||
1565 | 995 | |||
1566 | 996 | def eq_push(self, event, path=None, path_to=None, path_from=None): | ||
1567 | 997 | """Fake event to push event.""" | ||
1568 | 998 | self.called_methods.append(('eq_push', event, path, path_to, | ||
1569 | 999 | path_from)) | ||
1570 | 1000 | |||
1571 | 1001 | def get_paths_starting_with(self, fullpath, include_base=False): | ||
1572 | 1002 | """Fake get_paths_starting_with.""" | ||
1573 | 1003 | self.called_methods.append(('get_paths_starting_with', fullpath, | ||
1574 | 1004 | include_base)) | ||
1575 | 1005 | return self.paths_to_return | ||
1576 | 1006 | |||
1577 | 1007 | def get_path_share_id(self, path): | ||
1578 | 1008 | """Fake get_path_share_id.""" | ||
1579 | 1009 | self.called_methods.append(('get_path_share_id', path)) | ||
1580 | 1010 | return self.share_id | ||
1581 | 1011 | |||
1582 | 1012 | def rm_watch(self, path): | ||
1583 | 1013 | """Fake the remove watch.""" | ||
1584 | 1014 | self.called_methods.append(('rm_watch', path)) | ||
1585 | 1015 | |||
1586 | 1016 | def freeze_begin(self, path): | ||
1587 | 1017 | """Fake freeze_begin""" | ||
1588 | 1018 | self.called_methods.append(('freeze_begin', path)) | ||
1589 | 1019 | |||
1590 | 1020 | def freeze_rollback(self): | ||
1591 | 1021 | """Fake rollback.""" | ||
1592 | 1022 | self.called_methods.append(('freeze_rollback',)) | ||
1593 | 1023 | |||
1594 | 1024 | def freeze_commit(self, path): | ||
1595 | 1025 | """Fake freeze commit.""" | ||
1596 | 1026 | self.called_methods.append(('freeze_commit', path)) | ||
1597 | 1027 | |||
1598 | 1028 | |||
1599 | 1029 | class TestNotifyProcessor(BaseTwistedTestCase): | ||
1601 | 1030 | """Test the notify processor.""" | 726 | """Test the notify processor.""" |
1602 | 1031 | 727 | ||
1603 | 1032 | @defer.inlineCallbacks | 728 | @defer.inlineCallbacks |
1604 | 1033 | def setUp(self): | 729 | def setUp(self): |
1606 | 1034 | """set up the diffeent tests.""" | 730 | """set up the different tests.""" |
1607 | 1035 | yield super(TestNotifyProcessor, self).setUp() | 731 | yield super(TestNotifyProcessor, self).setUp() |
1608 | 1036 | self.processor = notify_processor.NotifyProcessor(None) | 732 | self.processor = notify_processor.NotifyProcessor(None) |
1610 | 1037 | self.general = FakeGeneralProcessor() | 733 | self.general = common_tests.FakeGeneralProcessor() |
1611 | 1038 | self.processor.general_processor = self.general | 734 | self.processor.general_processor = self.general |
1612 | 1039 | 735 | ||
1613 | 1040 | def test_rm_from_mute_filter(self): | 736 | def test_rm_from_mute_filter(self): |
1614 | 1041 | 737 | ||
1615 | === modified file 'tests/platform/filesystem_notifications/test_fsevents_daemon.py' | |||
1616 | --- tests/platform/filesystem_notifications/test_fsevents_daemon.py 2012-07-19 14:13:06 +0000 | |||
1617 | +++ tests/platform/filesystem_notifications/test_fsevents_daemon.py 2012-08-22 18:22:29 +0000 | |||
1618 | @@ -26,14 +26,14 @@ | |||
1619 | 26 | # do not wish to do so, delete this exception statement from your | 26 | # do not wish to do so, delete this exception statement from your |
1620 | 27 | # version. If you delete this exception statement from all source | 27 | # version. If you delete this exception statement from all source |
1621 | 28 | # files in the program, then also delete it here. | 28 | # files in the program, then also delete it here. |
1623 | 29 | """Tests for the fsevents daemon integration.""" | 29 | """Tests for the fseventsd daemon integration.""" |
1624 | 30 | 30 | ||
1625 | 31 | import os | 31 | import os |
1626 | 32 | 32 | ||
1627 | 33 | from twisted.internet import defer, protocol | 33 | from twisted.internet import defer, protocol |
1628 | 34 | 34 | ||
1629 | 35 | from contrib.testing.testcase import BaseTwistedTestCase | 35 | from contrib.testing.testcase import BaseTwistedTestCase |
1631 | 36 | from ubuntuone.darwin import fsevents | 36 | from ubuntuone import fseventsd |
1632 | 37 | from ubuntuone.devtools.testcases.txsocketserver import TidyUnixServer | 37 | from ubuntuone.devtools.testcases.txsocketserver import TidyUnixServer |
1633 | 38 | from ubuntuone.platform.filesystem_notifications.monitor.darwin import ( | 38 | from ubuntuone.platform.filesystem_notifications.monitor.darwin import ( |
1634 | 39 | fsevents_daemon, | 39 | fsevents_daemon, |
1635 | @@ -272,7 +272,7 @@ | |||
1636 | 272 | head, _ = os.path.split(destination_path) | 272 | head, _ = os.path.split(destination_path) |
1637 | 273 | self.factory.watched_paths.append(head) | 273 | self.factory.watched_paths.append(head) |
1638 | 274 | event = FakeDaemonEvent() | 274 | event = FakeDaemonEvent() |
1640 | 275 | event.event_type = fsevents.FSE_RENAME | 275 | event.event_type = fseventsd.FSE_RENAME |
1641 | 276 | event.event_paths.extend([source_path, destination_path]) | 276 | event.event_paths.extend([source_path, destination_path]) |
1642 | 277 | converted_events = self.factory.convert_in_pyinotify_event(event) | 277 | converted_events = self.factory.convert_in_pyinotify_event(event) |
1643 | 278 | self.assertEqual(1, len(converted_events)) | 278 | self.assertEqual(1, len(converted_events)) |
1644 | @@ -289,7 +289,7 @@ | |||
1645 | 289 | head, _ = os.path.split(source_path) | 289 | head, _ = os.path.split(source_path) |
1646 | 290 | self.factory.watched_paths.append(head) | 290 | self.factory.watched_paths.append(head) |
1647 | 291 | event = FakeDaemonEvent() | 291 | event = FakeDaemonEvent() |
1649 | 292 | event.event_type = fsevents.FSE_RENAME | 292 | event.event_type = fseventsd.FSE_RENAME |
1650 | 293 | event.event_paths.extend([source_path, destination_path]) | 293 | event.event_paths.extend([source_path, destination_path]) |
1651 | 294 | converted_events = self.factory.convert_in_pyinotify_event(event) | 294 | converted_events = self.factory.convert_in_pyinotify_event(event) |
1652 | 295 | self.assertEqual(1, len(converted_events)) | 295 | self.assertEqual(1, len(converted_events)) |
1653 | @@ -306,7 +306,7 @@ | |||
1654 | 306 | head, _ = os.path.split(source_path) | 306 | head, _ = os.path.split(source_path) |
1655 | 307 | self.factory.watched_paths.append(head) | 307 | self.factory.watched_paths.append(head) |
1656 | 308 | event = FakeDaemonEvent() | 308 | event = FakeDaemonEvent() |
1658 | 309 | event.event_type = fsevents.FSE_RENAME | 309 | event.event_type = fseventsd.FSE_RENAME |
1659 | 310 | event.event_paths.extend([source_path, destination_path]) | 310 | event.event_paths.extend([source_path, destination_path]) |
1660 | 311 | converted_events = self.factory.convert_in_pyinotify_event(event) | 311 | converted_events = self.factory.convert_in_pyinotify_event(event) |
1661 | 312 | self.assertEqual(2, len(converted_events)) | 312 | self.assertEqual(2, len(converted_events)) |
1662 | @@ -337,7 +337,7 @@ | |||
1663 | 337 | """Test processing the drop of the events.""" | 337 | """Test processing the drop of the events.""" |
1664 | 338 | func_called = [] | 338 | func_called = [] |
1665 | 339 | event = FakeDaemonEvent() | 339 | event = FakeDaemonEvent() |
1667 | 340 | event.event_type = fsevents.FSE_EVENTS_DROPPED | 340 | event.event_type = fseventsd.FSE_EVENTS_DROPPED |
1668 | 341 | 341 | ||
1669 | 342 | def fake_events_dropped(): | 342 | def fake_events_dropped(): |
1670 | 343 | """A fake events dropped implementation.""" | 343 | """A fake events dropped implementation.""" |
1671 | @@ -354,7 +354,7 @@ | |||
1672 | 354 | self.factory.ignored_paths.append(head) | 354 | self.factory.ignored_paths.append(head) |
1673 | 355 | event = FakeDaemonEvent() | 355 | event = FakeDaemonEvent() |
1674 | 356 | event.event_paths.append(event_path) | 356 | event.event_paths.append(event_path) |
1676 | 357 | event.event_type = fsevents.FSE_CREATE_FILE | 357 | event.event_type = fseventsd.FSE_CREATE_FILE |
1677 | 358 | self.factory.process_event(event) | 358 | self.factory.process_event(event) |
1678 | 359 | self.assertEqual(0, len(self.processor.processed_events)) | 359 | self.assertEqual(0, len(self.processor.processed_events)) |
1679 | 360 | 360 | ||
1680 | @@ -365,7 +365,7 @@ | |||
1681 | 365 | self.factory.watched_paths.append(head) | 365 | self.factory.watched_paths.append(head) |
1682 | 366 | event = FakeDaemonEvent() | 366 | event = FakeDaemonEvent() |
1683 | 367 | event.event_paths.append(event_path) | 367 | event.event_paths.append(event_path) |
1685 | 368 | event.event_type = fsevents.FSE_CREATE_FILE | 368 | event.event_type = fseventsd.FSE_CREATE_FILE |
1686 | 369 | self.factory.process_event(event) | 369 | self.factory.process_event(event) |
1687 | 370 | self.assertEqual(1, len(self.processor.processed_events)) | 370 | self.assertEqual(1, len(self.processor.processed_events)) |
1688 | 371 | self.assertEqual(event_path, | 371 | self.assertEqual(event_path, |
1689 | 372 | 372 | ||
1690 | === added file 'tests/platform/filesystem_notifications/test_windows.py' | |||
1691 | --- tests/platform/filesystem_notifications/test_windows.py 1970-01-01 00:00:00 +0000 | |||
1692 | +++ tests/platform/filesystem_notifications/test_windows.py 2012-08-22 18:22:29 +0000 | |||
1693 | @@ -0,0 +1,344 @@ | |||
1694 | 1 | # | ||
1695 | 2 | # Authors: Manuel de la Pena <manuel@canonical.com> | ||
1696 | 3 | # Alejandro J. Cura <alecu@canonical.com> | ||
1697 | 4 | # | ||
1698 | 5 | # Copyright 2011-2012 Canonical Ltd. | ||
1699 | 6 | # | ||
1700 | 7 | # This program is free software: you can redistribute it and/or modify it | ||
1701 | 8 | # under the terms of the GNU General Public License version 3, as published | ||
1702 | 9 | # by the Free Software Foundation. | ||
1703 | 10 | # | ||
1704 | 11 | # This program is distributed in the hope that it will be useful, but | ||
1705 | 12 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
1706 | 13 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
1707 | 14 | # PURPOSE. See the GNU General Public License for more details. | ||
1708 | 15 | # | ||
1709 | 16 | # You should have received a copy of the GNU General Public License along | ||
1710 | 17 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
1711 | 18 | # | ||
1712 | 19 | # In addition, as a special exception, the copyright holders give | ||
1713 | 20 | # permission to link the code of portions of this program with the | ||
1714 | 21 | # OpenSSL library under certain conditions as described in each | ||
1715 | 22 | # individual source file, and distribute linked combinations | ||
1716 | 23 | # including the two. | ||
1717 | 24 | # You must obey the GNU General Public License in all respects | ||
1718 | 25 | # for all of the code used other than OpenSSL. If you modify | ||
1719 | 26 | # file(s) with this exception, you may extend this exception to your | ||
1720 | 27 | # version of the file(s), but you are not obligated to do so. If you | ||
1721 | 28 | # do not wish to do so, delete this exception statement from your | ||
1722 | 29 | # version. If you delete this exception statement from all source | ||
1723 | 30 | # files in the program, then also delete it here. | ||
1724 | 31 | """Test the filesystem notifications on windows.""" | ||
1725 | 32 | |||
1726 | 33 | import os | ||
1727 | 34 | |||
1728 | 35 | from twisted.internet import defer | ||
1729 | 36 | from win32file import FILE_NOTIFY_INFORMATION | ||
1730 | 37 | |||
1731 | 38 | from ubuntuone.platform.filesystem_notifications.monitor import ( | ||
1732 | 39 | common, | ||
1733 | 40 | windows as filesystem_notifications, | ||
1734 | 41 | ) | ||
1735 | 42 | from ubuntuone.platform.filesystem_notifications.monitor.common import ( | ||
1736 | 43 | FilesystemMonitor, | ||
1737 | 44 | Watch, | ||
1738 | 45 | WatchManager, | ||
1739 | 46 | ) | ||
1740 | 47 | from ubuntuone.platform.filesystem_notifications.monitor.windows import ( | ||
1741 | 48 | FILE_NOTIFY_CHANGE_FILE_NAME, | ||
1742 | 49 | FILE_NOTIFY_CHANGE_DIR_NAME, | ||
1743 | 50 | FILE_NOTIFY_CHANGE_ATTRIBUTES, | ||
1744 | 51 | FILE_NOTIFY_CHANGE_SIZE, | ||
1745 | 52 | FILE_NOTIFY_CHANGE_LAST_WRITE, | ||
1746 | 53 | FILE_NOTIFY_CHANGE_SECURITY, | ||
1747 | 54 | FILE_NOTIFY_CHANGE_LAST_ACCESS, | ||
1748 | 55 | ) | ||
1749 | 56 | from tests.platform.filesystem_notifications import common as common_tests | ||
1750 | 57 | |||
1751 | 58 | |||
1752 | 59 | class FakeEventsProcessor(object): | ||
1753 | 60 | |||
1754 | 61 | """Handle fake events creation and processing.""" | ||
1755 | 62 | |||
1756 | 63 | def create_fake_event(self, filename): | ||
1757 | 64 | """Create a fake file event.""" | ||
1758 | 65 | return (1, filename) | ||
1759 | 66 | |||
1760 | 67 | def custom_process_events(self, watch, events): | ||
1761 | 68 | """Adapt to each platform way to process events.""" | ||
1762 | 69 | watch.platform_watch._process_events(events) | ||
1763 | 70 | |||
1764 | 71 | |||
1765 | 72 | class TestWatch(common_tests.TestWatch): | ||
1766 | 73 | """Test the watch so that it returns the same events as pyinotify.""" | ||
1767 | 74 | |||
1768 | 75 | timeout = 5 | ||
1769 | 76 | |||
1770 | 77 | @defer.inlineCallbacks | ||
1771 | 78 | def setUp(self): | ||
1772 | 79 | yield super(TestWatch, self).setUp() | ||
1773 | 80 | self.path = u'\\\\?\\C:\\path' # a valid windows path | ||
1774 | 81 | self.common_path = u'C:\\path' | ||
1775 | 82 | self.invalid_path = u'\\\\?\\C:\\path\\to\\no\\dir' | ||
1776 | 83 | self.mask = FILE_NOTIFY_CHANGE_FILE_NAME | \ | ||
1777 | 84 | FILE_NOTIFY_CHANGE_DIR_NAME | \ | ||
1778 | 85 | FILE_NOTIFY_CHANGE_ATTRIBUTES | \ | ||
1779 | 86 | FILE_NOTIFY_CHANGE_SIZE | \ | ||
1780 | 87 | FILE_NOTIFY_CHANGE_LAST_WRITE | \ | ||
1781 | 88 | FILE_NOTIFY_CHANGE_SECURITY | \ | ||
1782 | 89 | FILE_NOTIFY_CHANGE_LAST_ACCESS | ||
1783 | 90 | self.fake_events_processor = FakeEventsProcessor() | ||
1784 | 91 | |||
1785 | 92 | def file_notify_information_wrapper(buf, data): | ||
1786 | 93 | """Wrapper that gets the events and adds them to the list.""" | ||
1787 | 94 | events = FILE_NOTIFY_INFORMATION(buf, data) | ||
1788 | 95 | # we want to append the list because that is what will be logged. | ||
1789 | 96 | # If we use extend we wont have the same logging because it will | ||
1790 | 97 | # group all events in a single lists which is not what the COM API | ||
1791 | 98 | # does. | ||
1792 | 99 | str_events = [ | ||
1793 | 100 | (common.ACTIONS_NAMES[action], path) for action, path in | ||
1794 | 101 | events] | ||
1795 | 102 | self.raw_events.append(str_events) | ||
1796 | 103 | return events | ||
1797 | 104 | |||
1798 | 105 | self.patch(filesystem_notifications, 'FILE_NOTIFY_INFORMATION', | ||
1799 | 106 | file_notify_information_wrapper) | ||
1800 | 107 | |||
1801 | 108 | @defer.inlineCallbacks | ||
1802 | 109 | def test_file_write(self): | ||
1803 | 110 | """Test that the correct event is raised when a file is written.""" | ||
1804 | 111 | file_name = os.path.join(self.basedir, 'test_file_write') | ||
1805 | 112 | # create the file before recording | ||
1806 | 113 | fd = open(file_name, 'w') | ||
1807 | 114 | # clean behind us by removing the file | ||
1808 | 115 | self.addCleanup(os.remove, file_name) | ||
1809 | 116 | |||
1810 | 117 | def write_file(): | ||
1811 | 118 | """Action for the test.""" | ||
1812 | 119 | fd.write('test') | ||
1813 | 120 | fd.close() | ||
1814 | 121 | |||
1815 | 122 | events = yield self._perform_operations(self.basedir, self.mask, | ||
1816 | 123 | write_file, 1) | ||
1817 | 124 | event = events[0] | ||
1818 | 125 | self.assertFalse(event.dir) | ||
1819 | 126 | self.assertEqual(0x2, event.mask) | ||
1820 | 127 | self.assertEqual('IN_MODIFY', event.maskname) | ||
1821 | 128 | self.assertEqual(os.path.split(file_name)[1], event.name) | ||
1822 | 129 | self.assertEqual('.', event.path) | ||
1823 | 130 | self.assertEqual(os.path.join(self.basedir, file_name), event.pathname) | ||
1824 | 131 | self.assertEqual(0, event.wd) | ||
1825 | 132 | |||
1826 | 133 | @defer.inlineCallbacks | ||
1827 | 134 | def test_call_deferred_already_called(self): | ||
1828 | 135 | """Test that the function is not called.""" | ||
1829 | 136 | method_args = [] | ||
1830 | 137 | |||
1831 | 138 | def fake_call(*args, **kwargs): | ||
1832 | 139 | """Execute the call.""" | ||
1833 | 140 | method_args.append((args, kwargs),) | ||
1834 | 141 | |||
1835 | 142 | watch = Watch(1, self.path, None) | ||
1836 | 143 | yield watch.platform_watch._watch_started_deferred.callback(True) | ||
1837 | 144 | watch.platform_watch._call_deferred(fake_call, None) | ||
1838 | 145 | self.assertEqual(0, len(method_args)) | ||
1839 | 146 | |||
1840 | 147 | def test_call_deferred_not_called(self): | ||
1841 | 148 | """Test that is indeed called.""" | ||
1842 | 149 | method_args = [] | ||
1843 | 150 | |||
1844 | 151 | def fake_call(*args, **kwargs): | ||
1845 | 152 | """Execute the call.""" | ||
1846 | 153 | method_args.append((args, kwargs),) | ||
1847 | 154 | |||
1848 | 155 | watch = Watch(1, self.path, None) | ||
1849 | 156 | watch.platform_watch._call_deferred(fake_call, None) | ||
1850 | 157 | self.assertEqual(1, len(method_args)) | ||
1851 | 158 | |||
1852 | 159 | def test_started_property(self): | ||
1853 | 160 | """Test that the started property returns the started deferred.""" | ||
1854 | 161 | watch = Watch(1, self.path, None) | ||
1855 | 162 | self.assertEqual(watch.started, | ||
1856 | 163 | watch.platform_watch._watch_started_deferred) | ||
1857 | 164 | |||
1858 | 165 | def test_stopped_property(self): | ||
1859 | 166 | """Test that the stopped property returns the stopped deferred.""" | ||
1860 | 167 | watch = Watch(1, self.path, None) | ||
1861 | 168 | self.assertEqual(watch.stopped, | ||
1862 | 169 | watch.platform_watch._watch_stopped_deferred) | ||
1863 | 170 | |||
1864 | 171 | @defer.inlineCallbacks | ||
1865 | 172 | def test_start_watching_fails_early_in_thread(self): | ||
1866 | 173 | """An early failure inside the thread should errback the deferred.""" | ||
1867 | 174 | test_path = self.mktemp("test_directory") | ||
1868 | 175 | self.patch(filesystem_notifications, "CreateFileW", self.random_error) | ||
1869 | 176 | watch = Watch(1, test_path, None) | ||
1870 | 177 | d = watch.start_watching() | ||
1871 | 178 | yield self.assertFailure(d, common_tests.FakeException) | ||
1872 | 179 | |||
1873 | 180 | @defer.inlineCallbacks | ||
1874 | 181 | def test_start_watching_fails_late_in_thread(self): | ||
1875 | 182 | """A late failure inside the thread should errback the deferred.""" | ||
1876 | 183 | test_path = self.mktemp("test_directory") | ||
1877 | 184 | self.patch(filesystem_notifications, "ReadDirectoryChangesW", | ||
1878 | 185 | self.random_error) | ||
1879 | 186 | watch = Watch(1, test_path, None) | ||
1880 | 187 | d = watch.start_watching() | ||
1881 | 188 | yield self.assertFailure(d, common_tests.FakeException) | ||
1882 | 189 | |||
1883 | 190 | @defer.inlineCallbacks | ||
1884 | 191 | def test_close_handle_is_called_on_error(self): | ||
1885 | 192 | """CloseHandle is called when there's an error in the watch thread.""" | ||
1886 | 193 | test_path = self.mktemp("test_directory") | ||
1887 | 194 | close_called = [] | ||
1888 | 195 | self.patch(filesystem_notifications, "CreateFileW", lambda *_: None) | ||
1889 | 196 | self.patch(filesystem_notifications, "CloseHandle", | ||
1890 | 197 | close_called.append) | ||
1891 | 198 | self.patch(filesystem_notifications, "ReadDirectoryChangesW", | ||
1892 | 199 | self.random_error) | ||
1893 | 200 | watch = Watch(1, test_path, self.mask) | ||
1894 | 201 | d = watch.start_watching() | ||
1895 | 202 | yield self.assertFailure(d, common_tests.FakeException) | ||
1896 | 203 | self.assertEqual(len(close_called), 1) | ||
1897 | 204 | yield watch.stop_watching() | ||
1898 | 205 | |||
1899 | 206 | @defer.inlineCallbacks | ||
1900 | 207 | def test_stop_watching_fired_when_watch_thread_finishes(self): | ||
1901 | 208 | """The deferred returned is fired when the watch thread finishes.""" | ||
1902 | 209 | test_path = self.mktemp("another_test_directory") | ||
1903 | 210 | watch = Watch(1, test_path, self.mask) | ||
1904 | 211 | yield watch.start_watching() | ||
1905 | 212 | self.assertNotEqual(watch.platform_watch._watch_handle, None) | ||
1906 | 213 | yield watch.stop_watching() | ||
1907 | 214 | self.assertEqual(watch.platform_watch._watch_handle, None) | ||
1908 | 215 | |||
1909 | 216 | |||
1910 | 217 | class TestWatchManager(common_tests.TestWatchManager): | ||
1911 | 218 | """Test the watch manager.""" | ||
1912 | 219 | |||
1913 | 220 | @defer.inlineCallbacks | ||
1914 | 221 | def setUp(self): | ||
1915 | 222 | """Set each of the tests.""" | ||
1916 | 223 | yield super(TestWatchManager, self).setUp() | ||
1917 | 224 | self.parent_path = u'\\\\?\\C:\\' # a valid windows path | ||
1918 | 225 | self.path = self.parent_path + u'path' | ||
1919 | 226 | self.watch = Watch(1, self.path, None) | ||
1920 | 227 | self.manager._wdm = {1: self.watch} | ||
1921 | 228 | self.fake_events_processor = FakeEventsProcessor() | ||
1922 | 229 | |||
1923 | 230 | def test_add_single_watch(self): | ||
1924 | 231 | """Test the addition of a new single watch.""" | ||
1925 | 232 | self.was_called = False | ||
1926 | 233 | |||
1927 | 234 | def fake_start_watching(*args): | ||
1928 | 235 | """Fake start watch.""" | ||
1929 | 236 | self.was_called = True | ||
1930 | 237 | |||
1931 | 238 | self.patch(Watch, "start_watching", fake_start_watching) | ||
1932 | 239 | self.manager._wdm = {} | ||
1933 | 240 | |||
1934 | 241 | mask = 'bit_mask' | ||
1935 | 242 | self.manager.add_watch(self.path, mask) | ||
1936 | 243 | self.assertEqual(1, len(self.manager._wdm)) | ||
1937 | 244 | self.assertTrue(self.was_called, 'The watch start was not called.') | ||
1938 | 245 | self.assertEqual(self.path + os.path.sep, self.manager._wdm[0].path) | ||
1939 | 246 | self.assertEqual(filesystem_notifications.FILESYSTEM_MONITOR_MASK, | ||
1940 | 247 | self.manager._wdm[0].platform_watch._mask) | ||
1941 | 248 | |||
1942 | 249 | @defer.inlineCallbacks | ||
1943 | 250 | def test_stop_multiple(self): | ||
1944 | 251 | """Test that stop is fired when *all* watches have stopped.""" | ||
1945 | 252 | |||
1946 | 253 | def fake_stop_watching(watch): | ||
1947 | 254 | """Another fake stop watch.""" | ||
1948 | 255 | return watch.stopped | ||
1949 | 256 | |||
1950 | 257 | self.patch(Watch, "stop_watching", fake_stop_watching) | ||
1951 | 258 | second_path = self.parent_path + u"second_path" | ||
1952 | 259 | second_watch = Watch(2, second_path, None) | ||
1953 | 260 | self.manager._wdm[2] = second_watch | ||
1954 | 261 | d = self.manager.stop() | ||
1955 | 262 | self.assertFalse(d.called, "Not fired before all watches end") | ||
1956 | 263 | self.watch.stopped.callback(None) | ||
1957 | 264 | self.assertFalse(d.called, "Not fired before all watches end") | ||
1958 | 265 | second_watch.stopped.callback(None) | ||
1959 | 266 | yield d | ||
1960 | 267 | self.assertTrue(d.called, "Fired after the watches ended") | ||
1961 | 268 | |||
1962 | 269 | |||
1963 | 270 | class TestWatchManagerAddWatches(common_tests.TestWatchManagerAddWatches): | ||
1964 | 271 | """Test the watch manager.""" | ||
1965 | 272 | timeout = 5 | ||
1966 | 273 | |||
1967 | 274 | def test_add_watch_twice(self): | ||
1968 | 275 | """Adding a watch twice succeeds when the watch is running.""" | ||
1969 | 276 | self.patch(Watch, "start_watching", lambda self: self.started) | ||
1970 | 277 | manager = WatchManager(None) | ||
1971 | 278 | # no need to stop watching because start_watching is fake | ||
1972 | 279 | |||
1973 | 280 | path = u'\\\\?\\C:\\test' # a valid windows path | ||
1974 | 281 | mask = 'fake bit mask' | ||
1975 | 282 | d1 = manager.add_watch(path, mask) | ||
1976 | 283 | d2 = manager.add_watch(path, mask) | ||
1977 | 284 | |||
1978 | 285 | self.assertFalse(d1.called, "Should not be called yet.") | ||
1979 | 286 | self.assertFalse(d2.called, "Should not be called yet.") | ||
1980 | 287 | |||
1981 | 288 | manager._wdm.values()[0].started.callback(True) | ||
1982 | 289 | |||
1983 | 290 | self.assertTrue(d1.called, "Should already be called.") | ||
1984 | 291 | self.assertTrue(d2.called, "Should already be called.") | ||
1985 | 292 | |||
1986 | 293 | |||
1987 | 294 | class TestNotifyProcessor(common_tests.TestNotifyProcessor): | ||
1988 | 295 | """Test the notify processor.""" | ||
1989 | 296 | |||
1990 | 297 | @defer.inlineCallbacks | ||
1991 | 298 | def setUp(self): | ||
1992 | 299 | """set up the diffeent tests.""" | ||
1993 | 300 | yield super(TestNotifyProcessor, self).setUp() | ||
1994 | 301 | |||
1995 | 302 | |||
1996 | 303 | class FilesystemMonitorTestCase(common_tests.FilesystemMonitorTestCase): | ||
1997 | 304 | """Tests for the FilesystemMonitor.""" | ||
1998 | 305 | timeout = 5 | ||
1999 | 306 | |||
2000 | 307 | def test_add_watch_twice(self): | ||
2001 | 308 | """Check the deferred returned by a second add_watch.""" | ||
2002 | 309 | self.patch(Watch, "start_watching", lambda self: self.started) | ||
2003 | 310 | monitor = FilesystemMonitor(None, None) | ||
2004 | 311 | # no need to stop watching because start_watching is fake | ||
2005 | 312 | |||
2006 | 313 | parent_path = 'C:\\test' # a valid windows path in utf-8 bytes | ||
2007 | 314 | child_path = parent_path + "\\child" | ||
2008 | 315 | d1 = monitor.add_watch(parent_path) | ||
2009 | 316 | d2 = monitor.add_watch(child_path) | ||
2010 | 317 | |||
2011 | 318 | self.assertFalse(d1.called, "Should not be called yet.") | ||
2012 | 319 | self.assertFalse(d2.called, "Should not be called yet.") | ||
2013 | 320 | |||
2014 | 321 | monitor._watch_manager._wdm.values()[0].started.callback(True) | ||
2015 | 322 | |||
2016 | 323 | self.assertTrue(d1.called, "Should already be called.") | ||
2017 | 324 | self.assertTrue(d2.called, "Should already be called.") | ||
2018 | 325 | |||
2019 | 326 | @defer.inlineCallbacks | ||
2020 | 327 | def test_add_watches_to_udf_ancestors(self): | ||
2021 | 328 | """Test that the ancestor watches are not added.""" | ||
2022 | 329 | |||
2023 | 330 | class FakeVolume(object): | ||
2024 | 331 | """A fake UDF.""" | ||
2025 | 332 | |||
2026 | 333 | def __init__(self, ancestors): | ||
2027 | 334 | """Create a new instance.""" | ||
2028 | 335 | self.ancestors = ancestors | ||
2029 | 336 | |||
2030 | 337 | ancestors = ['~', '~\\Pictures', '~\\Pictures\\Home', ] | ||
2031 | 338 | volume = FakeVolume(ancestors) | ||
2032 | 339 | monitor = FilesystemMonitor(None, None) | ||
2033 | 340 | added = yield monitor.add_watches_to_udf_ancestors(volume) | ||
2034 | 341 | self.assertTrue(added, 'We should always return true.') | ||
2035 | 342 | # lets ensure that we never added the watches | ||
2036 | 343 | self.assertEqual(0, len(monitor._watch_manager._wdm.values()), | ||
2037 | 344 | 'No watches should have been added.') | ||
2038 | 0 | 345 | ||
2039 | === modified file 'tests/platform/ipc/test_external_interface.py' | |||
2040 | --- tests/platform/ipc/test_external_interface.py 2012-04-30 14:24:55 +0000 | |||
2041 | +++ tests/platform/ipc/test_external_interface.py 2012-08-22 18:22:29 +0000 | |||
2042 | @@ -40,6 +40,10 @@ | |||
2043 | 40 | StatusTestCase, | 40 | StatusTestCase, |
2044 | 41 | SyncDaemonTestCase, | 41 | SyncDaemonTestCase, |
2045 | 42 | ) | 42 | ) |
2046 | 43 | from ubuntuone.syncdaemon import ( | ||
2047 | 44 | RECENT_TRANSFERS, | ||
2048 | 45 | UPLOADING, | ||
2049 | 46 | ) | ||
2050 | 43 | 47 | ||
2051 | 44 | STR = 'something' | 48 | STR = 'something' |
2052 | 45 | STR_STR_DICT = {'foo': 'bar'} | 49 | STR_STR_DICT = {'foo': 'bar'} |
2053 | @@ -132,6 +136,16 @@ | |||
2054 | 132 | self.assert_remote_method('waiting_metadata', | 136 | self.assert_remote_method('waiting_metadata', |
2055 | 133 | in_signature=None, out_signature='a(sa{ss})') | 137 | in_signature=None, out_signature='a(sa{ss})') |
2056 | 134 | 138 | ||
2057 | 139 | @defer.inlineCallbacks | ||
2058 | 140 | def test_sync_menu(self): | ||
2059 | 141 | """Test sync_menu.""" | ||
2060 | 142 | result = {RECENT_TRANSFERS: [], UPLOADING: []} | ||
2061 | 143 | method = 'sync_menu' | ||
2062 | 144 | yield self.assert_method_called(self.service.status, | ||
2063 | 145 | method, result) | ||
2064 | 146 | self.assert_remote_method(method, | ||
2065 | 147 | in_signature=None, out_signature='a{sv}') | ||
2066 | 148 | |||
2067 | 135 | 149 | ||
2068 | 136 | class EventsTests(EventsTestCase): | 150 | class EventsTests(EventsTestCase): |
2069 | 137 | """Basic tests for the Events exposed object.""" | 151 | """Basic tests for the Events exposed object.""" |
2070 | 138 | 152 | ||
2071 | === modified file 'tests/platform/test_tools.py' | |||
2072 | --- tests/platform/test_tools.py 2012-05-30 15:35:49 +0000 | |||
2073 | +++ tests/platform/test_tools.py 2012-08-22 18:22:29 +0000 | |||
2074 | @@ -36,12 +36,15 @@ | |||
2075 | 36 | from ubuntuone.devtools.handlers import MementoHandler | 36 | from ubuntuone.devtools.handlers import MementoHandler |
2076 | 37 | 37 | ||
2077 | 38 | from contrib.testing.testcase import FakeCommand, skipIfOS | 38 | from contrib.testing.testcase import FakeCommand, skipIfOS |
2078 | 39 | |||
2079 | 39 | from ubuntuone.syncdaemon import ( | 40 | from ubuntuone.syncdaemon import ( |
2080 | 40 | action_queue, | 41 | action_queue, |
2081 | 41 | event_queue, | 42 | event_queue, |
2082 | 42 | interaction_interfaces, | 43 | interaction_interfaces, |
2083 | 43 | states, | 44 | states, |
2084 | 44 | volume_manager, | 45 | volume_manager, |
2085 | 46 | RECENT_TRANSFERS, | ||
2086 | 47 | UPLOADING, | ||
2087 | 45 | ) | 48 | ) |
2088 | 46 | from ubuntuone.platform import tools | 49 | from ubuntuone.platform import tools |
2089 | 47 | from tests.platform import IPCTestCase | 50 | from tests.platform import IPCTestCase |
2090 | @@ -243,6 +246,13 @@ | |||
2091 | 243 | self.assertEqual('share_id', result['volume_id']) | 246 | self.assertEqual('share_id', result['volume_id']) |
2092 | 244 | self.assertEqual(False, self.main.vm.shares['share_id'].accepted) | 247 | self.assertEqual(False, self.main.vm.shares['share_id'].accepted) |
2093 | 245 | 248 | ||
2094 | 249 | @defer.inlineCallbacks | ||
2095 | 250 | def test_sync_menu(self): | ||
2096 | 251 | """Test sync_menu method.""" | ||
2097 | 252 | result = yield self.tool.sync_menu() | ||
2098 | 253 | self.assertIn(RECENT_TRANSFERS, result) | ||
2099 | 254 | self.assertIn(UPLOADING, result) | ||
2100 | 255 | |||
2101 | 246 | 256 | ||
2102 | 247 | class TestWaitForSignals(TestToolsBase): | 257 | class TestWaitForSignals(TestToolsBase): |
2103 | 248 | """Test case for the wait_for_signals method from SyncDaemonTool.""" | 258 | """Test case for the wait_for_signals method from SyncDaemonTool.""" |
2104 | 249 | 259 | ||
2105 | === modified file 'tests/status/test_aggregator.py' | |||
2106 | --- tests/status/test_aggregator.py 2012-04-09 20:07:05 +0000 | |||
2107 | +++ tests/status/test_aggregator.py 2012-08-22 18:22:29 +0000 | |||
2108 | @@ -42,7 +42,11 @@ | |||
2109 | 42 | from ubuntuone.status import aggregator | 42 | from ubuntuone.status import aggregator |
2110 | 43 | from ubuntuone.status.notification import AbstractNotification | 43 | from ubuntuone.status.notification import AbstractNotification |
2111 | 44 | from ubuntuone.status.messaging import AbstractMessaging | 44 | from ubuntuone.status.messaging import AbstractMessaging |
2113 | 45 | from ubuntuone.syncdaemon import status_listener | 45 | from ubuntuone.syncdaemon import ( |
2114 | 46 | status_listener, | ||
2115 | 47 | RECENT_TRANSFERS, | ||
2116 | 48 | UPLOADING, | ||
2117 | 49 | ) | ||
2118 | 46 | from ubuntuone.syncdaemon.volume_manager import Share, UDF, Root | 50 | from ubuntuone.syncdaemon.volume_manager import Share, UDF, Root |
2119 | 47 | 51 | ||
2120 | 48 | FILENAME = 'example.txt' | 52 | FILENAME = 'example.txt' |
2121 | @@ -706,6 +710,8 @@ | |||
2122 | 706 | self.share_id = path | 710 | self.share_id = path |
2123 | 707 | self.node_id = path | 711 | self.node_id = path |
2124 | 708 | self.deflated_size = 10000 | 712 | self.deflated_size = 10000 |
2125 | 713 | self.size = 0 | ||
2126 | 714 | self.n_bytes_written = 0 | ||
2127 | 709 | 715 | ||
2128 | 710 | 716 | ||
2129 | 711 | class FakeVolumeManager(object): | 717 | class FakeVolumeManager(object): |
2130 | @@ -733,6 +739,7 @@ | |||
2131 | 733 | self.files_uploading = [] | 739 | self.files_uploading = [] |
2132 | 734 | self.files_downloading = [] | 740 | self.files_downloading = [] |
2133 | 735 | self.progress_events = [] | 741 | self.progress_events = [] |
2134 | 742 | self.recent_transfers = aggregator.deque(maxlen=10) | ||
2135 | 736 | 743 | ||
2136 | 737 | def queue_done(self): | 744 | def queue_done(self): |
2137 | 738 | """The queue completed all operations.""" | 745 | """The queue completed all operations.""" |
2138 | @@ -762,6 +769,7 @@ | |||
2139 | 762 | """An upload just finished.""" | 769 | """An upload just finished.""" |
2140 | 763 | if command in self.files_uploading: | 770 | if command in self.files_uploading: |
2141 | 764 | self.files_uploading.remove(command) | 771 | self.files_uploading.remove(command) |
2142 | 772 | self.recent_transfers.append(command.path) | ||
2143 | 765 | self.queued_commands.discard(command) | 773 | self.queued_commands.discard(command) |
2144 | 766 | 774 | ||
2145 | 767 | def progress_made(self, share_id, node_id, n_bytes, deflated_size): | 775 | def progress_made(self, share_id, node_id, n_bytes, deflated_size): |
2146 | @@ -796,6 +804,70 @@ | |||
2147 | 796 | self.fakevm, | 804 | self.fakevm, |
2148 | 797 | self.status_frontend) | 805 | self.status_frontend) |
2149 | 798 | 806 | ||
2150 | 807 | def test_recent_transfers(self): | ||
2151 | 808 | """Check that it generates a tuple with the recent transfers.""" | ||
2152 | 809 | self.patch(status_listener.action_queue, "Upload", FakeCommand) | ||
2153 | 810 | fake_command = FakeCommand('path1') | ||
2154 | 811 | self.listener.handle_SYS_QUEUE_ADDED(fake_command) | ||
2155 | 812 | self.listener.handle_SYS_QUEUE_REMOVED(fake_command) | ||
2156 | 813 | fake_command = FakeCommand('path2') | ||
2157 | 814 | self.listener.handle_SYS_QUEUE_ADDED(fake_command) | ||
2158 | 815 | self.listener.handle_SYS_QUEUE_REMOVED(fake_command) | ||
2159 | 816 | fake_command = FakeCommand('path3') | ||
2160 | 817 | self.listener.handle_SYS_QUEUE_ADDED(fake_command) | ||
2161 | 818 | self.listener.handle_SYS_QUEUE_REMOVED(fake_command) | ||
2162 | 819 | transfers = self.status_frontend.recent_transfers() | ||
2163 | 820 | expected = ['path1', 'path2', 'path3'] | ||
2164 | 821 | self.assertEqual(transfers, expected) | ||
2165 | 822 | |||
2166 | 823 | menu_data = self.listener.menu_data() | ||
2167 | 824 | self.assertEqual(menu_data, | ||
2168 | 825 | {UPLOADING: [], RECENT_TRANSFERS: expected}) | ||
2169 | 826 | |||
2170 | 827 | def test_file_uploading(self): | ||
2171 | 828 | """Check that it returns a list with the path, size, and progress.""" | ||
2172 | 829 | fc = FakeCommand(path='testfile.txt') | ||
2173 | 830 | fc.size = 200 | ||
2174 | 831 | self.status_frontend.upload_started(fc) | ||
2175 | 832 | uploading = self.status_frontend.files_uploading() | ||
2176 | 833 | expected = [('testfile.txt', 200, 0)] | ||
2177 | 834 | self.assertEqual(uploading, expected) | ||
2178 | 835 | menu_data = self.listener.menu_data() | ||
2179 | 836 | self.assertEqual(menu_data, | ||
2180 | 837 | {UPLOADING: expected, RECENT_TRANSFERS: []}) | ||
2181 | 838 | |||
2182 | 839 | fc.size = 1000 | ||
2183 | 840 | fc.n_bytes_written = 200 | ||
2184 | 841 | fc2 = FakeCommand(path='testfile2.txt') | ||
2185 | 842 | fc2.size = 2000 | ||
2186 | 843 | fc2.n_bytes_written = 450 | ||
2187 | 844 | self.status_frontend.upload_started(fc2) | ||
2188 | 845 | uploading = self.status_frontend.files_uploading() | ||
2189 | 846 | expected = [('testfile.txt', 1000, 200), ('testfile2.txt', 2000, 450)] | ||
2190 | 847 | self.assertEqual(uploading, expected) | ||
2191 | 848 | |||
2192 | 849 | menu_data = self.listener.menu_data() | ||
2193 | 850 | self.assertEqual(menu_data, | ||
2194 | 851 | {UPLOADING: expected, RECENT_TRANSFERS: []}) | ||
2195 | 852 | |||
2196 | 853 | def test_menu_data_full_response(self): | ||
2197 | 854 | """Check that listener.menu_data returns both uploading and recent.""" | ||
2198 | 855 | self.patch(status_listener.action_queue, "Upload", FakeCommand) | ||
2199 | 856 | fake_command = FakeCommand('path1') | ||
2200 | 857 | self.listener.handle_SYS_QUEUE_ADDED(fake_command) | ||
2201 | 858 | self.listener.handle_SYS_QUEUE_REMOVED(fake_command) | ||
2202 | 859 | fc = FakeCommand(path='testfile.txt') | ||
2203 | 860 | fc.size = 1000 | ||
2204 | 861 | fc.n_bytes_written = 200 | ||
2205 | 862 | self.status_frontend.upload_started(fc) | ||
2206 | 863 | uploading = self.status_frontend.files_uploading() | ||
2207 | 864 | transfers = self.status_frontend.recent_transfers() | ||
2208 | 865 | expected = {UPLOADING: [('testfile.txt', 1000, 200)], | ||
2209 | 866 | RECENT_TRANSFERS: ['path1']} | ||
2210 | 867 | |||
2211 | 868 | self.assertEqual( | ||
2212 | 869 | {UPLOADING: uploading, RECENT_TRANSFERS: transfers}, expected) | ||
2213 | 870 | |||
2214 | 799 | def test_file_published(self): | 871 | def test_file_published(self): |
2215 | 800 | """A file published event is processed.""" | 872 | """A file published event is processed.""" |
2216 | 801 | share_id = "fake share id" | 873 | share_id = "fake share id" |
2217 | @@ -1308,6 +1380,15 @@ | |||
2218 | 1308 | self.assertEqual( | 1380 | self.assertEqual( |
2219 | 1309 | {(fc.share_id, fc.node_id): (fc.deflated_size)}, | 1381 | {(fc.share_id, fc.node_id): (fc.deflated_size)}, |
2220 | 1310 | self.aggregator.progress) | 1382 | self.aggregator.progress) |
2221 | 1383 | self.assertEqual(len(self.aggregator.recent_transfers), 1) | ||
2222 | 1384 | |||
2223 | 1385 | def test_max_recent_files(self): | ||
2224 | 1386 | """Check that the queue doesn't exceed the 5 items.""" | ||
2225 | 1387 | for i in range(15): | ||
2226 | 1388 | fc = FakeCommand() | ||
2227 | 1389 | self.status_frontend.upload_started(fc) | ||
2228 | 1390 | self.status_frontend.upload_finished(fc) | ||
2229 | 1391 | self.assertEqual(len(self.aggregator.recent_transfers), 5) | ||
2230 | 1311 | 1392 | ||
2231 | 1312 | def test_progress_made(self): | 1393 | def test_progress_made(self): |
2232 | 1313 | """Progress on up and downloads is tracked.""" | 1394 | """Progress on up and downloads is tracked.""" |
2233 | 1314 | 1395 | ||
2234 | === modified file 'tests/syncdaemon/test_fsm.py' | |||
2235 | --- tests/syncdaemon/test_fsm.py 2012-04-09 20:07:05 +0000 | |||
2236 | +++ tests/syncdaemon/test_fsm.py 2012-08-22 18:22:29 +0000 | |||
2237 | @@ -1626,6 +1626,33 @@ | |||
2238 | 1626 | mdobj = self.fsm.get_by_mdid(mdid) | 1626 | mdobj = self.fsm.get_by_mdid(mdid) |
2239 | 1627 | self.assertEqual(mdobj.stat, stat_path(path)) | 1627 | self.assertEqual(mdobj.stat, stat_path(path)) |
2240 | 1628 | 1628 | ||
2241 | 1629 | def test_commit_partial_pushes_event(self): | ||
2242 | 1630 | """Test that the right event is pushed after the commit.""" | ||
2243 | 1631 | listener = Listener() | ||
2244 | 1632 | self.eq.subscribe(listener) | ||
2245 | 1633 | |||
2246 | 1634 | path = os.path.join(self.share.path, "thisfile") | ||
2247 | 1635 | open_file(path, "w").close() | ||
2248 | 1636 | mdobj = self.create_node("thisfile") | ||
2249 | 1637 | mdid = mdobj.mdid | ||
2250 | 1638 | oldstat = stat_path(path) | ||
2251 | 1639 | self.assertEqual(mdobj.stat, oldstat) | ||
2252 | 1640 | |||
2253 | 1641 | # create a partial | ||
2254 | 1642 | self.fsm.create_partial(mdobj.node_id, mdobj.share_id) | ||
2255 | 1643 | fh = self.fsm.get_partial_for_writing(mdobj.node_id, mdobj.share_id) | ||
2256 | 1644 | fh.write("foobar") | ||
2257 | 1645 | fh.close() | ||
2258 | 1646 | mdobj = self.fsm.get_by_mdid(mdid) | ||
2259 | 1647 | self.assertEqual(mdobj.stat, oldstat) | ||
2260 | 1648 | |||
2261 | 1649 | # commit the partial | ||
2262 | 1650 | self.fsm.commit_partial(mdobj.node_id, mdobj.share_id, "localhash") | ||
2263 | 1651 | mdobj = self.fsm.get_by_mdid(mdid) | ||
2264 | 1652 | |||
2265 | 1653 | kwargs = dict(share_id=mdobj.share_id, node_id=mdobj.node_id) | ||
2266 | 1654 | self.assertTrue(("FSM_PARTIAL_COMMITED", kwargs) in listener.events) | ||
2267 | 1655 | |||
2268 | 1629 | def test_move(self): | 1656 | def test_move(self): |
2269 | 1630 | """Test that move refreshes stat.""" | 1657 | """Test that move refreshes stat.""" |
2270 | 1631 | path1 = os.path.join(self.share.path, "thisfile1") | 1658 | path1 = os.path.join(self.share.path, "thisfile1") |
2271 | 1632 | 1659 | ||
2272 | === modified file 'tests/syncdaemon/test_interaction_interfaces.py' | |||
2273 | --- tests/syncdaemon/test_interaction_interfaces.py 2012-04-09 20:07:05 +0000 | |||
2274 | +++ tests/syncdaemon/test_interaction_interfaces.py 2012-08-22 18:22:29 +0000 | |||
2275 | @@ -1226,18 +1226,19 @@ | |||
2276 | 1226 | self.addCleanup(self.main.event_q.unsubscribe, self.sd_obj) | 1226 | self.addCleanup(self.main.event_q.unsubscribe, self.sd_obj) |
2277 | 1227 | 1227 | ||
2278 | 1228 | 1228 | ||
2281 | 1229 | class DownloadTestCase(SyncdaemonEventListenerTestCase): | 1229 | class UploadTestCase(SyncdaemonEventListenerTestCase): |
2282 | 1230 | """Test the Download events in SyncdaemonEventListener.""" | 1230 | """Test the Upload events in SyncdaemonEventListener.""" |
2283 | 1231 | 1231 | ||
2284 | 1232 | add_fsm_key = True | 1232 | add_fsm_key = True |
2289 | 1233 | direction = 'Download' | 1233 | direction = 'Upload' |
2290 | 1234 | bytes_key = 'n_bytes_read' | 1234 | bytes_key = 'n_bytes_written' |
2291 | 1235 | hash_kwarg = 'server_hash' | 1235 | hash_kwarg = 'hash' |
2292 | 1236 | extra_finished_args = {} | 1236 | extra_finished_args = dict(new_generation='new_generation', hash='') |
2293 | 1237 | finished_event = 'AQ_UPLOAD_FINISHED' | ||
2294 | 1237 | 1238 | ||
2295 | 1238 | @defer.inlineCallbacks | 1239 | @defer.inlineCallbacks |
2296 | 1239 | def setUp(self): | 1240 | def setUp(self): |
2298 | 1240 | yield super(DownloadTestCase, self).setUp() | 1241 | yield super(UploadTestCase, self).setUp() |
2299 | 1241 | self.deferred = None | 1242 | self.deferred = None |
2300 | 1242 | self.signal_name = None | 1243 | self.signal_name = None |
2301 | 1243 | if self.add_fsm_key: | 1244 | if self.add_fsm_key: |
2302 | @@ -1304,7 +1305,7 @@ | |||
2303 | 1304 | return self.deferred | 1305 | return self.deferred |
2304 | 1305 | 1306 | ||
2305 | 1306 | def test_handle_finished(self): | 1307 | def test_handle_finished(self): |
2307 | 1307 | """Test the handle_AQ_<direction>_FINISHED method.""" | 1308 | """Test the handle_<finished_event> method.""" |
2308 | 1308 | self.signal_name = self.direction + 'Finished' | 1309 | self.signal_name = self.direction + 'Finished' |
2309 | 1309 | self.deferred = defer.Deferred() | 1310 | self.deferred = defer.Deferred() |
2310 | 1310 | 1311 | ||
2311 | @@ -1320,10 +1321,9 @@ | |||
2312 | 1320 | self.patch(self.sd_obj.interface.status, 'SignalError', | 1321 | self.patch(self.sd_obj.interface.status, 'SignalError', |
2313 | 1321 | self.error_handler) | 1322 | self.error_handler) |
2314 | 1322 | 1323 | ||
2316 | 1323 | kwargs = {'share_id': '', 'node_id': 'node_id', self.hash_kwarg: ''} | 1324 | kwargs = {'share_id': '', 'node_id': 'node_id'} |
2317 | 1324 | kwargs.update(self.extra_finished_args) | 1325 | kwargs.update(self.extra_finished_args) |
2320 | 1325 | self.main.event_q.push('AQ_%s_FINISHED' % self.direction.upper(), | 1326 | self.main.event_q.push(self.finished_event, **kwargs) |
2319 | 1326 | **kwargs) | ||
2321 | 1327 | return self.deferred | 1327 | return self.deferred |
2322 | 1328 | 1328 | ||
2323 | 1329 | def test_handle_event_error(self): | 1329 | def test_handle_event_error(self): |
2324 | @@ -1350,21 +1350,29 @@ | |||
2325 | 1350 | return self.deferred | 1350 | return self.deferred |
2326 | 1351 | 1351 | ||
2327 | 1352 | 1352 | ||
2328 | 1353 | class DownloadTestCase(UploadTestCase): | ||
2329 | 1354 | """Test the Download events in SyncdaemonEventListener.""" | ||
2330 | 1355 | |||
2331 | 1356 | direction = 'Download' | ||
2332 | 1357 | bytes_key = 'n_bytes_read' | ||
2333 | 1358 | hash_kwarg = 'server_hash' | ||
2334 | 1359 | extra_finished_args = {} | ||
2335 | 1360 | finished_event = 'FSM_PARTIAL_COMMITED' | ||
2336 | 1361 | |||
2337 | 1362 | # The download is special, because we don't want to throw the ipc signal on | ||
2338 | 1363 | # AQ_DOWNLOAD_FINISHED but instead we should wait for FSM_PARTIAL_COMMITED | ||
2339 | 1364 | |||
2340 | 1365 | def test_ignore_pre_partial_commit_event(self): | ||
2341 | 1366 | """The AQ_DOWNLOAD_FINISHED signal is ignored.""" | ||
2342 | 1367 | self.assertNotIn("handle_AQ_DOWNLOAD_FINISHED", vars(self.sd_class)) | ||
2343 | 1368 | |||
2344 | 1369 | |||
2345 | 1353 | class DownloadNoKeyTestCase(DownloadTestCase): | 1370 | class DownloadNoKeyTestCase(DownloadTestCase): |
2346 | 1354 | """Test the Download events when there is a fsm KeyError.""" | 1371 | """Test the Download events when there is a fsm KeyError.""" |
2347 | 1355 | 1372 | ||
2348 | 1356 | add_fsm_key = False | 1373 | add_fsm_key = False |
2349 | 1357 | 1374 | ||
2350 | 1358 | 1375 | ||
2351 | 1359 | class UploadTestCase(DownloadTestCase): | ||
2352 | 1360 | """Test the Upload events in SyncdaemonEventListener.""" | ||
2353 | 1361 | |||
2354 | 1362 | direction = 'Upload' | ||
2355 | 1363 | bytes_key = 'n_bytes_written' | ||
2356 | 1364 | hash_kwarg = 'hash' | ||
2357 | 1365 | extra_finished_args = dict(new_generation='new_generation') | ||
2358 | 1366 | |||
2359 | 1367 | |||
2360 | 1368 | class UploadNoKeyTestCase(UploadTestCase): | 1376 | class UploadNoKeyTestCase(UploadTestCase): |
2361 | 1369 | """Test the Upload events when there is a fsm KeyError.""" | 1377 | """Test the Upload events when there is a fsm KeyError.""" |
2362 | 1370 | 1378 | ||
2363 | 1371 | 1379 | ||
2364 | === modified file 'tests/syncdaemon/test_vm.py' | |||
2365 | --- tests/syncdaemon/test_vm.py 2012-04-09 20:07:05 +0000 | |||
2366 | +++ tests/syncdaemon/test_vm.py 2012-08-22 18:22:29 +0000 | |||
2367 | @@ -2752,7 +2752,7 @@ | |||
2368 | 2752 | 2752 | ||
2369 | 2753 | result, msg = self.vm.validate_path_for_folder(folder_path) | 2753 | result, msg = self.vm.validate_path_for_folder(folder_path) |
2370 | 2754 | self.assertTrue(result) | 2754 | self.assertTrue(result) |
2372 | 2755 | self.assertIs(msg, "", | 2755 | self.assertIs(msg, "", |
2373 | 2756 | '%r must be a valid path for creating a folder.' % folder_path) | 2756 | '%r must be a valid path for creating a folder.' % folder_path) |
2374 | 2757 | 2757 | ||
2375 | 2758 | def test_validate_UDF_path_if_folder_shares_a_prefix_with_an_udf(self): | 2758 | def test_validate_UDF_path_if_folder_shares_a_prefix_with_an_udf(self): |
2376 | @@ -2769,7 +2769,7 @@ | |||
2377 | 2769 | 2769 | ||
2378 | 2770 | result, msg = self.vm.validate_path_for_folder(tricky_path) | 2770 | result, msg = self.vm.validate_path_for_folder(tricky_path) |
2379 | 2771 | self.assertTrue(result) | 2771 | self.assertTrue(result) |
2381 | 2772 | self.assertIs(msg, "", | 2772 | self.assertIs(msg, "", |
2382 | 2773 | '%r must be a valid path for creating a folder.' % tricky_path) | 2773 | '%r must be a valid path for creating a folder.' % tricky_path) |
2383 | 2774 | 2774 | ||
2384 | 2775 | def test_validate_UDF_path_not_valid_if_outside_home(self): | 2775 | def test_validate_UDF_path_not_valid_if_outside_home(self): |
2385 | @@ -2818,6 +2818,17 @@ | |||
2386 | 2818 | self.assertIsNot(msg, "", | 2818 | self.assertIsNot(msg, "", |
2387 | 2819 | '%r must be an invalid path for creating a folder.' % udf_parent) | 2819 | '%r must be an invalid path for creating a folder.' % udf_parent) |
2388 | 2820 | 2820 | ||
2389 | 2821 | def test_not_valid_if_folder_is_file(self): | ||
2390 | 2822 | """A link path is not valid.""" | ||
2391 | 2823 | self.patch(volume_manager.os.path, 'isdir', lambda p: False) | ||
2392 | 2824 | self.patch(volume_manager, 'path_exists', lambda p: True) | ||
2393 | 2825 | path_link = os.path.join(self.home_dir, 'Test Me') | ||
2394 | 2826 | |||
2395 | 2827 | result, msg = self.vm.validate_path_for_folder(path_link) | ||
2396 | 2828 | self.assertFalse(result) | ||
2397 | 2829 | self.assertIsNot(msg, "", | ||
2398 | 2830 | '%r must be an invalid path for creating a folder.' % path_link) | ||
2399 | 2831 | |||
2400 | 2821 | def test_not_valid_if_folder_is_link(self): | 2832 | def test_not_valid_if_folder_is_link(self): |
2401 | 2822 | """A link path is not valid.""" | 2833 | """A link path is not valid.""" |
2402 | 2823 | self.patch(volume_manager, 'is_link', lambda p: True) | 2834 | self.patch(volume_manager, 'is_link', lambda p: True) |
2403 | 2824 | 2835 | ||
2404 | === modified file 'ubuntuone/platform/filesystem_notifications/monitor/__init__.py' | |||
2405 | --- ubuntuone/platform/filesystem_notifications/monitor/__init__.py 2012-07-18 15:18:04 +0000 | |||
2406 | +++ ubuntuone/platform/filesystem_notifications/monitor/__init__.py 2012-08-22 18:22:29 +0000 | |||
2407 | @@ -42,11 +42,13 @@ | |||
2408 | 42 | if sys.platform == 'win32': | 42 | if sys.platform == 'win32': |
2409 | 43 | from ubuntuone.platform.filesystem_notifications.monitor import ( | 43 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
2410 | 44 | common, | 44 | common, |
2411 | 45 | windows, | ||
2412 | 45 | ) | 46 | ) |
2413 | 46 | 47 | ||
2414 | 47 | FILEMONITOR_IDS = { | 48 | FILEMONITOR_IDS = { |
2415 | 48 | DEFAULT_MONITOR: common.FilesystemMonitor, | 49 | DEFAULT_MONITOR: common.FilesystemMonitor, |
2416 | 49 | } | 50 | } |
2417 | 51 | ACTIONS = windows.ACTIONS | ||
2418 | 50 | 52 | ||
2419 | 51 | elif sys.platform == 'darwin': | 53 | elif sys.platform == 'darwin': |
2420 | 52 | from ubuntuone.platform.filesystem_notifications.monitor import darwin | 54 | from ubuntuone.platform.filesystem_notifications.monitor import darwin |
2421 | @@ -58,6 +60,7 @@ | |||
2422 | 58 | DEFAULT_MONITOR: common.FilesystemMonitor, | 60 | DEFAULT_MONITOR: common.FilesystemMonitor, |
2423 | 59 | 'daemon': darwin.fsevents_daemon.FilesystemMonitor, | 61 | 'daemon': darwin.fsevents_daemon.FilesystemMonitor, |
2424 | 60 | } | 62 | } |
2425 | 63 | ACTIONS = darwin.fsevents_client.ACTIONS | ||
2426 | 61 | else: | 64 | else: |
2427 | 62 | from ubuntuone.platform.filesystem_notifications.monitor import ( | 65 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
2428 | 63 | linux, | 66 | linux, |
2429 | 64 | 67 | ||
2430 | === modified file 'ubuntuone/platform/filesystem_notifications/monitor/common.py' | |||
2431 | --- ubuntuone/platform/filesystem_notifications/monitor/common.py 2012-07-18 09:05:26 +0000 | |||
2432 | +++ ubuntuone/platform/filesystem_notifications/monitor/common.py 2012-08-22 18:22:29 +0000 | |||
2433 | @@ -65,6 +65,7 @@ | |||
2434 | 65 | else: | 65 | else: |
2435 | 66 | raise ImportError('Not supported platform') | 66 | raise ImportError('Not supported platform') |
2436 | 67 | 67 | ||
2437 | 68 | |||
2438 | 68 | # a map between the few events that we have on common platforms and those | 69 | # a map between the few events that we have on common platforms and those |
2439 | 69 | # found in pyinotify | 70 | # found in pyinotify |
2440 | 70 | ACTIONS = source.ACTIONS | 71 | ACTIONS = source.ACTIONS |
2441 | 71 | 72 | ||
2442 | === modified file 'ubuntuone/platform/filesystem_notifications/monitor/darwin/fsevents_daemon.py' | |||
2443 | --- ubuntuone/platform/filesystem_notifications/monitor/darwin/fsevents_daemon.py 2012-07-19 14:13:06 +0000 | |||
2444 | +++ ubuntuone/platform/filesystem_notifications/monitor/darwin/fsevents_daemon.py 2012-08-22 18:22:29 +0000 | |||
2445 | @@ -25,7 +25,7 @@ | |||
2446 | 25 | # do not wish to do so, delete this exception statement from your | 25 | # do not wish to do so, delete this exception statement from your |
2447 | 26 | # version. If you delete this exception statement from all source | 26 | # version. If you delete this exception statement from all source |
2448 | 27 | # files in the program, then also delete it here. | 27 | # files in the program, then also delete it here. |
2450 | 28 | """Filesystem notifications based on the fsevents daemon.""" | 28 | """Filesystem notifications based on the fseventsd daemon.""" |
2451 | 29 | 29 | ||
2452 | 30 | import logging | 30 | import logging |
2453 | 31 | import os | 31 | import os |
2454 | @@ -42,7 +42,7 @@ | |||
2455 | 42 | ) | 42 | ) |
2456 | 43 | 43 | ||
2457 | 44 | from ubuntuone import logger | 44 | from ubuntuone import logger |
2459 | 45 | from ubuntuone.darwin import fsevents | 45 | from ubuntuone import fseventsd |
2460 | 46 | from ubuntuone.platform.filesystem_notifications.notify_processor import ( | 46 | from ubuntuone.platform.filesystem_notifications.notify_processor import ( |
2461 | 47 | NotifyProcessor, | 47 | NotifyProcessor, |
2462 | 48 | ) | 48 | ) |
2463 | @@ -61,24 +61,24 @@ | |||
2464 | 61 | 61 | ||
2465 | 62 | TRACE = logger.TRACE | 62 | TRACE = logger.TRACE |
2466 | 63 | 63 | ||
2468 | 64 | # map the fsevents actions to those from pyinotify | 64 | # map the fseventsd actions to those from pyinotify |
2469 | 65 | DARWIN_ACTIONS = { | 65 | DARWIN_ACTIONS = { |
2475 | 66 | fsevents.FSE_CREATE_FILE: IN_CREATE, | 66 | fseventsd.FSE_CREATE_FILE: IN_CREATE, |
2476 | 67 | fsevents.FSE_DELETE: IN_DELETE, | 67 | fseventsd.FSE_DELETE: IN_DELETE, |
2477 | 68 | fsevents.FSE_STAT_CHANGED: IN_MODIFY, | 68 | fseventsd.FSE_STAT_CHANGED: IN_MODIFY, |
2478 | 69 | fsevents.FSE_CONTENT_MODIFIED: IN_MODIFY, | 69 | fseventsd.FSE_CONTENT_MODIFIED: IN_MODIFY, |
2479 | 70 | fsevents.FSE_CREATE_DIR: IN_CREATE, | 70 | fseventsd.FSE_CREATE_DIR: IN_CREATE, |
2480 | 71 | } | 71 | } |
2481 | 72 | 72 | ||
2482 | 73 | # list of those events that we do not care about | 73 | # list of those events that we do not care about |
2483 | 74 | DARWIN_IGNORED_ACTIONS = ( | 74 | DARWIN_IGNORED_ACTIONS = ( |
2491 | 75 | fsevents.FSE_UNKNOWN, | 75 | fseventsd.FSE_UNKNOWN, |
2492 | 76 | fsevents.FSE_INVALID, | 76 | fseventsd.FSE_INVALID, |
2493 | 77 | fsevents.FSE_EXCHANGE, | 77 | fseventsd.FSE_EXCHANGE, |
2494 | 78 | fsevents.FSE_FINDER_INFO_CHANGED, | 78 | fseventsd.FSE_FINDER_INFO_CHANGED, |
2495 | 79 | fsevents.FSE_CHOWN, | 79 | fseventsd.FSE_CHOWN, |
2496 | 80 | fsevents.FSE_XATTR_MODIFIED, | 80 | fseventsd.FSE_XATTR_MODIFIED, |
2497 | 81 | fsevents.FSE_XATTR_REMOVED, | 81 | fseventsd.FSE_XATTR_REMOVED, |
2498 | 82 | ) | 82 | ) |
2499 | 83 | 83 | ||
2500 | 84 | # translates quickly the event and it's is_dir state to our standard events | 84 | # translates quickly the event and it's is_dir state to our standard events |
2501 | @@ -95,7 +95,7 @@ | |||
2502 | 95 | IN_MOVED_TO: 'FS_FILE_CREATE', | 95 | IN_MOVED_TO: 'FS_FILE_CREATE', |
2503 | 96 | IN_MOVED_TO | IN_ISDIR: 'FS_DIR_CREATE'} | 96 | IN_MOVED_TO | IN_ISDIR: 'FS_DIR_CREATE'} |
2504 | 97 | 97 | ||
2506 | 98 | # TODO: This should be in fsevents to be imported! | 98 | # TODO: This should be in fseventsd to be imported! |
2507 | 99 | # Path to the socket used by the daemon | 99 | # Path to the socket used by the daemon |
2508 | 100 | DAEMON_SOCKET = '/var/run/ubuntuone_fsevents_daemon' | 100 | DAEMON_SOCKET = '/var/run/ubuntuone_fsevents_daemon' |
2509 | 101 | 101 | ||
2510 | @@ -134,14 +134,14 @@ | |||
2511 | 134 | return unicodedata.normalize('NFC', path).encode('utf-8') | 134 | return unicodedata.normalize('NFC', path).encode('utf-8') |
2512 | 135 | 135 | ||
2513 | 136 | 136 | ||
2515 | 137 | class PyInotifyEventsFactory(fsevents.FsEventsFactory): | 137 | class PyInotifyEventsFactory(fseventsd.FsEventsFactory): |
2516 | 138 | """Factory that process events and converts them in pyinotify ones.""" | 138 | """Factory that process events and converts them in pyinotify ones.""" |
2517 | 139 | 139 | ||
2518 | 140 | def __init__(self, processor, | 140 | def __init__(self, processor, |
2519 | 141 | ignored_events=DARWIN_IGNORED_ACTIONS): | 141 | ignored_events=DARWIN_IGNORED_ACTIONS): |
2520 | 142 | """Create a new instance.""" | 142 | """Create a new instance.""" |
2521 | 143 | # old style class | 143 | # old style class |
2523 | 144 | fsevents.FsEventsFactory.__init__(self) | 144 | fseventsd.FsEventsFactory.__init__(self) |
2524 | 145 | self._processor = processor | 145 | self._processor = processor |
2525 | 146 | self._ignored_events = ignored_events | 146 | self._ignored_events = ignored_events |
2526 | 147 | self.watched_paths = [] | 147 | self.watched_paths = [] |
2527 | @@ -215,7 +215,7 @@ | |||
2528 | 215 | """Get an event from the daemon and convert it in a pyinotify one.""" | 215 | """Get an event from the daemon and convert it in a pyinotify one.""" |
2529 | 216 | # the rename is a special type of event because it has to be either | 216 | # the rename is a special type of event because it has to be either |
2530 | 217 | # converted is a pair of events or in a single one (CREATE or DELETE) | 217 | # converted is a pair of events or in a single one (CREATE or DELETE) |
2532 | 218 | if event.event_type == fsevents.FSE_RENAME: | 218 | if event.event_type == fseventsd.FSE_RENAME: |
2533 | 219 | is_create = self.is_create(event) | 219 | is_create = self.is_create(event) |
2534 | 220 | if is_create or self.is_delete(event): | 220 | if is_create or self.is_delete(event): |
2535 | 221 | mask = IN_CREATE if is_create else IN_DELETE | 221 | mask = IN_CREATE if is_create else IN_DELETE |
2536 | @@ -278,7 +278,7 @@ | |||
2537 | 278 | if event.event_type in self._ignored_events: | 278 | if event.event_type in self._ignored_events: |
2538 | 279 | # Do nothing because sd does not care about such info | 279 | # Do nothing because sd does not care about such info |
2539 | 280 | return | 280 | return |
2541 | 281 | if event.event_type == fsevents.FSE_EVENTS_DROPPED: | 281 | if event.event_type == fseventsd.FSE_EVENTS_DROPPED: |
2542 | 282 | # this should not be very common but we have to deal with it | 282 | # this should not be very common but we have to deal with it |
2543 | 283 | return self.events_dropper() | 283 | return self.events_dropper() |
2544 | 284 | events = self.convert_in_pyinotify_event(event) | 284 | events = self.convert_in_pyinotify_event(event) |
2545 | 285 | 285 | ||
2546 | === modified file 'ubuntuone/platform/ipc/ipc_client.py' | |||
2547 | --- ubuntuone/platform/ipc/ipc_client.py 2012-05-22 14:28:56 +0000 | |||
2548 | +++ ubuntuone/platform/ipc/ipc_client.py 2012-08-22 18:22:29 +0000 | |||
2549 | @@ -197,6 +197,22 @@ | |||
2550 | 197 | def current_uploads(self): | 197 | def current_uploads(self): |
2551 | 198 | """Return a list of files with a upload in progress.""" | 198 | """Return a list of files with a upload in progress.""" |
2552 | 199 | 199 | ||
2553 | 200 | @remote | ||
2554 | 201 | def sync_menu(self): | ||
2555 | 202 | """ | ||
2556 | 203 | This method returns a dictionary, with the following keys and values: | ||
2557 | 204 | |||
2558 | 205 | Key: 'recent-transfers' | ||
2559 | 206 | Value: a list of strings (paths), each being the name of a file that | ||
2560 | 207 | was recently transferred. | ||
2561 | 208 | |||
2562 | 209 | Key: 'uploading' | ||
2563 | 210 | Value: a list of tuples, with each tuple having the following items: | ||
2564 | 211 | * str: the path of a file that's currently being uploaded | ||
2565 | 212 | * int: size of the file | ||
2566 | 213 | * int: bytes written | ||
2567 | 214 | """ | ||
2568 | 215 | |||
2569 | 200 | @signal | 216 | @signal |
2570 | 201 | def on_content_queue_changed(self): | 217 | def on_content_queue_changed(self): |
2571 | 202 | """Emit ContentQueueChanged.""" | 218 | """Emit ContentQueueChanged.""" |
2572 | @@ -234,7 +250,7 @@ | |||
2573 | 234 | """Emit UploadFileProgress.""" | 250 | """Emit UploadFileProgress.""" |
2574 | 235 | 251 | ||
2575 | 236 | @signal | 252 | @signal |
2577 | 237 | def on_upload_finished(self, upload, **info): | 253 | def on_upload_finished(self, upload, *info): |
2578 | 238 | """Emit UploadFinished.""" | 254 | """Emit UploadFinished.""" |
2579 | 239 | 255 | ||
2580 | 240 | @signal | 256 | @signal |
2581 | 241 | 257 | ||
2582 | === modified file 'ubuntuone/platform/ipc/linux.py' | |||
2583 | --- ubuntuone/platform/ipc/linux.py 2012-05-22 14:07:55 +0000 | |||
2584 | +++ ubuntuone/platform/ipc/linux.py 2012-08-22 18:22:29 +0000 | |||
2585 | @@ -38,6 +38,10 @@ | |||
2586 | 38 | from xml.etree import ElementTree | 38 | from xml.etree import ElementTree |
2587 | 39 | 39 | ||
2588 | 40 | from ubuntuone.platform.launcher import UbuntuOneLauncher | 40 | from ubuntuone.platform.launcher import UbuntuOneLauncher |
2589 | 41 | from ubuntuone.syncdaemon import ( | ||
2590 | 42 | RECENT_TRANSFERS, | ||
2591 | 43 | UPLOADING, | ||
2592 | 44 | ) | ||
2593 | 41 | 45 | ||
2594 | 42 | # Disable the "Invalid Name" check here, as we have lots of DBus style names | 46 | # Disable the "Invalid Name" check here, as we have lots of DBus style names |
2595 | 43 | # pylint: disable-msg=C0103 | 47 | # pylint: disable-msg=C0103 |
2596 | @@ -173,6 +177,35 @@ | |||
2597 | 173 | warnings.warn('Use "waiting" method instead.', DeprecationWarning) | 177 | warnings.warn('Use "waiting" method instead.', DeprecationWarning) |
2598 | 174 | return self.service.status.waiting_content() | 178 | return self.service.status.waiting_content() |
2599 | 175 | 179 | ||
2600 | 180 | @dbus.service.method(DBUS_IFACE_STATUS_NAME, out_signature='a{sv}') | ||
2601 | 181 | def sync_menu(self): | ||
2602 | 182 | """ | ||
2603 | 183 | This method returns a dictionary, with the following keys and values: | ||
2604 | 184 | |||
2605 | 185 | Key: 'recent-transfers' | ||
2606 | 186 | Value: a list of strings (paths), each being the name of a file that | ||
2607 | 187 | was recently transferred. | ||
2608 | 188 | |||
2609 | 189 | Key: 'uploading' | ||
2610 | 190 | Value: a list of tuples, with each tuple having the following items: | ||
2611 | 191 | * str: the path of a file that's currently being uploaded | ||
2612 | 192 | * int: size of the file | ||
2613 | 193 | * int: bytes written | ||
2614 | 194 | """ | ||
2615 | 195 | data = self.service.status.sync_menu() | ||
2616 | 196 | uploading = data[UPLOADING] | ||
2617 | 197 | transfers = data[RECENT_TRANSFERS] | ||
2618 | 198 | upload_data = dbus.Array(signature="(sii)") | ||
2619 | 199 | transfer_data = dbus.Array(signature="s") | ||
2620 | 200 | for up in uploading: | ||
2621 | 201 | upload_data.append(dbus.Struct(up, signature="sii")) | ||
2622 | 202 | for transfer in transfers: | ||
2623 | 203 | transfer_data.append(transfer) | ||
2624 | 204 | result = dbus.Dictionary(signature="sv") | ||
2625 | 205 | result[UPLOADING] = upload_data | ||
2626 | 206 | result[RECENT_TRANSFERS] = transfer_data | ||
2627 | 207 | return result | ||
2628 | 208 | |||
2629 | 176 | @dbus.service.signal(DBUS_IFACE_STATUS_NAME) | 209 | @dbus.service.signal(DBUS_IFACE_STATUS_NAME) |
2630 | 177 | def DownloadStarted(self, path): | 210 | def DownloadStarted(self, path): |
2631 | 178 | """Fire a signal to notify that a download has started.""" | 211 | """Fire a signal to notify that a download has started.""" |
2632 | 179 | 212 | ||
2633 | === modified file 'ubuntuone/platform/ipc/perspective_broker.py' | |||
2634 | --- ubuntuone/platform/ipc/perspective_broker.py 2012-07-13 16:06:27 +0000 | |||
2635 | +++ ubuntuone/platform/ipc/perspective_broker.py 2012-08-22 18:22:29 +0000 | |||
2636 | @@ -231,6 +231,7 @@ | |||
2637 | 231 | 'waiting', | 231 | 'waiting', |
2638 | 232 | 'waiting_metadata', | 232 | 'waiting_metadata', |
2639 | 233 | 'waiting_content', | 233 | 'waiting_content', |
2640 | 234 | 'sync_menu', | ||
2641 | 234 | ] | 235 | ] |
2642 | 235 | 236 | ||
2643 | 236 | signal_mapping = { | 237 | signal_mapping = { |
2644 | @@ -291,6 +292,10 @@ | |||
2645 | 291 | warnings.warn('Use "waiting" method instead.', DeprecationWarning) | 292 | warnings.warn('Use "waiting" method instead.', DeprecationWarning) |
2646 | 292 | return self.service.status.waiting_content() | 293 | return self.service.status.waiting_content() |
2647 | 293 | 294 | ||
2648 | 295 | def sync_menu(self): | ||
2649 | 296 | """Return the info necessary to construct the menu.""" | ||
2650 | 297 | return self.service.status.sync_menu() | ||
2651 | 298 | |||
2652 | 294 | @signal | 299 | @signal |
2653 | 295 | def DownloadStarted(self, path): | 300 | def DownloadStarted(self, path): |
2654 | 296 | """Fire a signal to notify that a download has started.""" | 301 | """Fire a signal to notify that a download has started.""" |
2655 | @@ -300,7 +305,7 @@ | |||
2656 | 300 | """Fire a signal to notify about a download progress.""" | 305 | """Fire a signal to notify about a download progress.""" |
2657 | 301 | 306 | ||
2658 | 302 | @signal | 307 | @signal |
2660 | 303 | def DownloadFinished(self, path, info): | 308 | def DownloadFinished(self, path, *info): |
2661 | 304 | """Fire a signal to notify that a download has finished.""" | 309 | """Fire a signal to notify that a download has finished.""" |
2662 | 305 | 310 | ||
2663 | 306 | @signal | 311 | @signal |
2664 | @@ -312,7 +317,7 @@ | |||
2665 | 312 | """Fire a signal to notify about an upload progress.""" | 317 | """Fire a signal to notify about an upload progress.""" |
2666 | 313 | 318 | ||
2667 | 314 | @signal | 319 | @signal |
2669 | 315 | def UploadFinished(self, path, info): | 320 | def UploadFinished(self, path, *info): |
2670 | 316 | """Fire a signal to notify that an upload has finished.""" | 321 | """Fire a signal to notify that an upload has finished.""" |
2671 | 317 | 322 | ||
2672 | 318 | @signal | 323 | @signal |
2673 | 319 | 324 | ||
2674 | === modified file 'ubuntuone/platform/os_helper/__init__.py' | |||
2675 | --- ubuntuone/platform/os_helper/__init__.py 2012-06-22 19:37:36 +0000 | |||
2676 | +++ ubuntuone/platform/os_helper/__init__.py 2012-08-22 18:22:29 +0000 | |||
2677 | @@ -73,6 +73,7 @@ | |||
2678 | 73 | 73 | ||
2679 | 74 | # Decorators | 74 | # Decorators |
2680 | 75 | 75 | ||
2681 | 76 | get_os_valid_path = source.get_os_valid_path | ||
2682 | 76 | is_valid_syncdaemon_path = source.is_valid_syncdaemon_path | 77 | is_valid_syncdaemon_path = source.is_valid_syncdaemon_path |
2683 | 77 | is_valid_os_path = source.is_valid_os_path | 78 | is_valid_os_path = source.is_valid_os_path |
2684 | 78 | os_path = source.os_path | 79 | os_path = source.os_path |
2685 | 79 | 80 | ||
2686 | === modified file 'ubuntuone/platform/os_helper/darwin.py' | |||
2687 | --- ubuntuone/platform/os_helper/darwin.py 2012-07-10 18:41:15 +0000 | |||
2688 | +++ ubuntuone/platform/os_helper/darwin.py 2012-08-22 18:22:29 +0000 | |||
2689 | @@ -29,8 +29,8 @@ | |||
2690 | 29 | """ | 29 | """ |
2691 | 30 | Darwin import for ubuntuone-client | 30 | Darwin import for ubuntuone-client |
2692 | 31 | 31 | ||
2695 | 32 | This module has to have all darwin specific modules and provide the api required | 32 | This module has to have all darwin specific modules and provide the |
2696 | 33 | to support the darwin platform. | 33 | api required to support the darwin platform. |
2697 | 34 | """ | 34 | """ |
2698 | 35 | 35 | ||
2699 | 36 | import errno | 36 | import errno |
2700 | @@ -70,6 +70,7 @@ | |||
2701 | 70 | is_root = unix.is_root | 70 | is_root = unix.is_root |
2702 | 71 | get_path_list = unix.get_path_list | 71 | get_path_list = unix.get_path_list |
2703 | 72 | normpath = unix.normpath | 72 | normpath = unix.normpath |
2704 | 73 | get_os_valid_path = unix.get_os_valid_path | ||
2705 | 73 | 74 | ||
2706 | 74 | 75 | ||
2707 | 75 | def move_to_trash(path): | 76 | def move_to_trash(path): |
2708 | @@ -121,6 +122,8 @@ | |||
2709 | 121 | def is_valid_os_path(path_indexes=None): | 122 | def is_valid_os_path(path_indexes=None): |
2710 | 122 | def decorator(func): | 123 | def decorator(func): |
2711 | 123 | def wrapped(*args, **kwargs): | 124 | def wrapped(*args, **kwargs): |
2712 | 125 | for i in path_indexes: | ||
2713 | 126 | assert isinstance(args[i], str), 'Path %r should be str.' | ||
2714 | 124 | return func(*args, **kwargs) | 127 | return func(*args, **kwargs) |
2715 | 125 | return wrapped | 128 | return wrapped |
2716 | 126 | return decorator | 129 | return decorator |
2717 | 127 | 130 | ||
2718 | === modified file 'ubuntuone/platform/os_helper/linux.py' | |||
2719 | --- ubuntuone/platform/os_helper/linux.py 2012-06-27 12:51:20 +0000 | |||
2720 | +++ ubuntuone/platform/os_helper/linux.py 2012-08-22 18:22:29 +0000 | |||
2721 | @@ -123,6 +123,7 @@ | |||
2722 | 123 | is_root = unix.is_root | 123 | is_root = unix.is_root |
2723 | 124 | get_path_list = unix.get_path_list | 124 | get_path_list = unix.get_path_list |
2724 | 125 | normpath = unix.normpath | 125 | normpath = unix.normpath |
2725 | 126 | get_os_valid_path = unix.get_os_valid_path | ||
2726 | 126 | is_valid_syncdaemon_path = None | 127 | is_valid_syncdaemon_path = None |
2727 | 127 | is_valid_os_path = None | 128 | is_valid_os_path = None |
2728 | 128 | os_path = None | 129 | os_path = None |
2729 | 129 | 130 | ||
2730 | === modified file 'ubuntuone/platform/os_helper/unix.py' | |||
2731 | --- ubuntuone/platform/os_helper/unix.py 2012-06-27 14:02:14 +0000 | |||
2732 | +++ ubuntuone/platform/os_helper/unix.py 2012-08-22 18:22:29 +0000 | |||
2733 | @@ -185,3 +185,8 @@ | |||
2734 | 185 | def normpath(path): | 185 | def normpath(path): |
2735 | 186 | """Normalize path, eliminating double slashes, etc.""" | 186 | """Normalize path, eliminating double slashes, etc.""" |
2736 | 187 | return os.path.normpath(path) | 187 | return os.path.normpath(path) |
2737 | 188 | |||
2738 | 189 | |||
2739 | 190 | def get_os_valid_path(path): | ||
2740 | 191 | """Return a valid os path.""" | ||
2741 | 192 | return os.path.abspath(path) | ||
2742 | 188 | 193 | ||
2743 | === modified file 'ubuntuone/platform/os_helper/windows.py' | |||
2744 | --- ubuntuone/platform/os_helper/windows.py 2012-07-03 17:16:57 +0000 | |||
2745 | +++ ubuntuone/platform/os_helper/windows.py 2012-08-22 18:22:29 +0000 | |||
2746 | @@ -249,6 +249,8 @@ | |||
2747 | 249 | assert_windows_path(result) | 249 | assert_windows_path(result) |
2748 | 250 | return result | 250 | return result |
2749 | 251 | 251 | ||
2750 | 252 | get_os_valid_path = get_windows_valid_path | ||
2751 | 253 | |||
2752 | 252 | 254 | ||
2753 | 253 | def _unicode_to_bytes(path): | 255 | def _unicode_to_bytes(path): |
2754 | 254 | """Convert a unicode path to a bytes path.""" | 256 | """Convert a unicode path to a bytes path.""" |
2755 | 255 | 257 | ||
2756 | === modified file 'ubuntuone/platform/tools/__init__.py' | |||
2757 | --- ubuntuone/platform/tools/__init__.py 2012-05-30 15:35:49 +0000 | |||
2758 | +++ ubuntuone/platform/tools/__init__.py 2012-08-22 18:22:29 +0000 | |||
2759 | @@ -326,6 +326,13 @@ | |||
2760 | 326 | 326 | ||
2761 | 327 | @defer.inlineCallbacks | 327 | @defer.inlineCallbacks |
2762 | 328 | @log_call(logger.debug) | 328 | @log_call(logger.debug) |
2763 | 329 | def sync_menu(self): | ||
2764 | 330 | """Return a deferred that will be fired with the sync menu data.""" | ||
2765 | 331 | results = yield self.proxy.call_method('status', 'sync_menu') | ||
2766 | 332 | defer.returnValue(results) | ||
2767 | 333 | |||
2768 | 334 | @defer.inlineCallbacks | ||
2769 | 335 | @log_call(logger.debug) | ||
2770 | 329 | def accept_share(self, share_id): | 336 | def accept_share(self, share_id): |
2771 | 330 | """Accept the share with id: share_id.""" | 337 | """Accept the share with id: share_id.""" |
2772 | 331 | d = self.wait_for_signals(signal_ok='ShareAnswerResponse', | 338 | d = self.wait_for_signals(signal_ok='ShareAnswerResponse', |
2773 | 332 | 339 | ||
2774 | === modified file 'ubuntuone/status/aggregator.py' | |||
2775 | --- ubuntuone/status/aggregator.py 2012-04-09 20:07:05 +0000 | |||
2776 | +++ ubuntuone/status/aggregator.py 2012-08-22 18:22:29 +0000 | |||
2777 | @@ -33,7 +33,7 @@ | |||
2778 | 33 | import itertools | 33 | import itertools |
2779 | 34 | import operator | 34 | import operator |
2780 | 35 | import os | 35 | import os |
2782 | 36 | 36 | from collections import deque | |
2783 | 37 | 37 | ||
2784 | 38 | import gettext | 38 | import gettext |
2785 | 39 | 39 | ||
2786 | @@ -624,6 +624,7 @@ | |||
2787 | 624 | self.finished_delay = 10 | 624 | self.finished_delay = 10 |
2788 | 625 | self.progress = {} | 625 | self.progress = {} |
2789 | 626 | self.to_do = {} | 626 | self.to_do = {} |
2790 | 627 | self.recent_transfers = deque(maxlen=5) | ||
2791 | 627 | 628 | ||
2792 | 628 | def get_notification(self): | 629 | def get_notification(self): |
2793 | 629 | """Create a new toggleable notification object.""" | 630 | """Create a new toggleable notification object.""" |
2794 | @@ -775,6 +776,7 @@ | |||
2795 | 775 | if command.deflated_size is not None: | 776 | if command.deflated_size is not None: |
2796 | 776 | self.progress[ | 777 | self.progress[ |
2797 | 777 | (command.share_id, command.node_id)] = command.deflated_size | 778 | (command.share_id, command.node_id)] = command.deflated_size |
2798 | 779 | self.recent_transfers.append(command.path) | ||
2799 | 778 | logger.debug("unqueueing command: %s", command.__class__.__name__) | 780 | logger.debug("unqueueing command: %s", command.__class__.__name__) |
2800 | 779 | self.update_progressbar() | 781 | self.update_progressbar() |
2801 | 780 | 782 | ||
2802 | @@ -806,6 +808,19 @@ | |||
2803 | 806 | self.messaging = Messaging() | 808 | self.messaging = Messaging() |
2804 | 807 | self.quota_timer = None | 809 | self.quota_timer = None |
2805 | 808 | 810 | ||
2806 | 811 | def recent_transfers(self): | ||
2807 | 812 | """Return a tuple with the recent transfers paths.""" | ||
2808 | 813 | return list(self.aggregator.recent_transfers) | ||
2809 | 814 | |||
2810 | 815 | def files_uploading(self): | ||
2811 | 816 | """Return a list with the files being uploading.""" | ||
2812 | 817 | uploading = [] | ||
2813 | 818 | for upload in self.aggregator.files_uploading: | ||
2814 | 819 | if upload.size != 0: | ||
2815 | 820 | uploading.append((upload.path, upload.size, | ||
2816 | 821 | upload.n_bytes_written)) | ||
2817 | 822 | return uploading | ||
2818 | 823 | |||
2819 | 809 | def file_published(self, public_url): | 824 | def file_published(self, public_url): |
2820 | 810 | """A file was published.""" | 825 | """A file was published.""" |
2821 | 811 | status_event = FilePublishingStatus(new_public_url=public_url) | 826 | status_event = FilePublishingStatus(new_public_url=public_url) |
2822 | 812 | 827 | ||
2823 | === modified file 'ubuntuone/syncdaemon/__init__.py' | |||
2824 | --- ubuntuone/syncdaemon/__init__.py 2012-04-09 20:07:05 +0000 | |||
2825 | +++ ubuntuone/syncdaemon/__init__.py 2012-08-22 18:22:29 +0000 | |||
2826 | @@ -36,3 +36,8 @@ | |||
2827 | 36 | "volumes", | 36 | "volumes", |
2828 | 37 | "generations", | 37 | "generations", |
2829 | 38 | ]) | 38 | ]) |
2830 | 39 | |||
2831 | 40 | |||
2832 | 41 | #Sync Menu data constants | ||
2833 | 42 | RECENT_TRANSFERS = 'recent-transfers' | ||
2834 | 43 | UPLOADING = 'uploading' | ||
2835 | 39 | 44 | ||
2836 | === modified file 'ubuntuone/syncdaemon/event_queue.py' | |||
2837 | --- ubuntuone/syncdaemon/event_queue.py 2012-07-31 08:26:30 +0000 | |||
2838 | +++ ubuntuone/syncdaemon/event_queue.py 2012-08-22 18:22:29 +0000 | |||
2839 | @@ -159,6 +159,7 @@ | |||
2840 | 159 | 159 | ||
2841 | 160 | 'FSM_FILE_CONFLICT': ('old_name', 'new_name'), | 160 | 'FSM_FILE_CONFLICT': ('old_name', 'new_name'), |
2842 | 161 | 'FSM_DIR_CONFLICT': ('old_name', 'new_name'), | 161 | 'FSM_DIR_CONFLICT': ('old_name', 'new_name'), |
2843 | 162 | 'FSM_PARTIAL_COMMITED': ('share_id', 'node_id'), | ||
2844 | 162 | 163 | ||
2845 | 163 | 'VM_UDF_SUBSCRIBED': ('udf',), | 164 | 'VM_UDF_SUBSCRIBED': ('udf',), |
2846 | 164 | 'VM_UDF_SUBSCRIBE_ERROR': ('udf_id', 'error'), | 165 | 'VM_UDF_SUBSCRIBE_ERROR': ('udf_id', 'error'), |
2847 | 165 | 166 | ||
2848 | === modified file 'ubuntuone/syncdaemon/filesystem_manager.py' | |||
2849 | --- ubuntuone/syncdaemon/filesystem_manager.py 2012-06-22 11:29:10 +0000 | |||
2850 | +++ ubuntuone/syncdaemon/filesystem_manager.py 2012-08-22 18:22:29 +0000 | |||
2851 | @@ -1142,7 +1142,7 @@ | |||
2852 | 1142 | return fd | 1142 | return fd |
2853 | 1143 | 1143 | ||
2854 | 1144 | def commit_partial(self, node_id, share_id, local_hash): | 1144 | def commit_partial(self, node_id, share_id, local_hash): |
2856 | 1145 | """Create a .partial in disk and set the flag in metadata.""" | 1145 | """Commit a file from a .partial to disk.""" |
2857 | 1146 | mdid = self._idx_node_id[(share_id, node_id)] | 1146 | mdid = self._idx_node_id[(share_id, node_id)] |
2858 | 1147 | mdobj = self.fs[mdid] | 1147 | mdobj = self.fs[mdid] |
2859 | 1148 | if mdobj["is_dir"]: | 1148 | if mdobj["is_dir"]: |
2860 | @@ -1166,6 +1166,8 @@ | |||
2861 | 1166 | mdobj["info"]["is_partial"] = False | 1166 | mdobj["info"]["is_partial"] = False |
2862 | 1167 | mdobj["stat"] = get_stat(path) | 1167 | mdobj["stat"] = get_stat(path) |
2863 | 1168 | self.fs[mdid] = mdobj | 1168 | self.fs[mdid] = mdobj |
2864 | 1169 | self.eq.push("FSM_PARTIAL_COMMITED", share_id=share_id, | ||
2865 | 1170 | node_id=node_id) | ||
2866 | 1169 | 1171 | ||
2867 | 1170 | def remove_partial(self, node_id, share_id): | 1172 | def remove_partial(self, node_id, share_id): |
2868 | 1171 | """Remove a .partial in disk and set the flag in metadata.""" | 1173 | """Remove a .partial in disk and set the flag in metadata.""" |
2869 | 1172 | 1174 | ||
2870 | === modified file 'ubuntuone/syncdaemon/interaction_interfaces.py' | |||
2871 | --- ubuntuone/syncdaemon/interaction_interfaces.py 2012-05-22 14:07:55 +0000 | |||
2872 | +++ ubuntuone/syncdaemon/interaction_interfaces.py 2012-08-22 18:22:29 +0000 | |||
2873 | @@ -302,6 +302,10 @@ | |||
2874 | 302 | waiting_content.append(data) | 302 | waiting_content.append(data) |
2875 | 303 | return waiting_content | 303 | return waiting_content |
2876 | 304 | 304 | ||
2877 | 305 | def sync_menu(self): | ||
2878 | 306 | """Return the info necessary to construct the menu.""" | ||
2879 | 307 | return self.main.status_listener.menu_data() | ||
2880 | 308 | |||
2881 | 305 | 309 | ||
2882 | 306 | class SyncdaemonFileSystem(SyncdaemonObject): | 310 | class SyncdaemonFileSystem(SyncdaemonObject): |
2883 | 307 | """An interface to the FileSystem Manager.""" | 311 | """An interface to the FileSystem Manager.""" |
2884 | @@ -816,8 +820,8 @@ | |||
2885 | 816 | self._path_from_ids(share_id, node_id, 'DownloadFileProgress', info) | 820 | self._path_from_ids(share_id, node_id, 'DownloadFileProgress', info) |
2886 | 817 | 821 | ||
2887 | 818 | @log_call(logger.debug) | 822 | @log_call(logger.debug) |
2890 | 819 | def handle_AQ_DOWNLOAD_FINISHED(self, share_id, node_id, server_hash): | 823 | def handle_FSM_PARTIAL_COMMITED(self, share_id, node_id): |
2891 | 820 | """Handle AQ_DOWNLOAD_FINISHED.""" | 824 | """Handle FSM_PARTIAL_COMMITED.""" |
2892 | 821 | self._path_from_ids(share_id, node_id, 'DownloadFinished', info={}) | 825 | self._path_from_ids(share_id, node_id, 'DownloadFinished', info={}) |
2893 | 822 | 826 | ||
2894 | 823 | @log_call(logger.debug) | 827 | @log_call(logger.debug) |
2895 | 824 | 828 | ||
2896 | === modified file 'ubuntuone/syncdaemon/status_listener.py' | |||
2897 | --- ubuntuone/syncdaemon/status_listener.py 2012-04-09 20:07:05 +0000 | |||
2898 | +++ ubuntuone/syncdaemon/status_listener.py 2012-08-22 18:22:29 +0000 | |||
2899 | @@ -31,7 +31,12 @@ | |||
2900 | 31 | """Listener for event queue that updates the UI to show syncdaemon status.""" | 31 | """Listener for event queue that updates the UI to show syncdaemon status.""" |
2901 | 32 | 32 | ||
2902 | 33 | from ubuntuone.status.aggregator import StatusFrontend | 33 | from ubuntuone.status.aggregator import StatusFrontend |
2904 | 34 | from ubuntuone.syncdaemon import action_queue, config | 34 | from ubuntuone.syncdaemon import ( |
2905 | 35 | action_queue, | ||
2906 | 36 | config, | ||
2907 | 37 | RECENT_TRANSFERS, | ||
2908 | 38 | UPLOADING, | ||
2909 | 39 | ) | ||
2910 | 35 | from ubuntuone.syncdaemon.interaction_interfaces import ( | 40 | from ubuntuone.syncdaemon.interaction_interfaces import ( |
2911 | 36 | get_share_dict, get_udf_dict) | 41 | get_share_dict, get_udf_dict) |
2912 | 37 | from ubuntuone.syncdaemon.volume_manager import UDF, Root | 42 | from ubuntuone.syncdaemon.volume_manager import UDF, Root |
2913 | @@ -66,6 +71,13 @@ | |||
2914 | 66 | user_conf = config.get_user_config() | 71 | user_conf = config.get_user_config() |
2915 | 67 | self.show_all_notifications = user_conf.get_show_all_notifications() | 72 | self.show_all_notifications = user_conf.get_show_all_notifications() |
2916 | 68 | 73 | ||
2917 | 74 | def menu_data(self): | ||
2918 | 75 | """Return the info necessary to construct the sync menu.""" | ||
2919 | 76 | uploading = self.status_frontend.files_uploading() | ||
2920 | 77 | transfers = self.status_frontend.recent_transfers() | ||
2921 | 78 | data = {RECENT_TRANSFERS: transfers, UPLOADING: uploading} | ||
2922 | 79 | return data | ||
2923 | 80 | |||
2924 | 69 | def get_show_all_notifications(self): | 81 | def get_show_all_notifications(self): |
2925 | 70 | """Get the value of show_all_notifications.""" | 82 | """Get the value of show_all_notifications.""" |
2926 | 71 | return self._show_all_notifications | 83 | return self._show_all_notifications |
2927 | 72 | 84 | ||
2928 | === modified file 'ubuntuone/syncdaemon/volume_manager.py' | |||
2929 | --- ubuntuone/syncdaemon/volume_manager.py 2012-05-14 21:24:24 +0000 | |||
2930 | +++ ubuntuone/syncdaemon/volume_manager.py 2012-08-22 18:22:29 +0000 | |||
2931 | @@ -1207,6 +1207,10 @@ | |||
2932 | 1207 | if is_link(path): | 1207 | if is_link(path): |
2933 | 1208 | return (False, "UDFs can not be a symlink") | 1208 | return (False, "UDFs can not be a symlink") |
2934 | 1209 | 1209 | ||
2935 | 1210 | # check if path exists but is not a directory | ||
2936 | 1211 | if path_exists(path) and not os.path.isdir(path): | ||
2937 | 1212 | return (False, "The path exists but is not a folder") | ||
2938 | 1213 | |||
2939 | 1210 | # check if the path it's ok (outside root and | 1214 | # check if the path it's ok (outside root and |
2940 | 1211 | # isn't a ancestor or child of another UDF) | 1215 | # isn't a ancestor or child of another UDF) |
2941 | 1212 | if self._is_nested_udf(path): | 1216 | if self._is_nested_udf(path): |
The attempt to merge lp:~dobey/ubuntuone-client/update-4-0 into lp:ubuntuone-client/stable-4-0 failed. Below is the output from the failed tests.
/usr/bin/ gnome-autogen. sh MACRO_DIR, `m4'. unknown- linux-gnu unknown- linux-gnu
checking for autoconf >= 2.53...
testing autoconf2.50... not found.
testing autoconf... found 2.69
checking for automake >= 1.10...
testing automake-1.11... found 1.11.5
checking for libtool >= 1.5...
testing libtoolize... found 2.4.2
checking for intltool >= 0.30...
testing intltoolize... found 0.50.2
checking for pkg-config >= 0.14.0...
testing pkg-config... found 0.26
checking for gtk-doc >= 1.0...
testing gtkdocize... found 1.18
Checking for required M4 macros...
Checking for forbidden M4 macros...
Processing ./configure.ac
Running libtoolize...
libtoolize: putting auxiliary files in `.'.
libtoolize: copying file `./ltmain.sh'
libtoolize: putting macros in AC_CONFIG_
libtoolize: copying file `m4/libtool.m4'
libtoolize: copying file `m4/ltoptions.m4'
libtoolize: copying file `m4/ltsugar.m4'
libtoolize: copying file `m4/ltversion.m4'
libtoolize: copying file `m4/lt~obsolete.m4'
Running intltoolize...
Running gtkdocize...
Running aclocal-1.11...
Running autoconf...
Running autoheader...
Running automake-1.11...
Running ./configure --enable-gtk-doc --enable-debug ...
checking for a BSD-compatible install... /usr/bin/install -c
checking whether build environment is sane... yes
checking for a thread-safe mkdir -p... /bin/mkdir -p
checking for gawk... no
checking for mawk... mawk
checking whether make sets $(MAKE)... yes
checking how to create a ustar tar archive... gnutar
checking whether make supports nested variables... yes
checking for style of include used by make... GNU
checking for gcc... gcc
checking whether the C compiler works... yes
checking for C compiler default output file name... a.out
checking for suffix of executables...
checking whether we are cross compiling... no
checking for suffix of object files... o
checking whether we are using the GNU C compiler... yes
checking whether gcc accepts -g... yes
checking for gcc option to accept ISO C89... none needed
checking dependency style of gcc... gcc3
checking for library containing strerror... none required
checking for gcc... (cached) gcc
checking whether we are using the GNU C compiler... (cached) yes
checking whether gcc accepts -g... (cached) yes
checking for gcc option to accept ISO C89... (cached) none needed
checking dependency style of gcc... (cached) gcc3
checking build system type... x86_64-
checking host system type... x86_64-
checking how to print strings... printf
checking for a sed that does not truncate output... /bin/sed
checking for grep that handles long lines and -e... /bin/grep
checking for egrep... /bin/grep -E
checking for fgrep... /bin/grep -F
checking for ld used by gcc... /usr/bin/ld
checking if the linker (/usr/bin/ld) is GNU ld... yes
checking for BSD- or MS-compatible name lister (nm)... /usr/bin/nm -B
checking the name lister (/usr/bin/nm -B) interface... BSD nm
checking whether ln -s works... yes
checking the maximum length of command line arguments... 1572864
checking whether the shell understands some XSI constructs......