Merge lp:~nataliabidart/magicicada-client/clean-pos-ins into lp:magicicada-client
- clean-pos-ins
- Merge into trunk
Proposed by
Natalia Bidart
Status: | Work in progress |
---|---|
Proposed branch: | lp:~nataliabidart/magicicada-client/clean-pos-ins |
Merge into: | lp:magicicada-client |
Diff against target: |
4069 lines (+715/-1693) 73 files modified
.bzrignore (+0/-2) bin/u1sdtool (+1/-1) bin/ubuntuone-launch (+3/-3) bin/ubuntuone-syncdaemon (+51/-88) contrib/__init__.py (+0/-31) contrib/dbus-docs (+9/-12) contrib/dump_metadata.py (+8/-14) data/com.ubuntuone.SyncDaemon.service.in (+0/-4) data/logging.conf (+12/-0) data/logging.conf.in (+0/-13) data/source_ubuntuone-client.py (+0/-61) data/syncdaemon-dev.conf (+0/-11) data/syncdaemon.conf (+0/-12) data/ubuntuone-client-crashdb.conf (+0/-5) data/ubuntuone-launch.desktop.in (+0/-8) setup.cfg (+0/-2) setup.py (+32/-206) ubuntuone/clientdefs.py (+9/-15) ubuntuone/config.py (+62/-76) ubuntuone/logger.py (+168/-65) ubuntuone/platform/__init__.py (+1/-8) ubuntuone/platform/filesystem_notifications/monitor/common.py (+1/-3) ubuntuone/platform/logger/__init__.py (+0/-53) ubuntuone/platform/logger/darwin.py (+0/-37) ubuntuone/platform/logger/linux.py (+0/-50) ubuntuone/platform/logger/windows.py (+0/-37) ubuntuone/platform/tests/filesystem_notifications/__init__.py (+1/-1) ubuntuone/platform/tests/filesystem_notifications/common.py (+2/-1) ubuntuone/platform/tests/filesystem_notifications/test_darwin.py (+1/-2) ubuntuone/platform/tests/filesystem_notifications/test_filesystem_notifications.py (+6/-6) ubuntuone/platform/tests/filesystem_notifications/test_fsevents_daemon.py (+1/-1) ubuntuone/platform/tests/filesystem_notifications/test_linux.py (+1/-1) ubuntuone/platform/tests/filesystem_notifications/test_windows.py (+1/-2) ubuntuone/platform/tests/ipc/test_linux.py (+5/-5) ubuntuone/platform/tests/ipc/test_perspective_broker.py (+4/-4) ubuntuone/platform/tests/linux/test_vm.py (+1/-5) ubuntuone/platform/tests/os_helper/test_os_helper.py (+4/-4) ubuntuone/platform/tests/os_helper/test_windows.py (+1/-2) ubuntuone/platform/tests/test_logger.py (+0/-47) ubuntuone/platform/tests/test_tools.py (+1/-2) ubuntuone/platform/tests/test_u1sdtool.py (+5/-5) ubuntuone/syncdaemon/action_queue.py (+96/-5) ubuntuone/syncdaemon/file_shelf.py (+1/-1) 
ubuntuone/syncdaemon/filesystem_manager.py (+92/-85) ubuntuone/syncdaemon/filesystem_notifications.py (+4/-8) ubuntuone/syncdaemon/fsm/fsm.py (+5/-7) ubuntuone/syncdaemon/hash_queue.py (+1/-7) ubuntuone/syncdaemon/interaction_interfaces.py (+1/-3) ubuntuone/syncdaemon/logger.py (+0/-314) ubuntuone/syncdaemon/main.py (+10/-4) ubuntuone/syncdaemon/sync.py (+4/-7) ubuntuone/syncdaemon/tests/test_action_queue.py (+15/-12) ubuntuone/syncdaemon/tests/test_eq_inotify.py (+8/-8) ubuntuone/syncdaemon/tests/test_eventqueue.py (+6/-6) ubuntuone/syncdaemon/tests/test_eventsnanny.py (+3/-3) ubuntuone/syncdaemon/tests/test_fileshelf.py (+1/-4) ubuntuone/syncdaemon/tests/test_fsm.py (+12/-11) ubuntuone/syncdaemon/tests/test_hashqueue.py (+1/-5) ubuntuone/syncdaemon/tests/test_interaction_interfaces.py (+9/-9) ubuntuone/syncdaemon/tests/test_localrescan.py (+5/-5) ubuntuone/syncdaemon/tests/test_main.py (+3/-3) ubuntuone/syncdaemon/tests/test_states.py (+1/-2) ubuntuone/syncdaemon/tests/test_sync.py (+6/-7) ubuntuone/syncdaemon/tests/test_tritcask.py (+1/-1) ubuntuone/syncdaemon/tests/test_vm.py (+6/-6) ubuntuone/syncdaemon/tests/test_vm_helper.py (+1/-1) ubuntuone/syncdaemon/utils.py (+4/-2) ubuntuone/syncdaemon/volume_manager.py (+9/-8) ubuntuone/testing/testcase.py (+16/-10) ubuntuone/tests/test_config.py (+2/-84) ubuntuone/tests/test_logger.py (+1/-1) ubuntuone/utils/__init__.py (+0/-51) ubuntuone/utils/tests/test_common.py (+0/-108) |
To merge this branch: | bzr merge lp:~nataliabidart/magicicada-client/clean-pos-ins |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Facundo Batista | Needs Information | ||
Review via email:
|
Commit message
- Have a working setup.py that would install locally and in custom locations.
Description of the change
To post a comment you must log in.
- 1437. By Natalia Bidart
-
Merged no-pos into clean-pos-ins.
- 1438. By Natalia Bidart
-
Merged no-pos into clean-pos-ins.
- 1439. By Natalia Bidart
-
Checkpoint.
- 1440. By Natalia Bidart
-
Closer.
- 1441. By Natalia Bidart
-
Merged trunk in.
- 1442. By Natalia Bidart
-
Fixed typo.
- 1443. By Natalia Bidart
-
Merged trunk in.
- 1444. By Natalia Bidart
-
Merged trunk in.
- 1445. By Natalia Bidart
-
Merged simpler-setup-py into clean-pos-ins.
- 1446. By Natalia Bidart
-
Merged trunk in.
Unmerged revisions
- 1446. By Natalia Bidart
-
Merged trunk in.
- 1445. By Natalia Bidart
-
Merged simpler-setup-py into clean-pos-ins.
- 1444. By Natalia Bidart
-
Merged trunk in.
- 1443. By Natalia Bidart
-
Merged trunk in.
- 1442. By Natalia Bidart
-
Fixed typo.
- 1441. By Natalia Bidart
-
Merged trunk in.
- 1440. By Natalia Bidart
-
Closer.
- 1439. By Natalia Bidart
-
Checkpoint.
- 1438. By Natalia Bidart
-
Merged no-pos into clean-pos-ins.
- 1437. By Natalia Bidart
-
Merged no-pos into clean-pos-ins.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file '.bzrignore' |
2 | --- .bzrignore 2016-10-24 20:59:39 +0000 |
3 | +++ .bzrignore 2018-04-14 23:11:29 +0000 |
4 | @@ -12,6 +12,4 @@ |
5 | |
6 | .env |
7 | .protocol |
8 | -clientdefs.py |
9 | -logging.conf |
10 | ubuntuone/storageprotocol |
11 | |
12 | === modified file 'bin/u1sdtool' |
13 | --- bin/u1sdtool 2013-01-28 17:43:24 +0000 |
14 | +++ bin/u1sdtool 2018-04-14 23:11:29 +0000 |
15 | @@ -90,7 +90,7 @@ |
16 | if should_start: |
17 | yield sync_daemon_tool.start() |
18 | yield run(options, sync_daemon_tool, out) |
19 | - except Exception, e: |
20 | + except Exception as e: |
21 | out.write("\nOops, an error ocurred:\n%s\n" % e) |
22 | finally: |
23 | if reactor.running: |
24 | |
25 | === modified file 'bin/ubuntuone-launch' |
26 | --- bin/ubuntuone-launch 2015-09-29 21:05:26 +0000 |
27 | +++ bin/ubuntuone-launch 2018-04-14 23:11:29 +0000 |
28 | @@ -68,7 +68,7 @@ |
29 | from gi.repository import GLib |
30 | from twisted.internet import defer |
31 | |
32 | -from ubuntuone.syncdaemon.config import get_user_config |
33 | +from ubuntuone.config import get_user_config |
34 | from ubuntuone.platform.tools import SyncDaemonTool, is_already_running |
35 | |
36 | |
37 | @@ -89,8 +89,8 @@ |
38 | if not running: |
39 | # have SD start |
40 | yield sync_daemon_tool.start() |
41 | - yield sync_daemon_tool.wait_for_signal('StatusChanged', |
42 | - lambda a: a.get('name', '') == 'READY') |
43 | + yield sync_daemon_tool.wait_for_signal( |
44 | + 'StatusChanged', lambda a: a.get('name', '') == 'READY') |
45 | |
46 | |
47 | if __name__ == '__main__': |
48 | |
49 | === modified file 'bin/ubuntuone-syncdaemon' |
50 | --- bin/ubuntuone-syncdaemon 2017-02-10 01:15:07 +0000 |
51 | +++ bin/ubuntuone-syncdaemon 2018-04-14 23:11:29 +0000 |
52 | @@ -29,46 +29,33 @@ |
53 | # files in the program, then also delete it here. |
54 | """Storage synchronization daemon.""" |
55 | |
56 | +import atexit |
57 | +import logging |
58 | +import signal |
59 | import sys |
60 | |
61 | -if sys.platform not in ('win32', 'darwin'): |
62 | +if sys.platform not in ('win32', 'darwin'): # noqa |
63 | from twisted.internet import gireactor |
64 | gireactor.install() |
65 | from dbus.mainloop.glib import DBusGMainLoop |
66 | DBusGMainLoop(set_as_default=True) |
67 | |
68 | - |
69 | -import atexit |
70 | -import os |
71 | -import signal |
72 | -import sys |
73 | - |
74 | +from ubuntuone import config |
75 | from ubuntuone.platform import ( |
76 | - can_write, |
77 | - set_dir_readwrite, |
78 | is_already_running, |
79 | is_root, |
80 | - make_dir, |
81 | - path_exists, |
82 | - recursive_move, |
83 | set_application_name, |
84 | ) |
85 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
86 | get_filemonitor_class, |
87 | ) |
88 | - |
89 | -from ubuntuone.syncdaemon import logger, config |
90 | -from ubuntuone.syncdaemon.config import ( |
91 | - get_config_files, |
92 | -) |
93 | - |
94 | +from ubuntuone.syncdaemon import config |
95 | from ubuntuone.syncdaemon.main import Main |
96 | |
97 | from twisted.internet import reactor, defer |
98 | -from dirspec.basedir import ( |
99 | - xdg_cache_home, |
100 | - xdg_data_home, |
101 | -) |
102 | + |
103 | + |
104 | +logger = logging.getLogger(__name__) |
105 | |
106 | |
107 | class DeathException(Exception): |
108 | @@ -77,7 +64,7 @@ |
109 | |
110 | def die(msg): |
111 | """Write the error message an die.""" |
112 | - logger.root_logger.warning(msg) |
113 | + logger.warning(msg) |
114 | sys.stderr.write(msg + '\n') |
115 | raise DeathException() |
116 | |
117 | @@ -96,18 +83,19 @@ |
118 | while len(args) > 0 and not args[0].startswith('-'): |
119 | configs.append(args.pop(0)) |
120 | if len(configs) == 0: |
121 | - configs.extend(get_config_files()) |
122 | - (parser, options, argv) = config.configglue(file(configs[0]), *configs[1:], |
123 | - args=args, usage=usage) |
124 | + configs.extend(config.get_config_files()) |
125 | + with open(configs[0]) as f: |
126 | + parser, options, argv = config.configglue(f, *configs[1:], |
127 | + args=args, usage=usage) |
128 | d = async_main(parser, options, argv) |
129 | d.addErrback(check_death) |
130 | - d.addErrback(logger.root_logger.exception) |
131 | + d.addErrback(logger.exception) |
132 | |
133 | # check if we should start a twisted manhole |
134 | if options.debug_manhole: |
135 | startManhole() |
136 | else: |
137 | - logger.root_logger.info('not starting twisted.manhole') |
138 | + logger.info('not starting twisted.manhole') |
139 | |
140 | if options.debug_lsprof_file: |
141 | try: |
142 | @@ -115,7 +103,7 @@ |
143 | ret, stats = profile(reactor.run) |
144 | stats.save(options.debug_lsprof_file) |
145 | except ImportError: |
146 | - logger.root_logger.warning('bzrlib.lsprof not available') |
147 | + logger.warning('bzrlib.lsprof not available') |
148 | reactor.run() |
149 | else: |
150 | reactor.run() |
151 | @@ -127,9 +115,9 @@ |
152 | from twisted.cred.portal import Portal |
153 | from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse |
154 | except ImportError: |
155 | - logger.root_logger.warning('twisted.manhole not available') |
156 | + logger.warning('twisted.manhole not available') |
157 | else: |
158 | - logger.root_logger.info('starting twisted.manhole') |
159 | + logger.info('starting twisted.manhole') |
160 | realm = manhole_ssh.TerminalRealm() |
161 | getManhole = lambda _: manhole.Manhole(globals()) |
162 | realm.chainedProtocolFactory.protocolFactory = getManhole |
163 | @@ -138,19 +126,23 @@ |
164 | portal.registerChecker(checker) |
165 | manholeFactory = manhole_ssh.ConchFactory(portal) |
166 | reactor.listenTCP(2222, manholeFactory) |
167 | - logger.root_logger.info('twisted.manhole started') |
168 | + logger.info('twisted.manhole started') |
169 | |
170 | |
171 | @defer.inlineCallbacks |
172 | def async_main(parser, options, argv): |
173 | """The client entry point that can yield.""" |
174 | - logger.init() |
175 | - if options.debug: |
176 | - logger.set_debug('stdout file') |
177 | + logs_dir = config.get_logs_dir() |
178 | + if options.debug or os.environ.get("MAGICICADA_DEBUG", None): |
179 | + level = logging.DEBUG |
180 | + dest = 'stdout file' |
181 | else: |
182 | - logger.configure_logging(options.logging_level, |
183 | - options.logging_file_size, |
184 | - options.logging_backup_count) |
185 | + level = options.logging_level |
186 | + dest = '' |
187 | + logger.init( |
188 | + logs_dir=logs_dir, level=level, dest=dest, |
189 | + file_size=options.logging_file_size, |
190 | + backup_count=options.logging_backup_count) |
191 | |
192 | # check we're not running as root, or have explicitely and in |
193 | # length expressed our desire to do so |
194 | @@ -171,37 +163,7 @@ |
195 | if is_running: |
196 | die('Another instance is running') |
197 | |
198 | - # check if we are using xdg_data_home and it doesn't exists |
199 | - if xdg_data_home in options.data_dir and \ |
200 | - not path_exists(options.data_dir): |
201 | - # if we have metadata in the old xdg_cache, move it! |
202 | - old_data_dir = options.data_dir.replace(xdg_data_home, xdg_cache_home) |
203 | - if path_exists(old_data_dir): |
204 | - parent = os.path.dirname(options.data_dir) |
205 | - if path_exists(parent) and not can_write(parent): |
206 | - # make the parent dir writable |
207 | - set_dir_readwrite(parent) |
208 | - elif not path_exists(parent): |
209 | - # if it don't exits |
210 | - make_dir(parent, recursive=True) |
211 | - recursive_move(old_data_dir, options.data_dir) |
212 | - if not path_exists(options.data_dir): |
213 | - parent = os.path.dirname(options.data_dir) |
214 | - if path_exists(parent) and not can_write(parent): |
215 | - # make the parent dir writable |
216 | - set_dir_readwrite(parent) |
217 | - make_dir(options.data_dir, recursive=True) |
218 | - |
219 | - # create the partials_dir |
220 | - partials_dir = os.path.join(xdg_cache_home, 'ubuntuone', 'partials') |
221 | - if not path_exists(partials_dir): |
222 | - make_dir(partials_dir, recursive=True) |
223 | - |
224 | - logger.rotate_logs() |
225 | - |
226 | - assert isinstance(options.root_dir, str) |
227 | - assert isinstance(options.shares_dir, str) |
228 | - assert isinstance(options.data_dir, str) |
229 | + dirs = config.get_or_create_dirs() |
230 | |
231 | # check if we have auth credentials |
232 | auth_credentials = None |
233 | @@ -218,28 +180,28 @@ |
234 | # check which file monitor to use |
235 | monitor_class = yield get_filemonitor_class(options.fs_monitor) |
236 | |
237 | - main = Main(options.root_dir, options.shares_dir, options.data_dir, |
238 | - partials_dir, options.server, |
239 | - mark_interval=options.mark_interval, |
240 | - broadcast_events=options.send_events_over_dbus, |
241 | - handshake_timeout=options.handshake_timeout, |
242 | - shares_symlink_name='Shared With Me', |
243 | - read_limit=options.bandwidth_throttling_read_limit, |
244 | - write_limit=options.bandwidth_throttling_write_limit, |
245 | - throttling_enabled=options.bandwidth_throttling_on, |
246 | - ignore_files=options.ignore, |
247 | - auth_credentials=auth_credentials, |
248 | - monitor_class=monitor_class) |
249 | + syncdaemon_main = Main( |
250 | + host=options.host, port=int(options.port), dns_srv=options.dns_srv, |
251 | + ssl=True, disable_ssl_verify=options.disable_ssl_verify, |
252 | + mark_interval=options.mark_interval, |
253 | + broadcast_events=options.send_events_over_dbus, |
254 | + handshake_timeout=options.handshake_timeout, |
255 | + shares_symlink_name='Shared With Me', |
256 | + read_limit=options.bandwidth_throttling_read_limit, |
257 | + write_limit=options.bandwidth_throttling_write_limit, |
258 | + throttling_enabled=options.bandwidth_throttling_on, |
259 | + ignore_files=options.ignore, auth_credentials=auth_credentials, |
260 | + monitor_class=monitor_class, **dirs) |
261 | |
262 | # override the reactor default signal handlers in order to |
263 | # shutdown properly |
264 | - atexit.register(reactor.callFromThread, main.quit) |
265 | + atexit.register(reactor.callFromThread, syncdaemon_main.quit) |
266 | |
267 | def install_handlers(): |
268 | """ install our custom signal handler. """ |
269 | def handler(signum, frame): |
270 | - logger.root_logger.debug("Signal received %s ", str(signum)) |
271 | - reactor.callFromThread(main.quit) |
272 | + logger.debug("Signal received %s ", str(signum)) |
273 | + reactor.callFromThread(syncdaemon_main.quit) |
274 | for signal_name in ['SIGHUP', 'SIGTERM', 'SIGINT']: |
275 | actual_signal = getattr(signal, signal_name, None) |
276 | # some platforms do not have all the signals, eg: Windows does not |
277 | @@ -256,17 +218,18 @@ |
278 | try: |
279 | import guppy.heapy.RM |
280 | except ImportError: |
281 | - logger.root_logger.warning('guppy-pe/heapy not available, remote ' |
282 | - 'monitor thread not started') |
283 | + logger.warning( |
284 | + 'guppy-pe/heapy not available, remote monitor thread not ' |
285 | + 'started') |
286 | else: |
287 | guppy.heapy.RM.on() |
288 | |
289 | - main.start() |
290 | + syncdaemon_main.start() |
291 | |
292 | |
293 | if __name__ == '__main__': |
294 | try: |
295 | main(sys.argv) |
296 | except Exception: |
297 | - logger.root_logger.exception('Unexpected error') |
298 | + logger.exception('Unexpected error while starting:') |
299 | raise |
300 | |
301 | === removed file 'contrib/__init__.py' |
302 | --- contrib/__init__.py 2012-04-09 20:07:05 +0000 |
303 | +++ contrib/__init__.py 1970-01-01 00:00:00 +0000 |
304 | @@ -1,31 +0,0 @@ |
305 | -# contrib - Extra required code to build/install the client |
306 | -# |
307 | -# Author: Rodney Dawes <rodney.dawes@canonical.com> |
308 | -# |
309 | -# Copyright 2009-2012 Canonical Ltd. |
310 | -# |
311 | -# This program is free software: you can redistribute it and/or modify it |
312 | -# under the terms of the GNU General Public License version 3, as published |
313 | -# by the Free Software Foundation. |
314 | -# |
315 | -# This program is distributed in the hope that it will be useful, but |
316 | -# WITHOUT ANY WARRANTY; without even the implied warranties of |
317 | -# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR |
318 | -# PURPOSE. See the GNU General Public License for more details. |
319 | -# |
320 | -# You should have received a copy of the GNU General Public License along |
321 | -# with this program. If not, see <http://www.gnu.org/licenses/>. |
322 | -# |
323 | -# In addition, as a special exception, the copyright holders give |
324 | -# permission to link the code of portions of this program with the |
325 | -# OpenSSL library under certain conditions as described in each |
326 | -# individual source file, and distribute linked combinations |
327 | -# including the two. |
328 | -# You must obey the GNU General Public License in all respects |
329 | -# for all of the code used other than OpenSSL. If you modify |
330 | -# file(s) with this exception, you may extend this exception to your |
331 | -# version of the file(s), but you are not obligated to do so. If you |
332 | -# do not wish to do so, delete this exception statement from your |
333 | -# version. If you delete this exception statement from all source |
334 | -# files in the program, then also delete it here. |
335 | -"""Extra things we need to build, test, or install the client.""" |
336 | |
337 | === modified file 'contrib/dbus-docs' |
338 | --- contrib/dbus-docs 2013-01-28 17:43:24 +0000 |
339 | +++ contrib/dbus-docs 2018-04-14 23:11:29 +0000 |
340 | @@ -41,13 +41,7 @@ |
341 | sys.path.insert(0, os.path.abspath(".")) |
342 | |
343 | from contrib.dbus_util import DBusRunner |
344 | -from contrib.testing.testcase import ( |
345 | - FakeMain, |
346 | - DBusGMainLoop, |
347 | - DBusInterface, |
348 | - FakeNetworkManager, |
349 | -) |
350 | - |
351 | +from ubuntuone import config |
352 | from ubuntuone.platform.dbus_interface import ( |
353 | DBUS_IFACE_SYNC_NAME, |
354 | DBUS_IFACE_STATUS_NAME, |
355 | @@ -59,6 +53,12 @@ |
356 | DBUS_IFACE_PUBLIC_FILES_NAME, |
357 | ) |
358 | from ubuntuone.platform.tools import DBusClient |
359 | +from ubuntuone.testing.testcase import ( |
360 | + FakeMain, |
361 | + DBusGMainLoop, |
362 | + DBusInterface, |
363 | + FakeNetworkManager, |
364 | +) |
365 | from twisted.internet import reactor, defer |
366 | |
367 | |
368 | @@ -173,11 +173,8 @@ |
369 | def start_syncdaemon(tmp_dir): |
370 | """Starts a syncdaemon instance just like the one used in the test suite""" |
371 | xdg_cache = os.path.join(tmp_dir, 'xdg_cache') |
372 | - data_dir = os.path.join(xdg_cache, 'data') |
373 | - partials_dir = os.path.join(xdg_cache, 'partials') |
374 | - root_dir = os.path.join(tmp_dir, 'root') |
375 | - shares_dir = os.path.join(tmp_dir, 'shares') |
376 | - main = FakeMain(root_dir, shares_dir, data_dir, partials_dir) |
377 | + dirs = config.get_or_create_dirs(prefix=xdg_cache) |
378 | + main = FakeMain(**dirs) |
379 | loop = DBusGMainLoop(set_as_default=True) |
380 | bus = dbus.bus.BusConnection(mainloop=loop) |
381 | nm = FakeNetworkManager(bus) |
382 | |
383 | === modified file 'contrib/dump_metadata.py' |
384 | --- contrib/dump_metadata.py 2012-06-21 18:58:50 +0000 |
385 | +++ contrib/dump_metadata.py 2018-04-14 23:11:29 +0000 |
386 | @@ -39,15 +39,7 @@ |
387 | import os |
388 | import sys |
389 | |
390 | -from ubuntuone.syncdaemon import ( |
391 | - filesystem_manager, |
392 | - tritcask, |
393 | - volume_manager, |
394 | -) |
395 | -from dirspec.basedir import ( |
396 | - xdg_cache_home, |
397 | - xdg_data_home, |
398 | -) |
399 | +from ubuntuone.syncdaemon import volume_manager |
400 | |
401 | |
402 | class FakeVM(object): |
403 | @@ -85,8 +77,10 @@ |
404 | tritcask_dir = os.path.join(data_dir, 'tritcask') |
405 | db = tritcask.Tritcask(tritcask_dir) |
406 | vm = FakeVM(data_dir, db) |
407 | - partials_dir = os.path.join(xdg_cache_home, 'ubuntuone', 'partials') |
408 | - fsm = filesystem_manager.FileSystemManager(data_dir, partials_dir, vm, db) |
409 | + filesync_main = Main( |
410 | + root_dir, shares_dir, data_dir, vm_class=FakeVM) |
411 | + vm = filesync_main.vm |
412 | + fsm = filesync_main.fs |
413 | |
414 | shares = [] |
415 | root = None |
416 | @@ -156,11 +150,11 @@ |
417 | |
418 | if __name__ == "__main__": |
419 | if len(sys.argv) == 1: |
420 | - basedir = os.path.join(xdg_data_home, 'ubuntuone', 'syncdaemon') |
421 | + data_dir = None |
422 | elif len(sys.argv) == 2: |
423 | - basedir = sys.argv[1] |
424 | + data_dir = sys.argv[1] |
425 | else: |
426 | print __doc__ |
427 | sys.exit() |
428 | |
429 | - main(basedir) |
430 | + main(data_dir) |
431 | |
432 | === removed file 'data/com.ubuntuone.SyncDaemon.service.in' |
433 | --- data/com.ubuntuone.SyncDaemon.service.in 2009-06-17 16:08:17 +0000 |
434 | +++ data/com.ubuntuone.SyncDaemon.service.in 1970-01-01 00:00:00 +0000 |
435 | @@ -1,4 +0,0 @@ |
436 | -[D-BUS Service] |
437 | -Name=com.ubuntuone.SyncDaemon |
438 | -Exec=@libexecdir@/ubuntuone-syncdaemon |
439 | - |
440 | |
441 | === added file 'data/logging.conf' |
442 | --- data/logging.conf 1970-01-01 00:00:00 +0000 |
443 | +++ data/logging.conf 2018-04-14 23:11:29 +0000 |
444 | @@ -0,0 +1,12 @@ |
445 | +[logging] |
446 | +level.default = INFO |
447 | +level.parser = log_level |
448 | +level.help = Set the log level (TRACE, DEBUG, INFO, WARNING, ERROR, NOTE CRITICAL, FATAL) |
449 | + |
450 | +file_size.default = 1048576 |
451 | +file_size.parser = int |
452 | +file_size.help = max file size (the file will be rotated) |
453 | + |
454 | +backup_count.default = 5 |
455 | +backup_count.parser = int |
456 | +backup_count.help = number of rotated log files to keep around. |
457 | |
458 | === removed file 'data/logging.conf.in' |
459 | --- data/logging.conf.in 2009-12-23 22:05:44 +0000 |
460 | +++ data/logging.conf.in 1970-01-01 00:00:00 +0000 |
461 | @@ -1,13 +0,0 @@ |
462 | -[logging] |
463 | -level.default = @LOG_LEVEL@ |
464 | -level.parser = log_level |
465 | -level.help = Set the log level (TRACE, DEBUG, INFO, WARNING, ERROR, NOTE |
466 | - CRITICAL, FATAL) |
467 | - |
468 | -file_size.default = @LOG_FILE_SIZE@ |
469 | -file_size.parser = int |
470 | -file_size.help = max file size (the file will be rotated) |
471 | - |
472 | -backup_count.default = 5 |
473 | -backup_count.parser = int |
474 | -backup_count.help = number of rotated log files to keep around. |
475 | |
476 | === removed file 'data/source_ubuntuone-client.py' |
477 | --- data/source_ubuntuone-client.py 2013-01-31 20:35:21 +0000 |
478 | +++ data/source_ubuntuone-client.py 1970-01-01 00:00:00 +0000 |
479 | @@ -1,61 +0,0 @@ |
480 | -# Copyright 2009-2013 Canonical Ltd. |
481 | -# |
482 | -# This program is free software: you can redistribute it and/or modify it |
483 | -# under the terms of the GNU General Public License version 3, as published |
484 | -# by the Free Software Foundation. |
485 | -# |
486 | -# This program is distributed in the hope that it will be useful, but |
487 | -# WITHOUT ANY WARRANTY; without even the implied warranties of |
488 | -# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR |
489 | -# PURPOSE. See the GNU General Public License for more details. |
490 | -# |
491 | -# You should have received a copy of the GNU General Public License along |
492 | -# with this program. If not, see <http://www.gnu.org/licenses/>. |
493 | -# |
494 | -# In addition, as a special exception, the copyright holders give |
495 | -# permission to link the code of portions of this program with the |
496 | -# OpenSSL library under certain conditions as described in each |
497 | -# individual source file, and distribute linked combinations |
498 | -# including the two. |
499 | -# You must obey the GNU General Public License in all respects |
500 | -# for all of the code used other than OpenSSL. If you modify |
501 | -# file(s) with this exception, you may extend this exception to your |
502 | -# version of the file(s), but you are not obligated to do so. If you |
503 | -# do not wish to do so, delete this exception statement from your |
504 | -# version. If you delete this exception statement from all source |
505 | -# files in the program, then also delete it here. |
506 | -"""Stub for Apport""" |
507 | - |
508 | -from __future__ import print_function, unicode_literals |
509 | - |
510 | -import apport |
511 | -import os |
512 | - |
513 | -from apport.hookutils import attach_file_if_exists |
514 | -from dirspec.basedir import xdg_cache_home, xdg_config_home |
515 | - |
516 | -# Paths where things we might want live |
517 | -u1_log_path = os.path.join(xdg_cache_home, b"ubuntuone", b"log") |
518 | -u1_user_config_path = os.path.join(xdg_config_home, b"ubuntuone") |
519 | -# things we may want to collect for the report |
520 | -u1_client_log = os.path.join(u1_log_path, b"syncdaemon.log") |
521 | -u1_except_log = os.path.join(u1_log_path, b"syncdaemon-exceptions.log") |
522 | -u1_invalidnames_log = os.path.join(u1_log_path, b"syncdaemon-invalid-names.log") |
523 | -u1_sd_conf = os.path.join(b"etc", b"xdg", b"ubuntuone", b"syncdaemon.conf") |
524 | -u1_usersd_conf = os.path.join(u1_user_config_path, b"syncdaemon.conf") |
525 | - |
526 | - |
527 | -def add_info(report): |
528 | - """add report info""" |
529 | - attach_file_if_exists(report, u1_except_log, |
530 | - "UbuntuOneSyncdaemonExceptionsLog") |
531 | - attach_file_if_exists(report, u1_invalidnames_log, |
532 | - "UbuntuOneSyncdaemonInvalidNamesLog") |
533 | - attach_file_if_exists(report, u1_usersd_conf, |
534 | - "UbuntuOneUserSyncdaemonConfig") |
535 | - attach_file_if_exists(report, u1_sd_conf, |
536 | - "UbuntuOneSyncdaemonConfig") |
537 | - |
538 | - if not apport.packaging.is_distro_package(report['Package'].split()[0]): |
539 | - report['ThirdParty'] = 'True' |
540 | - report['CrashDB'] = 'ubuntuone' |
541 | |
542 | === removed file 'data/syncdaemon-dev.conf' |
543 | --- data/syncdaemon-dev.conf 2009-06-18 19:01:02 +0000 |
544 | +++ data/syncdaemon-dev.conf 1970-01-01 00:00:00 +0000 |
545 | @@ -1,11 +0,0 @@ |
546 | -[__main__] |
547 | -data_dir.default = tmp/syncdaemon/data |
548 | -# override the parser to allow using arbritary locations instead of xdg based paths |
549 | -data_dir.parser = home_dir |
550 | - |
551 | -# In development don't lookup the SRV records |
552 | -dns_srv.default = |
553 | - |
554 | -# In development don't verify the SSL certificate. |
555 | -disable_ssl_verify = True |
556 | - |
557 | |
558 | === modified file 'data/syncdaemon.conf' |
559 | --- data/syncdaemon.conf 2017-02-22 16:48:48 +0000 |
560 | +++ data/syncdaemon.conf 2018-04-14 23:11:29 +0000 |
561 | @@ -10,18 +10,6 @@ |
562 | files_sync_enabled.parser = bool |
563 | files_sync_enabled.help = Toggles synchronization of files (False disables syncdaemon entirely) |
564 | |
565 | -root_dir.default = ~/Magicicada |
566 | -root_dir.parser = home_dir |
567 | -root_dir.help = Use the specified directory as the root |
568 | - |
569 | -shares_dir.default = ubuntuone/shares |
570 | -shares_dir.parser = xdg_data |
571 | -shares_dir.help = Use the specified directory as shares root |
572 | - |
573 | -data_dir.default = ubuntuone/syncdaemon |
574 | -data_dir.parser = xdg_data |
575 | -data_dir.help = Use the specified directory to store the metadata |
576 | - |
577 | auth.help = Explicitly provide Auth credentials (username and password) |
578 | auth.metavar = USERNAME:PASSWORD |
579 | |
580 | |
581 | === removed file 'data/ubuntuone-client-crashdb.conf' |
582 | --- data/ubuntuone-client-crashdb.conf 2009-08-05 18:01:19 +0000 |
583 | +++ data/ubuntuone-client-crashdb.conf 1970-01-01 00:00:00 +0000 |
584 | @@ -1,5 +0,0 @@ |
585 | -ubuntuone = { |
586 | - 'impl' : 'launchpad', |
587 | - 'project' : 'ubuntuone-client', |
588 | - 'bug_pattern_base' : None, |
589 | -} |
590 | |
591 | === removed file 'data/ubuntuone-launch.desktop.in' |
592 | --- data/ubuntuone-launch.desktop.in 2015-09-29 02:25:44 +0000 |
593 | +++ data/ubuntuone-launch.desktop.in 1970-01-01 00:00:00 +0000 |
594 | @@ -1,8 +0,0 @@ |
595 | -[Desktop Entry] |
596 | -Name=Magicicada |
597 | -Exec=/bin/sh -c '[ -d "$HOME/Magicicada" ] && ubuntuone-launch' |
598 | -Type=Application |
599 | -X-GNOME-Autostart-Delay=30 |
600 | -Icon=ubuntuone |
601 | -Comment= |
602 | -NoDisplay=true |
603 | |
604 | === removed file 'setup.cfg' |
605 | --- setup.cfg 2013-06-07 15:22:31 +0000 |
606 | +++ setup.cfg 1970-01-01 00:00:00 +0000 |
607 | @@ -1,2 +0,0 @@ |
608 | -[build_i18n] |
609 | -desktop_files=[("etc/xdg/autostart", ("data/ubuntuone-launch.desktop.in",))] |
610 | |
611 | === modified file 'setup.py' |
612 | --- setup.py 2018-03-14 21:01:56 +0000 |
613 | +++ setup.py 2018-04-14 23:11:29 +0000 |
614 | @@ -30,214 +30,29 @@ |
615 | """Setup.py: build, distribute, clean.""" |
616 | |
617 | import os |
618 | -import sys |
619 | - |
620 | -try: |
621 | - from DistUtilsExtra.command import build_extra, build_i18n |
622 | - import DistUtilsExtra.auto |
623 | -except ImportError: |
624 | - print >> sys.stderr, 'To build this program you need '\ |
625 | - 'https://launchpad.net/python-distutils-extra' |
626 | - raise |
627 | -assert DistUtilsExtra.auto.__version__ >= '2.18', \ |
628 | - 'needs DistUtilsExtra.auto >= 2.18' |
629 | - |
630 | - |
631 | -PROJECT_NAME = 'magicicada-client' |
632 | -VERSION = '1.0' |
633 | - |
634 | -POT_FILE = 'po/%s.pot' % PROJECT_NAME |
635 | -SERVICE_FILES = ['data/com.ubuntuone.Credentials.service', |
636 | - 'data/com.ubuntuone.SyncDaemon.service'] |
637 | -CONFIG_FILES = ['data/logging.conf'] |
638 | -CLIENTDEFS = 'ubuntuone/clientdefs.py' |
639 | - |
640 | -BUILD_FILES = [CLIENTDEFS] + CONFIG_FILES |
641 | -CLEANFILES = [POT_FILE, 'MANIFEST'] + BUILD_FILES + SERVICE_FILES |
642 | - |
643 | -if int(VERSION.split('.')[1]) % 2 != 0: |
644 | - LOG_LEVEL = 'DEBUG' |
645 | - LOG_FILE_SIZE = '10485760' |
646 | -else: |
647 | - LOG_LEVEL = 'INFO' |
648 | - LOG_FILE_SIZE = '1048576' |
649 | - |
650 | - |
651 | -def replace_variables(files_to_replace, prefix=None, *args, **kwargs): |
652 | - """Replace the @VERSION@ in the constants file with the actual version.""" |
653 | - for fname in files_to_replace: |
654 | - with open(fname + '.in') as in_file: |
655 | - content = in_file.read() |
656 | - with open(fname, 'w') as out_file: |
657 | - content = content.replace('@VERSION@', VERSION) |
658 | - content = content.replace('@PROJECT_NAME@', PROJECT_NAME) |
659 | - content = content.replace('@GETTEXT_PACKAGE@', PROJECT_NAME) |
660 | - content = content.replace('@LOG_LEVEL@', LOG_LEVEL) |
661 | - content = content.replace('@LOG_FILE_SIZE@', LOG_FILE_SIZE) |
662 | - if prefix is not None: |
663 | - content = content.replace( |
664 | - '@localedir@', os.path.join(prefix, |
665 | - 'share', 'locale')) |
666 | - content = content.replace( |
667 | - '@libexecdir@', os.path.join(prefix, |
668 | - 'lib', PROJECT_NAME)) |
669 | - out_file.write(content) |
670 | - |
671 | - |
672 | -class Install(DistUtilsExtra.auto.install_auto): |
673 | + |
674 | +from distutils.command.build import build |
675 | +from distutils.command.clean import clean |
676 | +from setuptools import Command, find_packages, setup |
677 | +from setuptools.command.install import install |
678 | + |
679 | +from ubuntuone.clientdefs import PROJECT_NAME, VERSION |
680 | + |
681 | + |
682 | +class Install(install): |
683 | """Class to install proper files.""" |
684 | |
685 | def run(self): |
686 | - """Do the install. |
687 | - |
688 | - Read from *.service.in and generate .service files by replacing |
689 | - @prefix@ by self.prefix. |
690 | - |
691 | - """ |
692 | - |
693 | - # Remove the contrib and tests packages from the packages list |
694 | - # as they are not meant to be installed to the system. |
695 | - pkgs = [x for x in self.distribution.packages if not ( |
696 | - x.startswith('contrib') or x.startswith('tests'))] |
697 | - self.distribution.packages = pkgs |
698 | - |
699 | - # Remove the input and dev files from the data files list, |
700 | - # as they are not meant to be installed. |
701 | - data_files = [x for x in self.distribution.data_files if not ( |
702 | - x[1][0].endswith('.in') or x[1][0].endswith('-dev.conf'))] |
703 | - self.distribution.data_files = data_files |
704 | - |
705 | + """Do the install.""" |
706 | # Get just the prefix value, without the root |
707 | prefix = self.install_data.replace( |
708 | self.root if self.root is not None else '', '') |
709 | - replace_variables(SERVICE_FILES, prefix) |
710 | - DistUtilsExtra.auto.install_auto.run(self) |
711 | - # Replace the CLIENTDEFS paths here, so that we can do it directly in |
712 | - # the installed copy, rather than the lcoal copy. This allows us to |
713 | - # have a semi-generated version for use in tests, and a full version |
714 | - # for use in installed systems. |
715 | - with open(CLIENTDEFS) as in_file: |
716 | - content = in_file.read() |
717 | - with open(os.path.join(self.install_purelib, |
718 | - PROJECT_NAME, |
719 | - CLIENTDEFS), 'w') as out_file: |
720 | - content = content.replace( |
721 | - '@localedir@', os.path.join(prefix, 'share', 'locale')) |
722 | - content = content.replace( |
723 | - '@libexecdir@', os.path.join(prefix, 'lib', PROJECT_NAME)) |
724 | - out_file.write(content) |
725 | - |
726 | - |
727 | -class Build(build_extra.build_extra): |
728 | - """Build PyQt (.ui) files and resources.""" |
729 | - |
730 | - description = "build PyQt GUIs (.ui) and resources (.qrc)" |
731 | - |
732 | - def run(self): |
733 | - """Execute the command.""" |
734 | - replace_variables(BUILD_FILES) |
735 | - build_extra.build_extra.run(self) |
736 | - |
737 | - |
738 | -class Clean(DistUtilsExtra.auto.clean_build_tree): |
739 | - """Class to clean up after the build.""" |
740 | - |
741 | - def run(self): |
742 | - """Clean up the built files.""" |
743 | - for built_file in CLEANFILES: |
744 | - if os.path.exists(built_file): |
745 | - os.unlink(built_file) |
746 | - |
747 | - DistUtilsExtra.auto.clean_build_tree.run(self) |
748 | - |
749 | - |
750 | -class BuildLocale(build_i18n.build_i18n): |
751 | - """Work around a bug in DistUtilsExtra.""" |
752 | - |
753 | - def run(self): |
754 | - """Magic.""" |
755 | - build_i18n.build_i18n.run(self) |
756 | - i = 0 |
757 | - for df in self.distribution.data_files: |
758 | - if df[0].startswith('etc/xdg/'): |
759 | - if sys.platform not in ('darwin', 'win32'): |
760 | - new_df = (df[0].replace('etc/xdg/', '/etc/xdg/'), df[1]) |
761 | - self.distribution.data_files[i] = new_df |
762 | - else: |
763 | - self.distribution.data_files.pop(i) |
764 | - i += 1 |
765 | - |
766 | - |
767 | -def set_py2exe_paths(): |
768 | - """Set the path so that py2exe finds the required modules.""" |
769 | - # Pylint does not understand same spaced imports |
770 | - import win32com |
771 | - try: |
772 | - import py2exe.mf as modulefinder |
773 | - except ImportError: |
774 | - import modulefinder |
775 | - |
776 | - # py2exe 0.6.4 introduced a replacement modulefinder. |
777 | - # This means we have to add package paths there, |
778 | - # not to the built-in one. If this new modulefinder gets |
779 | - # integrated into Python, then we might be able to revert |
780 | - # this some day. If this doesn't work, try import modulefinder |
781 | - for package_path in win32com.__path__[1:]: |
782 | - modulefinder.AddPackagePath("win32com", package_path) |
783 | - for extra_mod in ["win32com.server", "win32com.client"]: |
784 | - __import__(extra_mod) |
785 | - module = sys.modules[extra_mod] |
786 | - for module_path in module.__path__[1:]: |
787 | - modulefinder.AddPackagePath(extra_mod, module_path) |
788 | - |
789 | - |
790 | -cmdclass = { |
791 | - 'install': Install, |
792 | - 'build': Build, |
793 | - 'clean': Clean, |
794 | - 'build_i18n': BuildLocale, |
795 | -} |
796 | - |
797 | -bin_scripts = [ |
798 | - 'bin/u1sdtool', |
799 | - 'bin/ubuntuone-launch', |
800 | -] |
801 | - |
802 | -libexec_scripts = [ |
803 | - 'bin/ubuntuone-syncdaemon', |
804 | -] |
805 | - |
806 | -data_files = [] |
807 | -scripts = [] |
808 | - |
809 | -if sys.platform == 'win32': |
810 | - set_py2exe_paths() |
811 | - extra = { |
812 | - 'options': { |
813 | - 'py2exe': { |
814 | - 'bundle_files': 1, |
815 | - 'skip_archive': 0, |
816 | - 'optimize': 1, |
817 | - 'dll_excludes': ["mswsock.dll", "powrprof.dll"], |
818 | - }, |
819 | - }, |
820 | - # add the console script so that py2exe compiles it |
821 | - 'console': bin_scripts + libexec_scripts, |
822 | - 'zipfile': None, |
823 | - } |
824 | -else: |
825 | - data_files.extend([ |
826 | - ('lib/%s' % PROJECT_NAME, libexec_scripts), |
827 | - ('share/dbus-1/services', SERVICE_FILES), |
828 | - ('/etc/xdg/ubuntuone', CONFIG_FILES + ['data/syncdaemon.conf']), |
829 | - ('/etc/apport/crashdb.conf.d', ['data/ubuntuone-client-crashdb.conf']), |
830 | - ('share/apport/package-hooks', ['data/source_ubuntuone-client.py']), |
831 | - ('share/man/man1', ['docs/man/u1sdtool.1']), |
832 | - ]) |
833 | - scripts.extend(bin_scripts) |
834 | - extra = {} |
835 | - |
836 | -DistUtilsExtra.auto.setup( |
837 | + if prefix: |
838 | + print "Will install using prefix", prefix
839 | + |
840 | + install.run(self) |
841 | + |
842 | +setup( |
843 | name=PROJECT_NAME, |
844 | version=VERSION, |
845 | license='GPL v3', |
846 | @@ -246,7 +61,18 @@ |
847 | description='Magicicada file synchronization client', |
848 | url='https://launchpad.net/%s' % PROJECT_NAME, |
849 | extra_path=PROJECT_NAME, |
850 | - scripts=scripts, |
851 | - data_files=data_files, |
852 | - cmdclass=cmdclass, |
853 | - **extra) |
854 | + packages=find_packages(), |
855 | + package_data={ |
856 | + '': ['data/*'], |
857 | + }, |
858 | + exclude_package_data={ |
859 | + '': ['contrib', '*.in'], |
860 | + }, |
861 | + scripts=['bin/u1sdtool', 'bin/ubuntuone-syncdaemon', |
862 | + 'bin/ubuntuone-proxy-tunnel'], |
863 | + data_files=[ |
864 | + ('share/dbus-1/services', ['data/com.ubuntuone.SyncDaemon.service']), |
865 | + ('share/data', ['data/logging.conf', 'data/syncdaemon.conf']), |
866 | + ('share/man/man1', ['docs/man/u1sdtool.1']), |
867 | + ], |
868 | +) |
869 | |
870 | === renamed file 'ubuntuone/clientdefs.py.in' => 'ubuntuone/clientdefs.py' |
871 | --- ubuntuone/clientdefs.py.in 2016-06-04 23:41:52 +0000 |
872 | +++ ubuntuone/clientdefs.py 2018-04-14 23:11:29 +0000 |
873 | @@ -1,8 +1,6 @@ |
874 | -# ubuntuone.clientdefs - Configure-time definitions |
875 | -# |
876 | -# Author: David Planella <david.planella@ubuntu.com> |
877 | -# |
878 | -# Copyright 2009-2012 Canonical Ltd. |
879 | +# |
880 | +# Copyright 2009-2015 Canonical Ltd. |
881 | +# Copyright 2005-2016 Chicharreros |
882 | # |
883 | # This program is free software: you can redistribute it and/or modify it |
884 | # under the terms of the GNU General Public License version 3, as published |
885 | @@ -28,25 +26,21 @@ |
886 | # do not wish to do so, delete this exception statement from your |
887 | # version. If you delete this exception statement from all source |
888 | # files in the program, then also delete it here. |
889 | -"""Configure-time definitions for the client.""" |
890 | + |
891 | +"""Definitions for the client.""" |
892 | |
893 | import gettext |
894 | -import os |
895 | import platform |
896 | import urllib |
897 | |
898 | NAME = 'Magicicada' |
899 | -VERSION = "@VERSION@" |
900 | -LOCALEDIR = "@localedir@" |
901 | -LIBEXECDIR = "@libexecdir@" |
902 | -GETTEXT_PACKAGE = "@GETTEXT_PACKAGE@" |
903 | -PROJECT_NAME = "@GETTEXT_PACKAGE@" |
904 | -PROJECT_DIR = os.path.join('@prefix@', 'share', PROJECT_NAME) |
905 | -BIN_DIR = os.path.join('@prefix@', 'lib', PROJECT_NAME) |
906 | +VERSION = '1.0' |
907 | +PROJECT_NAME = 'magicicada-client' |
908 | +GETTEXT_PACKAGE = PROJECT_NAME |
909 | |
910 | |
911 | def Q_(string): |
912 | - return gettext.dgettext(GETTEXT_PACKAGE, string) |
913 | + return gettext.dgettext(PROJECT_NAME, string) |
914 | |
915 | |
916 | def platform_data(): |
917 | |
918 | === renamed file 'ubuntuone/syncdaemon/config.py' => 'ubuntuone/config.py' |
919 | --- ubuntuone/syncdaemon/config.py 2018-03-14 21:55:27 +0000 |
920 | +++ ubuntuone/config.py 2018-04-14 23:11:29 +0000 |
921 | @@ -1,5 +1,3 @@ |
922 | -# ubuntuone.syncdaemon.config - SyncDaemon config utilities |
923 | -# |
924 | # Copyright 2009-2012 Canonical Ltd. |
925 | # Copyright 2017 Chicharreros (https://launchpad.net/~chicharreros) |
926 | # |
927 | @@ -37,15 +35,16 @@ |
928 | |
929 | from ConfigParser import NoOptionError, NoSectionError |
930 | from optparse import OptionParser |
931 | -from dirspec.basedir import ( |
932 | - load_config_paths, |
933 | - save_config_path, |
934 | - xdg_data_home, |
935 | - xdg_cache_home, |
936 | +from dirspec.basedir import xdg_cache_home, xdg_data_home, save_config_path |
937 | + |
938 | +from ubuntuone.platform import ( |
939 | + can_write, |
940 | + expand_user, |
941 | + make_dir, |
942 | + path_exists, |
943 | + set_dir_readwrite, |
944 | ) |
945 | -from dirspec.utils import unicode_path |
946 | |
947 | -from ubuntuone.platform import expand_user |
948 | |
949 | # the try/except is to work with older versions of configglue (that |
950 | # had everything that is now configglue.inischema.* as configglue.*). |
951 | @@ -91,42 +90,46 @@ |
952 | _user_config = None |
953 | |
954 | |
955 | -def path_from_unix(path): |
956 | - return path.replace('/', os.path.sep) |
957 | - |
958 | - |
959 | -def home_dir_parser(value): |
960 | - """Parser for the root_dir and shares_dir options. |
961 | - |
962 | - Return the path using user home + value. |
963 | - |
964 | - """ |
965 | - path = path_from_unix(value) |
966 | - result = expand_user(path) |
967 | - assert isinstance(result, str) |
968 | - return result |
969 | - |
970 | - |
971 | -def xdg_cache_dir_parser(value): |
972 | - """Parser for the data_dir option. |
973 | - |
974 | - Return the path using xdg_cache_home + value. |
975 | - |
976 | - """ |
977 | - result = os.path.join(xdg_cache_home, path_from_unix(value)) |
978 | - assert isinstance(result, str) |
979 | - return result |
980 | - |
981 | - |
982 | -def xdg_data_dir_parser(value): |
983 | - """Parser for the data_dir option. |
984 | - |
985 | - Return the path using xdg_data_home + value. |
986 | - |
987 | - """ |
988 | - result = os.path.join(xdg_data_home, path_from_unix(value)) |
989 | - assert isinstance(result, str) |
990 | - return result |
991 | +def get_or_create_dirs(prefix=None): |
992 | + if prefix is not None: |
993 | + data_dir = os.path.join(prefix, 'data') |
994 | + cache_dir = os.path.join(prefix, 'cache') |
995 | + root_dir = os.path.join(prefix, 'Magicicada') |
996 | + else: |
997 | + root_dir = expand_user('~/Magicicada') |
998 | + data_dir = xdg_data_home |
999 | + cache_dir = xdg_cache_home |
1000 | + |
1001 | + if not path_exists(data_dir): |
1002 | + parent = os.path.dirname(data_dir) |
1003 | + if path_exists(parent) and not can_write(parent): |
1004 | + # make the parent dir writable |
1005 | + set_dir_readwrite(parent) |
1006 | + make_dir(data_dir, recursive=True) |
1007 | + |
1008 | + if not path_exists(cache_dir): |
1009 | + make_dir(cache_dir, recursive=True) |
1010 | + |
1011 | + # create the logs_dir |
1012 | + logs_dir = os.path.join(data_dir, 'magicicada', 'logs') |
1013 | + if not path_exists(logs_dir):
1014 | + make_dir(logs_dir, recursive=True) |
1015 | + |
1016 | + # create the shares_dir |
1017 | + shares_dir = os.path.join(data_dir, 'magicicada', 'shares') |
1018 | + if not path_exists(shares_dir): |
1019 | + make_dir(shares_dir, recursive=True) |
1020 | + |
1021 | + # create the partials_dir |
1022 | + partials_dir = os.path.join(cache_dir, 'magicicada', 'partials') |
1023 | + if not path_exists(partials_dir): |
1024 | + make_dir(partials_dir, recursive=True) |
1025 | + # ensure that we can write in the partials_dir |
1026 | + set_dir_readwrite(partials_dir) |
1027 | + |
1028 | + return dict( |
1029 | + root_dir=root_dir, shares_dir=shares_dir, partials_dir=partials_dir, |
1030 | + data_dir=data_dir, cache_dir=cache_dir, logs_dir=logs_dir) |
1031 | |
1032 | |
1033 | def server_connection_parser(value): |
1034 | @@ -190,42 +193,25 @@ |
1035 | |
1036 | def get_parsers(): |
1037 | """returns a list of tuples: (name, parser)""" |
1038 | - return [('home_dir', home_dir_parser), |
1039 | - ('xdg_cache', xdg_cache_dir_parser), |
1040 | - ('xdg_data', xdg_data_dir_parser), |
1041 | - ('log_level', log_level_parser), |
1042 | - ('connection', server_connection_parser), |
1043 | + return [('log_level', log_level_parser), |
1044 | ('throttling_limit', throttling_limit_parser)] |
1045 | |
1046 | |
1047 | def get_config_files(): |
1048 | - """ return the path to the config files or and empty list. |
1049 | - The search path is based on the paths returned by load_config_paths |
1050 | - but it's returned in reverse order (e.g: /etc/xdg first). |
1051 | - """ |
1052 | + """Return the path to the config files or an empty list."""
1053 | config_files = [] |
1054 | - for xdg_config_dir in load_config_paths('ubuntuone'): |
1055 | - xdg_config_dir = unicode_path(xdg_config_dir) |
1056 | - config_file = os.path.join(xdg_config_dir, CONFIG_FILE).encode('utf8') |
1057 | - if os.path.exists(config_file): |
1058 | - config_files.append(config_file) |
1059 | - |
1060 | - config_logs = os.path.join(xdg_config_dir, CONFIG_LOGS).encode('utf8') |
1061 | - if os.path.exists(config_logs): |
1062 | - config_files.append(config_logs) |
1063 | - |
1064 | - # reverse the list as load_config_paths returns the user dir first |
1065 | - config_files.reverse() |
1066 | - # if we are running from a branch, get the config files from it too |
1067 | - config_file = os.path.join(os.path.dirname(__file__), os.path.pardir, |
1068 | - os.path.pardir, 'data', CONFIG_FILE) |
1069 | - if os.path.exists(config_file): |
1070 | - config_files.append(config_file) |
1071 | - |
1072 | - config_logs = os.path.join(os.path.dirname(__file__), os.path.pardir, |
1073 | - os.path.pardir, 'data', CONFIG_LOGS) |
1074 | - if os.path.exists(config_logs): |
1075 | - config_files.append(config_logs) |
1076 | + |
1077 | + # if we are running from a branch, get the config files from it too
1078 | + local_data = os.path.abspath( |
1079 | + os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir, |
1080 | + 'share', 'data')) |
1081 | + if not os.path.exists(local_data): |
1082 | + local_data = os.path.abspath(os.path.join( |
1083 | + os.path.dirname(__file__), os.path.pardir, os.path.pardir, 'data')) |
1084 | + |
1085 | + if os.path.exists(local_data): |
1086 | + config_files.append(os.path.join(local_data, CONFIG_FILE)) |
1087 | + config_files.append(os.path.join(local_data, CONFIG_LOGS)) |
1088 | |
1089 | return config_files |
1090 | |
1091 | |
1092 | === modified file 'ubuntuone/logger.py' |
1093 | --- ubuntuone/logger.py 2015-09-19 23:15:50 +0000 |
1094 | +++ ubuntuone/logger.py 2018-04-14 23:11:29 +0000 |
1095 | @@ -1,8 +1,5 @@ |
1096 | -# ubuntuone.syncdaemon.logger - logging utilities |
1097 | -# |
1098 | -# Author: Guillermo Gonzalez <guillermo.gonzalez@canonical.com> |
1099 | -# |
1100 | # Copyright 2010-2012 Canonical Ltd. |
1101 | +# Copyright 2015-2016 Chicharreros (https://launchpad.net/~chicharreros) |
1102 | # |
1103 | # This program is free software: you can redistribute it and/or modify it |
1104 | # under the terms of the GNU General Public License version 3, as published |
1105 | @@ -28,16 +25,19 @@ |
1106 | # do not wish to do so, delete this exception statement from your |
1107 | # version. If you delete this exception statement from all source |
1108 | # files in the program, then also delete it here. |
1109 | -"""Ubuntuone client logging utilities and config. """ |
1110 | + |
1111 | +"""Logging utilities and config. """ |
1112 | |
1113 | from __future__ import with_statement |
1114 | |
1115 | import contextlib |
1116 | import functools |
1117 | import logging |
1118 | +import os |
1119 | import re |
1120 | import sys |
1121 | import weakref |
1122 | +import zlib |
1123 | |
1124 | from logging.handlers import TimedRotatingFileHandler |
1125 | |
1126 | @@ -69,22 +69,22 @@ |
1127 | |
1128 | |
1129 | class DayRotatingFileHandler(TimedRotatingFileHandler): |
1130 | - """A mix of TimedRotatingFileHandler and RotatingFileHandler configured for |
1131 | - daily rotation but that uses the suffix and extMatch of Hourly rotation, in |
1132 | - order to allow seconds based rotation on each startup. |
1133 | - The log file is also rotated when the specified size is reached. |
1134 | + """A mix of TimedRotatingFileHandler and RotatingFileHandler. |
1135 | + |
1136 | + This handler is configured for daily rotation but that uses the suffix and |
1137 | + extMatch of Hourly rotation, in order to allow seconds based rotation on |
1138 | + each startup. The log file is also rotated when the specified size is |
1139 | + reached. |
1140 | + |
1141 | """ |
1142 | |
1143 | def __init__(self, *args, **kwargs): |
1144 | - """ create the instance and override the suffix and extMatch. |
1145 | + """Create the instance and override the suffix and extMatch. |
1146 | + |
1147 | Also accepts a maxBytes keyword arg to rotate the file when it reachs |
1148 | maxBytes. |
1149 | """ |
1150 | kwargs['when'] = 'D' |
1151 | - kwargs['backupCount'] = LOGBACKUP |
1152 | - # check if we are in 2.5, only for PQM |
1153 | - if sys.version_info[:2] >= (2, 6): |
1154 | - kwargs['delay'] = 1 |
1155 | if 'maxBytes' in kwargs: |
1156 | self.maxBytes = kwargs.pop('maxBytes') |
1157 | else: |
1158 | @@ -119,41 +119,8 @@ |
1159 | return 0 |
1160 | |
1161 | |
1162 | -class MultiFilter(logging.Filter): |
1163 | - """Our own logging.Filter. |
1164 | - |
1165 | - To allow filter by multiple names in a single handler or logger. |
1166 | - |
1167 | - """ |
1168 | - |
1169 | - def __init__(self, names=None): |
1170 | - logging.Filter.__init__(self) |
1171 | - self.names = names or [] |
1172 | - self.filters = [] |
1173 | - for name in self.names: |
1174 | - self.filters.append(logging.Filter(name)) |
1175 | - |
1176 | - def filter(self, record): |
1177 | - """Determine if the specified record is to be logged. |
1178 | - |
1179 | - This work a bit different from the standard logging.Filter, the |
1180 | - record is logged if at least one filter allows it. |
1181 | - If there are no filters, the record is allowed. |
1182 | - |
1183 | - """ |
1184 | - if not self.filters: |
1185 | - # no filters, allow the record |
1186 | - return True |
1187 | - for f in self.filters: |
1188 | - if f.filter(record): |
1189 | - return True |
1190 | - return False |
1191 | - |
1192 | - |
1193 | class DebugCapture(logging.Handler): |
1194 | - """ |
1195 | - A context manager to capture debug logs. |
1196 | - """ |
1197 | + """A context manager to capture debug logs.""" |
1198 | |
1199 | def __init__(self, logger, raise_unhandled=False, on_error=True): |
1200 | """Creates the instance. |
1201 | @@ -250,7 +217,7 @@ |
1202 | self.emit_debug() |
1203 | self.uninstall() |
1204 | if self.raise_unhandled and exc_type is not None: |
1205 | - raise exc_type, exc_value, traceback |
1206 | + raise exc_type, exc_value, traceback
1207 | else: |
1208 | return True |
1209 | |
1210 | @@ -292,21 +259,157 @@ |
1211 | return middle |
1212 | |
1213 | |
1214 | -# configure the thing # |
1215 | -LOGBACKUP = 5 # the number of log files to keep around |
1216 | - |
1217 | -basic_formatter = logging.Formatter( |
1218 | - fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s") |
1219 | -debug_formatter = logging.Formatter( |
1220 | - fmt="%(asctime)s %(name)s %(module)s %(lineno)s %(funcName)s %(message)s") |
1221 | - |
1222 | -# a constant to change the default DEBUG level value |
1223 | -_DEBUG_LOG_LEVEL = logging.DEBUG |
1224 | - |
1225 | - |
1226 | -# partial config of the handler to rotate when the file size is 1MB |
1227 | -CustomRotatingFileHandler = functools.partial(DayRotatingFileHandler, |
1228 | - maxBytes=1048576) |
1229 | - |
1230 | # use our logger as the default Logger class |
1231 | logging.setLoggerClass(Logger) |
1232 | + |
1233 | +LOGGING = { |
1234 | + 'version': 1, |
1235 | + 'disable_existing_loggers': False, |
1236 | + 'formatters': { |
1237 | + 'simple': { |
1238 | + 'format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s' |
1239 | + }, |
1240 | + 'debug': { |
1241 | + 'format': ('%(asctime)s %(name)s %(module)s %(lineno)s ' |
1242 | + '%(funcName)s %(message)s'), |
1243 | + }, |
1244 | + }, |
1245 | + 'filters': { |
1246 | + 'sd_filter': {'name': 'ubuntuone.SyncDaemon'}, |
1247 | + 'twisted_filter': {'name': 'twisted'}, |
1248 | + 'pyinotify_filter': {'name': 'pyinotify'}, |
1249 | + }, |
1250 | + 'handlers': { |
1251 | + 'console': { |
1252 | + 'class': 'logging.StreamHandler', |
1253 | + }, |
1254 | + 'debug': { |
1255 | + 'level': 'DEBUG', |
1256 | + 'class': 'ubuntuone.logger.DayRotatingFileHandler', |
1257 | + 'filename': 'syncdaemon-debug.log', |
1258 | + 'formatter': 'debug', |
1259 | + }, |
1260 | + 'broken_nodes': { |
1261 | + 'level': 'INFO', |
1262 | + 'class': 'ubuntuone.logger.DayRotatingFileHandler', |
1263 | + 'filename': 'broken-nodes.log', |
1264 | + 'formatter': 'simple', |
1265 | + }, |
1266 | + 'exceptions': { |
1267 | + 'level': 'ERROR', |
1268 | + 'class': 'ubuntuone.logger.DayRotatingFileHandler', |
1269 | + 'filename': 'exceptions.log', |
1270 | + 'formatter': 'simple', |
1271 | + }, |
1272 | + 'invalid_names': { |
1273 | + 'level': 'INFO', |
1274 | + 'class': 'ubuntuone.logger.DayRotatingFileHandler', |
1275 | + 'filename': 'invalid-names.log', |
1276 | + 'formatter': 'simple', |
1277 | + }, |
1278 | + 'proxy': { |
1279 | + 'level': 'DEBUG', |
1280 | + 'class': 'ubuntuone.logger.DayRotatingFileHandler', |
1281 | + 'filename': 'proxy.log', |
1282 | + 'formatter': 'simple', |
1283 | + }, |
1284 | + 'syncdaemon': { |
1285 | + 'level': 'DEBUG', |
1286 | + 'class': 'ubuntuone.logger.DayRotatingFileHandler', |
1287 | + 'filename': 'syncdaemon.log', |
1288 | + 'formatter': 'simple', |
1289 | + 'filters': ['sd_filter', 'twisted_filter', 'pyinotify_filter'], |
1290 | + }, |
1291 | + }, |
1292 | + 'loggers': { |
1293 | + '': { |
1294 | + 'handlers': ['exceptions'], |
1295 | + 'level': 'ERROR', |
1296 | + 'propagate': False, |
1297 | + }, |
1298 | + 'pyinotify': { |
1299 | + 'handlers': ['syncdaemon'], |
1300 | + 'level': 'TRACE', |
1301 | + 'propagate': False, |
1302 | + }, |
1303 | + 'twisted': { |
1304 | + 'handlers': ['syncdaemon', 'exceptions'], |
1305 | + 'level': 'ERROR', |
1306 | + 'propagate': False, |
1307 | + }, |
1308 | + 'ubuntuone.proxy': { |
1309 | + 'handlers': ['proxy'], |
1310 | + 'level': 'DEBUG', |
1311 | + 'propagate': False, |
1312 | + }, |
1313 | + 'ubuntuone.SyncDaemon': {
1314 | + # single entry: duplicate keys in a dict literal are silently
1315 | + # collapsed (the last one wins), so defining
1316 | + # 'ubuntuone.SyncDaemon' twice would have discarded the
1317 | + # 'syncdaemon' handler config; list both handlers together —
1318 | + # ERROR records still reach 'exceptions' because that handler
1319 | + # filters at its own ERROR level
1320 | + 'handlers': ['syncdaemon', 'exceptions'],
1321 | + 'level': 'INFO',
1322 | + 'propagate': False,
1323 | + },
1323 | + 'ubuntuone.SyncDaemon.BrokenNodes': { |
1324 | + 'handlers': ['broken_nodes'], |
1325 | + 'level': 'INFO', |
1326 | + 'propagate': False, |
1327 | + }, |
1328 | + 'ubuntuone.SyncDaemon.InvalidNames': { |
1329 | + 'handlers': ['invalid_names'], |
1330 | + 'level': 'INFO', |
1331 | + 'propagate': False, |
1332 | + }, |
1333 | + }, |
1334 | +} |
1335 | + |
1336 | +def setup_handler(handler, level, max_bytes=1048576, backup_count=5):
1337 | + handler.maxBytes = max_bytes
1338 | + if level < logging.INFO:
1339 | + # don't cap the file size on DEBUG or less
1340 | + handler.maxBytes = 0
1341 | + handler.backupCount = backup_count
1342 | + |
1343 | + |
1344 | +def init(logs_dir, level, dest='', max_bytes=1048576, backup_count=5): |
1345 | + valid = ['file', 'stdout', 'stderr'] |
1346 | + if any(v not in valid for v in dest.split()): |
1347 | + # invalid dest value: refuse to configure logging at all
1348 | + raise ValueError('Invalid logger dest %r (must be one of %s).' % |
1349 | + (dest, valid)) |
1350 | + |
1351 | + logging_config = LOGGING.copy() |
1352 | + |
1353 | + for h in logging_config['handlers'].values():
1354 | + if 'filename' in h: |
1355 | + h['filename'] = os.path.join(logs_dir, h['filename']) |
1356 | + |
1357 | + for l in logging_config['loggers']: |
1358 | + handlers = logging_config['loggers'][l]['handlers'] |
1359 | + if 'file' in dest: |
1360 | + handlers.append('debug') |
1361 | + if 'stdout' in dest or 'stderr' in dest: |
1362 | + handlers.append('console') |
1363 | + |
1364 | + # hook twisted.python.log with standard logging |
1365 | + from twisted.python import log |
1366 | + observer = log.PythonLoggingObserver('twisted') |
1367 | + observer.start() |
1368 | + |
1369 | + from logging.config import dictConfig |
1370 | + dictConfig(logging_config) |
1371 | + |
1372 | + for logger_name in logging_config['loggers']: |
1373 | + rotate_logs(logger_name) |
1374 | + |
1375 | + |
1376 | +def rotate_logs(logger_name): |
1377 | + """Do a rollover of all the handlers.""" |
1378 | + for handler in logging.getLogger(logger_name).handlers: |
1379 | + # ignore the missing file error on a failed rollover |
1380 | + try: |
1381 | + handler.doRollover() |
1382 | + except OSError: |
1383 | + pass |
1384 | |
1385 | === modified file 'ubuntuone/platform/__init__.py' |
1386 | --- ubuntuone/platform/__init__.py 2016-06-01 18:28:19 +0000 |
1387 | +++ ubuntuone/platform/__init__.py 2018-04-14 23:11:29 +0000 |
1388 | @@ -31,10 +31,7 @@ |
1389 | import os |
1390 | import sys |
1391 | |
1392 | -from dirspec.utils import user_home |
1393 | - |
1394 | from ubuntuone.platform import ipc |
1395 | -from ubuntuone.platform import logger |
1396 | from ubuntuone.platform import os_helper |
1397 | |
1398 | |
1399 | @@ -59,7 +56,7 @@ |
1400 | if (not path.startswith(tilde) or |
1401 | (len(path) > 1 and path[1:2] != os.path.sep)): |
1402 | return path |
1403 | - result = path.replace('~', user_home, 1) |
1404 | + result = os.path.expanduser(path) |
1405 | |
1406 | assert isinstance(result, str) |
1407 | try: |
1408 | @@ -100,10 +97,6 @@ |
1409 | stat_path = os_helper.stat_path |
1410 | walk = os_helper.walk |
1411 | |
1412 | -# From Logger |
1413 | -setup_filesystem_logging = logger.setup_filesystem_logging |
1414 | -get_filesystem_logger = logger.get_filesystem_logger |
1415 | - |
1416 | # IPC |
1417 | ExternalInterface = ipc.ExternalInterface |
1418 | is_already_running = ipc.is_already_running |
1419 | |
1420 | === modified file 'ubuntuone/platform/filesystem_notifications/monitor/common.py' |
1421 | --- ubuntuone/platform/filesystem_notifications/monitor/common.py 2016-05-31 21:43:25 +0000 |
1422 | +++ ubuntuone/platform/filesystem_notifications/monitor/common.py 2018-04-14 23:11:29 +0000 |
1423 | @@ -34,6 +34,7 @@ |
1424 | |
1425 | from twisted.internet import defer |
1426 | |
1427 | +from ubuntuone import logger |
1428 | from ubuntuone.platform.filesystem_notifications import notify_processor |
1429 | from ubuntuone.platform.filesystem_notifications.pyinotify_agnostic import ( |
1430 | Event, |
1431 | @@ -44,9 +45,6 @@ |
1432 | IN_MOVED_FROM, |
1433 | IN_MOVED_TO, |
1434 | ) |
1435 | - |
1436 | -from ubuntuone import logger |
1437 | - |
1438 | from ubuntuone.platform.os_helper import ( |
1439 | is_valid_syncdaemon_path, |
1440 | is_valid_os_path, |
1441 | |
1442 | === removed directory 'ubuntuone/platform/logger' |
1443 | === removed file 'ubuntuone/platform/logger/__init__.py' |
1444 | --- ubuntuone/platform/logger/__init__.py 2012-06-21 18:58:50 +0000 |
1445 | +++ ubuntuone/platform/logger/__init__.py 1970-01-01 00:00:00 +0000 |
1446 | @@ -1,53 +0,0 @@ |
1447 | -# -*- coding: utf-8 *-* |
1448 | -# |
1449 | -# Copyright 2012 Canonical Ltd. |
1450 | -# |
1451 | -# This program is free software: you can redistribute it and/or modify it |
1452 | -# under the terms of the GNU General Public License version 3, as published |
1453 | -# by the Free Software Foundation. |
1454 | -# |
1455 | -# This program is distributed in the hope that it will be useful, but |
1456 | -# WITHOUT ANY WARRANTY; without even the implied warranties of |
1457 | -# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR |
1458 | -# PURPOSE. See the GNU General Public License for more details. |
1459 | -# |
1460 | -# You should have received a copy of the GNU General Public License along |
1461 | -# with this program. If not, see <http://www.gnu.org/licenses/>. |
1462 | -# |
1463 | -# In addition, as a special exception, the copyright holders give |
1464 | -# permission to link the code of portions of this program with the |
1465 | -# OpenSSL library under certain conditions as described in each |
1466 | -# individual source file, and distribute linked combinations |
1467 | -# including the two. |
1468 | -# You must obey the GNU General Public License in all respects |
1469 | -# for all of the code used other than OpenSSL. If you modify |
1470 | -# file(s) with this exception, you may extend this exception to your |
1471 | -# version of the file(s), but you are not obligated to do so. If you |
1472 | -# do not wish to do so, delete this exception statement from your |
1473 | -# version. If you delete this exception statement from all source |
1474 | -# files in the program, then also delete it here. |
1475 | -"""Logger module.""" |
1476 | - |
1477 | -import os |
1478 | -import sys |
1479 | - |
1480 | -from dirspec.basedir import xdg_cache_home |
1481 | - |
1482 | -if sys.platform == "win32": |
1483 | - from ubuntuone.platform.logger import windows |
1484 | - source = windows |
1485 | -elif sys.platform == "darwin": |
1486 | - from ubuntuone.platform.logger import darwin |
1487 | - source = darwin |
1488 | -else: |
1489 | - from ubuntuone.platform.logger import linux |
1490 | - source = linux |
1491 | - |
1492 | - |
1493 | -get_filesystem_logger = source.get_filesystem_logger |
1494 | -setup_filesystem_logging = source.setup_filesystem_logging |
1495 | - |
1496 | -ubuntuone_log_dir = os.path.join(xdg_cache_home, 'ubuntuone', 'log') |
1497 | -ubuntuone_log_dir = ubuntuone_log_dir.decode('utf-8') |
1498 | -if not os.path.exists(ubuntuone_log_dir): |
1499 | - os.makedirs(ubuntuone_log_dir) |
1500 | |
1501 | === removed file 'ubuntuone/platform/logger/darwin.py' |
1502 | --- ubuntuone/platform/logger/darwin.py 2012-05-15 17:10:02 +0000 |
1503 | +++ ubuntuone/platform/logger/darwin.py 1970-01-01 00:00:00 +0000 |
1504 | @@ -1,37 +0,0 @@ |
1505 | -# -*- coding: utf-8 *-* |
1506 | -# |
1507 | -# Copyright 2012 Canonical Ltd. |
1508 | -# |
1509 | -# This program is free software: you can redistribute it and/or modify it |
1510 | -# under the terms of the GNU General Public License version 3, as published |
1511 | -# by the Free Software Foundation. |
1512 | -# |
1513 | -# This program is distributed in the hope that it will be useful, but |
1514 | -# WITHOUT ANY WARRANTY; without even the implied warranties of |
1515 | -# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR |
1516 | -# PURPOSE. See the GNU General Public License for more details. |
1517 | -# |
1518 | -# You should have received a copy of the GNU General Public License along |
1519 | -# with this program. If not, see <http://www.gnu.org/licenses/>. |
1520 | -# |
1521 | -# In addition, as a special exception, the copyright holders give |
1522 | -# permission to link the code of portions of this program with the |
1523 | -# OpenSSL library under certain conditions as described in each |
1524 | -# individual source file, and distribute linked combinations |
1525 | -# including the two. |
1526 | -# You must obey the GNU General Public License in all respects |
1527 | -# for all of the code used other than OpenSSL. If you modify |
1528 | -# file(s) with this exception, you may extend this exception to your |
1529 | -# version of the file(s), but you are not obligated to do so. If you |
1530 | -# do not wish to do so, delete this exception statement from your |
1531 | -# version. If you delete this exception statement from all source |
1532 | -# files in the program, then also delete it here. |
1533 | - |
1534 | - |
1535 | -def get_filesystem_logger(): |
1536 | - """Return the logger used in the filesystem.""" |
1537 | - |
1538 | - |
1539 | -def setup_filesystem_logging(logging_class, root_handler): |
1540 | - """Set the syncdameon extra loggin for MAC OS.""" |
1541 | - # there is nothing yet to be done here. |
1542 | |
1543 | === removed file 'ubuntuone/platform/logger/linux.py' |
1544 | --- ubuntuone/platform/logger/linux.py 2012-05-14 21:24:24 +0000 |
1545 | +++ ubuntuone/platform/logger/linux.py 1970-01-01 00:00:00 +0000 |
1546 | @@ -1,50 +0,0 @@ |
1547 | -# Author: Manuel de la Pena <manuel@canonical.com> |
1548 | -# |
1549 | -# Copyright 2011-2012 Canonical Ltd. |
1550 | -# |
1551 | -# This program is free software: you can redistribute it and/or modify it |
1552 | -# under the terms of the GNU General Public License version 3, as published |
1553 | -# by the Free Software Foundation. |
1554 | -# |
1555 | -# This program is distributed in the hope that it will be useful, but |
1556 | -# WITHOUT ANY WARRANTY; without even the implied warranties of |
1557 | -# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR |
1558 | -# PURPOSE. See the GNU General Public License for more details. |
1559 | -# |
1560 | -# You should have received a copy of the GNU General Public License along |
1561 | -# with this program. If not, see <http://www.gnu.org/licenses/>. |
1562 | -# |
1563 | -# In addition, as a special exception, the copyright holders give |
1564 | -# permission to link the code of portions of this program with the |
1565 | -# OpenSSL library under certain conditions as described in each |
1566 | -# individual source file, and distribute linked combinations |
1567 | -# including the two. |
1568 | -# You must obey the GNU General Public License in all respects |
1569 | -# for all of the code used other than OpenSSL. If you modify |
1570 | -# file(s) with this exception, you may extend this exception to your |
1571 | -# version of the file(s), but you are not obligated to do so. If you |
1572 | -# do not wish to do so, delete this exception statement from your |
1573 | -# version. If you delete this exception statement from all source |
1574 | -# files in the program, then also delete it here. |
1575 | -"""Provide platform logging settings.""" |
1576 | - |
1577 | -import logging |
1578 | - |
1579 | -import pyinotify |
1580 | - |
1581 | - |
1582 | -def get_filesystem_logger(): |
1583 | - """Return the logger used by the filesystem.""" |
1584 | - return getattr(pyinotify, 'log', logging.getLogger('pyinotify')) |
1585 | - |
1586 | - |
1587 | -def setup_filesystem_logging(filesystem_logger, root_handler): |
1588 | - """Set the extra logging to be used on linux.""" |
1589 | - # hook pyinotify logger, but remove the console handler first |
1590 | - for hdlr in filesystem_logger.handlers: |
1591 | - if isinstance(hdlr, logging.StreamHandler): |
1592 | - filesystem_logger.removeHandler(hdlr) |
1593 | - filesystem_logger.addHandler(root_handler) |
1594 | - filesystem_logger.setLevel(logging.ERROR) |
1595 | - filesystem_logger.propagate = False |
1596 | - return filesystem_logger |
1597 | |
1598 | === removed file 'ubuntuone/platform/logger/windows.py' |
1599 | --- ubuntuone/platform/logger/windows.py 2012-05-14 19:04:43 +0000 |
1600 | +++ ubuntuone/platform/logger/windows.py 1970-01-01 00:00:00 +0000 |
1601 | @@ -1,37 +0,0 @@ |
1602 | -# Author: Manuel de la Pena <manuel@canonical.com> |
1603 | -# |
1604 | -# Copyright 2012 Canonical Ltd. |
1605 | -# |
1606 | -# This program is free software: you can redistribute it and/or modify it |
1607 | -# under the terms of the GNU General Public License version 3, as published |
1608 | -# by the Free Software Foundation. |
1609 | -# |
1610 | -# This program is distributed in the hope that it will be useful, but |
1611 | -# WITHOUT ANY WARRANTY; without even the implied warranties of |
1612 | -# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR |
1613 | -# PURPOSE. See the GNU General Public License for more details. |
1614 | -# |
1615 | -# You should have received a copy of the GNU General Public License along |
1616 | -# with this program. If not, see <http://www.gnu.org/licenses/>. |
1617 | -# |
1618 | -# In addition, as a special exception, the copyright holders give |
1619 | -# permission to link the code of portions of this program with the |
1620 | -# OpenSSL library under certain conditions as described in each |
1621 | -# individual source file, and distribute linked combinations |
1622 | -# including the two. |
1623 | -# You must obey the GNU General Public License in all respects |
1624 | -# for all of the code used other than OpenSSL. If you modify |
1625 | -# file(s) with this exception, you may extend this exception to your |
1626 | -# version of the file(s), but you are not obligated to do so. If you |
1627 | -# do not wish to do so, delete this exception statement from your |
1628 | -# version. If you delete this exception statement from all source |
1629 | -# files in the program, then also delete it here. |
1630 | - |
1631 | - |
1632 | -def get_filesystem_logger(): |
1633 | - """Return the logger used in the filesystem.""" |
1634 | - |
1635 | - |
1636 | -def setup_filesystem_logging(logging_class, root_handler): |
1637 | - """Set the syncdameon extra loggin for windows.""" |
1638 | - # there is nothing yet to be done here. |
1639 | |
1640 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/__init__.py' |
1641 | --- ubuntuone/platform/tests/filesystem_notifications/__init__.py 2016-09-17 14:29:53 +0000 |
1642 | +++ ubuntuone/platform/tests/filesystem_notifications/__init__.py 2018-04-14 23:11:29 +0000 |
1643 | @@ -30,10 +30,10 @@ |
1644 | |
1645 | from twisted.internet import defer, reactor |
1646 | |
1647 | -from contrib.testing import testcase |
1648 | from ubuntuone.devtools.handlers import MementoHandler |
1649 | from ubuntuone.syncdaemon import event_queue, filesystem_manager |
1650 | from ubuntuone.syncdaemon.tritcask import Tritcask |
1651 | +from ubuntuone.testing import testcase |
1652 | |
1653 | |
1654 | class BaseFSMonitorTestCase(testcase.BaseTwistedTestCase): |
1655 | |
1656 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/common.py' |
1657 | --- ubuntuone/platform/tests/filesystem_notifications/common.py 2016-06-01 18:28:19 +0000 |
1658 | +++ ubuntuone/platform/tests/filesystem_notifications/common.py 2018-04-14 23:11:29 +0000 |
1659 | @@ -38,7 +38,7 @@ |
1660 | import itertools |
1661 | |
1662 | from twisted.internet import defer |
1663 | -from contrib.testing.testcase import BaseTwistedTestCase |
1664 | + |
1665 | from ubuntuone.devtools.handlers import MementoHandler |
1666 | from ubuntuone.platform.filesystem_notifications.pyinotify_agnostic import ( |
1667 | EventsCodes, |
1668 | @@ -56,6 +56,7 @@ |
1669 | ) |
1670 | from ubuntuone.platform.filesystem_notifications.monitor import ACTIONS |
1671 | from ubuntuone.platform.os_helper import get_os_valid_path |
1672 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
1673 | |
1674 | OP_FLAGS = EventsCodes.FLAG_COLLECTIONS['OP_FLAGS'] |
1675 | IS_DIR = EventsCodes.FLAG_COLLECTIONS['SPECIAL_FLAGS']['IN_ISDIR'] |
1676 | |
1677 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_darwin.py' |
1678 | --- ubuntuone/platform/tests/filesystem_notifications/test_darwin.py 2016-06-01 18:28:19 +0000 |
1679 | +++ ubuntuone/platform/tests/filesystem_notifications/test_darwin.py 2018-04-14 23:11:29 +0000 |
1680 | @@ -38,8 +38,6 @@ |
1681 | |
1682 | import fsevents |
1683 | |
1684 | -from contrib.testing.testcase import BaseTwistedTestCase |
1685 | - |
1686 | from ubuntuone.devtools.handlers import MementoHandler |
1687 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
1688 | common, |
1689 | @@ -63,6 +61,7 @@ |
1690 | BaseFSMonitorTestCase, |
1691 | common as common_tests, |
1692 | ) |
1693 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
1694 | |
1695 | |
1696 | # A reverse mapping for the tests |
1697 | |
1698 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_filesystem_notifications.py' |
1699 | --- ubuntuone/platform/tests/filesystem_notifications/test_filesystem_notifications.py 2016-06-03 19:52:03 +0000 |
1700 | +++ ubuntuone/platform/tests/filesystem_notifications/test_filesystem_notifications.py 2018-04-14 23:11:29 +0000 |
1701 | @@ -35,12 +35,6 @@ |
1702 | from twisted.trial import unittest |
1703 | from ubuntuone.devtools.handlers import MementoHandler |
1704 | |
1705 | -from contrib.testing.testcase import ( |
1706 | - BaseTwistedTestCase, |
1707 | - FakeVolumeManager, |
1708 | - skip_if_win32_missing_fs_event, |
1709 | - skip_if_darwin_missing_fs_event, |
1710 | -) |
1711 | from ubuntuone.platform import ( |
1712 | remove_file, |
1713 | remove_dir, |
1714 | @@ -53,6 +47,12 @@ |
1715 | filesystem_manager, |
1716 | filesystem_notifications, |
1717 | ) |
1718 | +from ubuntuone.testing.testcase import ( |
1719 | + BaseTwistedTestCase, |
1720 | + FakeVolumeManager, |
1721 | + skip_if_win32_missing_fs_event, |
1722 | + skip_if_darwin_missing_fs_event, |
1723 | +) |
1724 | |
1725 | |
1726 | class IgnoreFileTests(unittest.TestCase): |
1727 | |
1728 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_fsevents_daemon.py' |
1729 | --- ubuntuone/platform/tests/filesystem_notifications/test_fsevents_daemon.py 2016-06-01 18:28:19 +0000 |
1730 | +++ ubuntuone/platform/tests/filesystem_notifications/test_fsevents_daemon.py 2018-04-14 23:11:29 +0000 |
1731 | @@ -32,7 +32,6 @@ |
1732 | |
1733 | from twisted.internet import defer, protocol |
1734 | |
1735 | -from contrib.testing.testcase import BaseTwistedTestCase |
1736 | from ubuntuone import fseventsd |
1737 | try: |
1738 | from ubuntuone.devtools.testcases import skipIf |
1739 | @@ -50,6 +49,7 @@ |
1740 | IN_MOVED_FROM, |
1741 | IN_MOVED_TO, |
1742 | ) |
1743 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
1744 | |
1745 | |
1746 | class FakeServerProtocol(protocol.Protocol): |
1747 | |
1748 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_linux.py' |
1749 | --- ubuntuone/platform/tests/filesystem_notifications/test_linux.py 2016-06-01 18:28:19 +0000 |
1750 | +++ ubuntuone/platform/tests/filesystem_notifications/test_linux.py 2018-04-14 23:11:29 +0000 |
1751 | @@ -35,7 +35,6 @@ |
1752 | from twisted.internet import defer, reactor |
1753 | from twisted.trial.unittest import TestCase as PlainTestCase |
1754 | |
1755 | -from contrib.testing import testcase |
1756 | from ubuntuone.syncdaemon import volume_manager |
1757 | from ubuntuone.platform.filesystem_notifications import notify_processor |
1758 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
1759 | @@ -44,6 +43,7 @@ |
1760 | from ubuntuone.platform.tests.filesystem_notifications import ( |
1761 | BaseFSMonitorTestCase, |
1762 | ) |
1763 | +from ubuntuone.testing import testcase |
1764 | |
1765 | |
1766 | class FakeVolume(object): |
1767 | |
1768 | === modified file 'ubuntuone/platform/tests/filesystem_notifications/test_windows.py' |
1769 | --- ubuntuone/platform/tests/filesystem_notifications/test_windows.py 2016-06-01 18:28:19 +0000 |
1770 | +++ ubuntuone/platform/tests/filesystem_notifications/test_windows.py 2018-04-14 23:11:29 +0000 |
1771 | @@ -35,8 +35,6 @@ |
1772 | from twisted.internet import defer |
1773 | from win32file import FILE_NOTIFY_INFORMATION |
1774 | |
1775 | -from contrib.testing.testcase import BaseTwistedTestCase |
1776 | - |
1777 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
1778 | common, |
1779 | windows as filesystem_notifications, |
1780 | @@ -58,6 +56,7 @@ |
1781 | from ubuntuone.platform.tests.filesystem_notifications import ( |
1782 | common as common_tests, |
1783 | ) |
1784 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
1785 | |
1786 | |
1787 | class FakeEventsProcessor(object): |
1788 | |
1789 | === modified file 'ubuntuone/platform/tests/ipc/test_linux.py' |
1790 | --- ubuntuone/platform/tests/ipc/test_linux.py 2016-07-30 21:58:48 +0000 |
1791 | +++ ubuntuone/platform/tests/ipc/test_linux.py 2018-04-14 23:11:29 +0000 |
1792 | @@ -38,11 +38,6 @@ |
1793 | except ImportError: |
1794 | from ubuntuone.devtools.testcase import DBusTestCase |
1795 | |
1796 | -from contrib.testing.testcase import ( |
1797 | - FakeMainTestCase, |
1798 | - FakedService, |
1799 | - FakedObject, |
1800 | -) |
1801 | from ubuntuone.platform.ipc import linux as dbus_interface |
1802 | from ubuntuone.platform.ipc.linux import ( |
1803 | DBusExposedObject, |
1804 | @@ -57,6 +52,11 @@ |
1805 | DBUS_IFACE_LAUNCHER_NAME, |
1806 | ) |
1807 | from ubuntuone.platform.tools.linux import DBusClient |
1808 | +from ubuntuone.testing.testcase import ( |
1809 | + FakeMainTestCase, |
1810 | + FakedService, |
1811 | + FakedObject, |
1812 | +) |
1813 | |
1814 | |
1815 | class FakeNetworkManager(DBusExposedObject): |
1816 | |
1817 | === modified file 'ubuntuone/platform/tests/ipc/test_perspective_broker.py' |
1818 | --- ubuntuone/platform/tests/ipc/test_perspective_broker.py 2018-03-08 19:39:13 +0000 |
1819 | +++ ubuntuone/platform/tests/ipc/test_perspective_broker.py 2018-04-14 23:11:29 +0000 |
1820 | @@ -41,10 +41,6 @@ |
1821 | ) |
1822 | from twisted.trial.unittest import TestCase |
1823 | |
1824 | -from contrib.testing.testcase import ( |
1825 | - FakedService, |
1826 | - FakeMainTestCase, |
1827 | -) |
1828 | try: |
1829 | from ubuntuone.devtools.testcases import skipIf, skipIfOS |
1830 | except ImportError: |
1831 | @@ -80,6 +76,10 @@ |
1832 | from ubuntuone.networkstate.networkstates import ONLINE |
1833 | except ImportError: |
1834 | from ubuntuone.networkstate import ONLINE |
1835 | +from ubuntuone.testing.testcase import ( |
1836 | + FakedService, |
1837 | + FakeMainTestCase, |
1838 | +) |
1839 | |
1840 | |
1841 | class NoTestCase(object): |
1842 | |
1843 | === modified file 'ubuntuone/platform/tests/linux/test_vm.py' |
1844 | --- ubuntuone/platform/tests/linux/test_vm.py 2016-07-30 21:58:48 +0000 |
1845 | +++ ubuntuone/platform/tests/linux/test_vm.py 2018-04-14 23:11:29 +0000 |
1846 | @@ -1,7 +1,3 @@ |
1847 | -# tests.platform.linux - linux platform tests |
1848 | -# |
1849 | -# Author: Guillermo Gonzalez <guillermo.gonzalez@canonical.com> |
1850 | -# |
1851 | # Copyright 2010-2012 Canonical Ltd. |
1852 | # |
1853 | # This program is free software: you can redistribute it and/or modify it |
1854 | @@ -35,7 +31,6 @@ |
1855 | |
1856 | from twisted.internet import defer |
1857 | |
1858 | -from contrib.testing.testcase import FakeMain |
1859 | from ubuntuone.storageprotocol import request |
1860 | from ubuntuone.syncdaemon.tests.test_vm import ( |
1861 | MetadataTestCase, |
1862 | @@ -48,6 +43,7 @@ |
1863 | LegacyShareFileShelf, _Share, Share, Shared, Root, UDF, _UDF, |
1864 | MetadataUpgrader, VMFileShelf, |
1865 | ) |
1866 | +from ubuntuone.testing.testcase import FakeMain |
1867 | |
1868 | |
1869 | class VolumesTests(BaseVolumeManagerTests): |
1870 | |
1871 | === modified file 'ubuntuone/platform/tests/os_helper/test_os_helper.py' |
1872 | --- ubuntuone/platform/tests/os_helper/test_os_helper.py 2018-03-08 19:39:13 +0000 |
1873 | +++ ubuntuone/platform/tests/os_helper/test_os_helper.py 2018-04-14 23:11:29 +0000 |
1874 | @@ -36,10 +36,6 @@ |
1875 | |
1876 | from twisted.internet import defer |
1877 | |
1878 | -from contrib.testing.testcase import ( |
1879 | - BaseTwistedTestCase, |
1880 | - skip_if_win32_and_uses_readonly, |
1881 | -) |
1882 | from ubuntuone.platform import ( |
1883 | access, |
1884 | allow_writes, |
1885 | @@ -66,6 +62,10 @@ |
1886 | stat_path, |
1887 | walk, |
1888 | ) |
1889 | +from ubuntuone.testing.testcase import ( |
1890 | + BaseTwistedTestCase, |
1891 | + skip_if_win32_and_uses_readonly, |
1892 | +) |
1893 | |
1894 | |
1895 | class BaseTestCase(BaseTwistedTestCase): |
1896 | |
1897 | === modified file 'ubuntuone/platform/tests/os_helper/test_windows.py' |
1898 | --- ubuntuone/platform/tests/os_helper/test_windows.py 2016-06-01 18:28:19 +0000 |
1899 | +++ ubuntuone/platform/tests/os_helper/test_windows.py 2018-04-14 23:11:29 +0000 |
1900 | @@ -36,8 +36,6 @@ |
1901 | from twisted.internet import defer |
1902 | from twisted.trial.unittest import TestCase |
1903 | |
1904 | -from contrib.testing.testcase import BaseTwistedTestCase |
1905 | - |
1906 | from ntsecuritycon import ( |
1907 | FILE_ALL_ACCESS, |
1908 | FILE_GENERIC_READ, |
1909 | @@ -73,6 +71,7 @@ |
1910 | OSWrapperTests, |
1911 | WalkTests, |
1912 | ) |
1913 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
1914 | |
1915 | |
1916 | # ugly trick to stop pylint for complaining about |
1917 | |
1918 | === removed file 'ubuntuone/platform/tests/test_logger.py' |
1919 | --- ubuntuone/platform/tests/test_logger.py 2012-06-21 18:58:50 +0000 |
1920 | +++ ubuntuone/platform/tests/test_logger.py 1970-01-01 00:00:00 +0000 |
1921 | @@ -1,47 +0,0 @@ |
1922 | -# -*- coding: utf-8 -*- |
1923 | -# |
1924 | -# Copyright 2011-2012 Canonical Ltd. |
1925 | -# |
1926 | -# This program is free software: you can redistribute it and/or modify it |
1927 | -# under the terms of the GNU General Public License version 3, as published |
1928 | -# by the Free Software Foundation. |
1929 | -# |
1930 | -# This program is distributed in the hope that it will be useful, but |
1931 | -# WITHOUT ANY WARRANTY; without even the implied warranties of |
1932 | -# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR |
1933 | -# PURPOSE. See the GNU General Public License for more details. |
1934 | -# |
1935 | -# You should have received a copy of the GNU General Public License along |
1936 | -# with this program. If not, see <http://www.gnu.org/licenses/>. |
1937 | -# |
1938 | -# In addition, as a special exception, the copyright holders give |
1939 | -# permission to link the code of portions of this program with the |
1940 | -# OpenSSL library under certain conditions as described in each |
1941 | -# individual source file, and distribute linked combinations |
1942 | -# including the two. |
1943 | -# You must obey the GNU General Public License in all respects |
1944 | -# for all of the code used other than OpenSSL. If you modify |
1945 | -# file(s) with this exception, you may extend this exception to your |
1946 | -# version of the file(s), but you are not obligated to do so. If you |
1947 | -# do not wish to do so, delete this exception statement from your |
1948 | -# version. If you delete this exception statement from all source |
1949 | -# files in the program, then also delete it here. |
1950 | -"""Platform independent tests for the XDG constants.""" |
1951 | - |
1952 | -import os |
1953 | - |
1954 | -from dirspec.basedir import xdg_cache_home |
1955 | -from twisted.trial.unittest import TestCase |
1956 | - |
1957 | -from ubuntuone.platform.logger import ubuntuone_log_dir |
1958 | - |
1959 | - |
1960 | -class TestBaseDirectory(TestCase): |
1961 | - """Tests for the BaseDirectory module.""" |
1962 | - |
1963 | - def test_ubuntuone_log_dir(self): |
1964 | - """The ubuntuone_log_dir is correct.""" |
1965 | - expected = os.path.join(xdg_cache_home, |
1966 | - 'ubuntuone', 'log') |
1967 | - self.assertEqual(expected, ubuntuone_log_dir) |
1968 | - self.assertTrue(os.path.exists(expected)) |
1969 | |
1970 | === modified file 'ubuntuone/platform/tests/test_tools.py' |
1971 | --- ubuntuone/platform/tests/test_tools.py 2016-09-17 01:06:23 +0000 |
1972 | +++ ubuntuone/platform/tests/test_tools.py 2018-04-14 23:11:29 +0000 |
1973 | @@ -36,8 +36,6 @@ |
1974 | from ubuntuone.devtools.handlers import MementoHandler |
1975 | from ubuntuone.devtools.testcases import skipTest, skipIfNotOS |
1976 | |
1977 | -from contrib.testing.testcase import FakeCommand |
1978 | - |
1979 | from ubuntuone.syncdaemon import ( |
1980 | action_queue, |
1981 | event_queue, |
1982 | @@ -49,6 +47,7 @@ |
1983 | ) |
1984 | from ubuntuone.platform import tools |
1985 | from ubuntuone.platform.tests import IPCTestCase |
1986 | +from ubuntuone.testing.testcase import FakeCommand |
1987 | |
1988 | |
1989 | SOME_ERROR = 'CRASH BOOM BANG' |
1990 | |
1991 | === modified file 'ubuntuone/platform/tests/test_u1sdtool.py' |
1992 | --- ubuntuone/platform/tests/test_u1sdtool.py 2016-06-01 18:28:19 +0000 |
1993 | +++ ubuntuone/platform/tests/test_u1sdtool.py 2018-04-14 23:11:29 +0000 |
1994 | @@ -35,11 +35,6 @@ |
1995 | |
1996 | from twisted.internet import defer |
1997 | |
1998 | -from contrib.testing.testcase import ( |
1999 | - FakeCommand, |
2000 | - FakeDownload, |
2001 | - FakeUpload, |
2002 | -) |
2003 | from ubuntuone.syncdaemon.vm_helper import get_udf_path |
2004 | from ubuntuone.syncdaemon.volume_manager import ( |
2005 | ACCESS_LEVEL_RO, |
2006 | @@ -61,6 +56,11 @@ |
2007 | show_waiting_metadata, |
2008 | ) |
2009 | from ubuntuone.platform.tests.test_tools import TestToolsBase |
2010 | +from ubuntuone.testing.testcase import ( |
2011 | + FakeCommand, |
2012 | + FakeDownload, |
2013 | + FakeUpload, |
2014 | +) |
2015 | |
2016 | |
2017 | class U1SDToolTests(TestToolsBase): |
2018 | |
2019 | === modified file 'ubuntuone/syncdaemon/action_queue.py' |
2020 | --- ubuntuone/syncdaemon/action_queue.py 2018-03-14 21:01:56 +0000 |
2021 | +++ ubuntuone/syncdaemon/action_queue.py 2018-04-14 23:11:29 +0000 |
2022 | @@ -47,7 +47,8 @@ |
2023 | from twisted.internet import error as twisted_errors |
2024 | from twisted.python.failure import Failure, DefaultException |
2025 | |
2026 | -from ubuntuone import clientdefs |
2027 | +from ubuntuone import clientdefs, config |
2028 | +from ubuntuone.logger import TRACE |
2029 | from ubuntuone.platform import platform, remove_file |
2030 | from ubuntuone.storageprotocol import protocol_pb2, content_hash |
2031 | from ubuntuone.storageprotocol import errors as protocol_errors |
2032 | @@ -56,9 +57,9 @@ |
2033 | ThrottlingStorageClientFactory, |
2034 | ) |
2035 | from ubuntuone.storageprotocol.context import get_ssl_context |
2036 | +from ubuntuone.syncdaemon import offload_queue |
2037 | from ubuntuone.syncdaemon.interfaces import IActionQueue, IMarker |
2038 | -from ubuntuone.syncdaemon.logger import mklog, TRACE |
2039 | -from ubuntuone.syncdaemon import config, offload_queue |
2040 | + |
2041 | |
2042 | logger = logging.getLogger("ubuntuone.SyncDaemon.ActionQueue") |
2043 | |
2044 | @@ -69,6 +70,96 @@ |
2045 | TRANSFER_PROGRESS_THRESHOLD = 64 * 1024 |
2046 | |
2047 | |
2048 | +class mklog(object): |
2049 | + """A logger that keeps track of where it's being called from. |
2050 | + |
2051 | + Create a logger that keeps track of the method where it's being |
2052 | + called from, in order to make more informative messages. |
2053 | + |
2054 | + """ |
2055 | + __slots__ = ('logger', 'zipped_desc') |
2056 | + |
2057 | + def __init__(self, _logger, _method, _share, _uid, *args, **kwargs): |
2058 | + # args are _-prepended to lower the chances of them |
2059 | + # conflicting with kwargs |
2060 | + |
2061 | + all_args = [] |
2062 | + for arg in args: |
2063 | + all_args.append( |
2064 | + repr(arg).decode('ascii', 'replace').encode('ascii', 'replace') |
2065 | + ) |
2066 | + for k, v in kwargs.items(): |
2067 | + v = repr(v).decode('ascii', 'replace').encode('ascii', 'replace') |
2068 | + all_args.append("%s=%r" % (k, v)) |
2069 | + args = ", ".join(all_args) |
2070 | + |
2071 | + desc = "%-28s share:%-40r node:%-40r %s(%s) " % (_method, _share, |
2072 | + _uid, _method, args) |
2073 | + desc = desc.replace('%', '%%') |
2074 | + self.zipped_desc = zlib.compress(desc, 9) |
2075 | + self.logger = _logger |
2076 | + |
2077 | + def _log(self, logger_func, *args): |
2078 | + """Generalized form of the different logging methods.""" |
2079 | + desc = zlib.decompress(self.zipped_desc) |
2080 | + text = desc + args[0] |
2081 | + logger_func(text, *args[1:]) |
2082 | + |
2083 | + def debug(self, *args): |
2084 | + """Log at level DEBUG""" |
2085 | + self._log(self.logger.debug, *args) |
2086 | + |
2087 | + def info(self, *args): |
2088 | + """Log at level INFO""" |
2089 | + self._log(self.logger.info, *args) |
2090 | + |
2091 | + def warn(self, *args): |
2092 | + """Log at level WARN""" |
2093 | + self._log(self.logger.warn, *args) |
2094 | + |
2095 | + def error(self, *args): |
2096 | + """Log at level ERROR""" |
2097 | + self._log(self.logger.error, *args) |
2098 | + |
2099 | + def exception(self, *args): |
2100 | + """Log an exception""" |
2101 | + self._log(self.logger.exception, *args) |
2102 | + |
2103 | + def note(self, *args): |
2104 | + """Log at NOTE level (high-priority info) """ |
2105 | + self._log(self.logger.high, *args) |
2106 | + |
2107 | + def trace(self, *args): |
2108 | + """Log at level TRACE""" |
2109 | + self._log(self.logger.trace, *args) |
2110 | + |
2111 | + def callbacks(self, success_message='success', success_arg='', |
2112 | + failure_message='failure'): |
2113 | + """ |
2114 | + Return a callback and an errback that log success or failure |
2115 | + messages. |
2116 | + |
2117 | + The callback/errback pair are pass-throughs; they don't |
2118 | + interfere in the callback/errback chain of the deferred you |
2119 | + add them to. |
2120 | + """ |
2121 | + def callback(arg, success_arg=success_arg): |
2122 | + "it worked!" |
2123 | + if callable(success_arg): |
2124 | + success_arg = success_arg(arg) |
2125 | + self.debug(success_message, success_arg) |
2126 | + return arg |
2127 | + |
2128 | + def errback(failure): |
2129 | + "it failed!" |
2130 | + self.error(failure_message, failure.getErrorMessage()) |
2131 | + self.debug('traceback follows:\n\n' + failure.getTraceback(), '') |
2132 | + return failure |
2133 | + return callback, errback |
2134 | + |
2135 | + |
2136 | + |
2137 | + |
2138 | class DeferredInterrupted(Exception): |
2139 | """To stop the run when pausing.""" |
2140 | |
2141 | @@ -966,12 +1057,12 @@ |
2142 | ", client (%r) is not self.client (%r)." |
2143 | logger.warning(msg, req_name, client, self.client) |
2144 | return |
2145 | - except request_error, failure: |
2146 | + except request_error as failure: |
2147 | event = event_error |
2148 | self.event_queue.push(event_error, error=str(failure)) |
2149 | except (twisted_errors.ConnectionLost, |
2150 | twisted_errors.ConnectionDone, |
2151 | - OpenSSL.SSL.Error), failure: |
2152 | + OpenSSL.SSL.Error) as failure: |
2153 | # connection ended, just don't do anything: the SYS_CONNECTION_ETC |
2154 | # will be sent by normal client/protocol mechanisms, and logging |
2155 | # will be done later in this function. |
2156 | |
2157 | === modified file 'ubuntuone/syncdaemon/file_shelf.py' |
2158 | --- ubuntuone/syncdaemon/file_shelf.py 2015-09-19 23:15:50 +0000 |
2159 | +++ ubuntuone/syncdaemon/file_shelf.py 2018-04-14 23:11:29 +0000 |
2160 | @@ -82,7 +82,7 @@ |
2161 | remove_file(path) |
2162 | make_dir(path, True) |
2163 | # else, the dir is already there |
2164 | - except OSError, e: |
2165 | + except OSError as e: |
2166 | if e.errno == errno.ENOENT: |
2167 | # the file or dir don't exist |
2168 | make_dir(path, True) |
2169 | |
2170 | === modified file 'ubuntuone/syncdaemon/filesystem_manager.py' |
2171 | --- ubuntuone/syncdaemon/filesystem_manager.py 2016-06-01 21:38:23 +0000 |
2172 | +++ ubuntuone/syncdaemon/filesystem_manager.py 2018-04-14 23:11:29 +0000 |
2173 | @@ -1,7 +1,3 @@ |
2174 | -# ubuntuone.syncdaemon.filesystem_manager - FSM |
2175 | -# |
2176 | -# Author: Facundo Batista <facundo@canonical.com> |
2177 | -# |
2178 | # Copyright 2009-2012 Canonical Ltd. |
2179 | # |
2180 | # This program is free software: you can redistribute it and/or modify it |
2181 | @@ -43,8 +39,9 @@ |
2182 | import stat |
2183 | import uuid |
2184 | |
2185 | +from ubuntuone import config |
2186 | from ubuntuone.clientdefs import NAME |
2187 | -from ubuntuone.syncdaemon import file_shelf, config |
2188 | +from ubuntuone.syncdaemon import file_shelf |
2189 | from ubuntuone.syncdaemon.volume_manager import VolumeDoesNotExist |
2190 | from ubuntuone.syncdaemon.interfaces import IMarker |
2191 | from ubuntuone.syncdaemon.marker import MDMarker |
2192 | @@ -162,12 +159,7 @@ |
2193 | # |
2194 | # |
2195 | |
2196 | - |
2197 | -# fsm logger |
2198 | -fsm_logger = logging.getLogger('ubuntuone.SyncDaemon.fsm') |
2199 | -logger = functools.partial(fsm_logger.log, logging.INFO) |
2200 | -log_warning = functools.partial(fsm_logger.log, logging.WARNING) |
2201 | -log_debug = functools.partial(fsm_logger.log, logging.DEBUG) |
2202 | +logger = logging.getLogger(__name__) |
2203 | |
2204 | is_forbidden = set("info path node_id share_id is_dir".split()).intersection |
2205 | |
2206 | @@ -332,11 +324,6 @@ |
2207 | self._trash_dir = os.path.join(data_dir, 'trash') |
2208 | self._movelimbo_dir = os.path.join(data_dir, 'move_limbo') |
2209 | self.partials_dir = partials_dir |
2210 | - if not path_exists(self.partials_dir): |
2211 | - make_dir(self.partials_dir, recursive=True) |
2212 | - else: |
2213 | - # ensure that we can write in the partials_dir |
2214 | - set_dir_readwrite(self.partials_dir) |
2215 | self.fs = TritcaskShelf(FSM_ROW_TYPE, db) |
2216 | self.old_fs = file_shelf.CachedFileShelf( |
2217 | fsmdir, cache_size=1500, cache_compact_threshold=4) |
2218 | @@ -368,8 +355,9 @@ |
2219 | # load some config |
2220 | self.user_config = config.get_user_config() |
2221 | |
2222 | - logger("initialized: idx_path: %s, idx_node_id: %s, shares: %s", |
2223 | - len(self._idx_path), len(self._idx_node_id), len(self.shares)) |
2224 | + logger.info( |
2225 | + "initialized: idx_path: %s, idx_node_id: %s, shares: %s", |
2226 | + len(self._idx_path), len(self._idx_node_id), len(self.shares)) |
2227 | |
2228 | def register_eq(self, eq): |
2229 | """Registers an EventQueue here.""" |
2230 | @@ -399,8 +387,9 @@ |
2231 | self._get_share(mdobj["share_id"]) |
2232 | except VolumeDoesNotExist: |
2233 | # oops, the share is gone!, invalidate this mdid |
2234 | - log_warning('Share %r disappeared! deleting mdid: %s', |
2235 | - mdobj['share_id'], mdid) |
2236 | + logger.warning( |
2237 | + 'Share %r disappeared! deleting mdid: %s', |
2238 | + mdobj['share_id'], mdid) |
2239 | del self.old_fs[mdid] |
2240 | return False |
2241 | else: |
2242 | @@ -440,7 +429,7 @@ |
2243 | |
2244 | def _load_metadata_None(self, old_version): |
2245 | """Loads metadata from when it wasn't even versioned.""" |
2246 | - logger("loading metadata from old version %r", old_version) |
2247 | + logger.info("loading metadata from old version %r", old_version) |
2248 | |
2249 | for mdid, mdobj in self._safe_old_fs_iteritems(): |
2250 | # assure path are bytes (new to version 2) |
2251 | @@ -485,7 +474,7 @@ |
2252 | |
2253 | def _load_metadata_1(self, old_version): |
2254 | """Loads metadata from version 1.""" |
2255 | - logger("loading metadata from old version %r", old_version) |
2256 | + logger.info("loading metadata from old version %r", old_version) |
2257 | |
2258 | for mdid, mdobj in self._safe_old_fs_iteritems(): |
2259 | # assure path are bytes (new to version 2) |
2260 | @@ -528,7 +517,7 @@ |
2261 | |
2262 | def _load_metadata_2(self, old_version): |
2263 | """Loads metadata from version 2.""" |
2264 | - logger("loading metadata from old version %r", old_version) |
2265 | + logger.info("loading metadata from old version %r", old_version) |
2266 | |
2267 | for mdid, mdobj in self._safe_old_fs_iteritems(): |
2268 | # convert the "yet without content" hashes to "" (new to v3) |
2269 | @@ -563,7 +552,7 @@ |
2270 | |
2271 | def _load_metadata_3(self, old_version): |
2272 | """Loads metadata from version 3.""" |
2273 | - logger("loading metadata from old version %r", old_version) |
2274 | + logger.info("loading metadata from old version %r", old_version) |
2275 | |
2276 | for mdid, mdobj in self._safe_old_fs_iteritems(): |
2277 | # fix the path |
2278 | @@ -592,7 +581,7 @@ |
2279 | |
2280 | def _load_metadata_4(self, old_version): |
2281 | """Loads metadata from version 4.""" |
2282 | - logger("loading metadata from old version %r", old_version) |
2283 | + logger.info("loading metadata from old version %r", old_version) |
2284 | |
2285 | for mdid, mdobj in self._safe_old_fs_iteritems(): |
2286 | # add the generation number (new to v5) |
2287 | @@ -618,7 +607,7 @@ |
2288 | |
2289 | def _load_metadata_5(self, old_version): |
2290 | """Loads metadata of last version.""" |
2291 | - logger("loading metadata from old version %r", old_version) |
2292 | + logger.info("loading metadata from old version %r", old_version) |
2293 | |
2294 | for mdid, mdobj in self._safe_old_fs_iteritems(): |
2295 | abspath = self.get_abspath(mdobj["share_id"], mdobj["path"]) |
2296 | @@ -639,7 +628,7 @@ |
2297 | |
2298 | def _load_metadata_updated(self): |
2299 | """Loads metadata of last version.""" |
2300 | - logger("loading updated metadata") |
2301 | + logger.info("loading updated metadata") |
2302 | for mdid, mdobj in self.fs.items(): |
2303 | try: |
2304 | abspath = self.get_abspath(mdobj["share_id"], mdobj["path"]) |
2305 | @@ -649,19 +638,20 @@ |
2306 | continue |
2307 | if abspath in self._idx_path: |
2308 | # oh, we already have this path in the idx. |
2309 | - log_warning("Path already in the index: %s", abspath) |
2310 | + logger.warning("Path already in the index: %s", abspath) |
2311 | current_mdobj = self.fs[self._idx_path[abspath]] |
2312 | if current_mdobj['info']['created'] < mdobj['info']['created']: |
2313 | - log_debug("Replacing and deleting node: %s with newer " |
2314 | - "node: %s", current_mdobj['mdid'], mdid) |
2315 | + logger.debug( |
2316 | + "Replacing and deleting node: %s with newer node: %s", |
2317 | + current_mdobj['mdid'], mdid) |
2318 | self._idx_path[abspath] = mdid |
2319 | # and delete the old node |
2320 | del self.fs[current_mdobj['mdid']] |
2321 | else: |
2322 | # do nothing if the current mdobj is newer |
2323 | - log_debug("The node: %s is newer than: %s, " |
2324 | - "leaving it alone and deleting the old one.", |
2325 | - current_mdobj['mdid'], mdid) |
2326 | + logger.debug( |
2327 | + "The node: %s is newer than: %s, leaving it alone and " |
2328 | + "deleting the old one.", current_mdobj['mdid'], mdid) |
2329 | # but delete the old node |
2330 | del self.fs[mdid] |
2331 | else: |
2332 | @@ -691,8 +681,9 @@ |
2333 | if node_id is not None: |
2334 | self._set_node_id(newobj, node_id, path) |
2335 | |
2336 | - log_debug("create: path=%r mdid=%r share_id=%r node_id=%r is_dir=%r", |
2337 | - path, mdid, share_id, None, is_dir) |
2338 | + logger.debug( |
2339 | + "create: path=%r mdid=%r share_id=%r node_id=%r is_dir=%r", |
2340 | + path, mdid, share_id, None, is_dir) |
2341 | self.fs[mdid] = newobj |
2342 | |
2343 | # adjust the index |
2344 | @@ -713,8 +704,9 @@ |
2345 | if mdobj["node_id"] is not None: |
2346 | # the object is already there! it's ok if it has the same id |
2347 | if mdobj["node_id"] == node_id: |
2348 | - log_warning("set_node_id (repeated!): path=%r mdid=%r " |
2349 | - "node_id=%r", path, mdobj['mdid'], node_id) |
2350 | + logger.warning( |
2351 | + "set_node_id (repeated!): path=%r mdid=%r node_id=%r", |
2352 | + path, mdobj['mdid'], node_id) |
2353 | return |
2354 | msg = "The path %r already has node_id (%r)" % (path, node_id) |
2355 | raise ValueError(msg) |
2356 | @@ -726,8 +718,9 @@ |
2357 | mdobj["node_id"] = node_id |
2358 | mdobj["info"]["node_id_assigned"] = time.time() |
2359 | |
2360 | - log_debug("set_node_id: path=%r mdid=%r share_id=%r node_id=%r", |
2361 | - path, mdobj['mdid'], share_id, node_id) |
2362 | + logger.debug( |
2363 | + "set_node_id: path=%r mdid=%r share_id=%r node_id=%r", |
2364 | + path, mdobj['mdid'], share_id, node_id) |
2365 | |
2366 | def get_mdobjs_by_share_id(self, share_id, base_path=None): |
2367 | """Get all the mdobjs from a share. |
2368 | @@ -811,7 +804,7 @@ |
2369 | raise ValueError("The following attributes can not be set " |
2370 | "externally: %s" % forbidden) |
2371 | |
2372 | - log_debug("set mdid=%r: %s", mdid, kwargs) |
2373 | + logger.debug("set mdid=%r: %s", mdid, kwargs) |
2374 | mdobj = self.fs[mdid] |
2375 | for k, v in kwargs.items(): |
2376 | mdobj[k] = v |
2377 | @@ -857,7 +850,7 @@ |
2378 | self.eq.rm_from_mute_filter(expected_event, |
2379 | path_from=path_from, path_to=path_to) |
2380 | m = "IOError %s when trying to move file/dir %r" |
2381 | - log_warning(m, e, path_from) |
2382 | + logger.warning(m, e, path_from) |
2383 | self.moved(new_share_id, path_from, path_to) |
2384 | |
2385 | def moved(self, new_share_id, path_from, path_to): |
2386 | @@ -865,8 +858,9 @@ |
2387 | path_from = normpath(path_from) |
2388 | path_to = normpath(path_to) |
2389 | mdid = self._idx_path.pop(path_from) |
2390 | - log_debug("move_file: mdid=%r path_from=%r path_to=%r", |
2391 | - mdid, path_from, path_to) |
2392 | + logger.debug( |
2393 | + "move_file: mdid=%r path_from=%r path_to=%r", |
2394 | + mdid, path_from, path_to) |
2395 | |
2396 | # if the move overwrites other file, send it to trash |
2397 | if path_to in self._idx_path: |
2398 | @@ -890,7 +884,8 @@ |
2399 | try: |
2400 | mdobj["stat"] = stat_path(path_to) # needed if not the same FS |
2401 | except OSError: |
2402 | - log_warning("Got an OSError while getting the stat of %r", path_to) |
2403 | + logger.warning( |
2404 | + "Got an OSError while getting the stat of %r", path_to) |
2405 | self.fs[mdid] = mdobj |
2406 | |
2407 | if mdobj["is_dir"]: |
2408 | @@ -916,7 +911,7 @@ |
2409 | path = normpath(path) |
2410 | mdid = self._idx_path[path] |
2411 | mdobj = self.fs[mdid] |
2412 | - log_debug("delete metadata: path=%r mdid=%r", path, mdid) |
2413 | + logger.debug("delete metadata: path=%r mdid=%r", path, mdid) |
2414 | |
2415 | # adjust all |
2416 | del self._idx_path[path] |
2417 | @@ -933,15 +928,16 @@ |
2418 | subtree = self.get_paths_starting_with(path, include_base=False) |
2419 | for p, is_dir in subtree: |
2420 | if self.changed(path=p) == self.CHANGED_LOCAL: |
2421 | - logger("Conflicting dir on remove because %r is local", p) |
2422 | + logger.info("Conflicting dir on remove because %r is local", p) |
2423 | raise DirectoryNotRemovable() |
2424 | |
2425 | # check disk searching for previous conflicts |
2426 | for (dirpath, dirnames, filenames) in walk(path): |
2427 | for fname in filenames + dirnames: |
2428 | if fname.endswith(self.CONFLICT_SUFFIX): |
2429 | - logger("Conflicting dir on remove because of previous " |
2430 | - "conflict on: %r", os.path.join(dirpath, fname)) |
2431 | + logger.info( |
2432 | + "Conflicting dir on remove because of previous " |
2433 | + "conflict on: %r", os.path.join(dirpath, fname)) |
2434 | raise DirectoryNotRemovable() |
2435 | |
2436 | return subtree |
2437 | @@ -952,7 +948,7 @@ |
2438 | path = normpath(path) |
2439 | mdid = self._idx_path[path] |
2440 | mdobj = self.fs[mdid] |
2441 | - log_debug("delete: path=%r mdid=%r", path, mdid) |
2442 | + logger.debug("delete: path=%r mdid=%r", path, mdid) |
2443 | |
2444 | is_dir = self.is_dir(path=path) |
2445 | if is_dir: |
2446 | @@ -995,8 +991,8 @@ |
2447 | |
2448 | except OSError, e: |
2449 | self.eq.rm_from_mute_filter(filter_event, path=path) |
2450 | - log_warning("OSError %s when trying to remove file/dir %r", |
2451 | - e, path) |
2452 | + logger.exception( |
2453 | + "OSError when trying to remove file/dir %r", path) |
2454 | |
2455 | self.delete_metadata(path) |
2456 | |
2457 | @@ -1004,7 +1000,7 @@ |
2458 | """Move a file/dir to its .conflict.""" |
2459 | mdobj = self.fs[mdid] |
2460 | path = self.get_abspath(mdobj['share_id'], mdobj['path']) |
2461 | - log_debug("move_to_conflict: path=%r mdid=%r", path, mdid) |
2462 | + logger.debug("move_to_conflict: path=%r mdid=%r", path, mdid) |
2463 | base_to_path = to_path = path + self.CONFLICT_SUFFIX |
2464 | ind = 0 |
2465 | while path_exists(to_path): |
2466 | @@ -1025,7 +1021,7 @@ |
2467 | self.eq.rm_from_mute_filter(expected_event, path=path) |
2468 | if e.errno == errno.ENOENT: |
2469 | m = "Already removed when trying to move to conflict: %r" |
2470 | - log_warning(m, path) |
2471 | + logger.warning(m, path) |
2472 | else: |
2473 | raise |
2474 | |
2475 | @@ -1040,7 +1036,7 @@ |
2476 | # it that, :(. We handle this here because it's possible |
2477 | # and correct that the path is not there anymore |
2478 | m = "Error %s when trying to remove the watch on %r" |
2479 | - log_warning(m, e, path) |
2480 | + logger.warning(m, e, path) |
2481 | |
2482 | self.delete_metadata(p) |
2483 | mdobj["info"]["last_conflicted"] = time.time() |
2484 | @@ -1079,8 +1075,9 @@ |
2485 | def create_partial(self, node_id, share_id): |
2486 | """Create a .partial in disk and set the flag in metadata.""" |
2487 | mdid = self._idx_node_id[(share_id, node_id)] |
2488 | - log_debug("create_partial: mdid=%r share_id=%r node_id=%r", |
2489 | - mdid, share_id, node_id) |
2490 | + logger.debug( |
2491 | + "create_partial: mdid=%r share_id=%r node_id=%r", |
2492 | + mdid, share_id, node_id) |
2493 | if self._check_partial(mdid): |
2494 | raise ValueError("The object with share_id %r and node_id %r is " |
2495 | "already partial!", share_id, node_id) |
2496 | @@ -1123,8 +1120,9 @@ |
2497 | def get_partial_for_writing(self, node_id, share_id): |
2498 | """Get a write-only fd to a partial file""" |
2499 | mdid = self._idx_node_id[(share_id, node_id)] |
2500 | - log_debug("get_partial_for_writing: mdid=%r share_id=%r node_id=%r", |
2501 | - mdid, share_id, node_id) |
2502 | + logger.debug( |
2503 | + "get_partial_for_writing: mdid=%r share_id=%r node_id=%r", |
2504 | + mdid, share_id, node_id) |
2505 | |
2506 | mdobj = self.fs[mdid] |
2507 | partial_path = self._get_partial_path(mdobj) |
2508 | @@ -1153,8 +1151,9 @@ |
2509 | |
2510 | # move the .partial to the real path, and set the md info |
2511 | path = self.get_abspath(mdobj['share_id'], mdobj['path']) |
2512 | - log_debug("commit_partial: path=%r mdid=%r share_id=%r node_id=%r", |
2513 | - path, mdid, share_id, node_id) |
2514 | + logger.debug( |
2515 | + "commit_partial: path=%r mdid=%r share_id=%r node_id=%r", |
2516 | + path, mdid, share_id, node_id) |
2517 | |
2518 | partial_path = self._get_partial_path(mdobj) |
2519 | with self._enable_share_write(share_id, path): |
2520 | @@ -1176,8 +1175,9 @@ |
2521 | # delete the .partial, and set the md info |
2522 | mdobj = self.fs[mdid] |
2523 | path = self.get_abspath(mdobj['share_id'], mdobj['path']) |
2524 | - log_debug("remove_partial: path=%r mdid=%r share_id=%r node_id=%r", |
2525 | - path, mdid, share_id, node_id) |
2526 | + logger.debug( |
2527 | + "remove_partial: path=%r mdid=%r share_id=%r node_id=%r", |
2528 | + path, mdid, share_id, node_id) |
2529 | partial_path = self._get_partial_path(mdobj) |
2530 | try: |
2531 | # don't alert EQ, partials are in other directory, not watched |
2532 | @@ -1185,7 +1185,7 @@ |
2533 | except OSError, e: |
2534 | # we only remove it if its there. |
2535 | m = "OSError %s when trying to remove partial_path %r" |
2536 | - log_warning(m, e, partial_path) |
2537 | + logger.warning(m, e, partial_path) |
2538 | mdobj["info"]["last_partial_removed"] = time.time() |
2539 | mdobj["info"]["is_partial"] = False |
2540 | self.fs[mdid] = mdobj |
2541 | @@ -1390,15 +1390,15 @@ |
2542 | share_id = mdobj["share_id"] |
2543 | path = self.get_abspath(mdobj['share_id'], mdobj['path']) |
2544 | is_dir = mdobj["is_dir"] |
2545 | - log_debug("delete_to_trash: mdid=%r, parent=%r, share=%r, node=%r, " |
2546 | - "path=%r is_dir=%r", mdid, parent_id, share_id, node_id, |
2547 | - path, is_dir) |
2548 | + logger.debug( |
2549 | + "delete_to_trash: mdid=%r, parent=%r, share=%r, node=%r, path=%r " |
2550 | + "is_dir=%r", mdid, parent_id, share_id, node_id, path, is_dir) |
2551 | self.delete_metadata(path) |
2552 | self.trash[(share_id, node_id)] = (mdid, parent_id, path, is_dir) |
2553 | |
2554 | def remove_from_trash(self, share_id, node_id): |
2555 | """Delete the node from the trash.""" |
2556 | - log_debug("remove_from_trash: share=%r, node=%r", share_id, node_id) |
2557 | + logger.debug("remove_from_trash: share=%r, node=%r", share_id, node_id) |
2558 | if (share_id, node_id) in self.trash: |
2559 | del self.trash[(share_id, node_id)] |
2560 | |
2561 | @@ -1431,16 +1431,17 @@ |
2562 | def add_to_move_limbo(self, share_id, node_id, old_parent_id, |
2563 | new_parent_id, new_name, path_from, path_to): |
2564 | """Add the operation info to the move limbo.""" |
2565 | - log_debug("add to move limbo: share=%r, node=%r, old_parent=%r, " |
2566 | - "new_parent=%r, new_name=%r", share_id, node_id, |
2567 | - old_parent_id, new_parent_id, new_name) |
2568 | + logger.debug( |
2569 | + "add to move limbo: share=%r, node=%r, old_parent=%r, " |
2570 | + "new_parent=%r, new_name=%r", share_id, node_id, old_parent_id, |
2571 | + new_parent_id, new_name) |
2572 | self.move_limbo[(share_id, node_id)] = (old_parent_id, new_parent_id, |
2573 | new_name, path_from, path_to) |
2574 | |
2575 | def remove_from_move_limbo(self, share_id, node_id): |
2576 | """Remove the node from the move limbo.""" |
2577 | - log_debug("remove from move limbo: share=%r, node=%r", |
2578 | - share_id, node_id) |
2579 | + logger.debug( |
2580 | + "remove from move limbo: share=%r, node=%r", share_id, node_id) |
2581 | if (share_id, node_id) in self.move_limbo: |
2582 | del self.move_limbo[(share_id, node_id)] |
2583 | |
2584 | @@ -1488,12 +1489,14 @@ |
2585 | if node == marker: |
2586 | del self.trash[(share, node)] |
2587 | self.trash[(share, value)] = (mdid, parent, path, is_dir) |
2588 | - log_debug("dereference ok trash: share=%r marker=%r " |
2589 | - "new node=%r", share, marker, value) |
2590 | + logger.debug( |
2591 | + "dereference ok trash: share=%r marker=%r new node=%r", |
2592 | + share, marker, value) |
2593 | elif parent == marker: |
2594 | self.trash[(share, node)] = (mdid, value, path, is_dir) |
2595 | - log_debug("dereference ok trash: share=%r node=%r marker=%r" |
2596 | - " new parent=%r", share, node, marker, value) |
2597 | + logger.debug( |
2598 | + "dereference ok trash: share=%r node=%r marker=%r " |
2599 | + "new parent=%r", share, node, marker, value) |
2600 | |
2601 | for k, v in self.move_limbo.iteritems(): |
2602 | share, node = k |
2603 | @@ -1502,8 +1505,9 @@ |
2604 | if node == marker: |
2605 | del self.move_limbo[(share, node)] |
2606 | self.move_limbo[(share, value)] = v |
2607 | - log_debug("dereference ok move limbo: share=%r marker=%r " |
2608 | - "new node=%r", share, marker, value) |
2609 | + logger.debug( |
2610 | + "dereference ok move limbo: share=%r marker=%r " |
2611 | + "new node=%r", share, marker, value) |
2612 | else: |
2613 | # both parents can be the same marker at the same time |
2614 | if old_parent == marker or new_parent == marker: |
2615 | @@ -1511,9 +1515,10 @@ |
2616 | old_parent = value |
2617 | if new_parent == marker: |
2618 | new_parent = value |
2619 | - log_debug("dereference ok move limbo: share=%r node=%r " |
2620 | - "marker=%r old_parent=%r new_parent=%r", |
2621 | - share, node, marker, old_parent, new_parent) |
2622 | + logger.debug( |
2623 | + "dereference ok move limbo: share=%r node=%r marker=%r " |
2624 | + "old_parent=%r new_parent=%r", |
2625 | + share, node, marker, old_parent, new_parent) |
2626 | self.move_limbo[k] = (old_parent, new_parent, new_name, |
2627 | path_from, path_to) |
2628 | |
2629 | @@ -1524,15 +1529,17 @@ |
2630 | """ |
2631 | for (share, node), (_, parent, _, _) in self.trash.iteritems(): |
2632 | if node == marker or parent == marker: |
2633 | - log_debug("dereference err trash: share=%r node=%r " |
2634 | - "marker=%r", share, node, marker) |
2635 | + logger.debug( |
2636 | + "dereference err trash: share=%r node=%r marker=%r", |
2637 | + share, node, marker) |
2638 | del self.trash[(share, node)] |
2639 | |
2640 | move_items = self.move_limbo.iteritems() |
2641 | for (share, node), (old_parent, new_parent, _, _, _) in move_items: |
2642 | if node == marker or old_parent == marker or new_parent == marker: |
2643 | - log_debug("dereference err move limbo: share=%r node=%r " |
2644 | - "marker=%r", share, node, marker) |
2645 | + logger.debug( |
2646 | + "dereference err move limbo: share=%r node=%r marker=%r", |
2647 | + share, node, marker) |
2648 | del self.move_limbo[(share, node)] |
2649 | |
2650 | |
2651 | |
2652 | === modified file 'ubuntuone/syncdaemon/filesystem_notifications.py' |
2653 | --- ubuntuone/syncdaemon/filesystem_notifications.py 2018-03-08 19:39:13 +0000 |
2654 | +++ ubuntuone/syncdaemon/filesystem_notifications.py 2018-04-14 23:11:29 +0000 |
2655 | @@ -1,5 +1,3 @@ |
2656 | -# Author: Manuel de la Pena <manuel@canonical.com> |
2657 | -# |
2658 | # Copyright 2011-2012 Canonical Ltd. |
2659 | # |
2660 | # This program is free software: you can redistribute it and/or modify it |
2661 | @@ -34,9 +32,9 @@ |
2662 | |
2663 | from ubuntuone.platform import access, path_exists |
2664 | from ubuntuone.syncdaemon.mute_filter import MuteFilter |
2665 | -from ubuntuone import logger |
2666 | -# our logging level |
2667 | -TRACE = logger.TRACE |
2668 | + |
2669 | + |
2670 | +logger = logging.getLogger(__name__) |
2671 | |
2672 | |
2673 | class GeneralINotifyProcessor(object): |
2674 | @@ -44,9 +42,7 @@ |
2675 | |
2676 | def __init__(self, monitor, handle_dir_delete, name_translations, |
2677 | platform_is_ignored, ignore_mask, ignore_config=None): |
2678 | - self.log = logging.getLogger( |
2679 | - 'ubuntuone.SyncDaemon.filesystem_notifications.GeneralProcessor') |
2680 | - self.log.setLevel(TRACE) |
2681 | + self.log = logger |
2682 | self.invnames_log = logging.getLogger( |
2683 | 'ubuntuone.SyncDaemon.InvalidNames') |
2684 | self.monitor = monitor |
2685 | |
2686 | === modified file 'ubuntuone/syncdaemon/fsm/fsm.py' |
2687 | --- ubuntuone/syncdaemon/fsm/fsm.py 2018-03-08 19:39:13 +0000 |
2688 | +++ ubuntuone/syncdaemon/fsm/fsm.py 2018-04-14 23:11:29 +0000 |
2689 | @@ -1,7 +1,3 @@ |
2690 | -# ubuntuone.syncdaemon.fsm.fsm - a fsm |
2691 | -# |
2692 | -# Author: Lucio Torre <lucio.torre@canonical.com> |
2693 | -# |
2694 | # Copyright 2009-2012 Canonical Ltd. |
2695 | # |
2696 | # This program is free software: you can redistribute it and/or modify it |
2697 | @@ -34,8 +30,10 @@ |
2698 | """ |
2699 | |
2700 | import itertools |
2701 | - |
2702 | -from ubuntuone.syncdaemon import logger |
2703 | +import logging |
2704 | + |
2705 | + |
2706 | +logger = logging.getLogger(__name__) |
2707 | |
2708 | |
2709 | def hash_dict(d): |
2710 | @@ -101,7 +99,7 @@ |
2711 | """Create a state machine based on fsm.""" |
2712 | self.fsm = fsm |
2713 | if log is None: |
2714 | - self.log = logger.root_logger |
2715 | + self.log = logger |
2716 | else: |
2717 | self.log = log |
2718 | |
2719 | |
2720 | === modified file 'ubuntuone/syncdaemon/hash_queue.py' |
2721 | --- ubuntuone/syncdaemon/hash_queue.py 2016-09-17 14:29:53 +0000 |
2722 | +++ ubuntuone/syncdaemon/hash_queue.py 2018-04-14 23:11:29 +0000 |
2723 | @@ -1,9 +1,3 @@ |
2724 | -# ubuntuone.syncdaemon.hash_queue - hash queues |
2725 | -# |
2726 | -# Authors: Facundo Batista <facundo@canonical.com> |
2727 | -# Guillermo Gonzalez <guillermo.gonzalez@canonical.com> |
2728 | -# Alejandro J. Cura <alecu@canonical.com> |
2729 | -# |
2730 | # Copyright 2009-2012 Canonical Ltd. |
2731 | # |
2732 | # This program is free software: you can redistribute it and/or modify it |
2733 | @@ -102,7 +96,7 @@ |
2734 | |
2735 | try: |
2736 | result = self._hash(path) |
2737 | - except (IOError, OSError), e: |
2738 | + except (IOError, OSError) as e: |
2739 | m = "Hasher: hash error %s (path %r mdid %s)" |
2740 | self.logger.debug(m, e, path, mdid) |
2741 | reactor.callLater( |
2742 | |
2743 | === modified file 'ubuntuone/syncdaemon/interaction_interfaces.py' |
2744 | --- ubuntuone/syncdaemon/interaction_interfaces.py 2016-09-17 01:06:23 +0000 |
2745 | +++ ubuntuone/syncdaemon/interaction_interfaces.py 2018-04-14 23:11:29 +0000 |
2746 | @@ -1,5 +1,3 @@ |
2747 | -# -*- coding: utf-8 -*- |
2748 | -# |
2749 | # Copyright 2011-2015 Canonical Ltd. |
2750 | # |
2751 | # This program is free software: you can redistribute it and/or modify it |
2752 | @@ -52,10 +50,10 @@ |
2753 | except ImportError: |
2754 | from ubuntuone.networkstate import ONLINE |
2755 | |
2756 | +from ubuntuone import config |
2757 | from ubuntuone.logger import log_call |
2758 | from ubuntuone.platform import ExternalInterface |
2759 | from ubuntuone.storageprotocol import request |
2760 | -from ubuntuone.syncdaemon import config |
2761 | from ubuntuone.syncdaemon.action_queue import Download, Upload |
2762 | from ubuntuone.syncdaemon.interfaces import IMarker |
2763 | from ubuntuone.syncdaemon.volume_manager import Share, UDF, VolumeDoesNotExist |
2764 | |
2765 | === removed file 'ubuntuone/syncdaemon/logger.py' |
2766 | --- ubuntuone/syncdaemon/logger.py 2018-03-08 19:39:13 +0000 |
2767 | +++ ubuntuone/syncdaemon/logger.py 1970-01-01 00:00:00 +0000 |
2768 | @@ -1,314 +0,0 @@ |
2769 | -# Copyright 2009-2012 Canonical Ltd. |
2770 | -# Copyright 2015-2016 Chicharreros (https://launchpad.net/~chicharreros) |
2771 | -# |
2772 | -# This program is free software: you can redistribute it and/or modify it |
2773 | -# under the terms of the GNU General Public License version 3, as published |
2774 | -# by the Free Software Foundation. |
2775 | -# |
2776 | -# This program is distributed in the hope that it will be useful, but |
2777 | -# WITHOUT ANY WARRANTY; without even the implied warranties of |
2778 | -# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR |
2779 | -# PURPOSE. See the GNU General Public License for more details. |
2780 | -# |
2781 | -# You should have received a copy of the GNU General Public License along |
2782 | -# with this program. If not, see <http://www.gnu.org/licenses/>. |
2783 | -# |
2784 | -# In addition, as a special exception, the copyright holders give |
2785 | -# permission to link the code of portions of this program with the |
2786 | -# OpenSSL library under certain conditions as described in each |
2787 | -# individual source file, and distribute linked combinations |
2788 | -# including the two. |
2789 | -# You must obey the GNU General Public License in all respects |
2790 | -# for all of the code used other than OpenSSL. If you modify |
2791 | -# file(s) with this exception, you may extend this exception to your |
2792 | -# version of the file(s), but you are not obligated to do so. If you |
2793 | -# do not wish to do so, delete this exception statement from your |
2794 | -# version. If you delete this exception statement from all source |
2795 | -# files in the program, then also delete it here. |
2796 | -""" SyncDaemon logging utilities and config. """ |
2797 | - |
2798 | -from __future__ import with_statement |
2799 | - |
2800 | -import logging |
2801 | -import sys |
2802 | -import os |
2803 | -import zlib |
2804 | - |
2805 | -from ubuntuone.logger import ( |
2806 | - _DEBUG_LOG_LEVEL, |
2807 | - basic_formatter, |
2808 | - CustomRotatingFileHandler, |
2809 | - DayRotatingFileHandler, |
2810 | - Logger, |
2811 | - MultiFilter, |
2812 | -) |
2813 | -from ubuntuone.platform.logger import ubuntuone_log_dir |
2814 | -# api compatibility imports |
2815 | -from ubuntuone import logger |
2816 | -from ubuntuone.platform import get_filesystem_logger, setup_filesystem_logging |
2817 | -DebugCapture = logger.DebugCapture |
2818 | -NOTE = logger.NOTE |
2819 | -TRACE = logger.TRACE |
2820 | - |
2821 | - |
2822 | -class mklog(object): |
2823 | - """ |
2824 | - Create a logger that keeps track of the method where it's being |
2825 | - called from, in order to make more informative messages. |
2826 | - """ |
2827 | - __slots__ = ('logger', 'zipped_desc') |
2828 | - |
2829 | - def __init__(self, _logger, _method, _share, _uid, *args, **kwargs): |
2830 | - # args are _-prepended to lower the chances of them |
2831 | - # conflicting with kwargs |
2832 | - |
2833 | - all_args = [] |
2834 | - for arg in args: |
2835 | - all_args.append( |
2836 | - repr(arg).decode('ascii', 'replace').encode('ascii', 'replace') |
2837 | - ) |
2838 | - for k, v in kwargs.items(): |
2839 | - v = repr(v).decode('ascii', 'replace').encode('ascii', 'replace') |
2840 | - all_args.append("%s=%r" % (k, v)) |
2841 | - args = ", ".join(all_args) |
2842 | - |
2843 | - desc = "%-28s share:%-40r node:%-40r %s(%s) " % (_method, _share, |
2844 | - _uid, _method, args) |
2845 | - desc = desc.replace('%', '%%') |
2846 | - self.zipped_desc = zlib.compress(desc, 9) |
2847 | - self.logger = _logger |
2848 | - |
2849 | - def _log(self, logger_func, *args): |
2850 | - """Generalized form of the different logging methods.""" |
2851 | - desc = zlib.decompress(self.zipped_desc) |
2852 | - text = desc + args[0] |
2853 | - logger_func(text, *args[1:]) |
2854 | - |
2855 | - def debug(self, *args): |
2856 | - """Log at level DEBUG""" |
2857 | - self._log(self.logger.debug, *args) |
2858 | - |
2859 | - def info(self, *args): |
2860 | - """Log at level INFO""" |
2861 | - self._log(self.logger.info, *args) |
2862 | - |
2863 | - def warn(self, *args): |
2864 | - """Log at level WARN""" |
2865 | - self._log(self.logger.warn, *args) |
2866 | - |
2867 | - def error(self, *args): |
2868 | - """Log at level ERROR""" |
2869 | - self._log(self.logger.error, *args) |
2870 | - |
2871 | - def exception(self, *args): |
2872 | - """Log an exception""" |
2873 | - self._log(self.logger.exception, *args) |
2874 | - |
2875 | - def note(self, *args): |
2876 | - """Log at NOTE level (high-priority info) """ |
2877 | - self._log(self.logger.high, *args) |
2878 | - |
2879 | - def trace(self, *args): |
2880 | - """Log at level TRACE""" |
2881 | - self._log(self.logger.trace, *args) |
2882 | - |
2883 | - def callbacks(self, success_message='success', success_arg='', |
2884 | - failure_message='failure'): |
2885 | - """ |
2886 | - Return a callback and an errback that log success or failure |
2887 | - messages. |
2888 | - |
2889 | - The callback/errback pair are pass-throughs; they don't |
2890 | - interfere in the callback/errback chain of the deferred you |
2891 | - add them to. |
2892 | - """ |
2893 | - def callback(arg, success_arg=success_arg): |
2894 | - "it worked!" |
2895 | - if callable(success_arg): |
2896 | - success_arg = success_arg(arg) |
2897 | - self.debug(success_message, success_arg) |
2898 | - return arg |
2899 | - |
2900 | - def errback(failure): |
2901 | - "it failed!" |
2902 | - self.error(failure_message, failure.getErrorMessage()) |
2903 | - self.debug('traceback follows:\n\n' + failure.getTraceback(), '') |
2904 | - return failure |
2905 | - return callback, errback |
2906 | - |
2907 | - |
2908 | -LOGFILENAME = os.path.join(ubuntuone_log_dir, 'syncdaemon.log') |
2909 | -EXLOGFILENAME = os.path.join(ubuntuone_log_dir, 'syncdaemon-exceptions.log') |
2910 | -INVALIDLOGFILENAME = os.path.join( |
2911 | - ubuntuone_log_dir, 'syncdaemon-invalid-names.log') |
2912 | -BROKENLOGFILENAME = os.path.join( |
2913 | - ubuntuone_log_dir, 'syncdaemon-broken-nodes.log') |
2914 | - |
2915 | - |
2916 | -root_logger = logging.getLogger("ubuntuone.SyncDaemon") |
2917 | -twisted_logger = logging.getLogger('twisted') |
2918 | - |
2919 | -filesystem_logger = get_filesystem_logger() |
2920 | -# now restore our custom logger class |
2921 | -logging.setLoggerClass(Logger) |
2922 | - |
2923 | -root_handler = CustomRotatingFileHandler(filename=LOGFILENAME) |
2924 | -exception_handler = CustomRotatingFileHandler(filename=EXLOGFILENAME) |
2925 | - |
2926 | - |
2927 | -def init(): |
2928 | - # root logger |
2929 | - root_logger.propagate = False |
2930 | - root_logger.setLevel(_DEBUG_LOG_LEVEL) |
2931 | - root_handler.addFilter(MultiFilter(['ubuntuone.SyncDaemon', |
2932 | - 'twisted', 'pyinotify'])) |
2933 | - root_handler.setFormatter(basic_formatter) |
2934 | - root_handler.setLevel(_DEBUG_LOG_LEVEL) |
2935 | - root_logger.addHandler(root_handler) |
2936 | - # exception logs |
2937 | - exception_handler.setFormatter(basic_formatter) |
2938 | - exception_handler.setLevel(logging.ERROR) |
2939 | - # add the exception handler to the root logger |
2940 | - logging.getLogger('').addHandler(exception_handler) |
2941 | - root_logger.addHandler(exception_handler) |
2942 | - |
2943 | - # hook twisted.python.log with standard logging |
2944 | - from twisted.python import log |
2945 | - observer = log.PythonLoggingObserver('twisted') |
2946 | - observer.start() |
2947 | - # configure the logger to only show errors |
2948 | - twisted_logger.propagate = False |
2949 | - twisted_logger.setLevel(logging.ERROR) |
2950 | - twisted_logger.addHandler(root_handler) |
2951 | - twisted_logger.addHandler(exception_handler) |
2952 | - |
2953 | - # set the filesystem logging |
2954 | - setup_filesystem_logging(filesystem_logger, root_handler) |
2955 | - |
2956 | - # invalid filenames log |
2957 | - invnames_logger = logging.getLogger("ubuntuone.SyncDaemon.InvalidNames") |
2958 | - invnames_logger.setLevel(_DEBUG_LOG_LEVEL) |
2959 | - invnames_handler = CustomRotatingFileHandler(filename=INVALIDLOGFILENAME) |
2960 | - invnames_handler.setFormatter(basic_formatter) |
2961 | - invnames_handler.setLevel(logging.INFO) |
2962 | - invnames_logger.addHandler(invnames_handler) |
2963 | - |
2964 | - # broken nodes log |
2965 | - brokennodes_logger = logging.getLogger("ubuntuone.SyncDaemon.BrokenNodes") |
2966 | - brokennodes_logger.setLevel(_DEBUG_LOG_LEVEL) |
2967 | - brokennodes_handler = CustomRotatingFileHandler(filename=BROKENLOGFILENAME) |
2968 | - brokennodes_handler.setFormatter(basic_formatter) |
2969 | - brokennodes_handler.setLevel(logging.INFO) |
2970 | - brokennodes_logger.addHandler(brokennodes_handler) |
2971 | - |
2972 | - |
2973 | -def configure_logging(level, maxBytes, backupCount): |
2974 | - """configure level, maxBytes and backupCount in all handlers""" |
2975 | - set_level(level) |
2976 | - set_max_bytes(maxBytes) |
2977 | - set_backup_count(backupCount) |
2978 | - |
2979 | - |
2980 | -def set_level(level): |
2981 | - """set 'level' as the level for all the logger/handlers""" |
2982 | - root_logger.setLevel(level) |
2983 | - root_handler.setLevel(level) |
2984 | - |
2985 | - |
2986 | -def set_max_bytes(size): |
2987 | - """set the maxBytes value in all the handlers""" |
2988 | - root_handler.maxBytes = size |
2989 | - exception_handler.maxBytes = size |
2990 | - |
2991 | - |
2992 | -def set_backup_count(count): |
2993 | - """set the backup count in all the handlers""" |
2994 | - root_handler.backupCount = count |
2995 | - exception_handler.backupCount = count |
2996 | - |
2997 | - |
2998 | -def set_debug(dest): |
2999 | - """ Set the level to debug of all registered loggers, and replace their |
3000 | - handlers. if debug_level is file, syncdaemon-debug.log is used. If it's |
3001 | - stdout, all the logging is redirected to stdout. If it's stderr, to stderr. |
3002 | - |
3003 | - @param dest: a string with a one or more of 'file', 'stdout', and 'stderr' |
3004 | - e.g. 'file stdout' |
3005 | - """ |
3006 | - if not [v for v in ['file', 'stdout', 'stderr'] if v in dest]: |
3007 | - # invalid dest value, let the loggers alone |
3008 | - return |
3009 | - sd_filter = MultiFilter(['ubuntuone.SyncDaemon', 'twisted', 'pyinotify']) |
3010 | - if 'file' in dest: |
3011 | - # setup the existing loggers in debug |
3012 | - root_handler.setLevel(_DEBUG_LOG_LEVEL) |
3013 | - logfile = os.path.join(ubuntuone_log_dir, 'syncdaemon-debug.log') |
3014 | - root_handler.baseFilename = os.path.abspath(logfile) |
3015 | - # don't cap the file size |
3016 | - set_max_bytes(0) |
3017 | - for name in ['ubuntuone.SyncDaemon', 'twisted']: |
3018 | - logger = logging.getLogger(name) |
3019 | - logger.setLevel(_DEBUG_LOG_LEVEL) |
3020 | - if 'stderr' in dest: |
3021 | - stderr_handler = logging.StreamHandler() |
3022 | - stderr_handler.setFormatter(basic_formatter) |
3023 | - stderr_handler.setLevel(_DEBUG_LOG_LEVEL) |
3024 | - stderr_handler.addFilter(sd_filter) |
3025 | - logger.addHandler(stderr_handler) |
3026 | - if 'stdout' in dest: |
3027 | - stdout_handler = logging.StreamHandler(sys.stdout) |
3028 | - stdout_handler.setFormatter(basic_formatter) |
3029 | - stdout_handler.setLevel(_DEBUG_LOG_LEVEL) |
3030 | - stdout_handler.addFilter(sd_filter) |
3031 | - logger.addHandler(stdout_handler) |
3032 | - |
3033 | - |
3034 | -def set_server_debug(dest): |
3035 | - """ Set the level to debug of all registered loggers, and replace their |
3036 | - handlers. if debug_level is file, syncdaemon-debug.log is used. If it's |
3037 | - stdout, all the logging is redirected to stdout. |
3038 | - |
3039 | - @param dest: a string containing 'file' and/or 'stdout', e.g: 'file stdout' |
3040 | - """ |
3041 | - logger = logging.getLogger("storage.server") |
3042 | - logger.setLevel(5) # this shows server messages |
3043 | - if 'file' in dest: |
3044 | - filename = os.path.join(ubuntuone_log_dir, 'syncdaemon-debug.log') |
3045 | - handler = DayRotatingFileHandler(filename=filename) |
3046 | - handler.setFormatter(basic_formatter) |
3047 | - handler.setLevel(5) # this shows server messages |
3048 | - logger.addHandler(handler) |
3049 | - if 'stdout' in dest: |
3050 | - stdout_handler = logging.StreamHandler(sys.stdout) |
3051 | - stdout_handler.setFormatter(basic_formatter) |
3052 | - stdout_handler.setLevel(5) # this shows server messages |
3053 | - logger.addHandler(stdout_handler) |
3054 | - if 'stderrt' in dest: |
3055 | - stdout_handler = logging.StreamHandler(sys.stdout) |
3056 | - stdout_handler.setFormatter(basic_formatter) |
3057 | - stdout_handler.setLevel(5) # this shows server messages |
3058 | - logger.addHandler(stdout_handler) |
3059 | - |
3060 | - |
3061 | -# if we are in debug mode, replace/add the handlers |
3062 | -DEBUG = os.environ.get("MAGICICADA_DEBUG", None) |
3063 | -if DEBUG: |
3064 | - set_debug(DEBUG) |
3065 | - |
3066 | -# configure server logging if SERVER_DEBUG != None |
3067 | -SERVER_DEBUG = os.environ.get("SERVER_DEBUG", None) |
3068 | -if SERVER_DEBUG: |
3069 | - set_server_debug(SERVER_DEBUG) |
3070 | - |
3071 | - |
3072 | -def rotate_logs(): |
3073 | - """do a rollover of the three handlers""" |
3074 | - # ignore the missing file error on a failed rollover |
3075 | - try: |
3076 | - root_handler.doRollover() |
3077 | - except OSError: |
3078 | - pass |
3079 | - try: |
3080 | - exception_handler.doRollover() |
3081 | - except OSError: |
3082 | - pass |
3083 | |
3084 | === modified file 'ubuntuone/syncdaemon/main.py' |
3085 | --- ubuntuone/syncdaemon/main.py 2017-02-10 01:15:07 +0000 |
3086 | +++ ubuntuone/syncdaemon/main.py 2018-04-14 23:11:29 +0000 |
3087 | @@ -33,12 +33,19 @@ |
3088 | import os |
3089 | import sys |
3090 | |
3091 | -from dirspec.utils import user_home |
3092 | from twisted.internet import defer, reactor, task |
3093 | |
3094 | +from ubuntuone import config, clientdefs, syncdaemon |
3095 | +from ubuntuone.platform import ( |
3096 | + can_write, |
3097 | + expand_user, |
3098 | + make_dir, |
3099 | + path_exists, |
3100 | + recursive_move, |
3101 | + set_dir_readwrite, |
3102 | +) |
3103 | from ubuntuone.syncdaemon import ( |
3104 | action_queue, |
3105 | - config, |
3106 | event_queue, |
3107 | filesystem_manager, |
3108 | hash_queue, |
3109 | @@ -48,7 +55,6 @@ |
3110 | tritcask, |
3111 | volume_manager, |
3112 | ) |
3113 | -from ubuntuone import syncdaemon, clientdefs |
3114 | from ubuntuone.syncdaemon.interaction_interfaces import SyncdaemonService |
3115 | from ubuntuone.syncdaemon.states import StateManager, QueueManager |
3116 | |
3117 | @@ -88,7 +94,7 @@ |
3118 | shares_symlink_name='Shared With Me', |
3119 | read_limit=None, write_limit=None, throttling_enabled=False, |
3120 | ignore_files=None, auth_credentials=None, |
3121 | - monitor_class=None): |
3122 | + monitor_class=None, cache_dir=None): |
3123 | self.root_dir = root_dir |
3124 | self.shares_dir = shares_dir |
3125 | self.shares_dir_link = os.path.join(self.root_dir, shares_symlink_name) |
3126 | |
3127 | === modified file 'ubuntuone/syncdaemon/sync.py' |
3128 | --- ubuntuone/syncdaemon/sync.py 2018-03-09 09:11:05 +0000 |
3129 | +++ ubuntuone/syncdaemon/sync.py 2018-04-14 23:11:29 +0000 |
3130 | @@ -36,20 +36,17 @@ |
3131 | from operator import attrgetter |
3132 | import sys |
3133 | |
3134 | -from ubuntuone.syncdaemon.marker import MDMarker |
3135 | +from ubuntuone.logger import DebugCapture |
3136 | from ubuntuone.storageprotocol import delta |
3137 | -from ubuntuone.syncdaemon.fsm.fsm import \ |
3138 | - StateMachineRunner, StateMachine |
3139 | from ubuntuone.syncdaemon import u1fsfsm |
3140 | -from ubuntuone.syncdaemon.logger import DebugCapture |
3141 | +from ubuntuone.syncdaemon.fsm.fsm import StateMachineRunner, StateMachine |
3142 | from ubuntuone.syncdaemon.filesystem_manager import ( |
3143 | DirectoryNotRemovable, |
3144 | InconsistencyError, |
3145 | ) |
3146 | +from ubuntuone.syncdaemon.marker import MDMarker |
3147 | from ubuntuone.syncdaemon.volume_manager import VolumeDoesNotExist |
3148 | -from ubuntuone.platform import ( |
3149 | - stat_path, |
3150 | -) |
3151 | +from ubuntuone.platform import stat_path |
3152 | |
3153 | empty_hash = "" |
3154 | |
3155 | |
3156 | === modified file 'ubuntuone/syncdaemon/tests/test_action_queue.py' |
3157 | --- ubuntuone/syncdaemon/tests/test_action_queue.py 2018-03-14 21:01:56 +0000 |
3158 | +++ ubuntuone/syncdaemon/tests/test_action_queue.py 2018-04-14 23:11:29 +0000 |
3159 | @@ -53,17 +53,9 @@ |
3160 | from twisted.trial.unittest import TestCase as TwistedTestCase |
3161 | from zope.interface.verify import verifyObject, verifyClass |
3162 | |
3163 | -from contrib.testing.testcase import ( |
3164 | - BaseTwistedTestCase, |
3165 | - DummyClass, |
3166 | - FakeActionQueue, |
3167 | - FakeCommand, |
3168 | - FakeMain, |
3169 | - FakeUpload, |
3170 | -) |
3171 | +from ubuntuone import config, logger, clientdefs |
3172 | from ubuntuone.devtools import handlers |
3173 | from ubuntuone.devtools.testcases import skipTest |
3174 | -from ubuntuone import logger, clientdefs |
3175 | from ubuntuone.platform import open_file, platform, path_exists |
3176 | from ubuntuone.storageprotocol import ( |
3177 | client, |
3178 | @@ -72,8 +64,7 @@ |
3179 | protocol_pb2, |
3180 | request, |
3181 | ) |
3182 | -from ubuntuone.syncdaemon import interfaces, config |
3183 | -from ubuntuone.syncdaemon import action_queue |
3184 | +from ubuntuone.syncdaemon import action_queue, interfaces |
3185 | from ubuntuone.syncdaemon.action_queue import ( |
3186 | ActionQueue, ActionQueueCommand, ChangePublicAccess, CreateUDF, |
3187 | DeleteVolume, Download, ListVolumes, ActionQueueProtocol, ListShares, |
3188 | @@ -87,7 +78,19 @@ |
3189 | from ubuntuone.syncdaemon.event_queue import EventQueue, EVENTS |
3190 | from ubuntuone.syncdaemon import offload_queue |
3191 | from ubuntuone.syncdaemon.marker import MDMarker |
3192 | -from ubuntuone.syncdaemon.volume_manager import ACCESS_LEVEL_RO |
3193 | +from ubuntuone.syncdaemon.volume_manager import ( |
3194 | + ACCESS_LEVEL_RO, |
3195 | + ACCESS_LEVEL_RW, |
3196 | +) |
3197 | +from ubuntuone.testing.testcase import ( |
3198 | + BaseTwistedTestCase, |
3199 | + DummyClass, |
3200 | + FakeActionQueue, |
3201 | + FakeCommand, |
3202 | + FakeMain, |
3203 | + FakeUpload, |
3204 | +) |
3205 | + |
3206 | |
3207 | PATH = os.path.join(u'~', u'Documents', u'pdfs', u'moño', u'') |
3208 | NAME = u'UDF-me' |
3209 | |
3210 | === modified file 'ubuntuone/syncdaemon/tests/test_eq_inotify.py' |
3211 | --- ubuntuone/syncdaemon/tests/test_eq_inotify.py 2016-09-17 14:29:53 +0000 |
3212 | +++ ubuntuone/syncdaemon/tests/test_eq_inotify.py 2018-04-14 23:11:29 +0000 |
3213 | @@ -40,14 +40,6 @@ |
3214 | from ubuntuone.devtools.handlers import MementoHandler |
3215 | from ubuntuone.devtools.testcases import skipIfOS, skipIfNotOS |
3216 | |
3217 | -from contrib.testing.testcase import ( |
3218 | - BaseTwistedTestCase, |
3219 | - FakeMain, |
3220 | - Listener, |
3221 | - skip_if_darwin_missing_fs_event, |
3222 | - skip_if_win32_missing_fs_event, |
3223 | -) |
3224 | -from ubuntuone.syncdaemon.tests.test_eventqueue import BaseEQTestCase |
3225 | from ubuntuone.platform import ( |
3226 | make_link, |
3227 | make_dir, |
3228 | @@ -61,6 +53,14 @@ |
3229 | set_dir_readwrite, |
3230 | ) |
3231 | from ubuntuone.syncdaemon import event_queue, volume_manager |
3232 | +from ubuntuone.syncdaemon.tests.test_eventqueue import BaseEQTestCase |
3233 | +from ubuntuone.testing.testcase import ( |
3234 | + BaseTwistedTestCase, |
3235 | + FakeMain, |
3236 | + Listener, |
3237 | + skip_if_darwin_missing_fs_event, |
3238 | + skip_if_win32_missing_fs_event, |
3239 | +) |
3240 | |
3241 | # our logging level |
3242 | TRACE = logging.getLevelName('TRACE') |
3243 | |
3244 | === modified file 'ubuntuone/syncdaemon/tests/test_eventqueue.py' |
3245 | --- ubuntuone/syncdaemon/tests/test_eventqueue.py 2018-03-08 19:39:13 +0000 |
3246 | +++ ubuntuone/syncdaemon/tests/test_eventqueue.py 2018-04-14 23:11:29 +0000 |
3247 | @@ -36,11 +36,7 @@ |
3248 | from twisted.internet import defer |
3249 | from twisted.trial.unittest import TestCase |
3250 | |
3251 | -from contrib.testing.testcase import ( |
3252 | - BaseTwistedTestCase, |
3253 | - FakeMonitor, |
3254 | - FakeVolumeManager, |
3255 | -) |
3256 | +from ubuntuone.devtools.handlers import MementoHandler |
3257 | from ubuntuone.platform.filesystem_notifications.monitor import ( |
3258 | FilesystemMonitor, |
3259 | ) |
3260 | @@ -49,7 +45,11 @@ |
3261 | filesystem_manager, |
3262 | tritcask, |
3263 | ) |
3264 | -from ubuntuone.devtools.handlers import MementoHandler |
3265 | +from ubuntuone.testing.testcase import ( |
3266 | + BaseTwistedTestCase, |
3267 | + FakeMonitor, |
3268 | + FakeVolumeManager, |
3269 | +) |
3270 | |
3271 | |
3272 | class BaseEQTestCase(BaseTwistedTestCase): |
3273 | |
3274 | === modified file 'ubuntuone/syncdaemon/tests/test_eventsnanny.py' |
3275 | --- ubuntuone/syncdaemon/tests/test_eventsnanny.py 2016-06-04 21:14:35 +0000 |
3276 | +++ ubuntuone/syncdaemon/tests/test_eventsnanny.py 2018-04-14 23:11:29 +0000 |
3277 | @@ -37,13 +37,13 @@ |
3278 | |
3279 | from twisted.internet import defer, reactor |
3280 | |
3281 | -from contrib.testing.testcase import ( |
3282 | +from ubuntuone.syncdaemon import (filesystem_manager, event_queue, |
3283 | + events_nanny, hash_queue, tritcask) |
3284 | +from ubuntuone.testing.testcase import ( |
3285 | BaseTwistedTestCase, |
3286 | FakeVolumeManager, |
3287 | skip_if_win32_missing_fs_event, |
3288 | ) |
3289 | -from ubuntuone.syncdaemon import (filesystem_manager, event_queue, |
3290 | - events_nanny, hash_queue, tritcask) |
3291 | |
3292 | |
3293 | class EventListener(object): |
3294 | |
3295 | === modified file 'ubuntuone/syncdaemon/tests/test_fileshelf.py' |
3296 | --- ubuntuone/syncdaemon/tests/test_fileshelf.py 2016-06-04 21:14:35 +0000 |
3297 | +++ ubuntuone/syncdaemon/tests/test_fileshelf.py 2018-04-14 23:11:29 +0000 |
3298 | @@ -1,6 +1,3 @@ |
3299 | -# |
3300 | -# Author: Guillermo Gonzalez <guillermo.gonzalez@canonical.com> |
3301 | -# |
3302 | # Copyright 2009-2012 Canonical Ltd. |
3303 | # |
3304 | # This program is free software: you can redistribute it and/or modify it |
3305 | @@ -39,7 +36,6 @@ |
3306 | from twisted.internet import defer |
3307 | from ubuntuone.devtools.testcases import skipIfOS |
3308 | |
3309 | -from contrib.testing.testcase import BaseTwistedTestCase |
3310 | from ubuntuone.platform import ( |
3311 | open_file, |
3312 | path_exists, |
3313 | @@ -50,6 +46,7 @@ |
3314 | LRUCache, |
3315 | CacheInconsistencyError, |
3316 | ) |
3317 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
3318 | |
3319 | |
3320 | BROKEN_PICKLE = '\axb80\x02}q\x01(U\x01aU\x04testq\x02U\x01bU\x06brokenq\x03u.' |
3321 | |
3322 | === modified file 'ubuntuone/syncdaemon/tests/test_fsm.py' |
3323 | --- ubuntuone/syncdaemon/tests/test_fsm.py 2016-06-03 19:52:03 +0000 |
3324 | +++ ubuntuone/syncdaemon/tests/test_fsm.py 2018-04-14 23:11:29 +0000 |
3325 | @@ -37,16 +37,7 @@ |
3326 | from mocker import MockerTestCase, ANY |
3327 | from twisted.internet import defer |
3328 | |
3329 | -from contrib.testing.testcase import ( |
3330 | - BaseTwistedTestCase, |
3331 | - FakeVolumeManager, |
3332 | - FakeMain, |
3333 | - FakeMonitor, |
3334 | - Listener, |
3335 | - skip_if_win32_and_uses_metadata_older_than_5, |
3336 | - skip_if_win32_and_uses_readonly, |
3337 | -) |
3338 | - |
3339 | +from ubuntuone import config |
3340 | from ubuntuone.devtools.handlers import MementoHandler |
3341 | from ubuntuone.platform import ( |
3342 | listdir, |
3343 | @@ -70,7 +61,7 @@ |
3344 | TrashTritcaskShelf, |
3345 | TRASH_ROW_TYPE, |
3346 | ) |
3347 | -from ubuntuone.syncdaemon import filesystem_manager, config, logger |
3348 | +from ubuntuone.syncdaemon import filesystem_manager, logger |
3349 | from ubuntuone.syncdaemon.file_shelf import FileShelf |
3350 | from ubuntuone.syncdaemon.tritcask import Tritcask |
3351 | from ubuntuone.syncdaemon.event_queue import EventQueue |
3352 | @@ -82,6 +73,16 @@ |
3353 | allow_writes, |
3354 | Share, |
3355 | ) |
3356 | +from ubuntuone.testing.testcase import ( |
3357 | + BaseTwistedTestCase, |
3358 | + FakeVolumeManager, |
3359 | + FakeMain, |
3360 | + FakeMonitor, |
3361 | + Listener, |
3362 | + skip_if_win32_and_uses_metadata_older_than_5, |
3363 | + skip_if_win32_and_uses_readonly, |
3364 | +) |
3365 | + |
3366 | |
3367 | BROKEN_PICKLE = '\axb80\x02}q\x01(U\x01aU\x04testq\x02U\x01bU\x06brokenq\x03u.' |
3368 | |
3369 | |
3370 | === modified file 'ubuntuone/syncdaemon/tests/test_hashqueue.py' |
3371 | --- ubuntuone/syncdaemon/tests/test_hashqueue.py 2016-09-17 14:29:53 +0000 |
3372 | +++ ubuntuone/syncdaemon/tests/test_hashqueue.py 2018-04-14 23:11:29 +0000 |
3373 | @@ -1,7 +1,3 @@ |
3374 | -# |
3375 | -# Authors: Facundo Batista <facundo@canonical.com> |
3376 | -# Alejandro J. Cura <alecu@canonical.com> |
3377 | -# |
3378 | # Copyright 2009-2012 Canonical Ltd. |
3379 | # |
3380 | # This program is free software: you can redistribute it and/or modify it |
3381 | @@ -44,11 +40,11 @@ |
3382 | from ubuntuone.devtools.handlers import MementoHandler |
3383 | from ubuntuone.devtools.testcases import skipTest |
3384 | |
3385 | -from contrib.testing.testcase import BaseTwistedTestCase |
3386 | from ubuntuone.platform import open_file, stat_path |
3387 | from ubuntuone.syncdaemon import hash_queue |
3388 | from ubuntuone.syncdaemon.hash_queue import HASHQUEUE_DELAY |
3389 | from ubuntuone.storageprotocol.content_hash import content_hash_factory, crc32 |
3390 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
3391 | |
3392 | FAKE_TIMESTAMP = 1 |
3393 | |
3394 | |
3395 | === modified file 'ubuntuone/syncdaemon/tests/test_interaction_interfaces.py' |
3396 | --- ubuntuone/syncdaemon/tests/test_interaction_interfaces.py 2016-09-17 01:06:23 +0000 |
3397 | +++ ubuntuone/syncdaemon/tests/test_interaction_interfaces.py 2018-04-14 23:11:29 +0000 |
3398 | @@ -41,15 +41,6 @@ |
3399 | FakeNetworkManagerState, |
3400 | ) |
3401 | |
3402 | -from contrib.testing.testcase import ( |
3403 | - FAKED_CREDENTIALS, |
3404 | - FakeCommand, |
3405 | - FakeDownload, |
3406 | - FakeUpload, |
3407 | - FakedObject, |
3408 | - FakeMainTestCase, |
3409 | - skipIfOS, |
3410 | -) |
3411 | from ubuntuone.platform import make_dir, make_link |
3412 | from ubuntuone.storageprotocol.protocol_pb2 import AccountInfo |
3413 | from ubuntuone.syncdaemon import ( |
3414 | @@ -84,6 +75,15 @@ |
3415 | UDF, |
3416 | VolumeDoesNotExist, |
3417 | ) |
3418 | +from ubuntuone.testing.testcase import ( |
3419 | + FAKED_CREDENTIALS, |
3420 | + FakeCommand, |
3421 | + FakeDownload, |
3422 | + FakeUpload, |
3423 | + FakedObject, |
3424 | + FakeMainTestCase, |
3425 | + skipIfOS, |
3426 | +) |
3427 | |
3428 | |
3429 | class CustomError(Exception): |
3430 | |
3431 | === modified file 'ubuntuone/syncdaemon/tests/test_localrescan.py' |
3432 | --- ubuntuone/syncdaemon/tests/test_localrescan.py 2018-03-08 19:39:13 +0000 |
3433 | +++ ubuntuone/syncdaemon/tests/test_localrescan.py 2018-04-14 23:11:29 +0000 |
3434 | @@ -38,11 +38,6 @@ |
3435 | from ubuntuone.devtools.handlers import MementoHandler |
3436 | from ubuntuone.devtools.testcases import skipIfOS |
3437 | |
3438 | -from contrib.testing.testcase import ( |
3439 | - BaseTwistedTestCase, |
3440 | - FakeVolumeManager, |
3441 | - skip_if_win32_and_uses_readonly, |
3442 | -) |
3443 | from ubuntuone.platform import ( |
3444 | make_dir, |
3445 | make_link, |
3446 | @@ -69,6 +64,11 @@ |
3447 | ACCESS_LEVEL_RO, |
3448 | ACCESS_LEVEL_RW, |
3449 | ) |
3450 | +from ubuntuone.testing.testcase import ( |
3451 | + BaseTwistedTestCase, |
3452 | + FakeVolumeManager, |
3453 | + skip_if_win32_and_uses_readonly, |
3454 | +) |
3455 | |
3456 | # our logging level |
3457 | TRACE = logging.getLevelName('TRACE') |
3458 | |
3459 | === modified file 'ubuntuone/syncdaemon/tests/test_main.py' |
3460 | --- ubuntuone/syncdaemon/tests/test_main.py 2017-02-10 01:15:07 +0000 |
3461 | +++ ubuntuone/syncdaemon/tests/test_main.py 2018-04-14 23:11:29 +0000 |
3462 | @@ -36,9 +36,6 @@ |
3463 | from ubuntuone.devtools.handlers import MementoHandler |
3464 | from ubuntuone.platform import expand_user |
3465 | |
3466 | -from contrib.testing.testcase import ( |
3467 | - BaseTwistedTestCase, FAKED_CREDENTIALS, FakeMonitor |
3468 | -) |
3469 | from ubuntuone.clientdefs import VERSION |
3470 | from ubuntuone.logger import NOTE |
3471 | from ubuntuone.platform import ( |
3472 | @@ -49,6 +46,9 @@ |
3473 | remove_dir, |
3474 | ) |
3475 | from ubuntuone.syncdaemon import main as main_mod |
3476 | +from ubuntuone.testing.testcase import ( |
3477 | + BaseTwistedTestCase, FAKED_CREDENTIALS, FakeMonitor |
3478 | +) |
3479 | |
3480 | |
3481 | class FakeListener(object): |
3482 | |
3483 | === modified file 'ubuntuone/syncdaemon/tests/test_states.py' |
3484 | --- ubuntuone/syncdaemon/tests/test_states.py 2018-03-08 18:32:01 +0000 |
3485 | +++ ubuntuone/syncdaemon/tests/test_states.py 2018-04-14 23:11:29 +0000 |
3486 | @@ -33,14 +33,13 @@ |
3487 | from twisted.internet import defer, reactor |
3488 | from twisted.trial.unittest import TestCase as TwistedTestCase |
3489 | |
3490 | -from contrib.testing.testcase import FakeLogger |
3491 | -from ubuntuone.syncdaemon import states |
3492 | from ubuntuone.syncdaemon.states import ( |
3493 | ConnectionManager, |
3494 | Node, |
3495 | QueueManager, |
3496 | StateManager, |
3497 | ) |
3498 | +from ubuntuone.testing.testcase import FakeLogger |
3499 | |
3500 | |
3501 | class FakeEventQueue(object): |
3502 | |
3503 | === modified file 'ubuntuone/syncdaemon/tests/test_sync.py' |
3504 | --- ubuntuone/syncdaemon/tests/test_sync.py 2018-03-09 09:11:05 +0000 |
3505 | +++ ubuntuone/syncdaemon/tests/test_sync.py 2018-04-14 23:11:29 +0000 |
3506 | @@ -43,13 +43,6 @@ |
3507 | from twisted.python.failure import Failure |
3508 | from ubuntuone.devtools.testcases import skipIfOS |
3509 | |
3510 | -from contrib.testing.testcase import ( |
3511 | - FakeMain, |
3512 | - FakeVolumeManager, |
3513 | - BaseTwistedTestCase, |
3514 | - Listener, |
3515 | -) |
3516 | - |
3517 | from ubuntuone.devtools.handlers import MementoHandler |
3518 | from ubuntuone.platform import ( |
3519 | make_dir, |
3520 | @@ -66,6 +59,12 @@ |
3521 | from ubuntuone.storageprotocol.request import ROOT |
3522 | from ubuntuone.storageprotocol import delta |
3523 | from ubuntuone.syncdaemon.marker import MDMarker |
3524 | +from ubuntuone.testing.testcase import ( |
3525 | + FakeMain, |
3526 | + FakeVolumeManager, |
3527 | + BaseTwistedTestCase, |
3528 | + Listener, |
3529 | +) |
3530 | |
3531 | |
3532 | class TestSyncClassAPI(unittest.TestCase): |
3533 | |
3534 | === modified file 'ubuntuone/syncdaemon/tests/test_tritcask.py' |
3535 | --- ubuntuone/syncdaemon/tests/test_tritcask.py 2016-07-30 21:58:48 +0000 |
3536 | +++ ubuntuone/syncdaemon/tests/test_tritcask.py 2018-04-14 23:11:29 +0000 |
3537 | @@ -42,7 +42,6 @@ |
3538 | from operator import attrgetter |
3539 | from twisted.internet import defer |
3540 | |
3541 | -from contrib.testing.testcase import BaseTwistedTestCase |
3542 | from ubuntuone.devtools.handlers import MementoHandler |
3543 | from ubuntuone.syncdaemon import tritcask |
3544 | from ubuntuone.syncdaemon.tritcask import ( |
3545 | @@ -79,6 +78,7 @@ |
3546 | timestamp, |
3547 | logger, |
3548 | ) |
3549 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
3550 | |
3551 | |
3552 | class BaseTestCase(BaseTwistedTestCase): |
3553 | |
3554 | === modified file 'ubuntuone/syncdaemon/tests/test_vm.py' |
3555 | --- ubuntuone/syncdaemon/tests/test_vm.py 2016-06-04 21:14:35 +0000 |
3556 | +++ ubuntuone/syncdaemon/tests/test_vm.py 2018-04-14 23:11:29 +0000 |
3557 | @@ -51,12 +51,8 @@ |
3558 | ShareResponse, |
3559 | ) |
3560 | |
3561 | -from contrib.testing.testcase import ( |
3562 | - BaseTwistedTestCase, |
3563 | - FakeMain, |
3564 | -) |
3565 | -from ubuntuone import platform |
3566 | -from ubuntuone.syncdaemon import config, event_queue, tritcask, volume_manager |
3567 | +from ubuntuone import config, platform |
3568 | +from ubuntuone.syncdaemon import event_queue, tritcask, volume_manager |
3569 | from ubuntuone.syncdaemon.volume_manager import ( |
3570 | ACCESS_LEVEL_RO, |
3571 | ACCESS_LEVEL_RW, |
3572 | @@ -86,6 +82,10 @@ |
3573 | set_dir_readonly, |
3574 | set_dir_readwrite, |
3575 | ) |
3576 | +from ubuntuone.testing.testcase import ( |
3577 | + BaseTwistedTestCase, |
3578 | + FakeMain, |
3579 | +) |
3580 | |
3581 | # grab the metadata version before tests fiddle with it |
3582 | CURRENT_METADATA_VERSION = VolumeManager.METADATA_VERSION |
3583 | |
3584 | === modified file 'ubuntuone/syncdaemon/tests/test_vm_helper.py' |
3585 | --- ubuntuone/syncdaemon/tests/test_vm_helper.py 2016-06-03 19:52:03 +0000 |
3586 | +++ ubuntuone/syncdaemon/tests/test_vm_helper.py 2018-04-14 23:11:29 +0000 |
3587 | @@ -35,7 +35,6 @@ |
3588 | import os |
3589 | import uuid |
3590 | |
3591 | -from contrib.testing.testcase import BaseTwistedTestCase |
3592 | from ubuntuone.platform import expand_user, os_helper |
3593 | from ubuntuone.syncdaemon import vm_helper |
3594 | from ubuntuone.syncdaemon.tests.test_vm import BaseVolumeManagerTests |
3595 | @@ -45,6 +44,7 @@ |
3596 | get_udf_path, |
3597 | get_udf_suggested_path, |
3598 | ) |
3599 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
3600 | |
3601 | |
3602 | class VMHelperTest(BaseVolumeManagerTests): |
3603 | |
3604 | === modified file 'ubuntuone/syncdaemon/utils.py' |
3605 | --- ubuntuone/syncdaemon/utils.py 2018-03-14 21:01:56 +0000 |
3606 | +++ ubuntuone/syncdaemon/utils.py 2018-04-14 23:11:29 +0000 |
3607 | @@ -47,12 +47,14 @@ |
3608 | DARWIN_APP_NAMES = {SYNCDAEMON_EXECUTABLE: 'UbuntuOne Syncdaemon.app'} |
3609 | |
3610 | |
3611 | -def _get_bin_cmd(exe_name, extra_fallbacks=[]): |
3612 | +def _get_bin_cmd(exe_name, extra_fallbacks=None): |
3613 | """Get cmd+args to launch 'exe_name'.""" |
3614 | syncdaemon_dir = os.path.dirname(__file__) |
3615 | ubuntuone_dir = os.path.dirname(syncdaemon_dir) |
3616 | tree_dir = os.path.dirname(ubuntuone_dir) |
3617 | - fallback_dirs = [os.path.join(tree_dir, 'bin')] + extra_fallbacks |
3618 | + fallback_dirs = [os.path.join(tree_dir, 'bin')] |
3619 | + if extra_fallbacks is not None: |
3620 | + fallback_dirs += extra_fallbacks |
3621 | path = get_program_path(exe_name, |
3622 | fallback_dirs=fallback_dirs, |
3623 | app_names=DARWIN_APP_NAMES) |
3624 | |
3625 | === modified file 'ubuntuone/syncdaemon/volume_manager.py' |
3626 | --- ubuntuone/syncdaemon/volume_manager.py 2016-06-01 21:38:23 +0000 |
3627 | +++ ubuntuone/syncdaemon/volume_manager.py 2018-04-14 23:11:29 +0000 |
3628 | @@ -42,17 +42,11 @@ |
3629 | from itertools import ifilter |
3630 | |
3631 | from twisted.internet import defer |
3632 | -from ubuntuone.platform import expand_user |
3633 | -from ubuntuone.storageprotocol import request |
3634 | -from ubuntuone.storageprotocol.volumes import ( |
3635 | - ShareVolume, |
3636 | - UDFVolume, |
3637 | - RootVolume, |
3638 | -) |
3639 | |
3640 | +from ubuntuone import config |
3641 | +from ubuntuone.syncdaemon import file_shelf |
3642 | from ubuntuone.syncdaemon.marker import MDMarker |
3643 | from ubuntuone.syncdaemon.interfaces import IMarker |
3644 | -from ubuntuone.syncdaemon import file_shelf, config |
3645 | from ubuntuone.syncdaemon.tritcask import TritcaskShelf |
3646 | from ubuntuone.syncdaemon.vm_helper import ( |
3647 | create_shares_link, |
3648 | @@ -62,6 +56,7 @@ |
3649 | ) |
3650 | from ubuntuone.platform import ( |
3651 | allow_writes, |
3652 | + expand_user, |
3653 | get_path_list, |
3654 | is_link, |
3655 | listdir, |
3656 | @@ -79,6 +74,12 @@ |
3657 | set_dir_readwrite, |
3658 | walk, |
3659 | ) |
3660 | +from ubuntuone.storageprotocol import request |
3661 | +from ubuntuone.storageprotocol.volumes import ( |
3662 | + ShareVolume, |
3663 | + UDFVolume, |
3664 | + RootVolume, |
3665 | +) |
3666 | |
3667 | # tritcask row types |
3668 | SHARE_ROW_TYPE = 3 |
3669 | |
3670 | === renamed directory 'contrib/testing' => 'ubuntuone/testing' |
3671 | === modified file 'ubuntuone/testing/testcase.py' |
3672 | --- contrib/testing/testcase.py 2018-03-14 21:01:56 +0000 |
3673 | +++ ubuntuone/testing/testcase.py 2018-04-14 23:11:29 +0000 |
3674 | @@ -47,8 +47,8 @@ |
3675 | from zope.interface import implements |
3676 | from zope.interface.verify import verifyObject |
3677 | |
3678 | +from ubuntuone import config, logger, platform |
3679 | from ubuntuone.syncdaemon import ( |
3680 | - config, |
3681 | action_queue, |
3682 | event_queue, |
3683 | filesystem_manager as fs_manager, |
3684 | @@ -61,8 +61,6 @@ |
3685 | RECENT_TRANSFERS, |
3686 | UPLOADING, |
3687 | ) |
3688 | -from ubuntuone.syncdaemon import logger |
3689 | -from ubuntuone import platform |
3690 | from ubuntuone.platform import ( |
3691 | can_write, |
3692 | make_dir, |
3693 | @@ -72,7 +70,9 @@ |
3694 | stat_path, |
3695 | ) |
3696 | |
3697 | -logger.init() |
3698 | +if not path_exists('tmp/logs'): |
3699 | + make_dir('tmp/logs', recursive=True) |
3700 | +logger.init(logs_dir='tmp/logs', level=logging.DEBUG) |
3701 | |
3702 | FAKED_CREDENTIALS = {'username': 'test_username', |
3703 | 'password': 'test_password'} |
3704 | @@ -259,8 +259,8 @@ |
3705 | self.vm = volume_manager.VolumeManager(self) |
3706 | self.fs = fs_manager.FileSystemManager( |
3707 | self.data_dir, self.partials_dir, self.vm, self.db) |
3708 | - self.event_q = event_queue.EventQueue(self.fs, |
3709 | - monitor_class=self._monitor_class) |
3710 | + self.event_q = event_queue.EventQueue( |
3711 | + self.fs, monitor_class=self._monitor_class) |
3712 | self.fs.register_eq(self.event_q) |
3713 | self.action_q = self._fake_AQ_class(self.event_q, self, |
3714 | *self._fake_AQ_params) |
3715 | @@ -405,11 +405,14 @@ |
3716 | |
3717 | # Patch the user home |
3718 | self.home_dir = self.mktemp('ubuntuonehacker') |
3719 | - self.patch(platform, "user_home", self.home_dir) |
3720 | + self.patch( |
3721 | + os.path, "expanduser", lambda p: p.replace('~', self.home_dir)) |
3722 | |
3723 | # use the config from the branch |
3724 | - new_get_config_files = lambda: [os.path.join(os.environ['ROOTDIR'], |
3725 | - 'data', 'syncdaemon.conf')] |
3726 | + def new_get_config_files(): |
3727 | + return [ |
3728 | + os.path.join(os.environ['ROOTDIR'], 'data', 'syncdaemon.conf')] |
3729 | + |
3730 | self.patch(config, 'get_config_files', new_get_config_files) |
3731 | |
3732 | # fake a very basic config file with sane defaults for the tests |
3733 | @@ -626,7 +629,10 @@ |
3734 | try: |
3735 | result = super(FakedObject, self).__getattribute__(attr_name) |
3736 | except AttributeError: |
3737 | - result = lambda *a, **kw: None |
3738 | + |
3739 | + def result(*a, **kw): |
3740 | + return None |
3741 | + |
3742 | super(FakedObject, self).__setattr__(attr_name, result) |
3743 | |
3744 | if attr_name == '_called': |
3745 | |
3746 | === renamed file 'ubuntuone/syncdaemon/tests/test_config.py' => 'ubuntuone/tests/test_config.py' |
3747 | --- ubuntuone/syncdaemon/tests/test_config.py 2018-03-14 21:55:27 +0000 |
3748 | +++ ubuntuone/tests/test_config.py 2018-04-14 23:11:29 +0000 |
3749 | @@ -37,15 +37,10 @@ |
3750 | from ConfigParser import ConfigParser |
3751 | from twisted.internet import defer |
3752 | from twisted.trial.unittest import TestCase |
3753 | -from dirspec.basedir import ( |
3754 | - xdg_data_home, |
3755 | - xdg_cache_home, |
3756 | -) |
3757 | |
3758 | -from contrib.testing.testcase import BaseTwistedTestCase |
3759 | -from ubuntuone import platform |
3760 | +from ubuntuone import config, platform |
3761 | from ubuntuone.platform import open_file, path_exists |
3762 | -from ubuntuone.syncdaemon import config |
3763 | +from ubuntuone.testing.testcase import BaseTwistedTestCase |
3764 | |
3765 | |
3766 | class TestConfigBasic(BaseTwistedTestCase): |
3767 | @@ -441,33 +436,6 @@ |
3768 | self.assertEqual(conf.get_memory_pool_limit(), 666) |
3769 | |
3770 | |
3771 | -class UnicodePathsTestCase(TestCase): |
3772 | - """Tests for unicode paths.""" |
3773 | - |
3774 | - def test_get_config_files_path_encoding(self): |
3775 | - """Check that get_config_files uses paths in the right encoding.""" |
3776 | - temp = self.mktemp() |
3777 | - fake_path = os.path.join(temp, u"Ñandú".encode("utf8")) |
3778 | - assert isinstance(fake_path, str) |
3779 | - os.makedirs(fake_path) |
3780 | - with open(os.path.join(fake_path, config.CONFIG_FILE), "w") as f: |
3781 | - f.write("this is a fake config file") |
3782 | - self.patch(config, "load_config_paths", lambda _: [fake_path]) |
3783 | - config_files = config.get_config_files() |
3784 | - branch_config = os.path.join(fake_path, config.CONFIG_FILE) |
3785 | - self.assertIn(branch_config, config_files) |
3786 | - |
3787 | - def test_load_branch_configuration(self): |
3788 | - """Check that the configuration from the branch is loaded.""" |
3789 | - config_files = [os.path.normpath(p) for p in config.get_config_files()] |
3790 | - rootdir = os.environ['ROOTDIR'] |
3791 | - branch_config = os.path.join(rootdir, "data", config.CONFIG_FILE) |
3792 | - branch_logging_config = os.path.join( |
3793 | - rootdir, "data", config.CONFIG_LOGS) |
3794 | - self.assertIn(branch_config, config_files) |
3795 | - self.assertIn(branch_logging_config, config_files) |
3796 | - |
3797 | - |
3798 | class ConfigglueParsersTests(BaseTwistedTestCase): |
3799 | """Tests for our custom configglue parsers.""" |
3800 | |
3801 | @@ -563,56 +531,6 @@ |
3802 | }]) |
3803 | |
3804 | |
3805 | -class XdgHomeParsersTests(BaseTwistedTestCase): |
3806 | - """Tests for our custom xdg parsers.""" |
3807 | - |
3808 | - good_value = '~/hola/mundo' |
3809 | - name = 'home' |
3810 | - xdg_dir = os.path.join('', 'home', 'fake') |
3811 | - |
3812 | - @defer.inlineCallbacks |
3813 | - def setUp(self): |
3814 | - yield super(XdgHomeParsersTests, self).setUp() |
3815 | - self.parser = getattr(config, '%s_dir_parser' % self.name) |
3816 | - |
3817 | - def test_good_value(self): |
3818 | - """Test the parser using a good value.""" |
3819 | - homedir = os.path.join('', 'home', 'fake') |
3820 | - self.patch(platform, 'user_home', homedir) |
3821 | - expected = os.path.join(self.xdg_dir, 'hola', 'mundo') |
3822 | - actual = self.parser(self.good_value) |
3823 | - self.assertEqual(expected, actual) |
3824 | - self.assertIsInstance(actual, str) |
3825 | - self.assertNotIsInstance(actual, unicode) |
3826 | - |
3827 | - def test_bad_value(self): |
3828 | - """Test the parser using a bad value.""" |
3829 | - bad_value = '/hola' |
3830 | - self.assertEqual(config.path_from_unix(bad_value), |
3831 | - self.parser(bad_value)) |
3832 | - |
3833 | - def test_invalid_value(self): |
3834 | - """Test the parser using an invalid value.""" |
3835 | - invalid_value = None |
3836 | - self.assertRaises(AttributeError, self.parser, invalid_value) |
3837 | - |
3838 | - |
3839 | -class XdgCacheParsersTests(XdgHomeParsersTests): |
3840 | - """Tests for our custom xdg parsers.""" |
3841 | - |
3842 | - good_value = 'hola/mundo' |
3843 | - name = 'xdg_cache' |
3844 | - xdg_dir = xdg_cache_home |
3845 | - |
3846 | - |
3847 | -class XdgDataParsersTests(XdgCacheParsersTests): |
3848 | - """Tests for our custom xdg parsers.""" |
3849 | - |
3850 | - good_value = 'hola/mundo' |
3851 | - name = 'xdg_data' |
3852 | - xdg_dir = xdg_data_home |
3853 | - |
3854 | - |
3855 | class SyncDaemonConfigParserTests(BaseTwistedTestCase): |
3856 | """Tests for SyncDaemonConfigParser.""" |
3857 | |
3858 | |
3859 | === renamed file 'ubuntuone/syncdaemon/tests/test_logger.py' => 'ubuntuone/tests/test_logger.py' |
3860 | --- ubuntuone/syncdaemon/tests/test_logger.py 2016-06-04 21:14:35 +0000 |
3861 | +++ ubuntuone/tests/test_logger.py 2018-04-14 23:11:29 +0000 |
3862 | @@ -40,7 +40,7 @@ |
3863 | from ubuntuone.devtools.handlers import MementoHandler |
3864 | from ubuntuone.devtools.testcases import skipIfOS |
3865 | |
3866 | -from ubuntuone.syncdaemon.logger import ( |
3867 | +from ubuntuone.logger import ( |
3868 | DebugCapture, |
3869 | NOTE, |
3870 | TRACE, |
3871 | |
3872 | === modified file 'ubuntuone/utils/__init__.py' |
3873 | --- ubuntuone/utils/__init__.py 2016-06-04 21:14:35 +0000 |
3874 | +++ ubuntuone/utils/__init__.py 2018-04-14 23:11:29 +0000 |
3875 | @@ -37,9 +37,6 @@ |
3876 | import sys |
3877 | |
3878 | from dirspec.basedir import load_config_paths |
3879 | -from dirspec.utils import get_program_path |
3880 | - |
3881 | -from twisted.python import procutils |
3882 | |
3883 | |
3884 | logger = logging.getLogger(__name__) |
3885 | @@ -82,54 +79,6 @@ |
3886 | logger.error(msg, __file__) |
3887 | |
3888 | |
3889 | -def get_project_dir(): |
3890 | - """Return the absolute path to this project's data/ dir. |
3891 | - |
3892 | - Support symlinks, and priorize local (relative) data/ dir. If not |
3893 | - found, return the value of the PROJECT_DIR. |
3894 | - |
3895 | - """ |
3896 | - result = _get_dir(dir_name=DATA_SUFFIX, dir_constant='PROJECT_DIR') |
3897 | - assert result is not None, '%r dir can not be None.' % DATA_SUFFIX |
3898 | - return result |
3899 | - |
3900 | - |
3901 | -def get_data_file(*args): |
3902 | - """Return the absolute path to 'args' within project data dir.""" |
3903 | - return os.path.join(get_project_dir(), *args) |
3904 | - |
3905 | - |
3906 | -def get_bin_dir(): |
3907 | - """Return the absolute path to this project's bin/ dir. |
3908 | - |
3909 | - Support symlinks, and priorize local (relative) bin/ dir. If not |
3910 | - found, return the value of the BIN_DIR. |
3911 | - |
3912 | - """ |
3913 | - result = _get_dir(dir_name=BIN_SUFFIX, dir_constant='BIN_DIR') |
3914 | - assert result is not None, '%r dir can not be None.' % BIN_SUFFIX |
3915 | - logger.info('get_bin_dir: returning dir located at %r.', result) |
3916 | - return result |
3917 | - |
3918 | - |
3919 | -def get_bin_cmd(program_name): |
3920 | - """Return a list of arguments to launch the given executable.""" |
3921 | - path = get_program_path(program_name, |
3922 | - fallback_dirs=[get_bin_dir()]) |
3923 | - cmd_args = [path] |
3924 | - |
3925 | - # adjust cmd for platforms using buildout-generated python |
3926 | - # wrappers |
3927 | - if getattr(sys, 'frozen', None) is None: |
3928 | - if sys.platform in ('darwin'): |
3929 | - cmd_args.insert(0, 'python') |
3930 | - elif sys.platform in ('win32'): |
3931 | - cmd_args.insert(0, procutils.which("python.exe")[0]) |
3932 | - |
3933 | - logger.debug('get_bin_cmd: returning %r', cmd_args) |
3934 | - return cmd_args |
3935 | - |
3936 | - |
3937 | def get_cert_dir(): |
3938 | """Return directory containing certificate files.""" |
3939 | |
3940 | |
3941 | === modified file 'ubuntuone/utils/tests/test_common.py' |
3942 | --- ubuntuone/utils/tests/test_common.py 2017-01-07 18:51:07 +0000 |
3943 | +++ ubuntuone/utils/tests/test_common.py 2018-04-14 23:11:29 +0000 |
3944 | @@ -32,13 +32,11 @@ |
3945 | |
3946 | from __future__ import unicode_literals |
3947 | |
3948 | -import logging |
3949 | import sys |
3950 | import os |
3951 | |
3952 | from twisted.internet import defer |
3953 | from twisted.web import resource |
3954 | -from ubuntuone.devtools.handlers import MementoHandler |
3955 | from ubuntuone.devtools.testing.txwebserver import HTTPWebServer |
3956 | |
3957 | from ubuntuone import utils |
3958 | @@ -48,112 +46,6 @@ |
3959 | NOT_DEFINED = object() |
3960 | |
3961 | |
3962 | -class FakedConstantsModule(object): |
3963 | - """Fake the 'ubuntuone.controlpanel.constants' module.""" |
3964 | - |
3965 | - PROJECT_DIR = '/tmp/foo/bar' |
3966 | - BIN_DIR = '/tmp/foo/bin' |
3967 | - |
3968 | - |
3969 | -class GetProjectDirTestCase(TestCase): |
3970 | - """Test case for get_project_dir when constants module is not defined.""" |
3971 | - |
3972 | - DIR_NAME = utils.DATA_SUFFIX |
3973 | - DIR_CONSTANT = 'PROJECT_DIR' |
3974 | - DIR_GETTER = 'get_project_dir' |
3975 | - |
3976 | - @defer.inlineCallbacks |
3977 | - def setUp(self): |
3978 | - yield super(GetProjectDirTestCase, self).setUp() |
3979 | - self._constants = sys.modules.get(CONSTANTS_MODULE, NOT_DEFINED) |
3980 | - sys.modules[CONSTANTS_MODULE] = None # force ImportError |
3981 | - |
3982 | - self.memento = MementoHandler() |
3983 | - self.memento.setLevel(logging.DEBUG) |
3984 | - utils.logger.addHandler(self.memento) |
3985 | - self.addCleanup(utils.logger.removeHandler, self.memento) |
3986 | - |
3987 | - self.get_dir = getattr(utils, self.DIR_GETTER) |
3988 | - |
3989 | - @defer.inlineCallbacks |
3990 | - def tearDown(self): |
3991 | - if self._constants is not NOT_DEFINED: |
3992 | - sys.modules[CONSTANTS_MODULE] = self._constants |
3993 | - else: |
3994 | - sys.modules.pop(CONSTANTS_MODULE) |
3995 | - yield super(GetProjectDirTestCase, self).tearDown() |
3996 | - |
3997 | - def test_get_dir_relative(self): |
3998 | - """The relative path for the data directory is correctly retrieved.""" |
3999 | - module = utils.os.path.dirname(utils.__file__) |
4000 | - rel_data = utils.os.path.join(module, |
4001 | - utils.os.path.pardir, |
4002 | - utils.os.path.pardir, |
4003 | - self.DIR_NAME) |
4004 | - expected_dir = utils.os.path.abspath(rel_data) |
4005 | - |
4006 | - # ensure expected_path exists at os level |
4007 | - self.patch(utils.os.path, 'exists', lambda path: path == expected_dir) |
4008 | - |
4009 | - result = self.get_dir() |
4010 | - self.assertEqual(expected_dir, result) |
4011 | - |
4012 | - def test_get_dir_none_exists(self): |
4013 | - """No data directory exists, return None and log as error.""" |
4014 | - self.patch(utils.os.path, 'exists', lambda path: False) |
4015 | - sys.modules[CONSTANTS_MODULE] = None |
4016 | - |
4017 | - self.assertRaises(AssertionError, self.get_dir) |
4018 | - msg = 'get_dir: can not build a valid path.' |
4019 | - self.assertTrue(self.memento.check_error(msg)) |
4020 | - |
4021 | - |
4022 | -class GetProjectDirWithConstantsTestCase(GetProjectDirTestCase): |
4023 | - """Test case for get_dir when constants module is defined.""" |
4024 | - |
4025 | - @defer.inlineCallbacks |
4026 | - def setUp(self): |
4027 | - yield super(GetProjectDirWithConstantsTestCase, self).setUp() |
4028 | - self.patch(utils.os.path, 'exists', lambda path: False) |
4029 | - self._constants = sys.modules.get(CONSTANTS_MODULE, NOT_DEFINED) |
4030 | - sys.modules[CONSTANTS_MODULE] = FakedConstantsModule() |
4031 | - |
4032 | - def test_get_dir(self): |
4033 | - """If the constants.py module exists, use PROJECT_DIR from it.""" |
4034 | - result = self.get_dir() |
4035 | - expected = getattr(sys.modules[CONSTANTS_MODULE], self.DIR_CONSTANT) |
4036 | - self.assertEqual(expected, result) |
4037 | - |
4038 | - |
4039 | -class GetBinDirTestCase(GetProjectDirTestCase): |
4040 | - """Test case for get_bin_dir when constants module is not defined.""" |
4041 | - |
4042 | - DIR_NAME = utils.BIN_SUFFIX |
4043 | - DIR_CONSTANT = 'BIN_DIR' |
4044 | - DIR_GETTER = 'get_bin_dir' |
4045 | - |
4046 | - |
4047 | -class GetBinDirWithConstantsTestCase(GetProjectDirWithConstantsTestCase): |
4048 | - """Test case for get_bin_dir when constants module is defined.""" |
4049 | - |
4050 | - DIR_NAME = utils.BIN_SUFFIX |
4051 | - DIR_CONSTANT = 'BIN_DIR' |
4052 | - DIR_GETTER = 'get_bin_dir' |
4053 | - |
4054 | - |
4055 | -class GetDataFileTestCase(TestCase): |
4056 | - """Test cases for get_data_file.""" |
4057 | - |
4058 | - def test_get_data_file(self): |
4059 | - """The path for a data file is correctly retrieved.""" |
4060 | - dummy_dir = '/yadda/yadda' |
4061 | - dummy_file = 'test.png' |
4062 | - self.patch(utils, 'get_project_dir', lambda: dummy_dir) |
4063 | - result = utils.get_data_file(dummy_file) |
4064 | - expected = utils.os.path.join(dummy_dir, dummy_file) |
4065 | - self.assertEqual(expected, result) |
4066 | - |
4067 | - |
4068 | class GetCertDirTestCase(TestCase): |
4069 | """Test determining the cert location.""" |
4070 |
Some comments inline.
In general, you're touching a lot of files but not fixing the copyright headers (removing specific authors, adding 2016 chicharreros, etc.).
Beyond that, I wanted to check that this new "base dirs mangling" didn't break starting the client in development mode against a local server, but I couldn't make it work because of other server issues: http://linkode.org/HgBj5gXEAtyNKzMxnCEyc5
Probably we should go with this branch, but then make sure all is ok for starting the client against a dev local server.
What do you think?