Merge lp:~nataliabidart/magicicada-server/embed-u1sync into lp:magicicada-server

Proposed by Natalia Bidart
Status: Merged
Approved by: Natalia Bidart
Approved revision: 95
Merged at revision: 90
Proposed branch: lp:~nataliabidart/magicicada-server/embed-u1sync
Merge into: lp:magicicada-server
Diff against target: 2652 lines (+2362/-31)
28 files modified
config-manager.txt (+0/-1)
lib/utilities/testlib.py (+0/-1)
lib/utilities/utils.py (+1/-1)
magicicada/filesync/services.py (+1/-2)
magicicada/metrics/tests/test_base.py (+1/-2)
magicicada/monitoring/reactor.py (+1/-2)
magicicada/monitoring/tests/test_reactor.py (+1/-2)
magicicada/server/integtests/test_sync.py (+4/-4)
magicicada/server/server.py (+1/-3)
magicicada/server/ssl_proxy.py (+1/-3)
magicicada/server/stats.py (+1/-2)
magicicada/server/tests/test_server.py (+1/-3)
magicicada/server/tests/test_ssl_proxy.py (+1/-3)
magicicada/server/tests/test_stats.py (+1/-2)
magicicada/u1sync/__init__.py (+14/-0)
magicicada/u1sync/client.py (+736/-0)
magicicada/u1sync/constants.py (+26/-0)
magicicada/u1sync/genericmerge.py (+86/-0)
magicicada/u1sync/main.py (+443/-0)
magicicada/u1sync/merge.py (+181/-0)
magicicada/u1sync/metadata.py (+155/-0)
magicicada/u1sync/scan.py (+84/-0)
magicicada/u1sync/sync.py (+377/-0)
magicicada/u1sync/tests/__init__.py (+1/-0)
magicicada/u1sync/tests/test_client.py (+63/-0)
magicicada/u1sync/tests/test_init.py (+22/-0)
magicicada/u1sync/tests/test_merge.py (+109/-0)
magicicada/u1sync/utils.py (+50/-0)
To merge this branch: bzr merge lp:~nataliabidart/magicicada-server/embed-u1sync
Reviewer | Review Type | Date Requested | Status
Facundo Batista | (none) | (not shown) | Approve
Review via email: mp+343570@code.launchpad.net

Commit message

- Embed u1sync test dependency inside magicicada source code.

To post a comment you must log in.
Revision history for this message
Facundo Batista (facundo) wrote :

Awesome!

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'config-manager.txt'
2--- config-manager.txt 2018-04-18 20:28:35 +0000
3+++ config-manager.txt 2018-04-20 01:22:48 +0000
4@@ -21,5 +21,4 @@
5 #
6 # make clean-sourcedeps sourcedeps
7
8-./.sourcecode/u1sync ~facundo/u1sync/opensourcing;revno=10
9 ./.sourcecode/magicicada-client ~chicharreros/magicicada-client/trunk;revno=1442
10
11=== removed symlink 'lib/metrics'
12=== target was u'../magicicada/metrics/'
13=== removed symlink 'lib/u1sync'
14=== target was u'../.sourcecode/u1sync/u1sync'
15=== modified file 'lib/utilities/testlib.py'
16--- lib/utilities/testlib.py 2018-04-05 21:08:25 +0000
17+++ lib/utilities/testlib.py 2018-04-20 01:22:48 +0000
18@@ -20,7 +20,6 @@
19
20 from __future__ import unicode_literals
21
22-import logging
23 import os
24 import re
25
26
27=== modified file 'lib/utilities/utils.py'
28--- lib/utilities/utils.py 2018-04-05 21:08:25 +0000
29+++ lib/utilities/utils.py 2018-04-20 01:22:48 +0000
30@@ -37,7 +37,7 @@
31 with open(proc_stat, 'r') as f:
32 if proc_name in f.read():
33 return True
34- except:
35+ except Exception:
36 # ignore all errors
37 pass
38 return False
39
40=== modified file 'magicicada/filesync/services.py'
41--- magicicada/filesync/services.py 2018-04-05 21:08:25 +0000
42+++ magicicada/filesync/services.py 2018-04-20 01:22:48 +0000
43@@ -34,12 +34,11 @@
44 from functools import wraps
45 from weakref import WeakValueDictionary
46
47-import metrics
48-
49 from django.conf import settings
50 from django.db import connection, models
51 from django.utils.timezone import now
52
53+from magicicada import metrics
54 from magicicada.filesync import errors, utils
55 from magicicada.filesync.dbmanager import (
56 fsync_readonly,
57
58=== modified file 'magicicada/metrics/tests/test_base.py'
59--- magicicada/metrics/tests/test_base.py 2016-06-04 19:05:51 +0000
60+++ magicicada/metrics/tests/test_base.py 2018-04-20 01:22:48 +0000
61@@ -22,8 +22,7 @@
62 import logging
63 import unittest
64
65-import metrics
66-
67+from magicicada import metrics
68 from magicicada.testing.testcase import BaseTestCase
69
70
71
72=== modified file 'magicicada/monitoring/reactor.py'
73--- magicicada/monitoring/reactor.py 2018-04-05 21:08:25 +0000
74+++ magicicada/monitoring/reactor.py 2018-04-20 01:22:48 +0000
75@@ -28,8 +28,7 @@
76 import traceback
77 import Queue
78
79-import metrics
80-
81+from magicicada import metrics
82 from magicicada.settings import TRACE
83
84
85
86=== modified file 'magicicada/monitoring/tests/test_reactor.py'
87--- magicicada/monitoring/tests/test_reactor.py 2018-04-05 21:08:25 +0000
88+++ magicicada/monitoring/tests/test_reactor.py 2018-04-20 01:22:48 +0000
89@@ -26,8 +26,7 @@
90 from twisted.trial.unittest import TestCase as TwistedTestCase
91 from twisted.internet import reactor, defer
92
93-import metrics
94-
95+from magicicada import metrics
96 from magicicada.monitoring.reactor import ReactorInspector, logger
97 from magicicada.settings import TRACE
98 from magicicada.testing.testcase import BaseTestCase
99
100=== modified file 'magicicada/server/integtests/test_sync.py'
101--- magicicada/server/integtests/test_sync.py 2018-04-05 21:08:25 +0000
102+++ magicicada/server/integtests/test_sync.py 2018-04-20 01:22:48 +0000
103@@ -35,9 +35,6 @@
104 from twisted.internet import reactor, defer
105 from twisted.python.failure import Failure
106
107-import u1sync.client
108-
109-from u1sync.main import do_diff, do_init, do_sync
110 from ubuntuone.platform import tools
111 from ubuntuone.storageprotocol import client as protocol_client
112 from ubuntuone.storageprotocol import request
113@@ -59,6 +56,9 @@
114 )
115 from magicicada.server.testing.caps_helpers import required_caps
116 from magicicada.server.testing.testcase import logger
117+from magicicada.u1sync import client as u1sync_client
118+from magicicada.u1sync.main import do_diff, do_init, do_sync
119+
120
121 U1SYNC_QUIET = True
122 NO_CONTENT_HASH = ""
123@@ -225,7 +225,7 @@
124
125 def _u1sync_client(self):
126 """Create a u1sync client instance."""
127- client = u1sync.client.Client()
128+ client = u1sync_client.Client()
129 client.connect_ssl("localhost", self.ssl_port, True)
130 client.set_capabilities()
131 auth_info = self.access_tokens['jack']
132
133=== modified file 'magicicada/server/server.py'
134--- magicicada/server/server.py 2018-04-05 21:08:25 +0000
135+++ magicicada/server/server.py 2018-04-20 01:22:48 +0000
136@@ -40,8 +40,6 @@
137 import twisted
138 import twisted.web.error
139
140-import metrics
141-
142 from twisted.application.service import MultiService, Service
143 from twisted.application.internet import TCPServer
144 from twisted.internet.defer import maybeDeferred, inlineCallbacks
145@@ -51,7 +49,7 @@
146 from ubuntuone.storageprotocol import protocol_pb2, request, sharersp
147 from ubuntuone.supervisor import utils as supervisor_utils
148
149-from magicicada import settings
150+from magicicada import metrics, settings
151 from magicicada.filesync import errors as dataerror
152 from magicicada.filesync.notifier import notifier
153 from magicicada.monitoring.reactor import ReactorInspector
154
155=== modified file 'magicicada/server/ssl_proxy.py'
156--- magicicada/server/ssl_proxy.py 2018-04-05 21:08:25 +0000
157+++ magicicada/server/ssl_proxy.py 2018-04-20 01:22:48 +0000
158@@ -31,9 +31,7 @@
159 from twisted.protocols import portforward, basic
160 from twisted.web import server, resource
161
162-import metrics
163-
164-from magicicada import settings
165+from magicicada import metrics, settings
166 from magicicada.server.server import FILESYNC_STATUS_MSG, get_service_port
167 from magicicada.server.ssl import disable_ssl_compression
168 from ubuntuone.supervisor import utils as supervisor_utils
169
170=== modified file 'magicicada/server/stats.py'
171--- magicicada/server/stats.py 2018-04-05 21:08:25 +0000
172+++ magicicada/server/stats.py 2018-04-20 01:22:48 +0000
173@@ -27,8 +27,7 @@
174 from twisted.internet import defer, reactor
175 from twisted.web import server, resource
176
177-import metrics
178-
179+from magicicada import metrics
180 from magicicada.monitoring import dump
181
182 logger = logging.getLogger(__name__)
183
184=== modified file 'magicicada/server/tests/test_server.py'
185--- magicicada/server/tests/test_server.py 2018-04-05 21:08:25 +0000
186+++ magicicada/server/tests/test_server.py 2018-04-20 01:22:48 +0000
187@@ -28,8 +28,6 @@
188 import uuid
189 import weakref
190
191-import metrics
192-
193 from django.utils.timezone import now
194 from mocker import expect, Mocker, MockerTestCase, ARGS, KWARGS, ANY
195 from twisted.python.failure import Failure
196@@ -38,7 +36,7 @@
197 from twisted.trial.unittest import TestCase as TwistedTestCase
198 from ubuntuone.storageprotocol import protocol_pb2, request
199
200-from magicicada import settings
201+from magicicada import metrics, settings
202 from magicicada.filesync import errors as dataerror
203 from magicicada.filesync.models import Share
204 from magicicada.server import errors
205
206=== modified file 'magicicada/server/tests/test_ssl_proxy.py'
207--- magicicada/server/tests/test_ssl_proxy.py 2018-04-05 21:08:25 +0000
208+++ magicicada/server/tests/test_ssl_proxy.py 2018-04-20 01:22:48 +0000
209@@ -34,9 +34,7 @@
210 StorageClientFactory, StorageClient)
211 from ubuntuone.supervisor import utils as supervisor_utils
212
213-import metrics
214-
215-from magicicada import settings
216+from magicicada import metrics, settings
217 from magicicada.server import ssl_proxy
218 from magicicada.server.server import PREFERRED_CAP
219 from magicicada.server.testing.testcase import TestWithDatabase
220
221=== modified file 'magicicada/server/tests/test_stats.py'
222--- magicicada/server/tests/test_stats.py 2018-04-05 21:08:25 +0000
223+++ magicicada/server/tests/test_stats.py 2018-04-20 01:22:48 +0000
224@@ -20,8 +20,7 @@
225
226 from twisted.internet import defer
227
228-import metrics
229-
230+from magicicada import metrics
231 from magicicada.server.stats import StatsWorker
232 from magicicada.server.testing.testcase import TestWithDatabase
233
234
235=== added directory 'magicicada/u1sync'
236=== added file 'magicicada/u1sync/__init__.py'
237--- magicicada/u1sync/__init__.py 1970-01-01 00:00:00 +0000
238+++ magicicada/u1sync/__init__.py 2018-04-20 01:22:48 +0000
239@@ -0,0 +1,14 @@
240+# Copyright 2009 Canonical Ltd.
241+#
242+# This program is free software: you can redistribute it and/or modify it
243+# under the terms of the GNU General Public License version 3, as published
244+# by the Free Software Foundation.
245+#
246+# This program is distributed in the hope that it will be useful, but
247+# WITHOUT ANY WARRANTY; without even the implied warranties of
248+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
249+# PURPOSE. See the GNU General Public License for more details.
250+#
251+# You should have received a copy of the GNU General Public License along
252+# with this program. If not, see <http://www.gnu.org/licenses/>.
253+"""The guts of the u1sync tool."""
254
255=== added file 'magicicada/u1sync/client.py'
256--- magicicada/u1sync/client.py 1970-01-01 00:00:00 +0000
257+++ magicicada/u1sync/client.py 2018-04-20 01:22:48 +0000
258@@ -0,0 +1,736 @@
259+# Copyright 2009-2015 Canonical
260+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
261+#
262+# This program is free software: you can redistribute it and/or modify it
263+# under the terms of the GNU General Public License version 3, as published
264+# by the Free Software Foundation.
265+#
266+# This program is distributed in the hope that it will be useful, but
267+# WITHOUT ANY WARRANTY; without even the implied warranties of
268+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
269+# PURPOSE. See the GNU General Public License for more details.
270+#
271+# You should have received a copy of the GNU General Public License along
272+# with this program. If not, see <http://www.gnu.org/licenses/>.
273+
274+"""Pretty API for protocol client."""
275+
276+from __future__ import with_statement
277+
278+import logging
279+import os
280+import sys
281+import shutil
282+import time
283+import uuid
284+import zlib
285+
286+from cStringIO import StringIO
287+from logging.handlers import RotatingFileHandler
288+from Queue import Queue
289+from threading import Lock
290+
291+from dirspec.basedir import xdg_cache_home
292+from twisted.internet import reactor, defer
293+from twisted.internet.defer import inlineCallbacks, returnValue
294+from ubuntuone.storageprotocol import request, volumes
295+from ubuntuone.storageprotocol.content_hash import crc32
296+from ubuntuone.storageprotocol.context import get_ssl_context
297+from ubuntuone.storageprotocol.client import (
298+ StorageClientFactory, StorageClient)
299+from ubuntuone.storageprotocol.delta import DIRECTORY as delta_DIR
300+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE
301+
302+from magicicada.u1sync.genericmerge import MergeNode
303+from magicicada.u1sync.utils import should_sync
304+
305+
306+CONSUMER_KEY = "ubuntuone"
307+CONSUMER_SECRET = "hammertime"
308+
309+u1sync_log_dir = os.path.join(xdg_cache_home, 'u1sync', 'log')
310+LOGFILENAME = os.path.join(u1sync_log_dir, 'u1sync.log')
311+if not os.path.exists(u1sync_log_dir):
312+ os.makedirs(u1sync_log_dir)
313+u1_logger = logging.getLogger("u1sync.timing.log")
314+handler = RotatingFileHandler(LOGFILENAME)
315+u1_logger.addHandler(handler)
316+
317+
318+def share_str(share_uuid):
319+ """Converts a share UUID to a form the protocol likes."""
320+ return str(share_uuid) if share_uuid is not None else request.ROOT
321+
322+
323+def log_timing(func):
324+ def wrapper(*arg, **kwargs):
325+ start = time.time()
326+ ent = func(*arg, **kwargs)
327+ stop = time.time()
328+ u1_logger.debug('for %s %0.5f ms elapsed',
329+ func.func_name, stop-start * 1000.0)
330+ return ent
331+ return wrapper
332+
333+
334+class ForcedShutdown(Exception):
335+ """Client shutdown forced."""
336+
337+
338+class Waiter(object):
339+ """Wait object for blocking waits."""
340+
341+ def __init__(self):
342+ """Initializes the wait object."""
343+ self.queue = Queue()
344+
345+ def wake(self, result):
346+ """Wakes the waiter with a result."""
347+ self.queue.put((result, None))
348+
349+ def wakeAndRaise(self, exc_info):
350+ """Wakes the waiter, raising the given exception in it."""
351+ self.queue.put((None, exc_info))
352+
353+ def wakeWithResult(self, func, *args, **kw):
354+ """Wakes the waiter with the result of the given function."""
355+ try:
356+ result = func(*args, **kw)
357+ except Exception:
358+ self.wakeAndRaise(sys.exc_info())
359+ else:
360+ self.wake(result)
361+
362+ def wait(self):
363+ """Waits for wakeup."""
364+ (result, exc_info) = self.queue.get()
365+ if exc_info:
366+ try:
367+ raise exc_info[0], exc_info[1], exc_info[2]
368+ finally:
369+ exc_info = None
370+ else:
371+ return result
372+
373+
374+class SyncStorageClient(StorageClient):
375+ """Simple client that calls a callback on connection."""
376+
377+ @log_timing
378+ def connectionMade(self):
379+ """Setup and call callback."""
380+ StorageClient.connectionMade(self)
381+ if self.factory.current_protocol not in (None, self):
382+ self.factory.current_protocol.transport.loseConnection()
383+ self.factory.current_protocol = self
384+ self.factory.observer.connected()
385+
386+ @log_timing
387+ def connectionLost(self, reason=None):
388+ """Callback for established connection lost."""
389+ StorageClient.connectionLost(self, reason)
390+ if self.factory.current_protocol is self:
391+ self.factory.current_protocol = None
392+ self.factory.observer.disconnected(reason)
393+
394+
395+class SyncClientFactory(StorageClientFactory):
396+ """A cmd protocol factory."""
397+
398+ protocol = SyncStorageClient
399+
400+ @log_timing
401+ def __init__(self, observer):
402+ """Create the factory"""
403+ self.observer = observer
404+ self.current_protocol = None
405+
406+ @log_timing
407+ def clientConnectionFailed(self, connector, reason):
408+ """We failed at connecting."""
409+ self.current_protocol = None
410+ self.observer.connection_failed(reason)
411+
412+
413+class UnsupportedOperationError(Exception):
414+ """The operation is unsupported by the protocol version."""
415+
416+
417+class ConnectionError(Exception):
418+ """A connection error."""
419+
420+
421+class AuthenticationError(Exception):
422+ """An authentication error."""
423+
424+
425+class NoSuchShareError(Exception):
426+ """Error when there is no such share available."""
427+
428+
429+class CapabilitiesError(Exception):
430+ """A capabilities set/query related error."""
431+
432+
433+class Client(object):
434+ """U1 storage client facade."""
435+ required_caps = frozenset([
436+ "no-content", "account-info", "resumable-uploads",
437+ "fix462230", "volumes", "generations",
438+ ])
439+
440+ def __init__(self, realm=None, reactor=reactor):
441+ """Create the instance.
442+
443+ 'realm' is no longer used, but is left as param for API compatibility.
444+
445+ """
446+ self.reactor = reactor
447+ self.factory = SyncClientFactory(self)
448+
449+ self._status_lock = Lock()
450+ self._status = "disconnected"
451+ self._status_reason = None
452+ self._status_waiting = []
453+ self._active_waiters = set()
454+
455+ self.consumer_key = CONSUMER_KEY
456+ self.consumer_secret = CONSUMER_SECRET
457+
458+ def force_shutdown(self):
459+ """Forces the client to shut itself down."""
460+ with self._status_lock:
461+ self._status = "forced_shutdown"
462+ self._reason = None
463+ for waiter in self._active_waiters:
464+ waiter.wakeAndRaise((ForcedShutdown("Forced shutdown"),
465+ None, None))
466+ self._active_waiters.clear()
467+
468+ def _get_waiter_locked(self):
469+ """Gets a wait object for blocking waits. Should be called with the
470+ status lock held.
471+ """
472+ waiter = Waiter()
473+ if self._status == "forced_shutdown":
474+ raise ForcedShutdown("Forced shutdown")
475+ self._active_waiters.add(waiter)
476+ return waiter
477+
478+ def _get_waiter(self):
479+ """Get a wait object for blocking waits. Acquires the status lock."""
480+ with self._status_lock:
481+ return self._get_waiter_locked()
482+
483+ def _wait(self, waiter):
484+ """Waits for the waiter."""
485+ try:
486+ return waiter.wait()
487+ finally:
488+ with self._status_lock:
489+ if waiter in self._active_waiters:
490+ self._active_waiters.remove(waiter)
491+
492+ @log_timing
493+ def _change_status(self, status, reason=None):
494+ """Changes the client status. Usually called from the reactor
495+ thread.
496+
497+ """
498+ with self._status_lock:
499+ if self._status == "forced_shutdown":
500+ return
501+ self._status = status
502+ self._status_reason = reason
503+ waiting = self._status_waiting
504+ self._status_waiting = []
505+ for waiter in waiting:
506+ waiter.wake((status, reason))
507+
508+ @log_timing
509+ def _await_status_not(self, *ignore_statuses):
510+ """Blocks until the client status changes, returning the new status.
511+ Should never be called from the reactor thread.
512+
513+ """
514+ with self._status_lock:
515+ status = self._status
516+ reason = self._status_reason
517+ while status in ignore_statuses:
518+ waiter = self._get_waiter_locked()
519+ self._status_waiting.append(waiter)
520+ self._status_lock.release()
521+ try:
522+ status, reason = self._wait(waiter)
523+ finally:
524+ self._status_lock.acquire()
525+ if status == "forced_shutdown":
526+ raise ForcedShutdown("Forced shutdown.")
527+ return (status, reason)
528+
529+ def connection_failed(self, reason):
530+ """Notification that connection failed."""
531+ self._change_status("disconnected", reason)
532+
533+ def connected(self):
534+ """Notification that connection succeeded."""
535+ self._change_status("connected")
536+
537+ def disconnected(self, reason):
538+ """Notification that we were disconnected."""
539+ self._change_status("disconnected", reason)
540+
541+ def defer_from_thread(self, function, *args, **kwargs):
542+ """Do twisted defer magic to get results and show exceptions."""
543+ waiter = self._get_waiter()
544+
545+ @log_timing
546+ def runner():
547+ """inner."""
548+ try:
549+ d = function(*args, **kwargs)
550+ if isinstance(d, defer.Deferred):
551+ d.addCallbacks(lambda r: waiter.wake((r, None, None)),
552+ lambda f: waiter.wake((None, None, f)))
553+ else:
554+ waiter.wake((d, None, None))
555+ except Exception:
556+ waiter.wake((None, sys.exc_info(), None))
557+
558+ self.reactor.callFromThread(runner)
559+ result, exc_info, failure = self._wait(waiter)
560+ if exc_info:
561+ try:
562+ raise exc_info[0], exc_info[1], exc_info[2]
563+ finally:
564+ exc_info = None
565+ elif failure:
566+ failure.raiseException()
567+ else:
568+ return result
569+
570+ @log_timing
571+ def connect(self, host, port):
572+ """Connect to host/port."""
573+ def _connect():
574+ """Deferred part."""
575+ self.reactor.connectTCP(host, port, self.factory)
576+ self._connect_inner(_connect)
577+
578+ @log_timing
579+ def connect_ssl(self, host, port, no_verify):
580+ """Connect to host/port using ssl."""
581+ def _connect():
582+ """deferred part."""
583+ ctx = get_ssl_context(no_verify, host)
584+ self.reactor.connectSSL(host, port, self.factory, ctx)
585+ self._connect_inner(_connect)
586+
587+ @log_timing
588+ def _connect_inner(self, _connect):
589+ """Helper function for connecting."""
590+ self._change_status("connecting")
591+ self.reactor.callFromThread(_connect)
592+ status, reason = self._await_status_not("connecting")
593+ if status != "connected":
594+ raise ConnectionError(reason.value)
595+
596+ @log_timing
597+ def disconnect(self):
598+ """Disconnect."""
599+ if self.factory.current_protocol is not None:
600+ self.reactor.callFromThread(
601+ self.factory.current_protocol.transport.loseConnection)
602+ self._await_status_not("connecting", "connected", "authenticated")
603+
604+ @log_timing
605+ def simple_auth(self, username, password):
606+ """Perform simple authorisation."""
607+
608+ @inlineCallbacks
609+ def _wrapped_authenticate():
610+ """Wrapped authenticate."""
611+ try:
612+ yield self.factory.current_protocol.simple_authenticate(
613+ username, password)
614+ except Exception:
615+ self.factory.current_protocol.transport.loseConnection()
616+ else:
617+ self._change_status("authenticated")
618+
619+ try:
620+ self.defer_from_thread(_wrapped_authenticate)
621+ except request.StorageProtocolError as e:
622+ raise AuthenticationError(e)
623+ status, reason = self._await_status_not("connected")
624+ if status != "authenticated":
625+ raise AuthenticationError(reason.value)
626+
627+ @log_timing
628+ def set_capabilities(self):
629+ """Set the capabilities with the server"""
630+
631+ client = self.factory.current_protocol
632+
633+ @log_timing
634+ def set_caps_callback(req):
635+ "Caps query succeeded"
636+ if not req.accepted:
637+ de = defer.fail("The server denied setting %s capabilities" %
638+ req.caps)
639+ return de
640+
641+ @log_timing
642+ def query_caps_callback(req):
643+ "Caps query succeeded"
644+ if req.accepted:
645+ set_d = client.set_caps(self.required_caps)
646+ set_d.addCallback(set_caps_callback)
647+ return set_d
648+ else:
649+ # the server don't have the requested capabilities.
650+ # return a failure for now, in the future we might want
651+ # to reconnect to another server
652+ de = defer.fail("The server don't have the requested"
653+ " capabilities: %s" % str(req.caps))
654+ return de
655+
656+ @log_timing
657+ def _wrapped_set_capabilities():
658+ """Wrapped set_capabilities """
659+ d = client.query_caps(self.required_caps)
660+ d.addCallback(query_caps_callback)
661+ return d
662+
663+ try:
664+ self.defer_from_thread(_wrapped_set_capabilities)
665+ except request.StorageProtocolError as e:
666+ raise CapabilitiesError(e)
667+
668+ @log_timing
669+ def get_root_info(self, volume_uuid):
670+ """Returns the UUID of the applicable share root."""
671+ if volume_uuid is None:
672+ _get_root = self.factory.current_protocol.get_root
673+ root = self.defer_from_thread(_get_root)
674+ return (uuid.UUID(root), True)
675+ else:
676+ str_volume_uuid = str(volume_uuid)
677+ volume = self._match_volume(
678+ lambda v: str(v.volume_id) == str_volume_uuid)
679+ if isinstance(volume, volumes.ShareVolume):
680+ modify = volume.access_level == "Modify"
681+ if isinstance(volume, volumes.UDFVolume):
682+ modify = True
683+ return (uuid.UUID(str(volume.node_id)), modify)
684+
685+ @log_timing
686+ def resolve_path(self, share_uuid, root_uuid, path):
687+ """Resolve path relative to the given root node."""
688+
689+ @inlineCallbacks
690+ def _resolve_worker():
691+ """Path resolution worker."""
692+ node_uuid = root_uuid
693+ local_path = path.strip('/')
694+
695+ while local_path != '':
696+ local_path, name = os.path.split(local_path)
697+ hashes = yield self._get_node_hashes(share_uuid)
698+ content_hash = hashes.get(root_uuid, None)
699+ if content_hash is None:
700+ raise KeyError("Content hash not available")
701+ entries = yield self._get_dir_entries(share_uuid, root_uuid)
702+ match_name = name.decode('utf-8')
703+ match = None
704+ for entry in entries:
705+ if match_name == entry.name:
706+ match = entry
707+ break
708+
709+ if match is None:
710+ raise KeyError("Path not found")
711+
712+ node_uuid = uuid.UUID(match.node)
713+
714+ returnValue(node_uuid)
715+
716+ return self.defer_from_thread(_resolve_worker)
717+
718+ @log_timing
719+ def find_volume(self, volume_spec):
720+ """Finds a share matching the given UUID. Looks at both share UUIDs
721+ and root node UUIDs."""
722+
723+ def match(s):
724+ return (str(s.volume_id) == volume_spec or
725+ str(s.node_id) == volume_spec)
726+
727+ volume = self._match_volume(match)
728+ return uuid.UUID(str(volume.volume_id))
729+
730+ @log_timing
731+ def _match_volume(self, predicate):
732+ """Finds a volume matching the given predicate."""
733+ _list_shares = self.factory.current_protocol.list_volumes
734+ r = self.defer_from_thread(_list_shares)
735+ for volume in r.volumes:
736+ if predicate(volume):
737+ return volume
738+ raise NoSuchShareError()
739+
740+ @log_timing
741+ def build_tree(self, share_uuid, root_uuid):
742+ """Builds and returns a tree representing the metadata for the given
743+ subtree in the given share.
744+
745+ @param share_uuid: the share UUID or None for the user's volume
746+ @param root_uuid: the root UUID of the subtree (must be a directory)
747+ @return: a MergeNode tree
748+
749+ """
750+ root = MergeNode(node_type=DIRECTORY, uuid=root_uuid)
751+
752+ @log_timing
753+ @inlineCallbacks
754+ def _get_root_content_hash():
755+ """Obtain the content hash for the root node."""
756+ result = yield self._get_node_hashes(share_uuid)
757+ returnValue(result.get(root_uuid, None))
758+
759+ root.content_hash = self.defer_from_thread(_get_root_content_hash)
760+ if root.content_hash is None:
761+ raise ValueError("No content available for node %s" % root_uuid)
762+
763+ @log_timing
764+ @inlineCallbacks
765+ def _get_children(parent_uuid, parent_content_hash):
766+ """Obtain a sequence of MergeNodes corresponding to a node's
767+ immediate children.
768+
769+ """
770+ entries = yield self._get_dir_entries(share_uuid, parent_uuid)
771+ children = {}
772+ for entry in entries:
773+ if should_sync(entry.name):
774+ child = MergeNode(node_type=entry.node_type,
775+ uuid=uuid.UUID(entry.node))
776+ children[entry.name] = child
777+
778+ content_hashes = yield self._get_node_hashes(share_uuid)
779+ for child in children.itervalues():
780+ child.content_hash = content_hashes.get(child.uuid, None)
781+
782+ returnValue(children)
783+
784+ need_children = [root]
785+ while need_children:
786+ node = need_children.pop()
787+ if node.content_hash is not None:
788+ children = self.defer_from_thread(_get_children, node.uuid,
789+ node.content_hash)
790+ node.children = children
791+ for child in children.itervalues():
792+ if child.node_type == DIRECTORY:
793+ need_children.append(child)
794+
795+ return root
796+
797+ @log_timing
798+ @defer.inlineCallbacks
799+ def _get_dir_entries(self, share_uuid, node_uuid):
800+ """Get raw dir entries for the given directory."""
801+ result = yield self.factory.current_protocol.get_delta(
802+ share_str(share_uuid), from_scratch=True)
803+ node_uuid = share_str(node_uuid)
804+ children = []
805+ for n in result.response:
806+ if n.parent_id == node_uuid:
807+ # adapt here some attrs so we don't need to change ALL the code
808+ n.node_type = DIRECTORY if n.file_type == delta_DIR else FILE
809+ n.node = n.node_id
810+ children.append(n)
811+ defer.returnValue(children)
812+
813+ @log_timing
814+ def download_string(self, share_uuid, node_uuid, content_hash):
815+ """Reads a file from the server into a string."""
816+ output = StringIO()
817+ self._download_inner(share_uuid=share_uuid, node_uuid=node_uuid,
818+ content_hash=content_hash, output=output)
819+ return output.getValue()
820+
821+ @log_timing
822+ def download_file(self, share_uuid, node_uuid, content_hash, filename):
823+ """Downloads a file from the server."""
824+ partial_filename = "%s.u1partial" % filename
825+ output = open(partial_filename, "w")
826+
827+ @log_timing
828+ def rename_file():
829+ """Renames the temporary file to the final name."""
830+ output.close()
831+ os.rename(partial_filename, filename)
832+
833+ @log_timing
834+ def delete_file():
835+ """Deletes the temporary file."""
836+ output.close()
837+ os.remove(partial_filename)
838+
839+ self._download_inner(share_uuid=share_uuid, node_uuid=node_uuid,
840+ content_hash=content_hash, output=output,
841+ on_success=rename_file, on_failure=delete_file)
842+
843+ @log_timing
844+ def _download_inner(self, share_uuid, node_uuid, content_hash, output,
845+ on_success=lambda: None, on_failure=lambda: None):
846+ """Helper function for content downloads."""
847+ dec = zlib.decompressobj()
848+
849+ @log_timing
850+ def write_data(data):
851+ """Helper which writes data to the output file."""
852+ uncompressed_data = dec.decompress(data)
853+ output.write(uncompressed_data)
854+
855+ @log_timing
856+ def finish_download(value):
857+ """Helper which finishes the download."""
858+ uncompressed_data = dec.flush()
859+ output.write(uncompressed_data)
860+ on_success()
861+ return value
862+
863+ @log_timing
864+ def abort_download(value):
865+ """Helper which aborts the download."""
866+ on_failure()
867+ return value
868+
869+ @log_timing
870+ def _download():
871+ """Async helper."""
872+ _get_content = self.factory.current_protocol.get_content
873+ d = _get_content(share_str(share_uuid), str(node_uuid),
874+ content_hash, callback=write_data)
875+ d.addCallbacks(finish_download, abort_download)
876+ return d
877+
878+ self.defer_from_thread(_download)
879+
880+ @log_timing
881+ def create_directory(self, share_uuid, parent_uuid, name):
882+ """Creates a directory on the server."""
883+ r = self.defer_from_thread(self.factory.current_protocol.make_dir,
884+ share_str(share_uuid), str(parent_uuid),
885+ name)
886+ return uuid.UUID(r.new_id)
887+
888+ @log_timing
889+ def create_file(self, share_uuid, parent_uuid, name):
890+ """Creates a file on the server."""
891+ r = self.defer_from_thread(self.factory.current_protocol.make_file,
892+ share_str(share_uuid), str(parent_uuid),
893+ name)
894+ return uuid.UUID(r.new_id)
895+
896+ @log_timing
897+ def create_symlink(self, share_uuid, parent_uuid, name, target):
898+ """Creates a symlink on the server."""
899+ raise UnsupportedOperationError("Protocol does not support symlinks")
900+
901+ @log_timing
902+ def upload_string(self, share_uuid, node_uuid, old_content_hash,
903+ content_hash, content):
904+ """Uploads a string to the server as file content."""
905+ crc = crc32(content, 0)
906+ compressed_content = zlib.compress(content, 9)
907+ compressed = StringIO(compressed_content)
908+ self.defer_from_thread(self.factory.current_protocol.put_content,
909+ share_str(share_uuid), str(node_uuid),
910+ old_content_hash, content_hash,
911+ crc, len(content), len(compressed_content),
912+ compressed)
913+
    @log_timing
    def upload_file(self, share_uuid, node_uuid, old_content_hash,
                    content_hash, filename):
        """Uploads a file to the server.

        The file is compressed into an anonymous staging file (created
        next to the original, then unlinked while still open so the OS
        reclaims it automatically) and streamed to the server from there.
        """
        parent_dir = os.path.split(filename)[0]
        # Hidden, collision-free staging file name next to the original.
        unique_filename = os.path.join(parent_dir, "." + str(uuid.uuid4()))

        class StagingFile(object):
            """An object which tracks data being compressed for staging."""
            def __init__(self, stream):
                """Initialize a compression object."""
                self.crc32 = 0
                self.enc = zlib.compressobj(9)
                self.size = 0
                self.compressed_size = 0
                self.stream = stream

            def write(self, bytes):
                """Compress bytes, keeping track of length and crc32.

                The crc32 and sizes are accumulated so the protocol call
                below can be made without re-reading the original file.
                """
                self.size += len(bytes)
                self.crc32 = crc32(bytes, self.crc32)
                compressed_bytes = self.enc.compress(bytes)
                self.compressed_size += len(compressed_bytes)
                self.stream.write(compressed_bytes)

            def finish(self):
                """Finish staging compressed data (flush the compressor)."""
                compressed_bytes = self.enc.flush()
                self.compressed_size += len(compressed_bytes)
                self.stream.write(compressed_bytes)

        with open(unique_filename, "w+") as compressed:
            # Unlink immediately: the open handle keeps the data alive,
            # and the file vanishes even if this process dies (POSIX).
            os.remove(unique_filename)
            with open(filename, "r") as original:
                staging = StagingFile(compressed)
                # copyfileobj drives StagingFile.write for each chunk.
                shutil.copyfileobj(original, staging)
                staging.finish()
            # Rewind so put_content streams the staged bytes from the top.
            compressed.seek(0)
            self.defer_from_thread(self.factory.current_protocol.put_content,
                                   share_str(share_uuid), str(node_uuid),
                                   old_content_hash, content_hash,
                                   staging.crc32,
                                   staging.size, staging.compressed_size,
                                   compressed)
958+
959+ @log_timing
960+ def move(self, share_uuid, parent_uuid, name, node_uuid):
961+ """Moves a file on the server."""
962+ self.defer_from_thread(self.factory.current_protocol.move,
963+ share_str(share_uuid), str(node_uuid),
964+ str(parent_uuid), name)
965+
966+ @log_timing
967+ def unlink(self, share_uuid, node_uuid):
968+ """Unlinks a file on the server."""
969+ self.defer_from_thread(self.factory.current_protocol.unlink,
970+ share_str(share_uuid), str(node_uuid))
971+
972+ @log_timing
973+ @defer.inlineCallbacks
974+ def _get_node_hashes(self, share_uuid):
975+ """Fetches hashes for the given nodes."""
976+ result = yield self.factory.current_protocol.get_delta(
977+ share_str(share_uuid), from_scratch=True)
978+ hashes = {}
979+ for fid in result.response:
980+ node_uuid = uuid.UUID(fid.node_id)
981+ hashes[node_uuid] = fid.content_hash
982+ defer.returnValue(hashes)
983+
984+ @log_timing
985+ def get_incoming_shares(self):
986+ """Returns a list of incoming shares as (name, uuid, accepted)
987+ tuples.
988+
989+ """
990+ _list_shares = self.factory.current_protocol.list_shares
991+ r = self.defer_from_thread(_list_shares)
992+ return [(s.name, s.id, s.other_visible_name,
993+ s.accepted, s.access_level)
994+ for s in r.shares if s.direction == "to_me"]
995
996=== added file 'magicicada/u1sync/constants.py'
997--- magicicada/u1sync/constants.py 1970-01-01 00:00:00 +0000
998+++ magicicada/u1sync/constants.py 2018-04-20 01:22:48 +0000
999@@ -0,0 +1,26 @@
1000+# Copyright 2009 Canonical Ltd.
1001+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
1002+#
1003+# This program is free software: you can redistribute it and/or modify it
1004+# under the terms of the GNU General Public License version 3, as published
1005+# by the Free Software Foundation.
1006+#
1007+# This program is distributed in the hope that it will be useful, but
1008+# WITHOUT ANY WARRANTY; without even the implied warranties of
1009+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
1010+# PURPOSE. See the GNU General Public License for more details.
1011+#
1012+# You should have received a copy of the GNU General Public License along
1013+# with this program. If not, see <http://www.gnu.org/licenses/>.
1014+
1015+"""Assorted constant definitions which don't fit anywhere else."""
1016+
1017+import re
1018+
# the name of the directory u1sync uses to keep metadata about a mirror
METADATA_DIR_NAME = u".ubuntuone-sync"

# Filenames to ignore: partial downloads and conflict copies, e.g.
# "foo.u1partial", "foo.part", "foo.conflict", "foo.u1conflict.3".
# Raw strings are the regex idiom; the compiled pattern is unchanged.
SPECIAL_FILE_RE = re.compile(r".*\.("
                             r"(u1)?partial|part|"
                             r"(u1)?conflict(\.[0-9]+)?)$")
1026
1027=== added file 'magicicada/u1sync/genericmerge.py'
1028--- magicicada/u1sync/genericmerge.py 1970-01-01 00:00:00 +0000
1029+++ magicicada/u1sync/genericmerge.py 2018-04-20 01:22:48 +0000
1030@@ -0,0 +1,86 @@
1031+# Copyright 2009 Canonical Ltd.
1032+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
1033+#
1034+# This program is free software: you can redistribute it and/or modify it
1035+# under the terms of the GNU General Public License version 3, as published
1036+# by the Free Software Foundation.
1037+#
1038+# This program is distributed in the hope that it will be useful, but
1039+# WITHOUT ANY WARRANTY; without even the implied warranties of
1040+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
1041+# PURPOSE. See the GNU General Public License for more details.
1042+#
1043+# You should have received a copy of the GNU General Public License along
1044+# with this program. If not, see <http://www.gnu.org/licenses/>.
1045+
1046+"""A generic abstraction for merge operations on directory trees."""
1047+
1048+from itertools import chain
1049+
1050+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY
1051+
1052+
class MergeNode(object):
    """A filesystem node. Should generally be treated as immutable."""

    def __init__(self, node_type, content_hash=None, uuid=None, children=None,
                 conflict_info=None):
        """Store the node's type, identity, content and conflict state."""
        self.node_type = node_type
        self.children = children
        self.uuid = uuid
        self.content_hash = content_hash
        self.conflict_info = conflict_info

    def _state(self):
        """Tuple of every attribute that participates in equality."""
        return (self.node_type, self.children, self.uuid,
                self.content_hash, self.conflict_info)

    def __eq__(self, other):
        """Nodes are equal iff they have the same type and equal state."""
        if type(other) is not type(self):
            return False
        return self._state() == other._state()

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self.__eq__(other)
1077+
1078+
1079+def show_tree(tree, indent="", name="/"):
1080+ """Prints a tree."""
1081+ if tree.node_type == DIRECTORY:
1082+ type_str = "DIR "
1083+ else:
1084+ type_str = "FILE"
1085+ print "%s%-36s %s %s %s" % (indent, tree.uuid, type_str, name,
1086+ tree.content_hash)
1087+ if tree.node_type == DIRECTORY and tree.children is not None:
1088+ for name in sorted(tree.children.keys()):
1089+ subtree = tree.children[name]
1090+ show_tree(subtree, indent=" " + indent, name=name)
1091+
1092+
def generic_merge(trees, pre_merge, post_merge, partial_parent, name):
    """Generic tree merging function.

    Walks the given parallel trees depth-first.  pre_merge is called on
    the way down (its result is passed to the children as
    partial_parent); post_merge is called on the way up with all the
    per-child results, and its return value becomes the result for the
    whole subtree.
    """
    partial_result = pre_merge(nodes=trees, name=name,
                               partial_parent=partial_parent)

    def tree_children(tree):
        """Returns children if tree is not None"""
        return tree.children if tree is not None else None

    child_dicts = [tree_children(t) or {} for t in trees]
    # Iterate the dicts directly rather than via the Python-2-only
    # iterkeys(); behavior is identical and it also runs on Python 3.
    child_names = set(chain(*child_dicts))
    child_results = {}
    for child_name in child_names:
        # A child missing from one tree is passed down as None.
        subtrees = [cs.get(child_name, None) for cs in child_dicts]
        child_results[child_name] = generic_merge(
            trees=subtrees, pre_merge=pre_merge, post_merge=post_merge,
            partial_parent=partial_result, name=child_name)

    return post_merge(nodes=trees, partial_result=partial_result,
                      child_results=child_results)
1117
1118=== added file 'magicicada/u1sync/main.py'
1119--- magicicada/u1sync/main.py 1970-01-01 00:00:00 +0000
1120+++ magicicada/u1sync/main.py 2018-04-20 01:22:48 +0000
1121@@ -0,0 +1,443 @@
1122+# Copyright 2009 Canonical Ltd.
1123+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
1124+#
1125+# This program is free software: you can redistribute it and/or modify it
1126+# under the terms of the GNU General Public License version 3, as published
1127+# by the Free Software Foundation.
1128+#
1129+# This program is distributed in the hope that it will be useful, but
1130+# WITHOUT ANY WARRANTY; without even the implied warranties of
1131+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
1132+# PURPOSE. See the GNU General Public License for more details.
1133+#
1134+# You should have received a copy of the GNU General Public License along
1135+# with this program. If not, see <http://www.gnu.org/licenses/>.
1136+
1137+"""A prototype directory sync client."""
1138+
1139+from __future__ import with_statement
1140+
1141+import signal
1142+import os
1143+import sys
1144+import uuid
1145+
1146+from errno import EEXIST
1147+from optparse import OptionParser, SUPPRESS_HELP
1148+from Queue import Queue
1149+
1150+import gobject
1151+
1152+import ubuntuone.storageprotocol.dircontent_pb2 as dircontent_pb2
1153+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, SYMLINK
1154+from twisted.internet import reactor
1155+
1156+from magicicada.u1sync import metadata
1157+from magicicada.u1sync.client import (
1158+ ConnectionError, AuthenticationError, NoSuchShareError,
1159+ ForcedShutdown, Client)
1160+from magicicada.u1sync.constants import METADATA_DIR_NAME
1161+from magicicada.u1sync.genericmerge import show_tree, generic_merge
1162+from magicicada.u1sync.merge import (
1163+ SyncMerge, ClobberServerMerge, ClobberLocalMerge, merge_trees)
1164+from magicicada.u1sync.scan import scan_directory
1165+from magicicada.u1sync.sync import download_tree, upload_tree
1166+from magicicada.u1sync.utils import safe_mkdir
1167+
1168+
# Register the application name with GLib (shows up in e.g. crash reports).
gobject.set_application_name('u1sync')

# Default used when no --action is given; resolved in do_sync based on
# whether the share is writable.
DEFAULT_MERGE_ACTION = 'auto'
MERGE_ACTIONS = {
    # action: (merge_class, should_upload, should_download)
    'sync': (SyncMerge, True, True),
    'clobber-server': (ClobberServerMerge, True, False),
    'clobber-local': (ClobberLocalMerge, False, True),
    'upload': (SyncMerge, True, False),
    'download': (SyncMerge, False, True),
    'auto': None  # special case: resolved at runtime in do_sync
}
# Protobuf enum descriptor used to render node types as names.
NODE_TYPE_ENUM = dircontent_pb2._NODETYPE
1182+
1183+
def node_type_str(node_type):
    """Converts a numeric node type to a human-readable string."""
    enum_value = NODE_TYPE_ENUM.values_by_number[node_type]
    return enum_value.name
1187+
1188+
class ReadOnlyShareError(Exception):
    """Raised when a write action targets a read-only share."""


class DirectoryAlreadyInitializedError(Exception):
    """Raised when --init targets an already-initialized directory."""


class DirectoryNotInitializedError(Exception):
    """Raised when syncing a directory that was never initialized."""


class NoParentError(Exception):
    """Raised when a node unexpectedly has no parent."""
1203+
1204+
class TreesDiffer(Exception):
    """Raised when the compared trees differ."""

    def __init__(self, quiet):
        """Remember whether the caller asked for quiet output."""
        self.quiet = quiet
1209+
1210+
def do_init(client, share_spec, directory, quiet, subtree_path,
            metadata=metadata):
    """Initializes a directory for syncing, and syncs it.

    The metadata module is taken as a keyword argument (defaulting to
    the real one) so tests can substitute a stub.
    """
    info = metadata.Metadata()

    # Mirror either an explicit share or (None) the user's own volume.
    if share_spec is not None:
        info.share_uuid = client.find_volume(share_spec)
    else:
        info.share_uuid = None

    # Mirror either a subtree of the volume or the whole of it.
    if subtree_path is not None:
        info.path = subtree_path
    else:
        info.path = "/"

    if not quiet:
        print "\nInitializing directory..."
    safe_mkdir(directory)

    # Creating the metadata dir doubles as the "already initialized"
    # check: EEXIST means a previous --init ran on this directory.
    metadata_dir = os.path.join(directory, METADATA_DIR_NAME)
    try:
        os.mkdir(metadata_dir)
    except OSError as e:
        if e.errno == EEXIST:
            raise DirectoryAlreadyInitializedError(directory)
        else:
            raise
    if not quiet:
        print "\nWriting mirror metadata..."
    metadata.write(metadata_dir, info)

    if not quiet:
        print "\nDone."
1245+
1246+
def do_sync(client, directory, action, dry_run, quiet):
    """Synchronizes a directory with the given share.

    Locates the mirror root (walking up from directory), reads its
    metadata, scans local and remote trees, merges them according to
    the requested action, and downloads/uploads content as needed.
    """
    # Walk up from the given directory until the metadata dir is found;
    # hitting the filesystem root means the mirror was never initialized.
    absolute_path = os.path.abspath(directory)
    while True:
        metadata_dir = os.path.join(absolute_path, METADATA_DIR_NAME)
        if os.path.exists(metadata_dir):
            break
        if absolute_path == "/":
            raise DirectoryNotInitializedError(directory)
        absolute_path = os.path.split(absolute_path)[0]

    if not quiet:
        print "\nReading mirror metadata..."
    info = metadata.read(metadata_dir)

    top_uuid, writable = client.get_root_info(info.share_uuid)

    # Resolve the mirrored subtree's root node lazily, on first sync.
    if info.root_uuid is None:
        info.root_uuid = client.resolve_path(info.share_uuid, top_uuid,
                                             info.path)

    # 'auto' picks a bidirectional sync when we may write to the share,
    # and a download-only sync otherwise.
    if action == 'auto':
        if writable:
            action = 'sync'
        else:
            action = 'download'
    merge_type, should_upload, should_download = MERGE_ACTIONS[action]
    if should_upload and not writable:
        raise ReadOnlyShareError(info.share_uuid)

    if not quiet:
        print "\nScanning directory..."
    local_tree = scan_directory(absolute_path, quiet=quiet)

    if not quiet:
        print "\nFetching metadata..."
    remote_tree = client.build_tree(info.share_uuid, info.root_uuid)
    if not quiet:
        show_tree(remote_tree)

    if not quiet:
        print "\nMerging trees..."
    # The previously-synced trees from the metadata serve as the common
    # ancestors for the four-way merge.
    merged_tree = merge_trees(old_local_tree=info.local_tree,
                              local_tree=local_tree,
                              old_remote_tree=info.remote_tree,
                              remote_tree=remote_tree,
                              merge_action=merge_type())
    if not quiet:
        show_tree(merged_tree)

    if not quiet:
        print "\nSyncing content..."
    if should_download:
        info.local_tree = download_tree(merged_tree=merged_tree,
                                        local_tree=local_tree,
                                        client=client,
                                        share_uuid=info.share_uuid,
                                        path=absolute_path, dry_run=dry_run,
                                        quiet=quiet)
    else:
        info.local_tree = local_tree
    if should_upload:
        info.remote_tree = upload_tree(merged_tree=merged_tree,
                                       remote_tree=remote_tree,
                                       client=client,
                                       share_uuid=info.share_uuid,
                                       path=absolute_path, dry_run=dry_run,
                                       quiet=quiet)
    else:
        info.remote_tree = remote_tree

    # A dry run must not record the new trees as the synced state.
    if not dry_run:
        if not quiet:
            print "\nUpdating mirror metadata..."
        metadata.write(metadata_dir, info)

    if not quiet:
        print "\nDone."
1325+
1326+
1327+def do_list_shares(client):
1328+ """Lists available (incoming) shares."""
1329+ shares = client.get_incoming_shares()
1330+ for (name, id, user, accepted, access) in shares:
1331+ if not accepted:
1332+ status = " [not accepted]"
1333+ else:
1334+ status = ""
1335+ name = name.encode("utf-8")
1336+ user = user.encode("utf-8")
1337+ print "%s %s (from %s) [%s]%s" % (id, name, user, access, status)
1338+
1339+
def do_diff(client, share_spec, directory, quiet, subtree_path,
            ignore_symlinks=True):
    """Diffs a local directory with the server.

    Raises TreesDiffer when any difference is found; prints a line per
    difference unless quiet is set.
    """
    if share_spec is not None:
        share_uuid = client.find_volume(share_spec)
    else:
        share_uuid = None
    if subtree_path is None:
        subtree_path = '/'

    root_uuid, writable = client.get_root_info(share_uuid)
    subtree_uuid = client.resolve_path(share_uuid, root_uuid, subtree_path)
    local_tree = scan_directory(directory, quiet=True)
    remote_tree = client.build_tree(share_uuid, subtree_uuid)

    def pre_merge(nodes, name, partial_parent):
        """Compares nodes and prints differences.

        Returns (display_path, differs) which children receive as their
        partial_parent.
        """
        (local_node, remote_node) = nodes
        (parent_display_path, parent_differs) = partial_parent
        display_path = os.path.join(parent_display_path, name.encode("UTF-8"))
        differs = True
        if local_node is None:
            if not quiet:
                print "%s missing from client" % display_path
        elif remote_node is None:
            # Symlinks can't exist server-side; optionally don't count
            # a local-only symlink as a difference.
            if ignore_symlinks and local_node.node_type == SYMLINK:
                differs = False
            elif not quiet:
                print "%s missing from server" % display_path
        elif local_node.node_type != remote_node.node_type:
            local_type = node_type_str(local_node.node_type)
            remote_type = node_type_str(remote_node.node_type)
            if not quiet:
                print ("%s node types differ (client: %s, server: %s)" %
                       (display_path, local_type, remote_type))
        elif (local_node.node_type != DIRECTORY and
              local_node.content_hash != remote_node.content_hash):
            # Directories are compared structurally, not by hash.
            local_content = local_node.content_hash
            remote_content = remote_node.content_hash
            if not quiet:
                print ("%s has different content (client: %s, server: %s)" %
                       (display_path, local_content, remote_content))
        else:
            differs = False
        return (display_path, differs)

    def post_merge(nodes, partial_result, child_results):
        """Aggregates 'differs' flags from a node and all its children."""
        (display_path, differs) = partial_result
        return differs or any(child_results.itervalues())

    differs = generic_merge(trees=[local_tree, remote_tree],
                            pre_merge=pre_merge, post_merge=post_merge,
                            partial_parent=("", False), name=u"")
    if differs:
        raise TreesDiffer(quiet=quiet)
1396+
1397+
1398+def do_main(argv):
1399+ """The main user-facing portion of the script."""
1400+ usage = (
1401+ "Usage: %prog [options] [DIRECTORY]\n"
1402+ " %prog --list-shares [options]\n"
1403+ " %prog --init [--share=SHARE_UUID] [options] DIRECTORY\n"
1404+ " %prog --diff [--share=SHARE_UUID] [options] DIRECTORY")
1405+ parser = OptionParser(usage=usage)
1406+ parser.add_option("--port", dest="port", metavar="PORT",
1407+ default=443,
1408+ help="The port on which to connect to the server")
1409+ parser.add_option("--host", dest="host", metavar="HOST",
1410+ default='fs-1.one.ubuntu.com',
1411+ help="The server address")
1412+
1413+ action_list = ", ".join(sorted(MERGE_ACTIONS.keys()))
1414+ parser.add_option("--action", dest="action", metavar="ACTION",
1415+ default=None,
1416+ help="Select a sync action (%s; default is %s)" %
1417+ (action_list, DEFAULT_MERGE_ACTION))
1418+ parser.add_option("--dry-run", action="store_true", dest="dry_run",
1419+ default=False, help="Do a dry run without actually "
1420+ "making changes")
1421+ parser.add_option("--quiet", action="store_true", dest="quiet",
1422+ default=False, help="Produces less output")
1423+ parser.add_option("--list-shares", action="store_const", dest="mode",
1424+ const="list-shares", default="sync",
1425+ help="List available shares")
1426+ parser.add_option("--init", action="store_const", dest="mode",
1427+ const="init",
1428+ help="Initialize a local directory for syncing")
1429+ parser.add_option("--no-ssl-verify", action="store_true",
1430+ dest="no_ssl_verify",
1431+ default=False, help=SUPPRESS_HELP)
1432+ parser.add_option("--diff", action="store_const", dest="mode",
1433+ const="diff",
1434+ help="Compare tree on server with local tree "
1435+ "(does not require previous --init)")
1436+ parser.add_option("--share", dest="share", metavar="SHARE_UUID",
1437+ default=None,
1438+ help="Sync the directory with a share rather than the "
1439+ "user's own volume")
1440+ parser.add_option("--subtree", dest="subtree", metavar="PATH",
1441+ default=None,
1442+ help="Mirror a subset of the share or volume")
1443+
1444+ (options, args) = parser.parse_args(args=list(argv[1:]))
1445+
1446+ if options.share is not None and options.mode not in ("init", "diff"):
1447+ parser.error("--share is only valid with --init or --diff")
1448+
1449+ directory = None
1450+ if options.mode in ("sync", "init" or "diff"):
1451+ if len(args) > 1:
1452+ parser.error("Too many arguments")
1453+ elif len(args) < 1:
1454+ if options.mode == "init" or options.mode == "diff":
1455+ parser.error("--%s requires a directory to "
1456+ "be specified" % options.mode)
1457+ else:
1458+ directory = "."
1459+ else:
1460+ directory = args[0]
1461+ if options.mode in ("init", "list-shares", "diff"):
1462+ if options.action is not None:
1463+ parser.error("--%s does not take the --action parameter" %
1464+ options.mode)
1465+ if options.dry_run:
1466+ parser.error("--%s does not take the --dry-run parameter" %
1467+ options.mode)
1468+ if options.mode == "list-shares":
1469+ if len(args) != 0:
1470+ parser.error("--list-shares does not take a directory")
1471+ if options.mode not in ("init", "diff"):
1472+ if options.subtree is not None:
1473+ parser.error("--%s does not take the --subtree parameter" %
1474+ options.mode)
1475+
1476+ if options.action is not None and options.action not in MERGE_ACTIONS:
1477+ parser.error("--action: Unknown action %s" % options.action)
1478+
1479+ if options.action is None:
1480+ options.action = DEFAULT_MERGE_ACTION
1481+
1482+ if options.share is not None:
1483+ try:
1484+ uuid.UUID(options.share)
1485+ except ValueError as e:
1486+ parser.error("Invalid --share argument: %s" % e)
1487+ share_spec = options.share
1488+ else:
1489+ share_spec = None
1490+
1491+ client = Client(reactor=reactor)
1492+
1493+ signal.signal(signal.SIGINT, lambda s, f: client.force_shutdown())
1494+ signal.signal(signal.SIGTERM, lambda s, f: client.force_shutdown())
1495+
1496+ def run_client():
1497+ """Run the blocking client."""
1498+ client.connect_ssl(
1499+ options.host, int(options.port), options.no_ssl_verify)
1500+
1501+ try:
1502+ client.set_capabilities()
1503+
1504+ if options.mode == "sync":
1505+ do_sync(client=client, directory=directory,
1506+ action=options.action,
1507+ dry_run=options.dry_run, quiet=options.quiet)
1508+ elif options.mode == "init":
1509+ do_init(client=client, share_spec=share_spec,
1510+ directory=directory,
1511+ quiet=options.quiet, subtree_path=options.subtree)
1512+ elif options.mode == "list-shares":
1513+ do_list_shares(client=client)
1514+ elif options.mode == "diff":
1515+ do_diff(client=client, share_spec=share_spec,
1516+ directory=directory,
1517+ quiet=options.quiet, subtree_path=options.subtree,
1518+ ignore_symlinks=False)
1519+ finally:
1520+ client.disconnect()
1521+
1522+ def capture_exception(queue, func):
1523+ """Capture the exception from calling func."""
1524+ try:
1525+ func()
1526+ except Exception:
1527+ queue.put(sys.exc_info())
1528+ else:
1529+ queue.put(None)
1530+ finally:
1531+ reactor.callWhenRunning(reactor.stop)
1532+
1533+ queue = Queue()
1534+ reactor.callInThread(capture_exception, queue, run_client)
1535+ reactor.run(installSignalHandlers=False)
1536+ exc_info = queue.get(True, 0.1)
1537+ if exc_info:
1538+ raise exc_info[0], exc_info[1], exc_info[2]
1539+
1540+
def main(*argv):
    """Top-level main function.

    Maps known failure modes to short user-facing messages; returns 0
    on success and 1 on any handled error.
    """
    try:
        do_main(argv=argv)
    except AuthenticationError as e:
        print "Authentication failed: %s" % e
    except ConnectionError as e:
        print "Connection failed: %s" % e
    except DirectoryNotInitializedError:
        print "Directory not initialized; use --init [DIRECTORY] to init it."
    except DirectoryAlreadyInitializedError:
        print "Directory already initialized."
    except NoSuchShareError:
        print "No matching share found."
    except ReadOnlyShareError:
        print "The selected action isn't possible on a read-only share."
    except (ForcedShutdown, KeyboardInterrupt):
        print "Interrupted!"
    except TreesDiffer as e:
        # --diff reports differences via this exception; exit status is
        # still 1 even when quiet suppresses the message.
        if not e.quiet:
            print "Trees differ."
    else:
        return 0
    return 1
1565
1566=== added file 'magicicada/u1sync/merge.py'
1567--- magicicada/u1sync/merge.py 1970-01-01 00:00:00 +0000
1568+++ magicicada/u1sync/merge.py 2018-04-20 01:22:48 +0000
1569@@ -0,0 +1,181 @@
1570+# Copyright 2009 Canonical Ltd.
1571+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
1572+#
1573+# This program is free software: you can redistribute it and/or modify it
1574+# under the terms of the GNU General Public License version 3, as published
1575+# by the Free Software Foundation.
1576+#
1577+# This program is distributed in the hope that it will be useful, but
1578+# WITHOUT ANY WARRANTY; without even the implied warranties of
1579+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
1580+# PURPOSE. See the GNU General Public License for more details.
1581+#
1582+# You should have received a copy of the GNU General Public License along
1583+# with this program. If not, see <http://www.gnu.org/licenses/>.
1584+
1585+"""Code for merging changes between modified trees."""
1586+
1587+from __future__ import with_statement
1588+
1589+import os
1590+import uuid
1591+
1592+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY
1593+
1594+from magicicada.u1sync.genericmerge import MergeNode, generic_merge
1595+
1596+
1597+class NodeTypeMismatchError(Exception):
1598+ """Node types don't match."""
1599+
1600+
def merge_trees(old_local_tree, local_tree, old_remote_tree, remote_tree,
                merge_action):
    """Performs a tree merge using the given merge action.

    Walks the four trees in parallel via generic_merge; merge_action
    decides the merged node type and the resulting node for each path.
    """

    def pre_merge(nodes, name, partial_parent):
        """Accumulates path and determines merged node type."""
        old_local_node, local_node, old_remote_node, remote_node = nodes
        (parent_path, parent_type) = partial_parent
        path = os.path.join(parent_path, name.encode("utf-8"))
        node_type = merge_action.get_node_type(old_local_node=old_local_node,
                                               local_node=local_node,
                                               old_remote_node=old_remote_node,
                                               remote_node=remote_node,
                                               path=path)
        return (path, node_type)

    def post_merge(nodes, partial_result, child_results):
        """Drops deleted children and merges node."""
        old_local_node, local_node, old_remote_node, remote_node = nodes
        (path, node_type) = partial_result
        if node_type == DIRECTORY:
            # items() instead of the Python-2-only iteritems(); behavior
            # is identical and it also runs on Python 3.
            merged_children = dict(
                (name, child) for (name, child) in child_results.items()
                if child is not None)
        else:
            merged_children = None
        return merge_action.merge_node(old_local_node=old_local_node,
                                       local_node=local_node,
                                       old_remote_node=old_remote_node,
                                       remote_node=remote_node,
                                       node_type=node_type,
                                       merged_children=merged_children)

    return generic_merge(trees=[old_local_tree, local_tree,
                                old_remote_tree, remote_tree],
                         pre_merge=pre_merge, post_merge=post_merge,
                         name=u"", partial_parent=("", None))
1638+
1639+
class SyncMerge(object):
    """Performs a bidirectional sync merge."""

    def get_node_type(self, old_local_node, local_node,
                      old_remote_node, remote_node, path):
        """Requires that all node types match.

        Returns the common node type (None if every node is None);
        raises NodeTypeMismatchError when two non-None nodes disagree.
        NOTE(review): old_remote_node is not part of the check --
        presumably its type is implied by the others; confirm.
        """
        node_type = None
        for node in (old_local_node, local_node, remote_node):
            if node is not None:
                if node_type is not None:
                    if node.node_type != node_type:
                        message = "Node types don't match for %s" % path
                        raise NodeTypeMismatchError(message)
                else:
                    node_type = node.node_type
        return node_type

    def merge_node(self, old_local_node, local_node,
                   old_remote_node, remote_node, node_type, merged_children):
        """Performs bidirectional merge of node state.

        Returns the merged MergeNode, or None when the node should be
        deleted on both sides.
        """

        def node_content_hash(node):
            """Returns node content hash if node is not None"""
            return node.content_hash if node is not None else None

        old_local_content_hash = node_content_hash(old_local_node)
        local_content_hash = node_content_hash(local_node)
        old_remote_content_hash = node_content_hash(old_remote_node)
        remote_content_hash = node_content_hash(remote_node)

        # A side deleted the node if it used to exist there and is gone.
        locally_deleted = old_local_node is not None and local_node is None
        deleted_on_server = old_remote_node is not None and remote_node is None
        # updated means modified or created
        locally_updated = (not locally_deleted and
                           old_local_content_hash != local_content_hash)
        updated_on_server = (not deleted_on_server and
                             old_remote_content_hash != remote_content_hash)

        has_merged_children = (merged_children is not None and
                               len(merged_children) > 0)

        either_node_exists = local_node is not None or remote_node is not None
        # A deletion only wins when the other side didn't modify the node;
        # delete vs. modify resolves in favor of the modification.
        should_delete = (
            (locally_deleted and not updated_on_server) or
            (deleted_on_server and not locally_updated))

        # A directory with surviving children is kept even if it would
        # otherwise be deleted.
        if (either_node_exists and not should_delete) or has_merged_children:
            if (node_type != DIRECTORY and locally_updated and
                updated_on_server and
                local_content_hash != remote_content_hash):
                # local_content_hash will become the merged content_hash;
                # save remote_content_hash in conflict info
                conflict_info = (str(uuid.uuid4()), remote_content_hash)
            else:
                conflict_info = None
            node_uuid = remote_node.uuid if remote_node is not None else None
            if locally_updated:
                content_hash = local_content_hash or remote_content_hash
            else:
                content_hash = remote_content_hash or local_content_hash
            return MergeNode(
                node_type=node_type, uuid=node_uuid, children=merged_children,
                content_hash=content_hash, conflict_info=conflict_info)
        else:
            return None
1705+
1706+
class ClobberServerMerge(object):
    """Clobber server to match local state."""

    def get_node_type(self, old_local_node, local_node,
                      old_remote_node, remote_node, path):
        """Return the local node's type (None when there is no local node)."""
        return local_node.node_type if local_node is not None else None

    def merge_node(self, old_local_node, local_node,
                   old_remote_node, remote_node, node_type, merged_children):
        """Copy local node and associate with remote uuid (if applicable)."""
        if local_node is None:
            return None
        node_uuid = remote_node.uuid if remote_node is not None else None
        return MergeNode(
            node_type=local_node.node_type, uuid=node_uuid,
            content_hash=local_node.content_hash, children=merged_children)
1730+
1731+
class ClobberLocalMerge(object):
    """Clobber local state to match server."""

    def get_node_type(self, old_local_node, local_node,
                      old_remote_node, remote_node, path):
        """Return the remote node's type (None when there is no remote)."""
        return remote_node.node_type if remote_node is not None else None

    def merge_node(self, old_local_node, local_node,
                   old_remote_node, remote_node, node_type, merged_children):
        """Copy the remote node."""
        if remote_node is None:
            return None
        return MergeNode(
            node_type=node_type, uuid=remote_node.uuid,
            content_hash=remote_node.content_hash, children=merged_children)
1751
1752=== added file 'magicicada/u1sync/metadata.py'
1753--- magicicada/u1sync/metadata.py 1970-01-01 00:00:00 +0000
1754+++ magicicada/u1sync/metadata.py 2018-04-20 01:22:48 +0000
1755@@ -0,0 +1,155 @@
1756+# Copyright 2009 Canonical Ltd.
1757+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
1758+#
1759+# This program is free software: you can redistribute it and/or modify it
1760+# under the terms of the GNU General Public License version 3, as published
1761+# by the Free Software Foundation.
1762+#
1763+# This program is distributed in the hope that it will be useful, but
1764+# WITHOUT ANY WARRANTY; without even the implied warranties of
1765+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
1766+# PURPOSE. See the GNU General Public License for more details.
1767+#
1768+# You should have received a copy of the GNU General Public License along
1769+# with this program. If not, see <http://www.gnu.org/licenses/>.
1770+
1771+"""Routines for loading/storing u1sync mirror metadata."""
1772+
1773+from __future__ import with_statement
1774+
1775+import os
1776+import uuid
1777+
1778+from contextlib import contextmanager
1779+import cPickle as pickle
1780+from errno import ENOENT
1781+
1782+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY
1783+
1784+from magicicada.u1sync.merge import MergeNode
1785+from magicicada.u1sync.utils import safe_unlink
1786+
1787+
1788+class Metadata(object):
1789+ """Object representing mirror metadata."""
1790+
1791+ def __init__(self, local_tree=None, remote_tree=None, share_uuid=None,
1792+ root_uuid=None, path=None):
1793+ """Populate fields."""
1794+ self.local_tree = local_tree
1795+ self.remote_tree = remote_tree
1796+ self.share_uuid = share_uuid
1797+ self.root_uuid = root_uuid
1798+ self.path = path
1799+
1800+
1801+def read(metadata_dir):
1802+ """Read metadata for a mirror rooted at directory."""
1803+ index_file = os.path.join(metadata_dir, "local-index")
1804+ share_uuid_file = os.path.join(metadata_dir, "share-uuid")
1805+ root_uuid_file = os.path.join(metadata_dir, "root-uuid")
1806+ path_file = os.path.join(metadata_dir, "path")
1807+
1808+ index = read_pickle_file(index_file, {})
1809+ share_uuid = read_uuid_file(share_uuid_file)
1810+ root_uuid = read_uuid_file(root_uuid_file)
1811+ path = read_string_file(path_file, '/')
1812+
1813+ local_tree = index.get("tree", None)
1814+ remote_tree = index.get("remote_tree", None)
1815+
1816+ if local_tree is None:
1817+ local_tree = MergeNode(node_type=DIRECTORY, children={})
1818+ if remote_tree is None:
1819+ remote_tree = MergeNode(node_type=DIRECTORY, children={})
1820+
1821+ return Metadata(local_tree=local_tree, remote_tree=remote_tree,
1822+ share_uuid=share_uuid, root_uuid=root_uuid,
1823+ path=path)
1824+
1825+
1826+def write(metadata_dir, info):
1827+ """Write all metadata for the mirror rooted at directory."""
1828+ share_uuid_file = os.path.join(metadata_dir, "share-uuid")
1829+ root_uuid_file = os.path.join(metadata_dir, "root-uuid")
1830+ index_file = os.path.join(metadata_dir, "local-index")
1831+ path_file = os.path.join(metadata_dir, "path")
1832+ if info.share_uuid is not None:
1833+ write_uuid_file(share_uuid_file, info.share_uuid)
1834+ else:
1835+ safe_unlink(share_uuid_file)
1836+ if info.root_uuid is not None:
1837+ write_uuid_file(root_uuid_file, info.root_uuid)
1838+ else:
1839+ safe_unlink(root_uuid_file)
1840+ write_string_file(path_file, info.path)
1841+ write_pickle_file(index_file, {"tree": info.local_tree,
1842+ "remote_tree": info.remote_tree})
1843+
1844+
1845+def write_pickle_file(filename, value):
1846+ """Writes a pickled python object to a file."""
1847+ with atomic_update_file(filename) as stream:
1848+ pickle.dump(value, stream, 2)
1849+
1850+
1851+def write_string_file(filename, value):
1852+ """Writes a string to a file with an added line feed, or
1853+ deletes the file if value is None.
1854+ """
1855+ if value is not None:
1856+ with atomic_update_file(filename) as stream:
1857+ stream.write(value)
1858+ stream.write('\n')
1859+ else:
1860+ safe_unlink(filename)
1861+
1862+
1863+def write_uuid_file(filename, value):
1864+ """Writes a UUID to a file."""
1865+ write_string_file(filename, str(value))
1866+
1867+
1868+def read_pickle_file(filename, default_value=None):
1869+ """Reads a pickled python object from a file."""
1870+ try:
1871+ with open(filename, "rb") as stream:
1872+ return pickle.load(stream)
1873+ except IOError as e:
1874+ if e.errno != ENOENT:
1875+ raise
1876+ return default_value
1877+
1878+
1879+def read_string_file(filename, default_value=None):
1880+ """Reads a string from a file, discarding the final character."""
1881+ try:
1882+ with open(filename, "r") as stream:
1883+ return stream.read()[:-1]
1884+ except IOError as e:
1885+ if e.errno != ENOENT:
1886+ raise
1887+ return default_value
1888+
1889+
1890+def read_uuid_file(filename, default_value=None):
1891+ """Reads a UUID from a file."""
1892+ try:
1893+ with open(filename, "r") as stream:
1894+ return uuid.UUID(stream.read()[:-1])
1895+ except IOError as e:
1896+ if e.errno != ENOENT:
1897+ raise
1898+ return default_value
1899+
1900+
1901+@contextmanager
1902+def atomic_update_file(filename):
1903+ """Returns a context manager for atomically updating a file."""
1904+ temp_filename = "%s.%s" % (filename, uuid.uuid4())
1905+ try:
1906+ with open(temp_filename, "w") as stream:
1907+ yield stream
1908+ os.rename(temp_filename, filename)
1909+ finally:
1910+ safe_unlink(temp_filename)
1911
1912=== added file 'magicicada/u1sync/scan.py'
1913--- magicicada/u1sync/scan.py 1970-01-01 00:00:00 +0000
1914+++ magicicada/u1sync/scan.py 2018-04-20 01:22:48 +0000
1915@@ -0,0 +1,84 @@
1916+# Copyright 2009 Canonical Ltd.
1917+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
1918+#
1919+# This program is free software: you can redistribute it and/or modify it
1920+# under the terms of the GNU General Public License version 3, as published
1921+# by the Free Software Foundation.
1922+#
1923+# This program is distributed in the hope that it will be useful, but
1924+# WITHOUT ANY WARRANTY; without even the implied warranties of
1925+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
1926+# PURPOSE. See the GNU General Public License for more details.
1927+#
1928+# You should have received a copy of the GNU General Public License along
1929+# with this program. If not, see <http://www.gnu.org/licenses/>.
1930+
1931+"""Code for scanning local directory state."""
1932+
1933+from __future__ import with_statement
1934+
1935+import os
1936+import hashlib
1937+import shutil
1938+
1939+from errno import ENOTDIR, EINVAL
1940+
1941+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE, SYMLINK
1942+
1943+from magicicada.u1sync.genericmerge import MergeNode
1944+from magicicada.u1sync.utils import should_sync
1945+
1946+
1947+EMPTY_HASH = "sha1:%s" % hashlib.sha1().hexdigest()
1948+
1949+
1950+def scan_directory(path, display_path="", quiet=False):
1951+ """Scans a local directory and builds an in-memory tree from it."""
1952+ if display_path != "" and not quiet:
1953+ print display_path
1954+
1955+ link_target = None
1956+ child_names = None
1957+ try:
1958+ link_target = os.readlink(path)
1959+ except OSError as e:
1960+ if e.errno != EINVAL:
1961+ raise
1962+ try:
1963+ child_names = os.listdir(path)
1964+ except OSError as e:
1965+ if e.errno != ENOTDIR:
1966+ raise
1967+
1968+ if link_target is not None:
1969+ # symlink
1970+ sum = hashlib.sha1()
1971+ sum.update(link_target)
1972+ content_hash = "sha1:%s" % sum.hexdigest()
1973+ return MergeNode(node_type=SYMLINK, content_hash=content_hash)
1974+ elif child_names is not None:
1975+ # directory
1976+ child_names = [
1977+ n for n in child_names if should_sync(n.decode("utf-8"))]
1978+ child_paths = [(os.path.join(path, child_name),
1979+ os.path.join(display_path, child_name))
1980+ for child_name in child_names]
1981+ children = [scan_directory(child_path, child_display_path, quiet)
1982+ for (child_path, child_display_path) in child_paths]
1983+ unicode_child_names = [n.decode("utf-8") for n in child_names]
1984+ children = dict(zip(unicode_child_names, children))
1985+ return MergeNode(node_type=DIRECTORY, children=children)
1986+ else:
1987+ # regular file
1988+ sum = hashlib.sha1()
1989+
1990+ class HashStream(object):
1991+ """Stream that computes hashes."""
1992+ def write(self, bytes):
1993+ """Accumulate bytes."""
1994+ sum.update(bytes)
1995+
1996+ with open(path, "r") as stream:
1997+ shutil.copyfileobj(stream, HashStream())
1998+ content_hash = "sha1:%s" % sum.hexdigest()
1999+ return MergeNode(node_type=FILE, content_hash=content_hash)
2000
2001=== added file 'magicicada/u1sync/sync.py'
2002--- magicicada/u1sync/sync.py 1970-01-01 00:00:00 +0000
2003+++ magicicada/u1sync/sync.py 2018-04-20 01:22:48 +0000
2004@@ -0,0 +1,377 @@
2005+# Copyright 2009 Canonical Ltd.
2006+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
2007+#
2008+# This program is free software: you can redistribute it and/or modify it
2009+# under the terms of the GNU General Public License version 3, as published
2010+# by the Free Software Foundation.
2011+#
2012+# This program is distributed in the hope that it will be useful, but
2013+# WITHOUT ANY WARRANTY; without even the implied warranties of
2014+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
2015+# PURPOSE. See the GNU General Public License for more details.
2016+#
2017+# You should have received a copy of the GNU General Public License along
2018+# with this program. If not, see <http://www.gnu.org/licenses/>.
2019+
2020+"""Sync.
2021+
2022+After merging, these routines are used to synchronize state locally and on
2023+the server to correspond to the merged result.
2024+
2025+"""
2026+
2027+from __future__ import with_statement
2028+
2029+import os
2030+
2031+from ubuntuone.storageprotocol import request
2032+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, SYMLINK
2033+
2034+from magicicada.u1sync.client import UnsupportedOperationError
2035+from magicicada.u1sync.genericmerge import MergeNode, generic_merge
2036+from magicicada.u1sync.utils import safe_mkdir
2037+
2038+
2039+EMPTY_HASH = ""
2040+UPLOAD_SYMBOL = u"\u25b2".encode("utf-8")
2041+DOWNLOAD_SYMBOL = u"\u25bc".encode("utf-8")
2042+CONFLICT_SYMBOL = "!"
2043+DELETE_SYMBOL = "X"
2044+
2045+
2046+def get_conflict_path(path, conflict_info):
2047+ """Return path for conflict file corresponding to path."""
2048+ dir, name = os.path.split(path)
2049+ unique_id = conflict_info[0]
2050+ return os.path.join(dir, "conflict-%s-%s" % (unique_id, name))
2051+
2052+
2053+def name_from_path(path):
2054+ """Return unicode name from last path component."""
2055+ return os.path.split(path)[1].decode("UTF-8")
2056+
2057+
2058+class NodeSyncError(Exception):
2059+ """Error syncing node."""
2060+
2061+
2062+class NodeCreateError(NodeSyncError):
2063+ """Error creating node."""
2064+
2065+
2066+class NodeUpdateError(NodeSyncError):
2067+ """Error updating node."""
2068+
2069+
2070+class NodeDeleteError(NodeSyncError):
2071+ """Error deleting node."""
2072+
2073+
2074+def sync_tree(merged_tree, original_tree, sync_mode, path, quiet):
2075+ """Performs actual synchronization."""
2076+
2077+ def pre_merge(nodes, name, partial_parent):
2078+ """Create nodes and write content as required."""
2079+ (merged_node, original_node) = nodes
2080+ (parent_path, parent_display_path, parent_uuid,
2081+ parent_synced) = partial_parent
2082+
2083+ utf8_name = name.encode("utf-8")
2084+ path = os.path.join(parent_path, utf8_name)
2085+ display_path = os.path.join(parent_display_path, utf8_name)
2086+ node_uuid = None
2087+
2088+ synced = False
2089+ if merged_node is not None:
2090+ if merged_node.node_type == DIRECTORY:
2091+ if original_node is not None:
2092+ synced = True
2093+ node_uuid = original_node.uuid
2094+ else:
2095+ if not quiet:
2096+ print "%s %s" % (sync_mode.symbol, display_path)
2097+ try:
2098+ create_dir = sync_mode.create_directory
2099+ node_uuid = create_dir(parent_uuid=parent_uuid,
2100+ path=path)
2101+ synced = True
2102+ except NodeCreateError as e:
2103+ print e
2104+ elif merged_node.content_hash is None:
2105+ if not quiet:
2106+ print "? %s" % display_path
2107+ elif (original_node is None or
2108+ original_node.content_hash != merged_node.content_hash or
2109+ merged_node.conflict_info is not None):
2110+ conflict_info = merged_node.conflict_info
2111+ if conflict_info is not None:
2112+ conflict_symbol = CONFLICT_SYMBOL
2113+ else:
2114+ conflict_symbol = " "
2115+ if not quiet:
2116+ print "%s %s %s" % (sync_mode.symbol, conflict_symbol,
2117+ display_path)
2118+ if original_node is not None:
2119+ node_uuid = original_node.uuid or merged_node.uuid
2120+ original_hash = original_node.content_hash or EMPTY_HASH
2121+ else:
2122+ node_uuid = merged_node.uuid
2123+ original_hash = EMPTY_HASH
2124+ try:
2125+ sync_mode.write_file(
2126+ node_uuid=node_uuid,
2127+ content_hash=merged_node.content_hash,
2128+ old_content_hash=original_hash, path=path,
2129+ parent_uuid=parent_uuid, conflict_info=conflict_info,
2130+ node_type=merged_node.node_type)
2131+ synced = True
2132+ except NodeSyncError as e:
2133+ print e
2134+ else:
2135+ synced = True
2136+
2137+ return (path, display_path, node_uuid, synced)
2138+
2139+ def post_merge(nodes, partial_result, child_results):
2140+ """Delete nodes."""
2141+ (merged_node, original_node) = nodes
2142+ (path, display_path, node_uuid, synced) = partial_result
2143+
2144+ if merged_node is None:
2145+ assert original_node is not None
2146+ if not quiet:
2147+ print "%s %s %s" % (sync_mode.symbol, DELETE_SYMBOL,
2148+ display_path)
2149+ try:
2150+ if original_node.node_type == DIRECTORY:
2151+ sync_mode.delete_directory(node_uuid=original_node.uuid,
2152+ path=path)
2153+ else:
2154+ # files or symlinks
2155+ sync_mode.delete_file(node_uuid=original_node.uuid,
2156+ path=path)
2157+ synced = True
2158+ except NodeDeleteError as e:
2159+ print e
2160+
2161+ if synced:
2162+ model_node = merged_node
2163+ else:
2164+ model_node = original_node
2165+
2166+ if model_node is not None:
2167+ if model_node.node_type == DIRECTORY:
2168+ child_iter = child_results.iteritems()
2169+ merged_children = dict(
2170+ (name, child) for (name, child) in child_iter
2171+ if child is not None)
2172+ else:
2173+ # if there are children here it's because they failed to delete
2174+ merged_children = None
2175+ return MergeNode(node_type=model_node.node_type,
2176+ uuid=model_node.uuid,
2177+ children=merged_children,
2178+ content_hash=model_node.content_hash)
2179+ else:
2180+ return None
2181+
2182+ return generic_merge(trees=[merged_tree, original_tree],
2183+ pre_merge=pre_merge, post_merge=post_merge,
2184+ partial_parent=(path, "", None, True), name=u"")
2185+
2186+
2187+def download_tree(merged_tree, local_tree, client, share_uuid, path, dry_run,
2188+ quiet):
2189+ """Downloads a directory."""
2190+ if dry_run:
2191+ downloader = DryRun(symbol=DOWNLOAD_SYMBOL)
2192+ else:
2193+ downloader = Downloader(client=client, share_uuid=share_uuid)
2194+ return sync_tree(merged_tree=merged_tree, original_tree=local_tree,
2195+ sync_mode=downloader, path=path, quiet=quiet)
2196+
2197+
2198+def upload_tree(merged_tree, remote_tree, client, share_uuid, path, dry_run,
2199+ quiet):
2200+ """Uploads a directory."""
2201+ if dry_run:
2202+ uploader = DryRun(symbol=UPLOAD_SYMBOL)
2203+ else:
2204+ uploader = Uploader(client=client, share_uuid=share_uuid)
2205+ return sync_tree(merged_tree=merged_tree, original_tree=remote_tree,
2206+ sync_mode=uploader, path=path, quiet=quiet)
2207+
2208+
2209+class DryRun(object):
2210+ """A class which implements the sync interface but does nothing."""
2211+ def __init__(self, symbol):
2212+ """Initializes a DryRun instance."""
2213+ self.symbol = symbol
2214+
2215+ def create_directory(self, parent_uuid, path):
2216+ """Doesn't create a directory."""
2217+ return None
2218+
2219+ def write_file(self, node_uuid, old_content_hash, content_hash,
2220+ parent_uuid, path, conflict_info, node_type):
2221+ """Doesn't write a file."""
2222+ return None
2223+
2224+ def delete_directory(self, node_uuid, path):
2225+ """Doesn't delete a directory."""
2226+
2227+ def delete_file(self, node_uuid, path):
2228+ """Doesn't delete a file."""
2229+
2230+
2231+class Downloader(object):
2232+ """A class which implements the download half of syncing."""
2233+ def __init__(self, client, share_uuid):
2234+ """Initializes a Downloader instance."""
2235+ self.client = client
2236+ self.share_uuid = share_uuid
2237+ self.symbol = DOWNLOAD_SYMBOL
2238+
2239+ def create_directory(self, parent_uuid, path):
2240+ """Creates a directory."""
2241+ try:
2242+ safe_mkdir(path)
2243+ except OSError as e:
2244+ raise NodeCreateError(
2245+ "Error creating local directory %s: %s" % (path, e))
2246+ return None
2247+
2248+ def write_file(self, node_uuid, old_content_hash, content_hash,
2249+ parent_uuid, path, conflict_info, node_type):
2250+ """Creates a file and downloads new content for it."""
2251+ if conflict_info:
2252+ # download to conflict file rather than overwriting local changes
2253+ path = get_conflict_path(path, conflict_info)
2254+ content_hash = conflict_info[1]
2255+ try:
2256+ if node_type == SYMLINK:
2257+ self.client.download_string(
2258+ share_uuid=self.share_uuid, node_uuid=node_uuid,
2259+ content_hash=content_hash)
2260+ else:
2261+ self.client.download_file(
2262+ share_uuid=self.share_uuid, node_uuid=node_uuid,
2263+ content_hash=content_hash, filename=path)
2264+ except (request.StorageRequestError, UnsupportedOperationError) as e:
2265+ if os.path.exists(path):
2266+ raise NodeUpdateError(
2267+ "Error downloading content for %s: %s" % (path, e))
2268+ else:
2269+ raise NodeCreateError(
2270+ "Error locally creating %s: %s" % (path, e))
2271+
2272+ def delete_directory(self, node_uuid, path):
2273+ """Deletes a directory."""
2274+ try:
2275+ os.rmdir(path)
2276+ except OSError as e:
2277+ raise NodeDeleteError("Error locally deleting %s: %s" % (path, e))
2278+
2279+ def delete_file(self, node_uuid, path):
2280+ """Deletes a file."""
2281+ try:
2282+ os.remove(path)
2283+ except OSError as e:
2284+ raise NodeDeleteError("Error locally deleting %s: %s" % (path, e))
2285+
2286+
2287+class Uploader(object):
2288+ """A class which implements the upload half of syncing."""
2289+ def __init__(self, client, share_uuid):
2290+ """Initializes an uploader instance."""
2291+ self.client = client
2292+ self.share_uuid = share_uuid
2293+ self.symbol = UPLOAD_SYMBOL
2294+
2295+ def create_directory(self, parent_uuid, path):
2296+ """Creates a directory on the server."""
2297+ name = name_from_path(path)
2298+ try:
2299+ return self.client.create_directory(share_uuid=self.share_uuid,
2300+ parent_uuid=parent_uuid,
2301+ name=name)
2302+ except (request.StorageRequestError, UnsupportedOperationError) as e:
2303+ raise NodeCreateError("Error remotely creating %s: %s" % (path, e))
2304+
2305+ def write_file(self, node_uuid, old_content_hash, content_hash,
2306+ parent_uuid, path, conflict_info, node_type):
2307+ """Creates a file on the server and uploads new content for it."""
2308+
2309+ if conflict_info:
2310+ # move conflicting file out of the way on the server
2311+ conflict_path = get_conflict_path(path, conflict_info)
2312+ conflict_name = name_from_path(conflict_path)
2313+ try:
2314+ self.client.move(share_uuid=self.share_uuid,
2315+ parent_uuid=parent_uuid,
2316+ name=conflict_name,
2317+ node_uuid=node_uuid)
2318+ except (request.StorageRequestError,
2319+ UnsupportedOperationError) as e:
2320+ raise NodeUpdateError(
2321+ "Error remotely renaming %s to %s: %s" %
2322+ (path, conflict_path, e))
2323+ node_uuid = None
2324+ old_content_hash = EMPTY_HASH
2325+
2326+ if node_type == SYMLINK:
2327+ try:
2328+ target = os.readlink(path)
2329+ except OSError as e:
2330+ raise NodeCreateError(
2331+ "Error retrieving link target for %s: %s" % (path, e))
2332+ else:
2333+ target = None
2334+
2335+ name = name_from_path(path)
2336+ if node_uuid is None:
2337+ try:
2338+ if node_type == SYMLINK:
2339+ node_uuid = self.client.create_symlink(
2340+ share_uuid=self.share_uuid, parent_uuid=parent_uuid,
2341+ name=name, target=target)
2342+ old_content_hash = content_hash
2343+ else:
2344+ node_uuid = self.client.create_file(
2345+ share_uuid=self.share_uuid, parent_uuid=parent_uuid,
2346+ name=name)
2347+ except (request.StorageRequestError,
2348+ UnsupportedOperationError) as e:
2349+ raise NodeCreateError(
2350+ "Error remotely creating %s: %s" % (path, e))
2351+
2352+ if old_content_hash != content_hash:
2353+ try:
2354+ if node_type == SYMLINK:
2355+ self.client.upload_string(
2356+ share_uuid=self.share_uuid, node_uuid=node_uuid,
2357+ content_hash=content_hash,
2358+ old_content_hash=old_content_hash, content=target)
2359+ else:
2360+ self.client.upload_file(
2361+ share_uuid=self.share_uuid, node_uuid=node_uuid,
2362+ content_hash=content_hash,
2363+ old_content_hash=old_content_hash, filename=path)
2364+ except (request.StorageRequestError,
2365+ UnsupportedOperationError) as e:
2366+ raise NodeUpdateError(
2367+ "Error uploading content for %s: %s" % (path, e))
2368+
2369+ def delete_directory(self, node_uuid, path):
2370+ """Deletes a directory."""
2371+ try:
2372+ self.client.unlink(share_uuid=self.share_uuid, node_uuid=node_uuid)
2373+ except (request.StorageRequestError, UnsupportedOperationError) as e:
2374+ raise NodeDeleteError("Error remotely deleting %s: %s" % (path, e))
2375+
2376+ def delete_file(self, node_uuid, path):
2377+ """Deletes a file."""
2378+ try:
2379+ self.client.unlink(share_uuid=self.share_uuid, node_uuid=node_uuid)
2380+ except (request.StorageRequestError, UnsupportedOperationError) as e:
2381+ raise NodeDeleteError("Error remotely deleting %s: %s" % (path, e))
2382
2383=== added directory 'magicicada/u1sync/tests'
2384=== added file 'magicicada/u1sync/tests/__init__.py'
2385--- magicicada/u1sync/tests/__init__.py 1970-01-01 00:00:00 +0000
2386+++ magicicada/u1sync/tests/__init__.py 2018-04-20 01:22:48 +0000
2387@@ -0,0 +1,1 @@
2388+"""Tests for u1sync."""
2389
2390=== added file 'magicicada/u1sync/tests/test_client.py'
2391--- magicicada/u1sync/tests/test_client.py 1970-01-01 00:00:00 +0000
2392+++ magicicada/u1sync/tests/test_client.py 2018-04-20 01:22:48 +0000
2393@@ -0,0 +1,63 @@
2394+# Copyright 2010 Canonical Ltd.
2395+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
2396+#
2397+# This program is free software: you can redistribute it and/or modify it
2398+# under the terms of the GNU General Public License version 3, as published
2399+# by the Free Software Foundation.
2400+#
2401+# This program is distributed in the hope that it will be useful, but
2402+# WITHOUT ANY WARRANTY; without even the implied warranties of
2403+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
2404+# PURPOSE. See the GNU General Public License for more details.
2405+#
2406+# You should have received a copy of the GNU General Public License along
2407+# with this program. If not, see <http://www.gnu.org/licenses/>.
2408+
2409+"""Test the client code."""
2410+
2411+from mocker import Mocker
2412+from twisted.trial.unittest import TestCase
2413+
2414+from magicicada.u1sync import client
2415+
2416+
2417+class SyncStorageClientTest(TestCase):
2418+ """Test the SyncStorageClient."""
2419+
2420+ def test_conn_made_call_parent(self):
2421+ """The connectionMade method should call the parent."""
2422+ # set up everything
2423+ called = []
2424+ self.patch(client.StorageClient, 'connectionMade',
2425+ lambda s: called.append(True))
2426+ c = client.SyncStorageClient()
2427+ mocker = Mocker()
2428+ obj = mocker.mock()
2429+ obj.current_protocol
2430+ mocker.result(None)
2431+ obj.current_protocol = c
2432+ obj.observer.connected()
2433+ c.factory = obj
2434+
2435+ # call and test
2436+ with mocker:
2437+ c.connectionMade()
2438+ self.assertTrue(called)
2439+
2440+ def test_conn_lost_call_parent(self):
2441+ """The connectionLost method should call the parent."""
2442+ # set up everything
2443+ called = []
2444+ self.patch(client.StorageClient, 'connectionLost',
2445+ lambda s, r: called.append(True))
2446+ c = client.SyncStorageClient()
2447+ mocker = Mocker()
2448+ obj = mocker.mock()
2449+ obj.current_protocol
2450+ mocker.result(None)
2451+ c.factory = obj
2452+
2453+ # call and test
2454+ with mocker:
2455+ c.connectionLost()
2456+ self.assertTrue(called)
2457
2458=== added file 'magicicada/u1sync/tests/test_init.py'
2459--- magicicada/u1sync/tests/test_init.py 1970-01-01 00:00:00 +0000
2460+++ magicicada/u1sync/tests/test_init.py 2018-04-20 01:22:48 +0000
2461@@ -0,0 +1,22 @@
2462+# Copyright 2009 Canonical Ltd.
2463+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
2464+#
2465+# This program is free software: you can redistribute it and/or modify it
2466+# under the terms of the GNU General Public License version 3, as published
2467+# by the Free Software Foundation.
2468+#
2469+# This program is distributed in the hope that it will be useful, but
2470+# WITHOUT ANY WARRANTY; without even the implied warranties of
2471+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
2472+# PURPOSE. See the GNU General Public License for more details.
2473+#
2474+# You should have received a copy of the GNU General Public License along
2475+# with this program. If not, see <http://www.gnu.org/licenses/>.
2476+
2477+"""Tests for u1sync mirror init"""
2478+
2479+from unittest import TestCase
2480+
2481+
2482+class InitTestCase(TestCase):
2483+ """Tests for u1sync --init"""
2484
2485=== added file 'magicicada/u1sync/tests/test_merge.py'
2486--- magicicada/u1sync/tests/test_merge.py 1970-01-01 00:00:00 +0000
2487+++ magicicada/u1sync/tests/test_merge.py 2018-04-20 01:22:48 +0000
2488@@ -0,0 +1,109 @@
2489+# Copyright 2009 Canonical Ltd.
2490+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
2491+#
2492+# This program is free software: you can redistribute it and/or modify it
2493+# under the terms of the GNU General Public License version 3, as published
2494+# by the Free Software Foundation.
2495+#
2496+# This program is distributed in the hope that it will be useful, but
2497+# WITHOUT ANY WARRANTY; without even the implied warranties of
2498+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
2499+# PURPOSE. See the GNU General Public License for more details.
2500+#
2501+# You should have received a copy of the GNU General Public License along
2502+# with this program. If not, see <http://www.gnu.org/licenses/>.
2503+
2504+"""Tests for tree merging."""
2505+
2506+import uuid
2507+import os
2508+
2509+from unittest import TestCase
2510+
2511+from ubuntuone.storageprotocol.dircontent_pb2 import FILE, DIRECTORY
2512+
2513+from magicicada.u1sync.genericmerge import MergeNode, generic_merge
2514+from magicicada.u1sync.merge import (
2515+ merge_trees, ClobberLocalMerge, ClobberServerMerge, SyncMerge)
2516+
2517+
2518+def accumulate_path(nodes, name, partial_parent):
2519+ """pre-merge which accumulates a path"""
2520+ return os.path.join(partial_parent, name)
2521+
2522+
2523+def capture_merge(nodes, partial_result, child_results):
2524+ """post-merge which accumulates merge results."""
2525+ return (nodes, partial_result, child_results)
2526+
2527+
2528+class MergeTest(TestCase):
2529+ """Tests for generic tree merges."""
2530+
2531+ def test_generic_merge(self):
2532+ """Tests that generic merge behaves as expected."""
2533+ tree_a = MergeNode(DIRECTORY, children={
2534+ 'foo': MergeNode(FILE, uuid=uuid.uuid4()),
2535+ 'bar': MergeNode(FILE, uuid=uuid.uuid4()),
2536+ }, uuid=uuid.uuid4())
2537+ tree_b = MergeNode(DIRECTORY, children={
2538+ 'bar': MergeNode(FILE, uuid=uuid.uuid4()),
2539+ 'baz': MergeNode(FILE, uuid=uuid.uuid4()),
2540+ }, uuid=uuid.uuid4())
2541+ result = generic_merge(trees=[tree_a, tree_b],
2542+ pre_merge=accumulate_path,
2543+ post_merge=capture_merge,
2544+ partial_parent="", name="ex")
2545+ expected_result = ([tree_a, tree_b], "ex", {
2546+ 'foo': ([tree_a.children['foo'], None], "ex/foo", {}),
2547+ 'bar': ([tree_a.children['bar'], tree_b.children['bar']],
2548+ "ex/bar", {}),
2549+ 'baz': ([None, tree_b.children['baz']], "ex/baz", {}),
2550+ })
2551+ self.assertEqual(expected_result, result)
2552+
2553+ def test_clobber(self):
2554+ """Tests clobbering merges."""
2555+ server_tree = MergeNode(DIRECTORY, children={
2556+ 'foo': MergeNode(FILE, content_hash="dummy:abc"),
2557+ 'bar': MergeNode(FILE, content_hash="dummy:xyz"),
2558+ 'baz': MergeNode(FILE, content_hash="dummy:aaa"),
2559+ })
2560+ local_tree = MergeNode(DIRECTORY, children={
2561+ 'foo': MergeNode(FILE, content_hash="dummy:cde"),
2562+ 'bar': MergeNode(FILE, content_hash="dummy:zyx"),
2563+ 'hoge': MergeNode(FILE, content_hash="dummy:bbb"),
2564+ })
2565+ result_tree = merge_trees(local_tree, local_tree,
2566+ server_tree, server_tree,
2567+ ClobberServerMerge())
2568+ self.assertEqual(local_tree, result_tree)
2569+ result_tree = merge_trees(local_tree, local_tree,
2570+ server_tree, server_tree,
2571+ ClobberLocalMerge())
2572+ self.assertEqual(server_tree, result_tree)
2573+
2574+ def test_sync(self):
2575+ """Test sync merges."""
2576+ server_tree = MergeNode(DIRECTORY, children={
2577+ 'bar': MergeNode(FILE, content_hash="dummy:xyz"),
2578+ 'baz': MergeNode(FILE, content_hash="dummy:aaa"),
2579+ 'foo': MergeNode(FILE, content_hash="dummy:abc"),
2580+ })
2581+ old_server_tree = MergeNode(DIRECTORY, children={})
2582+ local_tree = MergeNode(DIRECTORY, children={
2583+ 'bar': MergeNode(FILE, content_hash="dummy:xyz"),
2584+ 'foo': MergeNode(FILE, content_hash="dummy:abc"),
2585+ 'hoge': MergeNode(FILE, content_hash="dummy:bbb"),
2586+ })
2587+ old_local_tree = MergeNode(DIRECTORY, children={})
2588+ expected_tree = MergeNode(DIRECTORY, children={
2589+ 'bar': MergeNode(FILE, content_hash="dummy:xyz"),
2590+ 'baz': MergeNode(FILE, content_hash="dummy:aaa"),
2591+ 'foo': MergeNode(FILE, content_hash="dummy:abc"),
2592+ 'hoge': MergeNode(FILE, content_hash="dummy:bbb"),
2593+ })
2594+ result_tree = merge_trees(old_local_tree, local_tree,
2595+ old_server_tree, server_tree,
2596+ SyncMerge())
2597+ self.assertEqual(result_tree, expected_tree)
2598
2599=== added file 'magicicada/u1sync/utils.py'
2600--- magicicada/u1sync/utils.py 1970-01-01 00:00:00 +0000
2601+++ magicicada/u1sync/utils.py 2018-04-20 01:22:48 +0000
2602@@ -0,0 +1,50 @@
2603+# Copyright 2009 Canonical Ltd.
2604+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
2605+#
2606+# This program is free software: you can redistribute it and/or modify it
2607+# under the terms of the GNU General Public License version 3, as published
2608+# by the Free Software Foundation.
2609+#
2610+# This program is distributed in the hope that it will be useful, but
2611+# WITHOUT ANY WARRANTY; without even the implied warranties of
2612+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
2613+# PURPOSE. See the GNU General Public License for more details.
2614+#
2615+# You should have received a copy of the GNU General Public License along
2616+# with this program. If not, see <http://www.gnu.org/licenses/>.
2617+
2618+"""Miscellaneous utility functions."""
2619+
2620+import os
2621+
2622+from errno import EEXIST, ENOENT
2623+
2624+from magicicada.u1sync.constants import METADATA_DIR_NAME, SPECIAL_FILE_RE
2625+
2626+
2627+def should_sync(filename):
2628+ """Returns True if the filename should be synced.
2629+
2630+ @param filename: a unicode filename
2631+
2632+ """
2633+ return (filename != METADATA_DIR_NAME and
2634+ not SPECIAL_FILE_RE.match(filename))
2635+
2636+
2637+def safe_mkdir(path):
2638+ """Creates a directory if it does not already exist."""
2639+ try:
2640+ os.mkdir(path)
2641+ except OSError as e:
2642+ if e.errno != EEXIST:
2643+ raise
2644+
2645+
2646+def safe_unlink(path):
2647+ """Unlinks a file if it exists."""
2648+ try:
2649+ os.remove(path)
2650+ except OSError as e:
2651+ if e.errno != ENOENT:
2652+ raise

Subscribers

People subscribed via source and target branches

to all changes: