Merge lp:~nataliabidart/magicicada-server/embed-u1sync into lp:magicicada-server

Proposed by Natalia Bidart
Status: Merged
Approved by: Natalia Bidart
Approved revision: 95
Merged at revision: 90
Proposed branch: lp:~nataliabidart/magicicada-server/embed-u1sync
Merge into: lp:magicicada-server
Diff against target: 2652 lines (+2362/-31)
28 files modified
config-manager.txt (+0/-1)
lib/utilities/testlib.py (+0/-1)
lib/utilities/utils.py (+1/-1)
magicicada/filesync/services.py (+1/-2)
magicicada/metrics/tests/test_base.py (+1/-2)
magicicada/monitoring/reactor.py (+1/-2)
magicicada/monitoring/tests/test_reactor.py (+1/-2)
magicicada/server/integtests/test_sync.py (+4/-4)
magicicada/server/server.py (+1/-3)
magicicada/server/ssl_proxy.py (+1/-3)
magicicada/server/stats.py (+1/-2)
magicicada/server/tests/test_server.py (+1/-3)
magicicada/server/tests/test_ssl_proxy.py (+1/-3)
magicicada/server/tests/test_stats.py (+1/-2)
magicicada/u1sync/__init__.py (+14/-0)
magicicada/u1sync/client.py (+736/-0)
magicicada/u1sync/constants.py (+26/-0)
magicicada/u1sync/genericmerge.py (+86/-0)
magicicada/u1sync/main.py (+443/-0)
magicicada/u1sync/merge.py (+181/-0)
magicicada/u1sync/metadata.py (+155/-0)
magicicada/u1sync/scan.py (+84/-0)
magicicada/u1sync/sync.py (+377/-0)
magicicada/u1sync/tests/__init__.py (+1/-0)
magicicada/u1sync/tests/test_client.py (+63/-0)
magicicada/u1sync/tests/test_init.py (+22/-0)
magicicada/u1sync/tests/test_merge.py (+109/-0)
magicicada/u1sync/utils.py (+50/-0)
To merge this branch: bzr merge lp:~nataliabidart/magicicada-server/embed-u1sync
Reviewer: Facundo Batista (status: Approve)
Review via email: mp+343570@code.launchpad.net

Commit message

- Embed u1sync test dependency inside magicicada source code.
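
In practice the embedding boils down to an import move. A minimal before/after sketch, taken from the imports touched in the diff below:

    # Before: u1sync was an external sourcedep, imported as a top-level package.
    import u1sync.client
    from u1sync.main import do_diff, do_init, do_sync

    # After: the same modules live inside the magicicada package.
    from magicicada.u1sync import client as u1sync_client
    from magicicada.u1sync.main import do_diff, do_init, do_sync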

Revision history for this message
Facundo Batista (facundo) wrote:

Awesome!

review: Approve

Preview Diff

=== modified file 'config-manager.txt'
--- config-manager.txt 2018-04-18 20:28:35 +0000
+++ config-manager.txt 2018-04-20 01:22:48 +0000
@@ -21,5 +21,4 @@
 #
 # make clean-sourcedeps sourcedeps
 
-./.sourcecode/u1sync ~facundo/u1sync/opensourcing;revno=10
 ./.sourcecode/magicicada-client ~chicharreros/magicicada-client/trunk;revno=1442
 
=== removed symlink 'lib/metrics'
=== target was u'../magicicada/metrics/'
=== removed symlink 'lib/u1sync'
=== target was u'../.sourcecode/u1sync/u1sync'
=== modified file 'lib/utilities/testlib.py'
--- lib/utilities/testlib.py 2018-04-05 21:08:25 +0000
+++ lib/utilities/testlib.py 2018-04-20 01:22:48 +0000
@@ -20,7 +20,6 @@
 
 from __future__ import unicode_literals
 
-import logging
 import os
 import re
 
 
=== modified file 'lib/utilities/utils.py'
--- lib/utilities/utils.py 2018-04-05 21:08:25 +0000
+++ lib/utilities/utils.py 2018-04-20 01:22:48 +0000
@@ -37,7 +37,7 @@
         with open(proc_stat, 'r') as f:
             if proc_name in f.read():
                 return True
-    except:
+    except Exception:
         # ignore all errors
         pass
     return False
 
=== modified file 'magicicada/filesync/services.py'
--- magicicada/filesync/services.py 2018-04-05 21:08:25 +0000
+++ magicicada/filesync/services.py 2018-04-20 01:22:48 +0000
@@ -34,12 +34,11 @@
 from functools import wraps
 from weakref import WeakValueDictionary
 
-import metrics
-
 from django.conf import settings
 from django.db import connection, models
 from django.utils.timezone import now
 
+from magicicada import metrics
 from magicicada.filesync import errors, utils
 from magicicada.filesync.dbmanager import (
     fsync_readonly,
 
=== modified file 'magicicada/metrics/tests/test_base.py'
--- magicicada/metrics/tests/test_base.py 2016-06-04 19:05:51 +0000
+++ magicicada/metrics/tests/test_base.py 2018-04-20 01:22:48 +0000
@@ -22,8 +22,7 @@
 import logging
 import unittest
 
-import metrics
-
+from magicicada import metrics
 from magicicada.testing.testcase import BaseTestCase
 
 
 
=== modified file 'magicicada/monitoring/reactor.py'
--- magicicada/monitoring/reactor.py 2018-04-05 21:08:25 +0000
+++ magicicada/monitoring/reactor.py 2018-04-20 01:22:48 +0000
@@ -28,8 +28,7 @@
 import traceback
 import Queue
 
-import metrics
-
+from magicicada import metrics
 from magicicada.settings import TRACE
 
 
 
=== modified file 'magicicada/monitoring/tests/test_reactor.py'
--- magicicada/monitoring/tests/test_reactor.py 2018-04-05 21:08:25 +0000
+++ magicicada/monitoring/tests/test_reactor.py 2018-04-20 01:22:48 +0000
@@ -26,8 +26,7 @@
 from twisted.trial.unittest import TestCase as TwistedTestCase
 from twisted.internet import reactor, defer
 
-import metrics
-
+from magicicada import metrics
 from magicicada.monitoring.reactor import ReactorInspector, logger
 from magicicada.settings import TRACE
 from magicicada.testing.testcase import BaseTestCase
 
=== modified file 'magicicada/server/integtests/test_sync.py'
--- magicicada/server/integtests/test_sync.py 2018-04-05 21:08:25 +0000
+++ magicicada/server/integtests/test_sync.py 2018-04-20 01:22:48 +0000
@@ -35,9 +35,6 @@
 from twisted.internet import reactor, defer
 from twisted.python.failure import Failure
 
-import u1sync.client
-
-from u1sync.main import do_diff, do_init, do_sync
 from ubuntuone.platform import tools
 from ubuntuone.storageprotocol import client as protocol_client
 from ubuntuone.storageprotocol import request
@@ -59,6 +56,9 @@
 )
 from magicicada.server.testing.caps_helpers import required_caps
 from magicicada.server.testing.testcase import logger
+from magicicada.u1sync import client as u1sync_client
+from magicicada.u1sync.main import do_diff, do_init, do_sync
+
 
 U1SYNC_QUIET = True
 NO_CONTENT_HASH = ""
@@ -225,7 +225,7 @@
 
     def _u1sync_client(self):
         """Create a u1sync client instance."""
-        client = u1sync.client.Client()
+        client = u1sync_client.Client()
         client.connect_ssl("localhost", self.ssl_port, True)
         client.set_capabilities()
         auth_info = self.access_tokens['jack']
 
=== modified file 'magicicada/server/server.py'
--- magicicada/server/server.py 2018-04-05 21:08:25 +0000
+++ magicicada/server/server.py 2018-04-20 01:22:48 +0000
@@ -40,8 +40,6 @@
 import twisted
 import twisted.web.error
 
-import metrics
-
 from twisted.application.service import MultiService, Service
 from twisted.application.internet import TCPServer
 from twisted.internet.defer import maybeDeferred, inlineCallbacks
@@ -51,7 +49,7 @@
 from ubuntuone.storageprotocol import protocol_pb2, request, sharersp
 from ubuntuone.supervisor import utils as supervisor_utils
 
-from magicicada import settings
+from magicicada import metrics, settings
 from magicicada.filesync import errors as dataerror
 from magicicada.filesync.notifier import notifier
 from magicicada.monitoring.reactor import ReactorInspector
 
=== modified file 'magicicada/server/ssl_proxy.py'
--- magicicada/server/ssl_proxy.py 2018-04-05 21:08:25 +0000
+++ magicicada/server/ssl_proxy.py 2018-04-20 01:22:48 +0000
@@ -31,9 +31,7 @@
 from twisted.protocols import portforward, basic
 from twisted.web import server, resource
 
-import metrics
-
-from magicicada import settings
+from magicicada import metrics, settings
 from magicicada.server.server import FILESYNC_STATUS_MSG, get_service_port
 from magicicada.server.ssl import disable_ssl_compression
 from ubuntuone.supervisor import utils as supervisor_utils
 
=== modified file 'magicicada/server/stats.py'
--- magicicada/server/stats.py 2018-04-05 21:08:25 +0000
+++ magicicada/server/stats.py 2018-04-20 01:22:48 +0000
@@ -27,8 +27,7 @@
 from twisted.internet import defer, reactor
 from twisted.web import server, resource
 
-import metrics
-
+from magicicada import metrics
 from magicicada.monitoring import dump
 
 logger = logging.getLogger(__name__)
 
=== modified file 'magicicada/server/tests/test_server.py'
--- magicicada/server/tests/test_server.py 2018-04-05 21:08:25 +0000
+++ magicicada/server/tests/test_server.py 2018-04-20 01:22:48 +0000
@@ -28,8 +28,6 @@
 import uuid
 import weakref
 
-import metrics
-
 from django.utils.timezone import now
 from mocker import expect, Mocker, MockerTestCase, ARGS, KWARGS, ANY
 from twisted.python.failure import Failure
@@ -38,7 +36,7 @@
 from twisted.trial.unittest import TestCase as TwistedTestCase
 from ubuntuone.storageprotocol import protocol_pb2, request
 
-from magicicada import settings
+from magicicada import metrics, settings
 from magicicada.filesync import errors as dataerror
 from magicicada.filesync.models import Share
 from magicicada.server import errors
 
=== modified file 'magicicada/server/tests/test_ssl_proxy.py'
--- magicicada/server/tests/test_ssl_proxy.py 2018-04-05 21:08:25 +0000
+++ magicicada/server/tests/test_ssl_proxy.py 2018-04-20 01:22:48 +0000
@@ -34,9 +34,7 @@
     StorageClientFactory, StorageClient)
 from ubuntuone.supervisor import utils as supervisor_utils
 
-import metrics
-
-from magicicada import settings
+from magicicada import metrics, settings
 from magicicada.server import ssl_proxy
 from magicicada.server.server import PREFERRED_CAP
 from magicicada.server.testing.testcase import TestWithDatabase
 
=== modified file 'magicicada/server/tests/test_stats.py'
--- magicicada/server/tests/test_stats.py 2018-04-05 21:08:25 +0000
+++ magicicada/server/tests/test_stats.py 2018-04-20 01:22:48 +0000
@@ -20,8 +20,7 @@
 
 from twisted.internet import defer
 
-import metrics
-
+from magicicada import metrics
 from magicicada.server.stats import StatsWorker
 from magicicada.server.testing.testcase import TestWithDatabase
 
 
=== added directory 'magicicada/u1sync'
=== added file 'magicicada/u1sync/__init__.py'
--- magicicada/u1sync/__init__.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/__init__.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,14 @@
+# Copyright 2009 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License version 3, as published
+# by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranties of
+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+"""The guts of the u1sync tool."""
=== added file 'magicicada/u1sync/client.py'
--- magicicada/u1sync/client.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/client.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,736 @@
+# Copyright 2009-2015 Canonical
+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License version 3, as published
+# by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranties of
+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Pretty API for protocol client."""
+
+from __future__ import with_statement
+
+import logging
+import os
+import sys
+import shutil
+import time
+import uuid
+import zlib
+
+from cStringIO import StringIO
+from logging.handlers import RotatingFileHandler
+from Queue import Queue
+from threading import Lock
+
+from dirspec.basedir import xdg_cache_home
+from twisted.internet import reactor, defer
+from twisted.internet.defer import inlineCallbacks, returnValue
+from ubuntuone.storageprotocol import request, volumes
+from ubuntuone.storageprotocol.content_hash import crc32
+from ubuntuone.storageprotocol.context import get_ssl_context
+from ubuntuone.storageprotocol.client import (
+    StorageClientFactory, StorageClient)
+from ubuntuone.storageprotocol.delta import DIRECTORY as delta_DIR
+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE
+
+from magicicada.u1sync.genericmerge import MergeNode
+from magicicada.u1sync.utils import should_sync
+
+
+CONSUMER_KEY = "ubuntuone"
+CONSUMER_SECRET = "hammertime"
+
+u1sync_log_dir = os.path.join(xdg_cache_home, 'u1sync', 'log')
+LOGFILENAME = os.path.join(u1sync_log_dir, 'u1sync.log')
+if not os.path.exists(u1sync_log_dir):
+    os.makedirs(u1sync_log_dir)
+u1_logger = logging.getLogger("u1sync.timing.log")
+handler = RotatingFileHandler(LOGFILENAME)
+u1_logger.addHandler(handler)
+
+
+def share_str(share_uuid):
+    """Converts a share UUID to a form the protocol likes."""
+    return str(share_uuid) if share_uuid is not None else request.ROOT
+
+
+def log_timing(func):
+    def wrapper(*arg, **kwargs):
+        start = time.time()
+        ent = func(*arg, **kwargs)
+        stop = time.time()
+        u1_logger.debug('for %s %0.5f ms elapsed',
+                        func.func_name, (stop - start) * 1000.0)
+        return ent
+    return wrapper
+
+
+class ForcedShutdown(Exception):
+    """Client shutdown forced."""
+
+
+class Waiter(object):
+    """Wait object for blocking waits."""
+
+    def __init__(self):
+        """Initializes the wait object."""
+        self.queue = Queue()
+
+    def wake(self, result):
+        """Wakes the waiter with a result."""
+        self.queue.put((result, None))
+
+    def wakeAndRaise(self, exc_info):
+        """Wakes the waiter, raising the given exception in it."""
+        self.queue.put((None, exc_info))
+
+    def wakeWithResult(self, func, *args, **kw):
+        """Wakes the waiter with the result of the given function."""
+        try:
+            result = func(*args, **kw)
+        except Exception:
+            self.wakeAndRaise(sys.exc_info())
+        else:
+            self.wake(result)
+
+    def wait(self):
+        """Waits for wakeup."""
+        (result, exc_info) = self.queue.get()
+        if exc_info:
+            try:
+                raise exc_info[0], exc_info[1], exc_info[2]
+            finally:
+                exc_info = None
+        else:
+            return result
+
+
+class SyncStorageClient(StorageClient):
+    """Simple client that calls a callback on connection."""
+
+    @log_timing
+    def connectionMade(self):
+        """Setup and call callback."""
+        StorageClient.connectionMade(self)
+        if self.factory.current_protocol not in (None, self):
+            self.factory.current_protocol.transport.loseConnection()
+        self.factory.current_protocol = self
+        self.factory.observer.connected()
+
+    @log_timing
+    def connectionLost(self, reason=None):
+        """Callback for established connection lost."""
+        StorageClient.connectionLost(self, reason)
+        if self.factory.current_protocol is self:
+            self.factory.current_protocol = None
+            self.factory.observer.disconnected(reason)
+
+
+class SyncClientFactory(StorageClientFactory):
+    """A cmd protocol factory."""
+
+    protocol = SyncStorageClient
+
+    @log_timing
+    def __init__(self, observer):
+        """Create the factory"""
+        self.observer = observer
+        self.current_protocol = None
+
+    @log_timing
+    def clientConnectionFailed(self, connector, reason):
+        """We failed at connecting."""
+        self.current_protocol = None
+        self.observer.connection_failed(reason)
+
+
+class UnsupportedOperationError(Exception):
+    """The operation is unsupported by the protocol version."""
+
+
+class ConnectionError(Exception):
+    """A connection error."""
+
+
+class AuthenticationError(Exception):
+    """An authentication error."""
+
+
+class NoSuchShareError(Exception):
+    """Error when there is no such share available."""
+
+
+class CapabilitiesError(Exception):
+    """A capabilities set/query related error."""
+
+
+class Client(object):
+    """U1 storage client facade."""
+    required_caps = frozenset([
+        "no-content", "account-info", "resumable-uploads",
+        "fix462230", "volumes", "generations",
+    ])
+
+    def __init__(self, realm=None, reactor=reactor):
+        """Create the instance.
+
+        'realm' is no longer used, but is left as param for API compatibility.
+
+        """
+        self.reactor = reactor
+        self.factory = SyncClientFactory(self)
+
+        self._status_lock = Lock()
+        self._status = "disconnected"
+        self._status_reason = None
+        self._status_waiting = []
+        self._active_waiters = set()
+
+        self.consumer_key = CONSUMER_KEY
+        self.consumer_secret = CONSUMER_SECRET
+
+    def force_shutdown(self):
+        """Forces the client to shut itself down."""
+        with self._status_lock:
+            self._status = "forced_shutdown"
+            self._status_reason = None
+            for waiter in self._active_waiters:
+                waiter.wakeAndRaise((ForcedShutdown("Forced shutdown"),
+                                     None, None))
+            self._active_waiters.clear()
+
+    def _get_waiter_locked(self):
+        """Gets a wait object for blocking waits. Should be called with the
+        status lock held.
+        """
+        waiter = Waiter()
+        if self._status == "forced_shutdown":
+            raise ForcedShutdown("Forced shutdown")
+        self._active_waiters.add(waiter)
+        return waiter
+
+    def _get_waiter(self):
+        """Get a wait object for blocking waits. Acquires the status lock."""
+        with self._status_lock:
+            return self._get_waiter_locked()
+
+    def _wait(self, waiter):
+        """Waits for the waiter."""
+        try:
+            return waiter.wait()
+        finally:
+            with self._status_lock:
+                if waiter in self._active_waiters:
+                    self._active_waiters.remove(waiter)
+
+    @log_timing
+    def _change_status(self, status, reason=None):
+        """Changes the client status. Usually called from the reactor
+        thread.
+
+        """
+        with self._status_lock:
+            if self._status == "forced_shutdown":
+                return
+            self._status = status
+            self._status_reason = reason
+            waiting = self._status_waiting
+            self._status_waiting = []
+        for waiter in waiting:
+            waiter.wake((status, reason))
+
+    @log_timing
+    def _await_status_not(self, *ignore_statuses):
+        """Blocks until the client status changes, returning the new status.
+        Should never be called from the reactor thread.
+
+        """
+        with self._status_lock:
+            status = self._status
+            reason = self._status_reason
+            while status in ignore_statuses:
+                waiter = self._get_waiter_locked()
+                self._status_waiting.append(waiter)
+                self._status_lock.release()
+                try:
+                    status, reason = self._wait(waiter)
+                finally:
+                    self._status_lock.acquire()
+            if status == "forced_shutdown":
+                raise ForcedShutdown("Forced shutdown.")
+            return (status, reason)
+
+    def connection_failed(self, reason):
+        """Notification that connection failed."""
+        self._change_status("disconnected", reason)
+
+    def connected(self):
+        """Notification that connection succeeded."""
+        self._change_status("connected")
+
+    def disconnected(self, reason):
+        """Notification that we were disconnected."""
+        self._change_status("disconnected", reason)
+
+    def defer_from_thread(self, function, *args, **kwargs):
+        """Do twisted defer magic to get results and show exceptions."""
+        waiter = self._get_waiter()
+
+        @log_timing
+        def runner():
+            """inner."""
+            try:
+                d = function(*args, **kwargs)
+                if isinstance(d, defer.Deferred):
+                    d.addCallbacks(lambda r: waiter.wake((r, None, None)),
+                                   lambda f: waiter.wake((None, None, f)))
+                else:
+                    waiter.wake((d, None, None))
+            except Exception:
+                waiter.wake((None, sys.exc_info(), None))
+
+        self.reactor.callFromThread(runner)
+        result, exc_info, failure = self._wait(waiter)
+        if exc_info:
+            try:
+                raise exc_info[0], exc_info[1], exc_info[2]
+            finally:
+                exc_info = None
+        elif failure:
+            failure.raiseException()
+        else:
+            return result
+
+    @log_timing
+    def connect(self, host, port):
+        """Connect to host/port."""
+        def _connect():
+            """Deferred part."""
+            self.reactor.connectTCP(host, port, self.factory)
+        self._connect_inner(_connect)
+
+    @log_timing
+    def connect_ssl(self, host, port, no_verify):
+        """Connect to host/port using ssl."""
+        def _connect():
+            """deferred part."""
+            ctx = get_ssl_context(no_verify, host)
+            self.reactor.connectSSL(host, port, self.factory, ctx)
+        self._connect_inner(_connect)
+
+    @log_timing
+    def _connect_inner(self, _connect):
+        """Helper function for connecting."""
+        self._change_status("connecting")
+        self.reactor.callFromThread(_connect)
+        status, reason = self._await_status_not("connecting")
+        if status != "connected":
+            raise ConnectionError(reason.value)
+
+    @log_timing
+    def disconnect(self):
+        """Disconnect."""
+        if self.factory.current_protocol is not None:
+            self.reactor.callFromThread(
+                self.factory.current_protocol.transport.loseConnection)
+        self._await_status_not("connecting", "connected", "authenticated")
+
+    @log_timing
+    def simple_auth(self, username, password):
+        """Perform simple authorisation."""
+
+        @inlineCallbacks
+        def _wrapped_authenticate():
+            """Wrapped authenticate."""
+            try:
+                yield self.factory.current_protocol.simple_authenticate(
+                    username, password)
+            except Exception:
+                self.factory.current_protocol.transport.loseConnection()
+            else:
+                self._change_status("authenticated")
+
+        try:
+            self.defer_from_thread(_wrapped_authenticate)
+        except request.StorageProtocolError as e:
+            raise AuthenticationError(e)
+        status, reason = self._await_status_not("connected")
+        if status != "authenticated":
+            raise AuthenticationError(reason.value)
+
+    @log_timing
+    def set_capabilities(self):
+        """Set the capabilities with the server"""
+
+        client = self.factory.current_protocol
+
+        @log_timing
+        def set_caps_callback(req):
377 "Caps query succeeded"
+            if not req.accepted:
+                de = defer.fail("The server denied setting %s capabilities" %
+                                req.caps)
+                return de
+
+        @log_timing
+        def query_caps_callback(req):
+            "Caps query succeeded"
+            if req.accepted:
+                set_d = client.set_caps(self.required_caps)
+                set_d.addCallback(set_caps_callback)
+                return set_d
+            else:
+                # the server doesn't have the requested capabilities.
+                # return a failure for now, in the future we might want
+                # to reconnect to another server
+                de = defer.fail("The server doesn't have the requested"
+                                " capabilities: %s" % str(req.caps))
+                return de
+
+        @log_timing
+        def _wrapped_set_capabilities():
+            """Wrapped set_capabilities """
+            d = client.query_caps(self.required_caps)
+            d.addCallback(query_caps_callback)
+            return d
+
+        try:
+            self.defer_from_thread(_wrapped_set_capabilities)
+        except request.StorageProtocolError as e:
+            raise CapabilitiesError(e)
+
+    @log_timing
+    def get_root_info(self, volume_uuid):
+        """Returns the UUID of the applicable share root."""
+        if volume_uuid is None:
+            _get_root = self.factory.current_protocol.get_root
+            root = self.defer_from_thread(_get_root)
+            return (uuid.UUID(root), True)
+        else:
+            str_volume_uuid = str(volume_uuid)
+            volume = self._match_volume(
+                lambda v: str(v.volume_id) == str_volume_uuid)
+            if isinstance(volume, volumes.ShareVolume):
+                modify = volume.access_level == "Modify"
+            if isinstance(volume, volumes.UDFVolume):
+                modify = True
+            return (uuid.UUID(str(volume.node_id)), modify)
+
+    @log_timing
+    def resolve_path(self, share_uuid, root_uuid, path):
+        """Resolve path relative to the given root node."""
+
+        @inlineCallbacks
+        def _resolve_worker():
+            """Path resolution worker."""
+            node_uuid = root_uuid
+            local_path = path.strip('/')
+
+            while local_path != '':
+                local_path, name = os.path.split(local_path)
+                hashes = yield self._get_node_hashes(share_uuid)
+                content_hash = hashes.get(root_uuid, None)
+                if content_hash is None:
+                    raise KeyError("Content hash not available")
+                entries = yield self._get_dir_entries(share_uuid, root_uuid)
+                match_name = name.decode('utf-8')
+                match = None
+                for entry in entries:
+                    if match_name == entry.name:
+                        match = entry
+                        break
+
+                if match is None:
+                    raise KeyError("Path not found")
+
+                node_uuid = uuid.UUID(match.node)
+
+            returnValue(node_uuid)
+
+        return self.defer_from_thread(_resolve_worker)
+
+    @log_timing
+    def find_volume(self, volume_spec):
+        """Finds a share matching the given UUID. Looks at both share UUIDs
+        and root node UUIDs."""
+
+        def match(s):
+            return (str(s.volume_id) == volume_spec or
+                    str(s.node_id) == volume_spec)
+
+        volume = self._match_volume(match)
+        return uuid.UUID(str(volume.volume_id))
+
+    @log_timing
+    def _match_volume(self, predicate):
+        """Finds a volume matching the given predicate."""
+        _list_shares = self.factory.current_protocol.list_volumes
+        r = self.defer_from_thread(_list_shares)
+        for volume in r.volumes:
+            if predicate(volume):
+                return volume
+        raise NoSuchShareError()
+
+    @log_timing
+    def build_tree(self, share_uuid, root_uuid):
+        """Builds and returns a tree representing the metadata for the given
+        subtree in the given share.
+
+        @param share_uuid: the share UUID or None for the user's volume
+        @param root_uuid: the root UUID of the subtree (must be a directory)
+        @return: a MergeNode tree
+
+        """
+        root = MergeNode(node_type=DIRECTORY, uuid=root_uuid)
+
+        @log_timing
+        @inlineCallbacks
+        def _get_root_content_hash():
+            """Obtain the content hash for the root node."""
+            result = yield self._get_node_hashes(share_uuid)
+            returnValue(result.get(root_uuid, None))
+
+        root.content_hash = self.defer_from_thread(_get_root_content_hash)
+        if root.content_hash is None:
+            raise ValueError("No content available for node %s" % root_uuid)
+
+        @log_timing
+        @inlineCallbacks
+        def _get_children(parent_uuid, parent_content_hash):
+            """Obtain a sequence of MergeNodes corresponding to a node's
+            immediate children.
+
+            """
+            entries = yield self._get_dir_entries(share_uuid, parent_uuid)
+            children = {}
+            for entry in entries:
+                if should_sync(entry.name):
+                    child = MergeNode(node_type=entry.node_type,
+                                      uuid=uuid.UUID(entry.node))
+                    children[entry.name] = child
+
+            content_hashes = yield self._get_node_hashes(share_uuid)
+            for child in children.itervalues():
+                child.content_hash = content_hashes.get(child.uuid, None)
+
+            returnValue(children)
+
+        need_children = [root]
+        while need_children:
+            node = need_children.pop()
+            if node.content_hash is not None:
+                children = self.defer_from_thread(_get_children, node.uuid,
+                                                  node.content_hash)
+                node.children = children
+                for child in children.itervalues():
+                    if child.node_type == DIRECTORY:
+                        need_children.append(child)
+
+        return root
+
+    @log_timing
+    @defer.inlineCallbacks
+    def _get_dir_entries(self, share_uuid, node_uuid):
+        """Get raw dir entries for the given directory."""
+        result = yield self.factory.current_protocol.get_delta(
+            share_str(share_uuid), from_scratch=True)
+        node_uuid = share_str(node_uuid)
+        children = []
+        for n in result.response:
+            if n.parent_id == node_uuid:
+                # adapt here some attrs so we don't need to change ALL the code
+                n.node_type = DIRECTORY if n.file_type == delta_DIR else FILE
+                n.node = n.node_id
+                children.append(n)
+        defer.returnValue(children)
+
+    @log_timing
+    def download_string(self, share_uuid, node_uuid, content_hash):
+        """Reads a file from the server into a string."""
+        output = StringIO()
+        self._download_inner(share_uuid=share_uuid, node_uuid=node_uuid,
+                             content_hash=content_hash, output=output)
+        return output.getvalue()
+
+    @log_timing
+    def download_file(self, share_uuid, node_uuid, content_hash, filename):
+        """Downloads a file from the server."""
+        partial_filename = "%s.u1partial" % filename
+        output = open(partial_filename, "w")
+
+        @log_timing
+        def rename_file():
+            """Renames the temporary file to the final name."""
+            output.close()
+            os.rename(partial_filename, filename)
+
+        @log_timing
+        def delete_file():
+            """Deletes the temporary file."""
+            output.close()
+            os.remove(partial_filename)
+
+        self._download_inner(share_uuid=share_uuid, node_uuid=node_uuid,
+                             content_hash=content_hash, output=output,
+                             on_success=rename_file, on_failure=delete_file)
+
+    @log_timing
+    def _download_inner(self, share_uuid, node_uuid, content_hash, output,
+                        on_success=lambda: None, on_failure=lambda: None):
+        """Helper function for content downloads."""
+        dec = zlib.decompressobj()
+
+        @log_timing
+        def write_data(data):
+            """Helper which writes data to the output file."""
+            uncompressed_data = dec.decompress(data)
+            output.write(uncompressed_data)
+
+        @log_timing
+        def finish_download(value):
+            """Helper which finishes the download."""
+            uncompressed_data = dec.flush()
+            output.write(uncompressed_data)
+            on_success()
+            return value
+
+        @log_timing
+        def abort_download(value):
+            """Helper which aborts the download."""
+            on_failure()
+            return value
+
+        @log_timing
+        def _download():
+            """Async helper."""
+            _get_content = self.factory.current_protocol.get_content
+            d = _get_content(share_str(share_uuid), str(node_uuid),
+                             content_hash, callback=write_data)
+            d.addCallbacks(finish_download, abort_download)
+            return d
+
+        self.defer_from_thread(_download)
+
+    @log_timing
+    def create_directory(self, share_uuid, parent_uuid, name):
+        """Creates a directory on the server."""
+        r = self.defer_from_thread(self.factory.current_protocol.make_dir,
+                                   share_str(share_uuid), str(parent_uuid),
+                                   name)
+        return uuid.UUID(r.new_id)
+
+    @log_timing
+    def create_file(self, share_uuid, parent_uuid, name):
+        """Creates a file on the server."""
+        r = self.defer_from_thread(self.factory.current_protocol.make_file,
+                                   share_str(share_uuid), str(parent_uuid),
+                                   name)
+        return uuid.UUID(r.new_id)
+
+    @log_timing
+    def create_symlink(self, share_uuid, parent_uuid, name, target):
+        """Creates a symlink on the server."""
+        raise UnsupportedOperationError("Protocol does not support symlinks")
+
+    @log_timing
+    def upload_string(self, share_uuid, node_uuid, old_content_hash,
+                      content_hash, content):
+        """Uploads a string to the server as file content."""
+        crc = crc32(content, 0)
+        compressed_content = zlib.compress(content, 9)
+        compressed = StringIO(compressed_content)
+        self.defer_from_thread(self.factory.current_protocol.put_content,
+                               share_str(share_uuid), str(node_uuid),
+                               old_content_hash, content_hash,
+                               crc, len(content), len(compressed_content),
+                               compressed)
+
+    @log_timing
+    def upload_file(self, share_uuid, node_uuid, old_content_hash,
+                    content_hash, filename):
+        """Uploads a file to the server."""
+        parent_dir = os.path.split(filename)[0]
+        unique_filename = os.path.join(parent_dir, "." + str(uuid.uuid4()))
+
+        class StagingFile(object):
+            """An object which tracks data being compressed for staging."""
+            def __init__(self, stream):
+                """Initialize a compression object."""
+                self.crc32 = 0
+                self.enc = zlib.compressobj(9)
+                self.size = 0
+                self.compressed_size = 0
+                self.stream = stream
+
+            def write(self, bytes):
+                """Compress bytes, keeping track of length and crc32."""
+                self.size += len(bytes)
+                self.crc32 = crc32(bytes, self.crc32)
+                compressed_bytes = self.enc.compress(bytes)
+                self.compressed_size += len(compressed_bytes)
+                self.stream.write(compressed_bytes)
+
+            def finish(self):
+                """Finish staging compressed data."""
+                compressed_bytes = self.enc.flush()
+                self.compressed_size += len(compressed_bytes)
+                self.stream.write(compressed_bytes)
+
+        with open(unique_filename, "w+") as compressed:
+            os.remove(unique_filename)
+            with open(filename, "r") as original:
+                staging = StagingFile(compressed)
+                shutil.copyfileobj(original, staging)
+            staging.finish()
+            compressed.seek(0)
+            self.defer_from_thread(self.factory.current_protocol.put_content,
+                                   share_str(share_uuid), str(node_uuid),
+                                   old_content_hash, content_hash,
+                                   staging.crc32,
+                                   staging.size, staging.compressed_size,
+                                   compressed)
+
+    @log_timing
+    def move(self, share_uuid, parent_uuid, name, node_uuid):
+        """Moves a file on the server."""
+        self.defer_from_thread(self.factory.current_protocol.move,
+                               share_str(share_uuid), str(node_uuid),
+                               str(parent_uuid), name)
+
+    @log_timing
+    def unlink(self, share_uuid, node_uuid):
+        """Unlinks a file on the server."""
+        self.defer_from_thread(self.factory.current_protocol.unlink,
+                               share_str(share_uuid), str(node_uuid))
+
+    @log_timing
+    @defer.inlineCallbacks
+    def _get_node_hashes(self, share_uuid):
+        """Fetches hashes for the given nodes."""
+        result = yield self.factory.current_protocol.get_delta(
+            share_str(share_uuid), from_scratch=True)
+        hashes = {}
+        for fid in result.response:
+            node_uuid = uuid.UUID(fid.node_id)
+            hashes[node_uuid] = fid.content_hash
+        defer.returnValue(hashes)
+
+    @log_timing
+    def get_incoming_shares(self):
728 """Returns a list of incoming shares as (name, uuid, accepted)
729 tuples.
730
731 """
+        _list_shares = self.factory.current_protocol.list_shares
+        r = self.defer_from_thread(_list_shares)
+        return [(s.name, s.id, s.other_visible_name,
+                 s.accepted, s.access_level)
+                for s in r.shares if s.direction == "to_me"]
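
As a reading aid (not part of the branch): the Client facade blocks, so it must be driven from a thread other than the reactor thread, the way u1sync/main.py does. A minimal sketch, assuming a server on localhost:443 and placeholder credentials:

    from twisted.internet import reactor

    from magicicada.u1sync.client import Client

    def run_client(client):
        """Blocking protocol calls; never run this in the reactor thread."""
        try:
            client.connect_ssl("localhost", 443, True)  # True skips cert checks
            client.set_capabilities()
            client.simple_auth("jack", "secret")        # placeholder credentials
            for share in client.get_incoming_shares():
                print share
        finally:
            client.disconnect()
            reactor.callFromThread(reactor.stop)

    client = Client(reactor=reactor)
    reactor.callInThread(run_client, client)
    reactor.run(installSignalHandlers=False)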
=== added file 'magicicada/u1sync/constants.py'
--- magicicada/u1sync/constants.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/constants.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,26 @@
+# Copyright 2009 Canonical Ltd.
+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License version 3, as published
+# by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranties of
+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Assorted constant definitions which don't fit anywhere else."""
+
+import re
+
+# the name of the directory u1sync uses to keep metadata about a mirror
+METADATA_DIR_NAME = u".ubuntuone-sync"
+
+# filenames to ignore
+SPECIAL_FILE_RE = re.compile(".*\\.("
+                             "(u1)?partial|part|"
+                             "(u1)?conflict(\\.[0-9]+)?)$")
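
For illustration only (not in the branch): SPECIAL_FILE_RE is what makes u1sync skip its own work files. A few sample names and whether the pattern flags them:

    import re

    SPECIAL_FILE_RE = re.compile(".*\\.("
                                 "(u1)?partial|part|"
                                 "(u1)?conflict(\\.[0-9]+)?)$")

    for name in ["photo.jpg", "photo.jpg.u1partial", "notes.txt.part",
                 "notes.txt.conflict", "notes.txt.conflict.2"]:
        print name, bool(SPECIAL_FILE_RE.match(name))
    # photo.jpg is the only name that prints False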
=== added file 'magicicada/u1sync/genericmerge.py'
--- magicicada/u1sync/genericmerge.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/genericmerge.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,86 @@
+# Copyright 2009 Canonical Ltd.
+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License version 3, as published
+# by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranties of
+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""A generic abstraction for merge operations on directory trees."""
+
+from itertools import chain
+
+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY
+
+
+class MergeNode(object):
+    """A filesystem node. Should generally be treated as immutable."""
+    def __init__(self, node_type, content_hash=None, uuid=None, children=None,
+                 conflict_info=None):
+        """Initializes a node instance."""
+        self.node_type = node_type
+        self.children = children
+        self.uuid = uuid
+        self.content_hash = content_hash
+        self.conflict_info = conflict_info
+
+    def __eq__(self, other):
+        """Equality test."""
+        if type(other) is not type(self):
+            return False
+        return (self.node_type == other.node_type and
+                self.children == other.children and
+                self.uuid == other.uuid and
+                self.content_hash == other.content_hash and
+                self.conflict_info == other.conflict_info)
+
+    def __ne__(self, other):
+        """Non-equality test."""
+        return not self.__eq__(other)
+
+
+def show_tree(tree, indent="", name="/"):
+    """Prints a tree."""
+    if tree.node_type == DIRECTORY:
+        type_str = "DIR "
+    else:
+        type_str = "FILE"
+    print "%s%-36s %s %s %s" % (indent, tree.uuid, type_str, name,
+                                tree.content_hash)
+    if tree.node_type == DIRECTORY and tree.children is not None:
+        for name in sorted(tree.children.keys()):
+            subtree = tree.children[name]
+            show_tree(subtree, indent=" " + indent, name=name)
+
+
+def generic_merge(trees, pre_merge, post_merge, partial_parent, name):
+    """Generic tree merging function."""
+
+    partial_result = pre_merge(nodes=trees, name=name,
+                               partial_parent=partial_parent)
+
+    def tree_children(tree):
+        """Returns children if tree is not None"""
+        return tree.children if tree is not None else None
+
+    child_dicts = [tree_children(t) or {} for t in trees]
+    child_names = set(chain(*[cs.iterkeys() for cs in child_dicts]))
+    child_results = {}
+    for child_name in child_names:
+        subtrees = [cs.get(child_name, None) for cs in child_dicts]
+        child_result = generic_merge(trees=subtrees,
+                                     pre_merge=pre_merge,
+                                     post_merge=post_merge,
+                                     partial_parent=partial_result,
+                                     name=child_name)
+        child_results[child_name] = child_result
+
+    return post_merge(nodes=trees, partial_result=partial_result,
+                      child_results=child_results)
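
A toy example (not in the branch) of how generic_merge walks N trees in lockstep: pre_merge sees the matching nodes for one name, post_merge folds the children's results. Here it counts names present in only one of two trees:

    from magicicada.u1sync.genericmerge import MergeNode, generic_merge
    from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE

    left = MergeNode(DIRECTORY, children={u"a.txt": MergeNode(FILE),
                                          u"b.txt": MergeNode(FILE)})
    right = MergeNode(DIRECTORY, children={u"a.txt": MergeNode(FILE)})

    def pre_merge(nodes, name, partial_parent):
        # 1 if the name is missing from either tree, else 0
        return 0 if all(n is not None for n in nodes) else 1

    def post_merge(nodes, partial_result, child_results):
        return partial_result + sum(child_results.itervalues())

    print generic_merge(trees=[left, right], pre_merge=pre_merge,
                        post_merge=post_merge, partial_parent=0, name=u"")
    # prints 1 (b.txt exists only on the left)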
=== added file 'magicicada/u1sync/main.py'
--- magicicada/u1sync/main.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/main.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,443 @@
+# Copyright 2009 Canonical Ltd.
+# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License version 3, as published
+# by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranties of
+# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
+# PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""A prototype directory sync client."""
+
+from __future__ import with_statement
+
+import signal
+import os
+import sys
+import uuid
+
+from errno import EEXIST
+from optparse import OptionParser, SUPPRESS_HELP
+from Queue import Queue
+
+import gobject
+
+import ubuntuone.storageprotocol.dircontent_pb2 as dircontent_pb2
+from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, SYMLINK
+from twisted.internet import reactor
+
+from magicicada.u1sync import metadata
+from magicicada.u1sync.client import (
+    ConnectionError, AuthenticationError, NoSuchShareError,
+    ForcedShutdown, Client)
+from magicicada.u1sync.constants import METADATA_DIR_NAME
+from magicicada.u1sync.genericmerge import show_tree, generic_merge
+from magicicada.u1sync.merge import (
+    SyncMerge, ClobberServerMerge, ClobberLocalMerge, merge_trees)
+from magicicada.u1sync.scan import scan_directory
+from magicicada.u1sync.sync import download_tree, upload_tree
+from magicicada.u1sync.utils import safe_mkdir
+
+
+gobject.set_application_name('u1sync')
+
+DEFAULT_MERGE_ACTION = 'auto'
+MERGE_ACTIONS = {
+    # action: (merge_class, should_upload, should_download)
+    'sync': (SyncMerge, True, True),
+    'clobber-server': (ClobberServerMerge, True, False),
+    'clobber-local': (ClobberLocalMerge, False, True),
+    'upload': (SyncMerge, True, False),
+    'download': (SyncMerge, False, True),
+    'auto': None # special case
+}
+NODE_TYPE_ENUM = dircontent_pb2._NODETYPE
+
+
+def node_type_str(node_type):
+    """Converts a numeric node type to a human-readable string."""
+    return NODE_TYPE_ENUM.values_by_number[node_type].name
+
+
+class ReadOnlyShareError(Exception):
+    """Share is read-only."""
+
+
+class DirectoryAlreadyInitializedError(Exception):
+    """The directory has already been initialized."""
+
+
+class DirectoryNotInitializedError(Exception):
+    """The directory has not been initialized."""
+
+
+class NoParentError(Exception):
+    """A node has no parent."""
+
+
+class TreesDiffer(Exception):
+    """Raised when diff tree differs."""
+    def __init__(self, quiet):
+        self.quiet = quiet
+
+
+def do_init(client, share_spec, directory, quiet, subtree_path,
+            metadata=metadata):
+    """Initializes a directory for syncing, and syncs it."""
+    info = metadata.Metadata()
+
+    if share_spec is not None:
+        info.share_uuid = client.find_volume(share_spec)
+    else:
+        info.share_uuid = None
+
+    if subtree_path is not None:
+        info.path = subtree_path
+    else:
+        info.path = "/"
+
+    if not quiet:
+        print "\nInitializing directory..."
+    safe_mkdir(directory)
+
+    metadata_dir = os.path.join(directory, METADATA_DIR_NAME)
+    try:
+        os.mkdir(metadata_dir)
+    except OSError as e:
+        if e.errno == EEXIST:
+            raise DirectoryAlreadyInitializedError(directory)
+        else:
+            raise
+
+    if not quiet:
+        print "\nWriting mirror metadata..."
+    metadata.write(metadata_dir, info)
+
+    if not quiet:
+        print "\nDone."
+
+
+def do_sync(client, directory, action, dry_run, quiet):
+    """Synchronizes a directory with the given share."""
+    absolute_path = os.path.abspath(directory)
+    while True:
+        metadata_dir = os.path.join(absolute_path, METADATA_DIR_NAME)
+        if os.path.exists(metadata_dir):
+            break
+        if absolute_path == "/":
+            raise DirectoryNotInitializedError(directory)
+        absolute_path = os.path.split(absolute_path)[0]
+
+    if not quiet:
+        print "\nReading mirror metadata..."
+    info = metadata.read(metadata_dir)
+
+    top_uuid, writable = client.get_root_info(info.share_uuid)
+
+    if info.root_uuid is None:
+        info.root_uuid = client.resolve_path(info.share_uuid, top_uuid,
+                                             info.path)
+
+    if action == 'auto':
+        if writable:
+            action = 'sync'
+        else:
+            action = 'download'
+    merge_type, should_upload, should_download = MERGE_ACTIONS[action]
+    if should_upload and not writable:
+        raise ReadOnlyShareError(info.share_uuid)
+
+    if not quiet:
+        print "\nScanning directory..."
+    local_tree = scan_directory(absolute_path, quiet=quiet)
+
+    if not quiet:
+        print "\nFetching metadata..."
+    remote_tree = client.build_tree(info.share_uuid, info.root_uuid)
+    if not quiet:
+        show_tree(remote_tree)
+
+    if not quiet:
+        print "\nMerging trees..."
+    merged_tree = merge_trees(old_local_tree=info.local_tree,
+                              local_tree=local_tree,
+                              old_remote_tree=info.remote_tree,
+                              remote_tree=remote_tree,
+                              merge_action=merge_type())
+    if not quiet:
+        show_tree(merged_tree)
+
+    if not quiet:
+        print "\nSyncing content..."
+    if should_download:
+        info.local_tree = download_tree(merged_tree=merged_tree,
+                                        local_tree=local_tree,
+                                        client=client,
+                                        share_uuid=info.share_uuid,
+                                        path=absolute_path, dry_run=dry_run,
+                                        quiet=quiet)
+    else:
+        info.local_tree = local_tree
+    if should_upload:
+        info.remote_tree = upload_tree(merged_tree=merged_tree,
+                                       remote_tree=remote_tree,
+                                       client=client,
+                                       share_uuid=info.share_uuid,
+                                       path=absolute_path, dry_run=dry_run,
+                                       quiet=quiet)
+    else:
+        info.remote_tree = remote_tree
+
+    if not dry_run:
+        if not quiet:
+            print "\nUpdating mirror metadata..."
+        metadata.write(metadata_dir, info)
+
+    if not quiet:
+        print "\nDone."
+
+
+def do_list_shares(client):
+    """Lists available (incoming) shares."""
+    shares = client.get_incoming_shares()
+    for (name, id, user, accepted, access) in shares:
+        if not accepted:
+            status = " [not accepted]"
+        else:
+            status = ""
+        name = name.encode("utf-8")
+        user = user.encode("utf-8")
+        print "%s %s (from %s) [%s]%s" % (id, name, user, access, status)
+
+
+def do_diff(client, share_spec, directory, quiet, subtree_path,
+            ignore_symlinks=True):
+    """Diffs a local directory with the server."""
+    if share_spec is not None:
+        share_uuid = client.find_volume(share_spec)
+    else:
+        share_uuid = None
+    if subtree_path is None:
+        subtree_path = '/'
+
+    root_uuid, writable = client.get_root_info(share_uuid)
+    subtree_uuid = client.resolve_path(share_uuid, root_uuid, subtree_path)
+    local_tree = scan_directory(directory, quiet=True)
+    remote_tree = client.build_tree(share_uuid, subtree_uuid)
+
+    def pre_merge(nodes, name, partial_parent):
+        """Compares nodes and prints differences."""
+        (local_node, remote_node) = nodes
+        (parent_display_path, parent_differs) = partial_parent
+        display_path = os.path.join(parent_display_path, name.encode("UTF-8"))
+        differs = True
+        if local_node is None:
+            if not quiet:
+                print "%s missing from client" % display_path
+        elif remote_node is None:
+            if ignore_symlinks and local_node.node_type == SYMLINK:
+                differs = False
+            elif not quiet:
+                print "%s missing from server" % display_path
+        elif local_node.node_type != remote_node.node_type:
+            local_type = node_type_str(local_node.node_type)
+            remote_type = node_type_str(remote_node.node_type)
+            if not quiet:
+                print ("%s node types differ (client: %s, server: %s)" %
+                       (display_path, local_type, remote_type))
+        elif (local_node.node_type != DIRECTORY and
+              local_node.content_hash != remote_node.content_hash):
+            local_content = local_node.content_hash
+            remote_content = remote_node.content_hash
+            if not quiet:
+                print ("%s has different content (client: %s, server: %s)" %
+                       (display_path, local_content, remote_content))
+        else:
+            differs = False
+        return (display_path, differs)
+
+    def post_merge(nodes, partial_result, child_results):
+        """Aggregates 'differs' flags."""
+        (display_path, differs) = partial_result
+        return differs or any(child_results.itervalues())
+
+    differs = generic_merge(trees=[local_tree, remote_tree],
+                            pre_merge=pre_merge, post_merge=post_merge,
+                            partial_parent=("", False), name=u"")
+    if differs:
+        raise TreesDiffer(quiet=quiet)
+
+
+def do_main(argv):
+    """The main user-facing portion of the script."""
+    usage = (
+        "Usage: %prog [options] [DIRECTORY]\n"
+        "       %prog --list-shares [options]\n"
+        "       %prog --init [--share=SHARE_UUID] [options] DIRECTORY\n"
+        "       %prog --diff [--share=SHARE_UUID] [options] DIRECTORY")
+    parser = OptionParser(usage=usage)
+    parser.add_option("--port", dest="port", metavar="PORT",
+                      default=443,
+                      help="The port on which to connect to the server")
+    parser.add_option("--host", dest="host", metavar="HOST",
+                      default='fs-1.one.ubuntu.com',
+                      help="The server address")
+
+    action_list = ", ".join(sorted(MERGE_ACTIONS.keys()))
+    parser.add_option("--action", dest="action", metavar="ACTION",
+                      default=None,
+                      help="Select a sync action (%s; default is %s)" %
+                           (action_list, DEFAULT_MERGE_ACTION))
+    parser.add_option("--dry-run", action="store_true", dest="dry_run",
+                      default=False, help="Do a dry run without actually "
+                                          "making changes")
+    parser.add_option("--quiet", action="store_true", dest="quiet",
+                      default=False, help="Produces less output")
+    parser.add_option("--list-shares", action="store_const", dest="mode",
+                      const="list-shares", default="sync",
+                      help="List available shares")
+    parser.add_option("--init", action="store_const", dest="mode",
+                      const="init",
+                      help="Initialize a local directory for syncing")
+    parser.add_option("--no-ssl-verify", action="store_true",
+                      dest="no_ssl_verify",
+                      default=False, help=SUPPRESS_HELP)
+    parser.add_option("--diff", action="store_const", dest="mode",
+                      const="diff",
+                      help="Compare tree on server with local tree "
+                           "(does not require previous --init)")
+    parser.add_option("--share", dest="share", metavar="SHARE_UUID",
+                      default=None,
+                      help="Sync the directory with a share rather than the "
+                           "user's own volume")
+    parser.add_option("--subtree", dest="subtree", metavar="PATH",
+                      default=None,
+                      help="Mirror a subset of the share or volume")
+
+    (options, args) = parser.parse_args(args=list(argv[1:]))
+
+    if options.share is not None and options.mode not in ("init", "diff"):
+        parser.error("--share is only valid with --init or --diff")
+
+    directory = None
+    if options.mode in ("sync", "init", "diff"):
+        if len(args) > 1:
+            parser.error("Too many arguments")
+        elif len(args) < 1:
+            if options.mode == "init" or options.mode == "diff":
+                parser.error("--%s requires a directory to "
+                             "be specified" % options.mode)
+            else:
+                directory = "."
+        else:
+            directory = args[0]
+    if options.mode in ("init", "list-shares", "diff"):
+        if options.action is not None:
+            parser.error("--%s does not take the --action parameter" %
+                         options.mode)
+        if options.dry_run:
+            parser.error("--%s does not take the --dry-run parameter" %
+                         options.mode)
+    if options.mode == "list-shares":
+        if len(args) != 0:
+            parser.error("--list-shares does not take a directory")
+    if options.mode not in ("init", "diff"):
+        if options.subtree is not None:
+            parser.error("--%s does not take the --subtree parameter" %
+                         options.mode)
+
+    if options.action is not None and options.action not in MERGE_ACTIONS:
+        parser.error("--action: Unknown action %s" % options.action)
+
+    if options.action is None:
+        options.action = DEFAULT_MERGE_ACTION
+
+    if options.share is not None:
+        try:
+            uuid.UUID(options.share)
+        except ValueError as e:
+            parser.error("Invalid --share argument: %s" % e)
+        share_spec = options.share
+    else:
+        share_spec = None
+
+    client = Client(reactor=reactor)
+
+    signal.signal(signal.SIGINT, lambda s, f: client.force_shutdown())
+    signal.signal(signal.SIGTERM, lambda s, f: client.force_shutdown())
+
+    def run_client():
+        """Run the blocking client."""
+        client.connect_ssl(
+            options.host, int(options.port), options.no_ssl_verify)
+
+        try:
+            client.set_capabilities()
+
+            if options.mode == "sync":
+                do_sync(client=client, directory=directory,
+                        action=options.action,
+                        dry_run=options.dry_run, quiet=options.quiet)
+            elif options.mode == "init":
+                do_init(client=client, share_spec=share_spec,
+                        directory=directory,
+                        quiet=options.quiet, subtree_path=options.subtree)
+            elif options.mode == "list-shares":
+                do_list_shares(client=client)
+            elif options.mode == "diff":
+                do_diff(client=client, share_spec=share_spec,
+                        directory=directory,
+                        quiet=options.quiet, subtree_path=options.subtree,
+                        ignore_symlinks=False)
+        finally:
+            client.disconnect()
+
+    def capture_exception(queue, func):
+        """Capture the exception from calling func."""
+        try:
+            func()
+        except Exception:
+            queue.put(sys.exc_info())
+        else:
+            queue.put(None)
+        finally:
+            reactor.callWhenRunning(reactor.stop)
+
+    queue = Queue()
+    reactor.callInThread(capture_exception, queue, run_client)
+    reactor.run(installSignalHandlers=False)
+    exc_info = queue.get(True, 0.1)
+    if exc_info:
+        raise exc_info[0], exc_info[1], exc_info[2]
+
+
+def main(*argv):
+    """Top-level main function."""
+    try:
+        do_main(argv=argv)
+    except AuthenticationError as e:
+        print "Authentication failed: %s" % e
+    except ConnectionError as e:
+        print "Connection failed: %s" % e
+    except DirectoryNotInitializedError:
+        print "Directory not initialized; use --init [DIRECTORY] to init it."
+    except DirectoryAlreadyInitializedError:
+        print "Directory already initialized."
+    except NoSuchShareError:
+        print "No matching share found."
+    except ReadOnlyShareError:
+        print "The selected action isn't possible on a read-only share."
+    except (ForcedShutdown, KeyboardInterrupt):
+        print "Interrupted!"
+    except TreesDiffer as e:
+        if not e.quiet:
+            print "Trees differ."
+    else:
+        return 0
+    return 1
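
The OptionParser setup above implies the following entry points; a sketch with placeholder arguments (the reactor only runs once per process, so pick one call per run):

    from magicicada.u1sync.main import main

    # list the incoming shares on a local test server
    main('u1sync', '--list-shares', '--host=localhost', '--port=443')

    # or: initialize a mirror of a share (the UUID is a placeholder), then sync
    # main('u1sync', '--init', '--share=SHARE_UUID', '--host=localhost', 'mirror')
    # main('u1sync', 'mirror')

    # or: compare a local tree against the server without a previous --init
    # main('u1sync', '--diff', 'mirror')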
=== added file 'magicicada/u1sync/merge.py'
--- magicicada/u1sync/merge.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/merge.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,181 @@
1# Copyright 2009 Canonical Ltd.
2# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
3#
4# This program is free software: you can redistribute it and/or modify it
5# under the terms of the GNU General Public License version 3, as published
6# by the Free Software Foundation.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranties of
10# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
11# PURPOSE. See the GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program. If not, see <http://www.gnu.org/licenses/>.
15
16"""Code for merging changes between modified trees."""
17
18from __future__ import with_statement
19
20import os
21import uuid
22
23from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY
24
25from magicicada.u1sync.genericmerge import MergeNode, generic_merge
26
27
28class NodeTypeMismatchError(Exception):
29 """Node types don't match."""
30
31
32def merge_trees(old_local_tree, local_tree, old_remote_tree, remote_tree,
33 merge_action):
34 """Performs a tree merge using the given merge action."""
35
36 def pre_merge(nodes, name, partial_parent):
37 """Accumulates path and determines merged node type."""
38 old_local_node, local_node, old_remote_node, remote_node = nodes
39 (parent_path, parent_type) = partial_parent
40 path = os.path.join(parent_path, name.encode("utf-8"))
41 node_type = merge_action.get_node_type(old_local_node=old_local_node,
42 local_node=local_node,
43 old_remote_node=old_remote_node,
44 remote_node=remote_node,
45 path=path)
46 return (path, node_type)
47
48 def post_merge(nodes, partial_result, child_results):
49 """Drops deleted children and merges node."""
50 old_local_node, local_node, old_remote_node, remote_node = nodes
51 (path, node_type) = partial_result
52 if node_type == DIRECTORY:
53 merged_children = dict(
54 (name, child) for (name, child) in child_results.iteritems()
55 if child is not None)
56 else:
57 merged_children = None
58 return merge_action.merge_node(old_local_node=old_local_node,
59 local_node=local_node,
60 old_remote_node=old_remote_node,
61 remote_node=remote_node,
62 node_type=node_type,
63 merged_children=merged_children)
64
65 return generic_merge(trees=[old_local_tree, local_tree,
66 old_remote_tree, remote_tree],
67 pre_merge=pre_merge, post_merge=post_merge,
68 name=u"", partial_parent=("", None))
69
70
71class SyncMerge(object):
72 """Performs a bidirectional sync merge."""
73
74 def get_node_type(self, old_local_node, local_node,
75 old_remote_node, remote_node, path):
76 """Requires that all node types match."""
77 node_type = None
78 for node in (old_local_node, local_node, remote_node):
79 if node is not None:
80 if node_type is not None:
81 if node.node_type != node_type:
82 message = "Node types don't match for %s" % path
83 raise NodeTypeMismatchError(message)
84 else:
85 node_type = node.node_type
86 return node_type
87
88 def merge_node(self, old_local_node, local_node,
89 old_remote_node, remote_node, node_type, merged_children):
90 """Performs bidirectional merge of node state."""
91
92 def node_content_hash(node):
93 """Returns node content hash if node is not None"""
94 return node.content_hash if node is not None else None
95
96 old_local_content_hash = node_content_hash(old_local_node)
97 local_content_hash = node_content_hash(local_node)
98 old_remote_content_hash = node_content_hash(old_remote_node)
99 remote_content_hash = node_content_hash(remote_node)
100
101 locally_deleted = old_local_node is not None and local_node is None
102 deleted_on_server = old_remote_node is not None and remote_node is None
103 # updated means modified or created
104 locally_updated = (not locally_deleted and
105 old_local_content_hash != local_content_hash)
106 updated_on_server = (not deleted_on_server and
107 old_remote_content_hash != remote_content_hash)
108
109 has_merged_children = (merged_children is not None and
110 len(merged_children) > 0)
111
112 either_node_exists = local_node is not None or remote_node is not None
113 should_delete = (
114 (locally_deleted and not updated_on_server) or
115 (deleted_on_server and not locally_updated))
116
117 if (either_node_exists and not should_delete) or has_merged_children:
118 if (node_type != DIRECTORY and locally_updated and
119 updated_on_server and
120 local_content_hash != remote_content_hash):
121 # local_content_hash will become the merged content_hash;
122 # save remote_content_hash in conflict info
123 conflict_info = (str(uuid.uuid4()), remote_content_hash)
124 else:
125 conflict_info = None
126 node_uuid = remote_node.uuid if remote_node is not None else None
127 if locally_updated:
128 content_hash = local_content_hash or remote_content_hash
129 else:
130 content_hash = remote_content_hash or local_content_hash
131 return MergeNode(
132 node_type=node_type, uuid=node_uuid, children=merged_children,
133 content_hash=content_hash, conflict_info=conflict_info)
134 else:
135 return None
136
137
138class ClobberServerMerge(object):
139 """Clobber server to match local state."""
140
141 def get_node_type(self, old_local_node, local_node,
142 old_remote_node, remote_node, path):
143 """Return local node type."""
144 if local_node is not None:
145 return local_node.node_type
146 else:
147 return None
148
149 def merge_node(self, old_local_node, local_node,
150 old_remote_node, remote_node, node_type, merged_children):
151 """Copy local node and associate with remote uuid (if applicable)."""
152 if local_node is None:
153 return None
154 if remote_node is not None:
155 node_uuid = remote_node.uuid
156 else:
157 node_uuid = None
158 return MergeNode(
159 node_type=local_node.node_type, uuid=node_uuid,
160 content_hash=local_node.content_hash, children=merged_children)
161
162
163class ClobberLocalMerge(object):
164 """Clobber local state to match server."""
165
166 def get_node_type(self, old_local_node, local_node,
167 old_remote_node, remote_node, path):
168 """Return remote node type."""
169 if remote_node is not None:
170 return remote_node.node_type
171 else:
172 return None
173
174 def merge_node(self, old_local_node, local_node,
175 old_remote_node, remote_node, node_type, merged_children):
176 """Copy the remote node."""
177 if remote_node is None:
178 return None
179 return MergeNode(
180 node_type=node_type, uuid=remote_node.uuid,
181 content_hash=remote_node.content_hash, children=merged_children)
0182
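
To make the SyncMerge semantics concrete, here is a small sketch in the spirit of test_merge.py further down (the file names and "dummy:" content hashes are illustrative, as in the tests). With empty baselines on both sides, neither file counts as deleted, so the bidirectional merge keeps the union of both trees:

    from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE

    from magicicada.u1sync.genericmerge import MergeNode
    from magicicada.u1sync.merge import SyncMerge, merge_trees

    empty = MergeNode(DIRECTORY, children={})
    local = MergeNode(DIRECTORY, children={
        'notes.txt': MergeNode(FILE, content_hash="dummy:aaa")})
    remote = MergeNode(DIRECTORY, children={
        'todo.txt': MergeNode(FILE, content_hash="dummy:bbb")})

    # Old local/remote trees are empty, so nothing is "deleted" on either
    # side and both new files survive the merge.
    merged = merge_trees(empty, local, empty, remote, SyncMerge())
    assert sorted(merged.children) == ['notes.txt', 'todo.txt']
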
=== added file 'magicicada/u1sync/metadata.py'
--- magicicada/u1sync/metadata.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/metadata.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,155 @@
1# Copyright 2009 Canonical Ltd.
2# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
3#
4# This program is free software: you can redistribute it and/or modify it
5# under the terms of the GNU General Public License version 3, as published
6# by the Free Software Foundation.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranties of
10# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
11# PURPOSE. See the GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program. If not, see <http://www.gnu.org/licenses/>.
15
16"""Routines for loading/storing u1sync mirror metadata."""
17
18from __future__ import with_statement
19
20import os
21import uuid
22
23from contextlib import contextmanager
24import cPickle as pickle
25from errno import ENOENT
26
27from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY
28
29from magicicada.u1sync.merge import MergeNode
30from magicicada.u1sync.utils import safe_unlink
31
32
33class Metadata(object):
34 """Object representing mirror metadata."""
35
36 def __init__(self, local_tree=None, remote_tree=None, share_uuid=None,
37 root_uuid=None, path=None):
38 """Populate fields."""
39 self.local_tree = local_tree
40 self.remote_tree = remote_tree
41 self.share_uuid = share_uuid
42 self.root_uuid = root_uuid
43 self.path = path
44
45
46def read(metadata_dir):
47 """Read metadata for a mirror rooted at directory."""
48 index_file = os.path.join(metadata_dir, "local-index")
49 share_uuid_file = os.path.join(metadata_dir, "share-uuid")
50 root_uuid_file = os.path.join(metadata_dir, "root-uuid")
51 path_file = os.path.join(metadata_dir, "path")
52
53 index = read_pickle_file(index_file, {})
54 share_uuid = read_uuid_file(share_uuid_file)
55 root_uuid = read_uuid_file(root_uuid_file)
56 path = read_string_file(path_file, '/')
57
58 local_tree = index.get("tree", None)
59 remote_tree = index.get("remote_tree", None)
60
61 if local_tree is None:
62 local_tree = MergeNode(node_type=DIRECTORY, children={})
63 if remote_tree is None:
64 remote_tree = MergeNode(node_type=DIRECTORY, children={})
65
66 return Metadata(local_tree=local_tree, remote_tree=remote_tree,
67 share_uuid=share_uuid, root_uuid=root_uuid,
68 path=path)
69
70
71def write(metadata_dir, info):
72 """Writes all metadata for the mirror rooted at directory."""
73 share_uuid_file = os.path.join(metadata_dir, "share-uuid")
74 root_uuid_file = os.path.join(metadata_dir, "root-uuid")
75 index_file = os.path.join(metadata_dir, "local-index")
76 path_file = os.path.join(metadata_dir, "path")
77 if info.share_uuid is not None:
78 write_uuid_file(share_uuid_file, info.share_uuid)
79 else:
80 safe_unlink(share_uuid_file)
81 if info.root_uuid is not None:
82 write_uuid_file(root_uuid_file, info.root_uuid)
83 else:
84 safe_unlink(root_uuid_file)
85 write_string_file(path_file, info.path)
86 write_pickle_file(index_file, {"tree": info.local_tree,
87 "remote_tree": info.remote_tree})
88
89
90def write_pickle_file(filename, value):
91 """Writes a pickled python object to a file."""
92 with atomic_update_file(filename) as stream:
93 pickle.dump(value, stream, 2)
94
95
96def write_string_file(filename, value):
97 """Writes a string to a file with an added line feed, or
98 deletes the file if value is None.
99 """
100 if value is not None:
101 with atomic_update_file(filename) as stream:
102 stream.write(value)
103 stream.write('\n')
104 else:
105 safe_unlink(filename)
106
107
108def write_uuid_file(filename, value):
109 """Writes a UUID to a file."""
110 write_string_file(filename, str(value))
111
112
113def read_pickle_file(filename, default_value=None):
114 """Reads a pickled python object from a file."""
115 try:
116 with open(filename, "rb") as stream:
117 return pickle.load(stream)
118 except IOError as e:
119 if e.errno != ENOENT:
120 raise
121 return default_value
122
123
124def read_string_file(filename, default_value=None):
125 """Reads a string from a file, discarding the final character."""
126 try:
127 with open(filename, "r") as stream:
128 return stream.read()[:-1]
129 except IOError as e:
130 if e.errno != ENOENT:
131 raise
132 return default_value
133
134
135def read_uuid_file(filename, default_value=None):
136 """Reads a UUID from a file."""
137 try:
138 with open(filename, "r") as stream:
139 return uuid.UUID(stream.read()[:-1])
140 except IOError as e:
141 if e.errno != ENOENT:
142 raise
143 return default_value
144
145
146@contextmanager
147def atomic_update_file(filename):
148 """Returns a context manager for atomically updating a file."""
149 temp_filename = "%s.%s" % (filename, uuid.uuid4())
150 try:
151 with open(temp_filename, "w") as stream:
152 yield stream
153 os.rename(temp_filename, filename)
154 finally:
155 safe_unlink(temp_filename)
0156
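
The write path here is atomic: atomic_update_file() writes to a uniquely named temp file, os.rename()s it into place, and unlinks the temp file on failure. A tiny round-trip sketch (the tempfile directory is just a stand-in for a mirror's real metadata dir):

    import os
    import tempfile

    from magicicada.u1sync.metadata import read_string_file, write_string_file

    metadata_dir = tempfile.mkdtemp()  # stand-in for the metadata dir
    path_file = os.path.join(metadata_dir, "path")

    write_string_file(path_file, "/some/mirror")  # temp file + os.rename
    print(read_string_file(path_file, '/'))       # -> /some/mirror
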
=== added file 'magicicada/u1sync/scan.py'
--- magicicada/u1sync/scan.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/scan.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,84 @@
1# Copyright 2009 Canonical Ltd.
2# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
3#
4# This program is free software: you can redistribute it and/or modify it
5# under the terms of the GNU General Public License version 3, as published
6# by the Free Software Foundation.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranties of
10# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
11# PURPOSE. See the GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program. If not, see <http://www.gnu.org/licenses/>.
15
16"""Code for scanning local directory state."""
17
18from __future__ import with_statement
19
20import os
21import hashlib
22import shutil
23
24from errno import ENOTDIR, EINVAL
25
26from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE, SYMLINK
27
28from magicicada.u1sync.genericmerge import MergeNode
29from magicicada.u1sync.utils import should_sync
30
31
32EMPTY_HASH = "sha1:%s" % hashlib.sha1().hexdigest()
33
34
35def scan_directory(path, display_path="", quiet=False):
36 """Scans a local directory and builds an in-memory tree from it."""
37 if display_path != "" and not quiet:
38 print display_path
39
40 link_target = None
41 child_names = None
42 try:
43 link_target = os.readlink(path)
44 except OSError as e:
45 if e.errno != EINVAL:
46 raise
47 try:
48 child_names = os.listdir(path)
49 except OSError as e:
50 if e.errno != ENOTDIR:
51 raise
52
53 if link_target is not None:
54 # symlink
55 sum = hashlib.sha1()
56 sum.update(link_target)
57 content_hash = "sha1:%s" % sum.hexdigest()
58 return MergeNode(node_type=SYMLINK, content_hash=content_hash)
59 elif child_names is not None:
60 # directory
61 child_names = [
62 n for n in child_names if should_sync(n.decode("utf-8"))]
63 child_paths = [(os.path.join(path, child_name),
64 os.path.join(display_path, child_name))
65 for child_name in child_names]
66 children = [scan_directory(child_path, child_display_path, quiet)
67 for (child_path, child_display_path) in child_paths]
68 unicode_child_names = [n.decode("utf-8") for n in child_names]
69 children = dict(zip(unicode_child_names, children))
70 return MergeNode(node_type=DIRECTORY, children=children)
71 else:
72 # regular file
73 sum = hashlib.sha1()
74
75 class HashStream(object):
76 """Stream that computes hashes."""
77 def write(self, bytes):
78 """Accumulate bytes."""
79 sum.update(bytes)
80
81 with open(path, "r") as stream:
82 shutil.copyfileobj(stream, HashStream())
83 content_hash = "sha1:%s" % sum.hexdigest()
84 return MergeNode(node_type=FILE, content_hash=content_hash)
085
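
A quick way to see what scan_directory() produces (the path below is illustrative): directory nodes carry a children dict keyed by unicode name, file and symlink nodes carry a "sha1:..." content hash, and directory nodes have no content hash.

    from magicicada.u1sync.scan import scan_directory

    tree = scan_directory("/home/user/mirror", quiet=True)
    for name, node in sorted(tree.children.items()):
        # content_hash is None for subdirectories, "sha1:..." otherwise.
        print("%s -> %s" % (name.encode("utf-8"), node.content_hash))
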
=== added file 'magicicada/u1sync/sync.py'
--- magicicada/u1sync/sync.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/sync.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,377 @@
1# Copyright 2009 Canonical Ltd.
2# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
3#
4# This program is free software: you can redistribute it and/or modify it
5# under the terms of the GNU General Public License version 3, as published
6# by the Free Software Foundation.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranties of
10# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
11# PURPOSE. See the GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program. If not, see <http://www.gnu.org/licenses/>.
15
16"""Sync.
17
18After merging, these routines are used to synchronize state locally and on
19the server to correspond to the merged result.
20
21"""
22
23from __future__ import with_statement
24
25import os
26
27from ubuntuone.storageprotocol import request
28from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, SYMLINK
29
30from magicicada.u1sync.client import UnsupportedOperationError
31from magicicada.u1sync.genericmerge import MergeNode, generic_merge
32from magicicada.u1sync.utils import safe_mkdir
33
34
35EMPTY_HASH = ""
36UPLOAD_SYMBOL = u"\u25b2".encode("utf-8")
37DOWNLOAD_SYMBOL = u"\u25bc".encode("utf-8")
38CONFLICT_SYMBOL = "!"
39DELETE_SYMBOL = "X"
40
41
42def get_conflict_path(path, conflict_info):
43 """Return path for conflict file corresponding to path."""
44 dir, name = os.path.split(path)
45 unique_id = conflict_info[0]
46 return os.path.join(dir, "conflict-%s-%s" % (unique_id, name))
47
48
49def name_from_path(path):
50 """Return unicode name from last path component."""
51 return os.path.split(path)[1].decode("UTF-8")
52
53
54class NodeSyncError(Exception):
55 """Error syncing node."""
56
57
58class NodeCreateError(NodeSyncError):
59 """Error creating node."""
60
61
62class NodeUpdateError(NodeSyncError):
63 """Error updating node."""
64
65
66class NodeDeleteError(NodeSyncError):
67 """Error deleting node."""
68
69
70def sync_tree(merged_tree, original_tree, sync_mode, path, quiet):
71 """Performs actual synchronization."""
72
73 def pre_merge(nodes, name, partial_parent):
74 """Create nodes and write content as required."""
75 (merged_node, original_node) = nodes
76 (parent_path, parent_display_path, parent_uuid,
77 parent_synced) = partial_parent
78
79 utf8_name = name.encode("utf-8")
80 path = os.path.join(parent_path, utf8_name)
81 display_path = os.path.join(parent_display_path, utf8_name)
82 node_uuid = None
83
84 synced = False
85 if merged_node is not None:
86 if merged_node.node_type == DIRECTORY:
87 if original_node is not None:
88 synced = True
89 node_uuid = original_node.uuid
90 else:
91 if not quiet:
92 print "%s %s" % (sync_mode.symbol, display_path)
93 try:
94 create_dir = sync_mode.create_directory
95 node_uuid = create_dir(parent_uuid=parent_uuid,
96 path=path)
97 synced = True
98 except NodeCreateError as e:
99 print e
100 elif merged_node.content_hash is None:
101 if not quiet:
102 print "? %s" % display_path
103 elif (original_node is None or
104 original_node.content_hash != merged_node.content_hash or
105 merged_node.conflict_info is not None):
106 conflict_info = merged_node.conflict_info
107 if conflict_info is not None:
108 conflict_symbol = CONFLICT_SYMBOL
109 else:
110 conflict_symbol = " "
111 if not quiet:
112 print "%s %s %s" % (sync_mode.symbol, conflict_symbol,
113 display_path)
114 if original_node is not None:
115 node_uuid = original_node.uuid or merged_node.uuid
116 original_hash = original_node.content_hash or EMPTY_HASH
117 else:
118 node_uuid = merged_node.uuid
119 original_hash = EMPTY_HASH
120 try:
121 sync_mode.write_file(
122 node_uuid=node_uuid,
123 content_hash=merged_node.content_hash,
124 old_content_hash=original_hash, path=path,
125 parent_uuid=parent_uuid, conflict_info=conflict_info,
126 node_type=merged_node.node_type)
127 synced = True
128 except NodeSyncError as e:
129 print e
130 else:
131 synced = True
132
133 return (path, display_path, node_uuid, synced)
134
135 def post_merge(nodes, partial_result, child_results):
136 """Delete nodes."""
137 (merged_node, original_node) = nodes
138 (path, display_path, node_uuid, synced) = partial_result
139
140 if merged_node is None:
141 assert original_node is not None
142 if not quiet:
143 print "%s %s %s" % (sync_mode.symbol, DELETE_SYMBOL,
144 display_path)
145 try:
146 if original_node.node_type == DIRECTORY:
147 sync_mode.delete_directory(node_uuid=original_node.uuid,
148 path=path)
149 else:
150 # files or symlinks
151 sync_mode.delete_file(node_uuid=original_node.uuid,
152 path=path)
153 synced = True
154 except NodeDeleteError as e:
155 print e
156
157 if synced:
158 model_node = merged_node
159 else:
160 model_node = original_node
161
162 if model_node is not None:
163 if model_node.node_type == DIRECTORY:
164 child_iter = child_results.iteritems()
165 merged_children = dict(
166 (name, child) for (name, child) in child_iter
167 if child is not None)
168 else:
169 # if there are children here it's because they failed to delete
170 merged_children = None
171 return MergeNode(node_type=model_node.node_type,
172 uuid=model_node.uuid,
173 children=merged_children,
174 content_hash=model_node.content_hash)
175 else:
176 return None
177
178 return generic_merge(trees=[merged_tree, original_tree],
179 pre_merge=pre_merge, post_merge=post_merge,
180 partial_parent=(path, "", None, True), name=u"")
181
182
183def download_tree(merged_tree, local_tree, client, share_uuid, path, dry_run,
184 quiet):
185 """Downloads a directory."""
186 if dry_run:
187 downloader = DryRun(symbol=DOWNLOAD_SYMBOL)
188 else:
189 downloader = Downloader(client=client, share_uuid=share_uuid)
190 return sync_tree(merged_tree=merged_tree, original_tree=local_tree,
191 sync_mode=downloader, path=path, quiet=quiet)
192
193
194def upload_tree(merged_tree, remote_tree, client, share_uuid, path, dry_run,
195 quiet):
196 """Uploads a directory."""
197 if dry_run:
198 uploader = DryRun(symbol=UPLOAD_SYMBOL)
199 else:
200 uploader = Uploader(client=client, share_uuid=share_uuid)
201 return sync_tree(merged_tree=merged_tree, original_tree=remote_tree,
202 sync_mode=uploader, path=path, quiet=quiet)
203
204
205class DryRun(object):
206 """A class which implements the sync interface but does nothing."""
207 def __init__(self, symbol):
208 """Initializes a DryRun instance."""
209 self.symbol = symbol
210
211 def create_directory(self, parent_uuid, path):
212 """Doesn't create a directory."""
213 return None
214
215 def write_file(self, node_uuid, old_content_hash, content_hash,
216 parent_uuid, path, conflict_info, node_type):
217 """Doesn't write a file."""
218 return None
219
220 def delete_directory(self, node_uuid, path):
221 """Doesn't delete a directory."""
222
223 def delete_file(self, node_uuid, path):
224 """Doesn't delete a file."""
225
226
227class Downloader(object):
228 """A class which implements the download half of syncing."""
229 def __init__(self, client, share_uuid):
230 """Initializes a Downloader instance."""
231 self.client = client
232 self.share_uuid = share_uuid
233 self.symbol = DOWNLOAD_SYMBOL
234
235 def create_directory(self, parent_uuid, path):
236 """Creates a directory."""
237 try:
238 safe_mkdir(path)
239 except OSError as e:
240 raise NodeCreateError(
241 "Error creating local directory %s: %s" % (path, e))
242 return None
243
244 def write_file(self, node_uuid, old_content_hash, content_hash,
245 parent_uuid, path, conflict_info, node_type):
246 """Creates a file and downloads new content for it."""
247 if conflict_info:
248 # download to conflict file rather than overwriting local changes
249 path = get_conflict_path(path, conflict_info)
250 content_hash = conflict_info[1]
251 try:
252 if node_type == SYMLINK:
253 self.client.download_string(
254 share_uuid=self.share_uuid, node_uuid=node_uuid,
255 content_hash=content_hash)
256 else:
257 self.client.download_file(
258 share_uuid=self.share_uuid, node_uuid=node_uuid,
259 content_hash=content_hash, filename=path)
260 except (request.StorageRequestError, UnsupportedOperationError) as e:
261 if os.path.exists(path):
262 raise NodeUpdateError(
263 "Error downloading content for %s: %s" % (path, e))
264 else:
265 raise NodeCreateError(
266 "Error locally creating %s: %s" % (path, e))
267
268 def delete_directory(self, node_uuid, path):
269 """Deletes a directory."""
270 try:
271 os.rmdir(path)
272 except OSError as e:
273 raise NodeDeleteError("Error locally deleting %s: %s" % (path, e))
274
275 def delete_file(self, node_uuid, path):
276 """Deletes a file."""
277 try:
278 os.remove(path)
279 except OSError as e:
280 raise NodeDeleteError("Error locally deleting %s: %s" % (path, e))
281
282
283class Uploader(object):
284 """A class which implements the upload half of syncing."""
285 def __init__(self, client, share_uuid):
286 """Initializes an uploader instance."""
287 self.client = client
288 self.share_uuid = share_uuid
289 self.symbol = UPLOAD_SYMBOL
290
291 def create_directory(self, parent_uuid, path):
292 """Creates a directory on the server."""
293 name = name_from_path(path)
294 try:
295 return self.client.create_directory(share_uuid=self.share_uuid,
296 parent_uuid=parent_uuid,
297 name=name)
298 except (request.StorageRequestError, UnsupportedOperationError) as e:
299 raise NodeCreateError("Error remotely creating %s: %s" % (path, e))
300
301 def write_file(self, node_uuid, old_content_hash, content_hash,
302 parent_uuid, path, conflict_info, node_type):
303 """Creates a file on the server and uploads new content for it."""
304
305 if conflict_info:
306 # move conflicting file out of the way on the server
307 conflict_path = get_conflict_path(path, conflict_info)
308 conflict_name = name_from_path(conflict_path)
309 try:
310 self.client.move(share_uuid=self.share_uuid,
311 parent_uuid=parent_uuid,
312 name=conflict_name,
313 node_uuid=node_uuid)
314 except (request.StorageRequestError,
315 UnsupportedOperationError) as e:
316 raise NodeUpdateError(
317 "Error remotely renaming %s to %s: %s" %
318 (path, conflict_path, e))
319 node_uuid = None
320 old_content_hash = EMPTY_HASH
321
322 if node_type == SYMLINK:
323 try:
324 target = os.readlink(path)
325 except OSError as e:
326 raise NodeCreateError(
327 "Error retrieving link target for %s: %s" % (path, e))
328 else:
329 target = None
330
331 name = name_from_path(path)
332 if node_uuid is None:
333 try:
334 if node_type == SYMLINK:
335 node_uuid = self.client.create_symlink(
336 share_uuid=self.share_uuid, parent_uuid=parent_uuid,
337 name=name, target=target)
338 old_content_hash = content_hash
339 else:
340 node_uuid = self.client.create_file(
341 share_uuid=self.share_uuid, parent_uuid=parent_uuid,
342 name=name)
343 except (request.StorageRequestError,
344 UnsupportedOperationError) as e:
345 raise NodeCreateError(
346 "Error remotely creating %s: %s" % (path, e))
347
348 if old_content_hash != content_hash:
349 try:
350 if node_type == SYMLINK:
351 self.client.upload_string(
352 share_uuid=self.share_uuid, node_uuid=node_uuid,
353 content_hash=content_hash,
354 old_content_hash=old_content_hash, content=target)
355 else:
356 self.client.upload_file(
357 share_uuid=self.share_uuid, node_uuid=node_uuid,
358 content_hash=content_hash,
359 old_content_hash=old_content_hash, filename=path)
360 except (request.StorageRequestError,
361 UnsupportedOperationError) as e:
362 raise NodeUpdateError(
363 "Error uploading content for %s: %s" % (path, e))
364
365 def delete_directory(self, node_uuid, path):
366 """Deletes a directory."""
367 try:
368 self.client.unlink(share_uuid=self.share_uuid, node_uuid=node_uuid)
369 except (request.StorageRequestError, UnsupportedOperationError) as e:
370 raise NodeDeleteError("Error remotely deleting %s: %s" % (path, e))
371
372 def delete_file(self, node_uuid, path):
373 """Deletes a file."""
374 try:
375 self.client.unlink(share_uuid=self.share_uuid, node_uuid=node_uuid)
376 except (request.StorageRequestError, UnsupportedOperationError) as e:
377 raise NodeDeleteError("Error remotely deleting %s: %s" % (path, e))
0378
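
DryRun makes sync_tree() a pure reporter: each planned change is printed with the mode's symbol, but no local or server state is touched. A standalone sketch (the trees and path are made up; with a non-empty merged tree and an empty original tree, every file prints as a pending upload):

    from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE

    from magicicada.u1sync.genericmerge import MergeNode
    from magicicada.u1sync.sync import DryRun, UPLOAD_SYMBOL, sync_tree

    remote = MergeNode(DIRECTORY, children={})
    merged = MergeNode(DIRECTORY, children={
        'notes.txt': MergeNode(FILE, content_hash="dummy:aaa")})

    # Prints one "upload" line per change, but never calls the server.
    sync_tree(merged_tree=merged, original_tree=remote,
              sync_mode=DryRun(symbol=UPLOAD_SYMBOL),
              path="/some/mirror", quiet=False)
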
=== added directory 'magicicada/u1sync/tests'
=== added file 'magicicada/u1sync/tests/__init__.py'
--- magicicada/u1sync/tests/__init__.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/tests/__init__.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,1 @@
1"""Tests for u1sync."""
02
=== added file 'magicicada/u1sync/tests/test_client.py'
--- magicicada/u1sync/tests/test_client.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/tests/test_client.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,63 @@
1# Copyright 2010 Canonical Ltd.
2# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
3#
4# This program is free software: you can redistribute it and/or modify it
5# under the terms of the GNU General Public License version 3, as published
6# by the Free Software Foundation.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranties of
10# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
11# PURPOSE. See the GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program. If not, see <http://www.gnu.org/licenses/>.
15
16"""Test the client code."""
17
18from mocker import Mocker
19from twisted.trial.unittest import TestCase
20
21from magicicada.u1sync import client
22
23
24class SyncStorageClientTest(TestCase):
25 """Test the SyncStorageClient."""
26
27 def test_conn_made_call_parent(self):
28 """The connectionMade method should call the parent."""
29 # set up everything
30 called = []
31 self.patch(client.StorageClient, 'connectionMade',
32 lambda s: called.append(True))
33 c = client.SyncStorageClient()
34 mocker = Mocker()
35 obj = mocker.mock()
36 obj.current_protocol
37 mocker.result(None)
38 obj.current_protocol = c
39 obj.observer.connected()
40 c.factory = obj
41
42 # call and test
43 with mocker:
44 c.connectionMade()
45 self.assertTrue(called)
46
47 def test_conn_lost_call_parent(self):
48 """The connectionLost method should call the parent."""
49 # set up everything
50 called = []
51 self.patch(client.StorageClient, 'connectionLost',
52 lambda s, r: called.append(True))
53 c = client.SyncStorageClient()
54 mocker = Mocker()
55 obj = mocker.mock()
56 obj.current_protocol
57 mocker.result(None)
58 c.factory = obj
59
60 # call and test
61 with mocker:
62 c.connectionLost()
63 self.assertTrue(called)
064
=== added file 'magicicada/u1sync/tests/test_init.py'
--- magicicada/u1sync/tests/test_init.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/tests/test_init.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,22 @@
1# Copyright 2009 Canonical Ltd.
2# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
3#
4# This program is free software: you can redistribute it and/or modify it
5# under the terms of the GNU General Public License version 3, as published
6# by the Free Software Foundation.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranties of
10# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
11# PURPOSE. See the GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program. If not, see <http://www.gnu.org/licenses/>.
15
16"""Tests for u1sync mirror init"""
17
18from unittest import TestCase
19
20
21class InitTestCase(TestCase):
22 """Tests for u1sync --init"""
023
=== added file 'magicicada/u1sync/tests/test_merge.py'
--- magicicada/u1sync/tests/test_merge.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/tests/test_merge.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,109 @@
1# Copyright 2009 Canonical Ltd.
2# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
3#
4# This program is free software: you can redistribute it and/or modify it
5# under the terms of the GNU General Public License version 3, as published
6# by the Free Software Foundation.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranties of
10# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
11# PURPOSE. See the GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program. If not, see <http://www.gnu.org/licenses/>.
15
16"""Tests for tree merging."""
17
18import uuid
19import os
20
21from unittest import TestCase
22
23from ubuntuone.storageprotocol.dircontent_pb2 import FILE, DIRECTORY
24
25from magicicada.u1sync.genericmerge import MergeNode, generic_merge
26from magicicada.u1sync.merge import (
27 merge_trees, ClobberLocalMerge, ClobberServerMerge, SyncMerge)
28
29
30def accumulate_path(nodes, name, partial_parent):
31 """pre-merge which accumulates a path"""
32 return os.path.join(partial_parent, name)
33
34
35def capture_merge(nodes, partial_result, child_results):
36 """post-merge which accumulates merge results."""
37 return (nodes, partial_result, child_results)
38
39
40class MergeTest(TestCase):
41 """Tests for generic tree merges."""
42
43 def test_generic_merge(self):
44 """Tests that generic merge behaves as expected."""
45 tree_a = MergeNode(DIRECTORY, children={
46 'foo': MergeNode(FILE, uuid=uuid.uuid4()),
47 'bar': MergeNode(FILE, uuid=uuid.uuid4()),
48 }, uuid=uuid.uuid4())
49 tree_b = MergeNode(DIRECTORY, children={
50 'bar': MergeNode(FILE, uuid=uuid.uuid4()),
51 'baz': MergeNode(FILE, uuid=uuid.uuid4()),
52 }, uuid=uuid.uuid4())
53 result = generic_merge(trees=[tree_a, tree_b],
54 pre_merge=accumulate_path,
55 post_merge=capture_merge,
56 partial_parent="", name="ex")
57 expected_result = ([tree_a, tree_b], "ex", {
58 'foo': ([tree_a.children['foo'], None], "ex/foo", {}),
59 'bar': ([tree_a.children['bar'], tree_b.children['bar']],
60 "ex/bar", {}),
61 'baz': ([None, tree_b.children['baz']], "ex/baz", {}),
62 })
63 self.assertEqual(expected_result, result)
64
65 def test_clobber(self):
66 """Tests clobbering merges."""
67 server_tree = MergeNode(DIRECTORY, children={
68 'foo': MergeNode(FILE, content_hash="dummy:abc"),
69 'bar': MergeNode(FILE, content_hash="dummy:xyz"),
70 'baz': MergeNode(FILE, content_hash="dummy:aaa"),
71 })
72 local_tree = MergeNode(DIRECTORY, children={
73 'foo': MergeNode(FILE, content_hash="dummy:cde"),
74 'bar': MergeNode(FILE, content_hash="dummy:zyx"),
75 'hoge': MergeNode(FILE, content_hash="dummy:bbb"),
76 })
77 result_tree = merge_trees(local_tree, local_tree,
78 server_tree, server_tree,
79 ClobberServerMerge())
80 self.assertEqual(local_tree, result_tree)
81 result_tree = merge_trees(local_tree, local_tree,
82 server_tree, server_tree,
83 ClobberLocalMerge())
84 self.assertEqual(server_tree, result_tree)
85
86 def test_sync(self):
87 """Test sync merges."""
88 server_tree = MergeNode(DIRECTORY, children={
89 'bar': MergeNode(FILE, content_hash="dummy:xyz"),
90 'baz': MergeNode(FILE, content_hash="dummy:aaa"),
91 'foo': MergeNode(FILE, content_hash="dummy:abc"),
92 })
93 old_server_tree = MergeNode(DIRECTORY, children={})
94 local_tree = MergeNode(DIRECTORY, children={
95 'bar': MergeNode(FILE, content_hash="dummy:xyz"),
96 'foo': MergeNode(FILE, content_hash="dummy:abc"),
97 'hoge': MergeNode(FILE, content_hash="dummy:bbb"),
98 })
99 old_local_tree = MergeNode(DIRECTORY, children={})
100 expected_tree = MergeNode(DIRECTORY, children={
101 'bar': MergeNode(FILE, content_hash="dummy:xyz"),
102 'baz': MergeNode(FILE, content_hash="dummy:aaa"),
103 'foo': MergeNode(FILE, content_hash="dummy:abc"),
104 'hoge': MergeNode(FILE, content_hash="dummy:bbb"),
105 })
106 result_tree = merge_trees(old_local_tree, local_tree,
107 old_server_tree, server_tree,
108 SyncMerge())
109 self.assertEqual(result_tree, expected_tree)
0110
=== added file 'magicicada/u1sync/utils.py'
--- magicicada/u1sync/utils.py 1970-01-01 00:00:00 +0000
+++ magicicada/u1sync/utils.py 2018-04-20 01:22:48 +0000
@@ -0,0 +1,50 @@
1# Copyright 2009 Canonical Ltd.
2# Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros)
3#
4# This program is free software: you can redistribute it and/or modify it
5# under the terms of the GNU General Public License version 3, as published
6# by the Free Software Foundation.
7#
8# This program is distributed in the hope that it will be useful, but
9# WITHOUT ANY WARRANTY; without even the implied warranties of
10# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
11# PURPOSE. See the GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program. If not, see <http://www.gnu.org/licenses/>.
15
16"""Miscellaneous utility functions."""
17
18import os
19
20from errno import EEXIST, ENOENT
21
22from magicicada.u1sync.constants import METADATA_DIR_NAME, SPECIAL_FILE_RE
23
24
25def should_sync(filename):
26 """Returns True if the filename should be synced.
27
28 @param filename: a unicode filename
29
30 """
31 return (filename != METADATA_DIR_NAME and
32 not SPECIAL_FILE_RE.match(filename))
33
34
35def safe_mkdir(path):
36 """Creates a directory if it does not already exist."""
37 try:
38 os.mkdir(path)
39    except OSError as e:
40 if e.errno != EEXIST:
41 raise
42
43
44def safe_unlink(path):
45 """Unlinks a file if it exists."""
46 try:
47 os.remove(path)
48    except OSError as e:
49 if e.errno != ENOENT:
50 raise
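
Both helpers deliberately swallow only their one expected errno and re-raise anything else, which makes them safe to call repeatedly. A quick sketch (paths are illustrative):

    import os
    import tempfile

    from magicicada.u1sync.utils import safe_mkdir, safe_unlink

    base = tempfile.mkdtemp()
    target = os.path.join(base, "mirror")
    safe_mkdir(target)                         # creates the directory
    safe_mkdir(target)                         # no-op: EEXIST is swallowed
    safe_unlink(os.path.join(base, "absent"))  # no-op: ENOENT is swallowed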
