Merge lp:~nataliabidart/magicicada-server/embed-u1sync into lp:magicicada-server
Proposed by: Natalia Bidart
Status: Merged
Approved by: Natalia Bidart
Approved revision: 95
Merged at revision: 90
Proposed branch: lp:~nataliabidart/magicicada-server/embed-u1sync
Merge into: lp:magicicada-server
Diff against target: 2652 lines (+2362/-31), 28 files modified:
  config-manager.txt (+0/-1)
  lib/utilities/testlib.py (+0/-1)
  lib/utilities/utils.py (+1/-1)
  magicicada/filesync/services.py (+1/-2)
  magicicada/metrics/tests/test_base.py (+1/-2)
  magicicada/monitoring/reactor.py (+1/-2)
  magicicada/monitoring/tests/test_reactor.py (+1/-2)
  magicicada/server/integtests/test_sync.py (+4/-4)
  magicicada/server/server.py (+1/-3)
  magicicada/server/ssl_proxy.py (+1/-3)
  magicicada/server/stats.py (+1/-2)
  magicicada/server/tests/test_server.py (+1/-3)
  magicicada/server/tests/test_ssl_proxy.py (+1/-3)
  magicicada/server/tests/test_stats.py (+1/-2)
  magicicada/u1sync/__init__.py (+14/-0)
  magicicada/u1sync/client.py (+736/-0)
  magicicada/u1sync/constants.py (+26/-0)
  magicicada/u1sync/genericmerge.py (+86/-0)
  magicicada/u1sync/main.py (+443/-0)
  magicicada/u1sync/merge.py (+181/-0)
  magicicada/u1sync/metadata.py (+155/-0)
  magicicada/u1sync/scan.py (+84/-0)
  magicicada/u1sync/sync.py (+377/-0)
  magicicada/u1sync/tests/__init__.py (+1/-0)
  magicicada/u1sync/tests/test_client.py (+63/-0)
  magicicada/u1sync/tests/test_init.py (+22/-0)
  magicicada/u1sync/tests/test_merge.py (+109/-0)
  magicicada/u1sync/utils.py (+50/-0)
To merge this branch: bzr merge lp:~nataliabidart/magicicada-server/embed-u1sync
Related bugs: (none)
Reviewer: Facundo Batista (status: Approve)
Review via email: mp+343570@code.launchpad.net
Commit message
- Embed u1sync test dependency inside magicicada source code.
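In practice the merge swaps the external sourcedep import path for the embedded package path; a minimal before/after sketch, using the imports touched in the diff below:

    # before: u1sync fetched as a bzr sourcedep, imported top-level
    import u1sync.client
    from u1sync.main import do_diff, do_init, do_sync

    # after: u1sync embedded in the magicicada package
    from magicicada.u1sync import client as u1sync_client
    from magicicada.u1sync.main import do_diff, do_init, do_sync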
Description of the change
Preview Diff
1 | === modified file 'config-manager.txt' | |||
2 | --- config-manager.txt 2018-04-18 20:28:35 +0000 | |||
3 | +++ config-manager.txt 2018-04-20 01:22:48 +0000 | |||
4 | @@ -21,5 +21,4 @@ | |||
5 | 21 | # | 21 | # |
6 | 22 | # make clean-sourcedeps sourcedeps | 22 | # make clean-sourcedeps sourcedeps |
7 | 23 | 23 | ||
8 | 24 | ./.sourcecode/u1sync ~facundo/u1sync/opensourcing;revno=10 | ||
9 | 25 | ./.sourcecode/magicicada-client ~chicharreros/magicicada-client/trunk;revno=1442 | 24 | ./.sourcecode/magicicada-client ~chicharreros/magicicada-client/trunk;revno=1442 |
10 | 26 | 25 | ||
11 | === removed symlink 'lib/metrics' | |||
12 | === target was u'../magicicada/metrics/' | |||
13 | === removed symlink 'lib/u1sync' | |||
14 | === target was u'../.sourcecode/u1sync/u1sync' | |||
15 | === modified file 'lib/utilities/testlib.py' | |||
16 | --- lib/utilities/testlib.py 2018-04-05 21:08:25 +0000 | |||
17 | +++ lib/utilities/testlib.py 2018-04-20 01:22:48 +0000 | |||
18 | @@ -20,7 +20,6 @@ | |||
19 | 20 | 20 | ||
20 | 21 | from __future__ import unicode_literals | 21 | from __future__ import unicode_literals |
21 | 22 | 22 | ||
22 | 23 | import logging | ||
23 | 24 | import os | 23 | import os |
24 | 25 | import re | 24 | import re |
25 | 26 | 25 | ||
26 | 27 | 26 | ||
27 | === modified file 'lib/utilities/utils.py' | |||
28 | --- lib/utilities/utils.py 2018-04-05 21:08:25 +0000 | |||
29 | +++ lib/utilities/utils.py 2018-04-20 01:22:48 +0000 | |||
30 | @@ -37,7 +37,7 @@ | |||
31 | 37 | with open(proc_stat, 'r') as f: | 37 | with open(proc_stat, 'r') as f: |
32 | 38 | if proc_name in f.read(): | 38 | if proc_name in f.read(): |
33 | 39 | return True | 39 | return True |
35 | 40 | except: | 40 | except Exception: |
36 | 41 | # ignore all errors | 41 | # ignore all errors |
37 | 42 | pass | 42 | pass |
38 | 43 | return False | 43 | return False |
39 | 44 | 44 | ||
40 | === modified file 'magicicada/filesync/services.py' | |||
41 | --- magicicada/filesync/services.py 2018-04-05 21:08:25 +0000 | |||
42 | +++ magicicada/filesync/services.py 2018-04-20 01:22:48 +0000 | |||
43 | @@ -34,12 +34,11 @@ | |||
44 | 34 | from functools import wraps | 34 | from functools import wraps |
45 | 35 | from weakref import WeakValueDictionary | 35 | from weakref import WeakValueDictionary |
46 | 36 | 36 | ||
47 | 37 | import metrics | ||
48 | 38 | |||
49 | 39 | from django.conf import settings | 37 | from django.conf import settings |
50 | 40 | from django.db import connection, models | 38 | from django.db import connection, models |
51 | 41 | from django.utils.timezone import now | 39 | from django.utils.timezone import now |
52 | 42 | 40 | ||
53 | 41 | from magicicada import metrics | ||
54 | 43 | from magicicada.filesync import errors, utils | 42 | from magicicada.filesync import errors, utils |
55 | 44 | from magicicada.filesync.dbmanager import ( | 43 | from magicicada.filesync.dbmanager import ( |
56 | 45 | fsync_readonly, | 44 | fsync_readonly, |
57 | 46 | 45 | ||
58 | === modified file 'magicicada/metrics/tests/test_base.py' | |||
59 | --- magicicada/metrics/tests/test_base.py 2016-06-04 19:05:51 +0000 | |||
60 | +++ magicicada/metrics/tests/test_base.py 2018-04-20 01:22:48 +0000 | |||
61 | @@ -22,8 +22,7 @@ | |||
62 | 22 | import logging | 22 | import logging |
63 | 23 | import unittest | 23 | import unittest |
64 | 24 | 24 | ||
67 | 25 | import metrics | 25 | from magicicada import metrics |
66 | 26 | |||
68 | 27 | from magicicada.testing.testcase import BaseTestCase | 26 | from magicicada.testing.testcase import BaseTestCase |
69 | 28 | 27 | ||
70 | 29 | 28 | ||
71 | 30 | 29 | ||
72 | === modified file 'magicicada/monitoring/reactor.py' | |||
73 | --- magicicada/monitoring/reactor.py 2018-04-05 21:08:25 +0000 | |||
74 | +++ magicicada/monitoring/reactor.py 2018-04-20 01:22:48 +0000 | |||
75 | @@ -28,8 +28,7 @@ | |||
76 | 28 | import traceback | 28 | import traceback |
77 | 29 | import Queue | 29 | import Queue |
78 | 30 | 30 | ||
81 | 31 | import metrics | 31 | from magicicada import metrics |
80 | 32 | |||
82 | 33 | from magicicada.settings import TRACE | 32 | from magicicada.settings import TRACE |
83 | 34 | 33 | ||
84 | 35 | 34 | ||
85 | 36 | 35 | ||
86 | === modified file 'magicicada/monitoring/tests/test_reactor.py' | |||
87 | --- magicicada/monitoring/tests/test_reactor.py 2018-04-05 21:08:25 +0000 | |||
88 | +++ magicicada/monitoring/tests/test_reactor.py 2018-04-20 01:22:48 +0000 | |||
89 | @@ -26,8 +26,7 @@ | |||
90 | 26 | from twisted.trial.unittest import TestCase as TwistedTestCase | 26 | from twisted.trial.unittest import TestCase as TwistedTestCase |
91 | 27 | from twisted.internet import reactor, defer | 27 | from twisted.internet import reactor, defer |
92 | 28 | 28 | ||
95 | 29 | import metrics | 29 | from magicicada import metrics |
94 | 30 | |||
96 | 31 | from magicicada.monitoring.reactor import ReactorInspector, logger | 30 | from magicicada.monitoring.reactor import ReactorInspector, logger |
97 | 32 | from magicicada.settings import TRACE | 31 | from magicicada.settings import TRACE |
98 | 33 | from magicicada.testing.testcase import BaseTestCase | 32 | from magicicada.testing.testcase import BaseTestCase |
99 | 34 | 33 | ||
100 | === modified file 'magicicada/server/integtests/test_sync.py' | |||
101 | --- magicicada/server/integtests/test_sync.py 2018-04-05 21:08:25 +0000 | |||
102 | +++ magicicada/server/integtests/test_sync.py 2018-04-20 01:22:48 +0000 | |||
103 | @@ -35,9 +35,6 @@ | |||
104 | 35 | from twisted.internet import reactor, defer | 35 | from twisted.internet import reactor, defer |
105 | 36 | from twisted.python.failure import Failure | 36 | from twisted.python.failure import Failure |
106 | 37 | 37 | ||
107 | 38 | import u1sync.client | ||
108 | 39 | |||
109 | 40 | from u1sync.main import do_diff, do_init, do_sync | ||
110 | 41 | from ubuntuone.platform import tools | 38 | from ubuntuone.platform import tools |
111 | 42 | from ubuntuone.storageprotocol import client as protocol_client | 39 | from ubuntuone.storageprotocol import client as protocol_client |
112 | 43 | from ubuntuone.storageprotocol import request | 40 | from ubuntuone.storageprotocol import request |
113 | @@ -59,6 +56,9 @@ | |||
114 | 59 | ) | 56 | ) |
115 | 60 | from magicicada.server.testing.caps_helpers import required_caps | 57 | from magicicada.server.testing.caps_helpers import required_caps |
116 | 61 | from magicicada.server.testing.testcase import logger | 58 | from magicicada.server.testing.testcase import logger |
117 | 59 | from magicicada.u1sync import client as u1sync_client | ||
118 | 60 | from magicicada.u1sync.main import do_diff, do_init, do_sync | ||
119 | 61 | |||
120 | 62 | 62 | ||
121 | 63 | U1SYNC_QUIET = True | 63 | U1SYNC_QUIET = True |
122 | 64 | NO_CONTENT_HASH = "" | 64 | NO_CONTENT_HASH = "" |
123 | @@ -225,7 +225,7 @@ | |||
124 | 225 | 225 | ||
125 | 226 | def _u1sync_client(self): | 226 | def _u1sync_client(self): |
126 | 227 | """Create a u1sync client instance.""" | 227 | """Create a u1sync client instance.""" |
128 | 228 | client = u1sync.client.Client() | 228 | client = u1sync_client.Client() |
129 | 229 | client.connect_ssl("localhost", self.ssl_port, True) | 229 | client.connect_ssl("localhost", self.ssl_port, True) |
130 | 230 | client.set_capabilities() | 230 | client.set_capabilities() |
131 | 231 | auth_info = self.access_tokens['jack'] | 231 | auth_info = self.access_tokens['jack'] |
132 | 232 | 232 | ||
133 | === modified file 'magicicada/server/server.py' | |||
134 | --- magicicada/server/server.py 2018-04-05 21:08:25 +0000 | |||
135 | +++ magicicada/server/server.py 2018-04-20 01:22:48 +0000 | |||
136 | @@ -40,8 +40,6 @@ | |||
137 | 40 | import twisted | 40 | import twisted |
138 | 41 | import twisted.web.error | 41 | import twisted.web.error |
139 | 42 | 42 | ||
140 | 43 | import metrics | ||
141 | 44 | |||
142 | 45 | from twisted.application.service import MultiService, Service | 43 | from twisted.application.service import MultiService, Service |
143 | 46 | from twisted.application.internet import TCPServer | 44 | from twisted.application.internet import TCPServer |
144 | 47 | from twisted.internet.defer import maybeDeferred, inlineCallbacks | 45 | from twisted.internet.defer import maybeDeferred, inlineCallbacks |
145 | @@ -51,7 +49,7 @@ | |||
146 | 51 | from ubuntuone.storageprotocol import protocol_pb2, request, sharersp | 49 | from ubuntuone.storageprotocol import protocol_pb2, request, sharersp |
147 | 52 | from ubuntuone.supervisor import utils as supervisor_utils | 50 | from ubuntuone.supervisor import utils as supervisor_utils |
148 | 53 | 51 | ||
150 | 54 | from magicicada import settings | 52 | from magicicada import metrics, settings |
151 | 55 | from magicicada.filesync import errors as dataerror | 53 | from magicicada.filesync import errors as dataerror |
152 | 56 | from magicicada.filesync.notifier import notifier | 54 | from magicicada.filesync.notifier import notifier |
153 | 57 | from magicicada.monitoring.reactor import ReactorInspector | 55 | from magicicada.monitoring.reactor import ReactorInspector |
154 | 58 | 56 | ||
155 | === modified file 'magicicada/server/ssl_proxy.py' | |||
156 | --- magicicada/server/ssl_proxy.py 2018-04-05 21:08:25 +0000 | |||
157 | +++ magicicada/server/ssl_proxy.py 2018-04-20 01:22:48 +0000 | |||
158 | @@ -31,9 +31,7 @@ | |||
159 | 31 | from twisted.protocols import portforward, basic | 31 | from twisted.protocols import portforward, basic |
160 | 32 | from twisted.web import server, resource | 32 | from twisted.web import server, resource |
161 | 33 | 33 | ||
165 | 34 | import metrics | 34 | from magicicada import metrics, settings |
163 | 35 | |||
164 | 36 | from magicicada import settings | ||
166 | 37 | from magicicada.server.server import FILESYNC_STATUS_MSG, get_service_port | 35 | from magicicada.server.server import FILESYNC_STATUS_MSG, get_service_port |
167 | 38 | from magicicada.server.ssl import disable_ssl_compression | 36 | from magicicada.server.ssl import disable_ssl_compression |
168 | 39 | from ubuntuone.supervisor import utils as supervisor_utils | 37 | from ubuntuone.supervisor import utils as supervisor_utils |
169 | 40 | 38 | ||
170 | === modified file 'magicicada/server/stats.py' | |||
171 | --- magicicada/server/stats.py 2018-04-05 21:08:25 +0000 | |||
172 | +++ magicicada/server/stats.py 2018-04-20 01:22:48 +0000 | |||
173 | @@ -27,8 +27,7 @@ | |||
174 | 27 | from twisted.internet import defer, reactor | 27 | from twisted.internet import defer, reactor |
175 | 28 | from twisted.web import server, resource | 28 | from twisted.web import server, resource |
176 | 29 | 29 | ||
179 | 30 | import metrics | 30 | from magicicada import metrics |
178 | 31 | |||
180 | 32 | from magicicada.monitoring import dump | 31 | from magicicada.monitoring import dump |
181 | 33 | 32 | ||
182 | 34 | logger = logging.getLogger(__name__) | 33 | logger = logging.getLogger(__name__) |
183 | 35 | 34 | ||
184 | === modified file 'magicicada/server/tests/test_server.py' | |||
185 | --- magicicada/server/tests/test_server.py 2018-04-05 21:08:25 +0000 | |||
186 | +++ magicicada/server/tests/test_server.py 2018-04-20 01:22:48 +0000 | |||
187 | @@ -28,8 +28,6 @@ | |||
188 | 28 | import uuid | 28 | import uuid |
189 | 29 | import weakref | 29 | import weakref |
190 | 30 | 30 | ||
191 | 31 | import metrics | ||
192 | 32 | |||
193 | 33 | from django.utils.timezone import now | 31 | from django.utils.timezone import now |
194 | 34 | from mocker import expect, Mocker, MockerTestCase, ARGS, KWARGS, ANY | 32 | from mocker import expect, Mocker, MockerTestCase, ARGS, KWARGS, ANY |
195 | 35 | from twisted.python.failure import Failure | 33 | from twisted.python.failure import Failure |
196 | @@ -38,7 +36,7 @@ | |||
197 | 38 | from twisted.trial.unittest import TestCase as TwistedTestCase | 36 | from twisted.trial.unittest import TestCase as TwistedTestCase |
198 | 39 | from ubuntuone.storageprotocol import protocol_pb2, request | 37 | from ubuntuone.storageprotocol import protocol_pb2, request |
199 | 40 | 38 | ||
201 | 41 | from magicicada import settings | 39 | from magicicada import metrics, settings |
202 | 42 | from magicicada.filesync import errors as dataerror | 40 | from magicicada.filesync import errors as dataerror |
203 | 43 | from magicicada.filesync.models import Share | 41 | from magicicada.filesync.models import Share |
204 | 44 | from magicicada.server import errors | 42 | from magicicada.server import errors |
205 | 45 | 43 | ||
206 | === modified file 'magicicada/server/tests/test_ssl_proxy.py' | |||
207 | --- magicicada/server/tests/test_ssl_proxy.py 2018-04-05 21:08:25 +0000 | |||
208 | +++ magicicada/server/tests/test_ssl_proxy.py 2018-04-20 01:22:48 +0000 | |||
209 | @@ -34,9 +34,7 @@ | |||
210 | 34 | StorageClientFactory, StorageClient) | 34 | StorageClientFactory, StorageClient) |
211 | 35 | from ubuntuone.supervisor import utils as supervisor_utils | 35 | from ubuntuone.supervisor import utils as supervisor_utils |
212 | 36 | 36 | ||
216 | 37 | import metrics | 37 | from magicicada import metrics, settings |
214 | 38 | |||
215 | 39 | from magicicada import settings | ||
217 | 40 | from magicicada.server import ssl_proxy | 38 | from magicicada.server import ssl_proxy |
218 | 41 | from magicicada.server.server import PREFERRED_CAP | 39 | from magicicada.server.server import PREFERRED_CAP |
219 | 42 | from magicicada.server.testing.testcase import TestWithDatabase | 40 | from magicicada.server.testing.testcase import TestWithDatabase |
220 | 43 | 41 | ||
221 | === modified file 'magicicada/server/tests/test_stats.py' | |||
222 | --- magicicada/server/tests/test_stats.py 2018-04-05 21:08:25 +0000 | |||
223 | +++ magicicada/server/tests/test_stats.py 2018-04-20 01:22:48 +0000 | |||
224 | @@ -20,8 +20,7 @@ | |||
225 | 20 | 20 | ||
226 | 21 | from twisted.internet import defer | 21 | from twisted.internet import defer |
227 | 22 | 22 | ||
230 | 23 | import metrics | 23 | from magicicada import metrics |
229 | 24 | |||
231 | 25 | from magicicada.server.stats import StatsWorker | 24 | from magicicada.server.stats import StatsWorker |
232 | 26 | from magicicada.server.testing.testcase import TestWithDatabase | 25 | from magicicada.server.testing.testcase import TestWithDatabase |
233 | 27 | 26 | ||
234 | 28 | 27 | ||
235 | === added directory 'magicicada/u1sync' | |||
236 | === added file 'magicicada/u1sync/__init__.py' | |||
237 | --- magicicada/u1sync/__init__.py 1970-01-01 00:00:00 +0000 | |||
238 | +++ magicicada/u1sync/__init__.py 2018-04-20 01:22:48 +0000 | |||
239 | @@ -0,0 +1,14 @@ | |||
240 | 1 | # Copyright 2009 Canonical Ltd. | ||
241 | 2 | # | ||
242 | 3 | # This program is free software: you can redistribute it and/or modify it | ||
243 | 4 | # under the terms of the GNU General Public License version 3, as published | ||
244 | 5 | # by the Free Software Foundation. | ||
245 | 6 | # | ||
246 | 7 | # This program is distributed in the hope that it will be useful, but | ||
247 | 8 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
248 | 9 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
249 | 10 | # PURPOSE. See the GNU General Public License for more details. | ||
250 | 11 | # | ||
251 | 12 | # You should have received a copy of the GNU General Public License along | ||
252 | 13 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
253 | 14 | """The guts of the u1sync tool.""" | ||
254 | 0 | 15 | ||
255 | === added file 'magicicada/u1sync/client.py' | |||
256 | --- magicicada/u1sync/client.py 1970-01-01 00:00:00 +0000 | |||
257 | +++ magicicada/u1sync/client.py 2018-04-20 01:22:48 +0000 | |||
258 | @@ -0,0 +1,736 @@ | |||
259 | 1 | # Copyright 2009-2015 Canonical | ||
260 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
261 | 3 | # | ||
262 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
263 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
264 | 6 | # by the Free Software Foundation. | ||
265 | 7 | # | ||
266 | 8 | # This program is distributed in the hope that it will be useful, but | ||
267 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
268 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
269 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
270 | 12 | # | ||
271 | 13 | # You should have received a copy of the GNU General Public License along | ||
272 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
273 | 15 | |||
274 | 16 | """Pretty API for protocol client.""" | ||
275 | 17 | |||
276 | 18 | from __future__ import with_statement | ||
277 | 19 | |||
278 | 20 | import logging | ||
279 | 21 | import os | ||
280 | 22 | import sys | ||
281 | 23 | import shutil | ||
282 | 24 | import time | ||
283 | 25 | import uuid | ||
284 | 26 | import zlib | ||
285 | 27 | |||
286 | 28 | from cStringIO import StringIO | ||
287 | 29 | from logging.handlers import RotatingFileHandler | ||
288 | 30 | from Queue import Queue | ||
289 | 31 | from threading import Lock | ||
290 | 32 | |||
291 | 33 | from dirspec.basedir import xdg_cache_home | ||
292 | 34 | from twisted.internet import reactor, defer | ||
293 | 35 | from twisted.internet.defer import inlineCallbacks, returnValue | ||
294 | 36 | from ubuntuone.storageprotocol import request, volumes | ||
295 | 37 | from ubuntuone.storageprotocol.content_hash import crc32 | ||
296 | 38 | from ubuntuone.storageprotocol.context import get_ssl_context | ||
297 | 39 | from ubuntuone.storageprotocol.client import ( | ||
298 | 40 | StorageClientFactory, StorageClient) | ||
299 | 41 | from ubuntuone.storageprotocol.delta import DIRECTORY as delta_DIR | ||
300 | 42 | from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE | ||
301 | 43 | |||
302 | 44 | from magicicada.u1sync.genericmerge import MergeNode | ||
303 | 45 | from magicicada.u1sync.utils import should_sync | ||
304 | 46 | |||
305 | 47 | |||
306 | 48 | CONSUMER_KEY = "ubuntuone" | ||
307 | 49 | CONSUMER_SECRET = "hammertime" | ||
308 | 50 | |||
309 | 51 | u1sync_log_dir = os.path.join(xdg_cache_home, 'u1sync', 'log') | ||
310 | 52 | LOGFILENAME = os.path.join(u1sync_log_dir, 'u1sync.log') | ||
311 | 53 | if not os.path.exists(u1sync_log_dir): | ||
312 | 54 | os.makedirs(u1sync_log_dir) | ||
313 | 55 | u1_logger = logging.getLogger("u1sync.timing.log") | ||
314 | 56 | handler = RotatingFileHandler(LOGFILENAME) | ||
315 | 57 | u1_logger.addHandler(handler) | ||
316 | 58 | |||
317 | 59 | |||
318 | 60 | def share_str(share_uuid): | ||
319 | 61 | """Converts a share UUID to a form the protocol likes.""" | ||
320 | 62 | return str(share_uuid) if share_uuid is not None else request.ROOT | ||
321 | 63 | |||
322 | 64 | |||
323 | 65 | def log_timing(func): | ||
324 | 66 | def wrapper(*arg, **kwargs): | ||
325 | 67 | start = time.time() | ||
326 | 68 | ent = func(*arg, **kwargs) | ||
327 | 69 | stop = time.time() | ||
328 | 70 | u1_logger.debug('for %s %0.5f ms elapsed', | ||
329 | 71 | func.func_name, stop-start * 1000.0) | ||
330 | 72 | return ent | ||
331 | 73 | return wrapper | ||
332 | 74 | |||
333 | 75 | |||
334 | 76 | class ForcedShutdown(Exception): | ||
335 | 77 | """Client shutdown forced.""" | ||
336 | 78 | |||
337 | 79 | |||
338 | 80 | class Waiter(object): | ||
339 | 81 | """Wait object for blocking waits.""" | ||
340 | 82 | |||
341 | 83 | def __init__(self): | ||
342 | 84 | """Initializes the wait object.""" | ||
343 | 85 | self.queue = Queue() | ||
344 | 86 | |||
345 | 87 | def wake(self, result): | ||
346 | 88 | """Wakes the waiter with a result.""" | ||
347 | 89 | self.queue.put((result, None)) | ||
348 | 90 | |||
349 | 91 | def wakeAndRaise(self, exc_info): | ||
350 | 92 | """Wakes the waiter, raising the given exception in it.""" | ||
351 | 93 | self.queue.put((None, exc_info)) | ||
352 | 94 | |||
353 | 95 | def wakeWithResult(self, func, *args, **kw): | ||
354 | 96 | """Wakes the waiter with the result of the given function.""" | ||
355 | 97 | try: | ||
356 | 98 | result = func(*args, **kw) | ||
357 | 99 | except Exception: | ||
358 | 100 | self.wakeAndRaise(sys.exc_info()) | ||
359 | 101 | else: | ||
360 | 102 | self.wake(result) | ||
361 | 103 | |||
362 | 104 | def wait(self): | ||
363 | 105 | """Waits for wakeup.""" | ||
364 | 106 | (result, exc_info) = self.queue.get() | ||
365 | 107 | if exc_info: | ||
366 | 108 | try: | ||
367 | 109 | raise exc_info[0], exc_info[1], exc_info[2] | ||
368 | 110 | finally: | ||
369 | 111 | exc_info = None | ||
370 | 112 | else: | ||
371 | 113 | return result | ||
372 | 114 | |||
373 | 115 | |||
374 | 116 | class SyncStorageClient(StorageClient): | ||
375 | 117 | """Simple client that calls a callback on connection.""" | ||
376 | 118 | |||
377 | 119 | @log_timing | ||
378 | 120 | def connectionMade(self): | ||
379 | 121 | """Setup and call callback.""" | ||
380 | 122 | StorageClient.connectionMade(self) | ||
381 | 123 | if self.factory.current_protocol not in (None, self): | ||
382 | 124 | self.factory.current_protocol.transport.loseConnection() | ||
383 | 125 | self.factory.current_protocol = self | ||
384 | 126 | self.factory.observer.connected() | ||
385 | 127 | |||
386 | 128 | @log_timing | ||
387 | 129 | def connectionLost(self, reason=None): | ||
388 | 130 | """Callback for established connection lost.""" | ||
389 | 131 | StorageClient.connectionLost(self, reason) | ||
390 | 132 | if self.factory.current_protocol is self: | ||
391 | 133 | self.factory.current_protocol = None | ||
392 | 134 | self.factory.observer.disconnected(reason) | ||
393 | 135 | |||
394 | 136 | |||
395 | 137 | class SyncClientFactory(StorageClientFactory): | ||
396 | 138 | """A cmd protocol factory.""" | ||
397 | 139 | |||
398 | 140 | protocol = SyncStorageClient | ||
399 | 141 | |||
400 | 142 | @log_timing | ||
401 | 143 | def __init__(self, observer): | ||
402 | 144 | """Create the factory""" | ||
403 | 145 | self.observer = observer | ||
404 | 146 | self.current_protocol = None | ||
405 | 147 | |||
406 | 148 | @log_timing | ||
407 | 149 | def clientConnectionFailed(self, connector, reason): | ||
408 | 150 | """We failed at connecting.""" | ||
409 | 151 | self.current_protocol = None | ||
410 | 152 | self.observer.connection_failed(reason) | ||
411 | 153 | |||
412 | 154 | |||
413 | 155 | class UnsupportedOperationError(Exception): | ||
414 | 156 | """The operation is unsupported by the protocol version.""" | ||
415 | 157 | |||
416 | 158 | |||
417 | 159 | class ConnectionError(Exception): | ||
418 | 160 | """A connection error.""" | ||
419 | 161 | |||
420 | 162 | |||
421 | 163 | class AuthenticationError(Exception): | ||
422 | 164 | """An authentication error.""" | ||
423 | 165 | |||
424 | 166 | |||
425 | 167 | class NoSuchShareError(Exception): | ||
426 | 168 | """Error when there is no such share available.""" | ||
427 | 169 | |||
428 | 170 | |||
429 | 171 | class CapabilitiesError(Exception): | ||
430 | 172 | """A capabilities set/query related error.""" | ||
431 | 173 | |||
432 | 174 | |||
433 | 175 | class Client(object): | ||
434 | 176 | """U1 storage client facade.""" | ||
435 | 177 | required_caps = frozenset([ | ||
436 | 178 | "no-content", "account-info", "resumable-uploads", | ||
437 | 179 | "fix462230", "volumes", "generations", | ||
438 | 180 | ]) | ||
439 | 181 | |||
440 | 182 | def __init__(self, realm=None, reactor=reactor): | ||
441 | 183 | """Create the instance. | ||
442 | 184 | |||
443 | 185 | 'realm' is no longer used, but is left as param for API compatibility. | ||
444 | 186 | |||
445 | 187 | """ | ||
446 | 188 | self.reactor = reactor | ||
447 | 189 | self.factory = SyncClientFactory(self) | ||
448 | 190 | |||
449 | 191 | self._status_lock = Lock() | ||
450 | 192 | self._status = "disconnected" | ||
451 | 193 | self._status_reason = None | ||
452 | 194 | self._status_waiting = [] | ||
453 | 195 | self._active_waiters = set() | ||
454 | 196 | |||
455 | 197 | self.consumer_key = CONSUMER_KEY | ||
456 | 198 | self.consumer_secret = CONSUMER_SECRET | ||
457 | 199 | |||
458 | 200 | def force_shutdown(self): | ||
459 | 201 | """Forces the client to shut itself down.""" | ||
460 | 202 | with self._status_lock: | ||
461 | 203 | self._status = "forced_shutdown" | ||
462 | 204 | self._reason = None | ||
463 | 205 | for waiter in self._active_waiters: | ||
464 | 206 | waiter.wakeAndRaise((ForcedShutdown("Forced shutdown"), | ||
465 | 207 | None, None)) | ||
466 | 208 | self._active_waiters.clear() | ||
467 | 209 | |||
468 | 210 | def _get_waiter_locked(self): | ||
469 | 211 | """Gets a wait object for blocking waits. Should be called with the | ||
470 | 212 | status lock held. | ||
471 | 213 | """ | ||
472 | 214 | waiter = Waiter() | ||
473 | 215 | if self._status == "forced_shutdown": | ||
474 | 216 | raise ForcedShutdown("Forced shutdown") | ||
475 | 217 | self._active_waiters.add(waiter) | ||
476 | 218 | return waiter | ||
477 | 219 | |||
478 | 220 | def _get_waiter(self): | ||
479 | 221 | """Get a wait object for blocking waits. Acquires the status lock.""" | ||
480 | 222 | with self._status_lock: | ||
481 | 223 | return self._get_waiter_locked() | ||
482 | 224 | |||
483 | 225 | def _wait(self, waiter): | ||
484 | 226 | """Waits for the waiter.""" | ||
485 | 227 | try: | ||
486 | 228 | return waiter.wait() | ||
487 | 229 | finally: | ||
488 | 230 | with self._status_lock: | ||
489 | 231 | if waiter in self._active_waiters: | ||
490 | 232 | self._active_waiters.remove(waiter) | ||
491 | 233 | |||
492 | 234 | @log_timing | ||
493 | 235 | def _change_status(self, status, reason=None): | ||
494 | 236 | """Changes the client status. Usually called from the reactor | ||
495 | 237 | thread. | ||
496 | 238 | |||
497 | 239 | """ | ||
498 | 240 | with self._status_lock: | ||
499 | 241 | if self._status == "forced_shutdown": | ||
500 | 242 | return | ||
501 | 243 | self._status = status | ||
502 | 244 | self._status_reason = reason | ||
503 | 245 | waiting = self._status_waiting | ||
504 | 246 | self._status_waiting = [] | ||
505 | 247 | for waiter in waiting: | ||
506 | 248 | waiter.wake((status, reason)) | ||
507 | 249 | |||
508 | 250 | @log_timing | ||
509 | 251 | def _await_status_not(self, *ignore_statuses): | ||
510 | 252 | """Blocks until the client status changes, returning the new status. | ||
511 | 253 | Should never be called from the reactor thread. | ||
512 | 254 | |||
513 | 255 | """ | ||
514 | 256 | with self._status_lock: | ||
515 | 257 | status = self._status | ||
516 | 258 | reason = self._status_reason | ||
517 | 259 | while status in ignore_statuses: | ||
518 | 260 | waiter = self._get_waiter_locked() | ||
519 | 261 | self._status_waiting.append(waiter) | ||
520 | 262 | self._status_lock.release() | ||
521 | 263 | try: | ||
522 | 264 | status, reason = self._wait(waiter) | ||
523 | 265 | finally: | ||
524 | 266 | self._status_lock.acquire() | ||
525 | 267 | if status == "forced_shutdown": | ||
526 | 268 | raise ForcedShutdown("Forced shutdown.") | ||
527 | 269 | return (status, reason) | ||
528 | 270 | |||
529 | 271 | def connection_failed(self, reason): | ||
530 | 272 | """Notification that connection failed.""" | ||
531 | 273 | self._change_status("disconnected", reason) | ||
532 | 274 | |||
533 | 275 | def connected(self): | ||
534 | 276 | """Notification that connection succeeded.""" | ||
535 | 277 | self._change_status("connected") | ||
536 | 278 | |||
537 | 279 | def disconnected(self, reason): | ||
538 | 280 | """Notification that we were disconnected.""" | ||
539 | 281 | self._change_status("disconnected", reason) | ||
540 | 282 | |||
541 | 283 | def defer_from_thread(self, function, *args, **kwargs): | ||
542 | 284 | """Do twisted defer magic to get results and show exceptions.""" | ||
543 | 285 | waiter = self._get_waiter() | ||
544 | 286 | |||
545 | 287 | @log_timing | ||
546 | 288 | def runner(): | ||
547 | 289 | """inner.""" | ||
548 | 290 | try: | ||
549 | 291 | d = function(*args, **kwargs) | ||
550 | 292 | if isinstance(d, defer.Deferred): | ||
551 | 293 | d.addCallbacks(lambda r: waiter.wake((r, None, None)), | ||
552 | 294 | lambda f: waiter.wake((None, None, f))) | ||
553 | 295 | else: | ||
554 | 296 | waiter.wake((d, None, None)) | ||
555 | 297 | except Exception: | ||
556 | 298 | waiter.wake((None, sys.exc_info(), None)) | ||
557 | 299 | |||
558 | 300 | self.reactor.callFromThread(runner) | ||
559 | 301 | result, exc_info, failure = self._wait(waiter) | ||
560 | 302 | if exc_info: | ||
561 | 303 | try: | ||
562 | 304 | raise exc_info[0], exc_info[1], exc_info[2] | ||
563 | 305 | finally: | ||
564 | 306 | exc_info = None | ||
565 | 307 | elif failure: | ||
566 | 308 | failure.raiseException() | ||
567 | 309 | else: | ||
568 | 310 | return result | ||
569 | 311 | |||
570 | 312 | @log_timing | ||
571 | 313 | def connect(self, host, port): | ||
572 | 314 | """Connect to host/port.""" | ||
573 | 315 | def _connect(): | ||
574 | 316 | """Deferred part.""" | ||
575 | 317 | self.reactor.connectTCP(host, port, self.factory) | ||
576 | 318 | self._connect_inner(_connect) | ||
577 | 319 | |||
578 | 320 | @log_timing | ||
579 | 321 | def connect_ssl(self, host, port, no_verify): | ||
580 | 322 | """Connect to host/port using ssl.""" | ||
581 | 323 | def _connect(): | ||
582 | 324 | """deferred part.""" | ||
583 | 325 | ctx = get_ssl_context(no_verify, host) | ||
584 | 326 | self.reactor.connectSSL(host, port, self.factory, ctx) | ||
585 | 327 | self._connect_inner(_connect) | ||
586 | 328 | |||
587 | 329 | @log_timing | ||
588 | 330 | def _connect_inner(self, _connect): | ||
589 | 331 | """Helper function for connecting.""" | ||
590 | 332 | self._change_status("connecting") | ||
591 | 333 | self.reactor.callFromThread(_connect) | ||
592 | 334 | status, reason = self._await_status_not("connecting") | ||
593 | 335 | if status != "connected": | ||
594 | 336 | raise ConnectionError(reason.value) | ||
595 | 337 | |||
596 | 338 | @log_timing | ||
597 | 339 | def disconnect(self): | ||
598 | 340 | """Disconnect.""" | ||
599 | 341 | if self.factory.current_protocol is not None: | ||
600 | 342 | self.reactor.callFromThread( | ||
601 | 343 | self.factory.current_protocol.transport.loseConnection) | ||
602 | 344 | self._await_status_not("connecting", "connected", "authenticated") | ||
603 | 345 | |||
604 | 346 | @log_timing | ||
605 | 347 | def simple_auth(self, username, password): | ||
606 | 348 | """Perform simple authorisation.""" | ||
607 | 349 | |||
608 | 350 | @inlineCallbacks | ||
609 | 351 | def _wrapped_authenticate(): | ||
610 | 352 | """Wrapped authenticate.""" | ||
611 | 353 | try: | ||
612 | 354 | yield self.factory.current_protocol.simple_authenticate( | ||
613 | 355 | username, password) | ||
614 | 356 | except Exception: | ||
615 | 357 | self.factory.current_protocol.transport.loseConnection() | ||
616 | 358 | else: | ||
617 | 359 | self._change_status("authenticated") | ||
618 | 360 | |||
619 | 361 | try: | ||
620 | 362 | self.defer_from_thread(_wrapped_authenticate) | ||
621 | 363 | except request.StorageProtocolError as e: | ||
622 | 364 | raise AuthenticationError(e) | ||
623 | 365 | status, reason = self._await_status_not("connected") | ||
624 | 366 | if status != "authenticated": | ||
625 | 367 | raise AuthenticationError(reason.value) | ||
626 | 368 | |||
627 | 369 | @log_timing | ||
628 | 370 | def set_capabilities(self): | ||
629 | 371 | """Set the capabilities with the server""" | ||
630 | 372 | |||
631 | 373 | client = self.factory.current_protocol | ||
632 | 374 | |||
633 | 375 | @log_timing | ||
634 | 376 | def set_caps_callback(req): | ||
635 | 377 | "Caps query succeeded" | ||
636 | 378 | if not req.accepted: | ||
637 | 379 | de = defer.fail("The server denied setting %s capabilities" % | ||
638 | 380 | req.caps) | ||
639 | 381 | return de | ||
640 | 382 | |||
641 | 383 | @log_timing | ||
642 | 384 | def query_caps_callback(req): | ||
643 | 385 | "Caps query succeeded" | ||
644 | 386 | if req.accepted: | ||
645 | 387 | set_d = client.set_caps(self.required_caps) | ||
646 | 388 | set_d.addCallback(set_caps_callback) | ||
647 | 389 | return set_d | ||
648 | 390 | else: | ||
649 | 391 | # the server don't have the requested capabilities. | ||
650 | 392 | # return a failure for now, in the future we might want | ||
651 | 393 | # to reconnect to another server | ||
652 | 394 | de = defer.fail("The server don't have the requested" | ||
653 | 395 | " capabilities: %s" % str(req.caps)) | ||
654 | 396 | return de | ||
655 | 397 | |||
656 | 398 | @log_timing | ||
657 | 399 | def _wrapped_set_capabilities(): | ||
658 | 400 | """Wrapped set_capabilities """ | ||
659 | 401 | d = client.query_caps(self.required_caps) | ||
660 | 402 | d.addCallback(query_caps_callback) | ||
661 | 403 | return d | ||
662 | 404 | |||
663 | 405 | try: | ||
664 | 406 | self.defer_from_thread(_wrapped_set_capabilities) | ||
665 | 407 | except request.StorageProtocolError as e: | ||
666 | 408 | raise CapabilitiesError(e) | ||
667 | 409 | |||
668 | 410 | @log_timing | ||
669 | 411 | def get_root_info(self, volume_uuid): | ||
670 | 412 | """Returns the UUID of the applicable share root.""" | ||
671 | 413 | if volume_uuid is None: | ||
672 | 414 | _get_root = self.factory.current_protocol.get_root | ||
673 | 415 | root = self.defer_from_thread(_get_root) | ||
674 | 416 | return (uuid.UUID(root), True) | ||
675 | 417 | else: | ||
676 | 418 | str_volume_uuid = str(volume_uuid) | ||
677 | 419 | volume = self._match_volume( | ||
678 | 420 | lambda v: str(v.volume_id) == str_volume_uuid) | ||
679 | 421 | if isinstance(volume, volumes.ShareVolume): | ||
680 | 422 | modify = volume.access_level == "Modify" | ||
681 | 423 | if isinstance(volume, volumes.UDFVolume): | ||
682 | 424 | modify = True | ||
683 | 425 | return (uuid.UUID(str(volume.node_id)), modify) | ||
684 | 426 | |||
685 | 427 | @log_timing | ||
686 | 428 | def resolve_path(self, share_uuid, root_uuid, path): | ||
687 | 429 | """Resolve path relative to the given root node.""" | ||
688 | 430 | |||
689 | 431 | @inlineCallbacks | ||
690 | 432 | def _resolve_worker(): | ||
691 | 433 | """Path resolution worker.""" | ||
692 | 434 | node_uuid = root_uuid | ||
693 | 435 | local_path = path.strip('/') | ||
694 | 436 | |||
695 | 437 | while local_path != '': | ||
696 | 438 | local_path, name = os.path.split(local_path) | ||
697 | 439 | hashes = yield self._get_node_hashes(share_uuid) | ||
698 | 440 | content_hash = hashes.get(root_uuid, None) | ||
699 | 441 | if content_hash is None: | ||
700 | 442 | raise KeyError("Content hash not available") | ||
701 | 443 | entries = yield self._get_dir_entries(share_uuid, root_uuid) | ||
702 | 444 | match_name = name.decode('utf-8') | ||
703 | 445 | match = None | ||
704 | 446 | for entry in entries: | ||
705 | 447 | if match_name == entry.name: | ||
706 | 448 | match = entry | ||
707 | 449 | break | ||
708 | 450 | |||
709 | 451 | if match is None: | ||
710 | 452 | raise KeyError("Path not found") | ||
711 | 453 | |||
712 | 454 | node_uuid = uuid.UUID(match.node) | ||
713 | 455 | |||
714 | 456 | returnValue(node_uuid) | ||
715 | 457 | |||
716 | 458 | return self.defer_from_thread(_resolve_worker) | ||
717 | 459 | |||
718 | 460 | @log_timing | ||
719 | 461 | def find_volume(self, volume_spec): | ||
720 | 462 | """Finds a share matching the given UUID. Looks at both share UUIDs | ||
721 | 463 | and root node UUIDs.""" | ||
722 | 464 | |||
723 | 465 | def match(s): | ||
724 | 466 | return (str(s.volume_id) == volume_spec or | ||
725 | 467 | str(s.node_id) == volume_spec) | ||
726 | 468 | |||
727 | 469 | volume = self._match_volume(match) | ||
728 | 470 | return uuid.UUID(str(volume.volume_id)) | ||
729 | 471 | |||
730 | 472 | @log_timing | ||
731 | 473 | def _match_volume(self, predicate): | ||
732 | 474 | """Finds a volume matching the given predicate.""" | ||
733 | 475 | _list_shares = self.factory.current_protocol.list_volumes | ||
734 | 476 | r = self.defer_from_thread(_list_shares) | ||
735 | 477 | for volume in r.volumes: | ||
736 | 478 | if predicate(volume): | ||
737 | 479 | return volume | ||
738 | 480 | raise NoSuchShareError() | ||
739 | 481 | |||
740 | 482 | @log_timing | ||
741 | 483 | def build_tree(self, share_uuid, root_uuid): | ||
742 | 484 | """Builds and returns a tree representing the metadata for the given | ||
743 | 485 | subtree in the given share. | ||
744 | 486 | |||
745 | 487 | @param share_uuid: the share UUID or None for the user's volume | ||
746 | 488 | @param root_uuid: the root UUID of the subtree (must be a directory) | ||
747 | 489 | @return: a MergeNode tree | ||
748 | 490 | |||
749 | 491 | """ | ||
750 | 492 | root = MergeNode(node_type=DIRECTORY, uuid=root_uuid) | ||
751 | 493 | |||
752 | 494 | @log_timing | ||
753 | 495 | @inlineCallbacks | ||
754 | 496 | def _get_root_content_hash(): | ||
755 | 497 | """Obtain the content hash for the root node.""" | ||
756 | 498 | result = yield self._get_node_hashes(share_uuid) | ||
757 | 499 | returnValue(result.get(root_uuid, None)) | ||
758 | 500 | |||
759 | 501 | root.content_hash = self.defer_from_thread(_get_root_content_hash) | ||
760 | 502 | if root.content_hash is None: | ||
761 | 503 | raise ValueError("No content available for node %s" % root_uuid) | ||
762 | 504 | |||
763 | 505 | @log_timing | ||
764 | 506 | @inlineCallbacks | ||
765 | 507 | def _get_children(parent_uuid, parent_content_hash): | ||
766 | 508 | """Obtain a sequence of MergeNodes corresponding to a node's | ||
767 | 509 | immediate children. | ||
768 | 510 | |||
769 | 511 | """ | ||
770 | 512 | entries = yield self._get_dir_entries(share_uuid, parent_uuid) | ||
771 | 513 | children = {} | ||
772 | 514 | for entry in entries: | ||
773 | 515 | if should_sync(entry.name): | ||
774 | 516 | child = MergeNode(node_type=entry.node_type, | ||
775 | 517 | uuid=uuid.UUID(entry.node)) | ||
776 | 518 | children[entry.name] = child | ||
777 | 519 | |||
778 | 520 | content_hashes = yield self._get_node_hashes(share_uuid) | ||
779 | 521 | for child in children.itervalues(): | ||
780 | 522 | child.content_hash = content_hashes.get(child.uuid, None) | ||
781 | 523 | |||
782 | 524 | returnValue(children) | ||
783 | 525 | |||
784 | 526 | need_children = [root] | ||
785 | 527 | while need_children: | ||
786 | 528 | node = need_children.pop() | ||
787 | 529 | if node.content_hash is not None: | ||
788 | 530 | children = self.defer_from_thread(_get_children, node.uuid, | ||
789 | 531 | node.content_hash) | ||
790 | 532 | node.children = children | ||
791 | 533 | for child in children.itervalues(): | ||
792 | 534 | if child.node_type == DIRECTORY: | ||
793 | 535 | need_children.append(child) | ||
794 | 536 | |||
795 | 537 | return root | ||
796 | 538 | |||
797 | 539 | @log_timing | ||
798 | 540 | @defer.inlineCallbacks | ||
799 | 541 | def _get_dir_entries(self, share_uuid, node_uuid): | ||
800 | 542 | """Get raw dir entries for the given directory.""" | ||
801 | 543 | result = yield self.factory.current_protocol.get_delta( | ||
802 | 544 | share_str(share_uuid), from_scratch=True) | ||
803 | 545 | node_uuid = share_str(node_uuid) | ||
804 | 546 | children = [] | ||
805 | 547 | for n in result.response: | ||
806 | 548 | if n.parent_id == node_uuid: | ||
807 | 549 | # adapt here some attrs so we don't need to change ALL the code | ||
808 | 550 | n.node_type = DIRECTORY if n.file_type == delta_DIR else FILE | ||
809 | 551 | n.node = n.node_id | ||
810 | 552 | children.append(n) | ||
811 | 553 | defer.returnValue(children) | ||
812 | 554 | |||
813 | 555 | @log_timing | ||
814 | 556 | def download_string(self, share_uuid, node_uuid, content_hash): | ||
815 | 557 | """Reads a file from the server into a string.""" | ||
816 | 558 | output = StringIO() | ||
817 | 559 | self._download_inner(share_uuid=share_uuid, node_uuid=node_uuid, | ||
818 | 560 | content_hash=content_hash, output=output) | ||
819 | 561 | return output.getValue() | ||
820 | 562 | |||
821 | 563 | @log_timing | ||
822 | 564 | def download_file(self, share_uuid, node_uuid, content_hash, filename): | ||
823 | 565 | """Downloads a file from the server.""" | ||
824 | 566 | partial_filename = "%s.u1partial" % filename | ||
825 | 567 | output = open(partial_filename, "w") | ||
826 | 568 | |||
827 | 569 | @log_timing | ||
828 | 570 | def rename_file(): | ||
829 | 571 | """Renames the temporary file to the final name.""" | ||
830 | 572 | output.close() | ||
831 | 573 | os.rename(partial_filename, filename) | ||
832 | 574 | |||
833 | 575 | @log_timing | ||
834 | 576 | def delete_file(): | ||
835 | 577 | """Deletes the temporary file.""" | ||
836 | 578 | output.close() | ||
837 | 579 | os.remove(partial_filename) | ||
838 | 580 | |||
839 | 581 | self._download_inner(share_uuid=share_uuid, node_uuid=node_uuid, | ||
840 | 582 | content_hash=content_hash, output=output, | ||
841 | 583 | on_success=rename_file, on_failure=delete_file) | ||
842 | 584 | |||
843 | 585 | @log_timing | ||
844 | 586 | def _download_inner(self, share_uuid, node_uuid, content_hash, output, | ||
845 | 587 | on_success=lambda: None, on_failure=lambda: None): | ||
846 | 588 | """Helper function for content downloads.""" | ||
847 | 589 | dec = zlib.decompressobj() | ||
848 | 590 | |||
849 | 591 | @log_timing | ||
850 | 592 | def write_data(data): | ||
851 | 593 | """Helper which writes data to the output file.""" | ||
852 | 594 | uncompressed_data = dec.decompress(data) | ||
853 | 595 | output.write(uncompressed_data) | ||
854 | 596 | |||
855 | 597 | @log_timing | ||
856 | 598 | def finish_download(value): | ||
857 | 599 | """Helper which finishes the download.""" | ||
858 | 600 | uncompressed_data = dec.flush() | ||
859 | 601 | output.write(uncompressed_data) | ||
860 | 602 | on_success() | ||
861 | 603 | return value | ||
862 | 604 | |||
863 | 605 | @log_timing | ||
864 | 606 | def abort_download(value): | ||
865 | 607 | """Helper which aborts the download.""" | ||
866 | 608 | on_failure() | ||
867 | 609 | return value | ||
868 | 610 | |||
869 | 611 | @log_timing | ||
870 | 612 | def _download(): | ||
871 | 613 | """Async helper.""" | ||
872 | 614 | _get_content = self.factory.current_protocol.get_content | ||
873 | 615 | d = _get_content(share_str(share_uuid), str(node_uuid), | ||
874 | 616 | content_hash, callback=write_data) | ||
875 | 617 | d.addCallbacks(finish_download, abort_download) | ||
876 | 618 | return d | ||
877 | 619 | |||
878 | 620 | self.defer_from_thread(_download) | ||
879 | 621 | |||
880 | 622 | @log_timing | ||
881 | 623 | def create_directory(self, share_uuid, parent_uuid, name): | ||
882 | 624 | """Creates a directory on the server.""" | ||
883 | 625 | r = self.defer_from_thread(self.factory.current_protocol.make_dir, | ||
884 | 626 | share_str(share_uuid), str(parent_uuid), | ||
885 | 627 | name) | ||
886 | 628 | return uuid.UUID(r.new_id) | ||
887 | 629 | |||
888 | 630 | @log_timing | ||
889 | 631 | def create_file(self, share_uuid, parent_uuid, name): | ||
890 | 632 | """Creates a file on the server.""" | ||
891 | 633 | r = self.defer_from_thread(self.factory.current_protocol.make_file, | ||
892 | 634 | share_str(share_uuid), str(parent_uuid), | ||
893 | 635 | name) | ||
894 | 636 | return uuid.UUID(r.new_id) | ||
895 | 637 | |||
896 | 638 | @log_timing | ||
897 | 639 | def create_symlink(self, share_uuid, parent_uuid, name, target): | ||
898 | 640 | """Creates a symlink on the server.""" | ||
899 | 641 | raise UnsupportedOperationError("Protocol does not support symlinks") | ||
900 | 642 | |||
901 | 643 | @log_timing | ||
902 | 644 | def upload_string(self, share_uuid, node_uuid, old_content_hash, | ||
903 | 645 | content_hash, content): | ||
904 | 646 | """Uploads a string to the server as file content.""" | ||
905 | 647 | crc = crc32(content, 0) | ||
906 | 648 | compressed_content = zlib.compress(content, 9) | ||
907 | 649 | compressed = StringIO(compressed_content) | ||
908 | 650 | self.defer_from_thread(self.factory.current_protocol.put_content, | ||
909 | 651 | share_str(share_uuid), str(node_uuid), | ||
910 | 652 | old_content_hash, content_hash, | ||
911 | 653 | crc, len(content), len(compressed_content), | ||
912 | 654 | compressed) | ||
913 | 655 | |||
914 | 656 | @log_timing | ||
915 | 657 | def upload_file(self, share_uuid, node_uuid, old_content_hash, | ||
916 | 658 | content_hash, filename): | ||
917 | 659 | """Uploads a file to the server.""" | ||
918 | 660 | parent_dir = os.path.split(filename)[0] | ||
919 | 661 | unique_filename = os.path.join(parent_dir, "." + str(uuid.uuid4())) | ||
920 | 662 | |||
921 | 663 | class StagingFile(object): | ||
922 | 664 | """An object which tracks data being compressed for staging.""" | ||
923 | 665 | def __init__(self, stream): | ||
924 | 666 | """Initialize a compression object.""" | ||
925 | 667 | self.crc32 = 0 | ||
926 | 668 | self.enc = zlib.compressobj(9) | ||
927 | 669 | self.size = 0 | ||
928 | 670 | self.compressed_size = 0 | ||
929 | 671 | self.stream = stream | ||
930 | 672 | |||
931 | 673 | def write(self, bytes): | ||
932 | 674 | """Compress bytes, keeping track of length and crc32.""" | ||
933 | 675 | self.size += len(bytes) | ||
934 | 676 | self.crc32 = crc32(bytes, self.crc32) | ||
935 | 677 | compressed_bytes = self.enc.compress(bytes) | ||
936 | 678 | self.compressed_size += len(compressed_bytes) | ||
937 | 679 | self.stream.write(compressed_bytes) | ||
938 | 680 | |||
939 | 681 | def finish(self): | ||
940 | 682 | """Finish staging compressed data.""" | ||
941 | 683 | compressed_bytes = self.enc.flush() | ||
942 | 684 | self.compressed_size += len(compressed_bytes) | ||
943 | 685 | self.stream.write(compressed_bytes) | ||
944 | 686 | |||
945 | 687 | with open(unique_filename, "w+") as compressed: | ||
946 | 688 | os.remove(unique_filename) | ||
947 | 689 | with open(filename, "r") as original: | ||
948 | 690 | staging = StagingFile(compressed) | ||
949 | 691 | shutil.copyfileobj(original, staging) | ||
950 | 692 | staging.finish() | ||
951 | 693 | compressed.seek(0) | ||
952 | 694 | self.defer_from_thread(self.factory.current_protocol.put_content, | ||
953 | 695 | share_str(share_uuid), str(node_uuid), | ||
954 | 696 | old_content_hash, content_hash, | ||
955 | 697 | staging.crc32, | ||
956 | 698 | staging.size, staging.compressed_size, | ||
957 | 699 | compressed) | ||
958 | 700 | |||
959 | 701 | @log_timing | ||
960 | 702 | def move(self, share_uuid, parent_uuid, name, node_uuid): | ||
961 | 703 | """Moves a file on the server.""" | ||
962 | 704 | self.defer_from_thread(self.factory.current_protocol.move, | ||
963 | 705 | share_str(share_uuid), str(node_uuid), | ||
964 | 706 | str(parent_uuid), name) | ||
965 | 707 | |||
966 | 708 | @log_timing | ||
967 | 709 | def unlink(self, share_uuid, node_uuid): | ||
968 | 710 | """Unlinks a file on the server.""" | ||
969 | 711 | self.defer_from_thread(self.factory.current_protocol.unlink, | ||
970 | 712 | share_str(share_uuid), str(node_uuid)) | ||
971 | 713 | |||
972 | 714 | @log_timing | ||
973 | 715 | @defer.inlineCallbacks | ||
974 | 716 | def _get_node_hashes(self, share_uuid): | ||
975 | 717 | """Fetches hashes for the given nodes.""" | ||
976 | 718 | result = yield self.factory.current_protocol.get_delta( | ||
977 | 719 | share_str(share_uuid), from_scratch=True) | ||
978 | 720 | hashes = {} | ||
979 | 721 | for fid in result.response: | ||
980 | 722 | node_uuid = uuid.UUID(fid.node_id) | ||
981 | 723 | hashes[node_uuid] = fid.content_hash | ||
982 | 724 | defer.returnValue(hashes) | ||
983 | 725 | |||
984 | 726 | @log_timing | ||
985 | 727 | def get_incoming_shares(self): | ||
986 | 728 | """Returns a list of incoming shares as (name, uuid, accepted) | ||
987 | 729 | tuples. | ||
988 | 730 | |||
989 | 731 | """ | ||
990 | 732 | _list_shares = self.factory.current_protocol.list_shares | ||
991 | 733 | r = self.defer_from_thread(_list_shares) | ||
992 | 734 | return [(s.name, s.id, s.other_visible_name, | ||
993 | 735 | s.accepted, s.access_level) | ||
994 | 736 | for s in r.shares if s.direction == "to_me"] | ||
995 | 0 | 737 | ||
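The Client class added above is a blocking facade over the Twisted storage protocol; the integration tests (see the test_sync.py hunk earlier) drive it roughly as in this sketch, where the port and credentials are illustrative placeholders:

    from magicicada.u1sync.client import Client

    client = Client()
    client.connect_ssl("localhost", 21101, True)      # no_verify=True; port is a placeholder
    client.set_capabilities()
    client.simple_auth("jack", "secret")              # hypothetical credentials
    root_uuid, writable = client.get_root_info(None)  # None = the user's own volume
    tree = client.build_tree(None, root_uuid)         # MergeNode tree of server metadata
    client.disconnect()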
996 | === added file 'magicicada/u1sync/constants.py' | |||
997 | --- magicicada/u1sync/constants.py 1970-01-01 00:00:00 +0000 | |||
998 | +++ magicicada/u1sync/constants.py 2018-04-20 01:22:48 +0000 | |||
999 | @@ -0,0 +1,26 @@ | |||
1000 | 1 | # Copyright 2009 Canonical Ltd. | ||
1001 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
1002 | 3 | # | ||
1003 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
1004 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
1005 | 6 | # by the Free Software Foundation. | ||
1006 | 7 | # | ||
1007 | 8 | # This program is distributed in the hope that it will be useful, but | ||
1008 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
1009 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
1010 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
1011 | 12 | # | ||
1012 | 13 | # You should have received a copy of the GNU General Public License along | ||
1013 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
1014 | 15 | |||
1015 | 16 | """Assorted constant definitions which don't fit anywhere else.""" | ||
1016 | 17 | |||
1017 | 18 | import re | ||
1018 | 19 | |||
1019 | 20 | # the name of the directory u1sync uses to keep metadata about a mirror | ||
1020 | 21 | METADATA_DIR_NAME = u".ubuntuone-sync" | ||
1021 | 22 | |||
1022 | 23 | # filenames to ignore | ||
1023 | 24 | SPECIAL_FILE_RE = re.compile(".*\\.(" | ||
1024 | 25 | "(u1)?partial|part|" | ||
1025 | 26 | "(u1)?conflict(\\.[0-9]+)?)$") | ||
1026 | 0 | 27 | ||
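SPECIAL_FILE_RE above is what keeps u1sync's own working files out of a sync; for instance (matches inferred from the pattern itself):

    from magicicada.u1sync.constants import SPECIAL_FILE_RE

    SPECIAL_FILE_RE.match("photo.jpg.u1partial")   # matched -> ignored
    SPECIAL_FILE_RE.match("notes.txt.conflict.2")  # matched -> ignored
    SPECIAL_FILE_RE.match("notes.txt")             # None -> synced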
1027 | === added file 'magicicada/u1sync/genericmerge.py' | |||
1028 | --- magicicada/u1sync/genericmerge.py 1970-01-01 00:00:00 +0000 | |||
1029 | +++ magicicada/u1sync/genericmerge.py 2018-04-20 01:22:48 +0000 | |||
1030 | @@ -0,0 +1,86 @@ | |||
1031 | 1 | # Copyright 2009 Canonical Ltd. | ||
1032 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
1033 | 3 | # | ||
1034 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
1035 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
1036 | 6 | # by the Free Software Foundation. | ||
1037 | 7 | # | ||
1038 | 8 | # This program is distributed in the hope that it will be useful, but | ||
1039 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
1040 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
1041 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
1042 | 12 | # | ||
1043 | 13 | # You should have received a copy of the GNU General Public License along | ||
1044 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
1045 | 15 | |||
1046 | 16 | """A generic abstraction for merge operations on directory trees.""" | ||
1047 | 17 | |||
1048 | 18 | from itertools import chain | ||
1049 | 19 | |||
1050 | 20 | from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY | ||
1051 | 21 | |||
1052 | 22 | |||
1053 | 23 | class MergeNode(object): | ||
1054 | 24 | """A filesystem node. Should generally be treated as immutable.""" | ||
1055 | 25 | def __init__(self, node_type, content_hash=None, uuid=None, children=None, | ||
1056 | 26 | conflict_info=None): | ||
1057 | 27 | """Initializes a node instance.""" | ||
1058 | 28 | self.node_type = node_type | ||
1059 | 29 | self.children = children | ||
1060 | 30 | self.uuid = uuid | ||
1061 | 31 | self.content_hash = content_hash | ||
1062 | 32 | self.conflict_info = conflict_info | ||
1063 | 33 | |||
1064 | 34 | def __eq__(self, other): | ||
1065 | 35 | """Equality test.""" | ||
1066 | 36 | if type(other) is not type(self): | ||
1067 | 37 | return False | ||
1068 | 38 | return (self.node_type == other.node_type and | ||
1069 | 39 | self.children == other.children and | ||
1070 | 40 | self.uuid == other.uuid and | ||
1071 | 41 | self.content_hash == other.content_hash and | ||
1072 | 42 | self.conflict_info == other.conflict_info) | ||
1073 | 43 | |||
1074 | 44 | def __ne__(self, other): | ||
1075 | 45 | """Non-equality test.""" | ||
1076 | 46 | return not self.__eq__(other) | ||
1077 | 47 | |||
1078 | 48 | |||
1079 | 49 | def show_tree(tree, indent="", name="/"): | ||
1080 | 50 | """Prints a tree.""" | ||
1081 | 51 | if tree.node_type == DIRECTORY: | ||
1082 | 52 | type_str = "DIR " | ||
1083 | 53 | else: | ||
1084 | 54 | type_str = "FILE" | ||
1085 | 55 | print "%s%-36s %s %s %s" % (indent, tree.uuid, type_str, name, | ||
1086 | 56 | tree.content_hash) | ||
1087 | 57 | if tree.node_type == DIRECTORY and tree.children is not None: | ||
1088 | 58 | for name in sorted(tree.children.keys()): | ||
1089 | 59 | subtree = tree.children[name] | ||
1090 | 60 | show_tree(subtree, indent=" " + indent, name=name) | ||
1091 | 61 | |||
1092 | 62 | |||
1093 | 63 | def generic_merge(trees, pre_merge, post_merge, partial_parent, name): | ||
1094 | 64 | """Generic tree merging function.""" | ||
1095 | 65 | |||
1096 | 66 | partial_result = pre_merge(nodes=trees, name=name, | ||
1097 | 67 | partial_parent=partial_parent) | ||
1098 | 68 | |||
1099 | 69 | def tree_children(tree): | ||
1100 | 70 | """Returns children if tree is not None""" | ||
1101 | 71 | return tree.children if tree is not None else None | ||
1102 | 72 | |||
1103 | 73 | child_dicts = [tree_children(t) or {} for t in trees] | ||
1104 | 74 | child_names = set(chain(*[cs.iterkeys() for cs in child_dicts])) | ||
1105 | 75 | child_results = {} | ||
1106 | 76 | for child_name in child_names: | ||
1107 | 77 | subtrees = [cs.get(child_name, None) for cs in child_dicts] | ||
1108 | 78 | child_result = generic_merge(trees=subtrees, | ||
1109 | 79 | pre_merge=pre_merge, | ||
1110 | 80 | post_merge=post_merge, | ||
1111 | 81 | partial_parent=partial_result, | ||
1112 | 82 | name=child_name) | ||
1113 | 83 | child_results[child_name] = child_result | ||
1114 | 84 | |||
1115 | 85 | return post_merge(nodes=trees, partial_result=partial_result, | ||
1116 | 86 | child_results=child_results) | ||
1117 | 0 | 87 | ||
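generic_merge above recurses over several trees in lockstep, calling pre_merge on the way down and post_merge on the way up; a toy sketch in which the counting callbacks and the two input trees are assumptions for illustration (e.g. trees produced by Client.build_tree or scan.scan_directory):

    from magicicada.u1sync.genericmerge import generic_merge

    def pre_merge(nodes, name, partial_parent):
        # top-down phase: track the depth of the current row of nodes
        return 0 if partial_parent is None else partial_parent + 1

    def post_merge(nodes, partial_result, child_results):
        # bottom-up phase: count nodes present in any tree at this position
        present = len([n for n in nodes if n is not None])
        return present + sum(child_results.values())

    total = generic_merge(trees=[local_tree, remote_tree],
                          pre_merge=pre_merge, post_merge=post_merge,
                          partial_parent=None, name=u"/")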
1118 | === added file 'magicicada/u1sync/main.py' | |||
1119 | --- magicicada/u1sync/main.py 1970-01-01 00:00:00 +0000 | |||
1120 | +++ magicicada/u1sync/main.py 2018-04-20 01:22:48 +0000 | |||
1121 | @@ -0,0 +1,443 @@ | |||
1122 | 1 | # Copyright 2009 Canonical Ltd. | ||
1123 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
1124 | 3 | # | ||
1125 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
1126 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
1127 | 6 | # by the Free Software Foundation. | ||
1128 | 7 | # | ||
1129 | 8 | # This program is distributed in the hope that it will be useful, but | ||
1130 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
1131 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
1132 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
1133 | 12 | # | ||
1134 | 13 | # You should have received a copy of the GNU General Public License along | ||
1135 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
1136 | 15 | |||
1137 | 16 | """A prototype directory sync client.""" | ||
1138 | 17 | |||
1139 | 18 | from __future__ import with_statement | ||
1140 | 19 | |||
1141 | 20 | import signal | ||
1142 | 21 | import os | ||
1143 | 22 | import sys | ||
1144 | 23 | import uuid | ||
1145 | 24 | |||
1146 | 25 | from errno import EEXIST | ||
1147 | 26 | from optparse import OptionParser, SUPPRESS_HELP | ||
1148 | 27 | from Queue import Queue | ||
1149 | 28 | |||
1150 | 29 | import gobject | ||
1151 | 30 | |||
1152 | 31 | import ubuntuone.storageprotocol.dircontent_pb2 as dircontent_pb2 | ||
1153 | 32 | from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, SYMLINK | ||
1154 | 33 | from twisted.internet import reactor | ||
1155 | 34 | |||
1156 | 35 | from magicicada.u1sync import metadata | ||
1157 | 36 | from magicicada.u1sync.client import ( | ||
1158 | 37 | ConnectionError, AuthenticationError, NoSuchShareError, | ||
1159 | 38 | ForcedShutdown, Client) | ||
1160 | 39 | from magicicada.u1sync.constants import METADATA_DIR_NAME | ||
1161 | 40 | from magicicada.u1sync.genericmerge import show_tree, generic_merge | ||
1162 | 41 | from magicicada.u1sync.merge import ( | ||
1163 | 42 | SyncMerge, ClobberServerMerge, ClobberLocalMerge, merge_trees) | ||
1164 | 43 | from magicicada.u1sync.scan import scan_directory | ||
1165 | 44 | from magicicada.u1sync.sync import download_tree, upload_tree | ||
1166 | 45 | from magicicada.u1sync.utils import safe_mkdir | ||
1167 | 46 | |||
1168 | 47 | |||
1169 | 48 | gobject.set_application_name('u1sync') | ||
1170 | 49 | |||
1171 | 50 | DEFAULT_MERGE_ACTION = 'auto' | ||
1172 | 51 | MERGE_ACTIONS = { | ||
1173 | 52 | # action: (merge_class, should_upload, should_download) | ||
1174 | 53 | 'sync': (SyncMerge, True, True), | ||
1175 | 54 | 'clobber-server': (ClobberServerMerge, True, False), | ||
1176 | 55 | 'clobber-local': (ClobberLocalMerge, False, True), | ||
1177 | 56 | 'upload': (SyncMerge, True, False), | ||
1178 | 57 | 'download': (SyncMerge, False, True), | ||
1179 | 58 | 'auto': None # special case | ||
1180 | 59 | } | ||
1181 | 60 | NODE_TYPE_ENUM = dircontent_pb2._NODETYPE | ||
1182 | 61 | |||
1183 | 62 | |||
1184 | 63 | def node_type_str(node_type): | ||
1185 | 64 | """Converts a numeric node type to a human-readable string.""" | ||
1186 | 65 | return NODE_TYPE_ENUM.values_by_number[node_type].name | ||
1187 | 66 | |||
1188 | 67 | |||
1189 | 68 | class ReadOnlyShareError(Exception): | ||
1190 | 69 | """Share is read-only.""" | ||
1191 | 70 | |||
1192 | 71 | |||
1193 | 72 | class DirectoryAlreadyInitializedError(Exception): | ||
1194 | 73 | """The directory has already been initialized.""" | ||
1195 | 74 | |||
1196 | 75 | |||
1197 | 76 | class DirectoryNotInitializedError(Exception): | ||
1198 | 77 | """The directory has not been initialized.""" | ||
1199 | 78 | |||
1200 | 79 | |||
1201 | 80 | class NoParentError(Exception): | ||
1202 | 81 | """A node has no parent.""" | ||
1203 | 82 | |||
1204 | 83 | |||
1205 | 84 | class TreesDiffer(Exception): | ||
1206 | 85 | """Raised when the compared trees differ.""" | ||
1207 | 86 | def __init__(self, quiet): | ||
1208 | 87 | self.quiet = quiet | ||
1209 | 88 | |||
1210 | 89 | |||
1211 | 90 | def do_init(client, share_spec, directory, quiet, subtree_path, | ||
1212 | 91 | metadata=metadata): | ||
1213 | 92 | """Initializes a directory for syncing, and syncs it.""" | ||
1214 | 93 | info = metadata.Metadata() | ||
1215 | 94 | |||
1216 | 95 | if share_spec is not None: | ||
1217 | 96 | info.share_uuid = client.find_volume(share_spec) | ||
1218 | 97 | else: | ||
1219 | 98 | info.share_uuid = None | ||
1220 | 99 | |||
1221 | 100 | if subtree_path is not None: | ||
1222 | 101 | info.path = subtree_path | ||
1223 | 102 | else: | ||
1224 | 103 | info.path = "/" | ||
1225 | 104 | |||
1226 | 105 | if not quiet: | ||
1227 | 106 | print "\nInitializing directory..." | ||
1228 | 107 | safe_mkdir(directory) | ||
1229 | 108 | |||
1230 | 109 | metadata_dir = os.path.join(directory, METADATA_DIR_NAME) | ||
1231 | 110 | try: | ||
1232 | 111 | os.mkdir(metadata_dir) | ||
1233 | 112 | except OSError as e: | ||
1234 | 113 | if e.errno == EEXIST: | ||
1235 | 114 | raise DirectoryAlreadyInitializedError(directory) | ||
1236 | 115 | else: | ||
1237 | 116 | raise | ||
1238 | 117 | |||
1239 | 118 | if not quiet: | ||
1240 | 119 | print "\nWriting mirror metadata..." | ||
1241 | 120 | metadata.write(metadata_dir, info) | ||
1242 | 121 | |||
1243 | 122 | if not quiet: | ||
1244 | 123 | print "\nDone." | ||
1245 | 124 | |||
1246 | 125 | |||
1247 | 126 | def do_sync(client, directory, action, dry_run, quiet): | ||
1248 | 127 | """Synchronizes a directory with the given share.""" | ||
1249 | 128 | absolute_path = os.path.abspath(directory) | ||
1250 | 129 | while True: | ||
1251 | 130 | metadata_dir = os.path.join(absolute_path, METADATA_DIR_NAME) | ||
1252 | 131 | if os.path.exists(metadata_dir): | ||
1253 | 132 | break | ||
1254 | 133 | if absolute_path == "/": | ||
1255 | 134 | raise DirectoryNotInitializedError(directory) | ||
1256 | 135 | absolute_path = os.path.split(absolute_path)[0] | ||
1257 | 136 | |||
1258 | 137 | if not quiet: | ||
1259 | 138 | print "\nReading mirror metadata..." | ||
1260 | 139 | info = metadata.read(metadata_dir) | ||
1261 | 140 | |||
1262 | 141 | top_uuid, writable = client.get_root_info(info.share_uuid) | ||
1263 | 142 | |||
1264 | 143 | if info.root_uuid is None: | ||
1265 | 144 | info.root_uuid = client.resolve_path(info.share_uuid, top_uuid, | ||
1266 | 145 | info.path) | ||
1267 | 146 | |||
1268 | 147 | if action == 'auto': | ||
1269 | 148 | if writable: | ||
1270 | 149 | action = 'sync' | ||
1271 | 150 | else: | ||
1272 | 151 | action = 'download' | ||
1273 | 152 | merge_type, should_upload, should_download = MERGE_ACTIONS[action] | ||
1274 | 153 | if should_upload and not writable: | ||
1275 | 154 | raise ReadOnlyShareError(info.share_uuid) | ||
1276 | 155 | |||
1277 | 156 | if not quiet: | ||
1278 | 157 | print "\nScanning directory..." | ||
1279 | 158 | local_tree = scan_directory(absolute_path, quiet=quiet) | ||
1280 | 159 | |||
1281 | 160 | if not quiet: | ||
1282 | 161 | print "\nFetching metadata..." | ||
1283 | 162 | remote_tree = client.build_tree(info.share_uuid, info.root_uuid) | ||
1284 | 163 | if not quiet: | ||
1285 | 164 | show_tree(remote_tree) | ||
1286 | 165 | |||
1287 | 166 | if not quiet: | ||
1288 | 167 | print "\nMerging trees..." | ||
1289 | 168 | merged_tree = merge_trees(old_local_tree=info.local_tree, | ||
1290 | 169 | local_tree=local_tree, | ||
1291 | 170 | old_remote_tree=info.remote_tree, | ||
1292 | 171 | remote_tree=remote_tree, | ||
1293 | 172 | merge_action=merge_type()) | ||
1294 | 173 | if not quiet: | ||
1295 | 174 | show_tree(merged_tree) | ||
1296 | 175 | |||
1297 | 176 | if not quiet: | ||
1298 | 177 | print "\nSyncing content..." | ||
1299 | 178 | if should_download: | ||
1300 | 179 | info.local_tree = download_tree(merged_tree=merged_tree, | ||
1301 | 180 | local_tree=local_tree, | ||
1302 | 181 | client=client, | ||
1303 | 182 | share_uuid=info.share_uuid, | ||
1304 | 183 | path=absolute_path, dry_run=dry_run, | ||
1305 | 184 | quiet=quiet) | ||
1306 | 185 | else: | ||
1307 | 186 | info.local_tree = local_tree | ||
1308 | 187 | if should_upload: | ||
1309 | 188 | info.remote_tree = upload_tree(merged_tree=merged_tree, | ||
1310 | 189 | remote_tree=remote_tree, | ||
1311 | 190 | client=client, | ||
1312 | 191 | share_uuid=info.share_uuid, | ||
1313 | 192 | path=absolute_path, dry_run=dry_run, | ||
1314 | 193 | quiet=quiet) | ||
1315 | 194 | else: | ||
1316 | 195 | info.remote_tree = remote_tree | ||
1317 | 196 | |||
1318 | 197 | if not dry_run: | ||
1319 | 198 | if not quiet: | ||
1320 | 199 | print "\nUpdating mirror metadata..." | ||
1321 | 200 | metadata.write(metadata_dir, info) | ||
1322 | 201 | |||
1323 | 202 | if not quiet: | ||
1324 | 203 | print "\nDone." | ||
1325 | 204 | |||
1326 | 205 | |||
1327 | 206 | def do_list_shares(client): | ||
1328 | 207 | """Lists available (incoming) shares.""" | ||
1329 | 208 | shares = client.get_incoming_shares() | ||
1330 | 209 | for (name, share_id, user, accepted, access) in shares: | ||
1331 | 210 | if not accepted: | ||
1332 | 211 | status = " [not accepted]" | ||
1333 | 212 | else: | ||
1334 | 213 | status = "" | ||
1335 | 214 | name = name.encode("utf-8") | ||
1336 | 215 | user = user.encode("utf-8") | ||
1337 | 216 | print "%s %s (from %s) [%s]%s" % (share_id, name, user, access, status) | ||
1338 | 217 | |||
1339 | 218 | |||
1340 | 219 | def do_diff(client, share_spec, directory, quiet, subtree_path, | ||
1341 | 220 | ignore_symlinks=True): | ||
1342 | 221 | """Diffs a local directory with the server.""" | ||
1343 | 222 | if share_spec is not None: | ||
1344 | 223 | share_uuid = client.find_volume(share_spec) | ||
1345 | 224 | else: | ||
1346 | 225 | share_uuid = None | ||
1347 | 226 | if subtree_path is None: | ||
1348 | 227 | subtree_path = '/' | ||
1349 | 228 | |||
1350 | 229 | root_uuid, writable = client.get_root_info(share_uuid) | ||
1351 | 230 | subtree_uuid = client.resolve_path(share_uuid, root_uuid, subtree_path) | ||
1352 | 231 | local_tree = scan_directory(directory, quiet=True) | ||
1353 | 232 | remote_tree = client.build_tree(share_uuid, subtree_uuid) | ||
1354 | 233 | |||
1355 | 234 | def pre_merge(nodes, name, partial_parent): | ||
1356 | 235 | """Compares nodes and prints differences.""" | ||
1357 | 236 | (local_node, remote_node) = nodes | ||
1358 | 237 | (parent_display_path, parent_differs) = partial_parent | ||
1359 | 238 | display_path = os.path.join(parent_display_path, name.encode("UTF-8")) | ||
1360 | 239 | differs = True | ||
1361 | 240 | if local_node is None: | ||
1362 | 241 | if not quiet: | ||
1363 | 242 | print "%s missing from client" % display_path | ||
1364 | 243 | elif remote_node is None: | ||
1365 | 244 | if ignore_symlinks and local_node.node_type == SYMLINK: | ||
1366 | 245 | differs = False | ||
1367 | 246 | elif not quiet: | ||
1368 | 247 | print "%s missing from server" % display_path | ||
1369 | 248 | elif local_node.node_type != remote_node.node_type: | ||
1370 | 249 | local_type = node_type_str(local_node.node_type) | ||
1371 | 250 | remote_type = node_type_str(remote_node.node_type) | ||
1372 | 251 | if not quiet: | ||
1373 | 252 | print ("%s node types differ (client: %s, server: %s)" % | ||
1374 | 253 | (display_path, local_type, remote_type)) | ||
1375 | 254 | elif (local_node.node_type != DIRECTORY and | ||
1376 | 255 | local_node.content_hash != remote_node.content_hash): | ||
1377 | 256 | local_content = local_node.content_hash | ||
1378 | 257 | remote_content = remote_node.content_hash | ||
1379 | 258 | if not quiet: | ||
1380 | 259 | print ("%s has different content (client: %s, server: %s)" % | ||
1381 | 260 | (display_path, local_content, remote_content)) | ||
1382 | 261 | else: | ||
1383 | 262 | differs = False | ||
1384 | 263 | return (display_path, differs) | ||
1385 | 264 | |||
1386 | 265 | def post_merge(nodes, partial_result, child_results): | ||
1387 | 266 | """Aggregates 'differs' flags.""" | ||
1388 | 267 | (display_path, differs) = partial_result | ||
1389 | 268 | return differs or any(child_results.itervalues()) | ||
1390 | 269 | |||
1391 | 270 | differs = generic_merge(trees=[local_tree, remote_tree], | ||
1392 | 271 | pre_merge=pre_merge, post_merge=post_merge, | ||
1393 | 272 | partial_parent=("", False), name=u"") | ||
1394 | 273 | if differs: | ||
1395 | 274 | raise TreesDiffer(quiet=quiet) | ||
1396 | 275 | |||
1397 | 276 | |||
1398 | 277 | def do_main(argv): | ||
1399 | 278 | """The main user-facing portion of the script.""" | ||
1400 | 279 | usage = ( | ||
1401 | 280 | "Usage: %prog [options] [DIRECTORY]\n" | ||
1402 | 281 | " %prog --list-shares [options]\n" | ||
1403 | 282 | " %prog --init [--share=SHARE_UUID] [options] DIRECTORY\n" | ||
1404 | 283 | " %prog --diff [--share=SHARE_UUID] [options] DIRECTORY") | ||
1405 | 284 | parser = OptionParser(usage=usage) | ||
1406 | 285 | parser.add_option("--port", dest="port", metavar="PORT", | ||
1407 | 286 | default=443, | ||
1408 | 287 | help="The port on which to connect to the server") | ||
1409 | 288 | parser.add_option("--host", dest="host", metavar="HOST", | ||
1410 | 289 | default='fs-1.one.ubuntu.com', | ||
1411 | 290 | help="The server address") | ||
1412 | 291 | |||
1413 | 292 | action_list = ", ".join(sorted(MERGE_ACTIONS.keys())) | ||
1414 | 293 | parser.add_option("--action", dest="action", metavar="ACTION", | ||
1415 | 294 | default=None, | ||
1416 | 295 | help="Select a sync action (%s; default is %s)" % | ||
1417 | 296 | (action_list, DEFAULT_MERGE_ACTION)) | ||
1418 | 297 | parser.add_option("--dry-run", action="store_true", dest="dry_run", | ||
1419 | 298 | default=False, help="Do a dry run without actually " | ||
1420 | 299 | "making changes") | ||
1421 | 300 | parser.add_option("--quiet", action="store_true", dest="quiet", | ||
1422 | 301 | default=False, help="Produces less output") | ||
1423 | 302 | parser.add_option("--list-shares", action="store_const", dest="mode", | ||
1424 | 303 | const="list-shares", default="sync", | ||
1425 | 304 | help="List available shares") | ||
1426 | 305 | parser.add_option("--init", action="store_const", dest="mode", | ||
1427 | 306 | const="init", | ||
1428 | 307 | help="Initialize a local directory for syncing") | ||
1429 | 308 | parser.add_option("--no-ssl-verify", action="store_true", | ||
1430 | 309 | dest="no_ssl_verify", | ||
1431 | 310 | default=False, help=SUPPRESS_HELP) | ||
1432 | 311 | parser.add_option("--diff", action="store_const", dest="mode", | ||
1433 | 312 | const="diff", | ||
1434 | 313 | help="Compare tree on server with local tree " | ||
1435 | 314 | "(does not require previous --init)") | ||
1436 | 315 | parser.add_option("--share", dest="share", metavar="SHARE_UUID", | ||
1437 | 316 | default=None, | ||
1438 | 317 | help="Sync the directory with a share rather than the " | ||
1439 | 318 | "user's own volume") | ||
1440 | 319 | parser.add_option("--subtree", dest="subtree", metavar="PATH", | ||
1441 | 320 | default=None, | ||
1442 | 321 | help="Mirror a subset of the share or volume") | ||
1443 | 322 | |||
1444 | 323 | (options, args) = parser.parse_args(args=list(argv[1:])) | ||
1445 | 324 | |||
1446 | 325 | if options.share is not None and options.mode not in ("init", "diff"): | ||
1447 | 326 | parser.error("--share is only valid with --init or --diff") | ||
1448 | 327 | |||
1449 | 328 | directory = None | ||
1450 | 329 | if options.mode in ("sync", "init", "diff"): | ||
1451 | 330 | if len(args) > 1: | ||
1452 | 331 | parser.error("Too many arguments") | ||
1453 | 332 | elif len(args) < 1: | ||
1454 | 333 | if options.mode == "init" or options.mode == "diff": | ||
1455 | 334 | parser.error("--%s requires a directory to " | ||
1456 | 335 | "be specified" % options.mode) | ||
1457 | 336 | else: | ||
1458 | 337 | directory = "." | ||
1459 | 338 | else: | ||
1460 | 339 | directory = args[0] | ||
1461 | 340 | if options.mode in ("init", "list-shares", "diff"): | ||
1462 | 341 | if options.action is not None: | ||
1463 | 342 | parser.error("--%s does not take the --action parameter" % | ||
1464 | 343 | options.mode) | ||
1465 | 344 | if options.dry_run: | ||
1466 | 345 | parser.error("--%s does not take the --dry-run parameter" % | ||
1467 | 346 | options.mode) | ||
1468 | 347 | if options.mode == "list-shares": | ||
1469 | 348 | if len(args) != 0: | ||
1470 | 349 | parser.error("--list-shares does not take a directory") | ||
1471 | 350 | if options.mode not in ("init", "diff"): | ||
1472 | 351 | if options.subtree is not None: | ||
1473 | 352 | parser.error("--%s does not take the --subtree parameter" % | ||
1474 | 353 | options.mode) | ||
1475 | 354 | |||
1476 | 355 | if options.action is not None and options.action not in MERGE_ACTIONS: | ||
1477 | 356 | parser.error("--action: Unknown action %s" % options.action) | ||
1478 | 357 | |||
1479 | 358 | if options.action is None: | ||
1480 | 359 | options.action = DEFAULT_MERGE_ACTION | ||
1481 | 360 | |||
1482 | 361 | if options.share is not None: | ||
1483 | 362 | try: | ||
1484 | 363 | uuid.UUID(options.share) | ||
1485 | 364 | except ValueError as e: | ||
1486 | 365 | parser.error("Invalid --share argument: %s" % e) | ||
1487 | 366 | share_spec = options.share | ||
1488 | 367 | else: | ||
1489 | 368 | share_spec = None | ||
1490 | 369 | |||
1491 | 370 | client = Client(reactor=reactor) | ||
1492 | 371 | |||
1493 | 372 | signal.signal(signal.SIGINT, lambda s, f: client.force_shutdown()) | ||
1494 | 373 | signal.signal(signal.SIGTERM, lambda s, f: client.force_shutdown()) | ||
1495 | 374 | |||
1496 | 375 | def run_client(): | ||
1497 | 376 | """Run the blocking client.""" | ||
1498 | 377 | client.connect_ssl( | ||
1499 | 378 | options.host, int(options.port), options.no_ssl_verify) | ||
1500 | 379 | |||
1501 | 380 | try: | ||
1502 | 381 | client.set_capabilities() | ||
1503 | 382 | |||
1504 | 383 | if options.mode == "sync": | ||
1505 | 384 | do_sync(client=client, directory=directory, | ||
1506 | 385 | action=options.action, | ||
1507 | 386 | dry_run=options.dry_run, quiet=options.quiet) | ||
1508 | 387 | elif options.mode == "init": | ||
1509 | 388 | do_init(client=client, share_spec=share_spec, | ||
1510 | 389 | directory=directory, | ||
1511 | 390 | quiet=options.quiet, subtree_path=options.subtree) | ||
1512 | 391 | elif options.mode == "list-shares": | ||
1513 | 392 | do_list_shares(client=client) | ||
1514 | 393 | elif options.mode == "diff": | ||
1515 | 394 | do_diff(client=client, share_spec=share_spec, | ||
1516 | 395 | directory=directory, | ||
1517 | 396 | quiet=options.quiet, subtree_path=options.subtree, | ||
1518 | 397 | ignore_symlinks=False) | ||
1519 | 398 | finally: | ||
1520 | 399 | client.disconnect() | ||
1521 | 400 | |||
1522 | 401 | def capture_exception(queue, func): | ||
1523 | 402 | """Capture the exception from calling func.""" | ||
1524 | 403 | try: | ||
1525 | 404 | func() | ||
1526 | 405 | except Exception: | ||
1527 | 406 | queue.put(sys.exc_info()) | ||
1528 | 407 | else: | ||
1529 | 408 | queue.put(None) | ||
1530 | 409 | finally: | ||
1531 | 410 | reactor.callWhenRunning(reactor.stop) | ||
1532 | 411 | |||
1533 | 412 | queue = Queue() | ||
1534 | 413 | reactor.callInThread(capture_exception, queue, run_client) | ||
1535 | 414 | reactor.run(installSignalHandlers=False) | ||
1536 | 415 | exc_info = queue.get(True, 0.1) | ||
1537 | 416 | if exc_info: | ||
1538 | 417 | raise exc_info[0], exc_info[1], exc_info[2] | ||
1539 | 418 | |||
1540 | 419 | |||
1541 | 420 | def main(*argv): | ||
1542 | 421 | """Top-level main function.""" | ||
1543 | 422 | try: | ||
1544 | 423 | do_main(argv=argv) | ||
1545 | 424 | except AuthenticationError as e: | ||
1546 | 425 | print "Authentication failed: %s" % e | ||
1547 | 426 | except ConnectionError as e: | ||
1548 | 427 | print "Connection failed: %s" % e | ||
1549 | 428 | except DirectoryNotInitializedError: | ||
1550 | 429 | print "Directory not initialized; use --init [DIRECTORY] to init it." | ||
1551 | 430 | except DirectoryAlreadyInitializedError: | ||
1552 | 431 | print "Directory already initialized." | ||
1553 | 432 | except NoSuchShareError: | ||
1554 | 433 | print "No matching share found." | ||
1555 | 434 | except ReadOnlyShareError: | ||
1556 | 435 | print "The selected action isn't possible on a read-only share." | ||
1557 | 436 | except (ForcedShutdown, KeyboardInterrupt): | ||
1558 | 437 | print "Interrupted!" | ||
1559 | 438 | except TreesDiffer as e: | ||
1560 | 439 | if not e.quiet: | ||
1561 | 440 | print "Trees differ." | ||
1562 | 441 | else: | ||
1563 | 442 | return 0 | ||
1564 | 443 | return 1 | ||
1565 | 0 | 444 | ||
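The tail of do_main() is the subtlest plumbing in this file: the blocking client runs in the reactor's thread pool while Twisted owns the main thread, and any exception is ferried back through a Queue so it can be re-raised with its original traceback after the reactor stops. Condensed to its essentials (a sketch of the same pattern, not additional diff content):

    import sys
    from Queue import Queue
    from twisted.internet import reactor

    def run_blocking(func):
        """Run func off the reactor thread; re-raise its error here."""
        queue = Queue()

        def capture():
            try:
                func()
                queue.put(None)            # success marker
            except Exception:
                queue.put(sys.exc_info())  # keep the original traceback
            finally:
                reactor.callWhenRunning(reactor.stop)

        reactor.callInThread(capture)
        reactor.run(installSignalHandlers=False)
        exc_info = queue.get(True, 0.1)
        if exc_info:
            raise exc_info[0], exc_info[1], exc_info[2]  # py2 3-arg raise

Note that the signal handlers are wired up manually beforehand so SIGINT and SIGTERM call client.force_shutdown() instead of Twisted's defaults.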
1566 | === added file 'magicicada/u1sync/merge.py' | |||
1567 | --- magicicada/u1sync/merge.py 1970-01-01 00:00:00 +0000 | |||
1568 | +++ magicicada/u1sync/merge.py 2018-04-20 01:22:48 +0000 | |||
1569 | @@ -0,0 +1,181 @@ | |||
1570 | 1 | # Copyright 2009 Canonical Ltd. | ||
1571 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
1572 | 3 | # | ||
1573 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
1574 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
1575 | 6 | # by the Free Software Foundation. | ||
1576 | 7 | # | ||
1577 | 8 | # This program is distributed in the hope that it will be useful, but | ||
1578 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
1579 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
1580 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
1581 | 12 | # | ||
1582 | 13 | # You should have received a copy of the GNU General Public License along | ||
1583 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
1584 | 15 | |||
1585 | 16 | """Code for merging changes between modified trees.""" | ||
1586 | 17 | |||
1587 | 18 | from __future__ import with_statement | ||
1588 | 19 | |||
1589 | 20 | import os | ||
1590 | 21 | import uuid | ||
1591 | 22 | |||
1592 | 23 | from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY | ||
1593 | 24 | |||
1594 | 25 | from magicicada.u1sync.genericmerge import MergeNode, generic_merge | ||
1595 | 26 | |||
1596 | 27 | |||
1597 | 28 | class NodeTypeMismatchError(Exception): | ||
1598 | 29 | """Node types don't match.""" | ||
1599 | 30 | |||
1600 | 31 | |||
1601 | 32 | def merge_trees(old_local_tree, local_tree, old_remote_tree, remote_tree, | ||
1602 | 33 | merge_action): | ||
1603 | 34 | """Performs a tree merge using the given merge action.""" | ||
1604 | 35 | |||
1605 | 36 | def pre_merge(nodes, name, partial_parent): | ||
1606 | 37 | """Accumulates path and determines merged node type.""" | ||
1607 | 38 | old_local_node, local_node, old_remote_node, remote_node = nodes | ||
1608 | 39 | (parent_path, parent_type) = partial_parent | ||
1609 | 40 | path = os.path.join(parent_path, name.encode("utf-8")) | ||
1610 | 41 | node_type = merge_action.get_node_type(old_local_node=old_local_node, | ||
1611 | 42 | local_node=local_node, | ||
1612 | 43 | old_remote_node=old_remote_node, | ||
1613 | 44 | remote_node=remote_node, | ||
1614 | 45 | path=path) | ||
1615 | 46 | return (path, node_type) | ||
1616 | 47 | |||
1617 | 48 | def post_merge(nodes, partial_result, child_results): | ||
1618 | 49 | """Drops deleted children and merges node.""" | ||
1619 | 50 | old_local_node, local_node, old_remote_node, remote_node = nodes | ||
1620 | 51 | (path, node_type) = partial_result | ||
1621 | 52 | if node_type == DIRECTORY: | ||
1622 | 53 | merged_children = dict( | ||
1623 | 54 | (name, child) for (name, child) in child_results.iteritems() | ||
1624 | 55 | if child is not None) | ||
1625 | 56 | else: | ||
1626 | 57 | merged_children = None | ||
1627 | 58 | return merge_action.merge_node(old_local_node=old_local_node, | ||
1628 | 59 | local_node=local_node, | ||
1629 | 60 | old_remote_node=old_remote_node, | ||
1630 | 61 | remote_node=remote_node, | ||
1631 | 62 | node_type=node_type, | ||
1632 | 63 | merged_children=merged_children) | ||
1633 | 64 | |||
1634 | 65 | return generic_merge(trees=[old_local_tree, local_tree, | ||
1635 | 66 | old_remote_tree, remote_tree], | ||
1636 | 67 | pre_merge=pre_merge, post_merge=post_merge, | ||
1637 | 68 | name=u"", partial_parent=("", None)) | ||
1638 | 69 | |||
1639 | 70 | |||
1640 | 71 | class SyncMerge(object): | ||
1641 | 72 | """Performs a bidirectional sync merge.""" | ||
1642 | 73 | |||
1643 | 74 | def get_node_type(self, old_local_node, local_node, | ||
1644 | 75 | old_remote_node, remote_node, path): | ||
1645 | 76 | """Requires that all node types match.""" | ||
1646 | 77 | node_type = None | ||
1647 | 78 | for node in (old_local_node, local_node, remote_node): | ||
1648 | 79 | if node is not None: | ||
1649 | 80 | if node_type is not None: | ||
1650 | 81 | if node.node_type != node_type: | ||
1651 | 82 | message = "Node types don't match for %s" % path | ||
1652 | 83 | raise NodeTypeMismatchError(message) | ||
1653 | 84 | else: | ||
1654 | 85 | node_type = node.node_type | ||
1655 | 86 | return node_type | ||
1656 | 87 | |||
1657 | 88 | def merge_node(self, old_local_node, local_node, | ||
1658 | 89 | old_remote_node, remote_node, node_type, merged_children): | ||
1659 | 90 | """Performs bidirectional merge of node state.""" | ||
1660 | 91 | |||
1661 | 92 | def node_content_hash(node): | ||
1662 | 93 | """Returns node content hash if node is not None.""" | ||
1663 | 94 | return node.content_hash if node is not None else None | ||
1664 | 95 | |||
1665 | 96 | old_local_content_hash = node_content_hash(old_local_node) | ||
1666 | 97 | local_content_hash = node_content_hash(local_node) | ||
1667 | 98 | old_remote_content_hash = node_content_hash(old_remote_node) | ||
1668 | 99 | remote_content_hash = node_content_hash(remote_node) | ||
1669 | 100 | |||
1670 | 101 | locally_deleted = old_local_node is not None and local_node is None | ||
1671 | 102 | deleted_on_server = old_remote_node is not None and remote_node is None | ||
1672 | 103 | # updated means modified or created | ||
1673 | 104 | locally_updated = (not locally_deleted and | ||
1674 | 105 | old_local_content_hash != local_content_hash) | ||
1675 | 106 | updated_on_server = (not deleted_on_server and | ||
1676 | 107 | old_remote_content_hash != remote_content_hash) | ||
1677 | 108 | |||
1678 | 109 | has_merged_children = (merged_children is not None and | ||
1679 | 110 | len(merged_children) > 0) | ||
1680 | 111 | |||
1681 | 112 | either_node_exists = local_node is not None or remote_node is not None | ||
1682 | 113 | should_delete = ( | ||
1683 | 114 | (locally_deleted and not updated_on_server) or | ||
1684 | 115 | (deleted_on_server and not locally_updated)) | ||
1685 | 116 | |||
1686 | 117 | if (either_node_exists and not should_delete) or has_merged_children: | ||
1687 | 118 | if (node_type != DIRECTORY and locally_updated and | ||
1688 | 119 | updated_on_server and | ||
1689 | 120 | local_content_hash != remote_content_hash): | ||
1690 | 121 | # local_content_hash will become the merged content_hash; | ||
1691 | 122 | # save remote_content_hash in conflict info | ||
1692 | 123 | conflict_info = (str(uuid.uuid4()), remote_content_hash) | ||
1693 | 124 | else: | ||
1694 | 125 | conflict_info = None | ||
1695 | 126 | node_uuid = remote_node.uuid if remote_node is not None else None | ||
1696 | 127 | if locally_updated: | ||
1697 | 128 | content_hash = local_content_hash or remote_content_hash | ||
1698 | 129 | else: | ||
1699 | 130 | content_hash = remote_content_hash or local_content_hash | ||
1700 | 131 | return MergeNode( | ||
1701 | 132 | node_type=node_type, uuid=node_uuid, children=merged_children, | ||
1702 | 133 | content_hash=content_hash, conflict_info=conflict_info) | ||
1703 | 134 | else: | ||
1704 | 135 | return None | ||
1705 | 136 | |||
1706 | 137 | |||
1707 | 138 | class ClobberServerMerge(object): | ||
1708 | 139 | """Clobber server to match local state.""" | ||
1709 | 140 | |||
1710 | 141 | def get_node_type(self, old_local_node, local_node, | ||
1711 | 142 | old_remote_node, remote_node, path): | ||
1712 | 143 | """Return local node type.""" | ||
1713 | 144 | if local_node is not None: | ||
1714 | 145 | return local_node.node_type | ||
1715 | 146 | else: | ||
1716 | 147 | return None | ||
1717 | 148 | |||
1718 | 149 | def merge_node(self, old_local_node, local_node, | ||
1719 | 150 | old_remote_node, remote_node, node_type, merged_children): | ||
1720 | 151 | """Copy local node and associate with remote uuid (if applicable).""" | ||
1721 | 152 | if local_node is None: | ||
1722 | 153 | return None | ||
1723 | 154 | if remote_node is not None: | ||
1724 | 155 | node_uuid = remote_node.uuid | ||
1725 | 156 | else: | ||
1726 | 157 | node_uuid = None | ||
1727 | 158 | return MergeNode( | ||
1728 | 159 | node_type=local_node.node_type, uuid=node_uuid, | ||
1729 | 160 | content_hash=local_node.content_hash, children=merged_children) | ||
1730 | 161 | |||
1731 | 162 | |||
1732 | 163 | class ClobberLocalMerge(object): | ||
1733 | 164 | """Clobber local state to match server.""" | ||
1734 | 165 | |||
1735 | 166 | def get_node_type(self, old_local_node, local_node, | ||
1736 | 167 | old_remote_node, remote_node, path): | ||
1737 | 168 | """Return remote node type.""" | ||
1738 | 169 | if remote_node is not None: | ||
1739 | 170 | return remote_node.node_type | ||
1740 | 171 | else: | ||
1741 | 172 | return None | ||
1742 | 173 | |||
1743 | 174 | def merge_node(self, old_local_node, local_node, | ||
1744 | 175 | old_remote_node, remote_node, node_type, merged_children): | ||
1745 | 176 | """Copy the remote node.""" | ||
1746 | 177 | if remote_node is None: | ||
1747 | 178 | return None | ||
1748 | 179 | return MergeNode( | ||
1749 | 180 | node_type=node_type, uuid=remote_node.uuid, | ||
1750 | 181 | content_hash=remote_node.content_hash, children=merged_children) | ||
1751 | 0 | 182 | ||
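The three merge policies differ only in their get_node_type()/merge_node() pair; SyncMerge is the interesting one because it detects a true conflict, both sides changing the same file since the last sync, and records the losing hash instead of discarding it. A small illustration (hashes and uuid are invented placeholders):

    from ubuntuone.storageprotocol.dircontent_pb2 import FILE
    from magicicada.u1sync.genericmerge import MergeNode
    from magicicada.u1sync.merge import SyncMerge

    old = MergeNode(FILE, content_hash="sha1:base")
    local = MergeNode(FILE, content_hash="sha1:ours")     # edited locally
    remote = MergeNode(FILE, content_hash="sha1:theirs",  # edited remotely
                       uuid="fake-node-uuid")

    merged = SyncMerge().merge_node(
        old_local_node=old, local_node=local,
        old_remote_node=old, remote_node=remote,
        node_type=FILE, merged_children=None)

    assert merged.content_hash == "sha1:ours"        # local version wins
    assert merged.conflict_info[1] == "sha1:theirs"  # remote kept aside

The sync stage later materializes that conflict_info as a conflict-<id>-<name> file rather than silently overwriting either side.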
1752 | === added file 'magicicada/u1sync/metadata.py' | |||
1753 | --- magicicada/u1sync/metadata.py 1970-01-01 00:00:00 +0000 | |||
1754 | +++ magicicada/u1sync/metadata.py 2018-04-20 01:22:48 +0000 | |||
1755 | @@ -0,0 +1,155 @@ | |||
1756 | 1 | # Copyright 2009 Canonical Ltd. | ||
1757 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
1758 | 3 | # | ||
1759 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
1760 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
1761 | 6 | # by the Free Software Foundation. | ||
1762 | 7 | # | ||
1763 | 8 | # This program is distributed in the hope that it will be useful, but | ||
1764 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
1765 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
1766 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
1767 | 12 | # | ||
1768 | 13 | # You should have received a copy of the GNU General Public License along | ||
1769 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
1770 | 15 | |||
1771 | 16 | """Routines for loading/storing u1sync mirror metadata.""" | ||
1772 | 17 | |||
1773 | 18 | from __future__ import with_statement | ||
1774 | 19 | |||
1775 | 20 | import os | ||
1776 | 21 | import uuid | ||
1777 | 22 | |||
1778 | 23 | from contextlib import contextmanager | ||
1779 | 24 | import cPickle as pickle | ||
1780 | 25 | from errno import ENOENT | ||
1781 | 26 | |||
1782 | 27 | from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY | ||
1783 | 28 | |||
1784 | 29 | from magicicada.u1sync.merge import MergeNode | ||
1785 | 30 | from magicicada.u1sync.utils import safe_unlink | ||
1786 | 31 | |||
1787 | 32 | |||
1788 | 33 | class Metadata(object): | ||
1789 | 34 | """Object representing mirror metadata.""" | ||
1790 | 35 | |||
1791 | 36 | def __init__(self, local_tree=None, remote_tree=None, share_uuid=None, | ||
1792 | 37 | root_uuid=None, path=None): | ||
1793 | 38 | """Populate fields.""" | ||
1794 | 39 | self.local_tree = local_tree | ||
1795 | 40 | self.remote_tree = remote_tree | ||
1796 | 41 | self.share_uuid = share_uuid | ||
1797 | 42 | self.root_uuid = root_uuid | ||
1798 | 43 | self.path = path | ||
1799 | 44 | |||
1800 | 45 | |||
1801 | 46 | def read(metadata_dir): | ||
1802 | 47 | """Read metadata for a mirror rooted at directory.""" | ||
1803 | 48 | index_file = os.path.join(metadata_dir, "local-index") | ||
1804 | 49 | share_uuid_file = os.path.join(metadata_dir, "share-uuid") | ||
1805 | 50 | root_uuid_file = os.path.join(metadata_dir, "root-uuid") | ||
1806 | 51 | path_file = os.path.join(metadata_dir, "path") | ||
1807 | 52 | |||
1808 | 53 | index = read_pickle_file(index_file, {}) | ||
1809 | 54 | share_uuid = read_uuid_file(share_uuid_file) | ||
1810 | 55 | root_uuid = read_uuid_file(root_uuid_file) | ||
1811 | 56 | path = read_string_file(path_file, '/') | ||
1812 | 57 | |||
1813 | 58 | local_tree = index.get("tree", None) | ||
1814 | 59 | remote_tree = index.get("remote_tree", None) | ||
1815 | 60 | |||
1816 | 61 | if local_tree is None: | ||
1817 | 62 | local_tree = MergeNode(node_type=DIRECTORY, children={}) | ||
1818 | 63 | if remote_tree is None: | ||
1819 | 64 | remote_tree = MergeNode(node_type=DIRECTORY, children={}) | ||
1820 | 65 | |||
1821 | 66 | return Metadata(local_tree=local_tree, remote_tree=remote_tree, | ||
1822 | 67 | share_uuid=share_uuid, root_uuid=root_uuid, | ||
1823 | 68 | path=path) | ||
1824 | 69 | |||
1825 | 70 | |||
1826 | 71 | def write(metadata_dir, info): | ||
1827 | 72 | """Writes all metadata for the mirror rooted at directory.""" | ||
1828 | 73 | share_uuid_file = os.path.join(metadata_dir, "share-uuid") | ||
1829 | 74 | root_uuid_file = os.path.join(metadata_dir, "root-uuid") | ||
1830 | 75 | index_file = os.path.join(metadata_dir, "local-index") | ||
1831 | 76 | path_file = os.path.join(metadata_dir, "path") | ||
1832 | 77 | if info.share_uuid is not None: | ||
1833 | 78 | write_uuid_file(share_uuid_file, info.share_uuid) | ||
1834 | 79 | else: | ||
1835 | 80 | safe_unlink(share_uuid_file) | ||
1836 | 81 | if info.root_uuid is not None: | ||
1837 | 82 | write_uuid_file(root_uuid_file, info.root_uuid) | ||
1838 | 83 | else: | ||
1839 | 84 | safe_unlink(root_uuid_file) | ||
1840 | 85 | write_string_file(path_file, info.path) | ||
1841 | 86 | write_pickle_file(index_file, {"tree": info.local_tree, | ||
1842 | 87 | "remote_tree": info.remote_tree}) | ||
1843 | 88 | |||
1844 | 89 | |||
1845 | 90 | def write_pickle_file(filename, value): | ||
1846 | 91 | """Writes a pickled python object to a file.""" | ||
1847 | 92 | with atomic_update_file(filename) as stream: | ||
1848 | 93 | pickle.dump(value, stream, 2) | ||
1849 | 94 | |||
1850 | 95 | |||
1851 | 96 | def write_string_file(filename, value): | ||
1852 | 97 | """Writes a string to a file with an added line feed, or | ||
1853 | 98 | deletes the file if value is None. | ||
1854 | 99 | """ | ||
1855 | 100 | if value is not None: | ||
1856 | 101 | with atomic_update_file(filename) as stream: | ||
1857 | 102 | stream.write(value) | ||
1858 | 103 | stream.write('\n') | ||
1859 | 104 | else: | ||
1860 | 105 | safe_unlink(filename) | ||
1861 | 106 | |||
1862 | 107 | |||
1863 | 108 | def write_uuid_file(filename, value): | ||
1864 | 109 | """Writes a UUID to a file.""" | ||
1865 | 110 | write_string_file(filename, str(value)) | ||
1866 | 111 | |||
1867 | 112 | |||
1868 | 113 | def read_pickle_file(filename, default_value=None): | ||
1869 | 114 | """Reads a pickled python object from a file.""" | ||
1870 | 115 | try: | ||
1871 | 116 | with open(filename, "rb") as stream: | ||
1872 | 117 | return pickle.load(stream) | ||
1873 | 118 | except IOError as e: | ||
1874 | 119 | if e.errno != ENOENT: | ||
1875 | 120 | raise | ||
1876 | 121 | return default_value | ||
1877 | 122 | |||
1878 | 123 | |||
1879 | 124 | def read_string_file(filename, default_value=None): | ||
1880 | 125 | """Reads a string from a file, discarding the trailing newline.""" | ||
1881 | 126 | try: | ||
1882 | 127 | with open(filename, "r") as stream: | ||
1883 | 128 | return stream.read()[:-1] | ||
1884 | 129 | except IOError as e: | ||
1885 | 130 | if e.errno != ENOENT: | ||
1886 | 131 | raise | ||
1887 | 132 | return default_value | ||
1888 | 133 | |||
1889 | 134 | |||
1890 | 135 | def read_uuid_file(filename, default_value=None): | ||
1891 | 136 | """Reads a UUID from a file.""" | ||
1892 | 137 | try: | ||
1893 | 138 | with open(filename, "r") as stream: | ||
1894 | 139 | return uuid.UUID(stream.read()[:-1]) | ||
1895 | 140 | except IOError as e: | ||
1896 | 141 | if e.errno != ENOENT: | ||
1897 | 142 | raise | ||
1898 | 143 | return default_value | ||
1899 | 144 | |||
1900 | 145 | |||
1901 | 146 | @contextmanager | ||
1902 | 147 | def atomic_update_file(filename): | ||
1903 | 148 | """Returns a context manager for atomically updating a file.""" | ||
1904 | 149 | temp_filename = "%s.%s" % (filename, uuid.uuid4()) | ||
1905 | 150 | try: | ||
1906 | 151 | with open(temp_filename, "w") as stream: | ||
1907 | 152 | yield stream | ||
1908 | 153 | os.rename(temp_filename, filename) | ||
1909 | 154 | finally: | ||
1910 | 155 | safe_unlink(temp_filename) | ||
1911 | 0 | 156 | ||
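All the writers above funnel through atomic_update_file(), which leans on rename(2) being atomic within a filesystem: content goes to a unique temporary name and is renamed over the real file only after the write succeeds, so a reader or a crash never sees a half-written metadata file. Usage is a plain context manager (sketch; the path is made up and its directory must already exist):

    from magicicada.u1sync.metadata import atomic_update_file

    with atomic_update_file("/tmp/mirror-meta/path") as stream:
        stream.write("/photos\n")  # swapped into place atomically

If the body raises, the temporary file is unlinked and the previous version stays untouched.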
1912 | === added file 'magicicada/u1sync/scan.py' | |||
1913 | --- magicicada/u1sync/scan.py 1970-01-01 00:00:00 +0000 | |||
1914 | +++ magicicada/u1sync/scan.py 2018-04-20 01:22:48 +0000 | |||
1915 | @@ -0,0 +1,84 @@ | |||
1916 | 1 | # Copyright 2009 Canonical Ltd. | ||
1917 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
1918 | 3 | # | ||
1919 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
1920 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
1921 | 6 | # by the Free Software Foundation. | ||
1922 | 7 | # | ||
1923 | 8 | # This program is distributed in the hope that it will be useful, but | ||
1924 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
1925 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
1926 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
1927 | 12 | # | ||
1928 | 13 | # You should have received a copy of the GNU General Public License along | ||
1929 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
1930 | 15 | |||
1931 | 16 | """Code for scanning local directory state.""" | ||
1932 | 17 | |||
1933 | 18 | from __future__ import with_statement | ||
1934 | 19 | |||
1935 | 20 | import os | ||
1936 | 21 | import hashlib | ||
1937 | 22 | import shutil | ||
1938 | 23 | |||
1939 | 24 | from errno import ENOTDIR, EINVAL | ||
1940 | 25 | |||
1941 | 26 | from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, FILE, SYMLINK | ||
1942 | 27 | |||
1943 | 28 | from magicicada.u1sync.genericmerge import MergeNode | ||
1944 | 29 | from magicicada.u1sync.utils import should_sync | ||
1945 | 30 | |||
1946 | 31 | |||
1947 | 32 | EMPTY_HASH = "sha1:%s" % hashlib.sha1().hexdigest() | ||
1948 | 33 | |||
1949 | 34 | |||
1950 | 35 | def scan_directory(path, display_path="", quiet=False): | ||
1951 | 36 | """Scans a local directory and builds an in-memory tree from it.""" | ||
1952 | 37 | if display_path != "" and not quiet: | ||
1953 | 38 | print display_path | ||
1954 | 39 | |||
1955 | 40 | link_target = None | ||
1956 | 41 | child_names = None | ||
1957 | 42 | try: | ||
1958 | 43 | link_target = os.readlink(path) | ||
1959 | 44 | except OSError as e: | ||
1960 | 45 | if e.errno != EINVAL: | ||
1961 | 46 | raise | ||
1962 | 47 | try: | ||
1963 | 48 | child_names = os.listdir(path) | ||
1964 | 49 | except OSError as e: | ||
1965 | 50 | if e.errno != ENOTDIR: | ||
1966 | 51 | raise | ||
1967 | 52 | |||
1968 | 53 | if link_target is not None: | ||
1969 | 54 | # symlink | ||
1970 | 55 | sha = hashlib.sha1() | ||
1971 | 56 | sha.update(link_target) | ||
1972 | 57 | content_hash = "sha1:%s" % sha.hexdigest() | ||
1973 | 58 | return MergeNode(node_type=SYMLINK, content_hash=content_hash) | ||
1974 | 59 | elif child_names is not None: | ||
1975 | 60 | # directory | ||
1976 | 61 | child_names = [ | ||
1977 | 62 | n for n in child_names if should_sync(n.decode("utf-8"))] | ||
1978 | 63 | child_paths = [(os.path.join(path, child_name), | ||
1979 | 64 | os.path.join(display_path, child_name)) | ||
1980 | 65 | for child_name in child_names] | ||
1981 | 66 | children = [scan_directory(child_path, child_display_path, quiet) | ||
1982 | 67 | for (child_path, child_display_path) in child_paths] | ||
1983 | 68 | unicode_child_names = [n.decode("utf-8") for n in child_names] | ||
1984 | 69 | children = dict(zip(unicode_child_names, children)) | ||
1985 | 70 | return MergeNode(node_type=DIRECTORY, children=children) | ||
1986 | 71 | else: | ||
1987 | 72 | # regular file | ||
1988 | 73 | sha = hashlib.sha1() | ||
1989 | 74 | |||
1990 | 75 | class HashStream(object): | ||
1991 | 76 | """Stream that computes hashes.""" | ||
1992 | 77 | def write(self, data): | ||
1993 | 78 | """Accumulate bytes.""" | ||
1994 | 79 | sha.update(data) | ||
1995 | 80 | |||
1996 | 81 | with open(path, "r") as stream: | ||
1997 | 82 | shutil.copyfileobj(stream, HashStream()) | ||
1998 | 83 | content_hash = "sha1:%s" % sha.hexdigest() | ||
1999 | 84 | return MergeNode(node_type=FILE, content_hash=content_hash) | ||
2000 | 0 | 85 | ||
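scan_directory() classifies nodes by probing rather than by stat(): if os.readlink() succeeds the node is a symlink, otherwise if os.listdir() succeeds it is a directory, otherwise it is a regular file whose content is streamed through the one-method HashStream so large files are hashed without being read into memory whole. For a hypothetical directory holding one empty file (names invented; assumes should_sync() accepts them):

    from magicicada.u1sync.scan import EMPTY_HASH, scan_directory

    tree = scan_directory("/tmp/demo", quiet=True)
    assert tree.children[u"notes.txt"].content_hash == EMPTY_HASH

Note the children dict is keyed by unicode names while the on-disk names stay UTF-8 byte strings, matching the wire protocol's convention.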
2001 | === added file 'magicicada/u1sync/sync.py' | |||
2002 | --- magicicada/u1sync/sync.py 1970-01-01 00:00:00 +0000 | |||
2003 | +++ magicicada/u1sync/sync.py 2018-04-20 01:22:48 +0000 | |||
2004 | @@ -0,0 +1,377 @@ | |||
2005 | 1 | # Copyright 2009 Canonical Ltd. | ||
2006 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
2007 | 3 | # | ||
2008 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
2009 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
2010 | 6 | # by the Free Software Foundation. | ||
2011 | 7 | # | ||
2012 | 8 | # This program is distributed in the hope that it will be useful, but | ||
2013 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
2014 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
2015 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
2016 | 12 | # | ||
2017 | 13 | # You should have received a copy of the GNU General Public License along | ||
2018 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
2019 | 15 | |||
2020 | 16 | """Sync. | ||
2021 | 17 | |||
2022 | 18 | After merging, these routines are used to synchronize state locally and on | ||
2023 | 19 | the server to correspond to the merged result. | ||
2024 | 20 | |||
2025 | 21 | """ | ||
2026 | 22 | |||
2027 | 23 | from __future__ import with_statement | ||
2028 | 24 | |||
2029 | 25 | import os | ||
2030 | 26 | |||
2031 | 27 | from ubuntuone.storageprotocol import request | ||
2032 | 28 | from ubuntuone.storageprotocol.dircontent_pb2 import DIRECTORY, SYMLINK | ||
2033 | 29 | |||
2034 | 30 | from magicicada.u1sync.client import UnsupportedOperationError | ||
2035 | 31 | from magicicada.u1sync.genericmerge import MergeNode, generic_merge | ||
2036 | 32 | from magicicada.u1sync.utils import safe_mkdir | ||
2037 | 33 | |||
2038 | 34 | |||
2039 | 35 | EMPTY_HASH = "" | ||
2040 | 36 | UPLOAD_SYMBOL = u"\u25b2".encode("utf-8") | ||
2041 | 37 | DOWNLOAD_SYMBOL = u"\u25bc".encode("utf-8") | ||
2042 | 38 | CONFLICT_SYMBOL = "!" | ||
2043 | 39 | DELETE_SYMBOL = "X" | ||
2044 | 40 | |||
2045 | 41 | |||
2046 | 42 | def get_conflict_path(path, conflict_info): | ||
2047 | 43 | """Return path for conflict file corresponding to path.""" | ||
2048 | 44 | dirname, name = os.path.split(path) | ||
2049 | 45 | unique_id = conflict_info[0] | ||
2050 | 46 | return os.path.join(dirname, "conflict-%s-%s" % (unique_id, name)) | ||
2051 | 47 | |||
2052 | 48 | |||
2053 | 49 | def name_from_path(path): | ||
2054 | 50 | """Return unicode name from last path component.""" | ||
2055 | 51 | return os.path.split(path)[1].decode("UTF-8") | ||
2056 | 52 | |||
2057 | 53 | |||
2058 | 54 | class NodeSyncError(Exception): | ||
2059 | 55 | """Error syncing node.""" | ||
2060 | 56 | |||
2061 | 57 | |||
2062 | 58 | class NodeCreateError(NodeSyncError): | ||
2063 | 59 | """Error creating node.""" | ||
2064 | 60 | |||
2065 | 61 | |||
2066 | 62 | class NodeUpdateError(NodeSyncError): | ||
2067 | 63 | """Error updating node.""" | ||
2068 | 64 | |||
2069 | 65 | |||
2070 | 66 | class NodeDeleteError(NodeSyncError): | ||
2071 | 67 | """Error deleting node.""" | ||
2072 | 68 | |||
2073 | 69 | |||
2074 | 70 | def sync_tree(merged_tree, original_tree, sync_mode, path, quiet): | ||
2075 | 71 | """Performs actual synchronization.""" | ||
2076 | 72 | |||
2077 | 73 | def pre_merge(nodes, name, partial_parent): | ||
2078 | 74 | """Create nodes and write content as required.""" | ||
2079 | 75 | (merged_node, original_node) = nodes | ||
2080 | 76 | (parent_path, parent_display_path, parent_uuid, | ||
2081 | 77 | parent_synced) = partial_parent | ||
2082 | 78 | |||
2083 | 79 | utf8_name = name.encode("utf-8") | ||
2084 | 80 | path = os.path.join(parent_path, utf8_name) | ||
2085 | 81 | display_path = os.path.join(parent_display_path, utf8_name) | ||
2086 | 82 | node_uuid = None | ||
2087 | 83 | |||
2088 | 84 | synced = False | ||
2089 | 85 | if merged_node is not None: | ||
2090 | 86 | if merged_node.node_type == DIRECTORY: | ||
2091 | 87 | if original_node is not None: | ||
2092 | 88 | synced = True | ||
2093 | 89 | node_uuid = original_node.uuid | ||
2094 | 90 | else: | ||
2095 | 91 | if not quiet: | ||
2096 | 92 | print "%s %s" % (sync_mode.symbol, display_path) | ||
2097 | 93 | try: | ||
2098 | 94 | create_dir = sync_mode.create_directory | ||
2099 | 95 | node_uuid = create_dir(parent_uuid=parent_uuid, | ||
2100 | 96 | path=path) | ||
2101 | 97 | synced = True | ||
2102 | 98 | except NodeCreateError as e: | ||
2103 | 99 | print e | ||
2104 | 100 | elif merged_node.content_hash is None: | ||
2105 | 101 | if not quiet: | ||
2106 | 102 | print "? %s" % display_path | ||
2107 | 103 | elif (original_node is None or | ||
2108 | 104 | original_node.content_hash != merged_node.content_hash or | ||
2109 | 105 | merged_node.conflict_info is not None): | ||
2110 | 106 | conflict_info = merged_node.conflict_info | ||
2111 | 107 | if conflict_info is not None: | ||
2112 | 108 | conflict_symbol = CONFLICT_SYMBOL | ||
2113 | 109 | else: | ||
2114 | 110 | conflict_symbol = " " | ||
2115 | 111 | if not quiet: | ||
2116 | 112 | print "%s %s %s" % (sync_mode.symbol, conflict_symbol, | ||
2117 | 113 | display_path) | ||
2118 | 114 | if original_node is not None: | ||
2119 | 115 | node_uuid = original_node.uuid or merged_node.uuid | ||
2120 | 116 | original_hash = original_node.content_hash or EMPTY_HASH | ||
2121 | 117 | else: | ||
2122 | 118 | node_uuid = merged_node.uuid | ||
2123 | 119 | original_hash = EMPTY_HASH | ||
2124 | 120 | try: | ||
2125 | 121 | sync_mode.write_file( | ||
2126 | 122 | node_uuid=node_uuid, | ||
2127 | 123 | content_hash=merged_node.content_hash, | ||
2128 | 124 | old_content_hash=original_hash, path=path, | ||
2129 | 125 | parent_uuid=parent_uuid, conflict_info=conflict_info, | ||
2130 | 126 | node_type=merged_node.node_type) | ||
2131 | 127 | synced = True | ||
2132 | 128 | except NodeSyncError as e: | ||
2133 | 129 | print e | ||
2134 | 130 | else: | ||
2135 | 131 | synced = True | ||
2136 | 132 | |||
2137 | 133 | return (path, display_path, node_uuid, synced) | ||
2138 | 134 | |||
2139 | 135 | def post_merge(nodes, partial_result, child_results): | ||
2140 | 136 | """Delete nodes.""" | ||
2141 | 137 | (merged_node, original_node) = nodes | ||
2142 | 138 | (path, display_path, node_uuid, synced) = partial_result | ||
2143 | 139 | |||
2144 | 140 | if merged_node is None: | ||
2145 | 141 | assert original_node is not None | ||
2146 | 142 | if not quiet: | ||
2147 | 143 | print "%s %s %s" % (sync_mode.symbol, DELETE_SYMBOL, | ||
2148 | 144 | display_path) | ||
2149 | 145 | try: | ||
2150 | 146 | if original_node.node_type == DIRECTORY: | ||
2151 | 147 | sync_mode.delete_directory(node_uuid=original_node.uuid, | ||
2152 | 148 | path=path) | ||
2153 | 149 | else: | ||
2154 | 150 | # files or symlinks | ||
2155 | 151 | sync_mode.delete_file(node_uuid=original_node.uuid, | ||
2156 | 152 | path=path) | ||
2157 | 153 | synced = True | ||
2158 | 154 | except NodeDeleteError as e: | ||
2159 | 155 | print e | ||
2160 | 156 | |||
2161 | 157 | if synced: | ||
2162 | 158 | model_node = merged_node | ||
2163 | 159 | else: | ||
2164 | 160 | model_node = original_node | ||
2165 | 161 | |||
2166 | 162 | if model_node is not None: | ||
2167 | 163 | if model_node.node_type == DIRECTORY: | ||
2168 | 164 | child_iter = child_results.iteritems() | ||
2169 | 165 | merged_children = dict( | ||
2170 | 166 | (name, child) for (name, child) in child_iter | ||
2171 | 167 | if child is not None) | ||
2172 | 168 | else: | ||
2173 | 169 | # if there are children here it's because they failed to delete | ||
2174 | 170 | merged_children = None | ||
2175 | 171 | return MergeNode(node_type=model_node.node_type, | ||
2176 | 172 | uuid=model_node.uuid, | ||
2177 | 173 | children=merged_children, | ||
2178 | 174 | content_hash=model_node.content_hash) | ||
2179 | 175 | else: | ||
2180 | 176 | return None | ||
2181 | 177 | |||
2182 | 178 | return generic_merge(trees=[merged_tree, original_tree], | ||
2183 | 179 | pre_merge=pre_merge, post_merge=post_merge, | ||
2184 | 180 | partial_parent=(path, "", None, True), name=u"") | ||
2185 | 181 | |||
2186 | 182 | |||
2187 | 183 | def download_tree(merged_tree, local_tree, client, share_uuid, path, dry_run, | ||
2188 | 184 | quiet): | ||
2189 | 185 | """Downloads a directory.""" | ||
2190 | 186 | if dry_run: | ||
2191 | 187 | downloader = DryRun(symbol=DOWNLOAD_SYMBOL) | ||
2192 | 188 | else: | ||
2193 | 189 | downloader = Downloader(client=client, share_uuid=share_uuid) | ||
2194 | 190 | return sync_tree(merged_tree=merged_tree, original_tree=local_tree, | ||
2195 | 191 | sync_mode=downloader, path=path, quiet=quiet) | ||
2196 | 192 | |||
2197 | 193 | |||
2198 | 194 | def upload_tree(merged_tree, remote_tree, client, share_uuid, path, dry_run, | ||
2199 | 195 | quiet): | ||
2200 | 196 | """Uploads a directory.""" | ||
2201 | 197 | if dry_run: | ||
2202 | 198 | uploader = DryRun(symbol=UPLOAD_SYMBOL) | ||
2203 | 199 | else: | ||
2204 | 200 | uploader = Uploader(client=client, share_uuid=share_uuid) | ||
2205 | 201 | return sync_tree(merged_tree=merged_tree, original_tree=remote_tree, | ||
2206 | 202 | sync_mode=uploader, path=path, quiet=quiet) | ||
2207 | 203 | |||
2208 | 204 | |||
2209 | 205 | class DryRun(object): | ||
2210 | 206 | """A class which implements the sync interface but does nothing.""" | ||
2211 | 207 | def __init__(self, symbol): | ||
2212 | 208 | """Initializes a DryRun instance.""" | ||
2213 | 209 | self.symbol = symbol | ||
2214 | 210 | |||
2215 | 211 | def create_directory(self, parent_uuid, path): | ||
2216 | 212 | """Doesn't create a directory.""" | ||
2217 | 213 | return None | ||
2218 | 214 | |||
2219 | 215 | def write_file(self, node_uuid, old_content_hash, content_hash, | ||
2220 | 216 | parent_uuid, path, conflict_info, node_type): | ||
2221 | 217 | """Doesn't write a file.""" | ||
2222 | 218 | return None | ||
2223 | 219 | |||
2224 | 220 | def delete_directory(self, node_uuid, path): | ||
2225 | 221 | """Doesn't delete a directory.""" | ||
2226 | 222 | |||
2227 | 223 | def delete_file(self, node_uuid, path): | ||
2228 | 224 | """Doesn't delete a file.""" | ||
2229 | 225 | |||
2230 | 226 | |||
2231 | 227 | class Downloader(object): | ||
2232 | 228 | """A class which implements the download half of syncing.""" | ||
2233 | 229 | def __init__(self, client, share_uuid): | ||
2234 | 230 | """Initializes a Downloader instance.""" | ||
2235 | 231 | self.client = client | ||
2236 | 232 | self.share_uuid = share_uuid | ||
2237 | 233 | self.symbol = DOWNLOAD_SYMBOL | ||
2238 | 234 | |||
2239 | 235 | def create_directory(self, parent_uuid, path): | ||
2240 | 236 | """Creates a directory.""" | ||
2241 | 237 | try: | ||
2242 | 238 | safe_mkdir(path) | ||
2243 | 239 | except OSError as e: | ||
2244 | 240 | raise NodeCreateError( | ||
2245 | 241 | "Error creating local directory %s: %s" % (path, e)) | ||
2246 | 242 | return None | ||
2247 | 243 | |||
2248 | 244 | def write_file(self, node_uuid, old_content_hash, content_hash, | ||
2249 | 245 | parent_uuid, path, conflict_info, node_type): | ||
2250 | 246 | """Creates a file and downloads new content for it.""" | ||
2251 | 247 | if conflict_info: | ||
2252 | 248 | # download to conflict file rather than overwriting local changes | ||
2253 | 249 | path = get_conflict_path(path, conflict_info) | ||
2254 | 250 | content_hash = conflict_info[1] | ||
2255 | 251 | try: | ||
2256 | 252 | if node_type == SYMLINK: | ||
2257 | 253 | self.client.download_string( | ||
2258 | 254 | share_uuid=self.share_uuid, node_uuid=node_uuid, | ||
2259 | 255 | content_hash=content_hash) | ||
2260 | 256 | else: | ||
2261 | 257 | self.client.download_file( | ||
2262 | 258 | share_uuid=self.share_uuid, node_uuid=node_uuid, | ||
2263 | 259 | content_hash=content_hash, filename=path) | ||
2264 | 260 | except (request.StorageRequestError, UnsupportedOperationError) as e: | ||
2265 | 261 | if os.path.exists(path): | ||
2266 | 262 | raise NodeUpdateError( | ||
2267 | 263 | "Error downloading content for %s: %s" % (path, e)) | ||
2268 | 264 | else: | ||
2269 | 265 | raise NodeCreateError( | ||
2270 | 266 | "Error locally creating %s: %s" % (path, e)) | ||
2271 | 267 | |||
2272 | 268 | def delete_directory(self, node_uuid, path): | ||
2273 | 269 | """Deletes a directory.""" | ||
2274 | 270 | try: | ||
2275 | 271 | os.rmdir(path) | ||
2276 | 272 | except OSError as e: | ||
2277 | 273 | raise NodeDeleteError("Error locally deleting %s: %s" % (path, e)) | ||
2278 | 274 | |||
2279 | 275 | def delete_file(self, node_uuid, path): | ||
2280 | 276 | """Deletes a file.""" | ||
2281 | 277 | try: | ||
2282 | 278 | os.remove(path) | ||
2283 | 279 | except OSError as e: | ||
2284 | 280 | raise NodeDeleteError("Error locally deleting %s: %s" % (path, e)) | ||
2285 | 281 | |||
2286 | 282 | |||
2287 | 283 | class Uploader(object): | ||
2288 | 284 | """A class which implements the upload half of syncing.""" | ||
2289 | 285 | def __init__(self, client, share_uuid): | ||
2290 | 286 | """Initializes an uploader instance.""" | ||
2291 | 287 | self.client = client | ||
2292 | 288 | self.share_uuid = share_uuid | ||
2293 | 289 | self.symbol = UPLOAD_SYMBOL | ||
2294 | 290 | |||
2295 | 291 | def create_directory(self, parent_uuid, path): | ||
2296 | 292 | """Creates a directory on the server.""" | ||
2297 | 293 | name = name_from_path(path) | ||
2298 | 294 | try: | ||
2299 | 295 | return self.client.create_directory(share_uuid=self.share_uuid, | ||
2300 | 296 | parent_uuid=parent_uuid, | ||
2301 | 297 | name=name) | ||
2302 | 298 | except (request.StorageRequestError, UnsupportedOperationError) as e: | ||
2303 | 299 | raise NodeCreateError("Error remotely creating %s: %s" % (path, e)) | ||
2304 | 300 | |||
2305 | 301 | def write_file(self, node_uuid, old_content_hash, content_hash, | ||
2306 | 302 | parent_uuid, path, conflict_info, node_type): | ||
2307 | 303 | """Creates a file on the server and uploads new content for it.""" | ||
2308 | 304 | |||
2309 | 305 | if conflict_info: | ||
2310 | 306 | # move conflicting file out of the way on the server | ||
2311 | 307 | conflict_path = get_conflict_path(path, conflict_info) | ||
2312 | 308 | conflict_name = name_from_path(conflict_path) | ||
2313 | 309 | try: | ||
2314 | 310 | self.client.move(share_uuid=self.share_uuid, | ||
2315 | 311 | parent_uuid=parent_uuid, | ||
2316 | 312 | name=conflict_name, | ||
2317 | 313 | node_uuid=node_uuid) | ||
2318 | 314 | except (request.StorageRequestError, | ||
2319 | 315 | UnsupportedOperationError) as e: | ||
2320 | 316 | raise NodeUpdateError( | ||
2321 | 317 | "Error remotely renaming %s to %s: %s" % | ||
2322 | 318 | (path, conflict_path, e)) | ||
2323 | 319 | node_uuid = None | ||
2324 | 320 | old_content_hash = EMPTY_HASH | ||
2325 | 321 | |||
2326 | 322 | if node_type == SYMLINK: | ||
2327 | 323 | try: | ||
2328 | 324 | target = os.readlink(path) | ||
2329 | 325 | except OSError as e: | ||
2330 | 326 | raise NodeCreateError( | ||
2331 | 327 | "Error retrieving link target for %s: %s" % (path, e)) | ||
2332 | 328 | else: | ||
2333 | 329 | target = None | ||
2334 | 330 | |||
2335 | 331 | name = name_from_path(path) | ||
2336 | 332 | if node_uuid is None: | ||
2337 | 333 | try: | ||
2338 | 334 | if node_type == SYMLINK: | ||
2339 | 335 | node_uuid = self.client.create_symlink( | ||
2340 | 336 | share_uuid=self.share_uuid, parent_uuid=parent_uuid, | ||
2341 | 337 | name=name, target=target) | ||
2342 | 338 | old_content_hash = content_hash | ||
2343 | 339 | else: | ||
2344 | 340 | node_uuid = self.client.create_file( | ||
2345 | 341 | share_uuid=self.share_uuid, parent_uuid=parent_uuid, | ||
2346 | 342 | name=name) | ||
2347 | 343 | except (request.StorageRequestError, | ||
2348 | 344 | UnsupportedOperationError) as e: | ||
2349 | 345 | raise NodeCreateError( | ||
2350 | 346 | "Error remotely creating %s: %s" % (path, e)) | ||
2351 | 347 | |||
2352 | 348 | if old_content_hash != content_hash: | ||
2353 | 349 | try: | ||
2354 | 350 | if node_type == SYMLINK: | ||
2355 | 351 | self.client.upload_string( | ||
2356 | 352 | share_uuid=self.share_uuid, node_uuid=node_uuid, | ||
2357 | 353 | content_hash=content_hash, | ||
2358 | 354 | old_content_hash=old_content_hash, content=target) | ||
2359 | 355 | else: | ||
2360 | 356 | self.client.upload_file( | ||
2361 | 357 | share_uuid=self.share_uuid, node_uuid=node_uuid, | ||
2362 | 358 | content_hash=content_hash, | ||
2363 | 359 | old_content_hash=old_content_hash, filename=path) | ||
2364 | 360 | except (request.StorageRequestError, | ||
2365 | 361 | UnsupportedOperationError) as e: | ||
2366 | 362 | raise NodeUpdateError( | ||
2367 | 363 | "Error uploading content for %s: %s" % (path, e)) | ||
2368 | 364 | |||
2369 | 365 | def delete_directory(self, node_uuid, path): | ||
2370 | 366 | """Deletes a directory.""" | ||
2371 | 367 | try: | ||
2372 | 368 | self.client.unlink(share_uuid=self.share_uuid, node_uuid=node_uuid) | ||
2373 | 369 | except (request.StorageRequestError, UnsupportedOperationError) as e: | ||
2374 | 370 | raise NodeDeleteError("Error remotely deleting %s: %s" % (path, e)) | ||
2375 | 371 | |||
2376 | 372 | def delete_file(self, node_uuid, path): | ||
2377 | 373 | """Deletes a file.""" | ||
2378 | 374 | try: | ||
2379 | 375 | self.client.unlink(share_uuid=self.share_uuid, node_uuid=node_uuid) | ||
2380 | 376 | except (request.StorageRequestError, UnsupportedOperationError) as e: | ||
2381 | 377 | raise NodeDeleteError("Error remotely deleting %s: %s" % (path, e)) | ||
2382 | 0 | 378 | ||
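For reviewers skimming the new sync.py: a minimal usage sketch of the Uploader above, not part of this branch. It assumes an already-connected client and valid share_uuid/parent_uuid values, and that NodeCreateError is importable from the same module (the diff only shows it being raised there), so treat those names as assumptions:

    from magicicada.u1sync.sync import Uploader, NodeCreateError

    def push_new_directory(client, share_uuid, parent_uuid, path):
        """Create one remote directory via the Uploader wrapper."""
        uploader = Uploader(client, share_uuid)
        try:
            # create_directory wraps storage errors into NodeCreateError,
            # so callers handle a single sync-level exception type.
            return uploader.create_directory(parent_uuid, path)
        except NodeCreateError as e:
            print("upload failed: %s" % e)
            raise

Note that write_file above follows the same pattern: a conflicting server copy is first renamed aside, then the node is (re)created if needed, and content is uploaded only when the old and new hashes differ.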
2383 | === added directory 'magicicada/u1sync/tests' | |||
2384 | === added file 'magicicada/u1sync/tests/__init__.py' | |||
2385 | --- magicicada/u1sync/tests/__init__.py 1970-01-01 00:00:00 +0000 | |||
2386 | +++ magicicada/u1sync/tests/__init__.py 2018-04-20 01:22:48 +0000 | |||
2387 | @@ -0,0 +1,1 @@ | |||
2388 | 1 | """Tests for u1sync.""" | ||
2389 | 0 | 2 | ||
2390 | === added file 'magicicada/u1sync/tests/test_client.py' | |||
2391 | --- magicicada/u1sync/tests/test_client.py 1970-01-01 00:00:00 +0000 | |||
2392 | +++ magicicada/u1sync/tests/test_client.py 2018-04-20 01:22:48 +0000 | |||
2393 | @@ -0,0 +1,63 @@ | |||
2394 | 1 | # Copyright 2010 Canonical Ltd. | ||
2395 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
2396 | 3 | # | ||
2397 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
2398 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
2399 | 6 | # by the Free Software Foundation. | ||
2400 | 7 | # | ||
2401 | 8 | # This program is distributed in the hope that it will be useful, but | ||
2402 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
2403 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
2404 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
2405 | 12 | # | ||
2406 | 13 | # You should have received a copy of the GNU General Public License along | ||
2407 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
2408 | 15 | |||
2409 | 16 | """Test the client code.""" | ||
2410 | 17 | |||
2411 | 18 | from mocker import Mocker | ||
2412 | 19 | from twisted.trial.unittest import TestCase | ||
2413 | 20 | |||
2414 | 21 | from magicicada.u1sync import client | ||
2415 | 22 | |||
2416 | 23 | |||
2417 | 24 | class SyncStorageClientTest(TestCase): | ||
2418 | 25 | """Test the SyncStorageClient.""" | ||
2419 | 26 | |||
2420 | 27 | def test_conn_made_call_parent(self): | ||
2421 | 28 | """The connectionMade method should call the parent.""" | ||
2422 | 29 | # set up everything | ||
2423 | 30 | called = [] | ||
2424 | 31 | self.patch(client.StorageClient, 'connectionMade', | ||
2425 | 32 | lambda s: called.append(True)) | ||
2426 | 33 | c = client.SyncStorageClient() | ||
2427 | 34 | mocker = Mocker() | ||
2428 | 35 | obj = mocker.mock() | ||
2429 | 36 | obj.current_protocol | ||
2430 | 37 | mocker.result(None) | ||
2431 | 38 | obj.current_protocol = c | ||
2432 | 39 | obj.observer.connected() | ||
2433 | 40 | c.factory = obj | ||
2434 | 41 | |||
2435 | 42 | # call and test | ||
2436 | 43 | with mocker: | ||
2437 | 44 | c.connectionMade() | ||
2438 | 45 | self.assertTrue(called) | ||
2439 | 46 | |||
2440 | 47 | def test_conn_lost_call_parent(self): | ||
2441 | 48 | """The connectionLost method should call the parent.""" | ||
2442 | 49 | # set up everything | ||
2443 | 50 | called = [] | ||
2444 | 51 | self.patch(client.StorageClient, 'connectionLost', | ||
2445 | 52 | lambda s, r: called.append(True)) | ||
2446 | 53 | c = client.SyncStorageClient() | ||
2447 | 54 | mocker = Mocker() | ||
2448 | 55 | obj = mocker.mock() | ||
2449 | 56 | obj.current_protocol | ||
2450 | 57 | mocker.result(None) | ||
2451 | 58 | c.factory = obj | ||
2452 | 59 | |||
2453 | 60 | # call and test | ||
2454 | 61 | with mocker: | ||
2455 | 62 | c.connectionLost() | ||
2456 | 63 | self.assertTrue(called) | ||
2457 | 0 | 64 | ||
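These tests lean on the third-party mocker library. Purely as a hedge in case that dependency is ever dropped, here is how the first test could read inside the same TestCase using mock instead (unittest.mock in the Python 3 standard library); the attribute names simply mirror the expectations recorded above:

    from mock import Mock  # unittest.mock on Python 3

    def test_conn_made_call_parent_with_mock(self):
        """Same assertion as above, using mock instead of mocker."""
        called = []
        self.patch(client.StorageClient, 'connectionMade',
                   lambda s: called.append(True))
        c = client.SyncStorageClient()
        factory = Mock()
        factory.current_protocol = None
        c.factory = factory
        c.connectionMade()
        self.assertTrue(called)
        # connectionMade should register itself and notify the observer.
        self.assertIs(factory.current_protocol, c)
        factory.observer.connected.assert_called_once_with()

If connectionMade does more than the recorded expectations suggest, this sketch would need adjusting to match.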
2458 | === added file 'magicicada/u1sync/tests/test_init.py' | |||
2459 | --- magicicada/u1sync/tests/test_init.py 1970-01-01 00:00:00 +0000 | |||
2460 | +++ magicicada/u1sync/tests/test_init.py 2018-04-20 01:22:48 +0000 | |||
2461 | @@ -0,0 +1,22 @@ | |||
2462 | 1 | # Copyright 2009 Canonical Ltd. | ||
2463 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
2464 | 3 | # | ||
2465 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
2466 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
2467 | 6 | # by the Free Software Foundation. | ||
2468 | 7 | # | ||
2469 | 8 | # This program is distributed in the hope that it will be useful, but | ||
2470 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
2471 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
2472 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
2473 | 12 | # | ||
2474 | 13 | # You should have received a copy of the GNU General Public License along | ||
2475 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
2476 | 15 | |||
2477 | 16 | """Tests for u1sync mirror init.""" | ||
2478 | 17 | |||
2479 | 18 | from unittest import TestCase | ||
2480 | 19 | |||
2481 | 20 | |||
2482 | 21 | class InitTestCase(TestCase): | ||
2483 | 22 | """Tests for u1sync --init.""" | ||
2484 | 0 | 23 | ||
2485 | === added file 'magicicada/u1sync/tests/test_merge.py' | |||
2486 | --- magicicada/u1sync/tests/test_merge.py 1970-01-01 00:00:00 +0000 | |||
2487 | +++ magicicada/u1sync/tests/test_merge.py 2018-04-20 01:22:48 +0000 | |||
2488 | @@ -0,0 +1,109 @@ | |||
2489 | 1 | # Copyright 2009 Canonical Ltd. | ||
2490 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
2491 | 3 | # | ||
2492 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
2493 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
2494 | 6 | # by the Free Software Foundation. | ||
2495 | 7 | # | ||
2496 | 8 | # This program is distributed in the hope that it will be useful, but | ||
2497 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
2498 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
2499 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
2500 | 12 | # | ||
2501 | 13 | # You should have received a copy of the GNU General Public License along | ||
2502 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
2503 | 15 | |||
2504 | 16 | """Tests for tree merging.""" | ||
2505 | 17 | |||
2506 | 18 | import uuid | ||
2507 | 19 | import os | ||
2508 | 20 | |||
2509 | 21 | from unittest import TestCase | ||
2510 | 22 | |||
2511 | 23 | from ubuntuone.storageprotocol.dircontent_pb2 import FILE, DIRECTORY | ||
2512 | 24 | |||
2513 | 25 | from magicicada.u1sync.genericmerge import MergeNode, generic_merge | ||
2514 | 26 | from magicicada.u1sync.merge import ( | ||
2515 | 27 | merge_trees, ClobberLocalMerge, ClobberServerMerge, SyncMerge) | ||
2516 | 28 | |||
2517 | 29 | |||
2518 | 30 | def accumulate_path(nodes, name, partial_parent): | ||
2519 | 31 | """Pre-merge callback which accumulates a path.""" | ||
2520 | 32 | return os.path.join(partial_parent, name) | ||
2521 | 33 | |||
2522 | 34 | |||
2523 | 35 | def capture_merge(nodes, partial_result, child_results): | ||
2524 | 36 | """Post-merge callback which accumulates merge results.""" | ||
2525 | 37 | return (nodes, partial_result, child_results) | ||
2526 | 38 | |||
2527 | 39 | |||
2528 | 40 | class MergeTest(TestCase): | ||
2529 | 41 | """Tests for generic tree merges.""" | ||
2530 | 42 | |||
2531 | 43 | def test_generic_merge(self): | ||
2532 | 44 | """Tests that generic merge behaves as expected.""" | ||
2533 | 45 | tree_a = MergeNode(DIRECTORY, children={ | ||
2534 | 46 | 'foo': MergeNode(FILE, uuid=uuid.uuid4()), | ||
2535 | 47 | 'bar': MergeNode(FILE, uuid=uuid.uuid4()), | ||
2536 | 48 | }, uuid=uuid.uuid4()) | ||
2537 | 49 | tree_b = MergeNode(DIRECTORY, children={ | ||
2538 | 50 | 'bar': MergeNode(FILE, uuid=uuid.uuid4()), | ||
2539 | 51 | 'baz': MergeNode(FILE, uuid=uuid.uuid4()), | ||
2540 | 52 | }, uuid=uuid.uuid4()) | ||
2541 | 53 | result = generic_merge(trees=[tree_a, tree_b], | ||
2542 | 54 | pre_merge=accumulate_path, | ||
2543 | 55 | post_merge=capture_merge, | ||
2544 | 56 | partial_parent="", name="ex") | ||
2545 | 57 | expected_result = ([tree_a, tree_b], "ex", { | ||
2546 | 58 | 'foo': ([tree_a.children['foo'], None], "ex/foo", {}), | ||
2547 | 59 | 'bar': ([tree_a.children['bar'], tree_b.children['bar']], | ||
2548 | 60 | "ex/bar", {}), | ||
2549 | 61 | 'baz': ([None, tree_b.children['baz']], "ex/baz", {}), | ||
2550 | 62 | }) | ||
2551 | 63 | self.assertEqual(expected_result, result) | ||
2552 | 64 | |||
2553 | 65 | def test_clobber(self): | ||
2554 | 66 | """Tests clobbering merges.""" | ||
2555 | 67 | server_tree = MergeNode(DIRECTORY, children={ | ||
2556 | 68 | 'foo': MergeNode(FILE, content_hash="dummy:abc"), | ||
2557 | 69 | 'bar': MergeNode(FILE, content_hash="dummy:xyz"), | ||
2558 | 70 | 'baz': MergeNode(FILE, content_hash="dummy:aaa"), | ||
2559 | 71 | }) | ||
2560 | 72 | local_tree = MergeNode(DIRECTORY, children={ | ||
2561 | 73 | 'foo': MergeNode(FILE, content_hash="dummy:cde"), | ||
2562 | 74 | 'bar': MergeNode(FILE, content_hash="dummy:zyx"), | ||
2563 | 75 | 'hoge': MergeNode(FILE, content_hash="dummy:bbb"), | ||
2564 | 76 | }) | ||
2565 | 77 | result_tree = merge_trees(local_tree, local_tree, | ||
2566 | 78 | server_tree, server_tree, | ||
2567 | 79 | ClobberServerMerge()) | ||
2568 | 80 | self.assertEqual(local_tree, result_tree) | ||
2569 | 81 | result_tree = merge_trees(local_tree, local_tree, | ||
2570 | 82 | server_tree, server_tree, | ||
2571 | 83 | ClobberLocalMerge()) | ||
2572 | 84 | self.assertEqual(server_tree, result_tree) | ||
2573 | 85 | |||
2574 | 86 | def test_sync(self): | ||
2575 | 87 | """Test sync merges.""" | ||
2576 | 88 | server_tree = MergeNode(DIRECTORY, children={ | ||
2577 | 89 | 'bar': MergeNode(FILE, content_hash="dummy:xyz"), | ||
2578 | 90 | 'baz': MergeNode(FILE, content_hash="dummy:aaa"), | ||
2579 | 91 | 'foo': MergeNode(FILE, content_hash="dummy:abc"), | ||
2580 | 92 | }) | ||
2581 | 93 | old_server_tree = MergeNode(DIRECTORY, children={}) | ||
2582 | 94 | local_tree = MergeNode(DIRECTORY, children={ | ||
2583 | 95 | 'bar': MergeNode(FILE, content_hash="dummy:xyz"), | ||
2584 | 96 | 'foo': MergeNode(FILE, content_hash="dummy:abc"), | ||
2585 | 97 | 'hoge': MergeNode(FILE, content_hash="dummy:bbb"), | ||
2586 | 98 | }) | ||
2587 | 99 | old_local_tree = MergeNode(DIRECTORY, children={}) | ||
2588 | 100 | expected_tree = MergeNode(DIRECTORY, children={ | ||
2589 | 101 | 'bar': MergeNode(FILE, content_hash="dummy:xyz"), | ||
2590 | 102 | 'baz': MergeNode(FILE, content_hash="dummy:aaa"), | ||
2591 | 103 | 'foo': MergeNode(FILE, content_hash="dummy:abc"), | ||
2592 | 104 | 'hoge': MergeNode(FILE, content_hash="dummy:bbb"), | ||
2593 | 105 | }) | ||
2594 | 106 | result_tree = merge_trees(old_local_tree, local_tree, | ||
2595 | 107 | old_server_tree, server_tree, | ||
2596 | 108 | SyncMerge()) | ||
2597 | 109 | self.assertEqual(result_tree, expected_tree) | ||
2598 | 0 | 110 | ||
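The expected_result in test_generic_merge pins down the generic_merge contract: the trees are walked in parallel over the union of child names (a child absent from one tree shows up as None in the nodes list), pre_merge runs top-down with (nodes, name, partial_parent), and post_merge runs bottom-up with the child results keyed by name. A throwaway sketch, not in the branch, that counts nodes using that same contract; it assumes MergeNode can be built with no uuid or content_hash, since both kwargs appear optional in the tests above:

    from ubuntuone.storageprotocol.dircontent_pb2 import FILE, DIRECTORY
    from magicicada.u1sync.genericmerge import MergeNode, generic_merge

    def depth_down(nodes, name, partial_parent):
        return partial_parent + 1          # pre-merge: depth of this node

    def count_up(nodes, partial_result, child_results):
        return 1 + sum(child_results.values())   # post-merge: subtree size

    tree = MergeNode(DIRECTORY, children={'a': MergeNode(FILE)})
    total = generic_merge(trees=[tree], pre_merge=depth_down,
                          post_merge=count_up, partial_parent=0,
                          name='root')
    assert total == 2   # the root directory plus 'a'

merge_trees then builds on this walk: as test_sync shows, it takes the old and current local trees plus the old and current server trees, and the merge strategy object decides which side wins at each node.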
2599 | === added file 'magicicada/u1sync/utils.py' | |||
2600 | --- magicicada/u1sync/utils.py 1970-01-01 00:00:00 +0000 | |||
2601 | +++ magicicada/u1sync/utils.py 2018-04-20 01:22:48 +0000 | |||
2602 | @@ -0,0 +1,50 @@ | |||
2603 | 1 | # Copyright 2009 Canonical Ltd. | ||
2604 | 2 | # Copyright 2015-2018 Chicharreros (https://launchpad.net/~chicharreros) | ||
2605 | 3 | # | ||
2606 | 4 | # This program is free software: you can redistribute it and/or modify it | ||
2607 | 5 | # under the terms of the GNU General Public License version 3, as published | ||
2608 | 6 | # by the Free Software Foundation. | ||
2609 | 7 | # | ||
2610 | 8 | # This program is distributed in the hope that it will be useful, but | ||
2611 | 9 | # WITHOUT ANY WARRANTY; without even the implied warranties of | ||
2612 | 10 | # MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR | ||
2613 | 11 | # PURPOSE. See the GNU General Public License for more details. | ||
2614 | 12 | # | ||
2615 | 13 | # You should have received a copy of the GNU General Public License along | ||
2616 | 14 | # with this program. If not, see <http://www.gnu.org/licenses/>. | ||
2617 | 15 | |||
2618 | 16 | """Miscellaneous utility functions.""" | ||
2619 | 17 | |||
2620 | 18 | import os | ||
2621 | 19 | |||
2622 | 20 | from errno import EEXIST, ENOENT | ||
2623 | 21 | |||
2624 | 22 | from magicicada.u1sync.constants import METADATA_DIR_NAME, SPECIAL_FILE_RE | ||
2625 | 23 | |||
2626 | 24 | |||
2627 | 25 | def should_sync(filename): | ||
2628 | 26 | """Returns True if the filename should be synced. | ||
2629 | 27 | |||
2630 | 28 | @param filename: a unicode filename | ||
2631 | 29 | |||
2632 | 30 | """ | ||
2633 | 31 | return (filename != METADATA_DIR_NAME and | ||
2634 | 32 | not SPECIAL_FILE_RE.match(filename)) | ||
2635 | 33 | |||
2636 | 34 | |||
2637 | 35 | def safe_mkdir(path): | ||
2638 | 36 | """Creates a directory if it does not already exist.""" | ||
2639 | 37 | try: | ||
2640 | 38 | os.mkdir(path) | ||
2641 | 39 | except OSError as e: | ||
2642 | 40 | if e.errno != EEXIST: | ||
2643 | 41 | raise | ||
2644 | 42 | |||
2645 | 43 | |||
2646 | 44 | def safe_unlink(path): | ||
2647 | 45 | """Unlinks a file if it exists.""" | ||
2648 | 46 | try: | ||
2649 | 47 | os.remove(path) | ||
2650 | 48 | except OSError as e: | ||
2651 | 49 | if e.errno != ENOENT: | ||
2652 | 50 | raise |
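To close, a short sketch (not in the diff) of these helpers in use when preparing a local mirror; mirror_dir is a hypothetical path:

    import os

    from magicicada.u1sync.utils import safe_mkdir, safe_unlink, should_sync

    mirror_dir = '/tmp/u1sync-mirror'          # hypothetical location
    safe_mkdir(mirror_dir)                     # quiet if it already exists
    safe_unlink(os.path.join(mirror_dir, 'stale.tmp'))  # quiet if absent
    to_sync = [n for n in os.listdir(mirror_dir) if should_sync(n)]

The EEXIST/ENOENT filtering keeps these idempotent, so the sync code can call them unconditionally instead of checking the filesystem first.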
Awesome!