Merge lp:~fo0bar/turku/turku-storage-log-output into lp:turku
Status: Superseded
Proposed branch: lp:~fo0bar/turku/turku-storage-log-output
Merge into: lp:turku
Diff against target: 1257 lines (+557/-347), 10 files modified:
- .bzrignore (+59/-2)
- MANIFEST.in (+10/-0)
- Makefile (+28/-0)
- README (+4/-0)
- setup.py (+11/-12)
- tests/test_stub.py (+8/-0)
- tox.ini (+38/-0)
- turku_storage/ping.py (+216/-166)
- turku_storage/update_config.py (+57/-48)
- turku_storage/utils.py (+126/-119)
To merge this branch: bzr merge lp:~fo0bar/turku/turku-storage-log-output
Related bugs: none
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Canonical IS Reviewers | Pending | | |
Review via email: mp+386135@code.launchpad.net
This proposal has been superseded by a proposal from 2020-06-21.
Commit message
Log run output directly to logger
Description of the change
Python 2.7 made real-time logging of subprocess piped output difficult
(and actually a bit dangerous). But now that Turku is Python 3, Popen's
context manager will DTRT and clean up after itself, so we don't need an
intermediary.
Note that this removes return_output from run_logging(), but
return_output was not actually used.
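For reference, here is a minimal self-contained sketch of the pattern (the real change is in run_logging() in the preview diff below; the module-level logger setup here is simplified for illustration):

    import logging
    import subprocess

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger("sketch")


    def run_logging(args, loglevel=logging.DEBUG, cwd=None, env=None):
        """Run a command, logging each line of combined stdout/stderr as it is read."""
        logger.log(loglevel, "Running: %s" % repr(args))
        # Popen's context manager (Python 3.2+) waits for the process and closes
        # its pipes on exit, so no temporary-file intermediary is needed.
        with subprocess.Popen(
            args,
            cwd=cwd,
            env=env,
            encoding="UTF-8",
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,  # interleave stderr with stdout
        ) as proc:
            for line in iter(proc.stdout.readline, ""):  # "" means EOF in text mode
                logger.log(loglevel, line.rstrip("\n"))
        logger.log(loglevel, "Return code: %d" % proc.returncode)
        return proc.returncode

For example, run_logging(["rsync", "--version"]) would log each line of rsync's output as it arrives and return its exit code.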
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote:

This merge proposal is being monitored by mergebot. Change the status to Approved to merge.

Unmerged revisions
- 39. By Ryan Finnie

  Mega-noop cleanup

  - Add stub README
  - Sort imports
  - Create MANIFEST.in so `setup.py sdist` produces usable tarballs
  - Create stub tests
  - Add tox.ini
  - Add blank requirements.txt
  - Add Makefile
  - make black
  - Update .bzrignore
  - Clean up flake8:
    - setup.py: 'sys' imported but unused
    - ping.py: 're' imported but unused
    - ping.py: ambiguous variable name 'l'
    - utils.py: invalid escape sequence '\d'

- 40. By Ryan Finnie

  Log run output directly to logger
  Python 2.7 made real-time logging of subprocess piped output difficult
  (and actually a bit dangerous). But now that Turku is Python 3, Popen's
  context manager will DTRT and clean up after itself, so we don't need an
  intermediary.

  Note that this removes return_output from run_logging(), but
  return_output was not actually used.
Preview Diff
1 | === modified file '.bzrignore' |
2 | --- .bzrignore 2015-02-25 01:17:29 +0000 |
3 | +++ .bzrignore 2020-06-21 22:41:04 +0000 |
4 | @@ -1,4 +1,61 @@ |
5 | +MANIFEST |
6 | +.pybuild/ |
7 | +.pytest_cache/ |
8 | + |
9 | +# Byte-compiled / optimized / DLL files |
10 | +__pycache__/ |
11 | +*.py[cod] |
12 | + |
13 | +# C extensions |
14 | +*.so |
15 | + |
16 | +# Distribution / packaging |
17 | +.Python |
18 | +env/ |
19 | build/ |
20 | +develop-eggs/ |
21 | dist/ |
22 | -*.pyc |
23 | -MANIFEST |
24 | +downloads/ |
25 | +eggs/ |
26 | +.eggs/ |
27 | +lib/ |
28 | +lib64/ |
29 | +parts/ |
30 | +sdist/ |
31 | +var/ |
32 | +*.egg-info/ |
33 | +.installed.cfg |
34 | +*.egg |
35 | + |
36 | +# PyInstaller |
37 | +# Usually these files are written by a python script from a template |
38 | +# before PyInstaller builds the exe, so as to inject date/other infos into it. |
39 | +*.manifest |
40 | +*.spec |
41 | + |
42 | +# Installer logs |
43 | +pip-log.txt |
44 | +pip-delete-this-directory.txt |
45 | + |
46 | +# Unit test / coverage reports |
47 | +htmlcov/ |
48 | +.tox/ |
49 | +.coverage |
50 | +.coverage.* |
51 | +.cache |
52 | +nosetests.xml |
53 | +coverage.xml |
54 | +*,cover |
55 | + |
56 | +# Translations |
57 | +*.mo |
58 | +*.pot |
59 | + |
60 | +# Django stuff: |
61 | +*.log |
62 | + |
63 | +# Sphinx documentation |
64 | +docs/_build/ |
65 | + |
66 | +# PyBuilder |
67 | +target/ |
68 | |
69 | === added file 'MANIFEST.in' |
70 | --- MANIFEST.in 1970-01-01 00:00:00 +0000 |
71 | +++ MANIFEST.in 2020-06-21 22:41:04 +0000 |
72 | @@ -0,0 +1,10 @@ |
73 | +include Makefile |
74 | +include requirements.txt |
75 | +include tests/*.py |
76 | +include tox.ini |
77 | +include turku-storage.cron |
78 | +include turku-storage.logrotate |
79 | +include turku-storage-ping |
80 | +include turku-storage-update-config |
81 | +include turku-storage-update-config.service |
82 | +include turku-storage-update-config.timer |
83 | |
84 | === added file 'Makefile' |
85 | --- Makefile 1970-01-01 00:00:00 +0000 |
86 | +++ Makefile 2020-06-21 22:41:04 +0000 |
87 | @@ -0,0 +1,28 @@ |
88 | +PYTHON := python3 |
89 | + |
90 | +all: build |
91 | + |
92 | +build: |
93 | + $(PYTHON) setup.py build |
94 | + |
95 | +lint: |
96 | + $(PYTHON) -mtox -e flake8 |
97 | + |
98 | +test: |
99 | + $(PYTHON) -mtox |
100 | + |
101 | +test-quick: |
102 | + $(PYTHON) -mtox -e black,flake8,pytest-quick |
103 | + |
104 | +black-check: |
105 | + $(PYTHON) -mtox -e black |
106 | + |
107 | +black: |
108 | + $(PYTHON) -mblack $(CURDIR) |
109 | + |
110 | +install: build |
111 | + $(PYTHON) setup.py install |
112 | + |
113 | +clean: |
114 | + $(PYTHON) setup.py clean |
115 | + $(RM) -r build MANIFEST |
116 | |
117 | === added file 'README' |
118 | --- README 1970-01-01 00:00:00 +0000 |
119 | +++ README 2020-06-21 22:41:04 +0000 |
120 | @@ -0,0 +1,4 @@ |
121 | +Turku backups - storage agent |
122 | +Copyright 2015-2020 Canonical Ltd. |
123 | + |
124 | +https://launchpad.net/turku |
125 | |
126 | === added file 'requirements.txt' |
127 | === modified file 'setup.py' |
128 | --- setup.py 2019-05-22 17:35:00 +0000 |
129 | +++ setup.py 2020-06-21 22:41:04 +0000 |
130 | @@ -15,22 +15,21 @@ |
131 | # You should have received a copy of the GNU General Public License along with |
132 | # this program. If not, see <http://www.gnu.org/licenses/>. |
133 | |
134 | -import sys |
135 | from setuptools import setup |
136 | |
137 | |
138 | setup( |
139 | - name='turku_storage', |
140 | - description='Turku backups - storage units', |
141 | - author='Ryan Finnie', |
142 | - author_email='ryan.finnie@canonical.com', |
143 | - url='https://launchpad.net/turku', |
144 | - python_requires='~=3.4', |
145 | - packages=['turku_storage'], |
146 | + name="turku_storage", |
147 | + description="Turku backups - storage units", |
148 | + author="Ryan Finnie", |
149 | + author_email="ryan.finnie@canonical.com", |
150 | + url="https://launchpad.net/turku", |
151 | + python_requires="~=3.4", |
152 | + packages=["turku_storage"], |
153 | entry_points={ |
154 | - 'console_scripts': [ |
155 | - 'turku-storage-ping = turku_storage.ping:main', |
156 | - 'turku-storage-update-config = turku_storage.update_config:main', |
157 | - ], |
158 | + "console_scripts": [ |
159 | + "turku-storage-ping = turku_storage.ping:main", |
160 | + "turku-storage-update-config = turku_storage.update_config:main", |
161 | + ] |
162 | }, |
163 | ) |
164 | |
165 | === added directory 'tests' |
166 | === added file 'tests/__init__.py' |
167 | === added file 'tests/test_stub.py' |
168 | --- tests/test_stub.py 1970-01-01 00:00:00 +0000 |
169 | +++ tests/test_stub.py 2020-06-21 22:41:04 +0000 |
170 | @@ -0,0 +1,8 @@ |
171 | +import unittest |
172 | +import warnings |
173 | + |
174 | + |
175 | +class TestStub(unittest.TestCase): |
176 | + def test_stub(self): |
177 | + # pytest doesn't like a tests/ with no tests |
178 | + warnings.warn("Remove this file once unit tests are added") |
179 | |
180 | === added file 'tox.ini' |
181 | --- tox.ini 1970-01-01 00:00:00 +0000 |
182 | +++ tox.ini 2020-06-21 22:41:04 +0000 |
183 | @@ -0,0 +1,38 @@ |
184 | +[tox] |
185 | +envlist = black, flake8, pytest |
186 | + |
187 | +[testenv] |
188 | +basepython = python |
189 | + |
190 | +[testenv:black] |
191 | +commands = python -mblack --check . |
192 | +deps = black |
193 | + |
194 | +[testenv:flake8] |
195 | +commands = python -mflake8 |
196 | +deps = flake8 |
197 | + |
198 | +[testenv:pytest] |
199 | +commands = python -mpytest --cov=turku_storage --cov-report=term-missing |
200 | +deps = pytest |
201 | + pytest-cov |
202 | + -r{toxinidir}/requirements.txt |
203 | + |
204 | +[testenv:pytest-quick] |
205 | +commands = python -mpytest -m "not slow" |
206 | +deps = pytest |
207 | + -r{toxinidir}/requirements.txt |
208 | + |
209 | +[flake8] |
210 | +exclude = |
211 | + .git, |
212 | + __pycache__, |
213 | + .tox, |
214 | +# TODO: remove C901 once complexity is reduced |
215 | +ignore = C901,E203,E231,W503 |
216 | +max-line-length = 120 |
217 | +max-complexity = 10 |
218 | + |
219 | +[pytest] |
220 | +markers = |
221 | + slow |
222 | |
223 | === modified file 'turku_storage/ping.py' |
224 | --- turku_storage/ping.py 2020-03-24 23:42:59 +0000 |
225 | +++ turku_storage/ping.py 2020-06-21 22:41:04 +0000 |
226 | @@ -13,223 +13,258 @@ |
227 | # You should have received a copy of the GNU General Public License along with |
228 | # this program. If not, see <http://www.gnu.org/licenses/>. |
229 | |
230 | +import datetime |
231 | import json |
232 | +import logging |
233 | import os |
234 | +import subprocess |
235 | import sys |
236 | -import subprocess |
237 | -import datetime |
238 | -import re |
239 | -import logging |
240 | import tempfile |
241 | import time |
242 | -from .utils import load_config, acquire_lock, api_call, random_weighted, get_latest_snapshot, get_snapshots_to_delete |
243 | - |
244 | - |
245 | -class StoragePing(): |
246 | - def __init__(self, uuid, config_dir='/etc/turku-storage'): |
247 | + |
248 | +from .utils import ( |
249 | + load_config, |
250 | + acquire_lock, |
251 | + api_call, |
252 | + random_weighted, |
253 | + get_latest_snapshot, |
254 | + get_snapshots_to_delete, |
255 | +) |
256 | + |
257 | + |
258 | +class StoragePing: |
259 | + def __init__(self, uuid, config_dir="/etc/turku-storage"): |
260 | self.arg_uuid = uuid |
261 | |
262 | self.config = load_config(config_dir) |
263 | - for k in ('name', 'secret'): |
264 | + for k in ("name", "secret"): |
265 | if k not in self.config: |
266 | - raise Exception('Incomplete config') |
267 | + raise Exception("Incomplete config") |
268 | |
269 | - self.logger = logging.getLogger(self.config['name']) |
270 | + self.logger = logging.getLogger(self.config["name"]) |
271 | self.logger.setLevel(logging.DEBUG) |
272 | |
273 | self.lh_console = logging.StreamHandler() |
274 | - self.lh_console_formatter = logging.Formatter('[%(asctime)s %(name)s] %(levelname)s: %(message)s') |
275 | + self.lh_console_formatter = logging.Formatter( |
276 | + "[%(asctime)s %(name)s] %(levelname)s: %(message)s" |
277 | + ) |
278 | self.lh_console.setFormatter(self.lh_console_formatter) |
279 | self.lh_console.setLevel(logging.ERROR) |
280 | self.logger.addHandler(self.lh_console) |
281 | |
282 | - self.lh_master = logging.FileHandler(self.config['log_file']) |
283 | - self.lh_master_formatter = logging.Formatter('[%(asctime)s ' + self.arg_uuid + ' %(process)s] %(levelname)s: %(message)s') |
284 | + self.lh_master = logging.FileHandler(self.config["log_file"]) |
285 | + self.lh_master_formatter = logging.Formatter( |
286 | + "[%(asctime)s " + self.arg_uuid + " %(process)s] %(levelname)s: %(message)s" |
287 | + ) |
288 | self.lh_master.setFormatter(self.lh_master_formatter) |
289 | self.lh_master.setLevel(logging.DEBUG) |
290 | self.logger.addHandler(self.lh_master) |
291 | |
292 | - def run_logging(self, args, loglevel=logging.DEBUG, cwd=None, env=None, return_output=False): |
293 | - self.logger.log(loglevel, 'Running: %s' % repr(args)) |
294 | - t = tempfile.NamedTemporaryFile(mode='w+', encoding='UTF-8') |
295 | - self.logger.log(loglevel, '(Command output is in %s until written here at the end)' % t.name) |
296 | - returncode = subprocess.call(args, cwd=cwd, env=env, stdout=t, stderr=t) |
297 | - t.flush() |
298 | - t.seek(0) |
299 | - out = '' |
300 | - for line in t.readlines(): |
301 | - if return_output: |
302 | - out = out + line |
303 | - self.logger.log(loglevel, line.rstrip('\n')) |
304 | - t.close() |
305 | - self.logger.log(loglevel, 'Return code: %d' % returncode) |
306 | - if return_output: |
307 | - return (returncode, out) |
308 | - else: |
309 | - return returncode |
310 | + def run_logging(self, args, loglevel=logging.DEBUG, cwd=None, env=None): |
311 | + self.logger.log(loglevel, "Running: %s" % repr(args)) |
312 | + with subprocess.Popen( |
313 | + args, |
314 | + cwd=cwd, |
315 | + env=env, |
316 | + encoding="UTF-8", |
317 | + stdout=subprocess.PIPE, |
318 | + stderr=subprocess.STDOUT, |
319 | + ) as proc: |
320 | + with proc.stdout as stdout: |
321 | + for line in iter(stdout.readline, ""): |
322 | + self.logger.log(loglevel, line.rstrip("\n")) |
323 | + self.logger.log(loglevel, "Return code: %d" % proc.returncode) |
324 | + return proc.returncode |
325 | |
326 | def process_ping(self): |
327 | - jsonin = '' |
328 | + jsonin = "" |
329 | while True: |
330 | - l = sys.stdin.readline() |
331 | - if (l == '.\n') or (not l): |
332 | + line = sys.stdin.readline() |
333 | + if (line == ".\n") or (not line): |
334 | break |
335 | - jsonin = jsonin + l |
336 | + jsonin = jsonin + line |
337 | try: |
338 | j = json.loads(jsonin) |
339 | except ValueError: |
340 | - raise Exception('Invalid input JSON') |
341 | - |
342 | - lock = acquire_lock(os.path.join(self.config['lock_dir'], 'turku-storage-ping-%s.lock' % self.arg_uuid)) |
343 | - |
344 | - if 'port' not in j: |
345 | - raise Exception('Port required') |
346 | - forwarded_port = int(j['port']) |
347 | + raise Exception("Invalid input JSON") |
348 | + |
349 | + lock = acquire_lock( |
350 | + os.path.join( |
351 | + self.config["lock_dir"], "turku-storage-ping-%s.lock" % self.arg_uuid |
352 | + ) |
353 | + ) |
354 | + |
355 | + if "port" not in j: |
356 | + raise Exception("Port required") |
357 | + forwarded_port = int(j["port"]) |
358 | |
359 | verbose = False |
360 | - if 'verbose' in j and j['verbose']: |
361 | + if "verbose" in j and j["verbose"]: |
362 | verbose = True |
363 | if verbose: |
364 | self.lh_console.setLevel(logging.INFO) |
365 | |
366 | - if 'action' in j and j['action'] == 'restore': |
367 | - self.logger.info('Restore mode active on port %d. Good luck.' % forwarded_port) |
368 | - subprocess.call(['/bin/cat']) |
369 | + if "action" in j and j["action"] == "restore": |
370 | + self.logger.info( |
371 | + "Restore mode active on port %d. Good luck." % forwarded_port |
372 | + ) |
373 | + subprocess.call(["/bin/cat"]) |
374 | return |
375 | |
376 | api_out = { |
377 | - 'storage': { |
378 | - 'name': self.config['name'], |
379 | - 'secret': self.config['secret'], |
380 | - }, |
381 | - 'machine': { |
382 | - 'uuid': self.arg_uuid, |
383 | - }, |
384 | + "storage": {"name": self.config["name"], "secret": self.config["secret"]}, |
385 | + "machine": {"uuid": self.arg_uuid}, |
386 | } |
387 | - api_reply = api_call(self.config['api_url'], 'storage_ping_checkin', api_out) |
388 | + api_reply = api_call(self.config["api_url"], "storage_ping_checkin", api_out) |
389 | |
390 | - machine = api_reply['machine'] |
391 | - scheduled_sources = machine['scheduled_sources'] |
392 | + machine = api_reply["machine"] |
393 | + scheduled_sources = machine["scheduled_sources"] |
394 | if len(scheduled_sources) > 0: |
395 | - self.logger.info('Sources to back up: %s' % ', '.join([s for s in scheduled_sources])) |
396 | + self.logger.info( |
397 | + "Sources to back up: %s" % ", ".join([s for s in scheduled_sources]) |
398 | + ) |
399 | else: |
400 | - self.logger.info('No sources to back up now') |
401 | + self.logger.info("No sources to back up now") |
402 | for source_name in scheduled_sources: |
403 | time_begin = time.time() |
404 | s = scheduled_sources[source_name] |
405 | source_username = None |
406 | source_password = None |
407 | - if ('sources' in j) and (source_name in j['sources']): |
408 | - if ('username' in j['sources'][source_name]) and j['sources'][source_name]['username']: |
409 | - source_username = j['sources'][source_name]['username'] |
410 | - if ('password' in j['sources'][source_name]) and j['sources'][source_name]['password']: |
411 | - source_password = j['sources'][source_name]['password'] |
412 | + if ("sources" in j) and (source_name in j["sources"]): |
413 | + if ("username" in j["sources"][source_name]) and j["sources"][ |
414 | + source_name |
415 | + ]["username"]: |
416 | + source_username = j["sources"][source_name]["username"] |
417 | + if ("password" in j["sources"][source_name]) and j["sources"][ |
418 | + source_name |
419 | + ]["password"]: |
420 | + source_password = j["sources"][source_name]["password"] |
421 | else: |
422 | - if ('username' in s) and s['username']: |
423 | - source_username = s['username'] |
424 | - if ('password' in s) and s['password']: |
425 | - source_password = s['password'] |
426 | + if ("username" in s) and s["username"]: |
427 | + source_username = s["username"] |
428 | + if ("password" in s) and s["password"]: |
429 | + source_password = s["password"] |
430 | if not (source_username and source_password): |
431 | - self.logger.error('Cannot find authentication for source "%s"' % source_name) |
432 | + self.logger.error( |
433 | + 'Cannot find authentication for source "%s"' % source_name |
434 | + ) |
435 | continue |
436 | - snapshot_mode = self.config['snapshot_mode'] |
437 | - if snapshot_mode == 'link-dest': |
438 | - if 'large_rotating_files' in s and s['large_rotating_files']: |
439 | - snapshot_mode = 'none' |
440 | - if 'large_modifying_files' in s and s['large_modifying_files']: |
441 | - snapshot_mode = 'none' |
442 | - if 'snapshot_mode' in s and s['snapshot_mode']: |
443 | - snapshot_mode = s['snapshot_mode'] |
444 | + snapshot_mode = self.config["snapshot_mode"] |
445 | + if snapshot_mode == "link-dest": |
446 | + if "large_rotating_files" in s and s["large_rotating_files"]: |
447 | + snapshot_mode = "none" |
448 | + if "large_modifying_files" in s and s["large_modifying_files"]: |
449 | + snapshot_mode = "none" |
450 | + if "snapshot_mode" in s and s["snapshot_mode"]: |
451 | + snapshot_mode = s["snapshot_mode"] |
452 | |
453 | - var_machines = os.path.join(self.config['var_dir'], 'machines') |
454 | + var_machines = os.path.join(self.config["var_dir"], "machines") |
455 | if not os.path.exists(var_machines): |
456 | os.makedirs(var_machines) |
457 | |
458 | - if os.path.islink(os.path.join(var_machines, machine['uuid'])): |
459 | - machine_dir = os.readlink(os.path.join(var_machines, machine['uuid'])) |
460 | + if os.path.islink(os.path.join(var_machines, machine["uuid"])): |
461 | + machine_dir = os.readlink(os.path.join(var_machines, machine["uuid"])) |
462 | else: |
463 | weights = {} |
464 | - for volume_name in self.config['volumes']: |
465 | - v = self.config['volumes'][volume_name] |
466 | + for volume_name in self.config["volumes"]: |
467 | + v = self.config["volumes"][volume_name] |
468 | try: |
469 | - sv = os.statvfs(v['path']) |
470 | + sv = os.statvfs(v["path"]) |
471 | except OSError: |
472 | continue |
473 | - s_t = (sv.f_bsize * sv.f_blocks / 1048576) |
474 | - s_a = (sv.f_bsize * sv.f_bavail / 1048576) |
475 | + s_t = sv.f_bsize * sv.f_blocks / 1048576 |
476 | + s_a = sv.f_bsize * sv.f_bavail / 1048576 |
477 | pct_used = (1.0 - float(s_a) / float(s_t)) * 100.0 |
478 | - if (not v['accept_new']) or (pct_used > v['accept_new_high_water_pct']): |
479 | + if (not v["accept_new"]) or ( |
480 | + pct_used > v["accept_new_high_water_pct"] |
481 | + ): |
482 | continue |
483 | weights[volume_name] = s_a |
484 | if len(weights) == 0: |
485 | - raise Exception('Cannot find a suitable storage directory') |
486 | + raise Exception("Cannot find a suitable storage directory") |
487 | chosen_volume = random_weighted(weights) |
488 | if not chosen_volume: |
489 | - raise Exception('Cannot find a suitable storage directory') |
490 | - machine_dir = os.path.join(self.config['volumes'][chosen_volume]['path'], machine['uuid']) |
491 | - os.symlink(machine_dir, os.path.join(var_machines, machine['uuid'])) |
492 | + raise Exception("Cannot find a suitable storage directory") |
493 | + machine_dir = os.path.join( |
494 | + self.config["volumes"][chosen_volume]["path"], machine["uuid"] |
495 | + ) |
496 | + os.symlink(machine_dir, os.path.join(var_machines, machine["uuid"])) |
497 | if not os.path.exists(machine_dir): |
498 | os.makedirs(machine_dir) |
499 | |
500 | - machine_symlink = machine['unit_name'] |
501 | - if 'service_name' in machine and machine['service_name']: |
502 | - machine_symlink = machine['service_name'] + '-' + machine_symlink |
503 | - if 'environment_name' in machine and machine['environment_name']: |
504 | - machine_symlink = machine['environment_name'] + '-' + machine_symlink |
505 | - machine_symlink = machine_symlink.replace('/', '_') |
506 | + machine_symlink = machine["unit_name"] |
507 | + if "service_name" in machine and machine["service_name"]: |
508 | + machine_symlink = machine["service_name"] + "-" + machine_symlink |
509 | + if "environment_name" in machine and machine["environment_name"]: |
510 | + machine_symlink = machine["environment_name"] + "-" + machine_symlink |
511 | + machine_symlink = machine_symlink.replace("/", "_") |
512 | if os.path.islink(os.path.join(var_machines, machine_symlink)): |
513 | os.unlink(os.path.join(var_machines, machine_symlink)) |
514 | if not os.path.exists(os.path.join(var_machines, machine_symlink)): |
515 | - os.symlink(machine['uuid'], os.path.join(var_machines, machine_symlink)) |
516 | - |
517 | - self.logger.info('Begin: %s %s' % (machine['unit_name'], source_name)) |
518 | - |
519 | - rsync_args = ['rsync', '--archive', '--compress', '--numeric-ids', '--delete', '--delete-excluded'] |
520 | - rsync_args.append('--verbose') |
521 | + os.symlink(machine["uuid"], os.path.join(var_machines, machine_symlink)) |
522 | + |
523 | + self.logger.info("Begin: %s %s" % (machine["unit_name"], source_name)) |
524 | + |
525 | + rsync_args = [ |
526 | + "rsync", |
527 | + "--archive", |
528 | + "--compress", |
529 | + "--numeric-ids", |
530 | + "--delete", |
531 | + "--delete-excluded", |
532 | + ] |
533 | + rsync_args.append("--verbose") |
534 | |
535 | dest_dir = os.path.join(machine_dir, source_name) |
536 | if not os.path.exists(dest_dir): |
537 | os.makedirs(dest_dir) |
538 | - if snapshot_mode == 'link-dest': |
539 | - snapshot_dir = os.path.join(machine_dir, '%s.snapshots' % source_name) |
540 | + if snapshot_mode == "link-dest": |
541 | + snapshot_dir = os.path.join(machine_dir, "%s.snapshots" % source_name) |
542 | if not os.path.exists(snapshot_dir): |
543 | os.makedirs(snapshot_dir) |
544 | - dirs = [d for d in os.listdir(snapshot_dir) if os.path.isdir(os.path.join(snapshot_dir, d))] |
545 | + dirs = [ |
546 | + d |
547 | + for d in os.listdir(snapshot_dir) |
548 | + if os.path.isdir(os.path.join(snapshot_dir, d)) |
549 | + ] |
550 | base_snapshot = get_latest_snapshot(dirs) |
551 | if base_snapshot: |
552 | - rsync_args.append('--link-dest=%s' % os.path.join(snapshot_dir, base_snapshot)) |
553 | + rsync_args.append( |
554 | + "--link-dest=%s" % os.path.join(snapshot_dir, base_snapshot) |
555 | + ) |
556 | else: |
557 | - rsync_args.append('--inplace') |
558 | - if self.config['preserve_hard_links']: |
559 | - rsync_args.append('--hard-links') |
560 | + rsync_args.append("--inplace") |
561 | + if self.config["preserve_hard_links"]: |
562 | + rsync_args.append("--hard-links") |
563 | |
564 | filter_file = None |
565 | - filter_data = '' |
566 | - if 'filter' in s: |
567 | - for filter in s['filter']: |
568 | - if filter.startswith('merge') or filter.startswith(':'): |
569 | + filter_data = "" |
570 | + if "filter" in s: |
571 | + for filter in s["filter"]: |
572 | + if filter.startswith("merge") or filter.startswith(":"): |
573 | # Do not allow local merges |
574 | continue |
575 | - filter_data += '%s\n' % filter |
576 | - if 'exclude' in s: |
577 | - for exclude in s['exclude']: |
578 | - filter_data += '- %s\n' % exclude |
579 | + filter_data += "%s\n" % filter |
580 | + if "exclude" in s: |
581 | + for exclude in s["exclude"]: |
582 | + filter_data += "- %s\n" % exclude |
583 | if filter_data: |
584 | - filter_file = tempfile.NamedTemporaryFile(mode='w+', encoding='UTF-8') |
585 | + filter_file = tempfile.NamedTemporaryFile(mode="w+", encoding="UTF-8") |
586 | filter_file.write(filter_data) |
587 | filter_file.flush() |
588 | - rsync_args.append('--filter=merge %s' % filter_file.name) |
589 | - |
590 | - if 'bwlimit' in s and s['bwlimit']: |
591 | - rsync_args.append('--bwlimit=%s' % s['bwlimit']) |
592 | - |
593 | - rsync_args.append('rsync://%s@127.0.0.1:%d/%s/' % (source_username, forwarded_port, source_name)) |
594 | - |
595 | - rsync_args.append('%s/' % dest_dir) |
596 | - |
597 | - rsync_env = { |
598 | - 'RSYNC_PASSWORD': source_password |
599 | - } |
600 | + rsync_args.append("--filter=merge %s" % filter_file.name) |
601 | + |
602 | + if "bwlimit" in s and s["bwlimit"]: |
603 | + rsync_args.append("--bwlimit=%s" % s["bwlimit"]) |
604 | + |
605 | + rsync_args.append( |
606 | + "rsync://%s@127.0.0.1:%d/%s/" |
607 | + % (source_username, forwarded_port, source_name) |
608 | + ) |
609 | + |
610 | + rsync_args.append("%s/" % dest_dir) |
611 | + |
612 | + rsync_env = {"RSYNC_PASSWORD": source_password} |
613 | returncode = self.run_logging(rsync_args, env=rsync_env) |
614 | if returncode in (0, 24): |
615 | success = True |
616 | @@ -241,52 +276,66 @@ |
617 | snapshot_name = None |
618 | summary_output = None |
619 | if success: |
620 | - if snapshot_mode == 'link-dest': |
621 | - summary_output = '' |
622 | + if snapshot_mode == "link-dest": |
623 | + summary_output = "" |
624 | if base_snapshot: |
625 | - summary_output = summary_output + 'Base snapshot: %s\n' % base_snapshot |
626 | + summary_output = ( |
627 | + summary_output + "Base snapshot: %s\n" % base_snapshot |
628 | + ) |
629 | snapshot_name = datetime.datetime.now().isoformat() |
630 | os.rename(dest_dir, os.path.join(snapshot_dir, snapshot_name)) |
631 | - if os.path.islink(os.path.join(snapshot_dir, 'latest')): |
632 | - os.unlink(os.path.join(snapshot_dir, 'latest')) |
633 | - if not os.path.exists(os.path.join(snapshot_dir, 'latest')): |
634 | - os.symlink(snapshot_name, os.path.join(snapshot_dir, 'latest')) |
635 | - if 'retention' in s: |
636 | - dirs = [d for d in os.listdir(snapshot_dir) if os.path.isdir(os.path.join(snapshot_dir, d))] |
637 | - to_delete = get_snapshots_to_delete(s['retention'], dirs) |
638 | + if os.path.islink(os.path.join(snapshot_dir, "latest")): |
639 | + os.unlink(os.path.join(snapshot_dir, "latest")) |
640 | + if not os.path.exists(os.path.join(snapshot_dir, "latest")): |
641 | + os.symlink(snapshot_name, os.path.join(snapshot_dir, "latest")) |
642 | + if "retention" in s: |
643 | + dirs = [ |
644 | + d |
645 | + for d in os.listdir(snapshot_dir) |
646 | + if os.path.isdir(os.path.join(snapshot_dir, d)) |
647 | + ] |
648 | + to_delete = get_snapshots_to_delete(s["retention"], dirs) |
649 | for snapshot in to_delete: |
650 | - temp_delete_tree = os.path.join(snapshot_dir, '_delete-%s' % snapshot) |
651 | - os.rename(os.path.join(snapshot_dir, snapshot), temp_delete_tree) |
652 | + temp_delete_tree = os.path.join( |
653 | + snapshot_dir, "_delete-%s" % snapshot |
654 | + ) |
655 | + os.rename( |
656 | + os.path.join(snapshot_dir, snapshot), temp_delete_tree |
657 | + ) |
658 | # Should better handle this |
659 | - subprocess.call(['rm', '-rf', temp_delete_tree]) |
660 | - summary_output = summary_output + 'Removed old snapshot: %s\n' % snapshot |
661 | + subprocess.call(["rm", "-rf", temp_delete_tree]) |
662 | + summary_output = ( |
663 | + summary_output + "Removed old snapshot: %s\n" % snapshot |
664 | + ) |
665 | else: |
666 | - summary_output = 'rsync exited with return code %d' % returncode |
667 | + summary_output = "rsync exited with return code %d" % returncode |
668 | |
669 | time_end = time.time() |
670 | api_out = { |
671 | - 'storage': { |
672 | - 'name': self.config['name'], |
673 | - 'secret': self.config['secret'], |
674 | + "storage": { |
675 | + "name": self.config["name"], |
676 | + "secret": self.config["secret"], |
677 | }, |
678 | - 'machine': { |
679 | - 'uuid': self.arg_uuid, |
680 | - 'sources': { |
681 | + "machine": { |
682 | + "uuid": self.arg_uuid, |
683 | + "sources": { |
684 | source_name: { |
685 | - 'success': success, |
686 | - 'snapshot': snapshot_name, |
687 | - 'summary': summary_output, |
688 | - 'time_begin': time_begin, |
689 | - 'time_end': time_end, |
690 | - }, |
691 | + "success": success, |
692 | + "snapshot": snapshot_name, |
693 | + "summary": summary_output, |
694 | + "time_begin": time_begin, |
695 | + "time_end": time_end, |
696 | + } |
697 | }, |
698 | }, |
699 | } |
700 | - api_reply = api_call(self.config['api_url'], 'storage_ping_source_update', api_out) |
701 | - |
702 | - self.logger.info('End: %s %s' % (machine['unit_name'], source_name)) |
703 | - |
704 | - self.logger.info('Done') |
705 | + api_reply = api_call( |
706 | + self.config["api_url"], "storage_ping_source_update", api_out |
707 | + ) |
708 | + |
709 | + self.logger.info("End: %s %s" % (machine["unit_name"], source_name)) |
710 | + |
711 | + self.logger.info("Done") |
712 | lock.close() |
713 | |
714 | def main(self): |
715 | @@ -301,9 +350,10 @@ |
716 | import argparse |
717 | |
718 | parser = argparse.ArgumentParser( |
719 | - formatter_class=argparse.ArgumentDefaultsHelpFormatter) |
720 | - parser.add_argument('--config-dir', '-c', type=str, default='/etc/turku-storage') |
721 | - parser.add_argument('uuid') |
722 | + formatter_class=argparse.ArgumentDefaultsHelpFormatter |
723 | + ) |
724 | + parser.add_argument("--config-dir", "-c", type=str, default="/etc/turku-storage") |
725 | + parser.add_argument("uuid") |
726 | return parser.parse_args() |
727 | |
728 | |
729 | |
730 | === modified file 'turku_storage/update_config.py' |
731 | --- turku_storage/update_config.py 2019-05-22 17:35:00 +0000 |
732 | +++ turku_storage/update_config.py 2020-06-21 22:41:04 +0000 |
733 | @@ -14,9 +14,10 @@ |
734 | # this program. If not, see <http://www.gnu.org/licenses/>. |
735 | |
736 | import os |
737 | +import pwd |
738 | +import random |
739 | import time |
740 | -import random |
741 | -import pwd |
742 | + |
743 | from .utils import load_config, acquire_lock, api_call |
744 | |
745 | |
746 | @@ -24,9 +25,10 @@ |
747 | import argparse |
748 | |
749 | parser = argparse.ArgumentParser( |
750 | - formatter_class=argparse.ArgumentDefaultsHelpFormatter) |
751 | - parser.add_argument('--config-dir', '-c', type=str, default='/etc/turku-storage') |
752 | - parser.add_argument('--wait', '-w', type=float) |
753 | + formatter_class=argparse.ArgumentDefaultsHelpFormatter |
754 | + ) |
755 | + parser.add_argument("--config-dir", "-c", type=str, default="/etc/turku-storage") |
756 | + parser.add_argument("--wait", "-w", type=float) |
757 | return parser.parse_args() |
758 | |
759 | |
760 | @@ -39,77 +41,84 @@ |
761 | |
762 | config = load_config(args.config_dir) |
763 | |
764 | - lock = acquire_lock(os.path.join(config['lock_dir'], 'turku-storage-update-config.lock')) |
765 | + lock = acquire_lock( |
766 | + os.path.join(config["lock_dir"], "turku-storage-update-config.lock") |
767 | + ) |
768 | |
769 | space_total = 0 |
770 | space_available = 0 |
771 | seen_devs = [] |
772 | - for volume_name in config['volumes']: |
773 | - v = config['volumes'][volume_name] |
774 | - st_dev = os.stat(v['path']).st_dev |
775 | + for volume_name in config["volumes"]: |
776 | + v = config["volumes"][volume_name] |
777 | + st_dev = os.stat(v["path"]).st_dev |
778 | if st_dev in seen_devs: |
779 | continue |
780 | seen_devs.append(st_dev) |
781 | try: |
782 | - sv = os.statvfs(v['path']) |
783 | + sv = os.statvfs(v["path"]) |
784 | except OSError: |
785 | continue |
786 | - s_t = (sv.f_bsize * sv.f_blocks / 1048576) |
787 | - s_a = (sv.f_bsize * sv.f_bavail / 1048576) |
788 | + s_t = sv.f_bsize * sv.f_blocks / 1048576 |
789 | + s_a = sv.f_bsize * sv.f_bavail / 1048576 |
790 | pct_used = (1.0 - float(s_a) / float(s_t)) * 100.0 |
791 | - if (not v['accept_new']) or (pct_used > v['accept_new_high_water_pct']): |
792 | + if (not v["accept_new"]) or (pct_used > v["accept_new_high_water_pct"]): |
793 | s_a = 0 |
794 | space_total += s_t |
795 | space_available += s_a |
796 | |
797 | api_out = { |
798 | - 'storage': { |
799 | - 'name': config['name'], |
800 | - 'secret': config['secret'], |
801 | - 'ssh_ping_host': config['ssh_ping_host'], |
802 | - 'ssh_ping_port': config['ssh_ping_port'], |
803 | - 'ssh_ping_user': config['ssh_ping_user'], |
804 | - 'ssh_ping_host_keys': config['ssh_ping_host_keys'], |
805 | - 'space_total': space_total, |
806 | - 'space_available': space_available, |
807 | - }, |
808 | + "storage": { |
809 | + "name": config["name"], |
810 | + "secret": config["secret"], |
811 | + "ssh_ping_host": config["ssh_ping_host"], |
812 | + "ssh_ping_port": config["ssh_ping_port"], |
813 | + "ssh_ping_user": config["ssh_ping_user"], |
814 | + "ssh_ping_host_keys": config["ssh_ping_host_keys"], |
815 | + "space_total": space_total, |
816 | + "space_available": space_available, |
817 | + } |
818 | } |
819 | - if ('api_auth_name' in config) and ('api_auth_secret' in config): |
820 | - api_out['auth'] = { |
821 | - 'name': config['api_auth_name'], |
822 | - 'secret': config['api_auth_secret'], |
823 | + if ("api_auth_name" in config) and ("api_auth_secret" in config): |
824 | + api_out["auth"] = { |
825 | + "name": config["api_auth_name"], |
826 | + "secret": config["api_auth_secret"], |
827 | } |
828 | else: |
829 | # XXX legacy |
830 | - api_out['auth'] = config['api_auth'] |
831 | - if 'published' in config: |
832 | - api_out['storage']['published'] = config['published'] |
833 | - |
834 | - api_reply = api_call(config['api_url'], 'storage_update_config', api_out) |
835 | - |
836 | - authorized_keys_out = '# Automatically generated, please do not edit\n' |
837 | - authorized_keys_out += '# Local additions may be placed in %s.static\n' % config['authorized_keys_file'] |
838 | - if os.path.isfile(config['authorized_keys_file'] + '.static'): |
839 | - with open(config['authorized_keys_file'] + '.static') as f: |
840 | + api_out["auth"] = config["api_auth"] |
841 | + if "published" in config: |
842 | + api_out["storage"]["published"] = config["published"] |
843 | + |
844 | + api_reply = api_call(config["api_url"], "storage_update_config", api_out) |
845 | + |
846 | + authorized_keys_out = "# Automatically generated, please do not edit\n" |
847 | + authorized_keys_out += ( |
848 | + "# Local additions may be placed in %s.static\n" |
849 | + % config["authorized_keys_file"] |
850 | + ) |
851 | + if os.path.isfile(config["authorized_keys_file"] + ".static"): |
852 | + with open(config["authorized_keys_file"] + ".static") as f: |
853 | authorized_keys_out += f.read() |
854 | - for machine_uuid in api_reply['machines']: |
855 | - machine = api_reply['machines'][machine_uuid] |
856 | + for machine_uuid in api_reply["machines"]: |
857 | + machine = api_reply["machines"][machine_uuid] |
858 | authorized_keys_out += '%s,command="%s %s" %s (%s)\n' % ( |
859 | - 'no-pty,no-agent-forwarding,no-X11-forwarding,no-user-rc', |
860 | - config['authorized_keys_command'], machine_uuid, |
861 | - machine['ssh_public_key'], machine['unit_name'] |
862 | + "no-pty,no-agent-forwarding,no-X11-forwarding,no-user-rc", |
863 | + config["authorized_keys_command"], |
864 | + machine_uuid, |
865 | + machine["ssh_public_key"], |
866 | + machine["unit_name"], |
867 | ) |
868 | |
869 | - f_uid = pwd.getpwnam(config['authorized_keys_user']).pw_uid |
870 | - f_gid = pwd.getpwnam(config['authorized_keys_user']).pw_gid |
871 | - keys_dirname = os.path.dirname(config['authorized_keys_file']) |
872 | + f_uid = pwd.getpwnam(config["authorized_keys_user"]).pw_uid |
873 | + f_gid = pwd.getpwnam(config["authorized_keys_user"]).pw_gid |
874 | + keys_dirname = os.path.dirname(config["authorized_keys_file"]) |
875 | if not os.path.isdir(keys_dirname): |
876 | os.makedirs(keys_dirname) |
877 | os.chown(keys_dirname, f_uid, f_gid) |
878 | - temp_fn = '%s.tmp.%s' % (config['authorized_keys_file'], os.getpid()) |
879 | - with open(temp_fn, 'w') as f: |
880 | + temp_fn = "%s.tmp.%s" % (config["authorized_keys_file"], os.getpid()) |
881 | + with open(temp_fn, "w") as f: |
882 | os.fchown(f.fileno(), f_uid, f_gid) |
883 | f.write(authorized_keys_out) |
884 | - os.rename(temp_fn, config['authorized_keys_file']) |
885 | + os.rename(temp_fn, config["authorized_keys_file"]) |
886 | |
887 | lock.close() |
888 | |
889 | === modified file 'turku_storage/utils.py' |
890 | --- turku_storage/utils.py 2019-10-29 09:47:32 +0000 |
891 | +++ turku_storage/utils.py 2020-06-21 22:41:04 +0000 |
892 | @@ -13,34 +13,36 @@ |
893 | # You should have received a copy of the GNU General Public License along with |
894 | # this program. If not, see <http://www.gnu.org/licenses/>. |
895 | |
896 | -import os |
897 | -import urllib.parse |
898 | +import copy |
899 | +import datetime |
900 | +import glob |
901 | import http.client |
902 | import json |
903 | -import copy |
904 | +import os |
905 | +import pwd |
906 | import random |
907 | +import re |
908 | import socket |
909 | -import glob |
910 | -import pwd |
911 | -import datetime |
912 | -import re |
913 | import time |
914 | - |
915 | - |
916 | -class RuntimeLock(): |
917 | +import urllib.parse |
918 | + |
919 | + |
920 | +class RuntimeLock: |
921 | name = None |
922 | file = None |
923 | |
924 | def __init__(self, name): |
925 | import fcntl |
926 | - file = open(name, 'w') |
927 | + |
928 | + file = open(name, "w") |
929 | try: |
930 | fcntl.lockf(file, fcntl.LOCK_EX | fcntl.LOCK_NB) |
931 | except IOError as e: |
932 | import errno |
933 | + |
934 | if e.errno in (errno.EACCES, errno.EAGAIN): |
935 | raise |
936 | - file.write('%10s\n' % os.getpid()) |
937 | + file.write("%10s\n" % os.getpid()) |
938 | file.flush() |
939 | file.seek(0) |
940 | self.name = name |
941 | @@ -71,12 +73,12 @@ |
942 | |
943 | def json_dump_p(obj, f): |
944 | """Calls json.dump with standard (pretty) formatting""" |
945 | - return json.dump(obj, f, sort_keys=True, indent=4, separators=(',', ': ')) |
946 | + return json.dump(obj, f, sort_keys=True, indent=4, separators=(",", ": ")) |
947 | |
948 | |
949 | def json_dumps_p(obj): |
950 | """Calls json.dumps with standard (pretty) formatting""" |
951 | - return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': ')) |
952 | + return json.dumps(obj, sort_keys=True, indent=4, separators=(",", ": ")) |
953 | |
954 | |
955 | def json_load_file(file): |
956 | @@ -103,27 +105,29 @@ |
957 | |
958 | def api_call(api_url, cmd, post_data, timeout=5): |
959 | url = urllib.parse.urlparse(api_url) |
960 | - if url.scheme == 'https': |
961 | + if url.scheme == "https": |
962 | h = http.client.HTTPSConnection(url.netloc, timeout=timeout) |
963 | else: |
964 | h = http.client.HTTPConnection(url.netloc, timeout=timeout) |
965 | out = json.dumps(post_data) |
966 | - h.putrequest('POST', '%s/%s' % (url.path, cmd)) |
967 | - h.putheader('Content-Type', 'application/json') |
968 | - h.putheader('Content-Length', len(out)) |
969 | - h.putheader('Accept', 'application/json') |
970 | + h.putrequest("POST", "%s/%s" % (url.path, cmd)) |
971 | + h.putheader("Content-Type", "application/json") |
972 | + h.putheader("Content-Length", len(out)) |
973 | + h.putheader("Accept", "application/json") |
974 | h.endheaders() |
975 | - h.send(out.encode('UTF-8')) |
976 | + h.send(out.encode("UTF-8")) |
977 | |
978 | res = h.getresponse() |
979 | if not res.status == http.client.OK: |
980 | - raise Exception('Received error %d (%s) from API server' % (res.status, res.reason)) |
981 | - if not res.getheader('content-type') == 'application/json': |
982 | - raise Exception('Received invalid reply from API server') |
983 | + raise Exception( |
984 | + "Received error %d (%s) from API server" % (res.status, res.reason) |
985 | + ) |
986 | + if not res.getheader("content-type") == "application/json": |
987 | + raise Exception("Received invalid reply from API server") |
988 | try: |
989 | - return json.loads(res.read().decode('UTF-8')) |
990 | + return json.loads(res.read().decode("UTF-8")) |
991 | except ValueError: |
992 | - raise Exception('Received invalid reply from API server') |
993 | + raise Exception("Received invalid reply from API server") |
994 | |
995 | |
996 | def random_weighted(m): |
997 | @@ -144,11 +148,11 @@ |
998 | |
999 | def load_config(config_dir): |
1000 | config = {} |
1001 | - config_d = os.path.join(config_dir, 'config.d') |
1002 | + config_d = os.path.join(config_dir, "config.d") |
1003 | config_files = [ |
1004 | os.path.join(config_d, fn) |
1005 | for fn in os.listdir(config_d) |
1006 | - if fn.endswith('.json') |
1007 | + if fn.endswith(".json") |
1008 | and os.path.isfile(os.path.join(config_d, fn)) |
1009 | and os.access(os.path.join(config_d, fn), os.R_OK) |
1010 | ] |
1011 | @@ -156,66 +160,70 @@ |
1012 | for file in config_files: |
1013 | config = dict_merge(config, json_load_file(file)) |
1014 | |
1015 | - required_keys = ['name', 'secret', 'api_url', 'volumes'] |
1016 | + required_keys = ["name", "secret", "api_url", "volumes"] |
1017 | # XXX legacy |
1018 | - if 'api_auth' not in config: |
1019 | - required_keys += ['api_auth_name', 'api_auth_secret'] |
1020 | + if "api_auth" not in config: |
1021 | + required_keys += ["api_auth_name", "api_auth_secret"] |
1022 | for k in required_keys: |
1023 | if k not in config: |
1024 | - raise Exception('Incomplete config') |
1025 | - |
1026 | - if 'accept_new_high_water_pct' not in config: |
1027 | - config['accept_new_high_water_pct'] = 80 |
1028 | - |
1029 | - for volume_name in config['volumes']: |
1030 | - if 'path' not in config['volumes'][volume_name]: |
1031 | - del(config['volumes'][volume_name]) |
1032 | + raise Exception("Incomplete config") |
1033 | + |
1034 | + if "accept_new_high_water_pct" not in config: |
1035 | + config["accept_new_high_water_pct"] = 80 |
1036 | + |
1037 | + for volume_name in config["volumes"]: |
1038 | + if "path" not in config["volumes"][volume_name]: |
1039 | + del config["volumes"][volume_name] |
1040 | continue |
1041 | - if 'accept_new' not in config['volumes'][volume_name]: |
1042 | - config['volumes'][volume_name]['accept_new'] = True |
1043 | - if 'accept_new_high_water_pct' not in config['volumes'][volume_name]: |
1044 | - config['volumes'][volume_name]['accept_new_high_water_pct'] = config['accept_new_high_water_pct'] |
1045 | - |
1046 | - if len(config['volumes']) == 0: |
1047 | - raise Exception('Incomplete config') |
1048 | - |
1049 | - if 'log_file' not in config: |
1050 | - config['log_file'] = '/var/log/turku-storage.log' |
1051 | - if 'lock_dir' not in config: |
1052 | - config['lock_dir'] = '/var/lock' |
1053 | - if 'var_dir' not in config: |
1054 | - config['var_dir'] = '/var/lib/turku-storage' |
1055 | - |
1056 | - if 'snapshot_mode' not in config: |
1057 | - config['snapshot_mode'] = 'link-dest' |
1058 | - if 'preserve_hard_links' not in config: |
1059 | - config['preserve_hard_links'] = False |
1060 | - |
1061 | - if 'ssh_ping_host' not in config: |
1062 | - config['ssh_ping_host'] = socket.getfqdn() |
1063 | - if 'ssh_ping_port' not in config: |
1064 | - config['ssh_ping_port'] = 22 |
1065 | - if 'ssh_ping_user' not in config: |
1066 | - config['ssh_ping_user'] = 'root' |
1067 | - if 'ssh_ping_host_keys' not in config: |
1068 | - config['ssh_ping_host_keys'] = [] |
1069 | - keys_glob = '/etc/ssh/ssh_host_*_key.pub' |
1070 | - if 'ssh_ping_host_keys_glob' in config: |
1071 | - keys_glob = config['ssh_ping_host_keys_glob'] |
1072 | + if "accept_new" not in config["volumes"][volume_name]: |
1073 | + config["volumes"][volume_name]["accept_new"] = True |
1074 | + if "accept_new_high_water_pct" not in config["volumes"][volume_name]: |
1075 | + config["volumes"][volume_name]["accept_new_high_water_pct"] = config[ |
1076 | + "accept_new_high_water_pct" |
1077 | + ] |
1078 | + |
1079 | + if len(config["volumes"]) == 0: |
1080 | + raise Exception("Incomplete config") |
1081 | + |
1082 | + if "log_file" not in config: |
1083 | + config["log_file"] = "/var/log/turku-storage.log" |
1084 | + if "lock_dir" not in config: |
1085 | + config["lock_dir"] = "/var/lock" |
1086 | + if "var_dir" not in config: |
1087 | + config["var_dir"] = "/var/lib/turku-storage" |
1088 | + |
1089 | + if "snapshot_mode" not in config: |
1090 | + config["snapshot_mode"] = "link-dest" |
1091 | + if "preserve_hard_links" not in config: |
1092 | + config["preserve_hard_links"] = False |
1093 | + |
1094 | + if "ssh_ping_host" not in config: |
1095 | + config["ssh_ping_host"] = socket.getfqdn() |
1096 | + if "ssh_ping_port" not in config: |
1097 | + config["ssh_ping_port"] = 22 |
1098 | + if "ssh_ping_user" not in config: |
1099 | + config["ssh_ping_user"] = "root" |
1100 | + if "ssh_ping_host_keys" not in config: |
1101 | + config["ssh_ping_host_keys"] = [] |
1102 | + keys_glob = "/etc/ssh/ssh_host_*_key.pub" |
1103 | + if "ssh_ping_host_keys_glob" in config: |
1104 | + keys_glob = config["ssh_ping_host_keys_glob"] |
1105 | for pubkey in glob.glob(keys_glob): |
1106 | with open(pubkey) as f: |
1107 | - config['ssh_ping_host_keys'].append(f.read().rstrip()) |
1108 | - if 'authorized_keys_file' not in config: |
1109 | - config['authorized_keys_file'] = '%s/.ssh/authorized_keys' % pwd.getpwnam(config['ssh_ping_user']).pw_dir |
1110 | - if 'authorized_keys_user' not in config: |
1111 | - config['authorized_keys_user'] = config['ssh_ping_user'] |
1112 | - if 'authorized_keys_command' not in config: |
1113 | - config['authorized_keys_command'] = 'turku-storage-ping' |
1114 | + config["ssh_ping_host_keys"].append(f.read().rstrip()) |
1115 | + if "authorized_keys_file" not in config: |
1116 | + config["authorized_keys_file"] = ( |
1117 | + "%s/.ssh/authorized_keys" % pwd.getpwnam(config["ssh_ping_user"]).pw_dir |
1118 | + ) |
1119 | + if "authorized_keys_user" not in config: |
1120 | + config["authorized_keys_user"] = config["ssh_ping_user"] |
1121 | + if "authorized_keys_command" not in config: |
1122 | + config["authorized_keys_command"] = "turku-storage-ping" |
1123 | |
1124 | - if 'timezone' not in config: |
1125 | - config['timezone'] = 'UTC' |
1126 | - if config['timezone']: |
1127 | - os.environ['TZ'] = config['timezone'] |
1128 | + if "timezone" not in config: |
1129 | + config["timezone"] = "UTC" |
1130 | + if config["timezone"]: |
1131 | + os.environ["TZ"] = config["timezone"] |
1132 | time.tzset() |
1133 | |
1134 | return config |
1135 | @@ -227,23 +235,23 @@ |
1136 | # 2015-02-20T03:20:36 |
1137 | # 2015-02-20T03:21:18.152575 |
1138 | # use it as a timestamp, otherwise ignore it |
1139 | - if 'save' in ss: |
1140 | - raise ValueError('Excluded snapshot') |
1141 | - if ss == 'working': |
1142 | - raise ValueError('Excluded snapshot') |
1143 | + if "save" in ss: |
1144 | + raise ValueError("Excluded snapshot") |
1145 | + if ss == "working": |
1146 | + raise ValueError("Excluded snapshot") |
1147 | try: |
1148 | - return datetime.datetime.strptime(ss, '%Y-%m-%dT%H:%M:%S.%f') |
1149 | + return datetime.datetime.strptime(ss, "%Y-%m-%dT%H:%M:%S.%f") |
1150 | except ValueError: |
1151 | pass |
1152 | try: |
1153 | - return datetime.datetime.strptime(ss, '%Y-%m-%dT%H:%M:%S') |
1154 | + return datetime.datetime.strptime(ss, "%Y-%m-%dT%H:%M:%S") |
1155 | except ValueError: |
1156 | pass |
1157 | try: |
1158 | return datetime.datetime.utcfromtimestamp(float(ss)) |
1159 | except ValueError: |
1160 | pass |
1161 | - raise ValueError('Unknown snapshot name format') |
1162 | + raise ValueError("Unknown snapshot name format") |
1163 | |
1164 | |
1165 | def get_latest_snapshot(snapshots): |
1166 | @@ -268,43 +276,42 @@ |
1167 | |
1168 | now = datetime.datetime.now() |
1169 | to_keep = [] |
1170 | - for ritem in retention.split(','): |
1171 | + for ritem in retention.split(","): |
1172 | ritem = ritem.strip() |
1173 | - r = re.findall('^earliest of (?:(\d+) )?(day|week|month)', ritem) |
1174 | + r = re.findall(r"^earliest of (?:(\d+) )?(day|week|month)", ritem) |
1175 | if len(r) > 0: |
1176 | - if r[0][0] == '': |
1177 | + if r[0][0] == "": |
1178 | earliest_num = 1 |
1179 | else: |
1180 | earliest_num = int(r[0][0]) |
1181 | earliest_word = r[0][1] |
1182 | - if earliest_word == 'fortnight': |
1183 | - earliest_word = 'week' |
1184 | + if earliest_word == "fortnight": |
1185 | + earliest_word = "week" |
1186 | earliest_num = earliest_num * 2 |
1187 | - if earliest_word == 'day': |
1188 | - cutoff_time = ( |
1189 | - now.replace(hour=0, minute=0, second=0, microsecond=0) - |
1190 | - datetime.timedelta(days=(earliest_num - 1)) |
1191 | - ) |
1192 | - elif earliest_word == 'week': |
1193 | - cutoff_time = ( |
1194 | - now.replace(hour=0, minute=0, second=0, microsecond=0) - |
1195 | - datetime.timedelta(days=((now.weekday() + 1) % 7)) |
1196 | - ) |
1197 | - for i in range(earliest_num - 1): |
1198 | - cutoff_time = ( |
1199 | - cutoff_time - datetime.timedelta(weeks=1) |
1200 | - ).replace(day=1, hour=0, minute=0, second=0, microsecond=0) |
1201 | - elif earliest_word == 'month': |
1202 | - cutoff_time = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0) |
1203 | - for i in range(earliest_num - 1): |
1204 | - cutoff_time = ( |
1205 | - cutoff_time - datetime.timedelta(days=1) |
1206 | - ).replace(day=1, hour=0, minute=0, second=0, microsecond=0) |
1207 | + if earliest_word == "day": |
1208 | + cutoff_time = now.replace( |
1209 | + hour=0, minute=0, second=0, microsecond=0 |
1210 | + ) - datetime.timedelta(days=(earliest_num - 1)) |
1211 | + elif earliest_word == "week": |
1212 | + cutoff_time = now.replace( |
1213 | + hour=0, minute=0, second=0, microsecond=0 |
1214 | + ) - datetime.timedelta(days=((now.weekday() + 1) % 7)) |
1215 | + for i in range(earliest_num - 1): |
1216 | + cutoff_time = (cutoff_time - datetime.timedelta(weeks=1)).replace( |
1217 | + day=1, hour=0, minute=0, second=0, microsecond=0 |
1218 | + ) |
1219 | + elif earliest_word == "month": |
1220 | + cutoff_time = now.replace( |
1221 | + day=1, hour=0, minute=0, second=0, microsecond=0 |
1222 | + ) |
1223 | + for i in range(earliest_num - 1): |
1224 | + cutoff_time = (cutoff_time - datetime.timedelta(days=1)).replace( |
1225 | + day=1, hour=0, minute=0, second=0, microsecond=0 |
1226 | + ) |
1227 | else: |
1228 | - cutoff_time = ( |
1229 | - now.replace(hour=0, minute=0, second=0, microsecond=0) - |
1230 | - datetime.timedelta(days=(earliest_num - 1)) |
1231 | - ) |
1232 | + cutoff_time = now.replace( |
1233 | + hour=0, minute=0, second=0, microsecond=0 |
1234 | + ) - datetime.timedelta(days=(earliest_num - 1)) |
1235 | candidate_s = None |
1236 | for s in list(snapshot_dict.keys()): |
1237 | if s < cutoff_time: |
1238 | @@ -317,16 +324,16 @@ |
1239 | candidate_s = s |
1240 | if candidate_s and candidate_s not in to_keep: |
1241 | to_keep.append(candidate_s) |
1242 | - r = re.findall('^last (\d+) day', ritem) |
1243 | + r = re.findall(r"^last (\d+) day", ritem) |
1244 | if len(r) > 0: |
1245 | last_days = int(r[0]) |
1246 | - cutoff_time = (now - datetime.timedelta(days=last_days)) |
1247 | + cutoff_time = now - datetime.timedelta(days=last_days) |
1248 | for s in list(snapshot_dict.keys()): |
1249 | if s < cutoff_time: |
1250 | continue |
1251 | if s not in to_keep: |
1252 | to_keep.append(s) |
1253 | - r = re.findall('^last (\d+) snapshot', ritem) |
1254 | + r = re.findall(r"^last (\d+) snapshot", ritem) |
1255 | if len(r) > 0: |
1256 | last_snapshots = int(r[0]) |
1257 | i = 0 |