Merge lp:~fo0bar/turku/turku-agent-cleanup into lp:turku/turku-agent
Status: Merged
Approved by: Barry Price
Approved revision: 57
Merged at revision: 57
Proposed branch: lp:~fo0bar/turku/turku-agent-cleanup
Merge into: lp:turku/turku-agent
Diff against target: 1204 lines (+487/-298), 10 files modified:
.bzrignore (+61/-5), MANIFEST.in (+13/-0), Makefile (+28/-0), setup.py (+13/-13), tests/test_stub.py (+8/-0), tox.ini (+38/-0), turku_agent/ping.py (+108/-84), turku_agent/rsyncd_wrapper.py (+11/-7), turku_agent/update_config.py (+71/-69), turku_agent/utils.py (+136/-120)
To merge this branch: bzr merge lp:~fo0bar/turku/turku-agent-cleanup
Related bugs: none

| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Stuart Bishop (community) | Approve | | |
Commit message
Mega-noop cleanup
Description of the change
This is the minimum required for:
- tox test suite with all passing tests
- black-managed formatting
- Shippable sdist module
It is intended as a base for the other MPs, so they don't have to e.g. establish tests/*, worry about pre-existing flake8 failures, or work out how to add additional optional modules.
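
As a minimal sketch of the kind of unit test the new tests/ scaffolding is meant to host (the test class name is hypothetical; `json_dumps_p` is the real helper from turku_agent/utils.py, and the snippet assumes the package is importable, e.g. inside the tox virtualenv):

```python
# Hypothetical example of a real test that could eventually replace
# tests/test_stub.py; json_dumps_p lives in turku_agent/utils.py.
import json
import unittest

from turku_agent.utils import json_dumps_p


class TestJsonDumpsP(unittest.TestCase):
    def test_pretty_round_trip(self):
        obj = {"b": 1, "a": [1, 2]}
        out = json_dumps_p(obj)
        # "Standard (pretty) formatting": sorted keys, 4-space indent
        self.assertEqual(out.splitlines()[1], '    "a": [')
        # The pretty output still parses back to the original object
        self.assertEqual(json.loads(out), obj)


if __name__ == "__main__":
    unittest.main()
```

Such a test would run under the added tooling via `make test` or `python -mtox -e pytest`.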

Canonical IS Mergebot (canonical-is-mergebot) wrote:
This merge proposal is being monitored by mergebot. Change the status to Approved to merge.
57. By Ryan Finnie

Mega-noop cleanup
- Sort imports
- Remove shabangs from non-scripts
- Update MANIFEST.in so `setup.py sdist` produces usable tarballs
- Create stub tests
- Add tox.ini
- Add blank requirements.txt
- Add Makefile
- make black
- Update .bzrignore
- Clean up flake8:
  - update_config.py: '.utils.json_dump_p' imported but unused
  - update_config.py: 'api_reply' is assigned to but never used
  - utils.py: 'sources_secrets_d' is assigned to but never used
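
For context, the "assigned to but never used" fixes are pure deletions of dead bindings. A schematic sketch of the pattern (the `api_call` here is a stand-in stub, not the real turku_agent.utils.api_call, and the function names are illustrative):

```python
# Schematic illustration of the flake8 F841 cleanup pattern.
def api_call(api_url, cmd, post_data):
    # Stand-in stub for turku_agent.utils.api_call
    return {"status": "ok"}


def send_config_before(config, api_out):
    # flake8 F841: local variable 'api_reply' is assigned to but never used
    api_reply = api_call(config["api_url"], "update_config", api_out)  # noqa: F841


def send_config_after(config, api_out):
    # The reply is intentionally ignored, so the dead assignment is dropped
    api_call(config["api_url"], "update_config", api_out)


send_config_after({"api_url": "https://example.invalid/v1"}, {})
```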

Stuart Bishop (stub) wrote:
Looks good. The new bits seem fine (bzr, buildchain, tox). The code changes all appear to be nothing but Black reformatting (as expected).

Canonical IS Mergebot (canonical-is-mergebot) wrote:
Change successfully merged at revision 57
Preview Diff
1 | === modified file '.bzrignore' |
2 | --- .bzrignore 2019-04-22 01:16:04 +0000 |
3 | +++ .bzrignore 2020-06-21 22:22:36 +0000 |
4 | @@ -1,5 +1,61 @@ |
5 | -*.pyc |
6 | -./build/ |
7 | -./dist/ |
8 | -./MANIFEST |
9 | -./*.egg-info/ |
10 | +MANIFEST |
11 | +.pybuild/ |
12 | +.pytest_cache/ |
13 | + |
14 | +# Byte-compiled / optimized / DLL files |
15 | +__pycache__/ |
16 | +*.py[cod] |
17 | + |
18 | +# C extensions |
19 | +*.so |
20 | + |
21 | +# Distribution / packaging |
22 | +.Python |
23 | +env/ |
24 | +build/ |
25 | +develop-eggs/ |
26 | +dist/ |
27 | +downloads/ |
28 | +eggs/ |
29 | +.eggs/ |
30 | +lib/ |
31 | +lib64/ |
32 | +parts/ |
33 | +sdist/ |
34 | +var/ |
35 | +*.egg-info/ |
36 | +.installed.cfg |
37 | +*.egg |
38 | + |
39 | +# PyInstaller |
40 | +# Usually these files are written by a python script from a template |
41 | +# before PyInstaller builds the exe, so as to inject date/other infos into it. |
42 | +*.manifest |
43 | +*.spec |
44 | + |
45 | +# Installer logs |
46 | +pip-log.txt |
47 | +pip-delete-this-directory.txt |
48 | + |
49 | +# Unit test / coverage reports |
50 | +htmlcov/ |
51 | +.tox/ |
52 | +.coverage |
53 | +.coverage.* |
54 | +.cache |
55 | +nosetests.xml |
56 | +coverage.xml |
57 | +*,cover |
58 | + |
59 | +# Translations |
60 | +*.mo |
61 | +*.pot |
62 | + |
63 | +# Django stuff: |
64 | +*.log |
65 | + |
66 | +# Sphinx documentation |
67 | +docs/_build/ |
68 | + |
69 | +# PyBuilder |
70 | +target/ |
71 | |
72 | === modified file 'MANIFEST.in' |
73 | --- MANIFEST.in 2015-03-29 07:59:33 +0000 |
74 | +++ MANIFEST.in 2020-06-21 22:22:36 +0000 |
75 | @@ -1,3 +1,16 @@ |
76 | +include Makefile |
77 | +include README |
78 | +include requirements.txt |
79 | +include tests/*.py |
80 | +include tox.ini |
81 | include turku-agent.cron |
82 | +include turku-agent-ping |
83 | +include turku-agent-ping.service |
84 | +include turku-agent-ping.timer |
85 | include turku-agent-rsyncd.conf |
86 | +include turku-agent-rsyncd.init-debian |
87 | include turku-agent-rsyncd.service |
88 | +include turku-agent-rsyncd-wrapper |
89 | +include turku-update-config |
90 | +include turku-update-config.service |
91 | +include turku-update-config.timer |
92 | |
93 | === added file 'Makefile' |
94 | --- Makefile 1970-01-01 00:00:00 +0000 |
95 | +++ Makefile 2020-06-21 22:22:36 +0000 |
96 | @@ -0,0 +1,28 @@ |
97 | +PYTHON := python3 |
98 | + |
99 | +all: build |
100 | + |
101 | +build: |
102 | + $(PYTHON) setup.py build |
103 | + |
104 | +lint: |
105 | + $(PYTHON) -mtox -e flake8 |
106 | + |
107 | +test: |
108 | + $(PYTHON) -mtox |
109 | + |
110 | +test-quick: |
111 | + $(PYTHON) -mtox -e black,flake8,pytest-quick |
112 | + |
113 | +black-check: |
114 | + $(PYTHON) -mtox -e black |
115 | + |
116 | +black: |
117 | + $(PYTHON) -mblack $(CURDIR) |
118 | + |
119 | +install: build |
120 | + $(PYTHON) setup.py install |
121 | + |
122 | +clean: |
123 | + $(PYTHON) setup.py clean |
124 | + $(RM) -r build MANIFEST |
125 | |
126 | === added file 'requirements.txt' |
127 | === modified file 'setup.py' |
128 | --- setup.py 2019-04-22 01:16:04 +0000 |
129 | +++ setup.py 2020-06-21 22:22:36 +0000 |
130 | @@ -18,22 +18,22 @@ |
131 | import sys |
132 | from setuptools import setup |
133 | |
134 | -assert(sys.version_info > (3, 4)) |
135 | +assert sys.version_info > (3, 4) |
136 | |
137 | |
138 | setup( |
139 | - name='turku_agent', |
140 | - description='Turku backups - client agent', |
141 | - version='0.2.0', |
142 | - author='Ryan Finnie', |
143 | - author_email='ryan.finnie@canonical.com', |
144 | - url='https://launchpad.net/turku', |
145 | - packages=['turku_agent'], |
146 | + name="turku_agent", |
147 | + description="Turku backups - client agent", |
148 | + version="0.2.0", |
149 | + author="Ryan Finnie", |
150 | + author_email="ryan.finnie@canonical.com", |
151 | + url="https://launchpad.net/turku", |
152 | + packages=["turku_agent"], |
153 | entry_points={ |
154 | - 'console_scripts': [ |
155 | - 'turku-agent-ping = turku_agent.ping:main', |
156 | - 'turku-agent-rsyncd-wrapper = turku_agent.rsyncd_wrapper:main', |
157 | - 'turku-update-config = turku_agent.update_config:main', |
158 | - ], |
159 | + "console_scripts": [ |
160 | + "turku-agent-ping = turku_agent.ping:main", |
161 | + "turku-agent-rsyncd-wrapper = turku_agent.rsyncd_wrapper:main", |
162 | + "turku-update-config = turku_agent.update_config:main", |
163 | + ] |
164 | }, |
165 | ) |
166 | |
167 | === added directory 'tests' |
168 | === added file 'tests/__init__.py' |
169 | === added file 'tests/test_stub.py' |
170 | --- tests/test_stub.py 1970-01-01 00:00:00 +0000 |
171 | +++ tests/test_stub.py 2020-06-21 22:22:36 +0000 |
172 | @@ -0,0 +1,8 @@ |
173 | +import unittest |
174 | +import warnings |
175 | + |
176 | + |
177 | +class TestStub(unittest.TestCase): |
178 | + def test_stub(self): |
179 | + # pytest doesn't like a tests/ with no tests |
180 | + warnings.warn("Remove this file once unit tests are added") |
181 | |
182 | === added file 'tox.ini' |
183 | --- tox.ini 1970-01-01 00:00:00 +0000 |
184 | +++ tox.ini 2020-06-21 22:22:36 +0000 |
185 | @@ -0,0 +1,38 @@ |
186 | +[tox] |
187 | +envlist = black, flake8, pytest |
188 | + |
189 | +[testenv] |
190 | +basepython = python |
191 | + |
192 | +[testenv:black] |
193 | +commands = python -mblack --check . |
194 | +deps = black |
195 | + |
196 | +[testenv:flake8] |
197 | +commands = python -mflake8 |
198 | +deps = flake8 |
199 | + |
200 | +[testenv:pytest] |
201 | +commands = python -mpytest --cov=turku_agent --cov-report=term-missing |
202 | +deps = pytest |
203 | + pytest-cov |
204 | + -r{toxinidir}/requirements.txt |
205 | + |
206 | +[testenv:pytest-quick] |
207 | +commands = python -mpytest -m "not slow" |
208 | +deps = pytest |
209 | + -r{toxinidir}/requirements.txt |
210 | + |
211 | +[flake8] |
212 | +exclude = |
213 | + .git, |
214 | + __pycache__, |
215 | + .tox, |
216 | +# TODO: remove C901 once complexity is reduced |
217 | +ignore = C901,E203,E231,W503 |
218 | +max-line-length = 120 |
219 | +max-complexity = 10 |
220 | + |
221 | +[pytest] |
222 | +markers = |
223 | + slow |
224 | |
225 | === modified file 'turku_agent/ping.py' |
226 | --- turku_agent/ping.py 2020-03-26 06:06:23 +0000 |
227 | +++ turku_agent/ping.py 2020-06-21 22:22:36 +0000 |
228 | @@ -1,5 +1,3 @@ |
229 | -#!/usr/bin/env python3 |
230 | - |
231 | # Turku backups - client agent |
232 | # Copyright 2015 Canonical Ltd. |
233 | # |
234 | @@ -16,13 +14,14 @@ |
235 | # this program. If not, see <http://www.gnu.org/licenses/>. |
236 | |
237 | |
238 | +import json |
239 | import os |
240 | -import json |
241 | import random |
242 | import shlex |
243 | import subprocess |
244 | import tempfile |
245 | import time |
246 | + |
247 | from .utils import load_config, acquire_lock, api_call |
248 | |
249 | |
250 | @@ -30,42 +29,60 @@ |
251 | import argparse |
252 | |
253 | parser = argparse.ArgumentParser( |
254 | - formatter_class=argparse.ArgumentDefaultsHelpFormatter) |
255 | - parser.add_argument('--config-dir', '-c', type=str, default='/etc/turku-agent') |
256 | - parser.add_argument('--wait', '-w', type=float) |
257 | - parser.add_argument('--restore', action='store_true') |
258 | - parser.add_argument('--restore-storage', type=str, default=None) |
259 | - parser.add_argument('--gonogo-program', type=str, default=None, |
260 | - help='Go/no-go program run each time to determine whether to ping') |
261 | + formatter_class=argparse.ArgumentDefaultsHelpFormatter |
262 | + ) |
263 | + parser.add_argument("--config-dir", "-c", type=str, default="/etc/turku-agent") |
264 | + parser.add_argument("--wait", "-w", type=float) |
265 | + parser.add_argument("--restore", action="store_true") |
266 | + parser.add_argument("--restore-storage", type=str, default=None) |
267 | + parser.add_argument( |
268 | + "--gonogo-program", |
269 | + type=str, |
270 | + default=None, |
271 | + help="Go/no-go program run each time to determine whether to ping", |
272 | + ) |
273 | return parser.parse_args() |
274 | |
275 | |
276 | def call_ssh(config, storage, ssh_req): |
277 | # Write the server host public key |
278 | - t = tempfile.NamedTemporaryFile(mode='w+', encoding='UTF-8') |
279 | - for key in storage['ssh_ping_host_keys']: |
280 | - t.write('%s %s\n' % (storage['ssh_ping_host'], key)) |
281 | + t = tempfile.NamedTemporaryFile(mode="w+", encoding="UTF-8") |
282 | + for key in storage["ssh_ping_host_keys"]: |
283 | + t.write("%s %s\n" % (storage["ssh_ping_host"], key)) |
284 | t.flush() |
285 | |
286 | # Call ssh |
287 | - ssh_command = config['ssh_command'] |
288 | + ssh_command = config["ssh_command"] |
289 | ssh_command += [ |
290 | - '-T', |
291 | - '-o', 'BatchMode=yes', |
292 | - '-o', 'UserKnownHostsFile=%s' % t.name, |
293 | - '-o', 'StrictHostKeyChecking=yes', |
294 | - '-o', 'CheckHostIP=no', |
295 | - '-i', config['ssh_private_key_file'], |
296 | - '-R', '%d:%s:%d' % (ssh_req['port'], config['rsyncd_local_address'], config['rsyncd_local_port']), |
297 | - '-p', str(storage['ssh_ping_port']), |
298 | - '-l', storage['ssh_ping_user'], |
299 | - storage['ssh_ping_host'], |
300 | - 'turku-ping-remote', |
301 | + "-T", |
302 | + "-o", |
303 | + "BatchMode=yes", |
304 | + "-o", |
305 | + "UserKnownHostsFile=%s" % t.name, |
306 | + "-o", |
307 | + "StrictHostKeyChecking=yes", |
308 | + "-o", |
309 | + "CheckHostIP=no", |
310 | + "-i", |
311 | + config["ssh_private_key_file"], |
312 | + "-R", |
313 | + "%d:%s:%d" |
314 | + % ( |
315 | + ssh_req["port"], |
316 | + config["rsyncd_local_address"], |
317 | + config["rsyncd_local_port"], |
318 | + ), |
319 | + "-p", |
320 | + str(storage["ssh_ping_port"]), |
321 | + "-l", |
322 | + storage["ssh_ping_user"], |
323 | + storage["ssh_ping_host"], |
324 | + "turku-ping-remote", |
325 | ] |
326 | p = subprocess.Popen(ssh_command, stdin=subprocess.PIPE) |
327 | |
328 | # Write the ssh request |
329 | - p.stdin.write((json.dumps(ssh_req) + '\n.\n').encode('UTF-8')) |
330 | + p.stdin.write((json.dumps(ssh_req) + "\n.\n").encode("UTF-8")) |
331 | p.stdin.flush() |
332 | |
333 | # Wait for the server to close the SSH connection |
334 | @@ -88,16 +105,18 @@ |
335 | config = load_config(args.config_dir) |
336 | |
337 | # Basic checks |
338 | - for i in ('ssh_private_key_file', 'machine_uuid', 'machine_secret', 'api_url'): |
339 | + for i in ("ssh_private_key_file", "machine_uuid", "machine_secret", "api_url"): |
340 | if i not in config: |
341 | return |
342 | - if not os.path.isfile(config['ssh_private_key_file']): |
343 | + if not os.path.isfile(config["ssh_private_key_file"]): |
344 | return |
345 | |
346 | # If a go/no-go program is defined, run it and only go if it exits 0. |
347 | # Example: prevent backups during high-load for sensitive systems: |
348 | # ['check_load', '-c', '1,5,15'] |
349 | - gonogo_program = args.gonogo_program if args.gonogo_program else config['gonogo_program'] |
350 | + gonogo_program = ( |
351 | + args.gonogo_program if args.gonogo_program else config["gonogo_program"] |
352 | + ) |
353 | if isinstance(gonogo_program, (list, tuple)): |
354 | # List, program name first, optional arguments after |
355 | gonogo_program_and_args = list(gonogo_program) |
356 | @@ -113,106 +132,111 @@ |
357 | except (subprocess.CalledProcessError, OSError): |
358 | return |
359 | |
360 | - lock = acquire_lock(os.path.join(config['lock_dir'], 'turku-agent-ping.lock')) |
361 | + lock = acquire_lock(os.path.join(config["lock_dir"], "turku-agent-ping.lock")) |
362 | |
363 | restore_mode = args.restore |
364 | |
365 | # Check with the API server |
366 | api_out = {} |
367 | |
368 | - machine_merge_map = ( |
369 | - ('machine_uuid', 'uuid'), |
370 | - ('machine_secret', 'secret'), |
371 | - ) |
372 | - api_out['machine'] = {} |
373 | + machine_merge_map = (("machine_uuid", "uuid"), ("machine_secret", "secret")) |
374 | + api_out["machine"] = {} |
375 | for a, b in machine_merge_map: |
376 | if a in config: |
377 | - api_out['machine'][b] = config[a] |
378 | + api_out["machine"][b] = config[a] |
379 | |
380 | if restore_mode: |
381 | - print('Entering restore mode.') |
382 | + print("Entering restore mode.") |
383 | print() |
384 | - api_reply = api_call(config['api_url'], 'agent_ping_restore', api_out) |
385 | + api_reply = api_call(config["api_url"], "agent_ping_restore", api_out) |
386 | |
387 | sources_by_storage = {} |
388 | - for source_name in api_reply['machine']['sources']: |
389 | - source = api_reply['machine']['sources'][source_name] |
390 | - if source_name not in config['sources']: |
391 | - continue |
392 | - if 'storage' not in source: |
393 | - continue |
394 | - if source['storage']['name'] not in sources_by_storage: |
395 | - sources_by_storage[source['storage']['name']] = {} |
396 | - sources_by_storage[source['storage']['name']][source_name] = source |
397 | + for source_name in api_reply["machine"]["sources"]: |
398 | + source = api_reply["machine"]["sources"][source_name] |
399 | + if source_name not in config["sources"]: |
400 | + continue |
401 | + if "storage" not in source: |
402 | + continue |
403 | + if source["storage"]["name"] not in sources_by_storage: |
404 | + sources_by_storage[source["storage"]["name"]] = {} |
405 | + sources_by_storage[source["storage"]["name"]][source_name] = source |
406 | |
407 | if len(sources_by_storage) == 0: |
408 | - print('Cannot find any appropraite sources.') |
409 | + print("Cannot find any appropraite sources.") |
410 | return |
411 | - print('This machine\'s sources are on the following storage units:') |
412 | + print("This machine's sources are on the following storage units:") |
413 | for storage_name in sources_by_storage: |
414 | - print(' %s' % storage_name) |
415 | + print(" %s" % storage_name) |
416 | for source_name in sources_by_storage[storage_name]: |
417 | - print(' %s' % source_name) |
418 | + print(" %s" % source_name) |
419 | print() |
420 | if len(sources_by_storage) == 1: |
421 | - storage = list(list(sources_by_storage.values())[0].values())[0]['storage'] |
422 | + storage = list(list(sources_by_storage.values())[0].values())[0]["storage"] |
423 | elif args.restore_storage: |
424 | if args.restore_storage in sources_by_storage: |
425 | - storage = sources_by_storage[args.restore_storage]['storage'] |
426 | + storage = sources_by_storage[args.restore_storage]["storage"] |
427 | else: |
428 | print('Cannot find appropriate storage "%s"' % args.restore_storage) |
429 | return |
430 | else: |
431 | - print('Multiple storages found. Please use --restore-storage to specify one.') |
432 | + print( |
433 | + "Multiple storages found. Please use --restore-storage to specify one." |
434 | + ) |
435 | return |
436 | |
437 | ssh_req = { |
438 | - 'verbose': True, |
439 | - 'action': 'restore', |
440 | - 'port': random.randint(49152, 65535), |
441 | + "verbose": True, |
442 | + "action": "restore", |
443 | + "port": random.randint(49152, 65535), |
444 | } |
445 | - print('Storage unit: %s' % storage['name']) |
446 | - if 'restore_path' in config: |
447 | - print('Local destination path: %s' % config['restore_path']) |
448 | - print('Sample restore usage from storage unit:') |
449 | + print("Storage unit: %s" % storage["name"]) |
450 | + if "restore_path" in config: |
451 | + print("Local destination path: %s" % config["restore_path"]) |
452 | + print("Sample restore usage from storage unit:") |
453 | print( |
454 | - ' RSYNC_PASSWORD=%s rsync -avzP --numeric-ids ${P?}/ rsync://%s@127.0.0.1:%s/%s/' % ( |
455 | - config['restore_password'], |
456 | - config['restore_username'], |
457 | - ssh_req['port'], config['restore_module'] |
458 | + " RSYNC_PASSWORD=%s rsync -avzP --numeric-ids ${P?}/ rsync://%s@127.0.0.1:%s/%s/" |
459 | + % ( |
460 | + config["restore_password"], |
461 | + config["restore_username"], |
462 | + ssh_req["port"], |
463 | + config["restore_module"], |
464 | ) |
465 | ) |
466 | print() |
467 | call_ssh(config, storage, ssh_req) |
468 | else: |
469 | - api_reply = api_call(config['api_url'], 'agent_ping_checkin', api_out) |
470 | + api_reply = api_call(config["api_url"], "agent_ping_checkin", api_out) |
471 | |
472 | - if 'scheduled_sources' not in api_reply: |
473 | + if "scheduled_sources" not in api_reply: |
474 | return |
475 | sources_by_storage = {} |
476 | - for source_name in api_reply['machine']['scheduled_sources']: |
477 | - source = api_reply['machine']['scheduled_sources'][source_name] |
478 | - if source_name not in config['sources']: |
479 | - continue |
480 | - if 'storage' not in source: |
481 | - continue |
482 | - if source['storage']['name'] not in sources_by_storage: |
483 | - sources_by_storage[source['storage']['name']] = {} |
484 | - sources_by_storage[source['storage']['name']][source_name] = source |
485 | + for source_name in api_reply["machine"]["scheduled_sources"]: |
486 | + source = api_reply["machine"]["scheduled_sources"][source_name] |
487 | + if source_name not in config["sources"]: |
488 | + continue |
489 | + if "storage" not in source: |
490 | + continue |
491 | + if source["storage"]["name"] not in sources_by_storage: |
492 | + sources_by_storage[source["storage"]["name"]] = {} |
493 | + sources_by_storage[source["storage"]["name"]][source_name] = source |
494 | |
495 | for storage_name in sources_by_storage: |
496 | ssh_req = { |
497 | - 'verbose': True, |
498 | - 'action': 'checkin', |
499 | - 'port': random.randint(49152, 65535), |
500 | - 'sources': {}, |
501 | + "verbose": True, |
502 | + "action": "checkin", |
503 | + "port": random.randint(49152, 65535), |
504 | + "sources": {}, |
505 | } |
506 | for source in sources_by_storage[storage_name]: |
507 | - ssh_req['sources'][source] = { |
508 | - 'username': config['sources'][source]['username'], |
509 | - 'password': config['sources'][source]['password'], |
510 | + ssh_req["sources"][source] = { |
511 | + "username": config["sources"][source]["username"], |
512 | + "password": config["sources"][source]["password"], |
513 | } |
514 | - call_ssh(config, list(sources_by_storage[storage_name].values())[0]['storage'], ssh_req) |
515 | + call_ssh( |
516 | + config, |
517 | + list(sources_by_storage[storage_name].values())[0]["storage"], |
518 | + ssh_req, |
519 | + ) |
520 | |
521 | # Cleanup |
522 | lock.close() |
523 | |
524 | === modified file 'turku_agent/rsyncd_wrapper.py' |
525 | --- turku_agent/rsyncd_wrapper.py 2019-04-22 01:16:04 +0000 |
526 | +++ turku_agent/rsyncd_wrapper.py 2020-06-21 22:22:36 +0000 |
527 | @@ -16,6 +16,7 @@ |
528 | # this program. If not, see <http://www.gnu.org/licenses/>. |
529 | |
530 | import os |
531 | + |
532 | from .utils import load_config |
533 | |
534 | |
535 | @@ -23,9 +24,10 @@ |
536 | import argparse |
537 | |
538 | parser = argparse.ArgumentParser( |
539 | - formatter_class=argparse.ArgumentDefaultsHelpFormatter) |
540 | - parser.add_argument('--config-dir', '-c', type=str, default='/etc/turku-agent') |
541 | - parser.add_argument('--detach', action='store_true') |
542 | + formatter_class=argparse.ArgumentDefaultsHelpFormatter |
543 | + ) |
544 | + parser.add_argument("--config-dir", "-c", type=str, default="/etc/turku-agent") |
545 | + parser.add_argument("--detach", action="store_true") |
546 | return parser.parse_known_args() |
547 | |
548 | |
549 | @@ -33,10 +35,12 @@ |
550 | args, rest = parse_args() |
551 | |
552 | config = load_config(args.config_dir) |
553 | - rsyncd_command = config['rsyncd_command'] |
554 | + rsyncd_command = config["rsyncd_command"] |
555 | if not args.detach: |
556 | - rsyncd_command.append('--no-detach') |
557 | - rsyncd_command.append('--daemon') |
558 | - rsyncd_command.append('--config=%s' % os.path.join(config['var_dir'], 'rsyncd.conf')) |
559 | + rsyncd_command.append("--no-detach") |
560 | + rsyncd_command.append("--daemon") |
561 | + rsyncd_command.append( |
562 | + "--config=%s" % os.path.join(config["var_dir"], "rsyncd.conf") |
563 | + ) |
564 | rsyncd_command += rest |
565 | os.execvp(rsyncd_command[0], rsyncd_command) |
566 | |
567 | === modified file 'turku_agent/update_config.py' |
568 | --- turku_agent/update_config.py 2019-05-23 14:18:25 +0000 |
569 | +++ turku_agent/update_config.py 2020-06-21 22:22:36 +0000 |
570 | @@ -1,5 +1,3 @@ |
571 | -#!/usr/bin/env python3 |
572 | - |
573 | # Turku backups - client agent |
574 | # Copyright 2015 Canonical Ltd. |
575 | # |
576 | @@ -15,12 +13,13 @@ |
577 | # You should have received a copy of the GNU General Public License along with |
578 | # this program. If not, see <http://www.gnu.org/licenses/>. |
579 | |
580 | +import logging |
581 | +import os |
582 | import random |
583 | -import os |
584 | import subprocess |
585 | import time |
586 | -import logging |
587 | -from .utils import json_dump_p, json_dumps_p, load_config, fill_config, acquire_lock, api_call |
588 | + |
589 | +from .utils import json_dumps_p, load_config, fill_config, acquire_lock, api_call |
590 | |
591 | |
592 | class IncompleteConfigError(Exception): |
593 | @@ -31,69 +30,70 @@ |
594 | import argparse |
595 | |
596 | parser = argparse.ArgumentParser( |
597 | - formatter_class=argparse.ArgumentDefaultsHelpFormatter) |
598 | - parser.add_argument('--config-dir', '-c', type=str, default='/etc/turku-agent') |
599 | - parser.add_argument('--wait', '-w', type=float) |
600 | - parser.add_argument('--debug', action='store_true') |
601 | + formatter_class=argparse.ArgumentDefaultsHelpFormatter |
602 | + ) |
603 | + parser.add_argument("--config-dir", "-c", type=str, default="/etc/turku-agent") |
604 | + parser.add_argument("--wait", "-w", type=float) |
605 | + parser.add_argument("--debug", action="store_true") |
606 | return parser.parse_args() |
607 | |
608 | |
609 | def write_conf_files(config): |
610 | # Build rsyncd.conf |
611 | built_rsyncd_conf = ( |
612 | - 'address = %s\n' % config['rsyncd_local_address'] + |
613 | - 'port = %d\n' % config['rsyncd_local_port'] + |
614 | - 'log file = /dev/stdout\n' + |
615 | - 'uid = root\n' + |
616 | - 'gid = root\n' + |
617 | - 'list = false\n\n' |
618 | + "address = %s\n" % config["rsyncd_local_address"] |
619 | + + "port = %d\n" % config["rsyncd_local_port"] |
620 | + + "log file = /dev/stdout\n" |
621 | + + "uid = root\n" |
622 | + + "gid = root\n" |
623 | + + "list = false\n\n" |
624 | ) |
625 | rsyncd_secrets = [] |
626 | - rsyncd_secrets.append((config['restore_username'], config['restore_password'])) |
627 | + rsyncd_secrets.append((config["restore_username"], config["restore_password"])) |
628 | built_rsyncd_conf += ( |
629 | - '[%s]\n' + |
630 | - ' path = %s\n' + |
631 | - ' auth users = %s\n' + |
632 | - ' secrets file = %s\n' + |
633 | - ' read only = false\n\n' |
634 | + "[%s]\n" |
635 | + + " path = %s\n" |
636 | + + " auth users = %s\n" |
637 | + + " secrets file = %s\n" |
638 | + + " read only = false\n\n" |
639 | ) % ( |
640 | - config['restore_module'], |
641 | - config['restore_path'], |
642 | - config['restore_username'], |
643 | - os.path.join(config['var_dir'], 'rsyncd.secrets'), |
644 | + config["restore_module"], |
645 | + config["restore_path"], |
646 | + config["restore_username"], |
647 | + os.path.join(config["var_dir"], "rsyncd.secrets"), |
648 | ) |
649 | - for s in config['sources']: |
650 | - sd = config['sources'][s] |
651 | - rsyncd_secrets.append((sd['username'], sd['password'])) |
652 | + for s in config["sources"]: |
653 | + sd = config["sources"][s] |
654 | + rsyncd_secrets.append((sd["username"], sd["password"])) |
655 | built_rsyncd_conf += ( |
656 | - '[%s]\n' + |
657 | - ' path = %s\n' + |
658 | - ' auth users = %s\n' + |
659 | - ' secrets file = %s\n' + |
660 | - ' read only = true\n\n' |
661 | + "[%s]\n" |
662 | + + " path = %s\n" |
663 | + + " auth users = %s\n" |
664 | + + " secrets file = %s\n" |
665 | + + " read only = true\n\n" |
666 | ) % ( |
667 | s, |
668 | - sd['path'], |
669 | - sd['username'], |
670 | - os.path.join(config['var_dir'], 'rsyncd.secrets'), |
671 | + sd["path"], |
672 | + sd["username"], |
673 | + os.path.join(config["var_dir"], "rsyncd.secrets"), |
674 | ) |
675 | - with open(os.path.join(config['var_dir'], 'rsyncd.conf'), 'w') as f: |
676 | + with open(os.path.join(config["var_dir"], "rsyncd.conf"), "w") as f: |
677 | f.write(built_rsyncd_conf) |
678 | |
679 | # Build rsyncd.secrets |
680 | - built_rsyncd_secrets = '' |
681 | + built_rsyncd_secrets = "" |
682 | for (username, password) in rsyncd_secrets: |
683 | - built_rsyncd_secrets += username + ':' + password + '\n' |
684 | - with open(os.path.join(config['var_dir'], 'rsyncd.secrets'), 'w') as f: |
685 | + built_rsyncd_secrets += username + ":" + password + "\n" |
686 | + with open(os.path.join(config["var_dir"], "rsyncd.secrets"), "w") as f: |
687 | os.fchmod(f.fileno(), 0o600) |
688 | f.write(built_rsyncd_secrets) |
689 | |
690 | |
691 | def init_is_upstart(): |
692 | try: |
693 | - return 'upstart' in subprocess.check_output( |
694 | - ['initctl', 'version'], |
695 | - stderr=subprocess.DEVNULL, universal_newlines=True) |
696 | + return "upstart" in subprocess.check_output( |
697 | + ["initctl", "version"], stderr=subprocess.DEVNULL, universal_newlines=True |
698 | + ) |
699 | except (FileNotFoundError, subprocess.CalledProcessError): |
700 | return False |
701 | |
702 | @@ -107,52 +107,54 @@ |
703 | # With Upstart, start will fail if the service is already running, |
704 | # so we need to check for that first. |
705 | try: |
706 | - if 'start/running' in subprocess.check_output( |
707 | - ['status', 'turku-agent-rsyncd'], |
708 | - stderr=subprocess.STDOUT, universal_newlines=True): |
709 | + if "start/running" in subprocess.check_output( |
710 | + ["status", "turku-agent-rsyncd"], |
711 | + stderr=subprocess.STDOUT, |
712 | + universal_newlines=True, |
713 | + ): |
714 | return |
715 | except subprocess.CalledProcessError: |
716 | pass |
717 | - subprocess.check_call(['service', 'turku-agent-rsyncd', 'start']) |
718 | + subprocess.check_call(["service", "turku-agent-rsyncd", "start"]) |
719 | |
720 | |
721 | def send_config(config): |
722 | - required_keys = ['api_url'] |
723 | - if 'api_auth' not in config: |
724 | - required_keys += ['api_auth_name', 'api_auth_secret'] |
725 | + required_keys = ["api_url"] |
726 | + if "api_auth" not in config: |
727 | + required_keys += ["api_auth_name", "api_auth_secret"] |
728 | for k in required_keys: |
729 | if k not in config: |
730 | raise IncompleteConfigError('Required config "%s" not found.' % k) |
731 | |
732 | api_out = {} |
733 | - if ('api_auth_name' in config) and ('api_auth_secret' in config): |
734 | + if ("api_auth_name" in config) and ("api_auth_secret" in config): |
735 | # name/secret style |
736 | - api_out['auth'] = { |
737 | - 'name': config['api_auth_name'], |
738 | - 'secret': config['api_auth_secret'], |
739 | + api_out["auth"] = { |
740 | + "name": config["api_auth_name"], |
741 | + "secret": config["api_auth_secret"], |
742 | } |
743 | else: |
744 | # nameless secret style |
745 | - api_out['auth'] = config['api_auth'] |
746 | + api_out["auth"] = config["api_auth"] |
747 | |
748 | # Merge the following options into the machine section |
749 | machine_merge_map = ( |
750 | - ('machine_uuid', 'uuid'), |
751 | - ('machine_secret', 'secret'), |
752 | - ('environment_name', 'environment_name'), |
753 | - ('service_name', 'service_name'), |
754 | - ('unit_name', 'unit_name'), |
755 | - ('ssh_public_key', 'ssh_public_key'), |
756 | - ('published', 'published'), |
757 | + ("machine_uuid", "uuid"), |
758 | + ("machine_secret", "secret"), |
759 | + ("environment_name", "environment_name"), |
760 | + ("service_name", "service_name"), |
761 | + ("unit_name", "unit_name"), |
762 | + ("ssh_public_key", "ssh_public_key"), |
763 | + ("published", "published"), |
764 | ) |
765 | - api_out['machine'] = {} |
766 | + api_out["machine"] = {} |
767 | for a, b in machine_merge_map: |
768 | if a in config: |
769 | - api_out['machine'][b] = config[a] |
770 | - |
771 | - api_out['machine']['sources'] = config['sources'] |
772 | - |
773 | - api_reply = api_call(config['api_url'], 'update_config', api_out) |
774 | + api_out["machine"][b] = config[a] |
775 | + |
776 | + api_out["machine"]["sources"] = config["sources"] |
777 | + |
778 | + api_call(config["api_url"], "update_config", api_out) |
779 | |
780 | |
781 | def main(): |
782 | @@ -162,7 +164,7 @@ |
783 | time.sleep(random.uniform(0, args.wait)) |
784 | |
785 | config = load_config(args.config_dir) |
786 | - lock = acquire_lock(os.path.join(config['lock_dir'], 'turku-update-config.lock')) |
787 | + lock = acquire_lock(os.path.join(config["lock_dir"], "turku-update-config.lock")) |
788 | fill_config(config) |
789 | if args.debug: |
790 | print(json_dumps_p(config)) |
791 | |
792 | === modified file 'turku_agent/utils.py' |
793 | --- turku_agent/utils.py 2020-03-23 22:31:56 +0000 |
794 | +++ turku_agent/utils.py 2020-06-21 22:22:36 +0000 |
795 | @@ -1,5 +1,3 @@ |
796 | -#!/usr/bin/env python3 |
797 | - |
798 | # Turku backups - client agent |
799 | # Copyright 2015 Canonical Ltd. |
800 | # |
801 | @@ -15,32 +13,34 @@ |
802 | # You should have received a copy of the GNU General Public License along with |
803 | # this program. If not, see <http://www.gnu.org/licenses/>. |
804 | |
805 | -import uuid |
806 | -import string |
807 | -import random |
808 | +import copy |
809 | +import http.client |
810 | import json |
811 | import os |
812 | -import copy |
813 | +import platform |
814 | +import random |
815 | +import string |
816 | import subprocess |
817 | -import platform |
818 | import urllib.parse |
819 | -import http.client |
820 | - |
821 | - |
822 | -class RuntimeLock(): |
823 | +import uuid |
824 | + |
825 | + |
826 | +class RuntimeLock: |
827 | name = None |
828 | file = None |
829 | |
830 | def __init__(self, name): |
831 | import fcntl |
832 | - file = open(name, 'w') |
833 | + |
834 | + file = open(name, "w") |
835 | try: |
836 | fcntl.lockf(file, fcntl.LOCK_EX | fcntl.LOCK_NB) |
837 | except IOError as e: |
838 | import errno |
839 | + |
840 | if e.errno in (errno.EACCES, errno.EAGAIN): |
841 | raise |
842 | - file.write('%10s\n' % os.getpid()) |
843 | + file.write("%10s\n" % os.getpid()) |
844 | file.flush() |
845 | file.seek(0) |
846 | self.name = name |
847 | @@ -71,12 +71,12 @@ |
848 | |
849 | def json_dump_p(obj, f): |
850 | """Calls json.dump with standard (pretty) formatting""" |
851 | - return json.dump(obj, f, sort_keys=True, indent=4, separators=(',', ': ')) |
852 | + return json.dump(obj, f, sort_keys=True, indent=4, separators=(",", ": ")) |
853 | |
854 | |
855 | def json_dumps_p(obj): |
856 | """Calls json.dumps with standard (pretty) formatting""" |
857 | - return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': ')) |
858 | + return json.dumps(obj, sort_keys=True, indent=4, separators=(",", ": ")) |
859 | |
860 | |
861 | def json_load_file(file): |
862 | @@ -103,10 +103,10 @@ |
863 | |
864 | def load_config(config_dir): |
865 | config = {} |
866 | - config['config_dir'] = config_dir |
867 | + config["config_dir"] = config_dir |
868 | |
869 | - config_d = os.path.join(config['config_dir'], 'config.d') |
870 | - sources_d = os.path.join(config['config_dir'], 'sources.d') |
871 | + config_d = os.path.join(config["config_dir"], "config.d") |
872 | + sources_d = os.path.join(config["config_dir"], "sources.d") |
873 | |
874 | # Merge in config.d/*.json to the root level |
875 | config_files = [] |
876 | @@ -114,7 +114,7 @@ |
877 | config_files = [ |
878 | os.path.join(config_d, fn) |
879 | for fn in os.listdir(config_d) |
880 | - if fn.endswith('.json') |
881 | + if fn.endswith(".json") |
882 | and os.path.isfile(os.path.join(config_d, fn)) |
883 | and os.access(os.path.join(config_d, fn), os.R_OK) |
884 | ] |
885 | @@ -122,10 +122,10 @@ |
886 | for file in config_files: |
887 | config = dict_merge(config, json_load_file(file)) |
888 | |
889 | - if 'var_dir' not in config: |
890 | - config['var_dir'] = '/var/lib/turku-agent' |
891 | + if "var_dir" not in config: |
892 | + config["var_dir"] = "/var/lib/turku-agent" |
893 | |
894 | - var_config_d = os.path.join(config['var_dir'], 'config.d') |
895 | + var_config_d = os.path.join(config["var_dir"], "config.d") |
896 | |
897 | # Load /var config.d files |
898 | var_config = {} |
899 | @@ -134,7 +134,7 @@ |
900 | var_config_files = [ |
901 | os.path.join(var_config_d, fn) |
902 | for fn in os.listdir(var_config_d) |
903 | - if fn.endswith('.json') |
904 | + if fn.endswith(".json") |
905 | and os.path.isfile(os.path.join(var_config_d, fn)) |
906 | and os.access(os.path.join(var_config_d, fn), os.R_OK) |
907 | ] |
908 | @@ -145,40 +145,40 @@ |
909 | var_config = dict_merge(var_config, config) |
910 | config = var_config |
911 | |
912 | - if 'lock_dir' not in config: |
913 | - config['lock_dir'] = '/var/lock' |
914 | - |
915 | - if 'rsyncd_command' not in config: |
916 | - config['rsyncd_command'] = ['rsync'] |
917 | - |
918 | - if 'rsyncd_local_address' not in config: |
919 | - config['rsyncd_local_address'] = '127.0.0.1' |
920 | - |
921 | - if 'rsyncd_local_port' not in config: |
922 | - config['rsyncd_local_port'] = 27873 |
923 | - |
924 | - if 'ssh_command' not in config: |
925 | - config['ssh_command'] = ['ssh'] |
926 | + if "lock_dir" not in config: |
927 | + config["lock_dir"] = "/var/lock" |
928 | + |
929 | + if "rsyncd_command" not in config: |
930 | + config["rsyncd_command"] = ["rsync"] |
931 | + |
932 | + if "rsyncd_local_address" not in config: |
933 | + config["rsyncd_local_address"] = "127.0.0.1" |
934 | + |
935 | + if "rsyncd_local_port" not in config: |
936 | + config["rsyncd_local_port"] = 27873 |
937 | + |
938 | + if "ssh_command" not in config: |
939 | + config["ssh_command"] = ["ssh"] |
940 | |
941 | # If a go/no-go program is defined, run it and only go if it exits 0. |
942 | # Type: String (program with no args) or list (program first, optional arguments after) |
943 | - if 'gonogo_program' not in config: |
944 | - config['gonogo_program'] = None |
945 | + if "gonogo_program" not in config: |
946 | + config["gonogo_program"] = None |
947 | |
948 | - var_sources_d = os.path.join(config['var_dir'], 'sources.d') |
949 | + var_sources_d = os.path.join(config["var_dir"], "sources.d") |
950 | |
951 | # Validate the unit name |
952 | - if 'unit_name' not in config: |
953 | - config['unit_name'] = platform.node() |
954 | + if "unit_name" not in config: |
955 | + config["unit_name"] = platform.node() |
956 | # If this isn't in the on-disk config, don't write it; just |
957 | # generate it every time |
958 | |
959 | # Pull the SSH public key |
960 | - if os.path.isfile(os.path.join(config['var_dir'], 'ssh_key.pub')): |
961 | - with open(os.path.join(config['var_dir'], 'ssh_key.pub')) as f: |
962 | - config['ssh_public_key'] = f.read().rstrip() |
963 | - config['ssh_public_key_file'] = os.path.join(config['var_dir'], 'ssh_key.pub') |
964 | - config['ssh_private_key_file'] = os.path.join(config['var_dir'], 'ssh_key') |
965 | + if os.path.isfile(os.path.join(config["var_dir"], "ssh_key.pub")): |
966 | + with open(os.path.join(config["var_dir"], "ssh_key.pub")) as f: |
967 | + config["ssh_public_key"] = f.read().rstrip() |
968 | + config["ssh_public_key_file"] = os.path.join(config["var_dir"], "ssh_key.pub") |
969 | + config["ssh_private_key_file"] = os.path.join(config["var_dir"], "ssh_key") |
970 | |
971 | sources_config = {} |
972 | # Merge in sources.d/*.json to the sources dict |
973 | @@ -187,7 +187,7 @@ |
974 | sources_files = [ |
975 | os.path.join(sources_d, fn) |
976 | for fn in os.listdir(sources_d) |
977 | - if fn.endswith('.json') |
978 | + if fn.endswith(".json") |
979 | and os.path.isfile(os.path.join(sources_d, fn)) |
980 | and os.access(os.path.join(sources_d, fn), os.R_OK) |
981 | ] |
982 | @@ -197,7 +197,7 @@ |
983 | var_sources_files = [ |
984 | os.path.join(var_sources_d, fn) |
985 | for fn in os.listdir(var_sources_d) |
986 | - if fn.endswith('.json') |
987 | + if fn.endswith(".json") |
988 | and os.path.isfile(os.path.join(var_sources_d, fn)) |
989 | and os.access(os.path.join(var_sources_d, fn), os.R_OK) |
990 | ] |
991 | @@ -208,19 +208,19 @@ |
992 | |
993 | # Check for required sources options |
994 | for s in list(sources_config.keys()): |
995 | - if 'path' not in sources_config[s]: |
996 | + if "path" not in sources_config[s]: |
997 | del sources_config[s] |
998 | |
999 | - config['sources'] = sources_config |
1000 | + config["sources"] = sources_config |
1001 | |
1002 | return config |
1003 | |
1004 | |
1005 | def fill_config(config): |
1006 | - config_d = os.path.join(config['config_dir'], 'config.d') |
1007 | - sources_d = os.path.join(config['config_dir'], 'sources.d') |
1008 | - var_config_d = os.path.join(config['var_dir'], 'config.d') |
1009 | - var_sources_d = os.path.join(config['var_dir'], 'sources.d') |
1010 | + config_d = os.path.join(config["config_dir"], "config.d") |
1011 | + sources_d = os.path.join(config["config_dir"], "sources.d") |
1012 | + var_config_d = os.path.join(config["var_dir"], "config.d") |
1013 | + var_sources_d = os.path.join(config["var_dir"], "sources.d") |
1014 | |
1015 | # Create required directories |
1016 | for d in (config_d, sources_d, var_config_d, var_sources_d): |
1017 | @@ -229,106 +229,122 @@ |
1018 | |
1019 | # Validate the machine UUID/secret |
1020 | write_uuid_data = False |
1021 | - if 'machine_uuid' not in config: |
1022 | - config['machine_uuid'] = str(uuid.uuid4()) |
1023 | + if "machine_uuid" not in config: |
1024 | + config["machine_uuid"] = str(uuid.uuid4()) |
1025 | write_uuid_data = True |
1026 | - if 'machine_secret' not in config: |
1027 | - config['machine_secret'] = ''.join( |
1028 | - random.choice(string.ascii_letters + string.digits) |
1029 | - for i in range(30) |
1030 | + if "machine_secret" not in config: |
1031 | + config["machine_secret"] = "".join( |
1032 | + random.choice(string.ascii_letters + string.digits) for i in range(30) |
1033 | ) |
1034 | write_uuid_data = True |
1035 | # Write out the machine UUID/secret if needed |
1036 | if write_uuid_data: |
1037 | - with open(os.path.join(var_config_d, '10-machine_uuid.json'), 'w') as f: |
1038 | + with open(os.path.join(var_config_d, "10-machine_uuid.json"), "w") as f: |
1039 | os.fchmod(f.fileno(), 0o600) |
1040 | - json_dump_p({ |
1041 | - 'machine_uuid': config['machine_uuid'], |
1042 | - 'machine_secret': config['machine_secret'], |
1043 | - }, f) |
1044 | + json_dump_p( |
1045 | + { |
1046 | + "machine_uuid": config["machine_uuid"], |
1047 | + "machine_secret": config["machine_secret"], |
1048 | + }, |
1049 | + f, |
1050 | + ) |
1051 | |
1052 | # Restoration configuration |
1053 | write_restore_data = False |
1054 | - if 'restore_path' not in config: |
1055 | - config['restore_path'] = '/var/backups/turku-agent/restore' |
1056 | - write_restore_data = True |
1057 | - if 'restore_module' not in config: |
1058 | - config['restore_module'] = 'turku-restore' |
1059 | - write_restore_data = True |
1060 | - if 'restore_username' not in config: |
1061 | - config['restore_username'] = str(uuid.uuid4()) |
1062 | - write_restore_data = True |
1063 | - if 'restore_password' not in config: |
1064 | - config['restore_password'] = ''.join( |
1065 | - random.choice(string.ascii_letters + string.digits) |
1066 | - for i in range(30) |
1067 | + if "restore_path" not in config: |
1068 | + config["restore_path"] = "/var/backups/turku-agent/restore" |
1069 | + write_restore_data = True |
1070 | + if "restore_module" not in config: |
1071 | + config["restore_module"] = "turku-restore" |
1072 | + write_restore_data = True |
1073 | + if "restore_username" not in config: |
1074 | + config["restore_username"] = str(uuid.uuid4()) |
1075 | + write_restore_data = True |
1076 | + if "restore_password" not in config: |
1077 | + config["restore_password"] = "".join( |
1078 | + random.choice(string.ascii_letters + string.digits) for i in range(30) |
1079 | ) |
1080 | write_restore_data = True |
1081 | if write_restore_data: |
1082 | - with open(os.path.join(var_config_d, '10-restore.json'), 'w') as f: |
1083 | + with open(os.path.join(var_config_d, "10-restore.json"), "w") as f: |
1084 | os.fchmod(f.fileno(), 0o600) |
1085 | restore_out = { |
1086 | - 'restore_path': config['restore_path'], |
1087 | - 'restore_module': config['restore_module'], |
1088 | - 'restore_username': config['restore_username'], |
1089 | - 'restore_password': config['restore_password'], |
1090 | + "restore_path": config["restore_path"], |
1091 | + "restore_module": config["restore_module"], |
1092 | + "restore_username": config["restore_username"], |
1093 | + "restore_password": config["restore_password"], |
1094 | } |
1095 | json_dump_p(restore_out, f) |
1096 | - if not os.path.isdir(config['restore_path']): |
1097 | - os.makedirs(config['restore_path']) |
1098 | + if not os.path.isdir(config["restore_path"]): |
1099 | + os.makedirs(config["restore_path"]) |
1100 | |
1101 | # Generate the SSH keypair if it doesn't exist |
1102 | - if 'ssh_private_key_file' not in config: |
1103 | - subprocess.check_call([ |
1104 | - 'ssh-keygen', '-t', 'rsa', '-N', '', '-C', 'turku', |
1105 | - '-f', os.path.join(config['var_dir'], 'ssh_key') |
1106 | - ]) |
1107 | - with open(os.path.join(config['var_dir'], 'ssh_key.pub')) as f: |
1108 | - config['ssh_public_key'] = f.read().rstrip() |
1109 | - config['ssh_public_key_file'] = os.path.join(config['var_dir'], 'ssh_key.pub') |
1110 | - config['ssh_private_key_file'] = os.path.join(config['var_dir'], 'ssh_key') |
1111 | + if "ssh_private_key_file" not in config: |
1112 | + subprocess.check_call( |
1113 | + [ |
1114 | + "ssh-keygen", |
1115 | + "-t", |
1116 | + "rsa", |
1117 | + "-N", |
1118 | + "", |
1119 | + "-C", |
1120 | + "turku", |
1121 | + "-f", |
1122 | + os.path.join(config["var_dir"], "ssh_key"), |
1123 | + ] |
1124 | + ) |
1125 | + with open(os.path.join(config["var_dir"], "ssh_key.pub")) as f: |
1126 | + config["ssh_public_key"] = f.read().rstrip() |
1127 | + config["ssh_public_key_file"] = os.path.join(config["var_dir"], "ssh_key.pub") |
1128 | + config["ssh_private_key_file"] = os.path.join(config["var_dir"], "ssh_key") |
1129 | |
1130 | - for s in config['sources']: |
1131 | + for s in config["sources"]: |
1132 | # Check for missing usernames/passwords |
1133 | - if not ('username' in config['sources'][s] or 'password' in config['sources'][s]): |
1134 | - sources_secrets_d = os.path.join(config['config_dir'], 'sources_secrets.d') |
1135 | - if 'username' not in config['sources'][s]: |
1136 | - config['sources'][s]['username'] = str(uuid.uuid4()) |
1137 | - if 'password' not in config['sources'][s]: |
1138 | - config['sources'][s]['password'] = ''.join( |
1139 | + if not ( |
1140 | + "username" in config["sources"][s] or "password" in config["sources"][s] |
1141 | + ): |
1142 | + if "username" not in config["sources"][s]: |
1143 | + config["sources"][s]["username"] = str(uuid.uuid4()) |
1144 | + if "password" not in config["sources"][s]: |
1145 | + config["sources"][s]["password"] = "".join( |
1146 | random.choice(string.ascii_letters + string.digits) |
1147 | for i in range(30) |
1148 | ) |
1149 | - with open(os.path.join(var_sources_d, '10-' + s + '.json'), 'w') as f: |
1150 | + with open(os.path.join(var_sources_d, "10-" + s + ".json"), "w") as f: |
1151 | os.fchmod(f.fileno(), 0o600) |
1152 | - json_dump_p({ |
1153 | - s: { |
1154 | - 'username': config['sources'][s]['username'], |
1155 | - 'password': config['sources'][s]['password'], |
1156 | - } |
1157 | - }, f) |
1158 | + json_dump_p( |
1159 | + { |
1160 | + s: { |
1161 | + "username": config["sources"][s]["username"], |
1162 | + "password": config["sources"][s]["password"], |
1163 | + } |
1164 | + }, |
1165 | + f, |
1166 | + ) |
1167 | |
1168 | |
1169 | def api_call(api_url, cmd, post_data, timeout=5): |
1170 | url = urllib.parse.urlparse(api_url) |
1171 | - if url.scheme == 'https': |
1172 | + if url.scheme == "https": |
1173 | h = http.client.HTTPSConnection(url.netloc, timeout=timeout) |
1174 | else: |
1175 | h = http.client.HTTPConnection(url.netloc, timeout=timeout) |
1176 | out = json.dumps(post_data) |
1177 | - h.putrequest('POST', '%s/%s' % (url.path, cmd)) |
1178 | - h.putheader('Content-Type', 'application/json') |
1179 | - h.putheader('Content-Length', len(out)) |
1180 | - h.putheader('Accept', 'application/json') |
1181 | + h.putrequest("POST", "%s/%s" % (url.path, cmd)) |
1182 | + h.putheader("Content-Type", "application/json") |
1183 | + h.putheader("Content-Length", len(out)) |
1184 | + h.putheader("Accept", "application/json") |
1185 | h.endheaders() |
1186 | - h.send(out.encode('UTF-8')) |
1187 | + h.send(out.encode("UTF-8")) |
1188 | |
1189 | res = h.getresponse() |
1190 | if not res.status == http.client.OK: |
1191 | - raise Exception('Received error %d (%s) from API server' % (res.status, res.reason)) |
1192 | - if not res.getheader('content-type') == 'application/json': |
1193 | - raise Exception('Received invalid reply from API server') |
1194 | + raise Exception( |
1195 | + "Received error %d (%s) from API server" % (res.status, res.reason) |
1196 | + ) |
1197 | + if not res.getheader("content-type") == "application/json": |
1198 | + raise Exception("Received invalid reply from API server") |
1199 | try: |
1200 | - return json.loads(res.read().decode('UTF-8')) |
1201 | + return json.loads(res.read().decode("UTF-8")) |
1202 | except ValueError: |
1203 | - raise Exception('Received invalid reply from API server') |
1204 | + raise Exception("Received invalid reply from API server") |