Merge lp:~fo0bar/turku/turku-api-cleanup into lp:turku/turku-api
- turku-api-cleanup
- Merge into turku-api
Status: | Merged |
---|---|
Approved by: | Barry Price |
Approved revision: | 65 |
Merged at revision: | 65 |
Proposed branch: | lp:~fo0bar/turku/turku-api-cleanup |
Merge into: | lp:turku/turku-api |
Diff against target: |
2291 lines (+901/-541) 12 files modified
.bzrignore (+61/-1) MANIFEST.in (+9/-0) Makefile (+28/-0) setup.py (+28/-0) tests/test_stub.py (+8/-0) tox.ini (+38/-0) turku_api/admin.py (+109/-82) turku_api/models.py (+167/-162) turku_api/settings.py (+37/-35) turku_api/urls.py (+31/-13) turku_api/views.py (+382/-247) turku_api/wsgi.py (+3/-1) |
To merge this branch: | bzr merge lp:~fo0bar/turku/turku-api-cleanup |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Stuart Bishop (community) | Approve | ||
Review via email: mp+386143@code.launchpad.net |
Commit message
Mega-noop cleanup
Description of the change
This is the minimum required for:
- tox test suite with all passing tests
- black-managed formatting
- Shippable sdist module
It is intended as a base for the other MPs, so they don't have to e.g. establish tests/*, or worry about existing failing flake8, or worry about how to add additional optional modules.
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
- 65. By Ryan Finnie
-
Mega-noop cleanup
- Add setup.py
- This is a Django application, but treating it as a full Python
module helps with tox testing
- Sort imports
- Create MANIFEST.in so `setup.py sdist` produces usable tarballs
- Create stub tests
- Add tox.ini
- Add blank requirements.txt
- Add Makefile
- make black
- Update .bzrignore
- Remove blank turku_api/tests.py
- Clean up flake8:
- Ignore local_settings/local_urls import * F401/F403
- Ignore wsgi.py import E402
- Fix urls.py 'django.conf.urls.include' imported but unused
Stuart Bishop (stub) wrote : | # |
Yup, same deal as the other two turku cleanup branches.
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
Change successfully merged at revision 65
Preview Diff
1 | === modified file '.bzrignore' | |||
2 | --- .bzrignore 2015-03-26 05:04:41 +0000 | |||
3 | +++ .bzrignore 2020-06-21 23:58:30 +0000 | |||
4 | @@ -1,4 +1,64 @@ | |||
5 | 1 | *.pyc | ||
6 | 2 | db.sqlite3 | 1 | db.sqlite3 |
7 | 3 | turku_api/local_settings.py | 2 | turku_api/local_settings.py |
8 | 4 | turku_api/local_urls.py | 3 | turku_api/local_urls.py |
9 | 4 | MANIFEST | ||
10 | 5 | .pybuild/ | ||
11 | 6 | .pytest_cache/ | ||
12 | 7 | |||
13 | 8 | # Byte-compiled / optimized / DLL files | ||
14 | 9 | __pycache__/ | ||
15 | 10 | *.py[cod] | ||
16 | 11 | |||
17 | 12 | # C extensions | ||
18 | 13 | *.so | ||
19 | 14 | |||
20 | 15 | # Distribution / packaging | ||
21 | 16 | .Python | ||
22 | 17 | env/ | ||
23 | 18 | build/ | ||
24 | 19 | develop-eggs/ | ||
25 | 20 | dist/ | ||
26 | 21 | downloads/ | ||
27 | 22 | eggs/ | ||
28 | 23 | .eggs/ | ||
29 | 24 | lib/ | ||
30 | 25 | lib64/ | ||
31 | 26 | parts/ | ||
32 | 27 | sdist/ | ||
33 | 28 | var/ | ||
34 | 29 | *.egg-info/ | ||
35 | 30 | .installed.cfg | ||
36 | 31 | *.egg | ||
37 | 32 | |||
38 | 33 | # PyInstaller | ||
39 | 34 | # Usually these files are written by a python script from a template | ||
40 | 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. | ||
41 | 36 | *.manifest | ||
42 | 37 | *.spec | ||
43 | 38 | |||
44 | 39 | # Installer logs | ||
45 | 40 | pip-log.txt | ||
46 | 41 | pip-delete-this-directory.txt | ||
47 | 42 | |||
48 | 43 | # Unit test / coverage reports | ||
49 | 44 | htmlcov/ | ||
50 | 45 | .tox/ | ||
51 | 46 | .coverage | ||
52 | 47 | .coverage.* | ||
53 | 48 | .cache | ||
54 | 49 | nosetests.xml | ||
55 | 50 | coverage.xml | ||
56 | 51 | *,cover | ||
57 | 52 | |||
58 | 53 | # Translations | ||
59 | 54 | *.mo | ||
60 | 55 | *.pot | ||
61 | 56 | |||
62 | 57 | # Django stuff: | ||
63 | 58 | *.log | ||
64 | 59 | |||
65 | 60 | # Sphinx documentation | ||
66 | 61 | docs/_build/ | ||
67 | 62 | |||
68 | 63 | # PyBuilder | ||
69 | 64 | target/ | ||
70 | 5 | 65 | ||
71 | === added file 'MANIFEST.in' | |||
72 | --- MANIFEST.in 1970-01-01 00:00:00 +0000 | |||
73 | +++ MANIFEST.in 2020-06-21 23:58:30 +0000 | |||
74 | @@ -0,0 +1,9 @@ | |||
75 | 1 | include Makefile | ||
76 | 2 | include manage.py | ||
77 | 3 | include MANIFEST.in | ||
78 | 4 | include README.md | ||
79 | 5 | include requirements.txt | ||
80 | 6 | include scripts/turku_health | ||
81 | 7 | include tests/*.py | ||
82 | 8 | include tox.ini | ||
83 | 9 | include turku_api/templates/admin/*.html | ||
84 | 0 | 10 | ||
85 | === added file 'Makefile' | |||
86 | --- Makefile 1970-01-01 00:00:00 +0000 | |||
87 | +++ Makefile 2020-06-21 23:58:30 +0000 | |||
88 | @@ -0,0 +1,28 @@ | |||
89 | 1 | PYTHON := python3 | ||
90 | 2 | |||
91 | 3 | all: build | ||
92 | 4 | |||
93 | 5 | build: | ||
94 | 6 | $(PYTHON) setup.py build | ||
95 | 7 | |||
96 | 8 | lint: | ||
97 | 9 | $(PYTHON) -mtox -e flake8 | ||
98 | 10 | |||
99 | 11 | test: | ||
100 | 12 | $(PYTHON) -mtox | ||
101 | 13 | |||
102 | 14 | test-quick: | ||
103 | 15 | $(PYTHON) -mtox -e black,flake8,pytest-quick | ||
104 | 16 | |||
105 | 17 | black-check: | ||
106 | 18 | $(PYTHON) -mtox -e black | ||
107 | 19 | |||
108 | 20 | black: | ||
109 | 21 | $(PYTHON) -mblack $(CURDIR) | ||
110 | 22 | |||
111 | 23 | install: build | ||
112 | 24 | $(PYTHON) setup.py install | ||
113 | 25 | |||
114 | 26 | clean: | ||
115 | 27 | $(PYTHON) setup.py clean | ||
116 | 28 | $(RM) -r build MANIFEST | ||
117 | 0 | 29 | ||
118 | === added file 'requirements.txt' | |||
119 | === added file 'setup.py' | |||
120 | --- setup.py 1970-01-01 00:00:00 +0000 | |||
121 | +++ setup.py 2020-06-21 23:58:30 +0000 | |||
122 | @@ -0,0 +1,28 @@ | |||
123 | 1 | #!/usr/bin/env python3 | ||
124 | 2 | |||
125 | 3 | # Turku backups - API application | ||
126 | 4 | # Copyright 2015-2020 Canonical Ltd. | ||
127 | 5 | # | ||
128 | 6 | # This program is free software: you can redistribute it and/or modify it | ||
129 | 7 | # under the terms of the GNU General Public License version 3, as published by | ||
130 | 8 | # the Free Software Foundation. | ||
131 | 9 | # | ||
132 | 10 | # This program is distributed in the hope that it will be useful, but WITHOUT | ||
133 | 11 | # ANY WARRANTY; without even the implied warranties of MERCHANTABILITY, | ||
134 | 12 | # SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
135 | 13 | # General Public License for more details. | ||
136 | 14 | # | ||
137 | 15 | # You should have received a copy of the GNU General Public License along with | ||
138 | 16 | # this program. If not, see <http://www.gnu.org/licenses/>. | ||
139 | 17 | |||
140 | 18 | from setuptools import setup | ||
141 | 19 | |||
142 | 20 | |||
143 | 21 | setup( | ||
144 | 22 | name="turku_api", | ||
145 | 23 | description="Turku backups - API application", | ||
146 | 24 | author="Ryan Finnie", | ||
147 | 25 | url="https://launchpad.net/turku", | ||
148 | 26 | python_requires="~=3.4", | ||
149 | 27 | packages=["turku_api"], | ||
150 | 28 | ) | ||
151 | 0 | 29 | ||
152 | === added directory 'tests' | |||
153 | === added file 'tests/__init__.py' | |||
154 | === added file 'tests/test_stub.py' | |||
155 | --- tests/test_stub.py 1970-01-01 00:00:00 +0000 | |||
156 | +++ tests/test_stub.py 2020-06-21 23:58:30 +0000 | |||
157 | @@ -0,0 +1,8 @@ | |||
158 | 1 | import unittest | ||
159 | 2 | import warnings | ||
160 | 3 | |||
161 | 4 | |||
162 | 5 | class TestStub(unittest.TestCase): | ||
163 | 6 | def test_stub(self): | ||
164 | 7 | # pytest doesn't like a tests/ with no tests | ||
165 | 8 | warnings.warn("Remove this file once unit tests are added") | ||
166 | 0 | 9 | ||
167 | === added file 'tox.ini' | |||
168 | --- tox.ini 1970-01-01 00:00:00 +0000 | |||
169 | +++ tox.ini 2020-06-21 23:58:30 +0000 | |||
170 | @@ -0,0 +1,38 @@ | |||
171 | 1 | [tox] | ||
172 | 2 | envlist = black, flake8, pytest | ||
173 | 3 | |||
174 | 4 | [testenv] | ||
175 | 5 | basepython = python | ||
176 | 6 | |||
177 | 7 | [testenv:black] | ||
178 | 8 | commands = python -mblack --check . | ||
179 | 9 | deps = black | ||
180 | 10 | |||
181 | 11 | [testenv:flake8] | ||
182 | 12 | commands = python -mflake8 | ||
183 | 13 | deps = flake8 | ||
184 | 14 | |||
185 | 15 | [testenv:pytest] | ||
186 | 16 | commands = python -mpytest --cov=turku_api --cov-report=term-missing | ||
187 | 17 | deps = pytest | ||
188 | 18 | pytest-cov | ||
189 | 19 | -r{toxinidir}/requirements.txt | ||
190 | 20 | |||
191 | 21 | [testenv:pytest-quick] | ||
192 | 22 | commands = python -mpytest -m "not slow" | ||
193 | 23 | deps = pytest | ||
194 | 24 | -r{toxinidir}/requirements.txt | ||
195 | 25 | |||
196 | 26 | [flake8] | ||
197 | 27 | exclude = | ||
198 | 28 | .git, | ||
199 | 29 | __pycache__, | ||
200 | 30 | .tox, | ||
201 | 31 | # TODO: remove C901 once complexity is reduced | ||
202 | 32 | ignore = C901,E203,E231,W503 | ||
203 | 33 | max-line-length = 120 | ||
204 | 34 | max-complexity = 10 | ||
205 | 35 | |||
206 | 36 | [pytest] | ||
207 | 37 | markers = | ||
208 | 38 | slow | ||
209 | 0 | 39 | ||
210 | === modified file 'turku_api/admin.py' | |||
211 | --- turku_api/admin.py 2020-05-06 02:41:37 +0000 | |||
212 | +++ turku_api/admin.py 2020-06-21 23:58:30 +0000 | |||
213 | @@ -14,38 +14,39 @@ | |||
214 | 14 | # License along with this program. If not, see | 14 | # License along with this program. If not, see |
215 | 15 | # <http://www.gnu.org/licenses/>. | 15 | # <http://www.gnu.org/licenses/>. |
216 | 16 | 16 | ||
217 | 17 | import datetime | ||
218 | 18 | |||
219 | 17 | from django import forms | 19 | from django import forms |
220 | 18 | from django.contrib import admin | 20 | from django.contrib import admin |
222 | 19 | from turku_api.models import Machine, Source, Auth, Storage, BackupLog, FilterSet | 21 | from django.contrib.humanize.templatetags.humanize import naturaltime |
223 | 22 | from django.utils import timezone | ||
224 | 20 | from django.utils.html import format_html | 23 | from django.utils.html import format_html |
227 | 21 | from django.utils import timezone | 24 | |
226 | 22 | from django.contrib.humanize.templatetags.humanize import naturaltime | ||
228 | 23 | try: | 25 | try: |
229 | 24 | from django.urls import reverse # 1.10+ | 26 | from django.urls import reverse # 1.10+ |
230 | 25 | except ModuleNotFoundError: | 27 | except ModuleNotFoundError: |
231 | 26 | from django.core.urlresolvers import reverse # pre-1.10 | 28 | from django.core.urlresolvers import reverse # pre-1.10 |
233 | 27 | import datetime | 29 | |
234 | 30 | from turku_api.models import Auth, BackupLog, FilterSet, Machine, Source, Storage | ||
235 | 28 | 31 | ||
236 | 29 | 32 | ||
237 | 30 | def get_admin_change_link(obj, name=None): | 33 | def get_admin_change_link(obj, name=None): |
238 | 31 | url = reverse( | 34 | url = reverse( |
241 | 32 | 'admin:%s_%s_change' % (obj._meta.app_label, obj._meta.model_name), | 35 | "admin:%s_%s_change" % (obj._meta.app_label, obj._meta.model_name), |
242 | 33 | args=(obj.id,) | 36 | args=(obj.id,), |
243 | 34 | ) | 37 | ) |
244 | 35 | if not name: | 38 | if not name: |
245 | 36 | name = obj | 39 | name = obj |
249 | 37 | return format_html( | 40 | return format_html('<a href="{}">{}</a>'.format(url, name)) |
247 | 38 | '<a href="{}">{}</a>'.format(url, name) | ||
248 | 39 | ) | ||
250 | 40 | 41 | ||
251 | 41 | 42 | ||
252 | 42 | def human_si(v, begin=0): | 43 | def human_si(v, begin=0): |
254 | 43 | p = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi') | 44 | p = ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi", "Yi") |
255 | 44 | i = begin | 45 | i = begin |
256 | 45 | while v >= 1024.0: | 46 | while v >= 1024.0: |
257 | 46 | v = int(v / 10.24) / 100.0 | 47 | v = int(v / 10.24) / 100.0 |
258 | 47 | i += 1 | 48 | i += 1 |
260 | 48 | return '%g %sB' % (v, p[i]) | 49 | return "%g %sB" % (v, p[i]) |
261 | 49 | 50 | ||
262 | 50 | 51 | ||
263 | 51 | def human_time(t): | 52 | def human_time(t): |
264 | @@ -58,58 +59,62 @@ | |||
265 | 58 | 59 | ||
266 | 59 | 60 | ||
267 | 60 | class CustomModelAdmin(admin.ModelAdmin): | 61 | class CustomModelAdmin(admin.ModelAdmin): |
269 | 61 | change_form_template = 'admin/custom_change_form.html' | 62 | change_form_template = "admin/custom_change_form.html" |
270 | 62 | 63 | ||
271 | 63 | def render_change_form(self, request, context, *args, **kwargs): | 64 | def render_change_form(self, request, context, *args, **kwargs): |
272 | 64 | # Build a list of related children objects and their counts | 65 | # Build a list of related children objects and their counts |
273 | 65 | # so they may be linked to in the admin interface | 66 | # so they may be linked to in the admin interface |
274 | 66 | related_links = [] | 67 | related_links = [] |
276 | 67 | if 'object_id' in context and hasattr(self.model._meta, 'get_fields'): | 68 | if "object_id" in context and hasattr(self.model._meta, "get_fields"): |
277 | 68 | related_objs = [ | 69 | related_objs = [ |
281 | 69 | f for f in self.model._meta.get_fields() | 70 | f |
282 | 70 | if (f.one_to_many or f.one_to_one) | 71 | for f in self.model._meta.get_fields() |
283 | 71 | and f.auto_created and not f.concrete | 72 | if (f.one_to_many or f.one_to_one) and f.auto_created and not f.concrete |
284 | 72 | ] | 73 | ] |
285 | 73 | for obj in related_objs: | 74 | for obj in related_objs: |
287 | 74 | count = obj.related_model.objects.filter(**{obj.field.name: context['object_id']}).count() | 75 | count = obj.related_model.objects.filter( |
288 | 76 | **{obj.field.name: context["object_id"]} | ||
289 | 77 | ).count() | ||
290 | 75 | if count > 0: | 78 | if count > 0: |
291 | 76 | related_links.append((obj, obj.related_model._meta, count)) | 79 | related_links.append((obj, obj.related_model._meta, count)) |
293 | 77 | context.update({'related_links': related_links}) | 80 | context.update({"related_links": related_links}) |
294 | 78 | 81 | ||
296 | 79 | return super(CustomModelAdmin, self).render_change_form(request, context, *args, **kwargs) | 82 | return super(CustomModelAdmin, self).render_change_form( |
297 | 83 | request, context, *args, **kwargs | ||
298 | 84 | ) | ||
299 | 80 | 85 | ||
300 | 81 | 86 | ||
301 | 82 | class MachineAdminForm(forms.ModelForm): | 87 | class MachineAdminForm(forms.ModelForm): |
302 | 83 | class Meta: | 88 | class Meta: |
303 | 84 | model = Machine | 89 | model = Machine |
305 | 85 | fields = '__all__' | 90 | fields = "__all__" |
306 | 86 | 91 | ||
307 | 87 | def __init__(self, *args, **kwargs): | 92 | def __init__(self, *args, **kwargs): |
308 | 88 | super(MachineAdminForm, self).__init__(*args, **kwargs) | 93 | super(MachineAdminForm, self).__init__(*args, **kwargs) |
310 | 89 | self.fields['auth'].queryset = Auth.objects.filter(secret_type='machine_reg') | 94 | self.fields["auth"].queryset = Auth.objects.filter(secret_type="machine_reg") |
311 | 90 | 95 | ||
312 | 91 | 96 | ||
313 | 92 | class StorageAdminForm(forms.ModelForm): | 97 | class StorageAdminForm(forms.ModelForm): |
314 | 93 | class Meta: | 98 | class Meta: |
315 | 94 | model = Storage | 99 | model = Storage |
317 | 95 | fields = '__all__' | 100 | fields = "__all__" |
318 | 96 | 101 | ||
319 | 97 | def __init__(self, *args, **kwargs): | 102 | def __init__(self, *args, **kwargs): |
320 | 98 | super(StorageAdminForm, self).__init__(*args, **kwargs) | 103 | super(StorageAdminForm, self).__init__(*args, **kwargs) |
322 | 99 | self.fields['auth'].queryset = Auth.objects.filter(secret_type='storage_reg') | 104 | self.fields["auth"].queryset = Auth.objects.filter(secret_type="storage_reg") |
323 | 100 | 105 | ||
324 | 101 | 106 | ||
325 | 102 | class AuthAdmin(CustomModelAdmin): | 107 | class AuthAdmin(CustomModelAdmin): |
329 | 103 | list_display = ('name', 'secret_type', 'date_added', 'active') | 108 | list_display = ("name", "secret_type", "date_added", "active") |
330 | 104 | ordering = ('name',) | 109 | ordering = ("name",) |
331 | 105 | search_fields = ('name', 'comment',) | 110 | search_fields = ("name", "comment") |
332 | 106 | 111 | ||
333 | 107 | 112 | ||
334 | 108 | class ExcludeListFilter(admin.SimpleListFilter): | 113 | class ExcludeListFilter(admin.SimpleListFilter): |
335 | 109 | def __init__(self, *args, **kwargs): | 114 | def __init__(self, *args, **kwargs): |
336 | 110 | if not self.title: | 115 | if not self.title: |
337 | 111 | self.title = self.parameter_name | 116 | self.title = self.parameter_name |
339 | 112 | self.parameter_name += '__exclude' | 117 | self.parameter_name += "__exclude" |
340 | 113 | super(ExcludeListFilter, self).__init__(*args, **kwargs) | 118 | super(ExcludeListFilter, self).__init__(*args, **kwargs) |
341 | 114 | 119 | ||
342 | 115 | def has_output(self): | 120 | def has_output(self): |
343 | @@ -125,7 +130,7 @@ | |||
344 | 125 | 130 | ||
345 | 126 | 131 | ||
346 | 127 | class NameExcludeListFilter(ExcludeListFilter): | 132 | class NameExcludeListFilter(ExcludeListFilter): |
348 | 128 | parameter_name = 'name' | 133 | parameter_name = "name" |
349 | 129 | 134 | ||
350 | 130 | 135 | ||
351 | 131 | class MachineAdmin(CustomModelAdmin): | 136 | class MachineAdmin(CustomModelAdmin): |
352 | @@ -133,61 +138,73 @@ | |||
353 | 133 | return get_admin_change_link(obj.storage) | 138 | return get_admin_change_link(obj.storage) |
354 | 134 | 139 | ||
355 | 135 | storage_link.allow_tags = True | 140 | storage_link.allow_tags = True |
358 | 136 | storage_link.admin_order_field = 'storage__name' | 141 | storage_link.admin_order_field = "storage__name" |
359 | 137 | storage_link.short_description = 'storage' | 142 | storage_link.short_description = "storage" |
360 | 138 | 143 | ||
361 | 139 | def date_checked_in_human(self, obj): | 144 | def date_checked_in_human(self, obj): |
362 | 140 | return human_time(obj.date_checked_in) | 145 | return human_time(obj.date_checked_in) |
363 | 141 | 146 | ||
366 | 142 | date_checked_in_human.admin_order_field = 'date_checked_in' | 147 | date_checked_in_human.admin_order_field = "date_checked_in" |
367 | 143 | date_checked_in_human.short_description = 'date checked in' | 148 | date_checked_in_human.short_description = "date checked in" |
368 | 144 | 149 | ||
369 | 145 | form = MachineAdminForm | 150 | form = MachineAdminForm |
370 | 146 | list_display = ( | 151 | list_display = ( |
382 | 147 | 'unit_name', 'uuid', 'storage_link', 'environment_name', | 152 | "unit_name", |
383 | 148 | 'service_name', 'date_checked_in_human', 'published', 'active', | 153 | "uuid", |
384 | 149 | 'healthy', | 154 | "storage_link", |
385 | 150 | ) | 155 | "environment_name", |
386 | 151 | list_display_links = ('unit_name',) | 156 | "service_name", |
387 | 152 | list_filter = ('date_checked_in', 'storage', 'active', 'published') | 157 | "date_checked_in_human", |
388 | 153 | ordering = ('unit_name',) | 158 | "published", |
389 | 154 | search_fields = ( | 159 | "active", |
390 | 155 | 'unit_name', 'uuid', 'environment_name', 'service_name', | 160 | "healthy", |
391 | 156 | 'comment', | 161 | ) |
392 | 157 | ) | 162 | list_display_links = ("unit_name",) |
393 | 163 | list_filter = ("date_checked_in", "storage", "active", "published") | ||
394 | 164 | ordering = ("unit_name",) | ||
395 | 165 | search_fields = ("unit_name", "uuid", "environment_name", "service_name", "comment") | ||
396 | 158 | 166 | ||
397 | 159 | 167 | ||
398 | 160 | class SourceAdmin(CustomModelAdmin): | 168 | class SourceAdmin(CustomModelAdmin): |
399 | 161 | def date_last_backed_up_human(self, obj): | 169 | def date_last_backed_up_human(self, obj): |
400 | 162 | return human_time(obj.date_last_backed_up) | 170 | return human_time(obj.date_last_backed_up) |
401 | 163 | 171 | ||
404 | 164 | date_last_backed_up_human.admin_order_field = 'date_last_backed_up' | 172 | date_last_backed_up_human.admin_order_field = "date_last_backed_up" |
405 | 165 | date_last_backed_up_human.short_description = 'date last backed up' | 173 | date_last_backed_up_human.short_description = "date last backed up" |
406 | 166 | 174 | ||
407 | 167 | def date_next_backup_human(self, obj): | 175 | def date_next_backup_human(self, obj): |
408 | 168 | return human_time(obj.date_next_backup) | 176 | return human_time(obj.date_next_backup) |
409 | 169 | 177 | ||
412 | 170 | date_next_backup_human.admin_order_field = 'date_next_backup' | 178 | date_next_backup_human.admin_order_field = "date_next_backup" |
413 | 171 | date_next_backup_human.short_description = 'date next backup' | 179 | date_next_backup_human.short_description = "date next backup" |
414 | 172 | 180 | ||
415 | 173 | def machine_link(self, obj): | 181 | def machine_link(self, obj): |
416 | 174 | return get_admin_change_link(obj.machine) | 182 | return get_admin_change_link(obj.machine) |
417 | 175 | 183 | ||
418 | 176 | machine_link.allow_tags = True | 184 | machine_link.allow_tags = True |
421 | 177 | machine_link.admin_order_field = 'machine__unit_name' | 185 | machine_link.admin_order_field = "machine__unit_name" |
422 | 178 | machine_link.short_description = 'machine' | 186 | machine_link.short_description = "machine" |
423 | 179 | 187 | ||
424 | 180 | list_display = ( | 188 | list_display = ( |
427 | 181 | 'name', 'machine_link', 'path', 'date_last_backed_up_human', | 189 | "name", |
428 | 182 | 'date_next_backup_human', 'published', 'active', 'healthy', | 190 | "machine_link", |
429 | 191 | "path", | ||
430 | 192 | "date_last_backed_up_human", | ||
431 | 193 | "date_next_backup_human", | ||
432 | 194 | "published", | ||
433 | 195 | "active", | ||
434 | 196 | "healthy", | ||
435 | 183 | ) | 197 | ) |
437 | 184 | list_display_links = ('name',) | 198 | list_display_links = ("name",) |
438 | 185 | list_filter = ( | 199 | list_filter = ( |
440 | 186 | 'date_last_backed_up', 'date_next_backup', 'active', 'published', | 200 | "date_last_backed_up", |
441 | 201 | "date_next_backup", | ||
442 | 202 | "active", | ||
443 | 203 | "published", | ||
444 | 187 | NameExcludeListFilter, | 204 | NameExcludeListFilter, |
445 | 188 | ) | 205 | ) |
448 | 189 | ordering = ('machine__unit_name', 'name') | 206 | ordering = ("machine__unit_name", "name") |
449 | 190 | search_fields = ('name', 'comment', 'path',) | 207 | search_fields = ("name", "comment", "path") |
450 | 191 | 208 | ||
451 | 192 | 209 | ||
452 | 193 | class BackupLogAdmin(CustomModelAdmin): | 210 | class BackupLogAdmin(CustomModelAdmin): |
453 | @@ -195,8 +212,8 @@ | |||
454 | 195 | return get_admin_change_link(obj.source) | 212 | return get_admin_change_link(obj.source) |
455 | 196 | 213 | ||
456 | 197 | source_link.allow_tags = True | 214 | source_link.allow_tags = True |
459 | 198 | source_link.admin_order_field = 'source__name' | 215 | source_link.admin_order_field = "source__name" |
460 | 199 | source_link.short_description = 'source' | 216 | source_link.short_description = "source" |
461 | 200 | 217 | ||
462 | 201 | def duration(self, obj): | 218 | def duration(self, obj): |
463 | 202 | if not (obj.date_end and obj.date_begin): | 219 | if not (obj.date_end and obj.date_begin): |
464 | @@ -204,8 +221,8 @@ | |||
465 | 204 | d = obj.date_end - obj.date_begin | 221 | d = obj.date_end - obj.date_begin |
466 | 205 | return d - datetime.timedelta(microseconds=d.microseconds) | 222 | return d - datetime.timedelta(microseconds=d.microseconds) |
467 | 206 | 223 | ||
470 | 207 | duration.admin_order_field = 'date_end' | 224 | duration.admin_order_field = "date_end" |
471 | 208 | duration.short_description = 'duration' | 225 | duration.short_description = "duration" |
472 | 209 | 226 | ||
473 | 210 | def storage_link(self, obj): | 227 | def storage_link(self, obj): |
474 | 211 | if not obj.storage: | 228 | if not obj.storage: |
475 | @@ -213,57 +230,67 @@ | |||
476 | 213 | return get_admin_change_link(obj.storage) | 230 | return get_admin_change_link(obj.storage) |
477 | 214 | 231 | ||
478 | 215 | storage_link.allow_tags = True | 232 | storage_link.allow_tags = True |
481 | 216 | storage_link.admin_order_field = 'storage__name' | 233 | storage_link.admin_order_field = "storage__name" |
482 | 217 | storage_link.short_description = 'storage' | 234 | storage_link.short_description = "storage" |
483 | 218 | 235 | ||
484 | 219 | def date_human(self, obj): | 236 | def date_human(self, obj): |
485 | 220 | return human_time(obj.date) | 237 | return human_time(obj.date) |
486 | 221 | 238 | ||
489 | 222 | date_human.admin_order_field = 'date' | 239 | date_human.admin_order_field = "date" |
490 | 223 | date_human.short_description = 'date' | 240 | date_human.short_description = "date" |
491 | 224 | 241 | ||
492 | 225 | list_display = ( | 242 | list_display = ( |
495 | 226 | 'date_human', 'source_link', 'success', 'snapshot', 'storage_link', | 243 | "date_human", |
496 | 227 | 'duration', | 244 | "source_link", |
497 | 245 | "success", | ||
498 | 246 | "snapshot", | ||
499 | 247 | "storage_link", | ||
500 | 248 | "duration", | ||
501 | 228 | ) | 249 | ) |
505 | 229 | list_display_links = ('date_human',) | 250 | list_display_links = ("date_human",) |
506 | 230 | list_filter = ('date', 'success') | 251 | list_filter = ("date", "success") |
507 | 231 | ordering = ('-date',) | 252 | ordering = ("-date",) |
508 | 232 | 253 | ||
509 | 233 | 254 | ||
510 | 234 | class FilterSetAdmin(CustomModelAdmin): | 255 | class FilterSetAdmin(CustomModelAdmin): |
514 | 235 | list_display = ('name', 'date_added', 'active') | 256 | list_display = ("name", "date_added", "active") |
515 | 236 | ordering = ('name',) | 257 | ordering = ("name",) |
516 | 237 | search_fields = ('name', 'comment',) | 258 | search_fields = ("name", "comment") |
517 | 238 | 259 | ||
518 | 239 | 260 | ||
519 | 240 | class StorageAdmin(CustomModelAdmin): | 261 | class StorageAdmin(CustomModelAdmin): |
520 | 241 | def space_total_human(self, obj): | 262 | def space_total_human(self, obj): |
521 | 242 | return human_si(obj.space_total, 2) | 263 | return human_si(obj.space_total, 2) |
522 | 243 | 264 | ||
525 | 244 | space_total_human.admin_order_field = 'space_total' | 265 | space_total_human.admin_order_field = "space_total" |
526 | 245 | space_total_human.short_description = 'space total' | 266 | space_total_human.short_description = "space total" |
527 | 246 | 267 | ||
528 | 247 | def space_available_human(self, obj): | 268 | def space_available_human(self, obj): |
529 | 248 | return human_si(obj.space_available, 2) | 269 | return human_si(obj.space_available, 2) |
530 | 249 | 270 | ||
533 | 250 | space_available_human.admin_order_field = 'space_available' | 271 | space_available_human.admin_order_field = "space_available" |
534 | 251 | space_available_human.short_description = 'space available' | 272 | space_available_human.short_description = "space available" |
535 | 252 | 273 | ||
536 | 253 | def date_checked_in_human(self, obj): | 274 | def date_checked_in_human(self, obj): |
537 | 254 | return human_time(obj.date_checked_in) | 275 | return human_time(obj.date_checked_in) |
538 | 255 | 276 | ||
541 | 256 | date_checked_in_human.admin_order_field = 'date_checked_in' | 277 | date_checked_in_human.admin_order_field = "date_checked_in" |
542 | 257 | date_checked_in_human.short_description = 'date checked in' | 278 | date_checked_in_human.short_description = "date checked in" |
543 | 258 | 279 | ||
544 | 259 | form = StorageAdminForm | 280 | form = StorageAdminForm |
545 | 260 | list_display = ( | 281 | list_display = ( |
549 | 261 | 'name', 'ssh_ping_host', 'ssh_ping_user', 'date_checked_in_human', | 282 | "name", |
550 | 262 | 'space_total_human', 'space_available_human', 'published', 'active', | 283 | "ssh_ping_host", |
551 | 263 | 'healthy', | 284 | "ssh_ping_user", |
552 | 285 | "date_checked_in_human", | ||
553 | 286 | "space_total_human", | ||
554 | 287 | "space_available_human", | ||
555 | 288 | "published", | ||
556 | 289 | "active", | ||
557 | 290 | "healthy", | ||
558 | 264 | ) | 291 | ) |
561 | 265 | ordering = ('name',) | 292 | ordering = ("name",) |
562 | 266 | search_fields = ('name', 'comment', 'ssh_ping_host',) | 293 | search_fields = ("name", "comment", "ssh_ping_host") |
563 | 267 | 294 | ||
564 | 268 | 295 | ||
565 | 269 | admin.site.register(Auth, AuthAdmin) | 296 | admin.site.register(Auth, AuthAdmin) |
566 | 270 | 297 | ||
567 | === modified file 'turku_api/models.py' | |||
568 | --- turku_api/models.py 2020-04-11 21:20:31 +0000 | |||
569 | +++ turku_api/models.py 2020-06-21 23:58:30 +0000 | |||
570 | @@ -14,14 +14,15 @@ | |||
571 | 14 | # License along with this program. If not, see | 14 | # License along with this program. If not, see |
572 | 15 | # <http://www.gnu.org/licenses/>. | 15 | # <http://www.gnu.org/licenses/>. |
573 | 16 | 16 | ||
574 | 17 | from datetime import timedelta | ||
575 | 18 | import json | ||
576 | 19 | import uuid | ||
577 | 20 | |||
578 | 17 | from django.db import models | 21 | from django.db import models |
579 | 22 | from django.contrib.auth.hashers import is_password_usable | ||
580 | 18 | from django.core.exceptions import ValidationError | 23 | from django.core.exceptions import ValidationError |
581 | 19 | from django.core.validators import MaxValueValidator, MinValueValidator | 24 | from django.core.validators import MaxValueValidator, MinValueValidator |
582 | 20 | from django.contrib.auth.hashers import is_password_usable | ||
583 | 21 | from django.utils import timezone | 25 | from django.utils import timezone |
584 | 22 | from datetime import timedelta | ||
585 | 23 | import json | ||
586 | 24 | import uuid | ||
587 | 25 | 26 | ||
588 | 26 | 27 | ||
589 | 27 | def new_uuid(): | 28 | def new_uuid(): |
590 | @@ -32,85 +33,83 @@ | |||
591 | 32 | try: | 33 | try: |
592 | 33 | str(uuid.UUID(value)) | 34 | str(uuid.UUID(value)) |
593 | 34 | except ValueError: | 35 | except ValueError: |
595 | 35 | raise ValidationError('Invalid UUID format') | 36 | raise ValidationError("Invalid UUID format") |
596 | 36 | 37 | ||
597 | 37 | 38 | ||
598 | 38 | def validate_hashed_password(value): | 39 | def validate_hashed_password(value): |
599 | 39 | if not is_password_usable(value): | 40 | if not is_password_usable(value): |
601 | 40 | raise ValidationError('Invalid hashed password') | 41 | raise ValidationError("Invalid hashed password") |
602 | 41 | 42 | ||
603 | 42 | 43 | ||
604 | 43 | def validate_json_string_list(value): | 44 | def validate_json_string_list(value): |
605 | 44 | try: | 45 | try: |
606 | 45 | decoded_json = json.loads(value) | 46 | decoded_json = json.loads(value) |
607 | 46 | except ValueError: | 47 | except ValueError: |
609 | 47 | raise ValidationError('Must be a valid JSON string list') | 48 | raise ValidationError("Must be a valid JSON string list") |
610 | 48 | if not isinstance(decoded_json, (list, tuple, set)): | 49 | if not isinstance(decoded_json, (list, tuple, set)): |
612 | 49 | raise ValidationError('Must be a valid JSON string list') | 50 | raise ValidationError("Must be a valid JSON string list") |
613 | 50 | for i in decoded_json: | 51 | for i in decoded_json: |
614 | 51 | if not isinstance(i, str): | 52 | if not isinstance(i, str): |
616 | 52 | raise ValidationError('Must be a valid JSON string list') | 53 | raise ValidationError("Must be a valid JSON string list") |
617 | 53 | 54 | ||
618 | 54 | 55 | ||
619 | 55 | def validate_storage_auth(value): | 56 | def validate_storage_auth(value): |
620 | 56 | try: | 57 | try: |
621 | 57 | a = Auth.objects.get(id=value) | 58 | a = Auth.objects.get(id=value) |
622 | 58 | except Auth.DoesNotExist: | 59 | except Auth.DoesNotExist: |
626 | 59 | raise ValidationError('Auth %s does not exist' % value) | 60 | raise ValidationError("Auth %s does not exist" % value) |
627 | 60 | if a.secret_type != 'storage_reg': | 61 | if a.secret_type != "storage_reg": |
628 | 61 | raise ValidationError('Must be a Storage registration') | 62 | raise ValidationError("Must be a Storage registration") |
629 | 62 | 63 | ||
630 | 63 | 64 | ||
631 | 64 | def validate_machine_auth(value): | 65 | def validate_machine_auth(value): |
632 | 65 | try: | 66 | try: |
633 | 66 | a = Auth.objects.get(id=value) | 67 | a = Auth.objects.get(id=value) |
634 | 67 | except Auth.DoesNotExist: | 68 | except Auth.DoesNotExist: |
638 | 68 | raise ValidationError('Auth %s does not exist' % value) | 69 | raise ValidationError("Auth %s does not exist" % value) |
639 | 69 | if a.secret_type != 'machine_reg': | 70 | if a.secret_type != "machine_reg": |
640 | 70 | raise ValidationError('Must be a Machine registration') | 71 | raise ValidationError("Must be a Machine registration") |
641 | 71 | 72 | ||
642 | 72 | 73 | ||
643 | 73 | class UuidPrimaryKeyField(models.CharField): | 74 | class UuidPrimaryKeyField(models.CharField): |
644 | 74 | def __init__(self, *args, **kwargs): | 75 | def __init__(self, *args, **kwargs): |
650 | 75 | kwargs['blank'] = True | 76 | kwargs["blank"] = True |
651 | 76 | kwargs['default'] = new_uuid | 77 | kwargs["default"] = new_uuid |
652 | 77 | kwargs['editable'] = False | 78 | kwargs["editable"] = False |
653 | 78 | kwargs['max_length'] = 36 | 79 | kwargs["max_length"] = 36 |
654 | 79 | kwargs['primary_key'] = True | 80 | kwargs["primary_key"] = True |
655 | 80 | super(UuidPrimaryKeyField, self).__init__(*args, **kwargs) | 81 | super(UuidPrimaryKeyField, self).__init__(*args, **kwargs) |
656 | 81 | 82 | ||
657 | 82 | 83 | ||
658 | 83 | class Auth(models.Model): | 84 | class Auth(models.Model): |
659 | 84 | SECRET_TYPES = ( | 85 | SECRET_TYPES = ( |
662 | 85 | ('machine_reg', 'Machine registration'), | 86 | ("machine_reg", "Machine registration"), |
663 | 86 | ('storage_reg', 'Storage registration'), | 87 | ("storage_reg", "Storage registration"), |
664 | 87 | ) | 88 | ) |
665 | 88 | id = UuidPrimaryKeyField() | 89 | id = UuidPrimaryKeyField() |
666 | 89 | name = models.CharField( | 90 | name = models.CharField( |
669 | 90 | max_length=200, unique=True, | 91 | max_length=200, unique=True, help_text="Human-readable name of this auth." |
668 | 91 | help_text='Human-readable name of this auth.', | ||
670 | 92 | ) | 92 | ) |
671 | 93 | secret_hash = models.CharField( | 93 | secret_hash = models.CharField( |
672 | 94 | max_length=200, | 94 | max_length=200, |
673 | 95 | validators=[validate_hashed_password], | 95 | validators=[validate_hashed_password], |
675 | 96 | help_text='Hashed secret (password) of this auth.', | 96 | help_text="Hashed secret (password) of this auth.", |
676 | 97 | ) | 97 | ) |
677 | 98 | secret_type = models.CharField( | 98 | secret_type = models.CharField( |
680 | 99 | max_length=200, choices=SECRET_TYPES, | 99 | max_length=200, |
681 | 100 | help_text='Auth secret type (machine/storage).', | 100 | choices=SECRET_TYPES, |
682 | 101 | help_text="Auth secret type (machine/storage).", | ||
683 | 101 | ) | 102 | ) |
684 | 102 | comment = models.CharField( | 103 | comment = models.CharField( |
687 | 103 | max_length=200, blank=True, null=True, | 104 | max_length=200, blank=True, null=True, help_text="Human-readable comment." |
686 | 104 | help_text='Human-readable comment.', | ||
688 | 105 | ) | 105 | ) |
689 | 106 | active = models.BooleanField( | 106 | active = models.BooleanField( |
690 | 107 | default=True, | 107 | default=True, |
693 | 108 | help_text='Whether this auth is enabled. Disabling prevents new registrations using its key, and prevents ' + | 108 | help_text="Whether this auth is enabled. Disabling prevents new registrations using its key, and prevents " |
694 | 109 | 'existing machines using its key from updating their configs.', | 109 | + "existing machines using its key from updating their configs.", |
695 | 110 | ) | 110 | ) |
696 | 111 | date_added = models.DateTimeField( | 111 | date_added = models.DateTimeField( |
699 | 112 | default=timezone.now, | 112 | default=timezone.now, help_text="Date/time this auth was added." |
698 | 113 | help_text='Date/time this auth was added.', | ||
700 | 114 | ) | 113 | ) |
701 | 115 | 114 | ||
702 | 116 | def __str__(self): | 115 | def __str__(self): |
703 | @@ -125,77 +124,78 @@ | |||
704 | 125 | return True | 124 | return True |
705 | 126 | if not self.date_checked_in: | 125 | if not self.date_checked_in: |
706 | 127 | return False | 126 | return False |
708 | 128 | return (now <= (self.date_checked_in + timedelta(minutes=30))) | 127 | return now <= (self.date_checked_in + timedelta(minutes=30)) |
709 | 128 | |||
710 | 129 | healthy.boolean = True | 129 | healthy.boolean = True |
711 | 130 | 130 | ||
712 | 131 | id = UuidPrimaryKeyField() | 131 | id = UuidPrimaryKeyField() |
713 | 132 | name = models.CharField( | 132 | name = models.CharField( |
716 | 133 | max_length=200, unique=True, | 133 | max_length=200, |
717 | 134 | help_text='Name of this storage unit. This is used as its login ID and must be unique.', | 134 | unique=True, |
718 | 135 | help_text="Name of this storage unit. This is used as its login ID and must be unique.", | ||
719 | 135 | ) | 136 | ) |
720 | 136 | secret_hash = models.CharField( | 137 | secret_hash = models.CharField( |
721 | 137 | max_length=200, | 138 | max_length=200, |
722 | 138 | validators=[validate_hashed_password], | 139 | validators=[validate_hashed_password], |
724 | 139 | help_text='Hashed secret (password) of this storage unit.', | 140 | help_text="Hashed secret (password) of this storage unit.", |
725 | 140 | ) | 141 | ) |
726 | 141 | comment = models.CharField( | 142 | comment = models.CharField( |
729 | 142 | max_length=200, blank=True, null=True, | 143 | max_length=200, blank=True, null=True, help_text="Human-readable comment." |
728 | 143 | help_text='Human-readable comment.', | ||
730 | 144 | ) | 144 | ) |
731 | 145 | ssh_ping_host = models.CharField( | 145 | ssh_ping_host = models.CharField( |
732 | 146 | max_length=200, | 146 | max_length=200, |
735 | 147 | verbose_name='SSH ping host', | 147 | verbose_name="SSH ping host", |
736 | 148 | help_text='Hostname/IP address of this storage unit\'s SSH server.', | 148 | help_text="Hostname/IP address of this storage unit's SSH server.", |
737 | 149 | ) | 149 | ) |
738 | 150 | ssh_ping_host_keys = models.CharField( | 150 | ssh_ping_host_keys = models.CharField( |
740 | 151 | max_length=65536, default='[]', | 151 | max_length=65536, |
741 | 152 | default="[]", | ||
742 | 152 | validators=[validate_json_string_list], | 153 | validators=[validate_json_string_list], |
745 | 153 | verbose_name='SSH ping host keys', | 154 | verbose_name="SSH ping host keys", |
746 | 154 | help_text='JSON list of this storage unit\'s SSH host keys.', | 155 | help_text="JSON list of this storage unit's SSH host keys.", |
747 | 155 | ) | 156 | ) |
748 | 156 | ssh_ping_port = models.PositiveIntegerField( | 157 | ssh_ping_port = models.PositiveIntegerField( |
749 | 157 | validators=[MinValueValidator(1), MaxValueValidator(65535)], | 158 | validators=[MinValueValidator(1), MaxValueValidator(65535)], |
752 | 158 | verbose_name='SSH ping port', | 159 | verbose_name="SSH ping port", |
753 | 159 | help_text='Port number of this storage unit\'s SSH server.', | 160 | help_text="Port number of this storage unit's SSH server.", |
754 | 160 | ) | 161 | ) |
755 | 161 | ssh_ping_user = models.CharField( | 162 | ssh_ping_user = models.CharField( |
756 | 162 | max_length=200, | 163 | max_length=200, |
759 | 163 | verbose_name='SSH ping user', | 164 | verbose_name="SSH ping user", |
760 | 164 | help_text='Username of this storage unit\'s SSH server.', | 165 | help_text="Username of this storage unit's SSH server.", |
761 | 165 | ) | 166 | ) |
762 | 166 | space_total = models.PositiveIntegerField( | 167 | space_total = models.PositiveIntegerField( |
763 | 167 | default=0, | 168 | default=0, |
765 | 168 | help_text='Total disk space of this storage unit\'s storage directories, in MiB.', | 169 | help_text="Total disk space of this storage unit's storage directories, in MiB.", |
766 | 169 | ) | 170 | ) |
767 | 170 | space_available = models.PositiveIntegerField( | 171 | space_available = models.PositiveIntegerField( |
768 | 171 | default=0, | 172 | default=0, |
770 | 172 | help_text='Available disk space of this storage unit\'s storage directories, in MiB.', | 173 | help_text="Available disk space of this storage unit's storage directories, in MiB.", |
771 | 173 | ) | 174 | ) |
772 | 174 | auth = models.ForeignKey( | 175 | auth = models.ForeignKey( |
775 | 175 | Auth, validators=[validate_storage_auth], on_delete=models.CASCADE, | 176 | Auth, |
776 | 176 | help_text='Storage auth used to register this storage unit.', | 177 | validators=[validate_storage_auth], |
777 | 178 | on_delete=models.CASCADE, | ||
778 | 179 | help_text="Storage auth used to register this storage unit.", | ||
779 | 177 | ) | 180 | ) |
780 | 178 | active = models.BooleanField( | 181 | active = models.BooleanField( |
781 | 179 | default=True, | 182 | default=True, |
785 | 180 | help_text='Whether this storage unit is enabled. Disabling prevents this storage unit from checking in or ' + | 183 | help_text="Whether this storage unit is enabled. Disabling prevents this storage unit from checking in or " |
786 | 181 | 'being assigned to new machines. Existing machines which ping this storage unit will get errors ' + | 184 | + "being assigned to new machines. Existing machines which ping this storage unit will get errors " |
787 | 182 | 'because this storage unit can no longer query the API server.', | 185 | + "because this storage unit can no longer query the API server.", |
788 | 183 | ) | 186 | ) |
789 | 184 | published = models.BooleanField( | 187 | published = models.BooleanField( |
792 | 185 | default=True, | 188 | default=True, help_text="Whether this storage unit has been enabled by itself." |
791 | 186 | help_text='Whether this storage unit has been enabled by itself.', | ||
793 | 187 | ) | 189 | ) |
794 | 188 | date_registered = models.DateTimeField( | 190 | date_registered = models.DateTimeField( |
797 | 189 | default=timezone.now, | 191 | default=timezone.now, help_text="Date/time this storage unit was registered." |
796 | 190 | help_text='Date/time this storage unit was registered.', | ||
798 | 191 | ) | 192 | ) |
799 | 192 | date_updated = models.DateTimeField( | 193 | date_updated = models.DateTimeField( |
800 | 193 | default=timezone.now, | 194 | default=timezone.now, |
802 | 194 | help_text='Date/time this storage unit presented a modified config.', | 195 | help_text="Date/time this storage unit presented a modified config.", |
803 | 195 | ) | 196 | ) |
804 | 196 | date_checked_in = models.DateTimeField( | 197 | date_checked_in = models.DateTimeField( |
807 | 197 | blank=True, null=True, | 198 | blank=True, null=True, help_text="Date/time this storage unit last checked in." |
806 | 198 | help_text='Date/time this storage unit last checked in.', | ||
808 | 199 | ) | 199 | ) |
809 | 200 | 200 | ||
810 | 201 | def __str__(self): | 201 | def __str__(self): |
811 | @@ -210,75 +210,82 @@ | |||
812 | 210 | return True | 210 | return True |
813 | 211 | if not self.date_checked_in: | 211 | if not self.date_checked_in: |
814 | 212 | return False | 212 | return False |
816 | 213 | return (now <= (self.date_checked_in + timedelta(hours=10))) | 213 | return now <= (self.date_checked_in + timedelta(hours=10)) |
817 | 214 | |||
818 | 214 | healthy.boolean = True | 215 | healthy.boolean = True |
819 | 215 | 216 | ||
820 | 216 | id = UuidPrimaryKeyField() | 217 | id = UuidPrimaryKeyField() |
821 | 217 | uuid = models.CharField( | 218 | uuid = models.CharField( |
825 | 218 | max_length=36, unique=True, validators=[validate_uuid], | 219 | max_length=36, |
826 | 219 | verbose_name='UUID', | 220 | unique=True, |
827 | 220 | help_text='UUID of this machine. This UUID is set by the machine and must be globally unique.', | 221 | validators=[validate_uuid], |
828 | 222 | verbose_name="UUID", | ||
829 | 223 | help_text="UUID of this machine. This UUID is set by the machine and must be globally unique.", | ||
830 | 221 | ) | 224 | ) |
831 | 222 | secret_hash = models.CharField( | 225 | secret_hash = models.CharField( |
832 | 223 | max_length=200, | 226 | max_length=200, |
833 | 224 | validators=[validate_hashed_password], | 227 | validators=[validate_hashed_password], |
835 | 225 | help_text='Hashed secret (password) of this machine.', | 228 | help_text="Hashed secret (password) of this machine.", |
836 | 226 | ) | 229 | ) |
837 | 227 | environment_name = models.CharField( | 230 | environment_name = models.CharField( |
840 | 228 | max_length=200, blank=True, null=True, | 231 | max_length=200, |
841 | 229 | help_text='Environment this machine is part of.', | 232 | blank=True, |
842 | 233 | null=True, | ||
843 | 234 | help_text="Environment this machine is part of.", | ||
844 | 230 | ) | 235 | ) |
845 | 231 | service_name = models.CharField( | 236 | service_name = models.CharField( |
849 | 232 | max_length=200, blank=True, null=True, | 237 | max_length=200, |
850 | 233 | help_text='Service this machine is part of. For Juju units, this is the first part of the unit name ' + | 238 | blank=True, |
851 | 234 | '(before the slash).', | 239 | null=True, |
852 | 240 | help_text="Service this machine is part of. For Juju units, this is the first part of the unit name " | ||
853 | 241 | + "(before the slash).", | ||
854 | 235 | ) | 242 | ) |
855 | 236 | unit_name = models.CharField( | 243 | unit_name = models.CharField( |
856 | 237 | max_length=200, | 244 | max_length=200, |
859 | 238 | help_text='Unit name of this machine. For Juju units, this is the full unit name (e.g. "service-name/0"). ' + | 245 | help_text='Unit name of this machine. For Juju units, this is the full unit name (e.g. "service-name/0"). ' |
860 | 239 | 'Otherwise, this should be the machine\'s hostname.', | 246 | + "Otherwise, this should be the machine's hostname.", |
861 | 240 | ) | 247 | ) |
862 | 241 | comment = models.CharField( | 248 | comment = models.CharField( |
865 | 242 | max_length=200, blank=True, null=True, | 249 | max_length=200, blank=True, null=True, help_text="Human-readable comment." |
864 | 243 | help_text='Human-readable comment.', | ||
866 | 244 | ) | 250 | ) |
867 | 245 | ssh_public_key = models.CharField( | 251 | ssh_public_key = models.CharField( |
868 | 246 | max_length=2048, | 252 | max_length=2048, |
871 | 247 | verbose_name='SSH public key', | 253 | verbose_name="SSH public key", |
872 | 248 | help_text='SSH public key of this machine\'s agent.', | 254 | help_text="SSH public key of this machine's agent.", |
873 | 249 | ) | 255 | ) |
874 | 250 | auth = models.ForeignKey( | 256 | auth = models.ForeignKey( |
877 | 251 | Auth, validators=[validate_machine_auth], on_delete=models.CASCADE, | 257 | Auth, |
878 | 252 | help_text='Machine auth used to register this machine.', | 258 | validators=[validate_machine_auth], |
879 | 259 | on_delete=models.CASCADE, | ||
880 | 260 | help_text="Machine auth used to register this machine.", | ||
881 | 253 | ) | 261 | ) |
882 | 254 | storage = models.ForeignKey( | 262 | storage = models.ForeignKey( |
885 | 255 | Storage, on_delete=models.CASCADE, | 263 | Storage, |
886 | 256 | help_text='Storage unit this machine is assigned to.', | 264 | on_delete=models.CASCADE, |
887 | 265 | help_text="Storage unit this machine is assigned to.", | ||
888 | 257 | ) | 266 | ) |
889 | 258 | active = models.BooleanField( | 267 | active = models.BooleanField( |
890 | 259 | default=True, | 268 | default=True, |
893 | 260 | help_text='Whether this machine is enabled. Disabling removes its key from its storage unit, stops this ' + | 269 | help_text="Whether this machine is enabled. Disabling removes its key from its storage unit, stops this " |
894 | 261 | 'machine from updating its registration, etc.', | 270 | + "machine from updating its registration, etc.", |
895 | 262 | ) | 271 | ) |
896 | 263 | published = models.BooleanField( | 272 | published = models.BooleanField( |
897 | 264 | default=True, | 273 | default=True, |
899 | 265 | help_text='Whether this machine has been enabled by the machine agent.', | 274 | help_text="Whether this machine has been enabled by the machine agent.", |
900 | 266 | ) | 275 | ) |
901 | 267 | date_registered = models.DateTimeField( | 276 | date_registered = models.DateTimeField( |
904 | 268 | default=timezone.now, | 277 | default=timezone.now, help_text="Date/time this machine was registered." |
903 | 269 | help_text='Date/time this machine was registered.', | ||
905 | 270 | ) | 278 | ) |
906 | 271 | date_updated = models.DateTimeField( | 279 | date_updated = models.DateTimeField( |
907 | 272 | default=timezone.now, | 280 | default=timezone.now, |
909 | 273 | help_text='Date/time this machine presented a modified config.', | 281 | help_text="Date/time this machine presented a modified config.", |
910 | 274 | ) | 282 | ) |
911 | 275 | date_checked_in = models.DateTimeField( | 283 | date_checked_in = models.DateTimeField( |
914 | 276 | blank=True, null=True, | 284 | blank=True, null=True, help_text="Date/time this machine last checked in." |
913 | 277 | help_text='Date/time this machine last checked in.', | ||
915 | 278 | ) | 285 | ) |
916 | 279 | 286 | ||
917 | 280 | def __str__(self): | 287 | def __str__(self): |
919 | 281 | return '%s (%s)' % (self.unit_name, self.uuid[0:8]) | 288 | return "%s (%s)" % (self.unit_name, self.uuid[0:8]) |
920 | 282 | 289 | ||
921 | 283 | 290 | ||
922 | 284 | class Source(models.Model): | 291 | class Source(models.Model): |
923 | @@ -289,174 +296,172 @@ | |||
924 | 289 | return True | 296 | return True |
925 | 290 | if not self.success: | 297 | if not self.success: |
926 | 291 | return False | 298 | return False |
928 | 292 | return (now <= (self.date_next_backup + timedelta(hours=10))) | 299 | return now <= (self.date_next_backup + timedelta(hours=10)) |
929 | 300 | |||
930 | 293 | healthy.boolean = True | 301 | healthy.boolean = True |
931 | 294 | 302 | ||
932 | 295 | SNAPSHOT_MODES = ( | 303 | SNAPSHOT_MODES = ( |
936 | 296 | ('none', 'No snapshotting'), | 304 | ("none", "No snapshotting"), |
937 | 297 | ('attic', 'Attic'), | 305 | ("attic", "Attic"), |
938 | 298 | ('link-dest', 'Hardlink trees (rsync --link-dest)'), | 306 | ("link-dest", "Hardlink trees (rsync --link-dest)"), |
939 | 299 | ) | 307 | ) |
940 | 300 | id = UuidPrimaryKeyField() | 308 | id = UuidPrimaryKeyField() |
941 | 301 | name = models.CharField( | 309 | name = models.CharField( |
944 | 302 | max_length=200, | 310 | max_length=200, help_text="Computer-readable source name identifier." |
943 | 303 | help_text='Computer-readable source name identifier.', | ||
945 | 304 | ) | 311 | ) |
946 | 305 | machine = models.ForeignKey( | 312 | machine = models.ForeignKey( |
949 | 306 | Machine, on_delete=models.CASCADE, | 313 | Machine, on_delete=models.CASCADE, help_text="Machine this source belongs to." |
948 | 307 | help_text='Machine this source belongs to.', | ||
950 | 308 | ) | 314 | ) |
951 | 309 | comment = models.CharField( | 315 | comment = models.CharField( |
954 | 310 | max_length=200, blank=True, null=True, | 316 | max_length=200, blank=True, null=True, help_text="Human-readable comment." |
953 | 311 | help_text='Human-readable comment.', | ||
955 | 312 | ) | 317 | ) |
956 | 313 | path = models.CharField( | 318 | path = models.CharField( |
959 | 314 | max_length=200, | 319 | max_length=200, help_text="Full filesystem path of this source." |
958 | 315 | help_text='Full filesystem path of this source.', | ||
960 | 316 | ) | 320 | ) |
961 | 317 | filter = models.CharField( | 321 | filter = models.CharField( |
964 | 318 | max_length=2048, default='[]', validators=[validate_json_string_list], | 322 | max_length=2048, |
965 | 319 | help_text='JSON list of rsync-compatible --filter options.', | 323 | default="[]", |
966 | 324 | validators=[validate_json_string_list], | ||
967 | 325 | help_text="JSON list of rsync-compatible --filter options.", | ||
968 | 320 | ) | 326 | ) |
969 | 321 | exclude = models.CharField( | 327 | exclude = models.CharField( |
972 | 322 | max_length=2048, default='[]', validators=[validate_json_string_list], | 328 | max_length=2048, |
973 | 323 | help_text='JSON list of rsync-compatible --exclude options.', | 329 | default="[]", |
974 | 330 | validators=[validate_json_string_list], | ||
975 | 331 | help_text="JSON list of rsync-compatible --exclude options.", | ||
976 | 324 | ) | 332 | ) |
977 | 325 | frequency = models.CharField( | 333 | frequency = models.CharField( |
980 | 326 | max_length=200, default='daily', | 334 | max_length=200, default="daily", help_text="How often to back up this source." |
979 | 327 | help_text='How often to back up this source.', | ||
981 | 328 | ) | 335 | ) |
982 | 329 | retention = models.CharField( | 336 | retention = models.CharField( |
985 | 330 | max_length=200, default='last 5 days, earliest of month', | 337 | max_length=200, |
986 | 331 | help_text='Retention schedule, describing when to preserve snapshots.', | 338 | default="last 5 days, earliest of month", |
987 | 339 | help_text="Retention schedule, describing when to preserve snapshots.", | ||
988 | 332 | ) | 340 | ) |
989 | 333 | bwlimit = models.CharField( | 341 | bwlimit = models.CharField( |
990 | 334 | max_length=200, | 342 | max_length=200, |
994 | 335 | blank=True, null=True, | 343 | blank=True, |
995 | 336 | verbose_name='bandwidth limit', | 344 | null=True, |
996 | 337 | help_text='Bandwith limit for remote transfer, using the rsync --bwlimit format.', | 345 | verbose_name="bandwidth limit", |
997 | 346 | help_text="Bandwith limit for remote transfer, using the rsync --bwlimit format.", | ||
998 | 338 | ) | 347 | ) |
999 | 339 | snapshot_mode = models.CharField( | 348 | snapshot_mode = models.CharField( |
1003 | 340 | blank=True, null=True, | 349 | blank=True, |
1004 | 341 | max_length=200, choices=SNAPSHOT_MODES, | 350 | null=True, |
1005 | 342 | help_text='Override the storage unit\'s snapshot logic and use an explicit snapshot mode for this source.', | 351 | max_length=200, |
1006 | 352 | choices=SNAPSHOT_MODES, | ||
1007 | 353 | help_text="Override the storage unit's snapshot logic and use an explicit snapshot mode for this source.", | ||
1008 | 343 | ) | 354 | ) |
1009 | 344 | preserve_hard_links = models.BooleanField( | 355 | preserve_hard_links = models.BooleanField( |
1010 | 345 | default=False, | 356 | default=False, |
1012 | 346 | help_text='Whether to preserve hard links when backing up this source.', | 357 | help_text="Whether to preserve hard links when backing up this source.", |
1013 | 347 | ) | 358 | ) |
1014 | 348 | shared_service = models.BooleanField( | 359 | shared_service = models.BooleanField( |
1015 | 349 | default=False, | 360 | default=False, |
1017 | 350 | help_text='Whether this source is part of a shared service of multiple machines to be backed up.', | 361 | help_text="Whether this source is part of a shared service of multiple machines to be backed up.", |
1018 | 351 | ) | 362 | ) |
1019 | 352 | large_rotating_files = models.BooleanField( | 363 | large_rotating_files = models.BooleanField( |
1020 | 353 | default=False, | 364 | default=False, |
1023 | 354 | help_text='Whether this source contains a number of large files which rotate through filenames, e.g. ' + | 365 | help_text="Whether this source contains a number of large files which rotate through filenames, e.g. " |
1024 | 355 | '"postgresql.1.dump.gz" becomes "postgresql.2.dump.gz".', | 366 | + '"postgresql.1.dump.gz" becomes "postgresql.2.dump.gz".', |
1025 | 356 | ) | 367 | ) |
1026 | 357 | large_modifying_files = models.BooleanField( | 368 | large_modifying_files = models.BooleanField( |
1027 | 358 | default=False, | 369 | default=False, |
1030 | 359 | help_text='Whether this source contains a number of large files which grow or are otherwise modified, ' + | 370 | help_text="Whether this source contains a number of large files which grow or are otherwise modified, " |
1031 | 360 | 'e.g. log files or filesystem images.', | 371 | + "e.g. log files or filesystem images.", |
1032 | 361 | ) | 372 | ) |
1033 | 362 | active = models.BooleanField( | 373 | active = models.BooleanField( |
1034 | 363 | default=True, | 374 | default=True, |
1037 | 364 | help_text='Whether this source is enabled. Disabling means the API server no longer gives it to the ' + | 375 | help_text="Whether this source is enabled. Disabling means the API server no longer gives it to the " |
1038 | 365 | 'storage unit, even if it\'s time for a backup.', | 376 | + "storage unit, even if it's time for a backup.", |
1039 | 366 | ) | 377 | ) |
1040 | 367 | success = models.BooleanField( | 378 | success = models.BooleanField( |
1043 | 368 | default=True, | 379 | default=True, help_text="Whether this source's last backup was successful." |
1042 | 369 | help_text='Whether this source\'s last backup was successful.', | ||
1044 | 370 | ) | 380 | ) |
1045 | 371 | published = models.BooleanField( | 381 | published = models.BooleanField( |
1046 | 372 | default=True, | 382 | default=True, |
1048 | 373 | help_text='Whether this source is actively being published by the machine agent.', | 383 | help_text="Whether this source is actively being published by the machine agent.", |
1049 | 374 | ) | 384 | ) |
1050 | 375 | date_added = models.DateTimeField( | 385 | date_added = models.DateTimeField( |
1051 | 376 | default=timezone.now, | 386 | default=timezone.now, |
1053 | 377 | help_text='Date/time this source was first added by the machine agent.', | 387 | help_text="Date/time this source was first added by the machine agent.", |
1054 | 378 | ) | 388 | ) |
1055 | 379 | date_updated = models.DateTimeField( | 389 | date_updated = models.DateTimeField( |
1056 | 380 | default=timezone.now, | 390 | default=timezone.now, |
1058 | 381 | help_text='Date/time the machine presented a modified config of this source.', | 391 | help_text="Date/time the machine presented a modified config of this source.", |
1059 | 382 | ) | 392 | ) |
1060 | 383 | date_last_backed_up = models.DateTimeField( | 393 | date_last_backed_up = models.DateTimeField( |
1063 | 384 | blank=True, null=True, | 394 | blank=True, |
1064 | 385 | help_text='Date/time this source was last successfully backed up.', | 395 | null=True, |
1065 | 396 | help_text="Date/time this source was last successfully backed up.", | ||
1066 | 386 | ) | 397 | ) |
1067 | 387 | date_next_backup = models.DateTimeField( | 398 | date_next_backup = models.DateTimeField( |
1068 | 388 | default=timezone.now, | 399 | default=timezone.now, |
1071 | 389 | help_text='Date/time this source is next scheduled to be backed up. Set to now (or in the past) to ' + | 400 | help_text="Date/time this source is next scheduled to be backed up. Set to now (or in the past) to " |
1072 | 390 | 'trigger a backup as soon as possible.', | 401 | + "trigger a backup as soon as possible.", |
1073 | 391 | ) | 402 | ) |
1074 | 392 | 403 | ||
1075 | 393 | class Meta: | 404 | class Meta: |
1077 | 394 | unique_together = (('machine', 'name'),) | 405 | unique_together = (("machine", "name"),) |
1078 | 395 | 406 | ||
1079 | 396 | def __str__(self): | 407 | def __str__(self): |
1081 | 397 | return '%s %s' % (self.machine.unit_name, self.name) | 408 | return "%s %s" % (self.machine.unit_name, self.name) |
1082 | 398 | 409 | ||
1083 | 399 | 410 | ||
1084 | 400 | class BackupLog(models.Model): | 411 | class BackupLog(models.Model): |
1085 | 401 | id = UuidPrimaryKeyField() | 412 | id = UuidPrimaryKeyField() |
1086 | 402 | source = models.ForeignKey( | 413 | source = models.ForeignKey( |
1089 | 403 | Source, on_delete=models.CASCADE, | 414 | Source, on_delete=models.CASCADE, help_text="Source this log entry belongs to." |
1088 | 404 | help_text='Source this log entry belongs to.', | ||
1090 | 405 | ) | 415 | ) |
1091 | 406 | date = models.DateTimeField( | 416 | date = models.DateTimeField( |
1092 | 407 | default=timezone.now, | 417 | default=timezone.now, |
1094 | 408 | help_text='Date/time this log entry was received/processed.', | 418 | help_text="Date/time this log entry was received/processed.", |
1095 | 409 | ) | 419 | ) |
1096 | 410 | storage = models.ForeignKey( | 420 | storage = models.ForeignKey( |
1099 | 411 | Storage, blank=True, null=True, on_delete=models.CASCADE, | 421 | Storage, |
1100 | 412 | help_text='Storage unit this backup occurred on.', | 422 | blank=True, |
1101 | 423 | null=True, | ||
1102 | 424 | on_delete=models.CASCADE, | ||
1103 | 425 | help_text="Storage unit this backup occurred on.", | ||
1104 | 413 | ) | 426 | ) |
1105 | 414 | success = models.BooleanField( | 427 | success = models.BooleanField( |
1108 | 415 | default=False, | 428 | default=False, help_text="Whether this backup succeeded." |
1107 | 416 | help_text='Whether this backup succeeded.', | ||
1109 | 417 | ) | 429 | ) |
1110 | 418 | date_begin = models.DateTimeField( | 430 | date_begin = models.DateTimeField( |
1113 | 419 | blank=True, null=True, | 431 | blank=True, null=True, help_text="Date/time this backup began." |
1112 | 420 | help_text='Date/time this backup began.', | ||
1114 | 421 | ) | 432 | ) |
1115 | 422 | date_end = models.DateTimeField( | 433 | date_end = models.DateTimeField( |
1118 | 423 | blank=True, null=True, | 434 | blank=True, null=True, help_text="Date/time this backup ended." |
1117 | 424 | help_text='Date/time this backup ended.', | ||
1119 | 425 | ) | 435 | ) |
1120 | 426 | snapshot = models.CharField( | 436 | snapshot = models.CharField( |
1123 | 427 | max_length=200, blank=True, null=True, | 437 | max_length=200, blank=True, null=True, help_text="Name of the created snapshot." |
1122 | 428 | help_text='Name of the created snapshot.', | ||
1124 | 429 | ) | 438 | ) |
1125 | 430 | summary = models.TextField( | 439 | summary = models.TextField( |
1128 | 431 | blank=True, null=True, | 440 | blank=True, null=True, help_text="Summary of the backup's events." |
1127 | 432 | help_text='Summary of the backup\'s events.', | ||
1129 | 433 | ) | 441 | ) |
1130 | 434 | 442 | ||
1131 | 435 | def __str__(self): | 443 | def __str__(self): |
1133 | 436 | return '%s %s' % (str(self.source), self.date.strftime('%Y-%m-%d %H:%M:%S')) | 444 | return "%s %s" % (str(self.source), self.date.strftime("%Y-%m-%d %H:%M:%S")) |
1134 | 437 | 445 | ||
1135 | 438 | 446 | ||
1136 | 439 | class FilterSet(models.Model): | 447 | class FilterSet(models.Model): |
1137 | 440 | id = UuidPrimaryKeyField() | 448 | id = UuidPrimaryKeyField() |
1138 | 441 | name = models.CharField( | 449 | name = models.CharField( |
1141 | 442 | max_length=200, unique=True, | 450 | max_length=200, unique=True, help_text="Name of this filter set." |
1140 | 443 | help_text='Name of this filter set.', | ||
1142 | 444 | ) | 451 | ) |
1143 | 445 | filters = models.TextField( | 452 | filters = models.TextField( |
1146 | 446 | default='[]', validators=[validate_json_string_list], | 453 | default="[]", |
1147 | 447 | help_text='JSON list of this filter set\'s filter rules.', | 454 | validators=[validate_json_string_list], |
1148 | 455 | help_text="JSON list of this filter set's filter rules.", | ||
1149 | 448 | ) | 456 | ) |
1150 | 449 | comment = models.CharField( | 457 | comment = models.CharField( |
1153 | 450 | max_length=200, blank=True, null=True, | 458 | max_length=200, blank=True, null=True, help_text="Human-readable comment." |
1152 | 451 | help_text='Human-readable comment.', | ||
1154 | 452 | ) | 459 | ) |
1155 | 453 | active = models.BooleanField( | 460 | active = models.BooleanField( |
1158 | 454 | default=True, | 461 | default=True, help_text="Whether this filter set is enabled." |
1157 | 455 | help_text='Whether this filter set is enabled.', | ||
1159 | 456 | ) | 462 | ) |
1160 | 457 | date_added = models.DateTimeField( | 463 | date_added = models.DateTimeField( |
1163 | 458 | default=timezone.now, | 464 | default=timezone.now, help_text="Date/time this filter set was added." |
1162 | 459 | help_text='Date/time this filter set was added.', | ||
1164 | 460 | ) | 465 | ) |
1165 | 461 | 466 | ||
1166 | 462 | def __str__(self): | 467 | def __str__(self): |
1167 | 463 | 468 | ||
1168 | === modified file 'turku_api/settings.py' | |||
1169 | --- turku_api/settings.py 2020-04-11 21:20:31 +0000 | |||
1170 | +++ turku_api/settings.py 2020-06-21 23:58:30 +0000 | |||
1171 | @@ -21,57 +21,59 @@ | |||
1172 | 21 | BASE_DIR = os.path.dirname(os.path.dirname(__file__)) | 21 | BASE_DIR = os.path.dirname(os.path.dirname(__file__)) |
1173 | 22 | DEBUG = False | 22 | DEBUG = False |
1174 | 23 | TEMPLATE_DEBUG = False | 23 | TEMPLATE_DEBUG = False |
1176 | 24 | ALLOWED_HOSTS = ('*',) | 24 | ALLOWED_HOSTS = ("*",) |
1177 | 25 | INSTALLED_APPS = ( | 25 | INSTALLED_APPS = ( |
1185 | 26 | 'django.contrib.admin', | 26 | "django.contrib.admin", |
1186 | 27 | 'django.contrib.auth', | 27 | "django.contrib.auth", |
1187 | 28 | 'django.contrib.contenttypes', | 28 | "django.contrib.contenttypes", |
1188 | 29 | 'django.contrib.sessions', | 29 | "django.contrib.sessions", |
1189 | 30 | 'django.contrib.messages', | 30 | "django.contrib.messages", |
1190 | 31 | 'django.contrib.staticfiles', | 31 | "django.contrib.staticfiles", |
1191 | 32 | 'turku_api', | 32 | "turku_api", |
1192 | 33 | ) | 33 | ) |
1193 | 34 | MIDDLEWARE = ( | 34 | MIDDLEWARE = ( |
1200 | 35 | 'django.contrib.sessions.middleware.SessionMiddleware', | 35 | "django.contrib.sessions.middleware.SessionMiddleware", |
1201 | 36 | 'django.middleware.common.CommonMiddleware', | 36 | "django.middleware.common.CommonMiddleware", |
1202 | 37 | 'django.middleware.csrf.CsrfViewMiddleware', | 37 | "django.middleware.csrf.CsrfViewMiddleware", |
1203 | 38 | 'django.contrib.auth.middleware.AuthenticationMiddleware', | 38 | "django.contrib.auth.middleware.AuthenticationMiddleware", |
1204 | 39 | 'django.contrib.messages.middleware.MessageMiddleware', | 39 | "django.contrib.messages.middleware.MessageMiddleware", |
1205 | 40 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', | 40 | "django.middleware.clickjacking.XFrameOptionsMiddleware", |
1206 | 41 | ) | 41 | ) |
1207 | 42 | MIDDLEWARE_CLASSES = MIDDLEWARE # pre-1.10 | 42 | MIDDLEWARE_CLASSES = MIDDLEWARE # pre-1.10 |
1212 | 43 | ROOT_URLCONF = 'turku_api.urls' | 43 | ROOT_URLCONF = "turku_api.urls" |
1213 | 44 | WSGI_APPLICATION = 'turku_api.wsgi.application' | 44 | WSGI_APPLICATION = "turku_api.wsgi.application" |
1214 | 45 | LANGUAGE_CODE = 'en-us' | 45 | LANGUAGE_CODE = "en-us" |
1215 | 46 | TIME_ZONE = 'UTC' | 46 | TIME_ZONE = "UTC" |
1216 | 47 | USE_I18N = True | 47 | USE_I18N = True |
1217 | 48 | USE_L10N = True | 48 | USE_L10N = True |
1218 | 49 | USE_TZ = True | 49 | USE_TZ = True |
1220 | 50 | STATIC_URL = '/static/' | 50 | STATIC_URL = "/static/" |
1221 | 51 | TEMPLATES = [ | 51 | TEMPLATES = [ |
1222 | 52 | { | 52 | { |
1233 | 53 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', | 53 | "BACKEND": "django.template.backends.django.DjangoTemplates", |
1234 | 54 | 'DIRS': [os.path.join(BASE_DIR, 'turku_api/templates')], | 54 | "DIRS": [os.path.join(BASE_DIR, "turku_api/templates")], |
1235 | 55 | 'APP_DIRS': True, | 55 | "APP_DIRS": True, |
1236 | 56 | 'OPTIONS': { | 56 | "OPTIONS": { |
1237 | 57 | 'context_processors': [ | 57 | "context_processors": [ |
1238 | 58 | 'django.template.context_processors.debug', | 58 | "django.template.context_processors.debug", |
1239 | 59 | 'django.template.context_processors.request', | 59 | "django.template.context_processors.request", |
1240 | 60 | 'django.contrib.auth.context_processors.auth', | 60 | "django.contrib.auth.context_processors.auth", |
1241 | 61 | 'django.contrib.messages.context_processors.messages', | 61 | "django.contrib.messages.context_processors.messages", |
1242 | 62 | ], | 62 | ] |
1243 | 63 | }, | 63 | }, |
1245 | 64 | }, | 64 | } |
1246 | 65 | ] | 65 | ] |
1247 | 66 | DATABASES = { | 66 | DATABASES = { |
1251 | 67 | 'default': { | 67 | "default": { |
1252 | 68 | 'ENGINE': 'django.db.backends.sqlite3', | 68 | "ENGINE": "django.db.backends.sqlite3", |
1253 | 69 | 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), | 69 | "NAME": os.path.join(BASE_DIR, "db.sqlite3"), |
1254 | 70 | } | 70 | } |
1255 | 71 | } | 71 | } |
1257 | 72 | SECRET_KEY = ''.join(random.choice(string.ascii_letters + string.digits) for i in range(30)) | 72 | SECRET_KEY = "".join( |
1258 | 73 | random.choice(string.ascii_letters + string.digits) for i in range(30) | ||
1259 | 74 | ) | ||
1260 | 73 | 75 | ||
1261 | 74 | try: | 76 | try: |
1263 | 75 | from turku_api.local_settings import * | 77 | from turku_api.local_settings import * # noqa: F401,F403 |
1264 | 76 | except ImportError: | 78 | except ImportError: |
1265 | 77 | pass | 79 | pass |
1266 | 78 | 80 | ||
1267 | === removed file 'turku_api/tests.py' | |||
1268 | === modified file 'turku_api/urls.py' | |||
1269 | --- turku_api/urls.py 2020-04-11 21:20:31 +0000 | |||
1270 | +++ turku_api/urls.py 2020-06-21 23:58:30 +0000 | |||
1271 | @@ -14,31 +14,49 @@ | |||
1272 | 14 | # License along with this program. If not, see | 14 | # License along with this program. If not, see |
1273 | 15 | # <http://www.gnu.org/licenses/>. | 15 | # <http://www.gnu.org/licenses/>. |
1274 | 16 | 16 | ||
1276 | 17 | from django.conf.urls import include, url | 17 | from django.conf.urls import url |
1277 | 18 | from django.contrib import admin | ||
1278 | 19 | from django.views.generic.base import RedirectView | ||
1279 | 20 | |||
1280 | 18 | try: | 21 | try: |
1281 | 19 | from django.urls import reverse_lazy # 1.10+ | 22 | from django.urls import reverse_lazy # 1.10+ |
1282 | 20 | except ModuleNotFoundError: | 23 | except ModuleNotFoundError: |
1283 | 21 | from django.core.urlresolvers import reverse_lazy # pre-1.10 | 24 | from django.core.urlresolvers import reverse_lazy # pre-1.10 |
1285 | 22 | from django.views.generic.base import RedirectView | 25 | |
1286 | 23 | from turku_api import views | 26 | from turku_api import views |
1287 | 24 | from django.contrib import admin | ||
1288 | 25 | 27 | ||
1289 | 26 | 28 | ||
1290 | 27 | admin.autodiscover() | 29 | admin.autodiscover() |
1291 | 28 | 30 | ||
1292 | 29 | urlpatterns = [ | 31 | urlpatterns = [ |
1302 | 30 | url(r'^$', RedirectView.as_view(url=reverse_lazy('admin:index'))), | 32 | url(r"^$", RedirectView.as_view(url=reverse_lazy("admin:index"))), |
1303 | 31 | url(r'^v1/health$', views.health, name='health'), | 33 | url(r"^v1/health$", views.health, name="health"), |
1304 | 32 | url(r'^v1/update_config$', views.update_config, name='update_config'), | 34 | url(r"^v1/update_config$", views.update_config, name="update_config"), |
1305 | 33 | url(r'^v1/agent_ping_checkin$', views.agent_ping_checkin, name='agent_ping_checkin'), | 35 | url( |
1306 | 34 | url(r'^v1/agent_ping_restore$', views.agent_ping_restore, name='agent_ping_restore'), | 36 | r"^v1/agent_ping_checkin$", views.agent_ping_checkin, name="agent_ping_checkin" |
1307 | 35 | url(r'^v1/storage_ping_checkin$', views.storage_ping_checkin, name='storage_ping_checkin'), | 37 | ), |
1308 | 36 | url(r'^v1/storage_ping_source_update$', views.storage_ping_source_update, name='storage_ping_source_update'), | 38 | url( |
1309 | 37 | url(r'^v1/storage_update_config$', views.storage_update_config, name='storage_update_config'), | 39 | r"^v1/agent_ping_restore$", views.agent_ping_restore, name="agent_ping_restore" |
1310 | 38 | url(r'^admin/', admin.site.urls), | 40 | ), |
1311 | 41 | url( | ||
1312 | 42 | r"^v1/storage_ping_checkin$", | ||
1313 | 43 | views.storage_ping_checkin, | ||
1314 | 44 | name="storage_ping_checkin", | ||
1315 | 45 | ), | ||
1316 | 46 | url( | ||
1317 | 47 | r"^v1/storage_ping_source_update$", | ||
1318 | 48 | views.storage_ping_source_update, | ||
1319 | 49 | name="storage_ping_source_update", | ||
1320 | 50 | ), | ||
1321 | 51 | url( | ||
1322 | 52 | r"^v1/storage_update_config$", | ||
1323 | 53 | views.storage_update_config, | ||
1324 | 54 | name="storage_update_config", | ||
1325 | 55 | ), | ||
1326 | 56 | url(r"^admin/", admin.site.urls), | ||
1327 | 39 | ] | 57 | ] |
1328 | 40 | 58 | ||
1329 | 41 | try: | 59 | try: |
1331 | 42 | from local_urls import * | 60 | from local_urls import * # noqa: F401,F403 |
1332 | 43 | except ImportError: | 61 | except ImportError: |
1333 | 44 | pass | 62 | pass |
1334 | 45 | 63 | ||
1335 | === modified file 'turku_api/views.py' | |||
1336 | --- turku_api/views.py 2020-03-24 23:07:22 +0000 | |||
1337 | +++ turku_api/views.py 2020-06-21 23:58:30 +0000 | |||
1338 | @@ -14,87 +14,107 @@ | |||
1339 | 14 | # License along with this program. If not, see | 14 | # License along with this program. If not, see |
1340 | 15 | # <http://www.gnu.org/licenses/>. | 15 | # <http://www.gnu.org/licenses/>. |
1341 | 16 | 16 | ||
1342 | 17 | from datetime import datetime, timedelta | ||
1343 | 18 | import json | ||
1344 | 19 | import random | ||
1345 | 20 | |||
1346 | 21 | from django.contrib.auth import hashers | ||
1347 | 22 | from django.core.exceptions import ValidationError | ||
1348 | 17 | from django.http import ( | 23 | from django.http import ( |
1351 | 18 | HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, | 24 | HttpResponse, |
1352 | 19 | HttpResponseForbidden, HttpResponseNotFound, | 25 | HttpResponseBadRequest, |
1353 | 26 | HttpResponseForbidden, | ||
1354 | 27 | HttpResponseNotAllowed, | ||
1355 | 28 | HttpResponseNotFound, | ||
1356 | 20 | ) | 29 | ) |
1357 | 30 | from django.utils import timezone | ||
1358 | 21 | from django.views.decorators.csrf import csrf_exempt | 31 | from django.views.decorators.csrf import csrf_exempt |
1368 | 22 | from django.utils import timezone | 32 | |
1369 | 23 | from django.core.exceptions import ValidationError | 33 | from turku_api.models import Auth, BackupLog, FilterSet, Machine, Source, Storage |
1361 | 24 | |||
1362 | 25 | from turku_api.models import Auth, Machine, Source, Storage, BackupLog, FilterSet | ||
1363 | 26 | |||
1364 | 27 | import json | ||
1365 | 28 | import random | ||
1366 | 29 | from datetime import timedelta, datetime | ||
1367 | 30 | from django.contrib.auth import hashers | ||
1370 | 31 | 34 | ||
1371 | 32 | 35 | ||
1372 | 33 | def frequency_next_scheduled(frequency, base_time=None): | 36 | def frequency_next_scheduled(frequency, base_time=None): |
1373 | 34 | if not base_time: | 37 | if not base_time: |
1374 | 35 | base_time = timezone.now() | 38 | base_time = timezone.now() |
1376 | 36 | f = [x.strip() for x in frequency.split(',')] | 39 | f = [x.strip() for x in frequency.split(",")] |
1377 | 37 | 40 | ||
1384 | 38 | if f[0] == 'hourly': | 41 | if f[0] == "hourly": |
1385 | 39 | target_time = ( | 42 | target_time = base_time.replace( |
1386 | 40 | base_time.replace( | 43 | minute=random.randint(0, 59), second=random.randint(0, 59), microsecond=0 |
1387 | 41 | minute=random.randint(0, 59), second=random.randint(0, 59), microsecond=0 | 44 | ) + timedelta(hours=1) |
1382 | 42 | ) + timedelta(hours=1) | ||
1383 | 43 | ) | ||
1388 | 44 | # Push it out 10 minutes if it falls within 10 minutes of now | 45 | # Push it out 10 minutes if it falls within 10 minutes of now |
1389 | 45 | if target_time < (base_time + timedelta(minutes=10)): | 46 | if target_time < (base_time + timedelta(minutes=10)): |
1391 | 46 | target_time = (target_time + timedelta(minutes=10)) | 47 | target_time = target_time + timedelta(minutes=10) |
1392 | 47 | return target_time | 48 | return target_time |
1393 | 48 | 49 | ||
1394 | 49 | today = base_time.replace(hour=0, minute=0, second=0, microsecond=0) | 50 | today = base_time.replace(hour=0, minute=0, second=0, microsecond=0) |
1396 | 50 | if f[0] == 'daily': | 51 | if f[0] == "daily": |
1397 | 51 | # Tomorrow | 52 | # Tomorrow |
1400 | 52 | target_date = (today + timedelta(days=1)) | 53 | target_date = today + timedelta(days=1) |
1401 | 53 | elif f[0] == 'weekly': | 54 | elif f[0] == "weekly": |
1402 | 54 | # Random day next week | 55 | # Random day next week |
1403 | 55 | target_day = random.randint(0, 6) | 56 | target_day = random.randint(0, 6) |
1405 | 56 | target_date = (today + timedelta(weeks=1) - timedelta(days=((today.weekday() + 1) % 7)) + timedelta(days=target_day)) | 57 | target_date = ( |
1406 | 58 | today | ||
1407 | 59 | + timedelta(weeks=1) | ||
1408 | 60 | - timedelta(days=((today.weekday() + 1) % 7)) | ||
1409 | 61 | + timedelta(days=target_day) | ||
1410 | 62 | ) | ||
1411 | 57 | # Push it out 3 days if it falls within 3 days of now | 63 | # Push it out 3 days if it falls within 3 days of now |
1412 | 58 | if target_date < (base_time + timedelta(days=3)): | 64 | if target_date < (base_time + timedelta(days=3)): |
1415 | 59 | target_date = (target_date + timedelta(days=3)) | 65 | target_date = target_date + timedelta(days=3) |
1416 | 60 | elif f[0] in ('sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday'): | 66 | elif f[0] in ( |
1417 | 67 | "sunday", | ||
1418 | 68 | "monday", | ||
1419 | 69 | "tuesday", | ||
1420 | 70 | "wednesday", | ||
1421 | 71 | "thursday", | ||
1422 | 72 | "friday", | ||
1423 | 73 | "saturday", | ||
1424 | 74 | ): | ||
1425 | 61 | # Next Xday | 75 | # Next Xday |
1426 | 62 | day_map = { | 76 | day_map = { |
1434 | 63 | 'sunday': 0, | 77 | "sunday": 0, |
1435 | 64 | 'monday': 1, | 78 | "monday": 1, |
1436 | 65 | 'tuesday': 2, | 79 | "tuesday": 2, |
1437 | 66 | 'wednesday': 3, | 80 | "wednesday": 3, |
1438 | 67 | 'thursday': 4, | 81 | "thursday": 4, |
1439 | 68 | 'friday': 5, | 82 | "friday": 5, |
1440 | 69 | 'saturday': 6, | 83 | "saturday": 6, |
1441 | 70 | } | 84 | } |
1442 | 71 | target_day = day_map[f[0]] | 85 | target_day = day_map[f[0]] |
1444 | 72 | target_date = (today - timedelta(days=((today.weekday() + 1) % 7)) + timedelta(days=target_day)) | 86 | target_date = ( |
1445 | 87 | today | ||
1446 | 88 | - timedelta(days=((today.weekday() + 1) % 7)) | ||
1447 | 89 | + timedelta(days=target_day) | ||
1448 | 90 | ) | ||
1449 | 73 | if target_date < today: | 91 | if target_date < today: |
1452 | 74 | target_date = (target_date + timedelta(weeks=1)) | 92 | target_date = target_date + timedelta(weeks=1) |
1453 | 75 | elif f[0] == 'monthly': | 93 | elif f[0] == "monthly": |
1454 | 76 | next_month = (today.replace(day=1) + timedelta(days=40)).replace(day=1) | 94 | next_month = (today.replace(day=1) + timedelta(days=40)).replace(day=1) |
1455 | 77 | month_after = (next_month.replace(day=1) + timedelta(days=40)).replace(day=1) | 95 | month_after = (next_month.replace(day=1) + timedelta(days=40)).replace(day=1) |
1457 | 78 | target_date = (next_month + timedelta(days=random.randint(1, (month_after - next_month).days))) | 96 | target_date = next_month + timedelta( |
1458 | 97 | days=random.randint(1, (month_after - next_month).days) | ||
1459 | 98 | ) | ||
1460 | 79 | # Push it out a week if it falls within a week of now | 99 | # Push it out a week if it falls within a week of now |
1461 | 80 | if target_date < (base_time + timedelta(days=7)): | 100 | if target_date < (base_time + timedelta(days=7)): |
1463 | 81 | target_date = (target_date + timedelta(days=7)) | 101 | target_date = target_date + timedelta(days=7) |
1464 | 82 | else: | 102 | else: |
1465 | 83 | # Fall back to tomorrow | 103 | # Fall back to tomorrow |
1467 | 84 | target_date = (today + timedelta(days=1)) | 104 | target_date = today + timedelta(days=1) |
1468 | 85 | 105 | ||
1469 | 86 | if len(f) == 1: | 106 | if len(f) == 1: |
1472 | 87 | return (target_date + timedelta(seconds=random.randint(0, 86399))) | 107 | return target_date + timedelta(seconds=random.randint(0, 86399)) |
1473 | 88 | time_range = f[1].split('-') | 108 | time_range = f[1].split("-") |
1474 | 89 | start = (int(time_range[0][0:2]) * 60 * 60) + (int(time_range[0][2:4]) * 60) | 109 | start = (int(time_range[0][0:2]) * 60 * 60) + (int(time_range[0][2:4]) * 60) |
1475 | 90 | if len(time_range) == 1: | 110 | if len(time_range) == 1: |
1476 | 91 | # Not a range | 111 | # Not a range |
1478 | 92 | return (target_date + timedelta(seconds=start)) | 112 | return target_date + timedelta(seconds=start) |
1479 | 93 | end = (int(time_range[1][0:2]) * 60 * 60) + (int(time_range[1][2:4]) * 60) | 113 | end = (int(time_range[1][0:2]) * 60 * 60) + (int(time_range[1][2:4]) * 60) |
1480 | 94 | if end < start: | 114 | if end < start: |
1481 | 95 | # Day rollover | 115 | # Day rollover |
1482 | 96 | end = end + 86400 | 116 | end = end + 86400 |
1484 | 97 | return (target_date + timedelta(seconds=random.randint(start, end))) | 117 | return target_date + timedelta(seconds=random.randint(start, end)) |
1485 | 98 | 118 | ||
1486 | 99 | 119 | ||
1487 | 100 | def random_weighted(m): | 120 | def random_weighted(m): |
1488 | @@ -115,8 +135,9 @@ | |||
1489 | 115 | 135 | ||
1490 | 116 | def get_repo_revision(): | 136 | def get_repo_revision(): |
1491 | 117 | import os | 137 | import os |
1492 | 138 | |||
1493 | 118 | base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | 139 | base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
1495 | 119 | if os.path.isdir(os.path.join(base_dir, '.bzr')): | 140 | if os.path.isdir(os.path.join(base_dir, ".bzr")): |
1496 | 120 | try: | 141 | try: |
1497 | 121 | import bzrlib.errors | 142 | import bzrlib.errors |
1498 | 122 | from bzrlib.branch import Branch | 143 | from bzrlib.branch import Branch |
1499 | @@ -137,17 +158,22 @@ | |||
1500 | 137 | return repr(self.message) | 158 | return repr(self.message) |
1501 | 138 | 159 | ||
1502 | 139 | 160 | ||
1504 | 140 | class ViewV1(): | 161 | class ViewV1: |
1505 | 141 | def __init__(self, django_request): | 162 | def __init__(self, django_request): |
1506 | 142 | self.django_request = django_request | 163 | self.django_request = django_request |
1507 | 143 | self._parse_json_post() | 164 | self._parse_json_post() |
1508 | 144 | 165 | ||
1509 | 145 | def _parse_json_post(self): | 166 | def _parse_json_post(self): |
1510 | 146 | # Require JSON POST | 167 | # Require JSON POST |
1515 | 147 | if not self.django_request.method == 'POST': | 168 | if not self.django_request.method == "POST": |
1516 | 148 | raise HttpResponseException(HttpResponseNotAllowed(['POST'])) | 169 | raise HttpResponseException(HttpResponseNotAllowed(["POST"])) |
1517 | 149 | if not (('CONTENT_TYPE' in self.django_request.META) and (self.django_request.META['CONTENT_TYPE'] == 'application/json')): | 170 | if not ( |
1518 | 150 | raise HttpResponseException(HttpResponseBadRequest('Bad Content-Type (expected application/json)')) | 171 | ("CONTENT_TYPE" in self.django_request.META) |
1519 | 172 | and (self.django_request.META["CONTENT_TYPE"] == "application/json") | ||
1520 | 173 | ): | ||
1521 | 174 | raise HttpResponseException( | ||
1522 | 175 | HttpResponseBadRequest("Bad Content-Type (expected application/json)") | ||
1523 | 176 | ) | ||
1524 | 151 | 177 | ||
1525 | 152 | # Load the POSTed JSON | 178 | # Load the POSTed JSON |
1526 | 153 | try: | 179 | try: |
1527 | @@ -157,80 +183,103 @@ | |||
1528 | 157 | 183 | ||
1529 | 158 | def _storage_authenticate(self): | 184 | def _storage_authenticate(self): |
1530 | 159 | # Check for storage auth | 185 | # Check for storage auth |
1536 | 160 | if 'storage' not in self.req: | 186 | if "storage" not in self.req: |
1537 | 161 | raise HttpResponseException(HttpResponseBadRequest('Missing required option "storage"')) | 187 | raise HttpResponseException( |
1538 | 162 | for k in ('name', 'secret'): | 188 | HttpResponseBadRequest('Missing required option "storage"') |
1539 | 163 | if k not in self.req['storage']: | 189 | ) |
1540 | 164 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 190 | for k in ("name", "secret"): |
1541 | 191 | if k not in self.req["storage"]: | ||
1542 | 192 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) | ||
1543 | 165 | try: | 193 | try: |
1545 | 166 | self.storage = Storage.objects.get(name=self.req['storage']['name'], active=True) | 194 | self.storage = Storage.objects.get( |
1546 | 195 | name=self.req["storage"]["name"], active=True | ||
1547 | 196 | ) | ||
1548 | 167 | except Storage.DoesNotExist: | 197 | except Storage.DoesNotExist: |
1552 | 168 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 198 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1553 | 169 | if not hashers.check_password(self.req['storage']['secret'], self.storage.secret_hash): | 199 | if not hashers.check_password( |
1554 | 170 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 200 | self.req["storage"]["secret"], self.storage.secret_hash |
1555 | 201 | ): | ||
1556 | 202 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) | ||
1557 | 171 | 203 | ||
1558 | 172 | def _storage_get_machine(self): | 204 | def _storage_get_machine(self): |
1559 | 173 | # Make sure these exist in the request | 205 | # Make sure these exist in the request |
1564 | 174 | if 'machine' not in self.req: | 206 | if "machine" not in self.req: |
1565 | 175 | raise HttpResponseException(HttpResponseBadRequest('Missing required option "machine"')) | 207 | raise HttpResponseException( |
1566 | 176 | if 'uuid' not in self.req['machine']: | 208 | HttpResponseBadRequest('Missing required option "machine"') |
1567 | 177 | raise HttpResponseException(HttpResponseBadRequest('Missing required option "machine.uuid"')) | 209 | ) |
1568 | 210 | if "uuid" not in self.req["machine"]: | ||
1569 | 211 | raise HttpResponseException( | ||
1570 | 212 | HttpResponseBadRequest('Missing required option "machine.uuid"') | ||
1571 | 213 | ) | ||
1572 | 178 | 214 | ||
1573 | 179 | # Create or load the machine | 215 | # Create or load the machine |
1574 | 180 | try: | 216 | try: |
1576 | 181 | return Machine.objects.get(uuid=self.req['machine']['uuid'], storage=self.storage, active=True, published=True) | 217 | return Machine.objects.get( |
1577 | 218 | uuid=self.req["machine"]["uuid"], | ||
1578 | 219 | storage=self.storage, | ||
1579 | 220 | active=True, | ||
1580 | 221 | published=True, | ||
1581 | 222 | ) | ||
1582 | 182 | except Machine.DoesNotExist: | 223 | except Machine.DoesNotExist: |
1584 | 183 | raise HttpResponseException(HttpResponseNotFound('Machine not found')) | 224 | raise HttpResponseException(HttpResponseNotFound("Machine not found")) |
1585 | 184 | 225 | ||
1586 | 185 | def get_registration_auth(self, secret_type): | 226 | def get_registration_auth(self, secret_type): |
1587 | 186 | # Check for global auth | 227 | # Check for global auth |
1593 | 187 | if 'auth' not in self.req: | 228 | if "auth" not in self.req: |
1594 | 188 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 229 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1595 | 189 | if isinstance(self.req['auth'], dict): | 230 | if isinstance(self.req["auth"], dict): |
1596 | 190 | if not (('name' in self.req['auth']) and ('secret' in self.req['auth'])): | 231 | if not (("name" in self.req["auth"]) and ("secret" in self.req["auth"])): |
1597 | 191 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 232 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1598 | 192 | try: | 233 | try: |
1600 | 193 | a = Auth.objects.get(name=self.req['auth']['name'], secret_type=secret_type, active=True) | 234 | a = Auth.objects.get( |
1601 | 235 | name=self.req["auth"]["name"], secret_type=secret_type, active=True | ||
1602 | 236 | ) | ||
1603 | 194 | except Auth.DoesNotExist: | 237 | except Auth.DoesNotExist: |
1606 | 195 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 238 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1607 | 196 | if hashers.check_password(self.req['auth']['secret'], a.secret_hash): | 239 | if hashers.check_password(self.req["auth"]["secret"], a.secret_hash): |
1608 | 197 | return a | 240 | return a |
1609 | 198 | else: | 241 | else: |
1610 | 199 | # XXX inefficient but temporary (legacy) | 242 | # XXX inefficient but temporary (legacy) |
1611 | 200 | for a in Auth.objects.filter(secret_type=secret_type, active=True): | 243 | for a in Auth.objects.filter(secret_type=secret_type, active=True): |
1613 | 201 | if hashers.check_password(self.req['auth'], a.secret_hash): | 244 | if hashers.check_password(self.req["auth"], a.secret_hash): |
1614 | 202 | return a | 245 | return a |
1616 | 203 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 246 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1617 | 204 | 247 | ||
1618 | 205 | def update_config(self): | 248 | def update_config(self): |
1622 | 206 | if not (('machine' in self.req) and (isinstance(self.req['machine'], dict))): | 249 | if not (("machine" in self.req) and (isinstance(self.req["machine"], dict))): |
1623 | 207 | raise HttpResponseException(HttpResponseBadRequest('"machine" dict required')) | 250 | raise HttpResponseException( |
1624 | 208 | req_machine = self.req['machine'] | 251 | HttpResponseBadRequest('"machine" dict required') |
1625 | 252 | ) | ||
1626 | 253 | req_machine = self.req["machine"] | ||
1627 | 209 | 254 | ||
1628 | 210 | # Make sure these exist in the request (validation comes later) | 255 | # Make sure these exist in the request (validation comes later) |
1630 | 211 | for k in ('uuid', 'secret'): | 256 | for k in ("uuid", "secret"): |
1631 | 212 | if k not in req_machine: | 257 | if k not in req_machine: |
1633 | 213 | raise HttpResponseException(HttpResponseBadRequest('Missing required machine option "%s"' % k)) | 258 | raise HttpResponseException( |
1634 | 259 | HttpResponseBadRequest('Missing required machine option "%s"' % k) | ||
1635 | 260 | ) | ||
1636 | 214 | 261 | ||
1637 | 215 | # Create or load the machine | 262 | # Create or load the machine |
1638 | 216 | try: | 263 | try: |
1640 | 217 | m = Machine.objects.get(uuid=req_machine['uuid'], active=True) | 264 | m = Machine.objects.get(uuid=req_machine["uuid"], active=True) |
1641 | 218 | modified = False | 265 | modified = False |
1642 | 219 | except Machine.DoesNotExist: | 266 | except Machine.DoesNotExist: |
1646 | 220 | m = Machine(uuid=req_machine['uuid']) | 267 | m = Machine(uuid=req_machine["uuid"]) |
1647 | 221 | m.secret_hash = hashers.make_password(req_machine['secret']) | 268 | m.secret_hash = hashers.make_password(req_machine["secret"]) |
1648 | 222 | m.auth = self.get_registration_auth('machine_reg') | 269 | m.auth = self.get_registration_auth("machine_reg") |
1649 | 223 | modified = True | 270 | modified = True |
1650 | 224 | 271 | ||
1651 | 225 | # If the machine existed before, it had a secret. Make sure that | 272 | # If the machine existed before, it had a secret. Make sure that |
1652 | 226 | # hasn't changed. | 273 | # hasn't changed. |
1655 | 227 | if not hashers.check_password(req_machine['secret'], m.secret_hash): | 274 | if not hashers.check_password(req_machine["secret"], m.secret_hash): |
1656 | 228 | raise HttpResponseException(HttpResponseForbidden('Bad secret for existing machine')) | 275 | raise HttpResponseException( |
1657 | 276 | HttpResponseForbidden("Bad secret for existing machine") | ||
1658 | 277 | ) | ||
1659 | 229 | 278 | ||
1660 | 230 | # Change the machine published status if needed | 279 | # Change the machine published status if needed |
1664 | 231 | if ('published' in req_machine): | 280 | if "published" in req_machine: |
1665 | 232 | if req_machine['published'] != m.published: | 281 | if req_machine["published"] != m.published: |
1666 | 233 | m.published = req_machine['published'] | 282 | m.published = req_machine["published"] |
1667 | 234 | modified = True | 283 | modified = True |
1668 | 235 | else: | 284 | else: |
1669 | 236 | # If not present, default to want published | 285 | # If not present, default to want published |
1670 | @@ -251,11 +300,19 @@ | |||
1671 | 251 | m.storage = random_weighted(weights) | 300 | m.storage = random_weighted(weights) |
1672 | 252 | modified = True | 301 | modified = True |
1673 | 253 | except IndexError: | 302 | except IndexError: |
1675 | 254 | raise HttpResponseException(HttpResponseNotFound('No storages are currently available')) | 303 | raise HttpResponseException( |
1676 | 304 | HttpResponseNotFound("No storages are currently available") | ||
1677 | 305 | ) | ||
1678 | 255 | 306 | ||
1679 | 256 | # If any of these exist in the request, add or update them in the | 307 | # If any of these exist in the request, add or update them in the |
1680 | 257 | # machine. | 308 | # machine. |
1682 | 258 | for k in ('environment_name', 'service_name', 'unit_name', 'comment', 'ssh_public_key'): | 309 | for k in ( |
1683 | 310 | "environment_name", | ||
1684 | 311 | "service_name", | ||
1685 | 312 | "unit_name", | ||
1686 | 313 | "comment", | ||
1687 | 314 | "ssh_public_key", | ||
1688 | 315 | ): | ||
1689 | 259 | if (k in req_machine) and (getattr(m, k) != req_machine[k]): | 316 | if (k in req_machine) and (getattr(m, k) != req_machine[k]): |
1690 | 260 | setattr(m, k, req_machine[k]) | 317 | setattr(m, k, req_machine[k]) |
1691 | 261 | modified = True | 318 | modified = True |
1692 | @@ -266,18 +323,24 @@ | |||
1693 | 266 | try: | 323 | try: |
1694 | 267 | m.full_clean() | 324 | m.full_clean() |
1695 | 268 | except ValidationError as e: | 325 | except ValidationError as e: |
1697 | 269 | raise HttpResponseException(HttpResponseBadRequest('Validation error: %s' % str(e))) | 326 | raise HttpResponseException( |
1698 | 327 | HttpResponseBadRequest("Validation error: %s" % str(e)) | ||
1699 | 328 | ) | ||
1700 | 270 | m.save() | 329 | m.save() |
1701 | 271 | 330 | ||
1704 | 272 | if 'sources' in req_machine: | 331 | if "sources" in req_machine: |
1705 | 273 | req_sources = req_machine['sources'] | 332 | req_sources = req_machine["sources"] |
1706 | 274 | if not isinstance(req_sources, dict): | 333 | if not isinstance(req_sources, dict): |
1709 | 275 | raise HttpResponseException(HttpResponseBadRequest('Invalid type for "sources"')) | 334 | raise HttpResponseException( |
1710 | 276 | elif 'sources' in self.req: | 335 | HttpResponseBadRequest('Invalid type for "sources"') |
1711 | 336 | ) | ||
1712 | 337 | elif "sources" in self.req: | ||
1713 | 277 | # XXX legacy | 338 | # XXX legacy |
1715 | 278 | req_sources = self.req['sources'] | 339 | req_sources = self.req["sources"] |
1716 | 279 | if not isinstance(req_sources, dict): | 340 | if not isinstance(req_sources, dict): |
1718 | 280 | raise HttpResponseException(HttpResponseBadRequest('Invalid type for "sources"')) | 341 | raise HttpResponseException( |
1719 | 342 | HttpResponseBadRequest('Invalid type for "sources"') | ||
1720 | 343 | ) | ||
1721 | 281 | else: | 344 | else: |
1722 | 282 | req_sources = {} | 345 | req_sources = {} |
1723 | 283 | 346 | ||
1724 | @@ -291,17 +354,27 @@ | |||
1725 | 291 | 354 | ||
1726 | 292 | modified = False | 355 | modified = False |
1727 | 293 | for k in ( | 356 | for k in ( |
1732 | 294 | 'path', 'frequency', 'retention', | 357 | "path", |
1733 | 295 | 'comment', 'shared_service', 'large_rotating_files', | 358 | "frequency", |
1734 | 296 | 'large_modifying_files', 'bwlimit', 'snapshot_mode', | 359 | "retention", |
1735 | 297 | 'preserve_hard_links', | 360 | "comment", |
1736 | 361 | "shared_service", | ||
1737 | 362 | "large_rotating_files", | ||
1738 | 363 | "large_modifying_files", | ||
1739 | 364 | "bwlimit", | ||
1740 | 365 | "snapshot_mode", | ||
1741 | 366 | "preserve_hard_links", | ||
1742 | 298 | ): | 367 | ): |
1744 | 299 | if (k in req_sources[s.name]) and (getattr(s, k) != req_sources[s.name][k]): | 368 | if (k in req_sources[s.name]) and ( |
1745 | 369 | getattr(s, k) != req_sources[s.name][k] | ||
1746 | 370 | ): | ||
1747 | 300 | setattr(s, k, req_sources[s.name][k]) | 371 | setattr(s, k, req_sources[s.name][k]) |
1750 | 301 | if k == 'frequency': | 372 | if k == "frequency": |
1751 | 302 | s.date_next_backup = frequency_next_scheduled(req_sources[s.name][k]) | 373 | s.date_next_backup = frequency_next_scheduled( |
1752 | 374 | req_sources[s.name][k] | ||
1753 | 375 | ) | ||
1754 | 303 | modified = True | 376 | modified = True |
1756 | 304 | for k in ('filter', 'exclude'): | 377 | for k in ("filter", "exclude"): |
1757 | 305 | if k not in req_sources[s.name]: | 378 | if k not in req_sources[s.name]: |
1758 | 306 | continue | 379 | continue |
1759 | 307 | v = json.dumps(req_sources[s.name][k], sort_keys=True) | 380 | v = json.dumps(req_sources[s.name][k], sort_keys=True) |
1760 | @@ -315,7 +388,9 @@ | |||
1761 | 315 | try: | 388 | try: |
1762 | 316 | s.full_clean() | 389 | s.full_clean() |
1763 | 317 | except ValidationError as e: | 390 | except ValidationError as e: |
1765 | 318 | raise HttpResponseException(HttpResponseBadRequest('Validation error: %s' % str(e))) | 391 | raise HttpResponseException( |
1766 | 392 | HttpResponseBadRequest("Validation error: %s" % str(e)) | ||
1767 | 393 | ) | ||
1768 | 319 | s.save() | 394 | s.save() |
1769 | 320 | 395 | ||
1770 | 321 | for name in req_sources: | 396 | for name in req_sources: |
1771 | @@ -326,15 +401,21 @@ | |||
1772 | 326 | s.machine = m | 401 | s.machine = m |
1773 | 327 | 402 | ||
1774 | 328 | for k in ( | 403 | for k in ( |
1779 | 329 | 'path', 'frequency', 'retention', | 404 | "path", |
1780 | 330 | 'comment', 'shared_service', 'large_rotating_files', | 405 | "frequency", |
1781 | 331 | 'large_modifying_files', 'bwlimit', 'snapshot_mode', | 406 | "retention", |
1782 | 332 | 'preserve_hard_links', | 407 | "comment", |
1783 | 408 | "shared_service", | ||
1784 | 409 | "large_rotating_files", | ||
1785 | 410 | "large_modifying_files", | ||
1786 | 411 | "bwlimit", | ||
1787 | 412 | "snapshot_mode", | ||
1788 | 413 | "preserve_hard_links", | ||
1789 | 333 | ): | 414 | ): |
1790 | 334 | if k not in req_sources[s.name]: | 415 | if k not in req_sources[s.name]: |
1791 | 335 | continue | 416 | continue |
1792 | 336 | setattr(s, k, req_sources[s.name][k]) | 417 | setattr(s, k, req_sources[s.name][k]) |
1794 | 337 | for k in ('filter', 'exclude'): | 418 | for k in ("filter", "exclude"): |
1795 | 338 | if k not in req_sources[s.name]: | 419 | if k not in req_sources[s.name]: |
1796 | 339 | continue | 420 | continue |
1797 | 340 | v = json.dumps(req_sources[s.name][k], sort_keys=True) | 421 | v = json.dumps(req_sources[s.name][k], sort_keys=True) |
1798 | @@ -346,18 +427,20 @@ | |||
1799 | 346 | try: | 427 | try: |
1800 | 347 | s.full_clean() | 428 | s.full_clean() |
1801 | 348 | except ValidationError as e: | 429 | except ValidationError as e: |
1803 | 349 | raise HttpResponseException(HttpResponseBadRequest('Validation error: %s' % str(e))) | 430 | raise HttpResponseException( |
1804 | 431 | HttpResponseBadRequest("Validation error: %s" % str(e)) | ||
1805 | 432 | ) | ||
1806 | 350 | s.save() | 433 | s.save() |
1807 | 351 | 434 | ||
1808 | 352 | # XXX legacy | 435 | # XXX legacy |
1809 | 353 | out = { | 436 | out = { |
1815 | 354 | 'storage_name': m.storage.name, | 437 | "storage_name": m.storage.name, |
1816 | 355 | 'ssh_ping_host': m.storage.ssh_ping_host, | 438 | "ssh_ping_host": m.storage.ssh_ping_host, |
1817 | 356 | 'ssh_ping_host_keys': json.loads(m.storage.ssh_ping_host_keys), | 439 | "ssh_ping_host_keys": json.loads(m.storage.ssh_ping_host_keys), |
1818 | 357 | 'ssh_ping_port': m.storage.ssh_ping_port, | 440 | "ssh_ping_port": m.storage.ssh_ping_port, |
1819 | 358 | 'ssh_ping_user': m.storage.ssh_ping_user, | 441 | "ssh_ping_user": m.storage.ssh_ping_user, |
1820 | 359 | } | 442 | } |
1822 | 360 | return HttpResponse(json.dumps(out), content_type='application/json') | 443 | return HttpResponse(json.dumps(out), content_type="application/json") |
1823 | 361 | 444 | ||
1824 | 362 | def build_filters(self, set, loaded_sets=None): | 445 | def build_filters(self, set, loaded_sets=None): |
1825 | 363 | if not loaded_sets: | 446 | if not loaded_sets: |
1826 | @@ -365,10 +448,10 @@ | |||
1827 | 365 | out = [] | 448 | out = [] |
1828 | 366 | for f in set: | 449 | for f in set: |
1829 | 367 | try: | 450 | try: |
1831 | 368 | (verb, subsetname) = f.split(' ', 1) | 451 | (verb, subsetname) = f.split(" ", 1) |
1832 | 369 | except ValueError: | 452 | except ValueError: |
1833 | 370 | continue | 453 | continue |
1835 | 371 | if verb in ('merge', '.'): | 454 | if verb in ("merge", "."): |
1836 | 372 | if subsetname in loaded_sets: | 455 | if subsetname in loaded_sets: |
1837 | 373 | continue | 456 | continue |
1838 | 374 | try: | 457 | try: |
1839 | @@ -379,10 +462,22 @@ | |||
1840 | 379 | out.append(f2) | 462 | out.append(f2) |
1841 | 380 | loaded_sets.append(subsetname) | 463 | loaded_sets.append(subsetname) |
1842 | 381 | elif verb in ( | 464 | elif verb in ( |
1847 | 382 | 'dir-merge', ':', 'clear', '!', | 465 | "dir-merge", |
1848 | 383 | 'exclude', '-', 'include', '+', | 466 | ":", |
1849 | 384 | 'hide', 'H', 'show', 'S', | 467 | "clear", |
1850 | 385 | 'protect', 'P', 'risk', 'R', | 468 | "!", |
1851 | 469 | "exclude", | ||
1852 | 470 | "-", | ||
1853 | 471 | "include", | ||
1854 | 472 | "+", | ||
1855 | 473 | "hide", | ||
1856 | 474 | "H", | ||
1857 | 475 | "show", | ||
1858 | 476 | "S", | ||
1859 | 477 | "protect", | ||
1860 | 478 | "P", | ||
1861 | 479 | "risk", | ||
1862 | 480 | "R", | ||
1863 | 386 | ): | 481 | ): |
1864 | 387 | out.append(f) | 482 | out.append(f) |
1865 | 388 | return out | 483 | return out |
1866 | @@ -390,109 +485,117 @@ | |||
1867 | 390 | def get_checkin_scheduled_sources(self, m): | 485 | def get_checkin_scheduled_sources(self, m): |
1868 | 391 | scheduled_sources = {} | 486 | scheduled_sources = {} |
1869 | 392 | now = timezone.now() | 487 | now = timezone.now() |
1871 | 393 | for s in m.source_set.filter(date_next_backup__lte=now, active=True, published=True): | 488 | for s in m.source_set.filter( |
1872 | 489 | date_next_backup__lte=now, active=True, published=True | ||
1873 | 490 | ): | ||
1874 | 394 | scheduled_sources[s.name] = { | 491 | scheduled_sources[s.name] = { |
1892 | 395 | 'path': s.path, | 492 | "path": s.path, |
1893 | 396 | 'retention': s.retention, | 493 | "retention": s.retention, |
1894 | 397 | 'bwlimit': s.bwlimit, | 494 | "bwlimit": s.bwlimit, |
1895 | 398 | 'filter': self.build_filters(json.loads(s.filter)), | 495 | "filter": self.build_filters(json.loads(s.filter)), |
1896 | 399 | 'exclude': json.loads(s.exclude), | 496 | "exclude": json.loads(s.exclude), |
1897 | 400 | 'shared_service': s.shared_service, | 497 | "shared_service": s.shared_service, |
1898 | 401 | 'large_rotating_files': s.large_rotating_files, | 498 | "large_rotating_files": s.large_rotating_files, |
1899 | 402 | 'large_modifying_files': s.large_modifying_files, | 499 | "large_modifying_files": s.large_modifying_files, |
1900 | 403 | 'snapshot_mode': s.snapshot_mode, | 500 | "snapshot_mode": s.snapshot_mode, |
1901 | 404 | 'preserve_hard_links': s.preserve_hard_links, | 501 | "preserve_hard_links": s.preserve_hard_links, |
1902 | 405 | 'storage': { | 502 | "storage": { |
1903 | 406 | 'name': s.machine.storage.name, | 503 | "name": s.machine.storage.name, |
1904 | 407 | 'ssh_ping_host': s.machine.storage.ssh_ping_host, | 504 | "ssh_ping_host": s.machine.storage.ssh_ping_host, |
1905 | 408 | 'ssh_ping_host_keys': json.loads(s.machine.storage.ssh_ping_host_keys), | 505 | "ssh_ping_host_keys": json.loads( |
1906 | 409 | 'ssh_ping_port': s.machine.storage.ssh_ping_port, | 506 | s.machine.storage.ssh_ping_host_keys |
1907 | 410 | 'ssh_ping_user': s.machine.storage.ssh_ping_user, | 507 | ), |
1908 | 411 | } | 508 | "ssh_ping_port": s.machine.storage.ssh_ping_port, |
1909 | 509 | "ssh_ping_user": s.machine.storage.ssh_ping_user, | ||
1910 | 510 | }, | ||
1911 | 412 | } | 511 | } |
1912 | 413 | return scheduled_sources | 512 | return scheduled_sources |
1913 | 414 | 513 | ||
1914 | 415 | def agent_ping_checkin(self): | 514 | def agent_ping_checkin(self): |
1918 | 416 | if not (('machine' in self.req) and (isinstance(self.req['machine'], dict))): | 515 | if not (("machine" in self.req) and (isinstance(self.req["machine"], dict))): |
1919 | 417 | raise HttpResponseException(HttpResponseBadRequest('"machine" dict required')) | 516 | raise HttpResponseException( |
1920 | 418 | req_machine = self.req['machine'] | 517 | HttpResponseBadRequest('"machine" dict required') |
1921 | 518 | ) | ||
1922 | 519 | req_machine = self.req["machine"] | ||
1923 | 419 | 520 | ||
1924 | 420 | # Make sure these exist in the request | 521 | # Make sure these exist in the request |
1926 | 421 | for k in ('uuid', 'secret'): | 522 | for k in ("uuid", "secret"): |
1927 | 422 | if k not in req_machine: | 523 | if k not in req_machine: |
1929 | 423 | raise HttpResponseException(HttpResponseBadRequest('Missing required machine option "%s"' % k)) | 524 | raise HttpResponseException( |
1930 | 525 | HttpResponseBadRequest('Missing required machine option "%s"' % k) | ||
1931 | 526 | ) | ||
1932 | 424 | 527 | ||
1933 | 425 | # Load the machine | 528 | # Load the machine |
1934 | 426 | try: | 529 | try: |
1936 | 427 | m = Machine.objects.get(uuid=req_machine['uuid'], active=True, published=True) | 530 | m = Machine.objects.get( |
1937 | 531 | uuid=req_machine["uuid"], active=True, published=True | ||
1938 | 532 | ) | ||
1939 | 428 | except Machine.DoesNotExist: | 533 | except Machine.DoesNotExist: |
1943 | 429 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 534 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1944 | 430 | if not hashers.check_password(req_machine['secret'], m.secret_hash): | 535 | if not hashers.check_password(req_machine["secret"], m.secret_hash): |
1945 | 431 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 536 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1946 | 432 | 537 | ||
1947 | 433 | scheduled_sources = self.get_checkin_scheduled_sources(m) | 538 | scheduled_sources = self.get_checkin_scheduled_sources(m) |
1948 | 434 | now = timezone.now() | 539 | now = timezone.now() |
1949 | 435 | 540 | ||
1955 | 436 | out = { | 541 | out = {"machine": {"scheduled_sources": scheduled_sources}} |
1951 | 437 | 'machine': { | ||
1952 | 438 | 'scheduled_sources': scheduled_sources, | ||
1953 | 439 | }, | ||
1954 | 440 | } | ||
1956 | 441 | 542 | ||
1957 | 442 | # XXX legacy | 543 | # XXX legacy |
1959 | 443 | out['scheduled_sources'] = scheduled_sources | 544 | out["scheduled_sources"] = scheduled_sources |
1960 | 444 | 545 | ||
1961 | 445 | m.date_checked_in = now | 546 | m.date_checked_in = now |
1962 | 446 | m.save() | 547 | m.save() |
1964 | 447 | return HttpResponse(json.dumps(out), content_type='application/json') | 548 | return HttpResponse(json.dumps(out), content_type="application/json") |
1965 | 448 | 549 | ||
1966 | 449 | def agent_ping_restore(self): | 550 | def agent_ping_restore(self): |
1970 | 450 | if not (('machine' in self.req) and (isinstance(self.req['machine'], dict))): | 551 | if not (("machine" in self.req) and (isinstance(self.req["machine"], dict))): |
1971 | 451 | raise HttpResponseException(HttpResponseBadRequest('"machine" dict required')) | 552 | raise HttpResponseException( |
1972 | 452 | req_machine = self.req['machine'] | 553 | HttpResponseBadRequest('"machine" dict required') |
1973 | 554 | ) | ||
1974 | 555 | req_machine = self.req["machine"] | ||
1975 | 453 | 556 | ||
1976 | 454 | # Make sure these exist in the request | 557 | # Make sure these exist in the request |
1978 | 455 | for k in ('uuid', 'secret'): | 558 | for k in ("uuid", "secret"): |
1979 | 456 | if k not in req_machine: | 559 | if k not in req_machine: |
1981 | 457 | raise HttpResponseException(HttpResponseBadRequest('Missing required machine option "%s"' % k)) | 560 | raise HttpResponseException( |
1982 | 561 | HttpResponseBadRequest('Missing required machine option "%s"' % k) | ||
1983 | 562 | ) | ||
1984 | 458 | 563 | ||
1985 | 459 | # Load the machine | 564 | # Load the machine |
1986 | 460 | try: | 565 | try: |
1988 | 461 | m = Machine.objects.get(uuid=req_machine['uuid'], active=True) | 566 | m = Machine.objects.get(uuid=req_machine["uuid"], active=True) |
1989 | 462 | except Machine.DoesNotExist: | 567 | except Machine.DoesNotExist: |
1993 | 463 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 568 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1994 | 464 | if not hashers.check_password(req_machine['secret'], m.secret_hash): | 569 | if not hashers.check_password(req_machine["secret"], m.secret_hash): |
1995 | 465 | raise HttpResponseException(HttpResponseForbidden('Bad auth')) | 570 | raise HttpResponseException(HttpResponseForbidden("Bad auth")) |
1996 | 466 | 571 | ||
1997 | 467 | sources = {} | 572 | sources = {} |
1998 | 468 | for s in m.source_set.filter(active=True): | 573 | for s in m.source_set.filter(active=True): |
1999 | 469 | sources[s.name] = { | 574 | sources[s.name] = { |
2017 | 470 | 'path': s.path, | 575 | "path": s.path, |
2018 | 471 | 'retention': s.retention, | 576 | "retention": s.retention, |
2019 | 472 | 'bwlimit': s.bwlimit, | 577 | "bwlimit": s.bwlimit, |
2020 | 473 | 'filter': self.build_filters(json.loads(s.filter)), | 578 | "filter": self.build_filters(json.loads(s.filter)), |
2021 | 474 | 'exclude': json.loads(s.exclude), | 579 | "exclude": json.loads(s.exclude), |
2022 | 475 | 'shared_service': s.shared_service, | 580 | "shared_service": s.shared_service, |
2023 | 476 | 'large_rotating_files': s.large_rotating_files, | 581 | "large_rotating_files": s.large_rotating_files, |
2024 | 477 | 'large_modifying_files': s.large_modifying_files, | 582 | "large_modifying_files": s.large_modifying_files, |
2025 | 478 | 'snapshot_mode': s.snapshot_mode, | 583 | "snapshot_mode": s.snapshot_mode, |
2026 | 479 | 'preserve_hard_links': s.preserve_hard_links, | 584 | "preserve_hard_links": s.preserve_hard_links, |
2027 | 480 | 'storage': { | 585 | "storage": { |
2028 | 481 | 'name': s.machine.storage.name, | 586 | "name": s.machine.storage.name, |
2029 | 482 | 'ssh_ping_host': s.machine.storage.ssh_ping_host, | 587 | "ssh_ping_host": s.machine.storage.ssh_ping_host, |
2030 | 483 | 'ssh_ping_host_keys': json.loads(s.machine.storage.ssh_ping_host_keys), | 588 | "ssh_ping_host_keys": json.loads( |
2031 | 484 | 'ssh_ping_port': s.machine.storage.ssh_ping_port, | 589 | s.machine.storage.ssh_ping_host_keys |
2032 | 485 | 'ssh_ping_user': s.machine.storage.ssh_ping_user, | 590 | ), |
2033 | 486 | } | 591 | "ssh_ping_port": s.machine.storage.ssh_ping_port, |
2034 | 592 | "ssh_ping_user": s.machine.storage.ssh_ping_user, | ||
2035 | 593 | }, | ||
2036 | 487 | } | 594 | } |
2037 | 488 | 595 | ||
2043 | 489 | out = { | 596 | out = {"machine": {"sources": sources}} |
2039 | 490 | 'machine': { | ||
2040 | 491 | 'sources': sources, | ||
2041 | 492 | }, | ||
2042 | 493 | } | ||
2044 | 494 | 597 | ||
2046 | 495 | return HttpResponse(json.dumps(out), content_type='application/json') | 598 | return HttpResponse(json.dumps(out), content_type="application/json") |
2047 | 496 | 599 | ||
2048 | 497 | def storage_ping_checkin(self): | 600 | def storage_ping_checkin(self): |
2049 | 498 | self._storage_authenticate() | 601 | self._storage_authenticate() |
2050 | @@ -502,32 +605,34 @@ | |||
2051 | 502 | now = timezone.now() | 605 | now = timezone.now() |
2052 | 503 | 606 | ||
2053 | 504 | out = { | 607 | out = { |
2061 | 505 | 'machine': { | 608 | "machine": { |
2062 | 506 | 'uuid': m.uuid, | 609 | "uuid": m.uuid, |
2063 | 507 | 'environment_name': m.environment_name, | 610 | "environment_name": m.environment_name, |
2064 | 508 | 'service_name': m.service_name, | 611 | "service_name": m.service_name, |
2065 | 509 | 'unit_name': m.unit_name, | 612 | "unit_name": m.unit_name, |
2066 | 510 | 'scheduled_sources': scheduled_sources, | 613 | "scheduled_sources": scheduled_sources, |
2067 | 511 | }, | 614 | } |
2068 | 512 | } | 615 | } |
2069 | 513 | m.date_checked_in = now | 616 | m.date_checked_in = now |
2070 | 514 | m.save() | 617 | m.save() |
2072 | 515 | return HttpResponse(json.dumps(out), content_type='application/json') | 618 | return HttpResponse(json.dumps(out), content_type="application/json") |
2073 | 516 | 619 | ||
2074 | 517 | def storage_ping_source_update(self): | 620 | def storage_ping_source_update(self): |
2075 | 518 | self._storage_authenticate() | 621 | self._storage_authenticate() |
2076 | 519 | m = self._storage_get_machine() | 622 | m = self._storage_get_machine() |
2077 | 520 | 623 | ||
2082 | 521 | if 'sources' not in self.req['machine']: | 624 | if "sources" not in self.req["machine"]: |
2083 | 522 | raise HttpResponseException(HttpResponseBadRequest('Missing required option "machine.sources"')) | 625 | raise HttpResponseException( |
2084 | 523 | for source_name in self.req['machine']['sources']: | 626 | HttpResponseBadRequest('Missing required option "machine.sources"') |
2085 | 524 | source_data = self.req['machine']['sources'][source_name] | 627 | ) |
2086 | 628 | for source_name in self.req["machine"]["sources"]: | ||
2087 | 629 | source_data = self.req["machine"]["sources"][source_name] | ||
2088 | 525 | try: | 630 | try: |
2089 | 526 | s = m.source_set.get(name=source_name, active=True, published=True) | 631 | s = m.source_set.get(name=source_name, active=True, published=True) |
2090 | 527 | except Source.DoesNotExist: | 632 | except Source.DoesNotExist: |
2092 | 528 | raise HttpResponseException(HttpResponseNotFound('Source not found')) | 633 | raise HttpResponseException(HttpResponseNotFound("Source not found")) |
2093 | 529 | now = timezone.now() | 634 | now = timezone.now() |
2095 | 530 | is_success = ('success' in source_data and source_data['success']) | 635 | is_success = "success" in source_data and source_data["success"] |
2096 | 531 | s.success = is_success | 636 | s.success = is_success |
2097 | 532 | if is_success: | 637 | if is_success: |
2098 | 533 | s.date_last_backed_up = now | 638 | s.date_last_backed_up = now |
2099 | @@ -538,46 +643,63 @@ | |||
2100 | 538 | bl.date = now | 643 | bl.date = now |
2101 | 539 | bl.storage = self.storage | 644 | bl.storage = self.storage |
2102 | 540 | bl.success = is_success | 645 | bl.success = is_success |
2111 | 541 | if 'snapshot' in source_data: | 646 | if "snapshot" in source_data: |
2112 | 542 | bl.snapshot = source_data['snapshot'] | 647 | bl.snapshot = source_data["snapshot"] |
2113 | 543 | if 'summary' in source_data: | 648 | if "summary" in source_data: |
2114 | 544 | bl.summary = source_data['summary'] | 649 | bl.summary = source_data["summary"] |
2115 | 545 | if 'time_begin' in source_data: | 650 | if "time_begin" in source_data: |
2116 | 546 | bl.date_begin = timezone.make_aware(datetime.utcfromtimestamp(source_data['time_begin']), timezone.utc) | 651 | bl.date_begin = timezone.make_aware( |
2117 | 547 | if 'time_end' in source_data: | 652 | datetime.utcfromtimestamp(source_data["time_begin"]), timezone.utc |
2118 | 548 | bl.date_end = timezone.make_aware(datetime.utcfromtimestamp(source_data['time_end']), timezone.utc) | 653 | ) |
2119 | 654 | if "time_end" in source_data: | ||
2120 | 655 | bl.date_end = timezone.make_aware( | ||
2121 | 656 | datetime.utcfromtimestamp(source_data["time_end"]), timezone.utc | ||
2122 | 657 | ) | ||
2123 | 549 | bl.save() | 658 | bl.save() |
2125 | 550 | return HttpResponse(json.dumps({}), content_type='application/json') | 659 | return HttpResponse(json.dumps({}), content_type="application/json") |
2126 | 551 | 660 | ||
2127 | 552 | def storage_update_config(self): | 661 | def storage_update_config(self): |
2131 | 553 | if not (('storage' in self.req) and (isinstance(self.req['storage'], dict))): | 662 | if not (("storage" in self.req) and (isinstance(self.req["storage"], dict))): |
2132 | 554 | raise HttpResponseException(HttpResponseBadRequest('"storage" dict required')) | 663 | raise HttpResponseException( |
2133 | 555 | req_storage = self.req['storage'] | 664 | HttpResponseBadRequest('"storage" dict required') |
2134 | 665 | ) | ||
2135 | 666 | req_storage = self.req["storage"] | ||
2136 | 556 | 667 | ||
2137 | 557 | # Make sure these exist in the request (validation comes later) | 668 | # Make sure these exist in the request (validation comes later) |
2139 | 558 | for k in ('name', 'secret', 'ssh_ping_host', 'ssh_ping_port', 'ssh_ping_user', 'ssh_ping_host_keys'): | 669 | for k in ( |
2140 | 670 | "name", | ||
2141 | 671 | "secret", | ||
2142 | 672 | "ssh_ping_host", | ||
2143 | 673 | "ssh_ping_port", | ||
2144 | 674 | "ssh_ping_user", | ||
2145 | 675 | "ssh_ping_host_keys", | ||
2146 | 676 | ): | ||
2147 | 559 | if k not in req_storage: | 677 | if k not in req_storage: |
2149 | 560 | raise HttpResponseException(HttpResponseBadRequest('Missing required storage option "%s"' % k)) | 678 | raise HttpResponseException( |
2150 | 679 | HttpResponseBadRequest('Missing required storage option "%s"' % k) | ||
2151 | 680 | ) | ||
2152 | 561 | 681 | ||
2153 | 562 | # Create or load the storage | 682 | # Create or load the storage |
2154 | 563 | try: | 683 | try: |
2156 | 564 | self.storage = Storage.objects.get(name=req_storage['name'], active=True) | 684 | self.storage = Storage.objects.get(name=req_storage["name"], active=True) |
2157 | 565 | modified = False | 685 | modified = False |
2158 | 566 | except Storage.DoesNotExist: | 686 | except Storage.DoesNotExist: |
2162 | 567 | self.storage = Storage(name=req_storage['name']) | 687 | self.storage = Storage(name=req_storage["name"]) |
2163 | 568 | self.storage.secret_hash = hashers.make_password(req_storage['secret']) | 688 | self.storage.secret_hash = hashers.make_password(req_storage["secret"]) |
2164 | 569 | self.storage.auth = self.get_registration_auth('storage_reg') | 689 | self.storage.auth = self.get_registration_auth("storage_reg") |
2165 | 570 | modified = True | 690 | modified = True |
2166 | 571 | 691 | ||
2167 | 572 | # If the storage existed before, it had a secret. Make sure that | 692 | # If the storage existed before, it had a secret. Make sure that |
2168 | 573 | # hasn't changed. | 693 | # hasn't changed. |
2171 | 574 | if not hashers.check_password(req_storage['secret'], self.storage.secret_hash): | 694 | if not hashers.check_password(req_storage["secret"], self.storage.secret_hash): |
2172 | 575 | raise HttpResponseException(HttpResponseForbidden('Bad secret for existing storage')) | 695 | raise HttpResponseException( |
2173 | 696 | HttpResponseForbidden("Bad secret for existing storage") | ||
2174 | 697 | ) | ||
2175 | 576 | 698 | ||
2176 | 577 | # Change the storage published status if needed | 699 | # Change the storage published status if needed |
2180 | 578 | if ('published' in req_storage): | 700 | if "published" in req_storage: |
2181 | 579 | if req_storage['published'] != self.storage.published: | 701 | if req_storage["published"] != self.storage.published: |
2182 | 580 | self.storage.published = req_storage['published'] | 702 | self.storage.published = req_storage["published"] |
2183 | 581 | modified = True | 703 | modified = True |
2184 | 582 | else: | 704 | else: |
2185 | 583 | # If not present, default to want published | 705 | # If not present, default to want published |
2186 | @@ -587,12 +709,19 @@ | |||
2187 | 587 | 709 | ||
2188 | 588 | # If any of these exist in the request, add or update them in the | 710 | # If any of these exist in the request, add or update them in the |
2189 | 589 | # self.storage. | 711 | # self.storage. |
2191 | 590 | for k in ('comment', 'ssh_ping_host', 'ssh_ping_port', 'ssh_ping_user', 'space_total', 'space_available'): | 712 | for k in ( |
2192 | 713 | "comment", | ||
2193 | 714 | "ssh_ping_host", | ||
2194 | 715 | "ssh_ping_port", | ||
2195 | 716 | "ssh_ping_user", | ||
2196 | 717 | "space_total", | ||
2197 | 718 | "space_available", | ||
2198 | 719 | ): | ||
2199 | 591 | if (k in req_storage) and (getattr(self.storage, k) != req_storage[k]): | 720 | if (k in req_storage) and (getattr(self.storage, k) != req_storage[k]): |
2200 | 592 | setattr(self.storage, k, req_storage[k]) | 721 | setattr(self.storage, k, req_storage[k]) |
2201 | 593 | modified = True | 722 | modified = True |
2202 | 594 | 723 | ||
2204 | 595 | for k in ('ssh_ping_host_keys',): | 724 | for k in ("ssh_ping_host_keys",): |
2205 | 596 | if k not in req_storage: | 725 | if k not in req_storage: |
2206 | 597 | continue | 726 | continue |
2207 | 598 | v = json.dumps(req_storage[k], sort_keys=True) | 727 | v = json.dumps(req_storage[k], sort_keys=True) |
2208 | @@ -606,21 +735,27 @@ | |||
2209 | 606 | try: | 735 | try: |
2210 | 607 | self.storage.full_clean() | 736 | self.storage.full_clean() |
2211 | 608 | except ValidationError as e: | 737 | except ValidationError as e: |
2213 | 609 | raise HttpResponseException(HttpResponseBadRequest('Validation error: %s' % str(e))) | 738 | raise HttpResponseException( |
2214 | 739 | HttpResponseBadRequest("Validation error: %s" % str(e)) | ||
2215 | 740 | ) | ||
2216 | 610 | 741 | ||
2217 | 611 | self.storage.date_checked_in = timezone.now() | 742 | self.storage.date_checked_in = timezone.now() |
2218 | 612 | self.storage.save() | 743 | self.storage.save() |
2219 | 613 | 744 | ||
2220 | 614 | machines = {} | 745 | machines = {} |
2222 | 615 | for m in Machine.objects.filter(storage=self.storage, active=True, published=True): | 746 | for m in Machine.objects.filter( |
2223 | 747 | storage=self.storage, active=True, published=True | ||
2224 | 748 | ): | ||
2225 | 616 | machines[m.uuid] = { | 749 | machines[m.uuid] = { |
2231 | 617 | 'environment_name': m.environment_name, | 750 | "environment_name": m.environment_name, |
2232 | 618 | 'service_name': m.service_name, | 751 | "service_name": m.service_name, |
2233 | 619 | 'unit_name': m.unit_name, | 752 | "unit_name": m.unit_name, |
2234 | 620 | 'comment': m.comment, | 753 | "comment": m.comment, |
2235 | 621 | 'ssh_public_key': m.ssh_public_key, | 754 | "ssh_public_key": m.ssh_public_key, |
2236 | 622 | } | 755 | } |
2238 | 623 | return HttpResponse(json.dumps({'machines': machines}), content_type='application/json') | 756 | return HttpResponse( |
2239 | 757 | json.dumps({"machines": machines}), content_type="application/json" | ||
2240 | 758 | ) | ||
2241 | 624 | 759 | ||
2242 | 625 | 760 | ||
2243 | 626 | @csrf_exempt | 761 | @csrf_exempt |
2244 | @@ -629,19 +764,19 @@ | |||
2245 | 629 | # to connect to its database and serve data). It does not | 764 | # to connect to its database and serve data). It does not |
2246 | 630 | # indicate the health of machines, storage units, etc. | 765 | # indicate the health of machines, storage units, etc. |
2247 | 631 | out = { | 766 | out = { |
2258 | 632 | 'healthy': True, | 767 | "healthy": True, |
2259 | 633 | 'date': timezone.now().isoformat(), | 768 | "date": timezone.now().isoformat(), |
2260 | 634 | 'repo_revision': get_repo_revision(), | 769 | "repo_revision": get_repo_revision(), |
2261 | 635 | 'counts': { | 770 | "counts": { |
2262 | 636 | 'auth': Auth.objects.count(), | 771 | "auth": Auth.objects.count(), |
2263 | 637 | 'storage': Storage.objects.count(), | 772 | "storage": Storage.objects.count(), |
2264 | 638 | 'machine': Machine.objects.count(), | 773 | "machine": Machine.objects.count(), |
2265 | 639 | 'source': Source.objects.count(), | 774 | "source": Source.objects.count(), |
2266 | 640 | 'filter_set': FilterSet.objects.count(), | 775 | "filter_set": FilterSet.objects.count(), |
2267 | 641 | 'backup_log': BackupLog.objects.count(), | 776 | "backup_log": BackupLog.objects.count(), |
2268 | 642 | }, | 777 | }, |
2269 | 643 | } | 778 | } |
2271 | 644 | return HttpResponse(json.dumps(out), content_type='application/json') | 779 | return HttpResponse(json.dumps(out), content_type="application/json") |
2272 | 645 | 780 | ||
2273 | 646 | 781 | ||
2274 | 647 | @csrf_exempt | 782 | @csrf_exempt |
2275 | 648 | 783 | ||
2276 | === modified file 'turku_api/wsgi.py' | |||
2277 | --- turku_api/wsgi.py 2015-07-30 22:41:42 +0000 | |||
2278 | +++ turku_api/wsgi.py 2020-06-21 23:58:30 +0000 | |||
2279 | @@ -25,9 +25,11 @@ | |||
2280 | 25 | 25 | ||
2281 | 26 | import os | 26 | import os |
2282 | 27 | import sys | 27 | import sys |
2283 | 28 | |||
2284 | 28 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | 29 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
2285 | 29 | sys.path.append(BASE_DIR) | 30 | sys.path.append(BASE_DIR) |
2286 | 30 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "turku_api.settings") | 31 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "turku_api.settings") |
2287 | 31 | 32 | ||
2289 | 32 | from django.core.wsgi import get_wsgi_application | 33 | from django.core.wsgi import get_wsgi_application # noqa: E402 |
2290 | 34 | |||
2291 | 33 | application = get_wsgi_application() | 35 | application = get_wsgi_application() |
This merge proposal is being monitored by mergebot. Change the status to Approved to merge.