Merge ~gabrielcocenza/juju-lint:bug/1990885 into juju-lint:master
- Git
- lp:~gabrielcocenza/juju-lint
- bug/1990885
- Merge into master
Status: | Merged |
---|---|
Approved by: | Eric Chen |
Approved revision: | d11c04e32c8c1d34e3f58ade4bf6c6297686773d |
Merged at revision: | 52fe9bef72b5aec36de0f320e32c994d8fbd96ed |
Proposed branch: | ~gabrielcocenza/juju-lint:bug/1990885 |
Merge into: | juju-lint:master |
Diff against target: |
1083 lines (+661/-46) 10 files modified
jujulint/checks/hyper_converged.py (+38/-0) jujulint/lint.py (+31/-2) jujulint/model_input.py (+68/-3) tests/unit/conftest.py (+337/-1) tests/unit/test_hyper_converged.py (+32/-0) tests/unit/test_input.py (+61/-0) tests/unit/test_jujulint.py (+62/-4) tests/unit/test_relations.py (+2/-2) tests/unit/test_spaces.py (+29/-33) tox.ini (+1/-1) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Eric Chen | Approve | ||
🤖 prod-jenkaas-bootstack (community) | continuous-integration | Approve | |
Mert Kirpici (community) | Approve | ||
JamesLin | Approve | ||
Review via email: mp+431602@code.launchpad.net |
Commit message
added check for hyper converged deployments
- warning for hyper converged deployments with Masakari
- added machines_to_apps field, filter_machines_by_charm
and filter_lxd_on_machine methods
- created subdirectory for checks
- renamed module check_spaces.py to spaces.py
Closes-Bug: #1990885
Description of the change
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
🤖 prod-jenkaas-bootstack (prod-jenkaas-bootstack) wrote : | # |
PASSED: Continuous integration, rev:4634d824478
https:/
Executed test runs:
SUCCESS: https:/
None: https:/
Click here to trigger a rebuild:
https:/
🤖 prod-jenkaas-bootstack (prod-jenkaas-bootstack) wrote : | # |
PASSED: Continuous integration, rev:895444691d9
https:/
Executed test runs:
SUCCESS: https:/
None: https:/
Click here to trigger a rebuild:
https:/
Mert Kirpici (mertkirpici) wrote : | # |
thanks Gabriel. I proposed one suggestion inline.
Eric Chen (eric-chen) wrote : | # |
Agree with Mert, please change it before we merge it. thanks!
🤖 prod-jenkaas-bootstack (prod-jenkaas-bootstack) wrote : | # |
PASSED: Continuous integration, rev:d11c04e32c8
https:/
Executed test runs:
SUCCESS: https:/
None: https:/
Click here to trigger a rebuild:
https:/
Eric Chen (eric-chen) : | # |
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
Change successfully merged at revision 52fe9bef72b5aec36de0f320e32c994d8fbd96ed
Preview Diff
1 | diff --git a/jujulint/checks/hyper_converged.py b/jujulint/checks/hyper_converged.py | |||
2 | 0 | new file mode 100644 | 0 | new file mode 100644 |
3 | index 0000000..0dd5da6 | |||
4 | --- /dev/null | |||
5 | +++ b/jujulint/checks/hyper_converged.py | |||
6 | @@ -0,0 +1,38 @@ | |||
7 | 1 | #!/usr/bin/python3 | ||
8 | 2 | """Checks if nodes can be Hyper-Converged.""" | ||
9 | 3 | |||
10 | 4 | from collections import defaultdict | ||
11 | 5 | from typing import DefaultDict, Union | ||
12 | 6 | |||
13 | 7 | from jujulint.model_input import JujuBundleFile, JujuStatusFile | ||
14 | 8 | |||
15 | 9 | |||
16 | 10 | # see LP#1990885 | ||
17 | 11 | def check_hyper_converged( | ||
18 | 12 | input_file: Union[JujuBundleFile, JujuStatusFile] | ||
19 | 13 | ) -> DefaultDict[str, DefaultDict[str, set]]: | ||
20 | 14 | """Check if other services are collocated with nova/osd with masakari. | ||
21 | 15 | |||
22 | 16 | Hyperconvered is nova/osd collocated with openstack services. | ||
23 | 17 | Masakari uses ha-cluster to monitor nodes. If the node is not responsive then the | ||
24 | 18 | node is taken down. This is fine for nova/osd units, but if there are collocated | ||
25 | 19 | with openstack services this can be problematic. | ||
26 | 20 | |||
27 | 21 | |||
28 | 22 | :param input_file: mapped content of the input file. | ||
29 | 23 | :type input_file: Union[JujuBundleFile, JujuStatusFile] | ||
30 | 24 | :return: Services on lxds that are on nova/osd machines. | ||
31 | 25 | :rtype: DefaultDict[str, DefaultDict[str, set]] | ||
32 | 26 | """ | ||
33 | 27 | hyper_converged_warning = defaultdict(lambda: defaultdict(set)) | ||
34 | 28 | if "masakari" in input_file.charms: | ||
35 | 29 | nova_machines = input_file.filter_machines_by_charm("nova-compute") | ||
36 | 30 | ods_machines = input_file.filter_machines_by_charm("ceph-osd") | ||
37 | 31 | nova_osd_machines = nova_machines.intersection(ods_machines) | ||
38 | 32 | if nova_osd_machines: | ||
39 | 33 | for machine in nova_osd_machines: | ||
40 | 34 | lxds = input_file.filter_lxd_on_machine(machine) | ||
41 | 35 | for lxd in lxds: | ||
42 | 36 | apps = input_file.machines_to_apps[lxd] | ||
43 | 37 | hyper_converged_warning[machine][lxd] = apps | ||
44 | 38 | return hyper_converged_warning | ||
45 | diff --git a/jujulint/relations.py b/jujulint/checks/relations.py | |||
46 | 0 | similarity index 100% | 39 | similarity index 100% |
47 | 1 | rename from jujulint/relations.py | 40 | rename from jujulint/relations.py |
48 | 2 | rename to jujulint/checks/relations.py | 41 | rename to jujulint/checks/relations.py |
49 | diff --git a/jujulint/check_spaces.py b/jujulint/checks/spaces.py | |||
50 | 3 | similarity index 100% | 42 | similarity index 100% |
51 | 4 | rename from jujulint/check_spaces.py | 43 | rename from jujulint/check_spaces.py |
52 | 5 | rename to jujulint/checks/spaces.py | 44 | rename to jujulint/checks/spaces.py |
53 | diff --git a/jujulint/lint.py b/jujulint/lint.py | |||
54 | index b8fa535..f73b7ea 100755 | |||
55 | --- a/jujulint/lint.py | |||
56 | +++ b/jujulint/lint.py | |||
57 | @@ -33,11 +33,12 @@ import yaml | |||
58 | 33 | from attr import attrib, attrs | 33 | from attr import attrib, attrs |
59 | 34 | from dateutil import relativedelta | 34 | from dateutil import relativedelta |
60 | 35 | 35 | ||
61 | 36 | import jujulint.checks.hyper_converged as hyper_converged | ||
62 | 36 | import jujulint.util as utils | 37 | import jujulint.util as utils |
64 | 37 | from jujulint.check_spaces import Relation, find_space_mismatches | 38 | from jujulint.checks.relations import RelationError, RelationsRulesBootStrap |
65 | 39 | from jujulint.checks.spaces import Relation, find_space_mismatches | ||
66 | 38 | from jujulint.logging import Logger | 40 | from jujulint.logging import Logger |
67 | 39 | from jujulint.model_input import input_handler | 41 | from jujulint.model_input import input_handler |
68 | 40 | from jujulint.relations import RelationError, RelationsRulesBootStrap | ||
69 | 41 | 42 | ||
70 | 42 | VALID_CONFIG_CHECKS = ("isset", "eq", "neq", "gte", "search") | 43 | VALID_CONFIG_CHECKS = ("isset", "eq", "neq", "gte", "search") |
71 | 43 | VALID_LOG_LEVEL = { | 44 | VALID_LOG_LEVEL = { |
72 | @@ -757,6 +758,33 @@ class Linter: | |||
73 | 757 | } | 758 | } |
74 | 758 | ) | 759 | ) |
75 | 759 | 760 | ||
76 | 761 | def check_hyper_converged(self, input_file): | ||
77 | 762 | """Check hyper converged deployments. | ||
78 | 763 | |||
79 | 764 | :param input_file: mapped content of the input file. | ||
80 | 765 | :type input_file: Union[JujuBundleFile, JujuStatusFile] | ||
81 | 766 | """ | ||
82 | 767 | hyper_converged_warning = hyper_converged.check_hyper_converged(input_file) | ||
83 | 768 | |||
84 | 769 | if hyper_converged_warning: | ||
85 | 770 | for machine in hyper_converged_warning: | ||
86 | 771 | for lxd in hyper_converged_warning[machine]: | ||
87 | 772 | self.message_handler( | ||
88 | 773 | { | ||
89 | 774 | "id": "hyper-converged-masakari", | ||
90 | 775 | "tags": ["hyper-converged", "masakari"], | ||
91 | 776 | "message": ( | ||
92 | 777 | "Deployment has Masakari and the machine: '{}' " | ||
93 | 778 | "has nova/osd and the lxd: '{}' with those services {}" | ||
94 | 779 | ).format( | ||
95 | 780 | machine, | ||
96 | 781 | lxd, | ||
97 | 782 | sorted(list(hyper_converged_warning[machine][lxd])), | ||
98 | 783 | ), | ||
99 | 784 | }, | ||
100 | 785 | log_level=logging.WARNING, | ||
101 | 786 | ) | ||
102 | 787 | |||
103 | 760 | def check_charms_ops_mandatory(self, charm): | 788 | def check_charms_ops_mandatory(self, charm): |
104 | 761 | """ | 789 | """ |
105 | 762 | Check if a mandatory ops charms is present in the model. | 790 | Check if a mandatory ops charms is present in the model. |
106 | @@ -1352,6 +1380,7 @@ class Linter: | |||
107 | 1352 | 1380 | ||
108 | 1353 | self.check_subs(parsed_yaml["machines"]) | 1381 | self.check_subs(parsed_yaml["machines"]) |
109 | 1354 | self.check_relations(input_file) | 1382 | self.check_relations(input_file) |
110 | 1383 | self.check_hyper_converged(input_file) | ||
111 | 1355 | self.check_charms() | 1384 | self.check_charms() |
112 | 1356 | 1385 | ||
113 | 1357 | if "relations" in parsed_yaml: | 1386 | if "relations" in parsed_yaml: |
114 | diff --git a/jujulint/model_input.py b/jujulint/model_input.py | |||
115 | index a14551c..8233ebb 100644 | |||
116 | --- a/jujulint/model_input.py | |||
117 | +++ b/jujulint/model_input.py | |||
118 | @@ -25,6 +25,7 @@ class BaseFile: | |||
119 | 25 | app_to_charm: Dict = field(default_factory=dict) | 25 | app_to_charm: Dict = field(default_factory=dict) |
120 | 26 | charm_to_app: defaultdict[set] = field(default_factory=lambda: defaultdict(set)) | 26 | charm_to_app: defaultdict[set] = field(default_factory=lambda: defaultdict(set)) |
121 | 27 | apps_to_machines: defaultdict[set] = field(default_factory=lambda: defaultdict(set)) | 27 | apps_to_machines: defaultdict[set] = field(default_factory=lambda: defaultdict(set)) |
122 | 28 | machines_to_apps: defaultdict[set] = field(default_factory=lambda: defaultdict(set)) | ||
123 | 28 | 29 | ||
124 | 29 | def __post_init__(self): | 30 | def __post_init__(self): |
125 | 30 | """Dunder method to map file after instantiating.""" | 31 | """Dunder method to map file after instantiating.""" |
126 | @@ -124,6 +125,20 @@ class BaseFile: | |||
127 | 124 | else set() | 125 | else set() |
128 | 125 | ) | 126 | ) |
129 | 126 | 127 | ||
130 | 128 | def filter_machines_by_charm(self, charm: str) -> Set: | ||
131 | 129 | """Filter machines that has a specific charm. | ||
132 | 130 | |||
133 | 131 | :param charm: Charm name. | ||
134 | 132 | :type charm: str | ||
135 | 133 | :return: Machines that contains the charm. | ||
136 | 134 | :rtype: Set | ||
137 | 135 | """ | ||
138 | 136 | charm_machines = set() | ||
139 | 137 | charm_apps = self.charm_to_app[charm] | ||
140 | 138 | for charm_app in charm_apps: | ||
141 | 139 | charm_machines.update(self.apps_to_machines[charm_app]) | ||
142 | 140 | return charm_machines | ||
143 | 141 | |||
144 | 127 | def map_machines(self): | 142 | def map_machines(self): |
145 | 128 | """Map machines method to be implemented. | 143 | """Map machines method to be implemented. |
146 | 129 | 144 | ||
147 | @@ -158,6 +173,17 @@ class BaseFile: | |||
148 | 158 | """ | 173 | """ |
149 | 159 | raise NotImplementedError(f"{self.__class__.__name__} missing: sorted_machines") | 174 | raise NotImplementedError(f"{self.__class__.__name__} missing: sorted_machines") |
150 | 160 | 175 | ||
151 | 176 | def filter_lxd_on_machine(self, machine: str): | ||
152 | 177 | """Lxd containers on a machine. | ||
153 | 178 | |||
154 | 179 | :param machine: machine id. | ||
155 | 180 | :type machine: str | ||
156 | 181 | :raises NotImplementedError: Raise if not implemented on child classes. | ||
157 | 182 | """ | ||
158 | 183 | raise NotImplementedError( | ||
159 | 184 | f"{self.__class__.__name__} missing: filter_lxd_on_machine" | ||
160 | 185 | ) | ||
161 | 186 | |||
162 | 161 | 187 | ||
163 | 162 | @dataclass | 188 | @dataclass |
164 | 163 | class JujuStatusFile(BaseFile): | 189 | class JujuStatusFile(BaseFile): |
165 | @@ -176,9 +202,12 @@ class JujuStatusFile(BaseFile): | |||
166 | 176 | for unit in units: | 202 | for unit in units: |
167 | 177 | machine = units[unit].get("machine") | 203 | machine = units[unit].get("machine") |
168 | 178 | self.apps_to_machines[app].add(machine) | 204 | self.apps_to_machines[app].add(machine) |
169 | 205 | self.machines_to_apps[machine].add(app) | ||
170 | 179 | subordinates = units[unit].get("subordinates", {}) | 206 | subordinates = units[unit].get("subordinates", {}) |
171 | 180 | for sub in subordinates: | 207 | for sub in subordinates: |
173 | 181 | self.apps_to_machines[sub.split("/")[0]].add(machine) | 208 | sub_name = sub.split("/")[0] |
174 | 209 | self.apps_to_machines[sub_name].add(machine) | ||
175 | 210 | self.machines_to_apps[machine].add(sub_name) | ||
176 | 182 | 211 | ||
177 | 183 | @staticmethod | 212 | @staticmethod |
178 | 184 | def sorted_machines(machine: str) -> Tuple[int, str, int]: | 213 | def sorted_machines(machine: str) -> Tuple[int, str, int]: |
179 | @@ -218,6 +247,20 @@ class JujuStatusFile(BaseFile): | |||
180 | 218 | apps_related.update(relations.get(endpoint, [])) | 247 | apps_related.update(relations.get(endpoint, [])) |
181 | 219 | return apps_related | 248 | return apps_related |
182 | 220 | 249 | ||
183 | 250 | def filter_lxd_on_machine(self, machine: str) -> Set: | ||
184 | 251 | """Lxd containers on a machine. | ||
185 | 252 | |||
186 | 253 | :param machine: machine id. | ||
187 | 254 | :type machine: str | ||
188 | 255 | :return: lxd containers in the machine. | ||
189 | 256 | :rtype: Set | ||
190 | 257 | """ | ||
191 | 258 | return { | ||
192 | 259 | lxd_machine | ||
193 | 260 | for lxd_machine in self.machines | ||
194 | 261 | if "lxd" in lxd_machine and lxd_machine.split("/")[0] == machine | ||
195 | 262 | } | ||
196 | 263 | |||
197 | 221 | 264 | ||
198 | 222 | @dataclass | 265 | @dataclass |
199 | 223 | class JujuBundleFile(BaseFile): | 266 | class JujuBundleFile(BaseFile): |
200 | @@ -241,18 +284,26 @@ class JujuBundleFile(BaseFile): | |||
201 | 241 | for app in self.applications_data: | 284 | for app in self.applications_data: |
202 | 242 | machines = self.applications_data[app].get("to", []) | 285 | machines = self.applications_data[app].get("to", []) |
203 | 243 | self.apps_to_machines[app].update(machines) | 286 | self.apps_to_machines[app].update(machines) |
204 | 287 | for machine in machines: | ||
205 | 288 | self.machines_to_apps[machine].add(app) | ||
206 | 244 | # NOTE(gabrielcocenza) subordinates won't have the 'to' field because | 289 | # NOTE(gabrielcocenza) subordinates won't have the 'to' field because |
207 | 245 | # they are deployed thru relations. | 290 | # they are deployed thru relations. |
208 | 246 | subordinates = { | 291 | subordinates = { |
209 | 247 | sub for sub, machines in self.apps_to_machines.items() if machines == set() | 292 | sub for sub, machines in self.apps_to_machines.items() if machines == set() |
210 | 248 | } | 293 | } |
211 | 249 | for relation in self.relations_data: | 294 | for relation in self.relations_data: |
213 | 250 | app_1, endpoint_1, app_2, endpoint_2 = self.split_relation(relation) | 295 | app_1, _, app_2, _ = self.split_relation(relation) |
214 | 251 | # update with the machines of the application that the subordinate charm relate. | 296 | # update with the machines of the application that the subordinate charm relate. |
215 | 252 | if app_1 in subordinates: | 297 | if app_1 in subordinates: |
217 | 253 | self.apps_to_machines[app_1].update(self.apps_to_machines[app_2]) | 298 | sub_machines = self.apps_to_machines[app_2] |
218 | 299 | self.apps_to_machines[app_1].update(sub_machines) | ||
219 | 300 | for sub_machine in sub_machines: | ||
220 | 301 | self.machines_to_apps[sub_machine].add(app_1) | ||
221 | 254 | elif app_2 in subordinates: | 302 | elif app_2 in subordinates: |
222 | 303 | sub_machines = self.apps_to_machines[app_1] | ||
223 | 255 | self.apps_to_machines[app_2].update(self.apps_to_machines[app_1]) | 304 | self.apps_to_machines[app_2].update(self.apps_to_machines[app_1]) |
224 | 305 | for sub_machine in sub_machines: | ||
225 | 306 | self.machines_to_apps[sub_machine].add(app_2) | ||
226 | 256 | 307 | ||
227 | 257 | @staticmethod | 308 | @staticmethod |
228 | 258 | def sorted_machines(machine: str) -> Tuple[int, str]: | 309 | def sorted_machines(machine: str) -> Tuple[int, str]: |
229 | @@ -303,6 +354,20 @@ class JujuBundleFile(BaseFile): | |||
230 | 303 | apps_related.add(app_1_ep_1.split(":")[0]) | 354 | apps_related.add(app_1_ep_1.split(":")[0]) |
231 | 304 | return apps_related | 355 | return apps_related |
232 | 305 | 356 | ||
233 | 357 | def filter_lxd_on_machine(self, machine: str) -> Set: | ||
234 | 358 | """Lxd containers on a machine. | ||
235 | 359 | |||
236 | 360 | :param machine: machine id. | ||
237 | 361 | :type machine: str | ||
238 | 362 | :return: lxd containers in the machine. | ||
239 | 363 | :rtype: Set | ||
240 | 364 | """ | ||
241 | 365 | return { | ||
242 | 366 | lxd_machine | ||
243 | 367 | for lxd_machine in self.machines | ||
244 | 368 | if "lxd" in lxd_machine and lxd_machine.split(":")[1] == machine | ||
245 | 369 | } | ||
246 | 370 | |||
247 | 306 | 371 | ||
248 | 307 | def input_handler( | 372 | def input_handler( |
249 | 308 | parsed_yaml: Dict[str, Any], applications_key: str | 373 | parsed_yaml: Dict[str, Any], applications_key: str |
250 | diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py | |||
251 | index b49a4b2..0f09bba 100644 | |||
252 | --- a/tests/unit/conftest.py | |||
253 | +++ b/tests/unit/conftest.py | |||
254 | @@ -242,17 +242,31 @@ def rules_files(): | |||
255 | 242 | 242 | ||
256 | 243 | 243 | ||
257 | 244 | @pytest.fixture | 244 | @pytest.fixture |
259 | 245 | def input_files(parsed_yaml_status, parsed_yaml_bundle): | 245 | def input_files( |
260 | 246 | parsed_yaml_status, | ||
261 | 247 | parsed_yaml_bundle, | ||
262 | 248 | parsed_hyper_converged_yaml_status, | ||
263 | 249 | parsed_hyper_converged_yaml_bundle, | ||
264 | 250 | ): | ||
265 | 246 | return { | 251 | return { |
266 | 247 | "juju-status": JujuStatusFile( | 252 | "juju-status": JujuStatusFile( |
267 | 248 | applications_data=parsed_yaml_status["applications"], | 253 | applications_data=parsed_yaml_status["applications"], |
268 | 249 | machines_data=parsed_yaml_status["machines"], | 254 | machines_data=parsed_yaml_status["machines"], |
269 | 250 | ), | 255 | ), |
270 | 256 | "juju-status-hyper-converged": JujuStatusFile( | ||
271 | 257 | applications_data=parsed_hyper_converged_yaml_status["applications"], | ||
272 | 258 | machines_data=parsed_hyper_converged_yaml_status["machines"], | ||
273 | 259 | ), | ||
274 | 251 | "juju-bundle": JujuBundleFile( | 260 | "juju-bundle": JujuBundleFile( |
275 | 252 | applications_data=parsed_yaml_bundle["applications"], | 261 | applications_data=parsed_yaml_bundle["applications"], |
276 | 253 | machines_data=parsed_yaml_bundle["machines"], | 262 | machines_data=parsed_yaml_bundle["machines"], |
277 | 254 | relations_data=parsed_yaml_bundle["relations"], | 263 | relations_data=parsed_yaml_bundle["relations"], |
278 | 255 | ), | 264 | ), |
279 | 265 | "juju-bundle-parsed-hyper-converged": JujuBundleFile( | ||
280 | 266 | applications_data=parsed_hyper_converged_yaml_bundle["applications"], | ||
281 | 267 | machines_data=parsed_hyper_converged_yaml_bundle["machines"], | ||
282 | 268 | relations_data=parsed_hyper_converged_yaml_bundle["relations"], | ||
283 | 269 | ), | ||
284 | 256 | } | 270 | } |
285 | 257 | 271 | ||
286 | 258 | 272 | ||
287 | @@ -476,3 +490,325 @@ def parsed_yaml_bundle(): | |||
288 | 476 | ], | 490 | ], |
289 | 477 | ], | 491 | ], |
290 | 478 | } | 492 | } |
291 | 493 | |||
292 | 494 | |||
293 | 495 | @pytest.fixture | ||
294 | 496 | def parsed_hyper_converged_yaml_status(): | ||
295 | 497 | """Representation of a hyper converged model with masakari.""" | ||
296 | 498 | return { | ||
297 | 499 | "machines": { | ||
298 | 500 | "0": { | ||
299 | 501 | "juju-status": {"current": "started"}, | ||
300 | 502 | "machine-status": {"current": "running"}, | ||
301 | 503 | "modification-status": { | ||
302 | 504 | "current": "idle", | ||
303 | 505 | }, | ||
304 | 506 | "containers": { | ||
305 | 507 | "0/lxd/0": { | ||
306 | 508 | "juju-status": {"current": "started"}, | ||
307 | 509 | "machine-status": {"current": "running"}, | ||
308 | 510 | "modification-status": {"current": "applied"}, | ||
309 | 511 | "constraints": "arch=amd64 spaces=", | ||
310 | 512 | "hardware": "availability-zone=nova", | ||
311 | 513 | }, | ||
312 | 514 | "0/lxd/1": { | ||
313 | 515 | "juju-status": {"current": "started"}, | ||
314 | 516 | "machine-status": {"current": "running"}, | ||
315 | 517 | "modification-status": {"current": "applied"}, | ||
316 | 518 | "constraints": "arch=amd64 spaces=", | ||
317 | 519 | "hardware": "availability-zone=nova", | ||
318 | 520 | }, | ||
319 | 521 | }, | ||
320 | 522 | "constraints": "arch=amd64 mem=4096M", | ||
321 | 523 | "hardware": "arch=amd64 cores=2 mem=4096M root-disk=40960M availability-zone=nova", | ||
322 | 524 | }, | ||
323 | 525 | "1": { | ||
324 | 526 | "juju-status": {"current": "started"}, | ||
325 | 527 | "machine-status": {"current": "running"}, | ||
326 | 528 | "modification-status": {"current": "idle"}, | ||
327 | 529 | "constraints": "arch=amd64 mem=4096M", | ||
328 | 530 | "hardware": "arch=amd64 cores=2 mem=4096M root-disk=40960M availability-zone=nova", | ||
329 | 531 | }, | ||
330 | 532 | "2": { | ||
331 | 533 | "juju-status": {"current": "started"}, | ||
332 | 534 | "machine-status": {"current": "running"}, | ||
333 | 535 | "modification-status": {"current": "idle"}, | ||
334 | 536 | "constraints": "arch=amd64 mem=4096M", | ||
335 | 537 | "hardware": "arch=amd64 cores=2 mem=4096M root-disk=40960M availability-zone=nova", | ||
336 | 538 | }, | ||
337 | 539 | "3": { | ||
338 | 540 | "juju-status": {"current": "started"}, | ||
339 | 541 | "machine-status": {"current": "running"}, | ||
340 | 542 | "modification-status": {"current": "idle"}, | ||
341 | 543 | "constraints": "arch=amd64", | ||
342 | 544 | "hardware": "arch=amd64 cores=1 mem=2048M root-disk=20480M availability-zone=nova", | ||
343 | 545 | }, | ||
344 | 546 | }, | ||
345 | 547 | "applications": { | ||
346 | 548 | "ceilometer": { | ||
347 | 549 | "charm": "ceilometer", | ||
348 | 550 | "charm-name": "ceilometer", | ||
349 | 551 | "application-status": {"current": "active"}, | ||
350 | 552 | "relations": {"cluster": ["ceilometer"]}, | ||
351 | 553 | "units": { | ||
352 | 554 | "ceilometer/0": { | ||
353 | 555 | "workload-status": {"current": "active"}, | ||
354 | 556 | "juju-status": {"current": "idle"}, | ||
355 | 557 | "machine": "0/lxd/0", | ||
356 | 558 | } | ||
357 | 559 | }, | ||
358 | 560 | "endpoint-bindings": { | ||
359 | 561 | "": "alpha", | ||
360 | 562 | "admin": "alpha", | ||
361 | 563 | "amqp": "alpha", | ||
362 | 564 | "amqp-listener": "alpha", | ||
363 | 565 | "ceilometer-service": "alpha", | ||
364 | 566 | "certificates": "alpha", | ||
365 | 567 | "cluster": "alpha", | ||
366 | 568 | "event-service": "alpha", | ||
367 | 569 | "ha": "alpha", | ||
368 | 570 | "identity-credentials": "alpha", | ||
369 | 571 | "identity-notifications": "alpha", | ||
370 | 572 | "identity-service": "alpha", | ||
371 | 573 | "internal": "alpha", | ||
372 | 574 | "metric-service": "alpha", | ||
373 | 575 | "nrpe-external-master": "alpha", | ||
374 | 576 | "public": "alpha", | ||
375 | 577 | "shared-db": "alpha", | ||
376 | 578 | }, | ||
377 | 579 | }, | ||
378 | 580 | "ceph-mon": { | ||
379 | 581 | "charm": "ceph-mon", | ||
380 | 582 | "charm-name": "ceph-mon", | ||
381 | 583 | "application-status": {"current": "active"}, | ||
382 | 584 | "relations": { | ||
383 | 585 | "client": ["nova-compute"], | ||
384 | 586 | "mon": ["ceph-mon"], | ||
385 | 587 | "osd": ["ceph-osd"], | ||
386 | 588 | }, | ||
387 | 589 | "units": { | ||
388 | 590 | "ceph-mon/0": { | ||
389 | 591 | "workload-status": {"current": "idle"}, | ||
390 | 592 | "juju-status": {"current": "idle"}, | ||
391 | 593 | "machine": "0", | ||
392 | 594 | }, | ||
393 | 595 | "ceph-mon/1": { | ||
394 | 596 | "workload-status": {"current": "idle"}, | ||
395 | 597 | "juju-status": {"current": "idle"}, | ||
396 | 598 | "machine": "1", | ||
397 | 599 | }, | ||
398 | 600 | "ceph-mon/2": { | ||
399 | 601 | "workload-status": {"current": "idle"}, | ||
400 | 602 | "juju-status": {"current": "idle"}, | ||
401 | 603 | "machine": "2", | ||
402 | 604 | }, | ||
403 | 605 | }, | ||
404 | 606 | "endpoint-bindings": { | ||
405 | 607 | "": "alpha", | ||
406 | 608 | "admin": "alpha", | ||
407 | 609 | "bootstrap-source": "alpha", | ||
408 | 610 | "client": "alpha", | ||
409 | 611 | "cluster": "alpha", | ||
410 | 612 | "dashboard": "alpha", | ||
411 | 613 | "mds": "alpha", | ||
412 | 614 | "mon": "alpha", | ||
413 | 615 | "nrpe-external-master": "alpha", | ||
414 | 616 | "osd": "alpha", | ||
415 | 617 | "prometheus": "alpha", | ||
416 | 618 | "public": "alpha", | ||
417 | 619 | "radosgw": "alpha", | ||
418 | 620 | "rbd-mirror": "alpha", | ||
419 | 621 | }, | ||
420 | 622 | }, | ||
421 | 623 | "ceph-osd": { | ||
422 | 624 | "charm": "ceph-osd", | ||
423 | 625 | "charm-name": "ceph-osd", | ||
424 | 626 | "application-status": {"current": "active"}, | ||
425 | 627 | "relations": {"mon": ["ceph-mon"]}, | ||
426 | 628 | "units": { | ||
427 | 629 | "ceph-osd/0": { | ||
428 | 630 | "workload-status": {"current": "idle"}, | ||
429 | 631 | "juju-status": {"current": "idle"}, | ||
430 | 632 | "machine": "0", | ||
431 | 633 | }, | ||
432 | 634 | "ceph-osd/1": { | ||
433 | 635 | "workload-status": {"current": "idle"}, | ||
434 | 636 | "juju-status": {"current": "idle"}, | ||
435 | 637 | "machine": "1", | ||
436 | 638 | }, | ||
437 | 639 | "ceph-osd/2": { | ||
438 | 640 | "workload-status": {"current": "idle"}, | ||
439 | 641 | "juju-status": {"current": "idle"}, | ||
440 | 642 | "machine": "2", | ||
441 | 643 | }, | ||
442 | 644 | }, | ||
443 | 645 | "endpoint-bindings": { | ||
444 | 646 | "": "alpha", | ||
445 | 647 | "cluster": "alpha", | ||
446 | 648 | "mon": "alpha", | ||
447 | 649 | "nrpe-external-master": "alpha", | ||
448 | 650 | "public": "alpha", | ||
449 | 651 | "secrets-storage": "alpha", | ||
450 | 652 | }, | ||
451 | 653 | }, | ||
452 | 654 | "heat": { | ||
453 | 655 | "charm": "heat", | ||
454 | 656 | "series": "focal", | ||
455 | 657 | "charm-name": "heat", | ||
456 | 658 | "application-status": {"current": "active"}, | ||
457 | 659 | "relations": {"cluster": ["heat"]}, | ||
458 | 660 | "units": { | ||
459 | 661 | "heat/0": { | ||
460 | 662 | "workload-status": {"current": "idle"}, | ||
461 | 663 | "juju-status": {"current": "idle"}, | ||
462 | 664 | "machine": "0/lxd/1", | ||
463 | 665 | } | ||
464 | 666 | }, | ||
465 | 667 | "endpoint-bindings": { | ||
466 | 668 | "": "alpha", | ||
467 | 669 | "admin": "alpha", | ||
468 | 670 | "amqp": "alpha", | ||
469 | 671 | "certificates": "alpha", | ||
470 | 672 | "cluster": "alpha", | ||
471 | 673 | "ha": "alpha", | ||
472 | 674 | "heat-plugin-subordinate": "alpha", | ||
473 | 675 | "identity-service": "alpha", | ||
474 | 676 | "internal": "alpha", | ||
475 | 677 | "nrpe-external-master": "alpha", | ||
476 | 678 | "public": "alpha", | ||
477 | 679 | "shared-db": "alpha", | ||
478 | 680 | }, | ||
479 | 681 | }, | ||
480 | 682 | "masakari": { | ||
481 | 683 | "charm": "masakari", | ||
482 | 684 | "charm-name": "masakari", | ||
483 | 685 | "application-status": {"current": "active"}, | ||
484 | 686 | "relations": {"cluster": ["masakari"]}, | ||
485 | 687 | "units": { | ||
486 | 688 | "masakari/0": { | ||
487 | 689 | "workload-status": {"current": "idle"}, | ||
488 | 690 | "juju-status": {"current": "idle"}, | ||
489 | 691 | "machine": "3", | ||
490 | 692 | } | ||
491 | 693 | }, | ||
492 | 694 | "endpoint-bindings": { | ||
493 | 695 | "": "alpha", | ||
494 | 696 | "admin": "alpha", | ||
495 | 697 | "amqp": "alpha", | ||
496 | 698 | "certificates": "alpha", | ||
497 | 699 | "cluster": "alpha", | ||
498 | 700 | "ha": "alpha", | ||
499 | 701 | "identity-service": "alpha", | ||
500 | 702 | "internal": "alpha", | ||
501 | 703 | "public": "alpha", | ||
502 | 704 | "shared-db": "alpha", | ||
503 | 705 | }, | ||
504 | 706 | }, | ||
505 | 707 | "nova-compute": { | ||
506 | 708 | "charm": "nova-compute", | ||
507 | 709 | "charm-name": "nova-compute", | ||
508 | 710 | "application-status": {"current": "active"}, | ||
509 | 711 | "relations": {"ceph": ["ceph-mon"], "compute-peer": ["nova-compute"]}, | ||
510 | 712 | "units": { | ||
511 | 713 | "nova-compute/0": { | ||
512 | 714 | "workload-status": {"current": "idle"}, | ||
513 | 715 | "juju-status": {"current": "idle"}, | ||
514 | 716 | "machine": "0", | ||
515 | 717 | }, | ||
516 | 718 | "nova-compute/1": { | ||
517 | 719 | "workload-status": {"current": "idle"}, | ||
518 | 720 | "juju-status": {"current": "idle"}, | ||
519 | 721 | "machine": "1", | ||
520 | 722 | }, | ||
521 | 723 | "nova-compute/2": { | ||
522 | 724 | "workload-status": {"current": "idle"}, | ||
523 | 725 | "juju-status": {"current": "idle"}, | ||
524 | 726 | "machine": "2", | ||
525 | 727 | }, | ||
526 | 728 | }, | ||
527 | 729 | "endpoint-bindings": { | ||
528 | 730 | "": "alpha", | ||
529 | 731 | "amqp": "alpha", | ||
530 | 732 | "ceph": "alpha", | ||
531 | 733 | "ceph-access": "alpha", | ||
532 | 734 | "cloud-compute": "alpha", | ||
533 | 735 | "cloud-credentials": "alpha", | ||
534 | 736 | "compute-peer": "alpha", | ||
535 | 737 | "ephemeral-backend": "alpha", | ||
536 | 738 | "image-service": "alpha", | ||
537 | 739 | "internal": "alpha", | ||
538 | 740 | "ironic-api": "alpha", | ||
539 | 741 | "lxd": "alpha", | ||
540 | 742 | "migration": "alpha", | ||
541 | 743 | "neutron-plugin": "alpha", | ||
542 | 744 | "nova-ceilometer": "alpha", | ||
543 | 745 | "nrpe-external-master": "alpha", | ||
544 | 746 | "secrets-storage": "alpha", | ||
545 | 747 | }, | ||
546 | 748 | }, | ||
547 | 749 | }, | ||
548 | 750 | } | ||
549 | 751 | |||
550 | 752 | |||
551 | 753 | @pytest.fixture | ||
552 | 754 | def parsed_hyper_converged_yaml_bundle(): | ||
553 | 755 | """Representation of a hyper converged model with masakari.""" | ||
554 | 756 | return { | ||
555 | 757 | "series": "focal", | ||
556 | 758 | "applications": { | ||
557 | 759 | "ceilometer": { | ||
558 | 760 | "charm": "ceilometer", | ||
559 | 761 | "num_units": 1, | ||
560 | 762 | "to": ["lxd:0"], | ||
561 | 763 | "constraints": "arch=amd64", | ||
562 | 764 | }, | ||
563 | 765 | "ceph-mon": { | ||
564 | 766 | "charm": "ceph-mon", | ||
565 | 767 | "num_units": 3, | ||
566 | 768 | "to": ["0", "1", "2"], | ||
567 | 769 | "constraints": "arch=amd64", | ||
568 | 770 | }, | ||
569 | 771 | "ceph-osd": { | ||
570 | 772 | "charm": "ceph-osd", | ||
571 | 773 | "num_units": 3, | ||
572 | 774 | "to": ["0", "1", "2"], | ||
573 | 775 | "constraints": "arch=amd64 mem=4096", | ||
574 | 776 | "storage": { | ||
575 | 777 | "bluestore-db": "loop,0,1024", | ||
576 | 778 | "bluestore-wal": "loop,0,1024", | ||
577 | 779 | "osd-devices": "loop,0,1024", | ||
578 | 780 | "osd-journals": "loop,0,1024", | ||
579 | 781 | }, | ||
580 | 782 | }, | ||
581 | 783 | "heat": { | ||
582 | 784 | "charm": "heat", | ||
583 | 785 | "resources": {"policyd-override": 0}, | ||
584 | 786 | "num_units": 1, | ||
585 | 787 | "to": ["lxd:0"], | ||
586 | 788 | "constraints": "arch=amd64", | ||
587 | 789 | }, | ||
588 | 790 | "masakari": { | ||
589 | 791 | "charm": "masakari", | ||
590 | 792 | "num_units": 1, | ||
591 | 793 | "to": ["3"], | ||
592 | 794 | "constraints": "arch=amd64", | ||
593 | 795 | }, | ||
594 | 796 | "nova-compute": { | ||
595 | 797 | "charm": "nova-compute", | ||
596 | 798 | "num_units": 3, | ||
597 | 799 | "to": ["0", "1", "2"], | ||
598 | 800 | "constraints": "arch=amd64", | ||
599 | 801 | "storage": {"ephemeral-device": "loop,0,10240"}, | ||
600 | 802 | }, | ||
601 | 803 | }, | ||
602 | 804 | "machines": { | ||
603 | 805 | "0": {"constraints": "arch=amd64 mem=4096"}, | ||
604 | 806 | "1": {"constraints": "arch=amd64 mem=4096"}, | ||
605 | 807 | "2": {"constraints": "arch=amd64 mem=4096"}, | ||
606 | 808 | "3": {"constraints": "arch=amd64"}, | ||
607 | 809 | }, | ||
608 | 810 | "relations": [ | ||
609 | 811 | ["ceph-mon:client", "nova-compute:ceph"], | ||
610 | 812 | ["ceph-mon:osd", "ceph-osd:mon"], | ||
611 | 813 | ], | ||
612 | 814 | } | ||
613 | diff --git a/tests/unit/test_hyper_converged.py b/tests/unit/test_hyper_converged.py | |||
614 | 479 | new file mode 100644 | 815 | new file mode 100644 |
615 | index 0000000..751af7a | |||
616 | --- /dev/null | |||
617 | +++ b/tests/unit/test_hyper_converged.py | |||
618 | @@ -0,0 +1,32 @@ | |||
619 | 1 | from collections import defaultdict | ||
620 | 2 | |||
621 | 3 | import pytest | ||
622 | 4 | |||
623 | 5 | from jujulint.checks import hyper_converged | ||
624 | 6 | |||
625 | 7 | |||
626 | 8 | @pytest.mark.parametrize( | ||
627 | 9 | "masakari, input_file_type", | ||
628 | 10 | [ | ||
629 | 11 | (True, "juju-status-hyper-converged"), | ||
630 | 12 | (False, "juju-status-hyper-converged"), | ||
631 | 13 | (True, "juju-bundle-parsed-hyper-converged"), | ||
632 | 14 | (False, "juju-bundle-parsed-hyper-converged"), | ||
633 | 15 | ], | ||
634 | 16 | ) | ||
635 | 17 | def test_check_hyper_converged(input_files, masakari, input_file_type): | ||
636 | 18 | """Test hyper_converged models.""" | ||
637 | 19 | input_file = input_files[input_file_type] | ||
638 | 20 | expected_result = defaultdict(lambda: defaultdict(set)) | ||
639 | 21 | if masakari and "juju-status" in input_file_type: | ||
640 | 22 | expected_result["0"]["0/lxd/0"] = {"ceilometer"} | ||
641 | 23 | expected_result["0"]["0/lxd/1"] = {"heat"} | ||
642 | 24 | elif masakari and "juju-bundle" in input_file_type: | ||
643 | 25 | expected_result["0"]["lxd:0"] = {"ceilometer", "heat"} | ||
644 | 26 | else: | ||
645 | 27 | # remove masakari from input file | ||
646 | 28 | del input_file.applications_data["masakari"] | ||
647 | 29 | del input_file.machines_data["3"] | ||
648 | 30 | input_file.charms = set() | ||
649 | 31 | input_file.map_file() | ||
650 | 32 | assert hyper_converged.check_hyper_converged(input_file) == expected_result | ||
651 | diff --git a/tests/unit/test_input.py b/tests/unit/test_input.py | |||
652 | index 682b664..f06ff4b 100644 | |||
653 | --- a/tests/unit/test_input.py | |||
654 | +++ b/tests/unit/test_input.py | |||
655 | @@ -51,6 +51,18 @@ def test_file_inputs(input_files, input_file_type): | |||
656 | 51 | "keystone": {"lxd:1"}, | 51 | "keystone": {"lxd:1"}, |
657 | 52 | }, | 52 | }, |
658 | 53 | }, | 53 | }, |
659 | 54 | "machines_to_apps": { | ||
660 | 55 | "juju-status": { | ||
661 | 56 | "0": {"nrpe-host", "elasticsearch"}, | ||
662 | 57 | "1": {"ubuntu", "nrpe-host"}, | ||
663 | 58 | "1/lxd/0": {"nrpe-container", "keystone"}, | ||
664 | 59 | }, | ||
665 | 60 | "juju-bundle": { | ||
666 | 61 | "0": {"nrpe-host", "elasticsearch"}, | ||
667 | 62 | "1": {"ubuntu", "nrpe-host"}, | ||
668 | 63 | "lxd:1": {"nrpe-container", "keystone"}, | ||
669 | 64 | }, | ||
670 | 65 | }, | ||
671 | 54 | } | 66 | } |
672 | 55 | assert input_file.applications == expected_output["applications"] | 67 | assert input_file.applications == expected_output["applications"] |
673 | 56 | assert input_file.machines == expected_output["machines"][input_file_type] | 68 | assert input_file.machines == expected_output["machines"][input_file_type] |
674 | @@ -58,6 +70,10 @@ def test_file_inputs(input_files, input_file_type): | |||
675 | 58 | input_file.apps_to_machines | 70 | input_file.apps_to_machines |
676 | 59 | == expected_output["apps_to_machines"][input_file_type] | 71 | == expected_output["apps_to_machines"][input_file_type] |
677 | 60 | ) | 72 | ) |
678 | 73 | assert ( | ||
679 | 74 | input_file.machines_to_apps | ||
680 | 75 | == expected_output["machines_to_apps"][input_file_type] | ||
681 | 76 | ) | ||
682 | 61 | assert input_file.charms == expected_output["charms"] | 77 | assert input_file.charms == expected_output["charms"] |
683 | 62 | assert input_file.app_to_charm == expected_output["app_to_charm"] | 78 | assert input_file.app_to_charm == expected_output["app_to_charm"] |
684 | 63 | assert input_file.charm_to_app == expected_output["charm_to_app"] | 79 | assert input_file.charm_to_app == expected_output["charm_to_app"] |
685 | @@ -207,6 +223,48 @@ def test_input_handler(parsed_yaml, expected_output, request): | |||
686 | 207 | ) | 223 | ) |
687 | 208 | 224 | ||
688 | 209 | 225 | ||
689 | 226 | @pytest.mark.parametrize("input_file_type", ["juju-status", "juju-bundle"]) | ||
690 | 227 | def test_filter_machines_by_charm(input_files, input_file_type): | ||
691 | 228 | """Test filter_machines_by_charm method.""" | ||
692 | 229 | input_file = input_files[input_file_type] | ||
693 | 230 | if input_file_type == "juju-status": | ||
694 | 231 | expected_output = { | ||
695 | 232 | "nrpe": {"0", "1", "1/lxd/0"}, | ||
696 | 233 | "keystone": {"1/lxd/0"}, | ||
697 | 234 | "ubuntu": {"1"}, | ||
698 | 235 | "elasticsearch": {"0"}, | ||
699 | 236 | } | ||
700 | 237 | else: | ||
701 | 238 | expected_output = { | ||
702 | 239 | "nrpe": {"0", "1", "lxd:1"}, | ||
703 | 240 | "keystone": {"lxd:1"}, | ||
704 | 241 | "ubuntu": {"1"}, | ||
705 | 242 | "elasticsearch": {"0"}, | ||
706 | 243 | } | ||
707 | 244 | for charm in input_file.charms: | ||
708 | 245 | assert input_file.filter_machines_by_charm(charm) == expected_output[charm] | ||
709 | 246 | |||
710 | 247 | |||
711 | 248 | @pytest.mark.parametrize("input_file_type", ["juju-status", "juju-bundle"]) | ||
712 | 249 | def test_filter_lxd_on_machine(input_files, input_file_type): | ||
713 | 250 | """Test filter_lxd_on_machine method.""" | ||
714 | 251 | input_file = input_files[input_file_type] | ||
715 | 252 | if input_file_type == "juju-status": | ||
716 | 253 | expected_output = { | ||
717 | 254 | "0": set(), | ||
718 | 255 | "1": {"1/lxd/0"}, | ||
719 | 256 | "1/lxd/0": set(), | ||
720 | 257 | } | ||
721 | 258 | else: | ||
722 | 259 | expected_output = { | ||
723 | 260 | "0": set(), | ||
724 | 261 | "1": {"lxd:1"}, | ||
725 | 262 | "lxd:1": set(), | ||
726 | 263 | } | ||
727 | 264 | for machine in input_file.machines: | ||
728 | 265 | assert input_file.filter_lxd_on_machine(machine) == expected_output[machine] | ||
729 | 266 | |||
730 | 267 | |||
731 | 210 | def test_raise_not_implemented_methods(parsed_yaml_status): | 268 | def test_raise_not_implemented_methods(parsed_yaml_status): |
732 | 211 | # declare a new input class | 269 | # declare a new input class |
733 | 212 | @dataclass | 270 | @dataclass |
734 | @@ -231,3 +289,6 @@ def test_raise_not_implemented_methods(parsed_yaml_status): | |||
735 | 231 | 289 | ||
736 | 232 | with pytest.raises(NotImplementedError): | 290 | with pytest.raises(NotImplementedError): |
737 | 233 | new_input.sorted_machines("0") | 291 | new_input.sorted_machines("0") |
738 | 292 | |||
739 | 293 | with pytest.raises(NotImplementedError): | ||
740 | 294 | new_input.filter_lxd_on_machine("0") | ||
741 | diff --git a/tests/unit/test_jujulint.py b/tests/unit/test_jujulint.py | |||
742 | index 77dbb5e..c87722d 100644 | |||
743 | --- a/tests/unit/test_jujulint.py | |||
744 | +++ b/tests/unit/test_jujulint.py | |||
745 | @@ -7,7 +7,8 @@ from unittest import mock | |||
746 | 7 | import pytest | 7 | import pytest |
747 | 8 | import yaml | 8 | import yaml |
748 | 9 | 9 | ||
750 | 10 | from jujulint import check_spaces, lint, relations | 10 | from jujulint import lint |
751 | 11 | from jujulint.checks import relations, spaces | ||
752 | 11 | from jujulint.lint import VALID_LOG_LEVEL | 12 | from jujulint.lint import VALID_LOG_LEVEL |
753 | 12 | 13 | ||
754 | 13 | 14 | ||
755 | @@ -1424,7 +1425,7 @@ applications: | |||
756 | 1424 | This warning should be triggerred if some applications have bindings and some | 1425 | This warning should be triggerred if some applications have bindings and some |
757 | 1425 | dont. | 1426 | dont. |
758 | 1426 | """ | 1427 | """ |
760 | 1427 | logger_mock = mocker.patch.object(check_spaces, "LOGGER") | 1428 | logger_mock = mocker.patch.object(spaces, "LOGGER") |
761 | 1428 | 1429 | ||
762 | 1429 | app_without_binding = "prometheus-app" | 1430 | app_without_binding = "prometheus-app" |
763 | 1430 | bundle = { | 1431 | bundle = { |
764 | @@ -1458,7 +1459,7 @@ applications: | |||
765 | 1458 | mentioned explicitly will be bound to this default space. | 1459 | mentioned explicitly will be bound to this default space. |
766 | 1459 | Juju lint should raise warning if bundles do not define default space. | 1460 | Juju lint should raise warning if bundles do not define default space. |
767 | 1460 | """ | 1461 | """ |
769 | 1461 | logger_mock = mocker.patch.object(check_spaces, "LOGGER") | 1462 | logger_mock = mocker.patch.object(spaces, "LOGGER") |
770 | 1462 | app_without_default_space = "telegraf-app" | 1463 | app_without_default_space = "telegraf-app" |
771 | 1463 | 1464 | ||
772 | 1464 | bundle = { | 1465 | bundle = { |
773 | @@ -1490,7 +1491,7 @@ applications: | |||
774 | 1490 | 1491 | ||
775 | 1491 | def test_check_spaces_multi_model_warning(self, linter, mocker): | 1492 | def test_check_spaces_multi_model_warning(self, linter, mocker): |
776 | 1492 | """Test that check_spaces shows warning if some application are from another model.""" | 1493 | """Test that check_spaces shows warning if some application are from another model.""" |
778 | 1493 | logger_mock = mocker.patch.object(check_spaces, "LOGGER") | 1494 | logger_mock = mocker.patch.object(spaces, "LOGGER") |
779 | 1494 | 1495 | ||
780 | 1495 | app_another_model = "prometheus-app" | 1496 | app_another_model = "prometheus-app" |
781 | 1496 | bundle = { | 1497 | bundle = { |
782 | @@ -1745,3 +1746,60 @@ applications: | |||
783 | 1745 | logger_mock.assert_has_calls( | 1746 | logger_mock.assert_has_calls( |
784 | 1746 | [mocker.call(expected_message, level=logging.ERROR)] | 1747 | [mocker.call(expected_message, level=logging.ERROR)] |
785 | 1747 | ) | 1748 | ) |
786 | 1749 | |||
787 | 1750 | @pytest.mark.parametrize( | ||
788 | 1751 | "input_file_type", | ||
789 | 1752 | ["juju-status-hyper-converged", "juju-bundle-parsed-hyper-converged"], | ||
790 | 1753 | ) | ||
791 | 1754 | def test_check_hyper_converged(self, linter, input_files, mocker, input_file_type): | ||
792 | 1755 | """Test check_hyper_converged.""" | ||
793 | 1756 | input_file = input_files[input_file_type] | ||
794 | 1757 | mock_message_handler = mocker.patch("jujulint.lint.Linter.message_handler") | ||
795 | 1758 | msg = ( | ||
796 | 1759 | "Deployment has Masakari and the machine: '{}' " | ||
797 | 1760 | "has nova/osd and the lxd: '{}' with those services {}" | ||
798 | 1761 | ) | ||
799 | 1762 | expected_output = [ | ||
800 | 1763 | mocker.call( | ||
801 | 1764 | { | ||
802 | 1765 | "id": "hyper-converged-masakari", | ||
803 | 1766 | "tags": ["hyper-converged", "masakari"], | ||
804 | 1767 | "message": msg.format( | ||
805 | 1768 | "0", | ||
806 | 1769 | "lxd:0", | ||
807 | 1770 | ["ceilometer", "heat"], | ||
808 | 1771 | ), | ||
809 | 1772 | }, | ||
810 | 1773 | log_level=logging.WARNING, | ||
811 | 1774 | ) | ||
812 | 1775 | ] | ||
813 | 1776 | if "juju-status" in input_file_type: | ||
814 | 1777 | expected_output = [ | ||
815 | 1778 | mocker.call( | ||
816 | 1779 | { | ||
817 | 1780 | "id": "hyper-converged-masakari", | ||
818 | 1781 | "tags": ["hyper-converged", "masakari"], | ||
819 | 1782 | "message": msg.format( | ||
820 | 1783 | "0", | ||
821 | 1784 | "0/lxd/0", | ||
822 | 1785 | ["ceilometer"], | ||
823 | 1786 | ), | ||
824 | 1787 | }, | ||
825 | 1788 | log_level=logging.WARNING, | ||
826 | 1789 | ), | ||
827 | 1790 | mocker.call( | ||
828 | 1791 | { | ||
829 | 1792 | "id": "hyper-converged-masakari", | ||
830 | 1793 | "tags": ["hyper-converged", "masakari"], | ||
831 | 1794 | "message": msg.format( | ||
832 | 1795 | "0", | ||
833 | 1796 | "0/lxd/1", | ||
834 | 1797 | ["heat"], | ||
835 | 1798 | ), | ||
836 | 1799 | }, | ||
837 | 1800 | log_level=logging.WARNING, | ||
838 | 1801 | ), | ||
839 | 1802 | ] | ||
840 | 1803 | |||
841 | 1804 | linter.check_hyper_converged(input_file) | ||
842 | 1805 | mock_message_handler.assert_has_calls(expected_output, any_order=True) | ||
843 | diff --git a/tests/unit/test_relations.py b/tests/unit/test_relations.py | |||
844 | index f13d544..121fe9e 100644 | |||
845 | --- a/tests/unit/test_relations.py | |||
846 | +++ b/tests/unit/test_relations.py | |||
847 | @@ -2,7 +2,7 @@ | |||
848 | 2 | """Test the relations module.""" | 2 | """Test the relations module.""" |
849 | 3 | import pytest | 3 | import pytest |
850 | 4 | 4 | ||
852 | 5 | from jujulint import relations | 5 | from jujulint.checks import relations |
853 | 6 | 6 | ||
854 | 7 | CHARM_TO_APP = {"nrpe-host", "nrpe-container"} | 7 | CHARM_TO_APP = {"nrpe-host", "nrpe-container"} |
855 | 8 | CHARM = "nrpe" | 8 | CHARM = "nrpe" |
856 | @@ -352,7 +352,7 @@ def test_relations_raise_not_implemented(input_files, mocker): | |||
857 | 352 | """Ensure that a new class that not implement mandatory methods raises error.""" | 352 | """Ensure that a new class that not implement mandatory methods raises error.""" |
858 | 353 | logger_mock = mocker.patch.object(relations, "LOGGER") | 353 | logger_mock = mocker.patch.object(relations, "LOGGER") |
859 | 354 | mocker.patch( | 354 | mocker.patch( |
861 | 355 | "jujulint.relations.RelationRule.relation_exist_check", | 355 | "jujulint.checks.relations.RelationRule.relation_exist_check", |
862 | 356 | side_effect=NotImplementedError(), | 356 | side_effect=NotImplementedError(), |
863 | 357 | ) | 357 | ) |
864 | 358 | input_file = input_files["juju-status"] | 358 | input_file = input_files["juju-status"] |
865 | diff --git a/tests/unit/test_check_spaces.py b/tests/unit/test_spaces.py | |||
866 | 359 | similarity index 80% | 359 | similarity index 80% |
867 | 360 | rename from tests/unit/test_check_spaces.py | 360 | rename from tests/unit/test_check_spaces.py |
868 | 361 | rename to tests/unit/test_spaces.py | 361 | rename to tests/unit/test_spaces.py |
869 | index 9b77e66..db16505 100644 | |||
870 | --- a/tests/unit/test_check_spaces.py | |||
871 | +++ b/tests/unit/test_spaces.py | |||
872 | @@ -1,9 +1,9 @@ | |||
874 | 1 | """Tests for check_spaces.py module.""" | 1 | """Tests for spaces.py module.""" |
875 | 2 | from unittest.mock import call | 2 | from unittest.mock import call |
876 | 3 | 3 | ||
877 | 4 | import pytest | 4 | import pytest |
878 | 5 | 5 | ||
880 | 6 | from jujulint import check_spaces | 6 | from jujulint.checks import spaces |
881 | 7 | 7 | ||
882 | 8 | 8 | ||
883 | 9 | def test_relation_init(): | 9 | def test_relation_init(): |
884 | @@ -11,7 +11,7 @@ def test_relation_init(): | |||
885 | 11 | ep_1 = "Endpoint 1" | 11 | ep_1 = "Endpoint 1" |
886 | 12 | ep_2 = "Endpoint 2" | 12 | ep_2 = "Endpoint 2" |
887 | 13 | 13 | ||
889 | 14 | relation = check_spaces.Relation(ep_1, ep_2) | 14 | relation = spaces.Relation(ep_1, ep_2) |
890 | 15 | 15 | ||
891 | 16 | assert relation.endpoint1 == ep_1 | 16 | assert relation.endpoint1 == ep_1 |
892 | 17 | assert relation.endpoint2 == ep_2 | 17 | assert relation.endpoint2 == ep_2 |
893 | @@ -23,7 +23,7 @@ def test_relation_str(): | |||
894 | 23 | ep_2 = "Endpoint 2" | 23 | ep_2 = "Endpoint 2" |
895 | 24 | expected_str = "Relation({} - {})".format(ep_1, ep_2) | 24 | expected_str = "Relation({} - {})".format(ep_1, ep_2) |
896 | 25 | 25 | ||
898 | 26 | relation = check_spaces.Relation(ep_1, ep_2) | 26 | relation = spaces.Relation(ep_1, ep_2) |
899 | 27 | 27 | ||
900 | 28 | assert str(relation) == expected_str | 28 | assert str(relation) == expected_str |
901 | 29 | 29 | ||
902 | @@ -63,8 +63,8 @@ def test_relation_str(): | |||
903 | 63 | ) | 63 | ) |
904 | 64 | def test_relation_eq(rel_1_ep_1, rel_1_ep_2, rel_2_ep_1, rel_2_ep_2, expected_result): | 64 | def test_relation_eq(rel_1_ep_1, rel_1_ep_2, rel_2_ep_1, rel_2_ep_2, expected_result): |
905 | 65 | """Test equality operator of Relation class. Only return true if both endpoints match.""" | 65 | """Test equality operator of Relation class. Only return true if both endpoints match.""" |
908 | 66 | relation_1 = check_spaces.Relation(rel_1_ep_1, rel_1_ep_2) | 66 | relation_1 = spaces.Relation(rel_1_ep_1, rel_1_ep_2) |
909 | 67 | relation_2 = check_spaces.Relation(rel_2_ep_1, rel_2_ep_2) | 67 | relation_2 = spaces.Relation(rel_2_ep_1, rel_2_ep_2) |
910 | 68 | 68 | ||
911 | 69 | assert (relation_1 == relation_2) == expected_result | 69 | assert (relation_1 == relation_2) == expected_result |
912 | 70 | 70 | ||
913 | @@ -74,7 +74,7 @@ def test_relation_endpoints_prop(): | |||
914 | 74 | ep_1 = "Endpoint 1" | 74 | ep_1 = "Endpoint 1" |
915 | 75 | ep_2 = "Endpoint 2" | 75 | ep_2 = "Endpoint 2" |
916 | 76 | 76 | ||
918 | 77 | relation = check_spaces.Relation(ep_1, ep_2) | 77 | relation = spaces.Relation(ep_1, ep_2) |
919 | 78 | 78 | ||
920 | 79 | assert relation.endpoints == [ep_1, ep_2] | 79 | assert relation.endpoints == [ep_1, ep_2] |
921 | 80 | 80 | ||
922 | @@ -105,7 +105,7 @@ def test_space_mismatch_init(input_order, output_order): | |||
923 | 105 | This test also verifies that spaces in SpaceMismatch instance are ordered | 105 | This test also verifies that spaces in SpaceMismatch instance are ordered |
924 | 106 | alphabetically based on the endpoint name. | 106 | alphabetically based on the endpoint name. |
925 | 107 | """ | 107 | """ |
927 | 108 | mismatch_instance = check_spaces.SpaceMismatch(*input_order) | 108 | mismatch_instance = spaces.SpaceMismatch(*input_order) |
928 | 109 | 109 | ||
929 | 110 | # Assert that endpoints are alphabetically reordered | 110 | # Assert that endpoints are alphabetically reordered |
930 | 111 | assert mismatch_instance.endpoint1 == output_order[0] | 111 | assert mismatch_instance.endpoint1 == output_order[0] |
931 | @@ -124,7 +124,7 @@ def test_space_mismatch_str(): | |||
932 | 124 | ep_1, space_1, ep_2, space_2 | 124 | ep_1, space_1, ep_2, space_2 |
933 | 125 | ) | 125 | ) |
934 | 126 | 126 | ||
936 | 127 | mismatch_instance = check_spaces.SpaceMismatch(ep_1, space_1, ep_2, space_2) | 127 | mismatch_instance = spaces.SpaceMismatch(ep_1, space_1, ep_2, space_2) |
937 | 128 | 128 | ||
938 | 129 | assert str(mismatch_instance) == expected_str | 129 | assert str(mismatch_instance) == expected_str |
939 | 130 | 130 | ||
940 | @@ -136,9 +136,9 @@ def test_space_mismatch_relation_prop(): | |||
941 | 136 | space_1 = "Space 1" | 136 | space_1 = "Space 1" |
942 | 137 | space_2 = "Space 2" | 137 | space_2 = "Space 2" |
943 | 138 | 138 | ||
945 | 139 | expected_relation = check_spaces.Relation(ep_1, ep_2) | 139 | expected_relation = spaces.Relation(ep_1, ep_2) |
946 | 140 | 140 | ||
948 | 141 | mismatch_instance = check_spaces.SpaceMismatch(ep_1, space_1, ep_2, space_2) | 141 | mismatch_instance = spaces.SpaceMismatch(ep_1, space_1, ep_2, space_2) |
949 | 142 | 142 | ||
950 | 143 | assert mismatch_instance.relation == expected_relation | 143 | assert mismatch_instance.relation == expected_relation |
951 | 144 | 144 | ||
952 | @@ -156,9 +156,9 @@ def test_space_mismatch_get_charm_relation(): | |||
953 | 156 | 156 | ||
954 | 157 | app_map = {app_1: charm_1, app_2: charm_2} | 157 | app_map = {app_1: charm_1, app_2: charm_2} |
955 | 158 | 158 | ||
957 | 159 | expected_relation = check_spaces.Relation("ubuntu:endpoint_1", "nrpe:endpoint_2") | 159 | expected_relation = spaces.Relation("ubuntu:endpoint_1", "nrpe:endpoint_2") |
958 | 160 | 160 | ||
960 | 161 | mismatch_instance = check_spaces.SpaceMismatch(ep_1, space_1, ep_2, space_2) | 161 | mismatch_instance = spaces.SpaceMismatch(ep_1, space_1, ep_2, space_2) |
961 | 162 | 162 | ||
962 | 163 | assert mismatch_instance.get_charm_relation(app_map) == expected_relation | 163 | assert mismatch_instance.get_charm_relation(app_map) == expected_relation |
963 | 164 | 164 | ||
964 | @@ -173,32 +173,28 @@ def test_find_space_mismatches(use_cmr, mocker): | |||
965 | 173 | space_2 = "space 2" | 173 | space_2 = "space 2" |
966 | 174 | app_endpoint_1 = app_1 + ":endpoint" | 174 | app_endpoint_1 = app_1 + ":endpoint" |
967 | 175 | app_endpoint_2 = app_2 + ":endpoint" | 175 | app_endpoint_2 = app_2 + ":endpoint" |
971 | 176 | relation = check_spaces.Relation( | 176 | relation = spaces.Relation(app_endpoint_1, "XModel" if use_cmr else app_endpoint_2) |
969 | 177 | app_endpoint_1, "XModel" if use_cmr else app_endpoint_2 | ||
970 | 178 | ) | ||
972 | 179 | app_list = [app_1, app_2] | 177 | app_list = [app_1, app_2] |
973 | 180 | app_spaces = {app_1: {space_1: "foo"}, app_2: {space_2: "bar"}} | 178 | app_spaces = {app_1: {space_1: "foo"}, app_2: {space_2: "bar"}} |
974 | 181 | 179 | ||
975 | 182 | app_list_mock = mocker.patch.object( | 180 | app_list_mock = mocker.patch.object( |
977 | 183 | check_spaces, "get_juju_applications", return_value=app_list | 181 | spaces, "get_juju_applications", return_value=app_list |
978 | 184 | ) | 182 | ) |
979 | 185 | app_spaces_mock = mocker.patch.object( | 183 | app_spaces_mock = mocker.patch.object( |
981 | 186 | check_spaces, "get_application_spaces", return_value=app_spaces | 184 | spaces, "get_application_spaces", return_value=app_spaces |
982 | 187 | ) | 185 | ) |
983 | 188 | rel_list_mock = mocker.patch.object( | 186 | rel_list_mock = mocker.patch.object( |
985 | 189 | check_spaces, "get_application_relations", return_value=[relation] | 187 | spaces, "get_application_relations", return_value=[relation] |
986 | 190 | ) | 188 | ) |
987 | 191 | rel_space_mock = mocker.patch.object( | 189 | rel_space_mock = mocker.patch.object( |
989 | 192 | check_spaces, "get_relation_space", side_effect=[space_1, space_2] | 190 | spaces, "get_relation_space", side_effect=[space_1, space_2] |
990 | 193 | ) | 191 | ) |
991 | 194 | 192 | ||
992 | 195 | expected_mismatch = [ | 193 | expected_mismatch = [ |
996 | 196 | check_spaces.SpaceMismatch( | 194 | spaces.SpaceMismatch(relation.endpoint1, space_1, relation.endpoint2, space_2) |
994 | 197 | relation.endpoint1, space_1, relation.endpoint2, space_2 | ||
995 | 198 | ) | ||
997 | 199 | ] | 195 | ] |
998 | 200 | 196 | ||
1000 | 201 | mismatch = check_spaces.find_space_mismatches(sample_yaml, True) | 197 | mismatch = spaces.find_space_mismatches(sample_yaml, True) |
1001 | 202 | result_pairs = zip(expected_mismatch, mismatch) | 198 | result_pairs = zip(expected_mismatch, mismatch) |
1002 | 203 | 199 | ||
1003 | 204 | app_list_mock.assert_called_once_with(sample_yaml) | 200 | app_list_mock.assert_called_once_with(sample_yaml) |
1004 | @@ -224,7 +220,7 @@ def test_get_juju_applications(): | |||
1005 | 224 | 220 | ||
1006 | 225 | expected_apps = [app_1, app_2] | 221 | expected_apps = [app_1, app_2] |
1007 | 226 | 222 | ||
1009 | 227 | apps = check_spaces.get_juju_applications(sample_yaml) | 223 | apps = spaces.get_juju_applications(sample_yaml) |
1010 | 228 | 224 | ||
1011 | 229 | assert apps == expected_apps | 225 | assert apps == expected_apps |
1012 | 230 | 226 | ||
1013 | @@ -235,7 +231,7 @@ def test_get_application_spaces(mocker): | |||
1014 | 235 | This test also verifies that default binding to space "alpha" is added to applications | 231 | This test also verifies that default binding to space "alpha" is added to applications |
1015 | 236 | that do not specify any bindings. | 232 | that do not specify any bindings. |
1016 | 237 | """ | 233 | """ |
1018 | 238 | logger_mock = mocker.patch.object(check_spaces, "LOGGER") | 234 | logger_mock = mocker.patch.object(spaces, "LOGGER") |
1019 | 239 | default_binding = "" | 235 | default_binding = "" |
1020 | 240 | default_space = "custom_default_space" | 236 | default_space = "custom_default_space" |
1021 | 241 | public_binding = "public" | 237 | public_binding = "public" |
1022 | @@ -269,7 +265,7 @@ def test_get_application_spaces(mocker): | |||
1023 | 269 | app_list[2]: {default_binding: "alpha"}, | 265 | app_list[2]: {default_binding: "alpha"}, |
1024 | 270 | } | 266 | } |
1025 | 271 | 267 | ||
1027 | 272 | app_spaces = check_spaces.get_application_spaces(app_list, sample_yaml) | 268 | app_spaces = spaces.get_application_spaces(app_list, sample_yaml) |
1028 | 273 | 269 | ||
1029 | 274 | # Verify that all the bindings for properly defined app were returned | 270 | # Verify that all the bindings for properly defined app were returned |
1030 | 275 | # Verify that default binding was added to app that did not have any bindings defined | 271 | # Verify that default binding was added to app that did not have any bindings defined |
1031 | @@ -298,11 +294,11 @@ def test_get_application_relations(): | |||
1032 | 298 | } | 294 | } |
1033 | 299 | 295 | ||
1034 | 300 | expected_relations = [ | 296 | expected_relations = [ |
1037 | 301 | check_spaces.Relation("ubuntu:juju-info", "nrpe:general-info"), | 297 | spaces.Relation("ubuntu:juju-info", "nrpe:general-info"), |
1038 | 302 | check_spaces.Relation("vault:shared-db", "mysql-innodb-cluster:shared-db"), | 298 | spaces.Relation("vault:shared-db", "mysql-innodb-cluster:shared-db"), |
1039 | 303 | ] | 299 | ] |
1040 | 304 | 300 | ||
1042 | 305 | relations = check_spaces.get_application_relations(sample_yaml) | 301 | relations = spaces.get_application_relations(sample_yaml) |
1043 | 306 | 302 | ||
1044 | 307 | assert relations == expected_relations | 303 | assert relations == expected_relations |
1045 | 308 | 304 | ||
1046 | @@ -323,21 +319,21 @@ def test_get_relation_space(use_explicit_binding): | |||
1047 | 323 | else: | 319 | else: |
1048 | 324 | expected_space = default_space | 320 | expected_space = default_space |
1049 | 325 | 321 | ||
1051 | 326 | space = check_spaces.get_relation_space(endpoint, app_spaces) | 322 | space = spaces.get_relation_space(endpoint, app_spaces) |
1052 | 327 | 323 | ||
1053 | 328 | assert space == expected_space | 324 | assert space == expected_space |
1054 | 329 | 325 | ||
1055 | 330 | 326 | ||
1056 | 331 | def test_get_relation_space_cmr(mocker): | 327 | def test_get_relation_space_cmr(mocker): |
1057 | 332 | """Test getting space for cross model relation.""" | 328 | """Test getting space for cross model relation.""" |
1059 | 333 | logger_mock = mocker.patch.object(check_spaces, "LOGGER") | 329 | logger_mock = mocker.patch.object(spaces, "LOGGER") |
1060 | 334 | app_name = "ubuntu" | 330 | app_name = "ubuntu" |
1061 | 335 | interface = "juju_info" | 331 | interface = "juju_info" |
1062 | 336 | endpoint = app_name + ":" + interface | 332 | endpoint = app_name + ":" + interface |
1063 | 337 | 333 | ||
1064 | 338 | app_spaces = {} | 334 | app_spaces = {} |
1065 | 339 | 335 | ||
1067 | 340 | space = check_spaces.get_relation_space(endpoint, app_spaces) | 336 | space = spaces.get_relation_space(endpoint, app_spaces) |
1068 | 341 | 337 | ||
1069 | 342 | assert space == "XModel" | 338 | assert space == "XModel" |
1070 | 343 | logger_mock.warning.assert_called_once_with( | 339 | logger_mock.warning.assert_called_once_with( |
1071 | diff --git a/tox.ini b/tox.ini | |||
1072 | index 4dea114..08183e2 100644 | |||
1073 | --- a/tox.ini | |||
1074 | +++ b/tox.ini | |||
1075 | @@ -23,7 +23,7 @@ deps = | |||
1076 | 23 | -r{toxinidir}/requirements.txt | 23 | -r{toxinidir}/requirements.txt |
1077 | 24 | -r{toxinidir}/tests/unit/requirements.txt | 24 | -r{toxinidir}/tests/unit/requirements.txt |
1078 | 25 | commands = | 25 | commands = |
1080 | 26 | pytest -v \ | 26 | pytest -vv \ |
1081 | 27 | --cov=jujulint \ | 27 | --cov=jujulint \ |
1082 | 28 | --new-first \ | 28 | --new-first \ |
1083 | 29 | --last-failed \ | 29 | --last-failed \ |
This merge proposal is being monitored by mergebot. Change the status to Approved to merge.