Merge ~afreiberger/charm-hw-health:blacken-20.08 into charm-hw-health:master
- Git
- lp:~afreiberger/charm-hw-health
- blacken-20.08
- Merge into master
Proposed by
Drew Freiberger
Status: Merged
Merged at revision: 410c7d4c6a87b86a2e2b8209d2c3f739877bda48
Proposed branch: ~afreiberger/charm-hw-health:blacken-20.08
Merge into: charm-hw-health:master
Prerequisite: ~afreiberger/charm-hw-health:makefile-20.08
Diff against target: |
5526 lines (+1489/-1426) 37 files modified
src/actions/actions.py (+8/-8) src/files/common/check_hw_health_cron_output.py (+12/-10) src/files/common/hw_health_lib.py (+74/-73) src/files/hplog/cron_hplog.py (+81/-74) src/files/ilorest/check_ilorest.py (+6/-9) src/files/ilorest/cron_ilorest.py (+43/-41) src/files/ipmi/check_ipmi.py (+17/-11) src/files/ipmi/cron_ipmi_sensors.py (+16/-16) src/files/mdadm/check_mdadm.py (+8/-13) src/files/mdadm/cron_mdadm.py (+55/-64) src/files/megacli/check_megacli.py (+36/-40) src/files/nvme/check_nvme.py (+31/-22) src/files/sas2ircu/check_sas2ircu.py (+16/-19) src/files/sas3ircu/check_sas3ircu.py (+79/-90) src/files/ssacli/cron_ssacli.py (+39/-33) src/lib/hwhealth/discovery/lshw.py (+87/-87) src/lib/hwhealth/discovery/supported_vendors.py (+14/-19) src/lib/hwhealth/hwdiscovery.py (+26/-27) src/lib/hwhealth/tools.py (+174/-180) src/reactive/hw_health.py (+82/-73) src/tests/download_nagios_plugin3.py (+7/-6) src/tests/functional/conftest.py (+43/-34) src/tests/functional/test_hwhealth.py (+222/-192) src/tests/unit/lib/samples.py (+1/-6) src/tests/unit/test_actions.py (+113/-72) src/tests/unit/test_check_mdadm.py (+19/-30) src/tests/unit/test_check_megacli.py (+15/-17) src/tests/unit/test_check_nvme.py (+8/-8) src/tests/unit/test_check_sas2ircu.py (+5/-5) src/tests/unit/test_check_sas3ircu.py (+5/-5) src/tests/unit/test_cron_hplog.py (+10/-7) src/tests/unit/test_cron_ilorest.py (+12/-5) src/tests/unit/test_cron_mdadm.py (+65/-70) src/tests/unit/test_cron_ssacli.py (+20/-9) src/tests/unit/test_hwdiscovery.py (+27/-37) src/tests/unit/test_lshw.py (+11/-11) src/tox.ini (+2/-3) |
Related bugs: |
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Xav Paice (community) | Approve | | |
Review via email: mp+388951@code.launchpad.net
Commit message
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/src/actions/actions.py b/src/actions/actions.py | |||
2 | index e3f04cd..734841b 100755 | |||
3 | --- a/src/actions/actions.py | |||
4 | +++ b/src/actions/actions.py | |||
5 | @@ -21,7 +21,7 @@ import sys | |||
6 | 21 | from charmhelpers.core.hookenv import action_set, action_get, action_fail, log | 21 | from charmhelpers.core.hookenv import action_set, action_get, action_fail, log |
7 | 22 | 22 | ||
8 | 23 | 23 | ||
10 | 24 | IPMI_SEL = '/usr/sbin/ipmi-sel' | 24 | IPMI_SEL = "/usr/sbin/ipmi-sel" |
11 | 25 | 25 | ||
12 | 26 | 26 | ||
13 | 27 | def clear_sel(): | 27 | def clear_sel(): |
14 | @@ -30,11 +30,11 @@ def clear_sel(): | |||
15 | 30 | Uses ipmi-sel --post-clear, clears the SEL log and stores the cleared entries | 30 | Uses ipmi-sel --post-clear, clears the SEL log and stores the cleared entries |
16 | 31 | in action output. | 31 | in action output. |
17 | 32 | """ | 32 | """ |
19 | 33 | command = [IPMI_SEL, '--post-clear'] | 33 | command = [IPMI_SEL, "--post-clear"] |
20 | 34 | try: | 34 | try: |
21 | 35 | output = subprocess.check_output(command) | 35 | output = subprocess.check_output(command) |
22 | 36 | log("Action clear-sel completed, sel log cleared: {}".format(output)) | 36 | log("Action clear-sel completed, sel log cleared: {}".format(output)) |
24 | 37 | action_set({'message': output.decode('UTF-8')}) | 37 | action_set({"message": output.decode("UTF-8")}) |
25 | 38 | except subprocess.CalledProcessError as e: | 38 | except subprocess.CalledProcessError as e: |
26 | 39 | action_fail("Action failed with {}".format(e)) | 39 | action_fail("Action failed with {}".format(e)) |
27 | 40 | 40 | ||
28 | @@ -45,23 +45,23 @@ def show_sel(): | |||
29 | 45 | By default, this will show all non-nominal events. If you specify show-all, | 45 | By default, this will show all non-nominal events. If you specify show-all, |
30 | 46 | it will show all events. | 46 | it will show all events. |
31 | 47 | """ | 47 | """ |
34 | 48 | show_all = action_get('show-all') | 48 | show_all = action_get("show-all") |
35 | 49 | command = [IPMI_SEL, '--output-event-state'] | 49 | command = [IPMI_SEL, "--output-event-state"] |
36 | 50 | try: | 50 | try: |
37 | 51 | header, body = None, None | 51 | header, body = None, None |
39 | 52 | output = subprocess.check_output(command).decode('UTF-8') | 52 | output = subprocess.check_output(command).decode("UTF-8") |
40 | 53 | lines = output.splitlines() | 53 | lines = output.splitlines() |
41 | 54 | if lines: | 54 | if lines: |
42 | 55 | header, body = lines[0], lines[1:] | 55 | header, body = lines[0], lines[1:] |
43 | 56 | if not show_all: | 56 | if not show_all: |
44 | 57 | # This is fairly naive, but it may be good enough for now | 57 | # This is fairly naive, but it may be good enough for now |
46 | 58 | body = [line for line in body if 'Nominal' not in line] | 58 | body = [line for line in body if "Nominal" not in line] |
47 | 59 | if body: | 59 | if body: |
48 | 60 | final_output = "\n".join([header] + body) | 60 | final_output = "\n".join([header] + body) |
49 | 61 | else: | 61 | else: |
50 | 62 | final_output = "No matching entries found" | 62 | final_output = "No matching entries found" |
51 | 63 | log("Action show-sel completed:\n{}".format(final_output)) | 63 | log("Action show-sel completed:\n{}".format(final_output)) |
53 | 64 | action_set({'message': final_output}) | 64 | action_set({"message": final_output}) |
54 | 65 | except subprocess.CalledProcessError as e: | 65 | except subprocess.CalledProcessError as e: |
55 | 66 | action_fail("Action failed with {}".format(e)) | 66 | action_fail("Action failed with {}".format(e)) |
56 | 67 | 67 | ||
57 | diff --git a/src/files/common/check_hw_health_cron_output.py b/src/files/common/check_hw_health_cron_output.py | |||
58 | index ebb63e1..92ff372 100755 | |||
59 | --- a/src/files/common/check_hw_health_cron_output.py | |||
60 | +++ b/src/files/common/check_hw_health_cron_output.py | |||
61 | @@ -10,11 +10,13 @@ | |||
62 | 10 | 10 | ||
63 | 11 | from optparse import OptionParser | 11 | from optparse import OptionParser |
64 | 12 | 12 | ||
70 | 13 | from nagios_plugin3 import (check_file_freshness, | 13 | from nagios_plugin3 import ( |
71 | 14 | try_check, | 14 | check_file_freshness, |
72 | 15 | WarnError, | 15 | try_check, |
73 | 16 | UnknownError, | 16 | WarnError, |
74 | 17 | CriticalError) | 17 | UnknownError, |
75 | 18 | CriticalError, | ||
76 | 19 | ) | ||
77 | 18 | 20 | ||
78 | 19 | 21 | ||
79 | 20 | ############################################################################### | 22 | ############################################################################### |
80 | @@ -55,17 +57,17 @@ def main(): | |||
81 | 55 | help="freshness time limit [default=%default]", | 57 | help="freshness time limit [default=%default]", |
82 | 56 | metavar="SECONDS", | 58 | metavar="SECONDS", |
83 | 57 | default=1200, | 59 | default=1200, |
85 | 58 | type=int | 60 | type=int, |
86 | 59 | ) | 61 | ) |
87 | 60 | parser.add_option( | 62 | parser.add_option( |
89 | 61 | "-f", "--filename", | 63 | "-f", |
90 | 64 | "--filename", | ||
91 | 62 | dest="input_file", | 65 | dest="input_file", |
94 | 63 | help=('file containing the output of ' | 66 | help=("file containing the output of cron_ilorest.py [default=%default]"), |
93 | 64 | 'cron_ilorest.py [default=%default]'), | ||
95 | 65 | metavar="FILE", | 67 | metavar="FILE", |
96 | 66 | nargs=1, | 68 | nargs=1, |
97 | 67 | # default="/var/lib/nagios/UNSETOUTPUTFILE.out", | 69 | # default="/var/lib/nagios/UNSETOUTPUTFILE.out", |
99 | 68 | type=str | 70 | type=str, |
100 | 69 | ) | 71 | ) |
101 | 70 | 72 | ||
102 | 71 | (opts, args) = parser.parse_args() | 73 | (opts, args) = parser.parse_args() |
103 | diff --git a/src/files/common/hw_health_lib.py b/src/files/common/hw_health_lib.py | |||
104 | index c75a4e5..945de0d 100644 | |||
105 | --- a/src/files/common/hw_health_lib.py | |||
106 | +++ b/src/files/common/hw_health_lib.py | |||
107 | @@ -60,25 +60,30 @@ def read_ignore_file(ignore_file): # noqa C901 | |||
108 | 60 | ignores = [] | 60 | ignores = [] |
109 | 61 | if os.path.isfile(ignore_file): | 61 | if os.path.isfile(ignore_file): |
110 | 62 | for line in open(ignore_file).readlines(): | 62 | for line in open(ignore_file).readlines(): |
112 | 63 | d = {'matched': False, 'expired': False, 'line': line.rstrip(), 'ignore': None} | 63 | d = { |
113 | 64 | "matched": False, | ||
114 | 65 | "expired": False, | ||
115 | 66 | "line": line.rstrip(), | ||
116 | 67 | "ignore": None, | ||
117 | 68 | } | ||
118 | 64 | line = line.strip() | 69 | line = line.strip() |
119 | 65 | # special case lines starting with '*', do not disable if unmatched | 70 | # special case lines starting with '*', do not disable if unmatched |
120 | 66 | persist = False | 71 | persist = False |
122 | 67 | if line.startswith('*'): | 72 | if line.startswith("*"): |
123 | 68 | persist = True | 73 | persist = True |
125 | 69 | line = line.lstrip('*').strip() | 74 | line = line.lstrip("*").strip() |
126 | 70 | # parse date lines | 75 | # parse date lines |
129 | 71 | if line.startswith('['): | 76 | if line.startswith("["): |
130 | 72 | parts = re.split('\\[|\\]', line, maxsplit=2) | 77 | parts = re.split("\\[|\\]", line, maxsplit=2) |
131 | 73 | date = parts[1].strip() | 78 | date = parts[1].strip() |
132 | 74 | ignore = parts[2].strip() | 79 | ignore = parts[2].strip() |
133 | 75 | try: | 80 | try: |
135 | 76 | date = datetime.strptime(date, '%Y-%m-%d %H:%M') | 81 | date = datetime.strptime(date, "%Y-%m-%d %H:%M") |
136 | 77 | except ValueError: | 82 | except ValueError: |
137 | 78 | try: | 83 | try: |
139 | 79 | date = datetime.strptime(date, '%Y-%m-%d') | 84 | date = datetime.strptime(date, "%Y-%m-%d") |
140 | 80 | except ValueError: | 85 | except ValueError: |
142 | 81 | print("Failed to parse ignore date: {}".format(d['line'])) | 86 | print("Failed to parse ignore date: {}".format(d["line"])) |
143 | 82 | date = None | 87 | date = None |
144 | 83 | if date: | 88 | if date: |
145 | 84 | # Do not alert directly at midnight UTC | 89 | # Do not alert directly at midnight UTC |
146 | @@ -88,18 +93,18 @@ def read_ignore_file(ignore_file): # noqa C901 | |||
147 | 88 | if date: | 93 | if date: |
148 | 89 | if datetime.now().weekday() in (5, 6): | 94 | if datetime.now().weekday() in (5, 6): |
149 | 90 | # Ignore Saturday/Sunday to not annoy on-call | 95 | # Ignore Saturday/Sunday to not annoy on-call |
151 | 91 | d['ignore'] = ignore | 96 | d["ignore"] = ignore |
152 | 92 | elif date > datetime.now(): | 97 | elif date > datetime.now(): |
154 | 93 | d['ignore'] = ignore | 98 | d["ignore"] = ignore |
155 | 94 | else: | 99 | else: |
158 | 95 | d['expired'] = True | 100 | d["expired"] = True |
159 | 96 | if persist and not d['expired']: | 101 | if persist and not d["expired"]: |
160 | 97 | # set matched True so will not get disabled on non-match | 102 | # set matched True so will not get disabled on non-match |
162 | 98 | d['matched'] = True | 103 | d["matched"] = True |
163 | 99 | # comment lines and empty lines are just added | 104 | # comment lines and empty lines are just added |
165 | 100 | elif line.startswith('#') or not line: | 105 | elif line.startswith("#") or not line: |
166 | 101 | # add with matched True so does not trigger a file rewrite | 106 | # add with matched True so does not trigger a file rewrite |
168 | 102 | d['matched'] = True | 107 | d["matched"] = True |
169 | 103 | # unrecognized lines left matched False so rewritten | 108 | # unrecognized lines left matched False so rewritten |
170 | 104 | ignores.append(d) | 109 | ignores.append(d) |
171 | 105 | return ignores | 110 | return ignores |
172 | @@ -107,22 +112,24 @@ def read_ignore_file(ignore_file): # noqa C901 | |||
173 | 107 | 112 | ||
174 | 108 | def write_ignore_file(ignores, ignore_file): | 113 | def write_ignore_file(ignores, ignore_file): |
175 | 109 | # if any ignores are not matched then write out file lines again | 114 | # if any ignores are not matched then write out file lines again |
177 | 110 | if any([not i['matched'] for i in ignores]): | 115 | if any([not i["matched"] for i in ignores]): |
178 | 111 | dirname, basename = os.path.split(ignore_file) | 116 | dirname, basename = os.path.split(ignore_file) |
179 | 112 | date = datetime.now() | 117 | date = datetime.now() |
180 | 113 | try: | 118 | try: |
181 | 114 | f = tempfile.NamedTemporaryFile(dir=dirname, prefix=basename, delete=False) | 119 | f = tempfile.NamedTemporaryFile(dir=dirname, prefix=basename, delete=False) |
182 | 115 | for ignore in ignores: | 120 | for ignore in ignores: |
186 | 116 | if not ignore['matched'] and ignore['ignore']: | 121 | if not ignore["matched"] and ignore["ignore"]: |
187 | 117 | ignore['line'] = "# not matched at {} #{}".format( | 122 | ignore["line"] = "# not matched at {} #{}".format( |
188 | 118 | date.strftime("%Y-%m-%dT%H:%M:%S"), ignore['line'] | 123 | date.strftime("%Y-%m-%dT%H:%M:%S"), ignore["line"] |
189 | 119 | ) | 124 | ) |
191 | 120 | elif ignore['expired']: | 125 | elif ignore["expired"]: |
192 | 121 | # this won't get updated unless the alert has cleared | 126 | # this won't get updated unless the alert has cleared |
197 | 122 | ignore['line'] = "# expired #{}".format(ignore['line']) | 127 | ignore["line"] = "# expired #{}".format(ignore["line"]) |
198 | 123 | elif not ignore['matched']: | 128 | elif not ignore["matched"]: |
199 | 124 | ignore['line'] = "# unknown or bad format #{}".format(ignore['line']) | 129 | ignore["line"] = "# unknown or bad format #{}".format( |
200 | 125 | f.write(ignore['line'] + '\n') | 130 | ignore["line"] |
201 | 131 | ) | ||
202 | 132 | f.write(ignore["line"] + "\n") | ||
203 | 126 | f.flush() | 133 | f.flush() |
204 | 127 | os.fsync(f.fileno()) | 134 | os.fsync(f.fileno()) |
205 | 128 | f.close() | 135 | f.close() |
206 | @@ -137,11 +144,11 @@ def write_ignore_file(ignores, ignore_file): | |||
207 | 137 | def ignore(line, ignores): | 144 | def ignore(line, ignores): |
208 | 138 | # check if each ignore is in line | 145 | # check if each ignore is in line |
209 | 139 | for ignore in ignores: | 146 | for ignore in ignores: |
211 | 140 | if ignore['ignore'] and ignore['ignore'] in line: | 147 | if ignore["ignore"] and ignore["ignore"] in line: |
212 | 141 | ignoring_output.append("Ignoring: {}".format(line)) | 148 | ignoring_output.append("Ignoring: {}".format(line)) |
213 | 142 | # note: ignores can be updated since it is passed by reference | 149 | # note: ignores can be updated since it is passed by reference |
214 | 143 | # matched=True to keep using this ignore (see write_ignore_file) | 150 | # matched=True to keep using this ignore (see write_ignore_file) |
216 | 144 | ignore['matched'] = True | 151 | ignore["matched"] = True |
217 | 145 | return True | 152 | return True |
218 | 146 | return False | 153 | return False |
219 | 147 | 154 | ||
220 | @@ -153,102 +160,96 @@ def get_hp_controller_slots(): | |||
221 | 153 | Use the utility to determine the current controller slot(s) available for probing | 160 | Use the utility to determine the current controller slot(s) available for probing |
222 | 154 | """ | 161 | """ |
223 | 155 | slots = [] | 162 | slots = [] |
225 | 156 | cmd = ['/usr/sbin/hpssacli', 'ctrl', 'all', 'show'] | 163 | cmd = ["/usr/sbin/hpssacli", "ctrl", "all", "show"] |
226 | 157 | try: | 164 | try: |
228 | 158 | results = subprocess.check_output(cmd).decode('UTF-8') | 165 | results = subprocess.check_output(cmd).decode("UTF-8") |
229 | 159 | except subprocess.CalledProcessError: | 166 | except subprocess.CalledProcessError: |
230 | 160 | return slots | 167 | return slots |
231 | 161 | for line in results.splitlines(): | 168 | for line in results.splitlines(): |
233 | 162 | if 'in Slot' in line: | 169 | if "in Slot" in line: |
234 | 163 | slots.append(line.split()[5]) | 170 | slots.append(line.split()[5]) |
235 | 164 | return slots | 171 | return slots |
236 | 165 | 172 | ||
237 | 166 | 173 | ||
238 | 167 | class HWCronArgumentParser(argparse.ArgumentParser): | 174 | class HWCronArgumentParser(argparse.ArgumentParser): |
245 | 168 | def __init__( | 175 | def __init__(self, def_write_file=None, *args, **kwargs): |
240 | 169 | self, | ||
241 | 170 | def_write_file=None, | ||
242 | 171 | *args, | ||
243 | 172 | **kwargs | ||
244 | 173 | ): | ||
246 | 174 | super().__init__( | 176 | super().__init__( |
250 | 175 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, | 177 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, *args, **kwargs |
248 | 176 | *args, | ||
249 | 177 | **kwargs | ||
251 | 178 | ) | 178 | ) |
252 | 179 | # self.prog is populated by ArgumentParser | 179 | # self.prog is populated by ArgumentParser |
255 | 180 | self._def_write_file = \ | 180 | self._def_write_file = def_write_file or "/var/lib/nagios/{}.out".format( |
256 | 181 | def_write_file or '/var/lib/nagios/{}.out'.format(self.prog) | 181 | self.prog |
257 | 182 | ) | ||
258 | 182 | 183 | ||
259 | 183 | def parse_args(self, *args, **kwargs): | 184 | def parse_args(self, *args, **kwargs): |
260 | 184 | self.add_argument( | 185 | self.add_argument( |
262 | 185 | '-w', '--write', dest='write', type=str, | 186 | "-w", |
263 | 187 | "--write", | ||
264 | 188 | dest="write", | ||
265 | 189 | type=str, | ||
266 | 186 | default=self._def_write_file, | 190 | default=self._def_write_file, |
268 | 187 | help='cache tool output in this file', | 191 | help="cache tool output in this file", |
269 | 188 | ) | 192 | ) |
270 | 189 | super().parse_args(*args, **kwargs) | 193 | super().parse_args(*args, **kwargs) |
271 | 190 | 194 | ||
272 | 191 | 195 | ||
273 | 192 | class HWCheckArgumentParser(argparse.ArgumentParser): | 196 | class HWCheckArgumentParser(argparse.ArgumentParser): |
280 | 193 | def __init__( | 197 | def __init__(self, def_input_file=None, *args, **kwargs): |
275 | 194 | self, | ||
276 | 195 | def_input_file=None, | ||
277 | 196 | *args, | ||
278 | 197 | **kwargs | ||
279 | 198 | ): | ||
281 | 199 | super().__init__( | 198 | super().__init__( |
285 | 200 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, | 199 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, *args, **kwargs |
283 | 201 | *args, | ||
284 | 202 | **kwargs | ||
286 | 203 | ) | 200 | ) |
287 | 204 | # self.prog is populated by ArgumentParser | 201 | # self.prog is populated by ArgumentParser |
290 | 205 | self._def_input_file = \ | 202 | self._def_input_file = def_input_file or "/var/lib/nagios/{}.out".format( |
291 | 206 | def_input_file or '/var/lib/nagios/{}.out'.format(self.prog) | 203 | self.prog |
292 | 204 | ) | ||
293 | 207 | 205 | ||
294 | 208 | def parse_args(self, *args, **kwargs): | 206 | def parse_args(self, *args, **kwargs): |
295 | 209 | self.add_argument( | 207 | self.add_argument( |
297 | 210 | '-i', '--input', dest='input_file', type=str, | 208 | "-i", |
298 | 209 | "--input", | ||
299 | 210 | dest="input_file", | ||
300 | 211 | type=str, | ||
301 | 211 | default=self._def_input_file, | 212 | default=self._def_input_file, |
303 | 212 | help='read cached tool output from this file', | 213 | help="read cached tool output from this file", |
304 | 213 | ) | 214 | ) |
305 | 214 | super().parse_args(*args, **kwargs) | 215 | super().parse_args(*args, **kwargs) |
306 | 215 | 216 | ||
307 | 216 | 217 | ||
308 | 217 | class HPArgumentParser(HWCronArgumentParser): | 218 | class HPArgumentParser(HWCronArgumentParser): |
315 | 218 | def __init__( | 219 | def __init__(self, def_exclude_file=None, *args, **kwargs): |
310 | 219 | self, | ||
311 | 220 | def_exclude_file=None, | ||
312 | 221 | *args, | ||
313 | 222 | **kwargs | ||
314 | 223 | ): | ||
316 | 224 | super().__init__(*args, **kwargs) | 220 | super().__init__(*args, **kwargs) |
319 | 225 | self._def_exclude_file = \ | 221 | self._def_exclude_file = ( |
320 | 226 | def_exclude_file or '/etc/nagios/{}.exclude.yaml'.format(self.prog) | 222 | def_exclude_file or "/etc/nagios/{}.exclude.yaml".format(self.prog) |
321 | 223 | ) | ||
322 | 227 | 224 | ||
323 | 228 | def _expired(self, exclusion): | 225 | def _expired(self, exclusion): |
325 | 229 | return 'expires' in exclusion and exclusion['expires'] < datetime.now() | 226 | return "expires" in exclusion and exclusion["expires"] < datetime.now() |
326 | 230 | 227 | ||
327 | 231 | def parse_args(self, *args, **kwargs): | 228 | def parse_args(self, *args, **kwargs): |
328 | 232 | self.add_argument( | 229 | self.add_argument( |
331 | 233 | '--debug', dest='debug', action='store_true', | 230 | "--debug", dest="debug", action="store_true", help="Extra debugging", |
330 | 234 | help='Extra debugging', | ||
332 | 235 | ) | 231 | ) |
333 | 236 | 232 | ||
334 | 237 | self.add_argument( | 233 | self.add_argument( |
337 | 238 | '--exclude', dest='exclude', type=str, action='append', | 234 | "--exclude", |
338 | 239 | help='Errors to ignore (multiple)', | 235 | dest="exclude", |
339 | 236 | type=str, | ||
340 | 237 | action="append", | ||
341 | 238 | help="Errors to ignore (multiple)", | ||
342 | 240 | ) | 239 | ) |
343 | 241 | 240 | ||
344 | 242 | self.add_argument( | 241 | self.add_argument( |
346 | 243 | '--exclude-file', dest='exclude_file', type=str, | 242 | "--exclude-file", |
347 | 243 | dest="exclude_file", | ||
348 | 244 | type=str, | ||
349 | 244 | default=self._def_exclude_file, | 245 | default=self._def_exclude_file, |
351 | 245 | help='YAML file with errors to ignore', | 246 | help="YAML file with errors to ignore", |
352 | 246 | ) | 247 | ) |
353 | 247 | 248 | ||
354 | 248 | # Ensure we initialize a namespace if needed, | 249 | # Ensure we initialize a namespace if needed, |
355 | 249 | # and have a reference to it | 250 | # and have a reference to it |
358 | 250 | namespace = kwargs.get('namespace') or argparse.Namespace() | 251 | namespace = kwargs.get("namespace") or argparse.Namespace() |
359 | 251 | kwargs['namespace'] = namespace | 252 | kwargs["namespace"] = namespace |
360 | 252 | # now parse args and put them in the namespace | 253 | # now parse args and put them in the namespace |
361 | 253 | super().parse_args(*args, **kwargs) | 254 | super().parse_args(*args, **kwargs) |
362 | 254 | 255 | ||
363 | @@ -258,6 +259,6 @@ class HPArgumentParser(HWCronArgumentParser): | |||
364 | 258 | with open(namespace.exclude_file) as f: | 259 | with open(namespace.exclude_file) as f: |
365 | 259 | for i in yaml.safe_load(f): | 260 | for i in yaml.safe_load(f): |
366 | 260 | if not self._expired(i): | 261 | if not self._expired(i): |
368 | 261 | namespace.exclude.append(i['error']) | 262 | namespace.exclude.append(i["error"]) |
369 | 262 | 263 | ||
370 | 263 | return namespace | 264 | return namespace |
371 | diff --git a/src/files/hplog/cron_hplog.py b/src/files/hplog/cron_hplog.py | |||
372 | index d2b21cf..57b52e2 100755 | |||
373 | --- a/src/files/hplog/cron_hplog.py | |||
374 | +++ b/src/files/hplog/cron_hplog.py | |||
375 | @@ -25,7 +25,7 @@ try: | |||
376 | 25 | except ImportError: | 25 | except ImportError: |
377 | 26 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests | 26 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests |
378 | 27 | common_libs_dir = os.path.abspath( | 27 | common_libs_dir = os.path.abspath( |
380 | 28 | os.path.join(os.path.dirname(__file__), '..', 'common') | 28 | os.path.join(os.path.dirname(__file__), "..", "common") |
381 | 29 | ) | 29 | ) |
382 | 30 | if common_libs_dir not in sys.path: | 30 | if common_libs_dir not in sys.path: |
383 | 31 | sys.path.append(common_libs_dir) | 31 | sys.path.append(common_libs_dir) |
384 | @@ -37,35 +37,43 @@ except ImportError: | |||
385 | 37 | ) | 37 | ) |
386 | 38 | 38 | ||
387 | 39 | FLAG_PROCESSOR = { | 39 | FLAG_PROCESSOR = { |
392 | 40 | 't': 'parse_temperature', | 40 | "t": "parse_temperature", |
393 | 41 | 'f': 'parse_fans', | 41 | "f": "parse_fans", |
394 | 42 | 'p': 'parse_power', | 42 | "p": "parse_power", |
395 | 43 | 'v': 'save_log', | 43 | "v": "save_log", |
396 | 44 | } | 44 | } |
400 | 45 | OUTPUT_FILE = '/var/lib/nagios/hplog.out' | 45 | OUTPUT_FILE = "/var/lib/nagios/hplog.out" |
401 | 46 | EXCLUDE_FILE = '/etc/nagios/hplog.exclude.yaml' | 46 | EXCLUDE_FILE = "/etc/nagios/hplog.exclude.yaml" |
402 | 47 | DEBUG_FILE = '/var/lib/nagios/hplog.debug' | 47 | DEBUG_FILE = "/var/lib/nagios/hplog.debug" |
403 | 48 | 48 | ||
404 | 49 | 49 | ||
405 | 50 | def parse_args(argv=None): | 50 | def parse_args(argv=None): |
406 | 51 | parser = HPArgumentParser( | 51 | parser = HPArgumentParser( |
410 | 52 | prog='cron_hplog', | 52 | prog="cron_hplog", def_write_file=OUTPUT_FILE, def_exclude_file=EXCLUDE_FILE |
408 | 53 | def_write_file=OUTPUT_FILE, | ||
409 | 54 | def_exclude_file=EXCLUDE_FILE | ||
411 | 55 | ) | 53 | ) |
412 | 56 | parser.add_argument( | 54 | parser.add_argument( |
416 | 57 | '-f', '--hplog_flags', dest='hplog_flags', type=str, | 55 | "-f", |
417 | 58 | default=','.join(FLAG_PROCESSOR.keys()), | 56 | "--hplog_flags", |
418 | 59 | help='Flags to call hplog with', | 57 | dest="hplog_flags", |
419 | 58 | type=str, | ||
420 | 59 | default=",".join(FLAG_PROCESSOR.keys()), | ||
421 | 60 | help="Flags to call hplog with", | ||
422 | 60 | ) | 61 | ) |
423 | 61 | parser.add_argument( | 62 | parser.add_argument( |
425 | 62 | '-l', '--log_path', dest='log_path', type=str, | 63 | "-l", |
426 | 64 | "--log_path", | ||
427 | 65 | dest="log_path", | ||
428 | 66 | type=str, | ||
429 | 63 | default=DEBUG_FILE, | 67 | default=DEBUG_FILE, |
431 | 64 | help='Where to write hplog -v output for troubleshooting', | 68 | help="Where to write hplog -v output for troubleshooting", |
432 | 65 | ) | 69 | ) |
433 | 66 | parser.add_argument( | 70 | parser.add_argument( |
436 | 67 | '-s', '--single_psu', dest='single_psu', action='store_true', | 71 | "-s", |
437 | 68 | help='Do not alert on lack of PSU redundancy', default=False, | 72 | "--single_psu", |
438 | 73 | dest="single_psu", | ||
439 | 74 | action="store_true", | ||
440 | 75 | help="Do not alert on lack of PSU redundancy", | ||
441 | 76 | default=False, | ||
442 | 69 | ) | 77 | ) |
443 | 70 | return parser.parse_args(args=argv) | 78 | return parser.parse_args(args=argv) |
444 | 71 | 79 | ||
445 | @@ -77,15 +85,16 @@ def call_hplog(flag): | |||
446 | 77 | The output of this cron script will be checked by | 85 | The output of this cron script will be checked by |
447 | 78 | nagios via check_hplog.py. The | 86 | nagios via check_hplog.py. The |
448 | 79 | """ | 87 | """ |
450 | 80 | env = 'export PATH=$PATH:/usr/sbin:/sbin' | 88 | env = "export PATH=$PATH:/usr/sbin:/sbin" |
451 | 81 | 89 | ||
453 | 82 | cmd = 'hplog -{}'.format(flag) | 90 | cmd = "hplog -{}".format(flag) |
454 | 83 | try: | 91 | try: |
457 | 84 | cmdline = '{}; {}'.format(env, cmd) | 92 | cmdline = "{}; {}".format(env, cmd) |
458 | 85 | output = subprocess.check_output(cmdline, shell=True).decode('UTF-8') | 93 | output = subprocess.check_output(cmdline, shell=True).decode("UTF-8") |
459 | 86 | except subprocess.CalledProcessError as e: | 94 | except subprocess.CalledProcessError as e: |
462 | 87 | return ('Failed running command "{}" Return Code {}: {}' | 95 | return 'Failed running command "{}" Return Code {}: {}' "".format( |
463 | 88 | ''.format(cmd, e.returncode, e.output)) | 96 | cmd, e.returncode, e.output |
464 | 97 | ) | ||
465 | 89 | 98 | ||
466 | 90 | funcname = FLAG_PROCESSOR[flag] | 99 | funcname = FLAG_PROCESSOR[flag] |
467 | 91 | return globals()[funcname](output) | 100 | return globals()[funcname](output) |
468 | @@ -106,10 +115,12 @@ def parse_temperature(result): | |||
469 | 106 | 7 Basic Sensor CPU (2) Normal ---F/---C 260F/127C | 115 | 7 Basic Sensor CPU (2) Normal ---F/---C 260F/127C |
470 | 107 | """ | 116 | """ |
471 | 108 | input_file = result.splitlines() | 117 | input_file = result.splitlines() |
473 | 109 | if os.path.isfile('/etc/nagios/skip-cat-hp-temperature.txt'): | 118 | if os.path.isfile("/etc/nagios/skip-cat-hp-temperature.txt"): |
474 | 110 | return | 119 | return |
475 | 111 | header_line = input_file.pop(0).strip() | 120 | header_line = input_file.pop(0).strip() |
477 | 112 | if header_line != "ID TYPE LOCATION STATUS CURRENT THRESHOLD": # noqa E501 | 121 | if ( |
478 | 122 | header_line != "ID TYPE LOCATION STATUS CURRENT THRESHOLD" | ||
479 | 123 | ): # noqa E501 | ||
480 | 113 | return "UNKNOWN Unrecognised header line in 'hplog -t' output" | 124 | return "UNKNOWN Unrecognised header line in 'hplog -t' output" |
481 | 114 | for line in input_file: | 125 | for line in input_file: |
482 | 115 | line = line.rstrip() | 126 | line = line.rstrip() |
483 | @@ -123,11 +134,11 @@ def parse_temperature(result): | |||
484 | 123 | temp_current = line[42:51].split("/")[1].strip() | 134 | temp_current = line[42:51].split("/")[1].strip() |
485 | 124 | temp_threshold = line[52:].split("/")[1].strip() | 135 | temp_threshold = line[52:].split("/")[1].strip() |
486 | 125 | if temp_status not in ["Normal", "Nominal", "Absent"]: | 136 | if temp_status not in ["Normal", "Nominal", "Absent"]: |
492 | 126 | return ( | 137 | return "%s: temperature is '%s' (%s / %s)" % ( |
493 | 127 | "%s: temperature is '%s' (%s / %s)" % (temp_location, | 138 | temp_location, |
494 | 128 | temp_status, | 139 | temp_status, |
495 | 129 | temp_current, | 140 | temp_current, |
496 | 130 | temp_threshold) | 141 | temp_threshold, |
497 | 131 | ) | 142 | ) |
498 | 132 | 143 | ||
499 | 133 | return | 144 | return |
500 | @@ -147,13 +158,15 @@ def parse_fans(result): | |||
501 | 147 | 6 Var. Speed Processor Zone Normal Yes Low ( 36) | 158 | 6 Var. Speed Processor Zone Normal Yes Low ( 36) |
502 | 148 | """ | 159 | """ |
503 | 149 | input_file = result.splitlines() | 160 | input_file = result.splitlines() |
505 | 150 | if os.path.isfile('/etc/nagios/skip-cat-hp-fans.txt'): | 161 | if os.path.isfile("/etc/nagios/skip-cat-hp-fans.txt"): |
506 | 151 | return | 162 | return |
507 | 152 | header_line = input_file.pop(0).strip() | 163 | header_line = input_file.pop(0).strip() |
509 | 153 | if header_line != "ID TYPE LOCATION STATUS REDUNDANT FAN SPEED": # noqa E501 | 164 | if ( |
510 | 165 | header_line != "ID TYPE LOCATION STATUS REDUNDANT FAN SPEED" | ||
511 | 166 | ): # noqa E501 | ||
512 | 154 | return "UNKNOWN Unrecognised header line in 'hplog -f' output" | 167 | return "UNKNOWN Unrecognised header line in 'hplog -f' output" |
513 | 155 | 168 | ||
515 | 156 | ignore_file = '/etc/nagios/ignores/ignores-cat-hp-fans.txt' | 169 | ignore_file = "/etc/nagios/ignores/ignores-cat-hp-fans.txt" |
516 | 157 | ignores = read_ignore_file(ignore_file) | 170 | ignores = read_ignore_file(ignore_file) |
517 | 158 | for line in input_file: | 171 | for line in input_file: |
518 | 159 | line = line.rstrip() | 172 | line = line.rstrip() |
519 | @@ -168,12 +181,7 @@ def parse_fans(result): | |||
520 | 168 | fan_speed = line[51:].strip() | 181 | fan_speed = line[51:].strip() |
521 | 169 | 182 | ||
522 | 170 | (return_now, msg) = process_fan_line( | 183 | (return_now, msg) = process_fan_line( |
529 | 171 | fan_type, | 184 | fan_type, fan_location, fan_status, fan_speed, fan_redundant, ignores |
524 | 172 | fan_location, | ||
525 | 173 | fan_status, | ||
526 | 174 | fan_speed, | ||
527 | 175 | fan_redundant, | ||
528 | 176 | ignores | ||
530 | 177 | ) | 185 | ) |
531 | 178 | if return_now: | 186 | if return_now: |
532 | 179 | return msg | 187 | return msg |
533 | @@ -182,36 +190,37 @@ def parse_fans(result): | |||
534 | 182 | 190 | ||
535 | 183 | 191 | ||
536 | 184 | def process_fan_line( | 192 | def process_fan_line( |
543 | 185 | fan_type, | 193 | fan_type, fan_location, fan_status, fan_speed, fan_redundant, ignores |
538 | 186 | fan_location, | ||
539 | 187 | fan_status, | ||
540 | 188 | fan_speed, | ||
541 | 189 | fan_redundant, | ||
542 | 190 | ignores | ||
544 | 191 | ): | 194 | ): |
545 | 192 | if fan_type == "Basic Fan": | 195 | if fan_type == "Basic Fan": |
547 | 193 | return(False, None) | 196 | return (False, None) |
548 | 194 | 197 | ||
549 | 195 | if fan_type not in ["Var. Speed", "Pwr. Supply", "Auto. Speed"]: | 198 | if fan_type not in ["Var. Speed", "Pwr. Supply", "Auto. Speed"]: |
552 | 196 | return(True, "UNKNOWN %s: Unrecognised fan type '%s'" % (fan_location, | 199 | return ( |
553 | 197 | fan_type)) | 200 | True, |
554 | 201 | "UNKNOWN %s: Unrecognised fan type '%s'" % (fan_location, fan_type), | ||
555 | 202 | ) | ||
556 | 198 | 203 | ||
557 | 199 | if fan_status not in ["Normal", "Nominal"]: | 204 | if fan_status not in ["Normal", "Nominal"]: |
562 | 200 | err = "%s: fans are '%s' (%s / Redundant: %s)" % (fan_location, | 205 | err = "%s: fans are '%s' (%s / Redundant: %s)" % ( |
563 | 201 | fan_status, | 206 | fan_location, |
564 | 202 | fan_speed, | 207 | fan_status, |
565 | 203 | fan_redundant) | 208 | fan_speed, |
566 | 209 | fan_redundant, | ||
567 | 210 | ) | ||
568 | 204 | if not ignore(err, ignores): | 211 | if not ignore(err, ignores): |
570 | 205 | return(True, err) | 212 | return (True, err) |
571 | 206 | 213 | ||
572 | 207 | if fan_redundant not in ["Yes", "N/A"] and fan_type == "Var. Speed": | 214 | if fan_redundant not in ["Yes", "N/A"] and fan_type == "Var. Speed": |
576 | 208 | err = "%s: fans are not redundant (%s / Status: %s)" % (fan_location, | 215 | err = "%s: fans are not redundant (%s / Status: %s)" % ( |
577 | 209 | fan_speed, | 216 | fan_location, |
578 | 210 | fan_redundant) | 217 | fan_speed, |
579 | 218 | fan_redundant, | ||
580 | 219 | ) | ||
581 | 211 | if not ignore(err, ignores): | 220 | if not ignore(err, ignores): |
583 | 212 | return(True, err) | 221 | return (True, err) |
584 | 213 | 222 | ||
586 | 214 | return(False, None) | 223 | return (False, None) |
587 | 215 | 224 | ||
588 | 216 | 225 | ||
589 | 217 | def parse_power(result): | 226 | def parse_power(result): |
590 | @@ -224,13 +233,13 @@ def parse_power(result): | |||
591 | 224 | 2 Standard Pwr. Supply Bay Normal Yes | 233 | 2 Standard Pwr. Supply Bay Normal Yes |
592 | 225 | """ | 234 | """ |
593 | 226 | input_file = result.splitlines() | 235 | input_file = result.splitlines() |
595 | 227 | if os.path.isfile('/etc/nagios/skip-cat-hp-power.txt'): | 236 | if os.path.isfile("/etc/nagios/skip-cat-hp-power.txt"): |
596 | 228 | return | 237 | return |
597 | 229 | header_line = input_file.pop(0).strip() | 238 | header_line = input_file.pop(0).strip() |
598 | 230 | if header_line != "ID TYPE LOCATION STATUS REDUNDANT": | 239 | if header_line != "ID TYPE LOCATION STATUS REDUNDANT": |
599 | 231 | return "UNKNOWN Unrecognised header line in 'hplog -p' output" | 240 | return "UNKNOWN Unrecognised header line in 'hplog -p' output" |
600 | 232 | 241 | ||
602 | 233 | ignore_file = '/etc/nagios/ignores/ignores-cat-hp-power.txt' | 242 | ignore_file = "/etc/nagios/ignores/ignores-cat-hp-power.txt" |
603 | 234 | ignores = read_ignore_file(ignore_file) | 243 | ignores = read_ignore_file(ignore_file) |
604 | 235 | 244 | ||
605 | 236 | for line in input_file: | 245 | for line in input_file: |
606 | @@ -244,10 +253,9 @@ def parse_power(result): | |||
607 | 244 | power_status = line[33:40].strip() | 253 | power_status = line[33:40].strip() |
608 | 245 | # power_redundant = line[41:50].strip() | 254 | # power_redundant = line[41:50].strip() |
609 | 246 | if power_type != "Standard": | 255 | if power_type != "Standard": |
612 | 247 | err = "%s: Unrecognised power type '%s'" % (power_location, | 256 | err = "%s: Unrecognised power type '%s'" % (power_location, power_type) |
611 | 248 | power_type) | ||
613 | 249 | if not ignore(err, ignores): | 257 | if not ignore(err, ignores): |
615 | 250 | return 'UNKNOWN {}'.format(err) | 258 | return "UNKNOWN {}".format(err) |
616 | 251 | if not ARGS.single_psu and power_status not in ["Normal", "Nominal"]: | 259 | if not ARGS.single_psu and power_status not in ["Normal", "Nominal"]: |
617 | 252 | err = "%s: power supply is '%s'" % (power_location, power_status) | 260 | err = "%s: power supply is '%s'" % (power_location, power_status) |
618 | 253 | if not ignore(err, ignores): | 261 | if not ignore(err, ignores): |
619 | @@ -259,7 +267,7 @@ def save_log(result): | |||
620 | 259 | """ | 267 | """ |
621 | 260 | Save full hplog -v output for troubleshooting after alert | 268 | Save full hplog -v output for troubleshooting after alert |
622 | 261 | """ | 269 | """ |
624 | 262 | with open(ARGS.log_path, 'w') as f: | 270 | with open(ARGS.log_path, "w") as f: |
625 | 263 | f.write(result) | 271 | f.write(result) |
626 | 264 | return | 272 | return |
627 | 265 | 273 | ||
628 | @@ -270,35 +278,34 @@ def main(): | |||
629 | 270 | 278 | ||
630 | 271 | try: | 279 | try: |
631 | 272 | # This matches hpasmlited on latest packages for bionic on <= gen9 | 280 | # This matches hpasmlited on latest packages for bionic on <= gen9 |
633 | 273 | subprocess.check_call('ps -ef | grep -q hp[a]sm', shell=True) | 281 | subprocess.check_call("ps -ef | grep -q hp[a]sm", shell=True) |
634 | 274 | except subprocess.CalledProcessError as e: | 282 | except subprocess.CalledProcessError as e: |
635 | 275 | msg = ( | 283 | msg = ( |
638 | 276 | 'UNKNOWN hp[a]sm daemon not found running, cannot run hplog: ' | 284 | "UNKNOWN hp[a]sm daemon not found running, cannot run hplog: " |
639 | 277 | '{}'.format(e.output) | 285 | "{}".format(e.output) |
640 | 278 | ) | 286 | ) |
641 | 279 | exit = 3 | 287 | exit = 3 |
642 | 280 | else: | 288 | else: |
643 | 281 | errors = [] | 289 | errors = [] |
645 | 282 | for flag in ARGS.hplog_flags.split(','): | 290 | for flag in ARGS.hplog_flags.split(","): |
646 | 283 | log_output = call_hplog(flag) | 291 | log_output = call_hplog(flag) |
647 | 284 | if log_output: | 292 | if log_output: |
648 | 285 | errors.append(log_output) | 293 | errors.append(log_output) |
649 | 286 | 294 | ||
650 | 287 | if len(errors) > 0: | 295 | if len(errors) > 0: |
653 | 288 | msg = ('CRIT {} error(s): {}' | 296 | msg = "CRIT {} error(s): {}".format(len(errors), " - ".join(errors)) |
652 | 289 | ''.format(len(errors), ' - '.join(errors))) | ||
654 | 290 | exit = 2 | 297 | exit = 2 |
655 | 291 | else: | 298 | else: |
657 | 292 | msg = 'OK No errors found' | 299 | msg = "OK No errors found" |
658 | 293 | exit = 0 | 300 | exit = 0 |
659 | 294 | 301 | ||
660 | 295 | if ARGS.write: | 302 | if ARGS.write: |
662 | 296 | with open(ARGS.write, 'w') as f: | 303 | with open(ARGS.write, "w") as f: |
663 | 297 | f.write(msg) | 304 | f.write(msg) |
664 | 298 | else: | 305 | else: |
665 | 299 | print(msg) | 306 | print(msg) |
666 | 300 | sys.exit(exit) | 307 | sys.exit(exit) |
667 | 301 | 308 | ||
668 | 302 | 309 | ||
670 | 303 | if __name__ == '__main__': | 310 | if __name__ == "__main__": |
671 | 304 | main() | 311 | main() |
672 | diff --git a/src/files/ilorest/check_ilorest.py b/src/files/ilorest/check_ilorest.py | |||
673 | index 430c632..8b477c8 100755 | |||
674 | --- a/src/files/ilorest/check_ilorest.py | |||
675 | +++ b/src/files/ilorest/check_ilorest.py | |||
676 | @@ -10,10 +10,7 @@ | |||
677 | 10 | 10 | ||
678 | 11 | from optparse import OptionParser | 11 | from optparse import OptionParser |
679 | 12 | 12 | ||
684 | 13 | from nagios_plugin3 import (check_file_freshness, | 13 | from nagios_plugin3 import check_file_freshness, try_check, WarnError, CriticalError |
681 | 14 | try_check, | ||
682 | 15 | WarnError, | ||
683 | 16 | CriticalError) | ||
685 | 17 | 14 | ||
686 | 18 | 15 | ||
687 | 19 | ############################################################################### | 16 | ############################################################################### |
688 | @@ -52,16 +49,16 @@ def main(): | |||
689 | 52 | help="freshness time limit [default=%default]", | 49 | help="freshness time limit [default=%default]", |
690 | 53 | metavar="SECONDS", | 50 | metavar="SECONDS", |
691 | 54 | default=1200, | 51 | default=1200, |
693 | 55 | type=int | 52 | type=int, |
694 | 56 | ) | 53 | ) |
695 | 57 | parser.add_option( | 54 | parser.add_option( |
697 | 58 | "-f", "--filename", | 55 | "-f", |
698 | 56 | "--filename", | ||
699 | 59 | dest="input_file", | 57 | dest="input_file", |
702 | 60 | help=('file containing the output of ' | 58 | help=("file containing the output of cron_ilorest.py [default=%default]"), |
701 | 61 | 'cron_ilorest.py [default=%default]'), | ||
703 | 62 | metavar="FILE", | 59 | metavar="FILE", |
704 | 63 | default="/var/lib/nagios/ilorest.nagios", | 60 | default="/var/lib/nagios/ilorest.nagios", |
706 | 64 | type=str | 61 | type=str, |
707 | 65 | ) | 62 | ) |
708 | 66 | 63 | ||
709 | 67 | (opts, args) = parser.parse_args() | 64 | (opts, args) = parser.parse_args() |
710 | diff --git a/src/files/ilorest/cron_ilorest.py b/src/files/ilorest/cron_ilorest.py | |||
711 | index 739438d..561d0aa 100755 | |||
712 | --- a/src/files/ilorest/cron_ilorest.py | |||
713 | +++ b/src/files/ilorest/cron_ilorest.py | |||
714 | @@ -18,7 +18,7 @@ try: | |||
715 | 18 | except ImportError: | 18 | except ImportError: |
716 | 19 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests | 19 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests |
717 | 20 | common_libs_dir = os.path.abspath( | 20 | common_libs_dir = os.path.abspath( |
719 | 21 | os.path.join(os.path.dirname(__file__), '..', 'common') | 21 | os.path.join(os.path.dirname(__file__), "..", "common") |
720 | 22 | ) | 22 | ) |
721 | 23 | if common_libs_dir not in sys.path: | 23 | if common_libs_dir not in sys.path: |
722 | 24 | sys.path.append(common_libs_dir) | 24 | sys.path.append(common_libs_dir) |
723 | @@ -26,16 +26,16 @@ except ImportError: | |||
724 | 26 | 26 | ||
725 | 27 | 27 | ||
726 | 28 | DEFAULT_SELECTORS = [ | 28 | DEFAULT_SELECTORS = [ |
733 | 29 | 'Chassis', | 29 | "Chassis", |
734 | 30 | 'HpeSmartStorage', | 30 | "HpeSmartStorage", |
735 | 31 | 'Memory', | 31 | "Memory", |
736 | 32 | 'Power', | 32 | "Power", |
737 | 33 | 'Processor', | 33 | "Processor", |
738 | 34 | 'Thermal', | 34 | "Thermal", |
739 | 35 | ] | 35 | ] |
740 | 36 | 36 | ||
743 | 37 | EXCLUDE_FILE = '/etc/nagios/cron_ilorest.exclude.yaml' | 37 | EXCLUDE_FILE = "/etc/nagios/cron_ilorest.exclude.yaml" |
744 | 38 | OUTPUT_FILE = '/var/lib/nagios/ilorest.nagios' | 38 | OUTPUT_FILE = "/var/lib/nagios/ilorest.nagios" |
745 | 39 | 39 | ||
746 | 40 | 40 | ||
747 | 41 | class CronILOrest: | 41 | class CronILOrest: |
748 | @@ -44,62 +44,64 @@ class CronILOrest: | |||
749 | 44 | 44 | ||
750 | 45 | def parse_args(self, argv=None): | 45 | def parse_args(self, argv=None): |
751 | 46 | parser = HPArgumentParser( | 46 | parser = HPArgumentParser( |
755 | 47 | prog='cron_ilorest', | 47 | prog="cron_ilorest", |
756 | 48 | description=('Convert the output of ilorest into an appropriate ' | 48 | description=( |
757 | 49 | 'Nagios status line'), | 49 | "Convert the output of ilorest into an appropriate " |
758 | 50 | "Nagios status line" | ||
759 | 51 | ), | ||
760 | 50 | def_write_file=OUTPUT_FILE, | 52 | def_write_file=OUTPUT_FILE, |
762 | 51 | def_exclude_file=EXCLUDE_FILE | 53 | def_exclude_file=EXCLUDE_FILE, |
763 | 52 | ) | 54 | ) |
764 | 53 | 55 | ||
765 | 54 | parser.add_argument( | 56 | parser.add_argument( |
767 | 55 | '--selectors', dest='selectors', type=str, nargs='+', | 57 | "--selectors", |
768 | 58 | dest="selectors", | ||
769 | 59 | type=str, | ||
770 | 60 | nargs="+", | ||
771 | 56 | default=DEFAULT_SELECTORS, | 61 | default=DEFAULT_SELECTORS, |
773 | 57 | help='iLO selectors to run', | 62 | help="iLO selectors to run", |
774 | 58 | ) | 63 | ) |
775 | 59 | 64 | ||
776 | 60 | return parser.parse_args(args=argv) | 65 | return parser.parse_args(args=argv) |
777 | 61 | 66 | ||
778 | 62 | def check_selector(self, selector): | 67 | def check_selector(self, selector): |
779 | 63 | if self.args.debug: | 68 | if self.args.debug: |
781 | 64 | print('Checking selector {}'.format(selector), file=sys.stderr) | 69 | print("Checking selector {}".format(selector), file=sys.stderr) |
782 | 65 | ilorest_output = self._get_json_ilorest_output(selector) | 70 | ilorest_output = self._get_json_ilorest_output(selector) |
783 | 66 | 71 | ||
784 | 67 | errors = [] | 72 | errors = [] |
785 | 68 | jsonidx = -1 | 73 | jsonidx = -1 |
786 | 69 | # Disregard the first chunk of data, it's banner/debug/etc | 74 | # Disregard the first chunk of data, it's banner/debug/etc |
788 | 70 | for jsondata in ilorest_output.split('\n{\n')[1:]: | 75 | for jsondata in ilorest_output.split("\n{\n")[1:]: |
789 | 71 | # The output will be one or more JSON defs | 76 | # The output will be one or more JSON defs |
790 | 72 | jsonidx += 1 | 77 | jsonidx += 1 |
792 | 73 | j = json.loads('{' + jsondata) | 78 | j = json.loads("{" + jsondata) |
793 | 74 | errors += self._walk_selector(j, [selector, str(jsonidx)]) | 79 | errors += self._walk_selector(j, [selector, str(jsonidx)]) |
794 | 75 | return errors | 80 | return errors |
795 | 76 | 81 | ||
796 | 77 | def _get_json_ilorest_output(self, selector): | 82 | def _get_json_ilorest_output(self, selector): |
799 | 78 | cmd = ['ilorest', 'list', '-j', '--selector={}'.format(selector)] | 83 | cmd = ["ilorest", "list", "-j", "--selector={}".format(selector)] |
800 | 79 | return check_output(cmd).decode('UTF-8') | 84 | return check_output(cmd).decode("UTF-8") |
801 | 80 | 85 | ||
802 | 81 | def _get_health_status_message(self, j, crumb_trail=[]): | 86 | def _get_health_status_message(self, j, crumb_trail=[]): |
812 | 82 | desc = j['Name'] | 87 | desc = j["Name"] |
813 | 83 | if 'SerialNumber' in j: | 88 | if "SerialNumber" in j: |
814 | 84 | desc += ' ({})'.format(j['SerialNumber']) | 89 | desc += " ({})".format(j["SerialNumber"]) |
815 | 85 | state = j.get('Status', 'null').get('State', 'unknown') | 90 | state = j.get("Status", "null").get("State", "unknown") |
816 | 86 | health = j.get('Status', 'null').get('Health', 'unknown') | 91 | health = j.get("Status", "null").get("Health", "unknown") |
817 | 87 | msg = '{} ({}): {} health {}'.format(' '.join(crumb_trail), | 92 | msg = "{} ({}): {} health {}".format(" ".join(crumb_trail), desc, state, health) |
809 | 88 | desc, | ||
810 | 89 | state, | ||
811 | 90 | health) | ||
818 | 91 | if self.args.debug: | 93 | if self.args.debug: |
819 | 92 | print(msg, file=sys.stderr) | 94 | print(msg, file=sys.stderr) |
820 | 93 | 95 | ||
821 | 94 | if msg in self.args.exclude and self.args.debug: | 96 | if msg in self.args.exclude and self.args.debug: |
823 | 95 | print('Ignoring excluded error: {}'.format(msg), file=sys.stderr) | 97 | print("Ignoring excluded error: {}".format(msg), file=sys.stderr) |
824 | 96 | return [] | 98 | return [] |
825 | 97 | else: | 99 | else: |
826 | 98 | return [msg] | 100 | return [msg] |
827 | 99 | 101 | ||
828 | 100 | def _walk_selector(self, j, crumb_trail=[]): | 102 | def _walk_selector(self, j, crumb_trail=[]): |
829 | 101 | errors = [] | 103 | errors = [] |
831 | 102 | if j.get('Status') and j.get('Status').get('Health') != 'OK': | 104 | if j.get("Status") and j.get("Status").get("Health") != "OK": |
832 | 103 | errors.extend(self._get_health_status_message(j, crumb_trail)) | 105 | errors.extend(self._get_health_status_message(j, crumb_trail)) |
833 | 104 | 106 | ||
834 | 105 | for keyname in j.keys(): | 107 | for keyname in j.keys(): |
835 | @@ -110,31 +112,31 @@ class CronILOrest: | |||
836 | 110 | for i in range(len(j[keyname])): | 112 | for i in range(len(j[keyname])): |
837 | 111 | if type(j[keyname][i]) != dict: | 113 | if type(j[keyname][i]) != dict: |
838 | 112 | continue | 114 | continue |
840 | 113 | if 'Status' not in j[keyname][i]: | 115 | if "Status" not in j[keyname][i]: |
841 | 114 | continue | 116 | continue |
844 | 115 | self._walk_selector(j[keyname][i], | 117 | self._walk_selector(j[keyname][i], (crumb_trail + [keyname, str(i)])) |
843 | 116 | (crumb_trail + [keyname, str(i)])) | ||
845 | 117 | return errors | 118 | return errors |
846 | 118 | 119 | ||
847 | 119 | 120 | ||
848 | 120 | def main(argv=None): | 121 | def main(argv=None): |
849 | 121 | cronilorest = CronILOrest(argv) | 122 | cronilorest = CronILOrest(argv) |
850 | 122 | 123 | ||
853 | 123 | errors = [cronilorest.check_selector(selector) | 124 | errors = [ |
854 | 124 | for selector in cronilorest.args.selectors] | 125 | cronilorest.check_selector(selector) for selector in cronilorest.args.selectors |
855 | 126 | ] | ||
856 | 125 | 127 | ||
857 | 126 | if len(errors) > 0: | 128 | if len(errors) > 0: |
859 | 127 | msg = 'CRIT {} error(s): {}'.format(len(errors), ' - '.join(errors)) | 129 | msg = "CRIT {} error(s): {}".format(len(errors), " - ".join(errors)) |
860 | 128 | exit = 2 | 130 | exit = 2 |
861 | 129 | else: | 131 | else: |
863 | 130 | msg = 'OK No errors found' | 132 | msg = "OK No errors found" |
864 | 131 | exit = 0 | 133 | exit = 0 |
865 | 132 | 134 | ||
866 | 133 | if cronilorest.args.write: | 135 | if cronilorest.args.write: |
868 | 134 | if cronilorest.args.write == '-': | 136 | if cronilorest.args.write == "-": |
869 | 135 | print(msg) | 137 | print(msg) |
870 | 136 | else: | 138 | else: |
872 | 137 | with open(cronilorest.args.write, 'w') as f: | 139 | with open(cronilorest.args.write, "w") as f: |
873 | 138 | f.write(msg) | 140 | f.write(msg) |
874 | 139 | else: | 141 | else: |
875 | 140 | # This should never happen since 'write' has a default value | 142 | # This should never happen since 'write' has a default value |
876 | @@ -142,5 +144,5 @@ def main(argv=None): | |||
877 | 142 | sys.exit(exit) | 144 | sys.exit(exit) |
878 | 143 | 145 | ||
879 | 144 | 146 | ||
881 | 145 | if __name__ == '__main__': | 147 | if __name__ == "__main__": |
882 | 146 | main(sys.argv[1:]) | 148 | main(sys.argv[1:]) |
883 | diff --git a/src/files/ipmi/check_ipmi.py b/src/files/ipmi/check_ipmi.py | |||
884 | index fafd774..1fbe34c 100644 | |||
885 | --- a/src/files/ipmi/check_ipmi.py | |||
886 | +++ b/src/files/ipmi/check_ipmi.py | |||
887 | @@ -3,36 +3,42 @@ | |||
888 | 3 | 3 | ||
889 | 4 | import os | 4 | import os |
890 | 5 | 5 | ||
894 | 6 | from nagios_plugin3 import CriticalError, UnknownError, WarnError, check_file_freshness, try_check | 6 | from nagios_plugin3 import ( |
895 | 7 | 7 | CriticalError, | |
896 | 8 | OUTPUT_FILE = '/var/lib/nagios/ipmi_sensors.out' | 8 | UnknownError, |
897 | 9 | WarnError, | ||
898 | 10 | check_file_freshness, | ||
899 | 11 | try_check, | ||
900 | 12 | ) | ||
901 | 13 | |||
902 | 14 | OUTPUT_FILE = "/var/lib/nagios/ipmi_sensors.out" | ||
903 | 9 | NAGIOS_ERRORS = { | 15 | NAGIOS_ERRORS = { |
907 | 10 | 'CRITICAL': CriticalError, | 16 | "CRITICAL": CriticalError, |
908 | 11 | 'UNKNOWN': UnknownError, | 17 | "UNKNOWN": UnknownError, |
909 | 12 | 'WARNING': WarnError, | 18 | "WARNING": WarnError, |
910 | 13 | } | 19 | } |
911 | 14 | 20 | ||
912 | 15 | 21 | ||
913 | 16 | def parse_output(): | 22 | def parse_output(): |
914 | 17 | if not os.path.exists(OUTPUT_FILE): | 23 | if not os.path.exists(OUTPUT_FILE): |
916 | 18 | raise UnknownError('UNKNOWN: {} does not exist (yet?)'.format(OUTPUT_FILE)) | 24 | raise UnknownError("UNKNOWN: {} does not exist (yet?)".format(OUTPUT_FILE)) |
917 | 19 | 25 | ||
918 | 20 | # Check if file is newer than 10min | 26 | # Check if file is newer than 10min |
919 | 21 | try_check(check_file_freshness, OUTPUT_FILE) | 27 | try_check(check_file_freshness, OUTPUT_FILE) |
920 | 22 | 28 | ||
921 | 23 | try: | 29 | try: |
923 | 24 | with open(OUTPUT_FILE, 'r') as fd: | 30 | with open(OUTPUT_FILE, "r") as fd: |
924 | 25 | output = fd.read() | 31 | output = fd.read() |
925 | 26 | except PermissionError as error: | 32 | except PermissionError as error: |
926 | 27 | raise UnknownError(error) | 33 | raise UnknownError(error) |
927 | 28 | 34 | ||
928 | 29 | for startline in NAGIOS_ERRORS: | 35 | for startline in NAGIOS_ERRORS: |
930 | 30 | if output.startswith('{}: '.format(startline)): | 36 | if output.startswith("{}: ".format(startline)): |
931 | 31 | func = NAGIOS_ERRORS[startline] | 37 | func = NAGIOS_ERRORS[startline] |
932 | 32 | raise func(output) | 38 | raise func(output) |
933 | 33 | 39 | ||
935 | 34 | print('OK: {}'.format(output)) | 40 | print("OK: {}".format(output)) |
936 | 35 | 41 | ||
937 | 36 | 42 | ||
939 | 37 | if __name__ == '__main__': | 43 | if __name__ == "__main__": |
940 | 38 | try_check(parse_output) | 44 | try_check(parse_output) |
941 | diff --git a/src/files/ipmi/cron_ipmi_sensors.py b/src/files/ipmi/cron_ipmi_sensors.py | |||
942 | index 2b6cdd5..aa3430d 100644 | |||
943 | --- a/src/files/ipmi/cron_ipmi_sensors.py | |||
944 | +++ b/src/files/ipmi/cron_ipmi_sensors.py | |||
945 | @@ -4,20 +4,20 @@ import os | |||
946 | 4 | import subprocess | 4 | import subprocess |
947 | 5 | import sys | 5 | import sys |
948 | 6 | 6 | ||
953 | 7 | CHECK_IPMI_PID = '/var/run/nagios/check_ipmi_sensors.pid' | 7 | CHECK_IPMI_PID = "/var/run/nagios/check_ipmi_sensors.pid" |
954 | 8 | OUTPUT_FILE = '/var/lib/nagios/ipmi_sensors.out' | 8 | OUTPUT_FILE = "/var/lib/nagios/ipmi_sensors.out" |
955 | 9 | TMP_OUTPUT_FILE = OUTPUT_FILE + '.tmp' | 9 | TMP_OUTPUT_FILE = OUTPUT_FILE + ".tmp" |
956 | 10 | CMD = '/usr/local/lib/nagios/plugins/check_ipmi_sensor' | 10 | CMD = "/usr/local/lib/nagios/plugins/check_ipmi_sensor" |
957 | 11 | NAGIOS_ERRORS = { | 11 | NAGIOS_ERRORS = { |
961 | 12 | 1: 'WARNING', | 12 | 1: "WARNING", |
962 | 13 | 2: 'CRITICAL', | 13 | 2: "CRITICAL", |
963 | 14 | 3: 'UNKNOWN', | 14 | 3: "UNKNOWN", |
964 | 15 | } | 15 | } |
965 | 16 | 16 | ||
966 | 17 | 17 | ||
967 | 18 | def write_output_file(output): | 18 | def write_output_file(output): |
968 | 19 | try: | 19 | try: |
970 | 20 | with open(TMP_OUTPUT_FILE, 'w') as fd: | 20 | with open(TMP_OUTPUT_FILE, "w") as fd: |
971 | 21 | fd.write(output) | 21 | fd.write(output) |
972 | 22 | except IOError as e: | 22 | except IOError as e: |
973 | 23 | print("Cannot write output file {}, error {}".format(TMP_OUTPUT_FILE, e)) | 23 | print("Cannot write output file {}, error {}".format(TMP_OUTPUT_FILE, e)) |
974 | @@ -29,16 +29,16 @@ def gather_metrics(): | |||
975 | 29 | # Check if a PID file exists | 29 | # Check if a PID file exists |
976 | 30 | if os.path.exists(CHECK_IPMI_PID): | 30 | if os.path.exists(CHECK_IPMI_PID): |
977 | 31 | # is the PID valid? | 31 | # is the PID valid? |
979 | 32 | with open(CHECK_IPMI_PID, 'r') as fd: | 32 | with open(CHECK_IPMI_PID, "r") as fd: |
980 | 33 | PID = fd.read() | 33 | PID = fd.read() |
982 | 34 | if PID not in os.listdir('/proc'): | 34 | if PID not in os.listdir("/proc"): |
983 | 35 | # PID file is invalid, remove it | 35 | # PID file is invalid, remove it |
984 | 36 | os.remove(CHECK_IPMI_PID) | 36 | os.remove(CHECK_IPMI_PID) |
985 | 37 | else: | 37 | else: |
986 | 38 | return | 38 | return |
987 | 39 | 39 | ||
988 | 40 | try: | 40 | try: |
990 | 41 | with open(CHECK_IPMI_PID, 'w') as fd: | 41 | with open(CHECK_IPMI_PID, "w") as fd: |
991 | 42 | fd.write(str(os.getpid())) | 42 | fd.write(str(os.getpid())) |
992 | 43 | except IOError as e: | 43 | except IOError as e: |
993 | 44 | # unable to write PID file, can't lock | 44 | # unable to write PID file, can't lock |
994 | @@ -49,17 +49,17 @@ def gather_metrics(): | |||
995 | 49 | if len(sys.argv) > 1: | 49 | if len(sys.argv) > 1: |
996 | 50 | cmdline.extend(sys.argv[1:]) | 50 | cmdline.extend(sys.argv[1:]) |
997 | 51 | try: | 51 | try: |
999 | 52 | output = subprocess.check_output(cmdline).decode('utf8') | 52 | output = subprocess.check_output(cmdline).decode("utf8") |
1000 | 53 | write_output_file(output) | 53 | write_output_file(output) |
1001 | 54 | except subprocess.CalledProcessError as error: | 54 | except subprocess.CalledProcessError as error: |
1004 | 55 | output = error.stdout.decode(errors='ignore') | 55 | output = error.stdout.decode(errors="ignore") |
1005 | 56 | write_output_file('{}: {}'.format(NAGIOS_ERRORS[error.returncode], output)) | 56 | write_output_file("{}: {}".format(NAGIOS_ERRORS[error.returncode], output)) |
1006 | 57 | except PermissionError as error: | 57 | except PermissionError as error: |
1008 | 58 | write_output_file('UNKNOWN: {}'.format(error)) | 58 | write_output_file("UNKNOWN: {}".format(error)) |
1009 | 59 | 59 | ||
1010 | 60 | # remove pid reference | 60 | # remove pid reference |
1011 | 61 | os.remove(CHECK_IPMI_PID) | 61 | os.remove(CHECK_IPMI_PID) |
1012 | 62 | 62 | ||
1013 | 63 | 63 | ||
1015 | 64 | if __name__ == '__main__': | 64 | if __name__ == "__main__": |
1016 | 65 | gather_metrics() | 65 | gather_metrics() |
1017 | diff --git a/src/files/mdadm/check_mdadm.py b/src/files/mdadm/check_mdadm.py | |||
1018 | index 29a82c6..c602877 100755 | |||
1019 | --- a/src/files/mdadm/check_mdadm.py | |||
1020 | +++ b/src/files/mdadm/check_mdadm.py | |||
1021 | @@ -12,38 +12,33 @@ try: | |||
1022 | 12 | except ImportError: | 12 | except ImportError: |
1023 | 13 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests | 13 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests |
1024 | 14 | common_libs_dir = os.path.abspath( | 14 | common_libs_dir = os.path.abspath( |
1026 | 15 | os.path.join(os.path.dirname(__file__), '..', 'common') | 15 | os.path.join(os.path.dirname(__file__), "..", "common") |
1027 | 16 | ) | 16 | ) |
1028 | 17 | if common_libs_dir not in sys.path: | 17 | if common_libs_dir not in sys.path: |
1029 | 18 | sys.path.append(common_libs_dir) | 18 | sys.path.append(common_libs_dir) |
1030 | 19 | from hw_health_lib import HWCheckArgumentParser | 19 | from hw_health_lib import HWCheckArgumentParser |
1031 | 20 | 20 | ||
1033 | 21 | INPUT_FILE = '/var/lib/nagios/mdadm.out' | 21 | INPUT_FILE = "/var/lib/nagios/mdadm.out" |
1034 | 22 | ARGS = argparse.Namespace() | 22 | ARGS = argparse.Namespace() |
1035 | 23 | 23 | ||
1036 | 24 | 24 | ||
1037 | 25 | def parse_output(): | 25 | def parse_output(): |
1038 | 26 | if not os.path.exists(ARGS.input_file): | 26 | if not os.path.exists(ARGS.input_file): |
1042 | 27 | raise UnknownError( | 27 | raise UnknownError("UNKNOWN: file not found ({})".format(ARGS.input_file)) |
1040 | 28 | 'UNKNOWN: file not found ({})'.format(ARGS.input_file) | ||
1041 | 29 | ) | ||
1043 | 30 | 28 | ||
1045 | 31 | with open(ARGS.input_file, 'r') as fd: | 29 | with open(ARGS.input_file, "r") as fd: |
1046 | 32 | for line in fd.readlines(): | 30 | for line in fd.readlines(): |
1047 | 33 | line = line.strip() | 31 | line = line.strip() |
1049 | 34 | if line.startswith('CRITICAL: '): | 32 | if line.startswith("CRITICAL: "): |
1050 | 35 | raise CriticalError(line) | 33 | raise CriticalError(line) |
1052 | 36 | elif line.startswith('WARNING: '): | 34 | elif line.startswith("WARNING: "): |
1053 | 37 | raise WarnError(line) | 35 | raise WarnError(line) |
1054 | 38 | else: | 36 | else: |
1055 | 39 | print(line) | 37 | print(line) |
1056 | 40 | 38 | ||
1057 | 41 | 39 | ||
1058 | 42 | def parse_args(argv=None): | 40 | def parse_args(argv=None): |
1063 | 43 | parser = HWCheckArgumentParser( | 41 | parser = HWCheckArgumentParser(prog="check_mdadm", def_input_file=INPUT_FILE) |
1060 | 44 | prog='check_mdadm', | ||
1061 | 45 | def_input_file=INPUT_FILE, | ||
1062 | 46 | ) | ||
1064 | 47 | return parser.parse_args(args=argv, namespace=ARGS) | 42 | return parser.parse_args(args=argv, namespace=ARGS) |
1065 | 48 | 43 | ||
1066 | 49 | 44 | ||
1067 | @@ -52,5 +47,5 @@ def main(argv): | |||
1068 | 52 | try_check(parse_output) | 47 | try_check(parse_output) |
1069 | 53 | 48 | ||
1070 | 54 | 49 | ||
1072 | 55 | if __name__ == '__main__': | 50 | if __name__ == "__main__": |
1073 | 56 | main(sys.argv[1:]) | 51 | main(sys.argv[1:]) |
1074 | diff --git a/src/files/mdadm/cron_mdadm.py b/src/files/mdadm/cron_mdadm.py | |||
1075 | index 3e7e8e9..fed9ea8 100755 | |||
1076 | --- a/src/files/mdadm/cron_mdadm.py | |||
1077 | +++ b/src/files/mdadm/cron_mdadm.py | |||
1078 | @@ -13,7 +13,7 @@ try: | |||
1079 | 13 | except ImportError: | 13 | except ImportError: |
1080 | 14 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests | 14 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests |
1081 | 15 | common_libs_dir = os.path.abspath( | 15 | common_libs_dir = os.path.abspath( |
1083 | 16 | os.path.join(os.path.dirname(__file__), '..', 'common') | 16 | os.path.join(os.path.dirname(__file__), "..", "common") |
1084 | 17 | ) | 17 | ) |
1085 | 18 | if common_libs_dir not in sys.path: | 18 | if common_libs_dir not in sys.path: |
1086 | 19 | sys.path.append(common_libs_dir) | 19 | sys.path.append(common_libs_dir) |
1087 | @@ -25,22 +25,20 @@ ARGS = argparse.Namespace() | |||
1088 | 25 | 25 | ||
1089 | 26 | 26 | ||
1090 | 27 | def get_devices(): | 27 | def get_devices(): |
1092 | 28 | if os.path.exists('/sbin/mdadm'): | 28 | if os.path.exists("/sbin/mdadm"): |
1093 | 29 | try: | 29 | try: |
1094 | 30 | cmd = ["/sbin/mdadm", "--detail", "--scan"] | 30 | cmd = ["/sbin/mdadm", "--detail", "--scan"] |
1095 | 31 | devices_raw = subprocess.check_output(cmd) | 31 | devices_raw = subprocess.check_output(cmd) |
1096 | 32 | devices_re = re.compile(r"^ARRAY\s+([^ ]+) ") | 32 | devices_re = re.compile(r"^ARRAY\s+([^ ]+) ") |
1097 | 33 | devices = set() | 33 | devices = set() |
1099 | 34 | for line in devices_raw.decode().split('\n'): | 34 | for line in devices_raw.decode().split("\n"): |
1100 | 35 | line = line.strip() | 35 | line = line.strip() |
1101 | 36 | device_re = devices_re.match(line) | 36 | device_re = devices_re.match(line) |
1102 | 37 | if device_re is not None: | 37 | if device_re is not None: |
1103 | 38 | devices.add(device_re.group(1)) | 38 | devices.add(device_re.group(1)) |
1104 | 39 | return devices | 39 | return devices |
1105 | 40 | except subprocess.CalledProcessError as error: | 40 | except subprocess.CalledProcessError as error: |
1109 | 41 | rc = generate_output( | 41 | rc = generate_output("CRITICAL: get_devices error - {}".format(error)) |
1107 | 42 | "CRITICAL: get_devices error - {}".format(error) | ||
1108 | 43 | ) | ||
1110 | 44 | if rc: | 42 | if rc: |
1111 | 45 | sys.exit(0) | 43 | sys.exit(0) |
1112 | 46 | return set() | 44 | return set() |
1113 | @@ -48,28 +46,28 @@ def get_devices(): | |||
1114 | 48 | 46 | ||
1115 | 49 | def generate_output(msg): | 47 | def generate_output(msg): |
1116 | 50 | try: | 48 | try: |
1118 | 51 | with open(TEMP_FILE, 'w') as fd: | 49 | with open(TEMP_FILE, "w") as fd: |
1119 | 52 | fd.write(msg) | 50 | fd.write(msg) |
1120 | 53 | shutil.move(TEMP_FILE, ARGS.write) | 51 | shutil.move(TEMP_FILE, ARGS.write) |
1121 | 54 | return True | 52 | return True |
1122 | 55 | except Exception as error: | 53 | except Exception as error: |
1124 | 56 | print('Unable to generate output file:', error) | 54 | print("Unable to generate output file:", error) |
1125 | 57 | return False | 55 | return False |
1126 | 58 | 56 | ||
1127 | 59 | 57 | ||
1128 | 60 | def get_devices_stats(devices): | 58 | def get_devices_stats(devices): |
1130 | 61 | mdadm_detail = ['/sbin/mdadm', '--detail'] | 59 | mdadm_detail = ["/sbin/mdadm", "--detail"] |
1131 | 62 | mdadm_detail.extend(sorted(devices)) | 60 | mdadm_detail.extend(sorted(devices)) |
1132 | 63 | 61 | ||
1133 | 64 | devices_details_raw = subprocess.check_output(mdadm_detail) | 62 | devices_details_raw = subprocess.check_output(mdadm_detail) |
1134 | 65 | 63 | ||
1140 | 66 | devices_re = r'^(/\S+):$' | 64 | devices_re = r"^(/\S+):$" |
1141 | 67 | state_re = r'^\s*State\s+:\s+(.+)\s*$' | 65 | state_re = r"^\s*State\s+:\s+(.+)\s*$" |
1142 | 68 | status_re = r'^\s*(Active|Working|Failed|Spare) Devices\s+:\s+(\d+)$' | 66 | status_re = r"^\s*(Active|Working|Failed|Spare) Devices\s+:\s+(\d+)$" |
1143 | 69 | rebuild_status_re = r'^\s*Rebuild Status\s+:\s+(\d+%\s+\S+)$' | 67 | rebuild_status_re = r"^\s*Rebuild Status\s+:\s+(\d+%\s+\S+)$" |
1144 | 70 | removed_re = r'^\s*-\s+0\s+0\s+(\d+)\s+removed$' | 68 | removed_re = r"^\s*-\s+0\s+0\s+(\d+)\s+removed$" |
1145 | 71 | # 4 8 162 3 spare rebuilding /dev/sdk2 | 69 | # 4 8 162 3 spare rebuilding /dev/sdk2 |
1147 | 72 | rebuilding_re = r'^\s*\d+\s+\d+\s+\d+\s+\d+\s+\S+\s+rebuilding\s+(\S+)$' | 70 | rebuilding_re = r"^\s*\d+\s+\d+\s+\d+\s+\d+\s+\S+\s+rebuilding\s+(\S+)$" |
1148 | 73 | 71 | ||
1149 | 74 | devices_cre = re.compile(devices_re) | 72 | devices_cre = re.compile(devices_re) |
1150 | 75 | state_cre = re.compile(state_re) | 73 | state_cre = re.compile(state_re) |
1151 | @@ -80,54 +78,50 @@ def get_devices_stats(devices): | |||
1152 | 80 | 78 | ||
1153 | 81 | device = None | 79 | device = None |
1154 | 82 | devices_stats = {} | 80 | devices_stats = {} |
1156 | 83 | for line in devices_details_raw.decode().split('\n'): | 81 | for line in devices_details_raw.decode().split("\n"): |
1157 | 84 | line = line.rstrip() | 82 | line = line.rstrip() |
1158 | 85 | m = devices_cre.match(line) | 83 | m = devices_cre.match(line) |
1159 | 86 | if m: | 84 | if m: |
1160 | 87 | device = m.group(1) | 85 | device = m.group(1) |
1161 | 88 | devices_stats[device] = { | 86 | devices_stats[device] = { |
1173 | 89 | 'stats': { | 87 | "stats": {"Active": 0, "Working": 0, "Failed": 0, "Spare": 0}, |
1174 | 90 | 'Active': 0, | 88 | "rebuild_status": "", |
1175 | 91 | 'Working': 0, | 89 | "degraded": False, |
1176 | 92 | 'Failed': 0, | 90 | "recovering": False, |
1177 | 93 | 'Spare': 0, | 91 | "removed": [], |
1178 | 94 | }, | 92 | "rebuilding": [], |
1168 | 95 | 'rebuild_status': '', | ||
1169 | 96 | 'degraded': False, | ||
1170 | 97 | 'recovering': False, | ||
1171 | 98 | 'removed': [], | ||
1172 | 99 | 'rebuilding': [], | ||
1179 | 100 | } | 93 | } |
1180 | 101 | continue | 94 | continue |
1181 | 102 | 95 | ||
1182 | 103 | m = state_cre.match(line) | 96 | m = state_cre.match(line) |
1183 | 104 | if m: | 97 | if m: |
1185 | 105 | # format for State line can be "clean" or "clean, degraded" or "active, degraded, rebuilding", etc. | 98 | # format for State line can be "clean" or "clean, degraded", |
1186 | 99 | # or "active, degraded, rebuilding", etc. | ||
1187 | 106 | states = m.group(1).split(", ") | 100 | states = m.group(1).split(", ") |
1192 | 107 | if 'degraded' in states and device: | 101 | if "degraded" in states and device: |
1193 | 108 | devices_stats[device]['degraded'] = True | 102 | devices_stats[device]["degraded"] = True |
1194 | 109 | if 'recovering' in states and device: | 103 | if "recovering" in states and device: |
1195 | 110 | devices_stats[device]['recovering'] = True | 104 | devices_stats[device]["recovering"] = True |
1196 | 111 | continue | 105 | continue |
1197 | 112 | 106 | ||
1198 | 113 | m = status_cre.match(line) | 107 | m = status_cre.match(line) |
1199 | 114 | if m and device: | 108 | if m and device: |
1201 | 115 | devices_stats[device]['stats'][m.group(1)] = int(m.group(2)) | 109 | devices_stats[device]["stats"][m.group(1)] = int(m.group(2)) |
1202 | 116 | continue | 110 | continue |
1203 | 117 | 111 | ||
1204 | 118 | m = removed_cre.match(line) | 112 | m = removed_cre.match(line) |
1205 | 119 | if m and device: | 113 | if m and device: |
1207 | 120 | devices_stats[device]['removed'].append(m.group(1)) | 114 | devices_stats[device]["removed"].append(m.group(1)) |
1208 | 121 | continue | 115 | continue |
1209 | 122 | 116 | ||
1210 | 123 | m = rebuild_status_cre.match(line) | 117 | m = rebuild_status_cre.match(line) |
1211 | 124 | if m and device: | 118 | if m and device: |
1213 | 125 | devices_stats[device]['rebuild_status'] = m.group(1) | 119 | devices_stats[device]["rebuild_status"] = m.group(1) |
1214 | 126 | continue | 120 | continue |
1215 | 127 | 121 | ||
1216 | 128 | m = rebuilding_cre.match(line) | 122 | m = rebuilding_cre.match(line) |
1217 | 129 | if m and device: | 123 | if m and device: |
1219 | 130 | devices_stats[device]['rebuilding'].append(m.group(1)) | 124 | devices_stats[device]["rebuilding"].append(m.group(1)) |
1220 | 131 | continue | 125 | continue |
1221 | 132 | 126 | ||
1222 | 133 | return devices_stats | 127 | return devices_stats |
1223 | @@ -136,14 +130,12 @@ def get_devices_stats(devices): | |||
1224 | 136 | def parse_output(): # noqa:C901 | 130 | def parse_output(): # noqa:C901 |
1225 | 137 | devices = get_devices() | 131 | devices = get_devices() |
1226 | 138 | if len(devices) == 0: | 132 | if len(devices) == 0: |
1228 | 139 | return generate_output('WARNING: unexpectedly checked no devices') | 133 | return generate_output("WARNING: unexpectedly checked no devices") |
1229 | 140 | 134 | ||
1230 | 141 | try: | 135 | try: |
1231 | 142 | devices_stats = get_devices_stats(devices) | 136 | devices_stats = get_devices_stats(devices) |
1232 | 143 | except subprocess.CalledProcessError as error: | 137 | except subprocess.CalledProcessError as error: |
1236 | 144 | return generate_output( | 138 | return generate_output("WARNING: error executing mdadm: {}".format(error)) |
1234 | 145 | "WARNING: error executing mdadm: {}".format(error) | ||
1235 | 146 | ) | ||
1237 | 147 | 139 | ||
1238 | 148 | msg = [] | 140 | msg = [] |
1239 | 149 | critical = False | 141 | critical = False |
1240 | @@ -151,51 +143,50 @@ def parse_output(): # noqa:C901 | |||
1241 | 151 | for device in devices_stats: | 143 | for device in devices_stats: |
1242 | 152 | parts = [] | 144 | parts = [] |
1243 | 153 | # Is device degraded? | 145 | # Is device degraded? |
1245 | 154 | if devices_stats[device]['degraded'] and devices_stats[device]['recovering']: | 146 | if devices_stats[device]["degraded"] and devices_stats[device]["recovering"]: |
1246 | 155 | warning = True | 147 | warning = True |
1249 | 156 | parts = ['{} recovering'.format(device)] | 148 | parts = ["{} recovering".format(device)] |
1250 | 157 | elif devices_stats[device]['degraded']: | 149 | elif devices_stats[device]["degraded"]: |
1251 | 158 | critical = True | 150 | critical = True |
1253 | 159 | parts = ['{} degraded'.format(device)] | 151 | parts = ["{} degraded".format(device)] |
1254 | 160 | else: | 152 | else: |
1256 | 161 | parts = ['{} ok'.format(device)] | 153 | parts = ["{} ok".format(device)] |
1257 | 162 | 154 | ||
1258 | 163 | # If Failed drives are found, list counters (how many?) | 155 | # If Failed drives are found, list counters (how many?) |
1260 | 164 | failed_cnt = devices_stats[device]['stats'].get('Failed', 0) | 156 | failed_cnt = devices_stats[device]["stats"].get("Failed", 0) |
1261 | 165 | if failed_cnt > 0: | 157 | if failed_cnt > 0: |
1262 | 166 | critical = True | 158 | critical = True |
1263 | 167 | dev_stats = [ | 159 | dev_stats = [ |
1266 | 168 | '{}[{}]'.format(status, devices_stats[device]['stats'][status]) | 160 | "{}[{}]".format(status, devices_stats[device]["stats"][status]) |
1267 | 169 | for status in sorted(devices_stats[device]['stats']) | 161 | for status in sorted(devices_stats[device]["stats"]) |
1268 | 170 | ] | 162 | ] |
1269 | 171 | parts.extend(dev_stats) | 163 | parts.extend(dev_stats) |
1270 | 172 | 164 | ||
1272 | 173 | if len(devices_stats[device]['removed']) != 0: | 165 | if len(devices_stats[device]["removed"]) != 0: |
1273 | 174 | critical = True | 166 | critical = True |
1281 | 175 | members = " and ".join(devices_stats[device]['removed']) | 167 | members = " and ".join(devices_stats[device]["removed"]) |
1282 | 176 | parts.append('RaidDevice(s) {} marked removed'.format(members)) | 168 | parts.append("RaidDevice(s) {} marked removed".format(members)) |
1283 | 177 | 169 | ||
1284 | 178 | if len(devices_stats[device]['rebuilding']) != 0: | 170 | if len(devices_stats[device]["rebuilding"]) != 0: |
1285 | 179 | rebuilding_members = " ".join(devices_stats[device]['rebuilding']) | 171 | rebuilding_members = " ".join(devices_stats[device]["rebuilding"]) |
1286 | 180 | rebuild_status = devices_stats[device]['rebuild_status'] | 172 | rebuild_status = devices_stats[device]["rebuild_status"] |
1287 | 181 | parts.append('{} rebuilding ({})'.format(rebuilding_members, rebuild_status)) | 173 | parts.append( |
1288 | 174 | "{} rebuilding ({})".format(rebuilding_members, rebuild_status) | ||
1289 | 175 | ) | ||
1290 | 182 | 176 | ||
1292 | 183 | msg.append(', '.join(parts)) | 177 | msg.append(", ".join(parts)) |
1293 | 184 | 178 | ||
1294 | 185 | if critical: | 179 | if critical: |
1296 | 186 | msg = 'CRITICAL: {}'.format('; '.join(msg)) | 180 | msg = "CRITICAL: {}".format("; ".join(msg)) |
1297 | 187 | elif warning: | 181 | elif warning: |
1299 | 188 | msg = 'WARNING: {}'.format('; '.join(msg)) | 182 | msg = "WARNING: {}".format("; ".join(msg)) |
1300 | 189 | else: | 183 | else: |
1302 | 190 | msg = 'OK: {}'.format('; '.join(msg)) | 184 | msg = "OK: {}".format("; ".join(msg)) |
1303 | 191 | return generate_output(msg) | 185 | return generate_output(msg) |
1304 | 192 | 186 | ||
1305 | 193 | 187 | ||
1306 | 194 | def parse_args(argv=None): | 188 | def parse_args(argv=None): |
1311 | 195 | parser = HWCronArgumentParser( | 189 | parser = HWCronArgumentParser(prog="cron_mdadm", def_write_file=OUTPUT_FILE) |
1308 | 196 | prog='cron_mdadm', | ||
1309 | 197 | def_write_file=OUTPUT_FILE, | ||
1310 | 198 | ) | ||
1312 | 199 | return parser.parse_args(args=argv, namespace=ARGS) | 190 | return parser.parse_args(args=argv, namespace=ARGS) |
1313 | 200 | 191 | ||
1314 | 201 | 192 | ||
1315 | diff --git a/src/files/megacli/check_megacli.py b/src/files/megacli/check_megacli.py | |||
1316 | index b587d10..03d3a5d 100755 | |||
1317 | --- a/src/files/megacli/check_megacli.py | |||
1318 | +++ b/src/files/megacli/check_megacli.py | |||
1319 | @@ -13,55 +13,54 @@ try: | |||
1320 | 13 | except ImportError: | 13 | except ImportError: |
1321 | 14 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests | 14 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests |
1322 | 15 | common_libs_dir = os.path.abspath( | 15 | common_libs_dir = os.path.abspath( |
1324 | 16 | os.path.join(os.path.dirname(__file__), '..', 'common') | 16 | os.path.join(os.path.dirname(__file__), "..", "common") |
1325 | 17 | ) | 17 | ) |
1326 | 18 | if common_libs_dir not in sys.path: | 18 | if common_libs_dir not in sys.path: |
1327 | 19 | sys.path.append(common_libs_dir) | 19 | sys.path.append(common_libs_dir) |
1328 | 20 | from hw_health_lib import HWCheckArgumentParser | 20 | from hw_health_lib import HWCheckArgumentParser |
1329 | 21 | 21 | ||
1331 | 22 | INPUT_FILE = '/var/lib/nagios/megacli.out' | 22 | INPUT_FILE = "/var/lib/nagios/megacli.out" |
1332 | 23 | ARGS = argparse.Namespace() | 23 | ARGS = argparse.Namespace() |
1333 | 24 | 24 | ||
1334 | 25 | 25 | ||
1338 | 26 | def handle_results( | 26 | def handle_results(nlines, match, critical, errors, num_ldrive, num_pdrive, policy): |
1336 | 27 | nlines, match, critical, errors, num_ldrive, num_pdrive, policy | ||
1337 | 28 | ): | ||
1339 | 29 | if nlines == 0: | 27 | if nlines == 0: |
1341 | 30 | raise WarnError('WARNING: controller not found') | 28 | raise WarnError("WARNING: controller not found") |
1342 | 31 | elif not match: | 29 | elif not match: |
1344 | 32 | raise WarnError('WARNING: error parsing megacli output') | 30 | raise WarnError("WARNING: error parsing megacli output") |
1345 | 33 | elif critical: | 31 | elif critical: |
1346 | 34 | if len(errors) > 0: | 32 | if len(errors) > 0: |
1353 | 35 | msg = ', '.join([ | 33 | msg = ", ".join( |
1354 | 36 | '{}({})'.format(cnt, vars()[cnt]) | 34 | [ |
1355 | 37 | for cnt in ('failed_ld', 'wrg_policy_ld') | 35 | "{}({})".format(cnt, vars()[cnt]) |
1356 | 38 | if vars().get(cnt, 0) > 0 | 36 | for cnt in ("failed_ld", "wrg_policy_ld") |
1357 | 39 | ]) | 37 | if vars().get(cnt, 0) > 0 |
1358 | 40 | msg += '; '.join(errors) | 38 | ] |
1359 | 39 | ) | ||
1360 | 40 | msg += "; ".join(errors) | ||
1361 | 41 | else: | 41 | else: |
1364 | 42 | msg = 'failure caught but no output available' | 42 | msg = "failure caught but no output available" |
1365 | 43 | raise CriticalError('CRITICAL: {}'.format(msg)) | 43 | raise CriticalError("CRITICAL: {}".format(msg)) |
1366 | 44 | elif len(errors) > 0: | 44 | elif len(errors) > 0: |
1368 | 45 | raise WarnError('WARNING: {}'.format('; '.join(errors))) | 45 | raise WarnError("WARNING: {}".format("; ".join(errors))) |
1369 | 46 | 46 | ||
1370 | 47 | else: | 47 | else: |
1371 | 48 | if num_ldrive == 0: | 48 | if num_ldrive == 0: |
1373 | 49 | msg = 'OK: no disks configured for RAID' | 49 | msg = "OK: no disks configured for RAID" |
1374 | 50 | else: | 50 | else: |
1377 | 51 | msg = ('OK: Optimal, ldrives[{}], pdrives[{}]' | 51 | msg = "OK: Optimal, ldrives[{}], pdrives[{}]".format(num_ldrive, num_pdrive) |
1376 | 52 | ''.format(num_ldrive, num_pdrive)) | ||
1378 | 53 | if policy: | 52 | if policy: |
1380 | 54 | msg += ', policy[{}]'.format(policy) | 53 | msg += ", policy[{}]".format(policy) |
1381 | 55 | print(msg) | 54 | print(msg) |
1382 | 56 | 55 | ||
1383 | 57 | 56 | ||
1384 | 58 | def parse_output(policy=False): # noqa:C901 | 57 | def parse_output(policy=False): # noqa:C901 |
1391 | 59 | noadapter_re = r'^Adapter \d+: No Virtual Drive Configured' | 58 | noadapter_re = r"^Adapter \d+: No Virtual Drive Configured" |
1392 | 60 | adapter_re = r'^Adapter (\d+) -- Virtual Drive Information:' | 59 | adapter_re = r"^Adapter (\d+) -- Virtual Drive Information:" |
1393 | 61 | ldrive_re = r'^Virtual Drive\s*:\s+(\d+)' | 60 | ldrive_re = r"^Virtual Drive\s*:\s+(\d+)" |
1394 | 62 | state_re = r'^State\s*:\s+([^\n]+)' | 61 | state_re = r"^State\s*:\s+([^\n]+)" |
1395 | 63 | npdrives_re = r'^Number Of Drives(?: per span)?\s*:\s+(\d+)' | 62 | npdrives_re = r"^Number Of Drives(?: per span)?\s*:\s+(\d+)" |
1396 | 64 | w_policy_re = r'^Current Cache Policy\s*:\s+([^,]+)' | 63 | w_policy_re = r"^Current Cache Policy\s*:\s+([^,]+)" |
1397 | 65 | 64 | ||
1398 | 66 | noadapter_cre = re.compile(noadapter_re) | 65 | noadapter_cre = re.compile(noadapter_re) |
1399 | 67 | adapter_cre = re.compile(adapter_re) | 66 | adapter_cre = re.compile(adapter_re) |
1400 | @@ -78,7 +77,7 @@ def parse_output(policy=False): # noqa:C901 | |||
1401 | 78 | 77 | ||
1402 | 79 | with open(ARGS.input_file) as devices_raw: | 78 | with open(ARGS.input_file) as devices_raw: |
1403 | 80 | for line in devices_raw.readlines(): | 79 | for line in devices_raw.readlines(): |
1405 | 81 | if len(line.strip()) and not line.startswith('Exit Code'): | 80 | if len(line.strip()) and not line.startswith("Exit Code"): |
1406 | 82 | nlines += 1 | 81 | nlines += 1 |
1407 | 83 | 82 | ||
1408 | 84 | if noadapter_cre.match(line): | 83 | if noadapter_cre.match(line): |
1409 | @@ -101,10 +100,11 @@ def parse_output(policy=False): # noqa:C901 | |||
1410 | 101 | if m: | 100 | if m: |
1411 | 102 | num_ldrive += 1 | 101 | num_ldrive += 1 |
1412 | 103 | state = m.group(1) | 102 | state = m.group(1) |
1414 | 104 | if state != 'Optimal': | 103 | if state != "Optimal": |
1415 | 105 | failed_ld += 1 | 104 | failed_ld += 1 |
1418 | 106 | msg = 'adapter({}):ld({}):state({})'.format( | 105 | msg = "adapter({}):ld({}):state({})".format( |
1419 | 107 | adapter_id, ldrive_id, state) | 106 | adapter_id, ldrive_id, state |
1420 | 107 | ) | ||
1421 | 108 | errors.append(msg) | 108 | errors.append(msg) |
1422 | 109 | critical = True | 109 | critical = True |
1423 | 110 | continue | 110 | continue |
1424 | @@ -120,22 +120,18 @@ def parse_output(policy=False): # noqa:C901 | |||
1425 | 120 | w_policy = m.group(1) | 120 | w_policy = m.group(1) |
1426 | 121 | if w_policy != policy: | 121 | if w_policy != policy: |
1427 | 122 | wrg_policy_ld += 1 | 122 | wrg_policy_ld += 1 |
1430 | 123 | msg = 'adp({}):ld({}):policy({})'.format( | 123 | msg = "adp({}):ld({}):policy({})".format( |
1431 | 124 | adapter_id, ldrive_id, w_policy) | 124 | adapter_id, ldrive_id, w_policy |
1432 | 125 | ) | ||
1433 | 125 | errors.append(msg) | 126 | errors.append(msg) |
1434 | 126 | critical = True | 127 | critical = True |
1435 | 127 | continue | 128 | continue |
1436 | 128 | 129 | ||
1440 | 129 | handle_results( | 130 | handle_results(nlines, match, critical, errors, num_ldrive, num_pdrive, policy) |
1438 | 130 | nlines, match, critical, errors, num_ldrive, num_pdrive, policy | ||
1439 | 131 | ) | ||
1441 | 132 | 131 | ||
1442 | 133 | 132 | ||
1443 | 134 | def parse_args(argv=None): | 133 | def parse_args(argv=None): |
1448 | 135 | parser = HWCheckArgumentParser( | 134 | parser = HWCheckArgumentParser(prog="check_megacli", def_input_file=INPUT_FILE) |
1445 | 136 | prog='check_megacli', | ||
1446 | 137 | def_input_file=INPUT_FILE, | ||
1447 | 138 | ) | ||
1449 | 139 | return parser.parse_args(args=argv, namespace=ARGS) | 135 | return parser.parse_args(args=argv, namespace=ARGS) |
1450 | 140 | 136 | ||
1451 | 141 | 137 | ||
1452 | @@ -144,5 +140,5 @@ def main(argv, policy=False): | |||
1453 | 144 | try_check(parse_output, policy) | 140 | try_check(parse_output, policy) |
1454 | 145 | 141 | ||
1455 | 146 | 142 | ||
1457 | 147 | if __name__ == '__main__': | 143 | if __name__ == "__main__": |
1458 | 148 | main(sys.argv[1:]) | 144 | main(sys.argv[1:]) |
1459 | diff --git a/src/files/nvme/check_nvme.py b/src/files/nvme/check_nvme.py | |||
1460 | index d4117c9..701bc9c 100755 | |||
1461 | --- a/src/files/nvme/check_nvme.py | |||
1462 | +++ b/src/files/nvme/check_nvme.py | |||
1463 | @@ -8,55 +8,64 @@ import re | |||
1464 | 8 | import subprocess | 8 | import subprocess |
1465 | 9 | from nagios_plugin3 import CriticalError, try_check, UnknownError | 9 | from nagios_plugin3 import CriticalError, try_check, UnknownError |
1466 | 10 | 10 | ||
1468 | 11 | NVME_RE = re.compile(r'^/dev/nvme\d+$') | 11 | NVME_RE = re.compile(r"^/dev/nvme\d+$") |
1469 | 12 | 12 | ||
1470 | 13 | 13 | ||
1471 | 14 | def parse_output(): | 14 | def parse_output(): |
1472 | 15 | keymap = {} | 15 | keymap = {} |
1473 | 16 | critical = False | 16 | critical = False |
1474 | 17 | alloutputs = [] | 17 | alloutputs = [] |
1476 | 18 | for device in glob.glob('/dev/nvme*'): | 18 | for device in glob.glob("/dev/nvme*"): |
1477 | 19 | if not NVME_RE.match(device): | 19 | if not NVME_RE.match(device): |
1478 | 20 | continue | 20 | continue |
1479 | 21 | try: | 21 | try: |
1482 | 22 | output = subprocess.check_output(['sudo', '/usr/sbin/nvme', | 22 | output = subprocess.check_output( |
1483 | 23 | 'smart-log', device]) | 23 | ["sudo", "/usr/sbin/nvme", "smart-log", device] |
1484 | 24 | ) | ||
1485 | 24 | except subprocess.CalledProcessError as error: | 25 | except subprocess.CalledProcessError as error: |
1487 | 25 | print('nvme check error: {}'.format(error)) | 26 | print("nvme check error: {}".format(error)) |
1488 | 26 | return | 27 | return |
1489 | 27 | 28 | ||
1492 | 28 | for line in output.decode(errors='ignore').splitlines(): | 29 | for line in output.decode(errors="ignore").splitlines(): |
1493 | 29 | datavalues_re = re.match(r'^(\w+)\s+:\s+([\d.]+)', line.strip()) | 30 | datavalues_re = re.match(r"^(\w+)\s+:\s+([\d.]+)", line.strip()) |
1494 | 30 | if not datavalues_re: | 31 | if not datavalues_re: |
1495 | 31 | continue | 32 | continue |
1496 | 32 | key, value = datavalues_re.groups() | 33 | key, value = datavalues_re.groups() |
1498 | 33 | keymap[key] = value.replace('.', '') | 34 | keymap[key] = value.replace(".", "") |
1499 | 34 | 35 | ||
1503 | 35 | if int(keymap['critical_warning']) != 0: | 36 | if int(keymap["critical_warning"]) != 0: |
1504 | 36 | status = ('CRITICAL: {} critical_warning is {}' | 37 | status = ("CRITICAL: {} critical_warning is {}").format( |
1505 | 37 | '').format(device, keymap['critical_warning']) | 38 | device, keymap["critical_warning"] |
1506 | 39 | ) | ||
1507 | 38 | critical = True | 40 | critical = True |
1508 | 39 | else: | 41 | else: |
1510 | 40 | status = 'OK: no errors on {}'.format(device) | 42 | status = "OK: no errors on {}".format(device) |
1511 | 41 | 43 | ||
1515 | 42 | alloutputs.append('{} | {}'.format( | 44 | alloutputs.append( |
1516 | 43 | status, ' '.join(['{}={}'.format(repr(key), value) | 45 | "{} | {}".format( |
1517 | 44 | for key, value in keymap.items()]))) | 46 | status, |
1518 | 47 | " ".join( | ||
1519 | 48 | ["{}={}".format(repr(key), value) for key, value in keymap.items()] | ||
1520 | 49 | ), | ||
1521 | 50 | ) | ||
1522 | 51 | ) | ||
1523 | 45 | 52 | ||
1524 | 46 | if critical: | 53 | if critical: |
1526 | 47 | raise CriticalError('\n'.join(alloutputs)) | 54 | raise CriticalError("\n".join(alloutputs)) |
1527 | 48 | 55 | ||
1528 | 49 | if not alloutputs: | 56 | if not alloutputs: |
1530 | 50 | raise UnknownError('no nvme devices found') | 57 | raise UnknownError("no nvme devices found") |
1531 | 51 | 58 | ||
1533 | 52 | print('\n'.join(alloutputs)) | 59 | print("\n".join(alloutputs)) |
1534 | 53 | 60 | ||
1535 | 54 | 61 | ||
1536 | 55 | def parse_args(argv=None): | 62 | def parse_args(argv=None): |
1537 | 56 | parser = argparse.ArgumentParser( | 63 | parser = argparse.ArgumentParser( |
1541 | 57 | prog='check_nvme', | 64 | prog="check_nvme", |
1542 | 58 | description=('this program reads the nvme smart-log and outputs an ' | 65 | description=( |
1543 | 59 | 'appropriate Nagios status line'), | 66 | "this program reads the nvme smart-log and outputs an " |
1544 | 67 | "appropriate Nagios status line" | ||
1545 | 68 | ), | ||
1546 | 60 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, | 69 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, |
1547 | 61 | ) | 70 | ) |
1548 | 62 | return parser.parse_args(argv) | 71 | return parser.parse_args(argv) |
1549 | @@ -67,5 +76,5 @@ def main(argv): | |||
1550 | 67 | try_check(parse_output) | 76 | try_check(parse_output) |
1551 | 68 | 77 | ||
1552 | 69 | 78 | ||
1554 | 70 | if __name__ == '__main__': | 79 | if __name__ == "__main__": |
1555 | 71 | main(sys.argv[1:]) | 80 | main(sys.argv[1:]) |
1556 | diff --git a/src/files/sas2ircu/check_sas2ircu.py b/src/files/sas2ircu/check_sas2ircu.py | |||
1557 | index b38e131..9628893 100755 | |||
1558 | --- a/src/files/sas2ircu/check_sas2ircu.py | |||
1559 | +++ b/src/files/sas2ircu/check_sas2ircu.py | |||
1560 | @@ -13,20 +13,20 @@ try: | |||
1561 | 13 | except ImportError: | 13 | except ImportError: |
1562 | 14 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests | 14 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests |
1563 | 15 | common_libs_dir = os.path.abspath( | 15 | common_libs_dir = os.path.abspath( |
1565 | 16 | os.path.join(os.path.dirname(__file__), '..', 'common') | 16 | os.path.join(os.path.dirname(__file__), "..", "common") |
1566 | 17 | ) | 17 | ) |
1567 | 18 | if common_libs_dir not in sys.path: | 18 | if common_libs_dir not in sys.path: |
1568 | 19 | sys.path.append(common_libs_dir) | 19 | sys.path.append(common_libs_dir) |
1569 | 20 | from hw_health_lib import HWCheckArgumentParser | 20 | from hw_health_lib import HWCheckArgumentParser |
1570 | 21 | 21 | ||
1572 | 22 | INPUT_FILE = '/var/lib/nagios/sas2ircu.out' | 22 | INPUT_FILE = "/var/lib/nagios/sas2ircu.out" |
1573 | 23 | ARGS = argparse.Namespace() | 23 | ARGS = argparse.Namespace() |
1574 | 24 | 24 | ||
1575 | 25 | 25 | ||
1576 | 26 | def parse_output(): | 26 | def parse_output(): |
1580 | 27 | enclosure_re = r'^\s+Enclosure #\s+:\s+(\d+)' | 27 | enclosure_re = r"^\s+Enclosure #\s+:\s+(\d+)" |
1581 | 28 | slot_re = r'^\s+Slot #\s+:\s+(\d+)' | 28 | slot_re = r"^\s+Slot #\s+:\s+(\d+)" |
1582 | 29 | state_re = r'^\s+State\s+:\s+(\S+)' | 29 | state_re = r"^\s+State\s+:\s+(\S+)" |
1583 | 30 | 30 | ||
1584 | 31 | encl_slot_state_cre = [ | 31 | encl_slot_state_cre = [ |
1585 | 32 | re.compile(enclosure_re), | 32 | re.compile(enclosure_re), |
1586 | @@ -47,28 +47,25 @@ def parse_output(): | |||
1587 | 47 | 47 | ||
1588 | 48 | if len(device) == 3: | 48 | if len(device) == 3: |
1589 | 49 | tmpdev = devices.get(device[2], []) | 49 | tmpdev = devices.get(device[2], []) |
1591 | 50 | tmpdev.append('{}:{}'.format(device[0], device[1])) | 50 | tmpdev.append("{}:{}".format(device[0], device[1])) |
1592 | 51 | devices[device[2]] = tmpdev | 51 | devices[device[2]] = tmpdev |
1594 | 52 | if not ('Ready' in device or 'Optimal' in device): | 52 | if not ("Ready" in device or "Optimal" in device): |
1595 | 53 | critical = True | 53 | critical = True |
1596 | 54 | device = [] | 54 | device = [] |
1597 | 55 | 55 | ||
1603 | 56 | msg = '; '.join([ | 56 | msg = "; ".join( |
1604 | 57 | '{}[{}]'.format(state, ','.join(devices[state])) for state in devices | 57 | ["{}[{}]".format(state, ",".join(devices[state])) for state in devices] |
1605 | 58 | ]) | 58 | ) |
1606 | 59 | if msg == '': | 59 | if msg == "": |
1607 | 60 | raise WarnError('WARNING: no output') | 60 | raise WarnError("WARNING: no output") |
1608 | 61 | elif critical: | 61 | elif critical: |
1610 | 62 | raise CriticalError('CRITICAL: {}'.format(msg)) | 62 | raise CriticalError("CRITICAL: {}".format(msg)) |
1611 | 63 | else: | 63 | else: |
1613 | 64 | print('OK: {}'.format(msg)) | 64 | print("OK: {}".format(msg)) |
1614 | 65 | 65 | ||
1615 | 66 | 66 | ||
1616 | 67 | def parse_args(argv=None): | 67 | def parse_args(argv=None): |
1621 | 68 | parser = HWCheckArgumentParser( | 68 | parser = HWCheckArgumentParser(prog="check_sas2ircu", def_input_file=INPUT_FILE) |
1618 | 69 | prog='check_sas2ircu', | ||
1619 | 70 | def_input_file=INPUT_FILE, | ||
1620 | 71 | ) | ||
1622 | 72 | return parser.parse_args(args=argv, namespace=ARGS) | 69 | return parser.parse_args(args=argv, namespace=ARGS) |
1623 | 73 | 70 | ||
1624 | 74 | 71 | ||
1625 | @@ -77,5 +74,5 @@ def main(argv): | |||
1626 | 77 | try_check(parse_output) | 74 | try_check(parse_output) |
1627 | 78 | 75 | ||
1628 | 79 | 76 | ||
1630 | 80 | if __name__ == '__main__': | 77 | if __name__ == "__main__": |
1631 | 81 | main(sys.argv[1:]) | 78 | main(sys.argv[1:]) |
1632 | diff --git a/src/files/sas3ircu/check_sas3ircu.py b/src/files/sas3ircu/check_sas3ircu.py | |||
1633 | index d62a90f..8b62679 100755 | |||
1634 | --- a/src/files/sas3ircu/check_sas3ircu.py | |||
1635 | +++ b/src/files/sas3ircu/check_sas3ircu.py | |||
1636 | @@ -14,182 +14,174 @@ try: | |||
1637 | 14 | except ImportError: | 14 | except ImportError: |
1638 | 15 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests | 15 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests |
1639 | 16 | common_libs_dir = os.path.abspath( | 16 | common_libs_dir = os.path.abspath( |
1641 | 17 | os.path.join(os.path.dirname(__file__), '..', 'common') | 17 | os.path.join(os.path.dirname(__file__), "..", "common") |
1642 | 18 | ) | 18 | ) |
1643 | 19 | if common_libs_dir not in sys.path: | 19 | if common_libs_dir not in sys.path: |
1644 | 20 | sys.path.append(common_libs_dir) | 20 | sys.path.append(common_libs_dir) |
1645 | 21 | from hw_health_lib import HWCheckArgumentParser | 21 | from hw_health_lib import HWCheckArgumentParser |
1646 | 22 | 22 | ||
1648 | 23 | INPUT_FILE = '/var/lib/nagios/sas3ircu.out' | 23 | INPUT_FILE = "/var/lib/nagios/sas3ircu.out" |
1649 | 24 | ARGS = argparse.Namespace() | 24 | ARGS = argparse.Namespace() |
1650 | 25 | 25 | ||
1651 | 26 | 26 | ||
1652 | 27 | def parse_output(input_file): | 27 | def parse_output(input_file): |
1654 | 28 | ''' | 28 | """ |
1655 | 29 | Turn the whole sas3ircu output into a dictionary | 29 | Turn the whole sas3ircu output into a dictionary |
1657 | 30 | ''' | 30 | """ |
1658 | 31 | sections_re = re.compile( | 31 | sections_re = re.compile( |
1676 | 32 | r'(?<=^Controller information\n)' | 32 | r"(?<=^Controller information\n)" |
1677 | 33 | r'-+\n' | 33 | r"-+\n" |
1678 | 34 | r'(?P<ctrl>(?:.|\n)*)' | 34 | r"(?P<ctrl>(?:.|\n)*)" |
1679 | 35 | r'^-+\n' | 35 | r"^-+\n" |
1680 | 36 | r'^IR Volume information\n' | 36 | r"^IR Volume information\n" |
1681 | 37 | r'-+\n' | 37 | r"-+\n" |
1682 | 38 | r'(?P<vols>(?:.|\n)*)' | 38 | r"(?P<vols>(?:.|\n)*)" |
1683 | 39 | r'^-+\n' | 39 | r"^-+\n" |
1684 | 40 | r'^Physical device information\n' | 40 | r"^Physical device information\n" |
1685 | 41 | r'-+\n' | 41 | r"-+\n" |
1686 | 42 | r'(?P<disks>(?:.|\n)*)' | 42 | r"(?P<disks>(?:.|\n)*)" |
1687 | 43 | r'^-+\n' | 43 | r"^-+\n" |
1688 | 44 | r'^Enclosure information\n' | 44 | r"^Enclosure information\n" |
1689 | 45 | r'-+\n' | 45 | r"-+\n" |
1690 | 46 | r'(?P<encl>(?:.|\n)*)' | 46 | r"(?P<encl>(?:.|\n)*)" |
1691 | 47 | r'^-+\n', | 47 | r"^-+\n", |
1692 | 48 | re.MULTILINE | 48 | re.MULTILINE, |
1693 | 49 | ) | 49 | ) |
1694 | 50 | disks_re = re.compile( | 50 | disks_re = re.compile( |
1697 | 51 | r'(?<=^Device is a Hard disk\n)(?P<kv_data>(?:.|\n)*?)(?=^$)', | 51 | r"(?<=^Device is a Hard disk\n)(?P<kv_data>(?:.|\n)*?)(?=^$)", re.MULTILINE |
1696 | 52 | re.MULTILINE | ||
1698 | 53 | ) | 52 | ) |
1699 | 54 | 53 | ||
1700 | 55 | with open(input_file) as devices_raw: | 54 | with open(input_file) as devices_raw: |
1701 | 56 | sections = sections_re.search(devices_raw.read()).groupdict() | 55 | sections = sections_re.search(devices_raw.read()).groupdict() |
1704 | 57 | controller = _kv_parse(sections['ctrl']) | 56 | controller = _kv_parse(sections["ctrl"]) |
1705 | 58 | volumes = _vols_parse(sections['vols']) | 57 | volumes = _vols_parse(sections["vols"]) |
1706 | 59 | # This collects disk level information in a structure simulating the | 58 | # This collects disk level information in a structure simulating the |
1707 | 60 | # physical encl/slot arrangement | 59 | # physical encl/slot arrangement |
1708 | 61 | topology = defaultdict(dict) | 60 | topology = defaultdict(dict) |
1710 | 62 | for match in disks_re.findall(sections['disks']): | 61 | for match in disks_re.findall(sections["disks"]): |
1711 | 63 | disk = _kv_parse(match) | 62 | disk = _kv_parse(match) |
1714 | 64 | encl = disk['Enclosure #'] | 63 | encl = disk["Enclosure #"] |
1715 | 65 | slot = disk['Slot #'] | 64 | slot = disk["Slot #"] |
1716 | 66 | topology[encl][slot] = disk | 65 | topology[encl][slot] = disk |
1718 | 67 | enclosure = _kv_parse(sections['encl']) | 66 | enclosure = _kv_parse(sections["encl"]) |
1719 | 68 | 67 | ||
1720 | 69 | return { | 68 | return { |
1725 | 70 | 'controller': controller, | 69 | "controller": controller, |
1726 | 71 | 'volumes': volumes, | 70 | "volumes": volumes, |
1727 | 72 | 'disks': topology, | 71 | "disks": topology, |
1728 | 73 | 'enclosure': enclosure, | 72 | "enclosure": enclosure, |
1729 | 74 | } | 73 | } |
1730 | 75 | 74 | ||
1731 | 76 | 75 | ||
1732 | 77 | def _vols_parse(text): | 76 | def _vols_parse(text): |
1733 | 78 | vols_re = re.compile( | 77 | vols_re = re.compile( |
1739 | 79 | r'^IR volume (?P<n>\d+)\n' | 78 | r"^IR volume (?P<n>\d+)\n" |
1740 | 80 | r'(?P<kv_data>(?:.|\n)*?)' | 79 | r"(?P<kv_data>(?:.|\n)*?)" |
1741 | 81 | r'\s+Physical hard disks\s+:.*\n' | 80 | r"\s+Physical hard disks\s+:.*\n" |
1742 | 82 | r'(?P<topology>(?:^\s+PHY.*\n)+)', | 81 | r"(?P<topology>(?:^\s+PHY.*\n)+)", |
1743 | 83 | re.MULTILINE | 82 | re.MULTILINE, |
1744 | 84 | ) | 83 | ) |
1745 | 85 | vol_topology_re = re.compile( | 84 | vol_topology_re = re.compile( |
1748 | 86 | r'\s+PHY\[(?P<n>\d+)\]\s+Enclosure#\/Slot#\s+' | 85 | r"\s+PHY\[(?P<n>\d+)\]\s+Enclosure#\/Slot#\s+" r":\s+(?P<enc>\d+):(?P<slot>\d+)" |
1747 | 87 | r':\s+(?P<enc>\d+):(?P<slot>\d+)' | ||
1749 | 88 | ) | 86 | ) |
1750 | 89 | volumes = {} | 87 | volumes = {} |
1751 | 90 | for (vol_n, kv_data, vol_topology) in vols_re.findall(text): | 88 | for (vol_n, kv_data, vol_topology) in vols_re.findall(text): |
1752 | 91 | topology = {} | 89 | topology = {} |
1753 | 92 | for (member_n, enc, slot) in vol_topology_re.findall(vol_topology): | 90 | for (member_n, enc, slot) in vol_topology_re.findall(vol_topology): |
1756 | 93 | topology[member_n] = {'enc': enc, 'slot': slot} | 91 | topology[member_n] = {"enc": enc, "slot": slot} |
1757 | 94 | volumes[vol_n] = {**_kv_parse(kv_data), 'topology': topology} | 92 | volumes[vol_n] = {**_kv_parse(kv_data), "topology": topology} |
1758 | 95 | 93 | ||
1759 | 96 | return volumes | 94 | return volumes |
1760 | 97 | 95 | ||
1761 | 98 | 96 | ||
1762 | 99 | def _kv_parse(text): | 97 | def _kv_parse(text): |
1764 | 100 | ''' | 98 | """ |
1765 | 101 | Build a dict by parsing text like: | 99 | Build a dict by parsing text like: |
1766 | 102 | 100 | ||
1767 | 103 | key1 : value1 | 101 | key1 : value1 |
1768 | 104 | key2 : value2 | 102 | key2 : value2 |
1773 | 105 | ''' | 103 | """ |
1774 | 106 | key_value_re = re.compile( | 104 | key_value_re = re.compile(r"^\s*(?P<key>.*?)\s+:\s+(?P<value>.*)") |
1771 | 107 | r'^\s*(?P<key>.*?)\s+:\s+(?P<value>.*)' | ||
1772 | 108 | ) | ||
1775 | 109 | text = text.strip() | 105 | text = text.strip() |
1776 | 110 | return { | 106 | return { |
1779 | 111 | m.group('key'): m.group('value') | 107 | m.group("key"): m.group("value") |
1780 | 112 | for m in map(key_value_re.search, text.split('\n')) | 108 | for m in map(key_value_re.search, text.split("\n")) |
1781 | 113 | } | 109 | } |
1782 | 114 | 110 | ||
1783 | 115 | 111 | ||
1784 | 116 | def eval_status(data): | 112 | def eval_status(data): |
1786 | 117 | ''' | 113 | """ |
1787 | 118 | Given a dictionary and a set of rules, determine the state of the storage | 114 | Given a dictionary and a set of rules, determine the state of the storage |
1788 | 119 | subsystem | 115 | subsystem |
1793 | 120 | ''' | 116 | """ |
1794 | 121 | OK = 'Okay (OKY)' | 117 | OK = "Okay (OKY)" |
1795 | 122 | READY = 'Ready (RDY)' | 118 | READY = "Ready (RDY)" |
1796 | 123 | OPTIMAL = 'Optimal (OPT)' | 119 | OPTIMAL = "Optimal (OPT)" |
1797 | 124 | status = Status() | 120 | status = Status() |
1798 | 125 | 121 | ||
1799 | 126 | # 1. Volumes must be in Okay state | 122 | # 1. Volumes must be in Okay state |
1803 | 127 | for volume in data['volumes'].values(): | 123 | for volume in data["volumes"].values(): |
1804 | 128 | vol_id = volume['Volume ID'] | 124 | vol_id = volume["Volume ID"] |
1805 | 129 | vol_status = volume['Status of volume'] | 125 | vol_status = volume["Status of volume"] |
1806 | 130 | if vol_status != OK: | 126 | if vol_status != OK: |
1807 | 131 | status.crit("Volume {}: {}".format(vol_id, vol_status)) | 127 | status.crit("Volume {}: {}".format(vol_id, vol_status)) |
1808 | 132 | else: | 128 | else: |
1809 | 133 | # 2. Volume members must be in Optimal state | 129 | # 2. Volume members must be in Optimal state |
1813 | 134 | for member in volume['topology'].values(): | 130 | for member in volume["topology"].values(): |
1814 | 135 | disk = data['disks'][member['enc']][member['slot']] | 131 | disk = data["disks"][member["enc"]][member["slot"]] |
1815 | 136 | if disk['State'] != OPTIMAL: | 132 | if disk["State"] != OPTIMAL: |
1816 | 137 | msg = "Disk {}:{} {}".format( | 133 | msg = "Disk {}:{} {}".format( |
1820 | 138 | member['enc'], | 134 | member["enc"], member["slot"], disk["State"] |
1818 | 139 | member['slot'], | ||
1819 | 140 | disk['State'] | ||
1821 | 141 | ) | 135 | ) |
1823 | 142 | if disk['State'] == READY: | 136 | if disk["State"] == READY: |
1824 | 143 | status.warn(msg) | 137 | status.warn(msg) |
1825 | 144 | else: | 138 | else: |
1826 | 145 | status.crit(msg) | 139 | status.crit(msg) |
1827 | 146 | # 3. Disks can be in Optimal or Ready state ("ready" is ok for non-RAID | 140 | # 3. Disks can be in Optimal or Ready state ("ready" is ok for non-RAID |
1828 | 147 | # members) | 141 | # members) |
1830 | 148 | for enclosure_id, enclosure in data['disks'].items(): | 142 | for enclosure_id, enclosure in data["disks"].items(): |
1831 | 149 | for slot_id, slot in enclosure.items(): | 143 | for slot_id, slot in enclosure.items(): |
1838 | 150 | if slot['State'] not in [OPTIMAL, READY]: | 144 | if slot["State"] not in [OPTIMAL, READY]: |
1839 | 151 | status.crit("Disk {}:{} {}".format( | 145 | status.crit( |
1840 | 152 | enclosure_id, | 146 | "Disk {}:{} {}".format(enclosure_id, slot_id, slot["State"]) |
1841 | 153 | slot_id, | 147 | ) |
1836 | 154 | slot['State'] | ||
1837 | 155 | )) | ||
1842 | 156 | status.get_status() | 148 | status.get_status() |
1843 | 157 | 149 | ||
1844 | 158 | 150 | ||
1845 | 159 | class Status: | 151 | class Status: |
1847 | 160 | ''' | 152 | """ |
1848 | 161 | Class hiding the whole "CRIT >> WARN >> OK" priority scheme | 153 | Class hiding the whole "CRIT >> WARN >> OK" priority scheme |
1851 | 162 | ''' | 154 | """ |
1852 | 163 | def __init__(self, status='OK'): | 155 | |
1853 | 156 | def __init__(self, status="OK"): | ||
1854 | 164 | self._status = status | 157 | self._status = status |
1855 | 165 | self._msgs = set() | 158 | self._msgs = set() |
1856 | 166 | 159 | ||
1857 | 167 | def crit(self, msg): | 160 | def crit(self, msg): |
1859 | 168 | self._status = 'CRITICAL' | 161 | self._status = "CRITICAL" |
1860 | 169 | self._msgs.add(msg) | 162 | self._msgs.add(msg) |
1861 | 170 | 163 | ||
1862 | 171 | def warn(self, msg): | 164 | def warn(self, msg): |
1865 | 172 | if self._status != 'CRITICAL': | 165 | if self._status != "CRITICAL": |
1866 | 173 | self._status = 'WARNING' | 166 | self._status = "WARNING" |
1867 | 174 | self._msgs.add(msg) | 167 | self._msgs.add(msg) |
1868 | 175 | 168 | ||
1869 | 176 | def ok(self, msg): | 169 | def ok(self, msg): |
1870 | 177 | self._msgs.add(msg) | 170 | self._msgs.add(msg) |
1871 | 178 | 171 | ||
1872 | 179 | def get_status(self): | 172 | def get_status(self): |
1874 | 180 | ''' | 173 | """ |
1875 | 181 | Render the current status, rasing nagios_plugin3 exceptions if things | 174 | Render the current status, rasing nagios_plugin3 exceptions if things |
1876 | 182 | are not OK | 175 | are not OK |
1880 | 183 | ''' | 176 | """ |
1881 | 184 | if self._status == 'OK': | 177 | if self._status == "OK": |
1882 | 185 | msg = '{}: no errors'.format(self._status) | 178 | msg = "{}: no errors".format(self._status) |
1883 | 186 | print(msg) | 179 | print(msg) |
1884 | 187 | else: | 180 | else: |
1888 | 188 | msg = '{}: {}'.format(self._status, | 181 | msg = "{}: {}".format(self._status, " | ".join(self._msgs)) |
1889 | 189 | ' | '.join(self._msgs)) | 182 | if self._status == "CRITICAL": |
1887 | 190 | if self._status == 'CRITICAL': | ||
1890 | 191 | raise CriticalError(msg) | 183 | raise CriticalError(msg) |
1892 | 192 | elif self._status == 'WARNING': | 184 | elif self._status == "WARNING": |
1893 | 193 | raise WarnError(msg) | 185 | raise WarnError(msg) |
1894 | 194 | else: | 186 | else: |
1895 | 195 | # this really shouldn't be happening | 187 | # this really shouldn't be happening |
1896 | @@ -200,10 +192,7 @@ class Status: | |||
1897 | 200 | 192 | ||
1898 | 201 | 193 | ||
1899 | 202 | def parse_args(argv=None): | 194 | def parse_args(argv=None): |
1904 | 203 | parser = HWCheckArgumentParser( | 195 | parser = HWCheckArgumentParser(prog="check_sas3ircu", def_input_file=INPUT_FILE) |
1901 | 204 | prog='check_sas3ircu', | ||
1902 | 205 | def_input_file=INPUT_FILE, | ||
1903 | 206 | ) | ||
1905 | 207 | return parser.parse_args(args=argv, namespace=ARGS) | 196 | return parser.parse_args(args=argv, namespace=ARGS) |
1906 | 208 | 197 | ||
1907 | 209 | 198 | ||
1908 | @@ -213,5 +202,5 @@ def main(argv=None): | |||
1909 | 213 | try_check(eval_status, data) | 202 | try_check(eval_status, data) |
1910 | 214 | 203 | ||
1911 | 215 | 204 | ||
1913 | 216 | if __name__ == '__main__': | 205 | if __name__ == "__main__": |
1914 | 217 | main(sys.argv[1:]) | 206 | main(sys.argv[1:]) |
1915 | diff --git a/src/files/ssacli/cron_ssacli.py b/src/files/ssacli/cron_ssacli.py | |||
1916 | index 94c7ef7..eadd711 100755 | |||
1917 | --- a/src/files/ssacli/cron_ssacli.py | |||
1918 | +++ b/src/files/ssacli/cron_ssacli.py | |||
1919 | @@ -25,7 +25,7 @@ try: | |||
1920 | 25 | except ImportError: | 25 | except ImportError: |
1921 | 26 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests | 26 | # shared lib will be under $CHARM_SOURCE_DIR/files/common during unit tests |
1922 | 27 | common_libs_dir = os.path.abspath( | 27 | common_libs_dir = os.path.abspath( |
1924 | 28 | os.path.join(os.path.dirname(__file__), '..', 'common') | 28 | os.path.join(os.path.dirname(__file__), "..", "common") |
1925 | 29 | ) | 29 | ) |
1926 | 30 | if common_libs_dir not in sys.path: | 30 | if common_libs_dir not in sys.path: |
1927 | 31 | sys.path.append(common_libs_dir) | 31 | sys.path.append(common_libs_dir) |
1928 | @@ -37,16 +37,14 @@ except ImportError: | |||
1929 | 37 | HPArgumentParser, | 37 | HPArgumentParser, |
1930 | 38 | ) | 38 | ) |
1931 | 39 | 39 | ||
1935 | 40 | SSACLI_BIN = '/opt/smartstorageadmin/ssacli/bin/ssacli' | 40 | SSACLI_BIN = "/opt/smartstorageadmin/ssacli/bin/ssacli" |
1936 | 41 | OUTPUT_FILE = '/var/lib/nagios/ssacli.out' | 41 | OUTPUT_FILE = "/var/lib/nagios/ssacli.out" |
1937 | 42 | EXCLUDE_FILE = '/etc/nagios/ssacli.exclude.yaml' | 42 | EXCLUDE_FILE = "/etc/nagios/ssacli.exclude.yaml" |
1938 | 43 | 43 | ||
1939 | 44 | 44 | ||
1940 | 45 | def parse_args(argv=None): | 45 | def parse_args(argv=None): |
1941 | 46 | parser = HPArgumentParser( | 46 | parser = HPArgumentParser( |
1945 | 47 | prog='cron_ssacli', | 47 | prog="cron_ssacli", def_write_file=OUTPUT_FILE, def_exclude_file=EXCLUDE_FILE |
1943 | 48 | def_write_file=OUTPUT_FILE, | ||
1944 | 49 | def_exclude_file=EXCLUDE_FILE | ||
1946 | 50 | ) | 48 | ) |
1947 | 51 | return parser.parse_args(args=argv) | 49 | return parser.parse_args(args=argv) |
1948 | 52 | 50 | ||
1949 | @@ -60,38 +58,46 @@ def check_array(slot): | |||
1950 | 60 | physicaldrive 1:1 (box 1:bay 1, 72 GB): OK | 58 | physicaldrive 1:1 (box 1:bay 1, 72 GB): OK |
1951 | 61 | physicaldrive 1:2 (box 1:bay 2, 72 GB): OK | 59 | physicaldrive 1:2 (box 1:bay 2, 72 GB): OK |
1952 | 62 | """ | 60 | """ |
1954 | 63 | if os.path.isfile('/etc/nagios/skip-cat-hp-array.txt'): | 61 | if os.path.isfile("/etc/nagios/skip-cat-hp-array.txt"): |
1955 | 64 | return | 62 | return |
1956 | 65 | 63 | ||
1957 | 66 | cmd = ( | 64 | cmd = ( |
1960 | 67 | '{ssacli} ctrl slot={slot} ld all show status; ' | 65 | "{ssacli} ctrl slot={slot} ld all show status; " |
1961 | 68 | '{ssacli} ctrl slot={slot} pd all show status'.format(ssacli=SSACLI_BIN, slot=slot) | 66 | "{ssacli} ctrl slot={slot} pd all show status".format( |
1962 | 67 | ssacli=SSACLI_BIN, slot=slot | ||
1963 | 68 | ) | ||
1964 | 69 | ) | 69 | ) |
1965 | 70 | try: | 70 | try: |
1967 | 71 | result = subprocess.check_output(cmd, shell=True).decode('UTF-8') | 71 | result = subprocess.check_output(cmd, shell=True).decode("UTF-8") |
1968 | 72 | return _parse_array_output(result) | 72 | return _parse_array_output(result) |
1969 | 73 | except subprocess.CalledProcessError as e: | 73 | except subprocess.CalledProcessError as e: |
1970 | 74 | return ( | 74 | return ( |
1974 | 75 | 'UNKNOWN Call to ssacli to show ld/pd info failed. ' | 75 | "UNKNOWN Call to ssacli to show ld/pd info failed. " |
1975 | 76 | 'Array Slot {} - Return Code {} - {}' | 76 | "Array Slot {} - Return Code {} - {}" |
1976 | 77 | ''.format(slot, e.returncode, e.output) | 77 | "".format(slot, e.returncode, e.output) |
1977 | 78 | ) | 78 | ) |
1978 | 79 | 79 | ||
1979 | 80 | 80 | ||
1980 | 81 | def _parse_array_output(output): | 81 | def _parse_array_output(output): |
1985 | 82 | innocuous_errors = re.compile(r'^Error: The specified (device|controller) ' | 82 | innocuous_errors = re.compile( |
1986 | 83 | 'does not have any (logical|physical)') | 83 | r"^Error: The specified (device|controller) " |
1987 | 84 | drive_status_line = re.compile(r'^\s*(logicaldrive|physicaldrive)') | 84 | "does not have any (logical|physical)" |
1988 | 85 | ignore_file = '/etc/nagios/ignores/ignores-cat-hp-array.txt' | 85 | ) |
1989 | 86 | drive_status_line = re.compile(r"^\s*(logicaldrive|physicaldrive)") | ||
1990 | 87 | ignore_file = "/etc/nagios/ignores/ignores-cat-hp-array.txt" | ||
1991 | 86 | ignores = read_ignore_file(ignore_file) | 88 | ignores = read_ignore_file(ignore_file) |
1992 | 87 | 89 | ||
1993 | 88 | for line in output.splitlines(): | 90 | for line in output.splitlines(): |
1994 | 89 | line = line.strip() | 91 | line = line.strip() |
1996 | 90 | if not line or innocuous_errors.search(line) or not drive_status_line.search(line): | 92 | if ( |
1997 | 93 | not line | ||
1998 | 94 | or innocuous_errors.search(line) | ||
1999 | 95 | or not drive_status_line.search(line) | ||
2000 | 96 | ): | ||
2001 | 91 | continue | 97 | continue |
2002 | 92 | (drivetype, number) = line.split()[:2] | 98 | (drivetype, number) = line.split()[:2] |
2005 | 93 | status = line.split('):')[1].lstrip().upper() | 99 | status = line.split("):")[1].lstrip().upper() |
2006 | 94 | if status != 'OK': | 100 | if status != "OK": |
2007 | 95 | err = '{} {} is "{}"'.format(drivetype, number, status) | 101 | err = '{} {} is "{}"'.format(drivetype, number, status) |
2008 | 96 | if not ignore(err, ignores): | 102 | if not ignore(err, ignores): |
2009 | 97 | return err | 103 | return err |
2010 | @@ -107,24 +113,24 @@ def check_controller(slot): | |||
2011 | 107 | Cache Status: OK | 113 | Cache Status: OK |
2012 | 108 | Battery Status: Failed (Replace Batteries) | 114 | Battery Status: Failed (Replace Batteries) |
2013 | 109 | """ | 115 | """ |
2015 | 110 | if os.path.isfile('/etc/nagios/skip-cat-hp-controller.txt'): | 116 | if os.path.isfile("/etc/nagios/skip-cat-hp-controller.txt"): |
2016 | 111 | return | 117 | return |
2017 | 112 | 118 | ||
2019 | 113 | cmd = '{ssacli} ctrl slot={slot} show status'.format(ssacli=SSACLI_BIN, slot=slot) | 119 | cmd = "{ssacli} ctrl slot={slot} show status".format(ssacli=SSACLI_BIN, slot=slot) |
2020 | 114 | try: | 120 | try: |
2022 | 115 | result = subprocess.check_output(cmd, shell=True).decode('UTF-8') | 121 | result = subprocess.check_output(cmd, shell=True).decode("UTF-8") |
2023 | 116 | return _parse_controller_output(result) | 122 | return _parse_controller_output(result) |
2024 | 117 | except subprocess.CalledProcessError as e: | 123 | except subprocess.CalledProcessError as e: |
2025 | 118 | return ( | 124 | return ( |
2029 | 119 | 'UNKNOWN Call to ssacli to show ld/pd info failed. ' | 125 | "UNKNOWN Call to ssacli to show ld/pd info failed. " |
2030 | 120 | 'Array Slot {} - Return Code {} - {}' | 126 | "Array Slot {} - Return Code {} - {}" |
2031 | 121 | ''.format(slot, e.returncode, e.output) | 127 | "".format(slot, e.returncode, e.output) |
2032 | 122 | ) | 128 | ) |
2033 | 123 | 129 | ||
2034 | 124 | 130 | ||
2035 | 125 | def _parse_controller_output(output): | 131 | def _parse_controller_output(output): |
2036 | 126 | controller = "Unknown" | 132 | controller = "Unknown" |
2038 | 127 | ignore_file = '/etc/nagios/ignores/ignores-cat-hp-controller.txt' | 133 | ignore_file = "/etc/nagios/ignores/ignores-cat-hp-controller.txt" |
2039 | 128 | ignores = read_ignore_file(ignore_file) | 134 | ignores = read_ignore_file(ignore_file) |
2040 | 129 | for line in output.splitlines(): | 135 | for line in output.splitlines(): |
2041 | 130 | line = line.strip() | 136 | line = line.strip() |
2042 | @@ -151,7 +157,7 @@ def main(): | |||
2043 | 151 | 157 | ||
2044 | 152 | slots = get_hp_controller_slots() | 158 | slots = get_hp_controller_slots() |
2045 | 153 | if not slots: | 159 | if not slots: |
2047 | 154 | msg = 'OK: no controller/array found to check' | 160 | msg = "OK: no controller/array found to check" |
2048 | 155 | exit = 0 | 161 | exit = 0 |
2049 | 156 | 162 | ||
2050 | 157 | errors = [] | 163 | errors = [] |
2051 | @@ -160,19 +166,19 @@ def main(): | |||
2052 | 160 | errors += check_array(slot) | 166 | errors += check_array(slot) |
2053 | 161 | 167 | ||
2054 | 162 | if len(errors) > 0: | 168 | if len(errors) > 0: |
2056 | 163 | msg = 'CRIT {} error(s): {}'.format(len(errors), ' - '.join(errors)) | 169 | msg = "CRIT {} error(s): {}".format(len(errors), " - ".join(errors)) |
2057 | 164 | exit = 2 | 170 | exit = 2 |
2058 | 165 | else: | 171 | else: |
2060 | 166 | msg = 'OK No errors found' | 172 | msg = "OK No errors found" |
2061 | 167 | exit = 0 | 173 | exit = 0 |
2062 | 168 | 174 | ||
2063 | 169 | if ARGS.write: | 175 | if ARGS.write: |
2065 | 170 | with open(ARGS.write, 'w') as f: | 176 | with open(ARGS.write, "w") as f: |
2066 | 171 | f.write(msg) | 177 | f.write(msg) |
2067 | 172 | else: | 178 | else: |
2068 | 173 | print(msg) | 179 | print(msg) |
2069 | 174 | sys.exit(exit) | 180 | sys.exit(exit) |
2070 | 175 | 181 | ||
2071 | 176 | 182 | ||
2073 | 177 | if __name__ == '__main__': | 183 | if __name__ == "__main__": |
2074 | 178 | main() | 184 | main() |
2075 | diff --git a/src/lib/hwhealth/discovery/lshw.py b/src/lib/hwhealth/discovery/lshw.py | |||
2076 | index c2f6653..1e52304 100644 | |||
2077 | --- a/src/lib/hwhealth/discovery/lshw.py | |||
2078 | +++ b/src/lib/hwhealth/discovery/lshw.py | |||
2079 | @@ -7,48 +7,45 @@ from charmhelpers.core import hookenv | |||
2080 | 7 | 7 | ||
2081 | 8 | 8 | ||
2082 | 9 | class Hardware(object): | 9 | class Hardware(object): |
2084 | 10 | def __init__(self, filename='/var/run/hw_health_lshw.json'): | 10 | def __init__(self, filename="/var/run/hw_health_lshw.json"): |
2085 | 11 | self.__filename = filename | 11 | self.__filename = filename |
2086 | 12 | self._lshw = self.__load_hwinfo() | 12 | self._lshw = self.__load_hwinfo() |
2087 | 13 | 13 | ||
2088 | 14 | def __load_hwinfo(self): | 14 | def __load_hwinfo(self): |
2089 | 15 | try: | 15 | try: |
2090 | 16 | if os.path.exists(self.__filename): | 16 | if os.path.exists(self.__filename): |
2092 | 17 | with open(self.__filename, 'r') as fd: | 17 | with open(self.__filename, "r") as fd: |
2093 | 18 | hwinfo = json.load(fd) | 18 | hwinfo = json.load(fd) |
2094 | 19 | else: | 19 | else: |
2096 | 20 | output = subprocess.check_output(['lshw', '-json']) | 20 | output = subprocess.check_output(["lshw", "-json"]) |
2097 | 21 | # Note(aluria): py35 does not support extra args on | 21 | # Note(aluria): py35 does not support extra args on |
2098 | 22 | # subprocess.check_output | 22 | # subprocess.check_output |
2100 | 23 | output_str = output.decode(errors='ignore') | 23 | output_str = output.decode(errors="ignore") |
2101 | 24 | hwinfo = json.loads(output_str) | 24 | hwinfo = json.loads(output_str) |
2103 | 25 | with open(self.__filename, 'w') as fd: | 25 | with open(self.__filename, "w") as fd: |
2104 | 26 | fd.write(output_str) | 26 | fd.write(output_str) |
2105 | 27 | 27 | ||
2106 | 28 | return hwinfo | 28 | return hwinfo |
2107 | 29 | except PermissionError as error: | 29 | except PermissionError as error: |
2110 | 30 | hookenv.log('lshw io error: {}'.format(error), | 30 | hookenv.log("lshw io error: {}".format(error), hookenv.ERROR) |
2109 | 31 | hookenv.ERROR) | ||
2111 | 32 | return {} | 31 | return {} |
2112 | 33 | except subprocess.CalledProcessError as error: | 32 | except subprocess.CalledProcessError as error: |
2115 | 34 | hookenv.log('lshw subprocess error: {}'.format(error), | 33 | hookenv.log("lshw subprocess error: {}".format(error), hookenv.ERROR) |
2114 | 35 | hookenv.ERROR) | ||
2116 | 36 | return {} | 34 | return {} |
2117 | 37 | except json.JSONDecodeError as error: | 35 | except json.JSONDecodeError as error: |
2120 | 38 | hookenv.log('lshw json error: {}'.format(error), | 36 | hookenv.log("lshw json error: {}".format(error), hookenv.ERROR) |
2119 | 39 | hookenv.ERROR) | ||
2121 | 40 | return {} | 37 | return {} |
2122 | 41 | 38 | ||
2123 | 42 | @property | 39 | @property |
2124 | 43 | def get_system(self): | 40 | def get_system(self): |
2125 | 44 | """Helper to get vendor info retrieved via actions | 41 | """Helper to get vendor info retrieved via actions |
2126 | 45 | """ | 42 | """ |
2128 | 46 | keys = 'id description vendor product version serial'.split() | 43 | keys = "id description vendor product version serial".split() |
2129 | 47 | sysinfo = {} | 44 | sysinfo = {} |
2130 | 48 | for k in keys: | 45 | for k in keys: |
2131 | 49 | v = self._lshw.get(k) | 46 | v = self._lshw.get(k) |
2134 | 50 | if k == 'id': | 47 | if k == "id": |
2135 | 51 | k = 'hostname' | 48 | k = "hostname" |
2136 | 52 | sysinfo.update({k: v}) | 49 | sysinfo.update({k: v}) |
2137 | 53 | return sysinfo | 50 | return sysinfo |
2138 | 54 | 51 | ||
2139 | @@ -56,22 +53,20 @@ class Hardware(object): | |||
2140 | 56 | def get_motherboard(self): | 53 | def get_motherboard(self): |
2141 | 57 | """Helper to get vendor info retrieved via actions | 54 | """Helper to get vendor info retrieved via actions |
2142 | 58 | """ | 55 | """ |
2144 | 59 | keys = 'description vendor product version serial'.split() | 56 | keys = "description vendor product version serial".split() |
2145 | 60 | buses = [] | 57 | buses = [] |
2148 | 61 | for child in self._lshw.get('children', [{}]): | 58 | for child in self._lshw.get("children", [{}]): |
2149 | 62 | if child.get('class') != 'bus': | 59 | if child.get("class") != "bus": |
2150 | 63 | continue | 60 | continue |
2151 | 64 | buses.append(dict([(k, child.get(k)) for k in keys])) | 61 | buses.append(dict([(k, child.get(k)) for k in keys])) |
2152 | 65 | return buses | 62 | return buses |
2153 | 66 | 63 | ||
2155 | 67 | def _get_inspect_bridges(self, bridge_item, bridge_class='storage'): | 64 | def _get_inspect_bridges(self, bridge_item, bridge_class="storage"): |
2156 | 68 | bridge_class_items = [] | 65 | bridge_class_items = [] |
2163 | 69 | for item in bridge_item.get('children', [{}]): | 66 | for item in bridge_item.get("children", [{}]): |
2164 | 70 | if item.get('class', '') == 'bridge': | 67 | if item.get("class", "") == "bridge": |
2165 | 71 | bridge_class_items.extend( | 68 | bridge_class_items.extend(self._get_inspect_bridges(item, bridge_class)) |
2166 | 72 | self._get_inspect_bridges(item, bridge_class) | 69 | elif item.get("class", "") == bridge_class: |
2161 | 73 | ) | ||
2162 | 74 | elif item.get('class', '') == bridge_class: | ||
2167 | 75 | bridge_class_items.append(item) | 70 | bridge_class_items.append(item) |
2168 | 76 | return bridge_class_items | 71 | return bridge_class_items |
2169 | 77 | 72 | ||
2170 | @@ -85,18 +80,16 @@ class Hardware(object): | |||
2171 | 85 | """ | 80 | """ |
2172 | 86 | storage = [] | 81 | storage = [] |
2173 | 87 | # system -> bus -> bridge -> storage | 82 | # system -> bus -> bridge -> storage |
2176 | 88 | for bus in self._lshw.get('children', [{}]): | 83 | for bus in self._lshw.get("children", [{}]): |
2177 | 89 | if bus.get('class', '') != 'bus': | 84 | if bus.get("class", "") != "bus": |
2178 | 90 | continue | 85 | continue |
2181 | 91 | for bridge in bus.get('children', [{}]): | 86 | for bridge in bus.get("children", [{}]): |
2182 | 92 | if bridge.get('class', '') != 'bridge': | 87 | if bridge.get("class", "") != "bridge": |
2183 | 93 | continue | 88 | continue |
2190 | 94 | for item in bridge.get('children', [{}]): | 89 | for item in bridge.get("children", [{}]): |
2191 | 95 | if item.get('class', '') == 'bridge': | 90 | if item.get("class", "") == "bridge": |
2192 | 96 | storage.extend( | 91 | storage.extend(self._get_inspect_bridges(item, "storage")) |
2193 | 97 | self._get_inspect_bridges(item, 'storage') | 92 | elif item.get("class", "") == "storage": |
2188 | 98 | ) | ||
2189 | 99 | elif item.get('class', '') == 'storage': | ||
2194 | 100 | storage.append(item) | 93 | storage.append(item) |
2195 | 101 | return storage | 94 | return storage |
2196 | 102 | 95 | ||
2197 | @@ -108,15 +101,15 @@ class Hardware(object): | |||
2198 | 108 | businfo and linux driver The aim of the function is to easily parse | 101 | businfo and linux driver The aim of the function is to easily parse |
2199 | 109 | products to detect which tool(s) need to be used. | 102 | products to detect which tool(s) need to be used. |
2200 | 110 | """ | 103 | """ |
2203 | 111 | keys = 'vendor product businfo'.split() | 104 | keys = "vendor product businfo".split() |
2204 | 112 | config_keys = ['driver'] | 105 | config_keys = ["driver"] |
2205 | 113 | storage = [] | 106 | storage = [] |
2206 | 114 | for item in self._get_storage_class: | 107 | for item in self._get_storage_class: |
2207 | 115 | storage_item = dict([(k, item.get(k)) for k in keys]) | 108 | storage_item = dict([(k, item.get(k)) for k in keys]) |
2208 | 116 | storage_item.update( | 109 | storage_item.update( |
2212 | 117 | dict([(k, item.get('configuration', {}).get(k)) | 110 | dict([(k, item.get("configuration", {}).get(k)) for k in config_keys]) |
2213 | 118 | for k in config_keys])) | 111 | ) |
2214 | 119 | storage_item.update({'has_children': 'children' in item}) | 112 | storage_item.update({"has_children": "children" in item}) |
2215 | 120 | storage.append(storage_item) | 113 | storage.append(storage_item) |
2216 | 121 | 114 | ||
2217 | 122 | return storage | 115 | return storage |
2218 | @@ -133,14 +126,14 @@ class Hardware(object): | |||
2219 | 133 | The aim of the function is to easily parse products to detect which | 126 | The aim of the function is to easily parse products to detect which |
2220 | 134 | tool(s) need to be used. | 127 | tool(s) need to be used. |
2221 | 135 | """ | 128 | """ |
2223 | 136 | keys = 'product serial businfo physid dev size logicalname'.split() | 129 | keys = "product serial businfo physid dev size logicalname".split() |
2224 | 137 | disks = [] | 130 | disks = [] |
2225 | 138 | for item in self._get_storage_class: | 131 | for item in self._get_storage_class: |
2228 | 139 | for child in item.get('children', [{}]): | 132 | for child in item.get("children", [{}]): |
2229 | 140 | if child.get('class', '') != 'disk': | 133 | if child.get("class", "") != "disk": |
2230 | 141 | continue | 134 | continue |
2231 | 142 | disk = dict([(k, child.get(k)) for k in keys]) | 135 | disk = dict([(k, child.get(k)) for k in keys]) |
2233 | 143 | disk.update({'storage_parent': item.get('product')}) | 136 | disk.update({"storage_parent": item.get("product")}) |
2234 | 144 | disks.append(disk) | 137 | disks.append(disk) |
2235 | 145 | return disks | 138 | return disks |
2236 | 146 | 139 | ||
2237 | @@ -153,28 +146,28 @@ class Hardware(object): | |||
2238 | 153 | The aim of the function is to easily parse products to detect which | 146 | The aim of the function is to easily parse products to detect which |
2239 | 154 | tool(s) need to be used. | 147 | tool(s) need to be used. |
2240 | 155 | """ | 148 | """ |
2243 | 156 | keys = 'vendor product businfo logicalname serial'.split() | 149 | keys = "vendor product businfo logicalname serial".split() |
2244 | 157 | config_keys = 'driver driverversion firmware speed'.split() | 150 | config_keys = "driver driverversion firmware speed".split() |
2245 | 158 | nics = [] | 151 | nics = [] |
2246 | 159 | # system -> bus -> bridge -> network | 152 | # system -> bus -> bridge -> network |
2249 | 160 | for bus in self._lshw.get('children', [{}]): | 153 | for bus in self._lshw.get("children", [{}]): |
2250 | 161 | if bus.get('class', '') != 'bus': | 154 | if bus.get("class", "") != "bus": |
2251 | 162 | continue | 155 | continue |
2254 | 163 | for bridge in bus.get('children', [{}]): | 156 | for bridge in bus.get("children", [{}]): |
2255 | 164 | if bridge.get('class', '') != 'bridge': | 157 | if bridge.get("class", "") != "bridge": |
2256 | 165 | continue | 158 | continue |
2261 | 166 | for item in bridge.get('children', [{}]): | 159 | for item in bridge.get("children", [{}]): |
2262 | 167 | if item.get('class', '') == 'bridge': | 160 | if item.get("class", "") == "bridge": |
2263 | 168 | nics.extend(self._get_inspect_bridges(item, 'network')) | 161 | nics.extend(self._get_inspect_bridges(item, "network")) |
2264 | 169 | elif item.get('class', '') == 'network': | 162 | elif item.get("class", "") == "network": |
2265 | 170 | nics.append(item) | 163 | nics.append(item) |
2266 | 171 | 164 | ||
2267 | 172 | nics_filtered = [] | 165 | nics_filtered = [] |
2268 | 173 | for nic in nics: | 166 | for nic in nics: |
2269 | 174 | nic_item = dict([(k, nic.get(k)) for k in keys]) | 167 | nic_item = dict([(k, nic.get(k)) for k in keys]) |
2270 | 175 | nic_item.update( | 168 | nic_item.update( |
2273 | 176 | dict([(k, nic.get('configuration', {}).get(k)) | 169 | dict([(k, nic.get("configuration", {}).get(k)) for k in config_keys]) |
2274 | 177 | for k in config_keys])) | 170 | ) |
2275 | 178 | nics_filtered.append(nic_item) | 171 | nics_filtered.append(nic_item) |
2276 | 179 | return nics_filtered | 172 | return nics_filtered |
2277 | 180 | 173 | ||
2278 | @@ -182,60 +175,67 @@ class Hardware(object): | |||
2279 | 182 | def formatted_system_info(self): | 175 | def formatted_system_info(self): |
2280 | 183 | ctxt = self.get_system | 176 | ctxt = self.get_system |
2281 | 184 | return ( | 177 | return ( |
2284 | 185 | '{description}: vendor[{vendor}], product_name[{product}], ' | 178 | "{description}: vendor[{vendor}], product_name[{product}], " |
2285 | 186 | 'version[{version}], serial[{serial}], hostname[{hostname}]' | 179 | "version[{version}], serial[{serial}], hostname[{hostname}]" |
2286 | 187 | ).format(**ctxt) | 180 | ).format(**ctxt) |
2287 | 188 | 181 | ||
2288 | 189 | @property | 182 | @property |
2289 | 190 | def formatted_motherboard_info(self): | 183 | def formatted_motherboard_info(self): |
2294 | 191 | return '\n'.join([ | 184 | return "\n".join( |
2295 | 192 | '{description}: vendor[{vendor}], product_name[{product}], ' | 185 | [ |
2296 | 193 | 'version[{version}], serial[{serial}]'.format(**ctxt) | 186 | "{description}: vendor[{vendor}], product_name[{product}], " |
2297 | 194 | for ctxt in self.get_motherboard]) | 187 | "version[{version}], serial[{serial}]".format(**ctxt) |
2298 | 188 | for ctxt in self.get_motherboard | ||
2299 | 189 | ] | ||
2300 | 190 | ) | ||
2301 | 195 | 191 | ||
2302 | 196 | @property | 192 | @property |
2303 | 197 | def formatted_storage_class_info(self): | 193 | def formatted_storage_class_info(self): |
2306 | 198 | LINE = ('driver[{driver}], businfo[{businfo}], ' | 194 | LINE = "driver[{driver}], businfo[{businfo}], has_children[{has_children}]" |
2305 | 199 | 'has_children[{has_children}]') | ||
2307 | 200 | ctxts = [] | 195 | ctxts = [] |
2308 | 201 | for ctxt in self.get_storage_class_info: | 196 | for ctxt in self.get_storage_class_info: |
2312 | 202 | if ctxt.get('vendor') and ctxt.get('product'): | 197 | if ctxt.get("vendor") and ctxt.get("product"): |
2313 | 203 | tmpl = ('Storage class: vendor[{vendor}],' | 198 | tmpl = ( |
2314 | 204 | 'product_name[{product}], ') + LINE | 199 | "Storage class: vendor[{vendor}],product_name[{product}], " |
2315 | 200 | ) + LINE | ||
2316 | 205 | else: | 201 | else: |
2318 | 206 | tmpl = 'Storage class: {}'.format(LINE) | 202 | tmpl = "Storage class: {}".format(LINE) |
2319 | 207 | ctxts.append(tmpl.format(**ctxt)) | 203 | ctxts.append(tmpl.format(**ctxt)) |
2320 | 208 | 204 | ||
2321 | 209 | if ctxts: | 205 | if ctxts: |
2323 | 210 | return '\n'.join(ctxts) | 206 | return "\n".join(ctxts) |
2324 | 211 | 207 | ||
2325 | 212 | @property | 208 | @property |
2326 | 213 | def formatted_disk_class_info(self): | 209 | def formatted_disk_class_info(self): |
2333 | 214 | return '\n'.join([ | 210 | return "\n".join( |
2334 | 215 | 'Disk class: ld[{logicalname}], dev[{dev}], physid[{physid}], ' | 211 | [ |
2335 | 216 | 'businfo[{businfo}], product_name[{product}], serial[{serial}], ' | 212 | "Disk class: ld[{logicalname}], dev[{dev}], physid[{physid}], " |
2336 | 217 | 'size[{size}], storage_parent[{storage_parent}]'.format(**ctxt) | 213 | "businfo[{businfo}], product_name[{product}], serial[{serial}], " |
2337 | 218 | for ctxt in self.get_disk_class_info | 214 | "size[{size}], storage_parent[{storage_parent}]".format(**ctxt) |
2338 | 219 | ]) | 215 | for ctxt in self.get_disk_class_info |
2339 | 216 | ] | ||
2340 | 217 | ) | ||
2341 | 220 | 218 | ||
2342 | 221 | @property | 219 | @property |
2343 | 222 | def formatted_network_class_info(self): | 220 | def formatted_network_class_info(self): |
2354 | 223 | return '\n'.join([ | 221 | return "\n".join( |
2355 | 224 | 'NIC: iface[{logicalname}], businfo[{businfo}], vendor[{vendor}]' | 222 | [ |
2356 | 225 | ', product_name[{product}], firmware[{firmware}], driver[{driver}' | 223 | "NIC: iface[{logicalname}], businfo[{businfo}], vendor[{vendor}]" |
2357 | 226 | ', {driverversion}], serial[{serial}]' | 224 | ", product_name[{product}], firmware[{firmware}], driver[{driver}" |
2358 | 227 | ', speed[{speed}]'.format(**ctxt) | 225 | ", {driverversion}], serial[{serial}]" |
2359 | 228 | for ctxt in self.get_network_class_info | 226 | ", speed[{speed}]".format(**ctxt) |
2360 | 229 | ]) | 227 | for ctxt in self.get_network_class_info |
2361 | 230 | 228 | ] | |
2362 | 231 | 229 | ) | |
2363 | 232 | if __name__ == '__main__': | 230 | |
2364 | 231 | |||
2365 | 232 | if __name__ == "__main__": | ||
2366 | 233 | hw = Hardware() | 233 | hw = Hardware() |
2367 | 234 | print(hw.formatted_system_info) | 234 | print(hw.formatted_system_info) |
2368 | 235 | print(hw.formatted_motherboard_info) | 235 | print(hw.formatted_motherboard_info) |
2370 | 236 | print('\n== get_storage_classes') | 236 | print("\n== get_storage_classes") |
2371 | 237 | print(hw.formatted_storage_class_info) | 237 | print(hw.formatted_storage_class_info) |
2373 | 238 | print('== get_disk_classes') | 238 | print("== get_disk_classes") |
2374 | 239 | print(hw.formatted_disk_class_info) | 239 | print(hw.formatted_disk_class_info) |
2376 | 240 | print('\n== get_network_class_info') | 240 | print("\n== get_network_class_info") |
2377 | 241 | print(hw.formatted_network_class_info) | 241 | print(hw.formatted_network_class_info) |
2378 | diff --git a/src/lib/hwhealth/discovery/supported_vendors.py b/src/lib/hwhealth/discovery/supported_vendors.py | |||
2379 | index ef37a5a..459f078 100644 | |||
2380 | --- a/src/lib/hwhealth/discovery/supported_vendors.py | |||
2381 | +++ b/src/lib/hwhealth/discovery/supported_vendors.py | |||
2382 | @@ -2,37 +2,32 @@ | |||
2383 | 2 | from hwhealth import tools | 2 | from hwhealth import tools |
2384 | 3 | 3 | ||
2385 | 4 | SUPPORTED_STORAGE = { | 4 | SUPPORTED_STORAGE = { |
2390 | 5 | 'LSI Logic / Symbios Logic': { | 5 | "LSI Logic / Symbios Logic": { |
2391 | 6 | 'SAS2308 PCI-Express Fusion-MPT SAS-2': tools.Sas2Ircu, | 6 | "SAS2308 PCI-Express Fusion-MPT SAS-2": tools.Sas2Ircu, |
2392 | 7 | 'SAS3008 PCI-Express Fusion-MPT SAS-3': tools.Sas3Ircu, | 7 | "SAS3008 PCI-Express Fusion-MPT SAS-3": tools.Sas3Ircu, |
2393 | 8 | 'MegaRAID SAS-3 3108 [Invader]': tools.MegaCLI, | 8 | "MegaRAID SAS-3 3108 [Invader]": tools.MegaCLI, |
2394 | 9 | }, | 9 | }, |
2395 | 10 | # 'Mellanox Technologies': { | 10 | # 'Mellanox Technologies': { |
2396 | 11 | # 'MT27710 Family [ConnectX-4 Lx]': lambda: 'mlxconfig', | 11 | # 'MT27710 Family [ConnectX-4 Lx]': lambda: 'mlxconfig', |
2397 | 12 | # 'MT27700 Family [ConnectX-4]': lambda: 'mlxconfig', | 12 | # 'MT27700 Family [ConnectX-4]': lambda: 'mlxconfig', |
2398 | 13 | # }, | 13 | # }, |
2401 | 14 | 'Intel Corporation': { | 14 | "Intel Corporation": {"PCIe Data Center SSD": tools.Nvme}, |
2402 | 15 | 'PCIe Data Center SSD': tools.Nvme, | 15 | "Samsung Electronics Co Ltd": { |
2403 | 16 | "NVMe SSD Controller SM961/PM961": tools.Nvme, | ||
2404 | 17 | "NVMe SSD Controller 172Xa/172Xb": tools.Nvme, | ||
2405 | 16 | }, | 18 | }, |
2413 | 17 | 'Samsung Electronics Co Ltd': { | 19 | "Hewlett-Packard Company": { |
2414 | 18 | 'NVMe SSD Controller SM961/PM961': tools.Nvme, | 20 | "Smart Array Gen9 Controllers": tools.SsaCli, |
2415 | 19 | 'NVMe SSD Controller 172Xa/172Xb': tools.Nvme, | 21 | "Smart Storage PQI 12G SAS/PCIe 3": tools.ILOrest, |
2409 | 20 | }, | ||
2410 | 21 | 'Hewlett-Packard Company': { | ||
2411 | 22 | 'Smart Array Gen9 Controllers': tools.SsaCli, | ||
2412 | 23 | 'Smart Storage PQI 12G SAS/PCIe 3': tools.ILOrest, | ||
2416 | 24 | }, | 22 | }, |
2417 | 25 | } | 23 | } |
2418 | 26 | 24 | ||
2419 | 27 | SUPPORTED_SYSTEMS = { | 25 | SUPPORTED_SYSTEMS = { |
2421 | 28 | 'HPE': { | 26 | "HPE": { |
2422 | 29 | # tools.HpLog, # not sure if this works on gen10+ | 27 | # tools.HpLog, # not sure if this works on gen10+ |
2423 | 30 | tools.ILOrest, | 28 | tools.ILOrest, |
2424 | 31 | }, | 29 | }, |
2429 | 32 | 'HP': { | 30 | "HP": {tools.HpLog, tools.SsaCli}, |
2426 | 33 | tools.HpLog, | ||
2427 | 34 | tools.SsaCli, | ||
2428 | 35 | } | ||
2430 | 36 | } | 31 | } |
2431 | 37 | 32 | ||
2432 | 38 | SUPPORTED_DRIVERS = { | 33 | SUPPORTED_DRIVERS = { |
2433 | @@ -41,5 +36,5 @@ SUPPORTED_DRIVERS = { | |||
2434 | 41 | # 'apt': 'hioa', | 36 | # 'apt': 'hioa', |
2435 | 42 | # 'tool': lambda: 'hio_info', | 37 | # 'tool': lambda: 'hio_info', |
2436 | 43 | # }, | 38 | # }, |
2438 | 44 | 'nvme': tools.Nvme, | 39 | "nvme": tools.Nvme, |
2439 | 45 | } | 40 | } |
2440 | diff --git a/src/lib/hwhealth/hwdiscovery.py b/src/lib/hwhealth/hwdiscovery.py | |||
2441 | index c23ee13..fe27403 100644 | |||
2442 | --- a/src/lib/hwhealth/hwdiscovery.py | |||
2443 | +++ b/src/lib/hwhealth/hwdiscovery.py | |||
2444 | @@ -10,23 +10,23 @@ from hwhealth.discovery.lshw import Hardware | |||
2445 | 10 | from hwhealth.discovery.supported_vendors import ( | 10 | from hwhealth.discovery.supported_vendors import ( |
2446 | 11 | SUPPORTED_STORAGE, | 11 | SUPPORTED_STORAGE, |
2447 | 12 | SUPPORTED_DRIVERS, | 12 | SUPPORTED_DRIVERS, |
2449 | 13 | SUPPORTED_SYSTEMS | 13 | SUPPORTED_SYSTEMS, |
2450 | 14 | ) | 14 | ) |
2451 | 15 | 15 | ||
2452 | 16 | from charmhelpers.core import hookenv | 16 | from charmhelpers.core import hookenv |
2453 | 17 | 17 | ||
2454 | 18 | 18 | ||
2456 | 19 | def get_tools(manufacturer='auto'): | 19 | def get_tools(manufacturer="auto"): |
2457 | 20 | """Return list of tool classes relevent for the current hardware. | 20 | """Return list of tool classes relevent for the current hardware. |
2458 | 21 | 21 | ||
2459 | 22 | In testing, we set manufacturer = test in order to test all tools classes. | 22 | In testing, we set manufacturer = test in order to test all tools classes. |
2460 | 23 | Filtering added for known bad tools that don't work on all series combinations. | 23 | Filtering added for known bad tools that don't work on all series combinations. |
2461 | 24 | """ | 24 | """ |
2463 | 25 | if manufacturer == 'test': | 25 | if manufacturer == "test": |
2464 | 26 | # Return all possible tools to aid testing | 26 | # Return all possible tools to aid testing |
2468 | 27 | storage_tools = {tool | 27 | storage_tools = { |
2469 | 28 | for vendor in SUPPORTED_STORAGE.values() | 28 | tool for vendor in SUPPORTED_STORAGE.values() for tool in vendor.values() |
2470 | 29 | for tool in vendor.values()} | 29 | } |
2471 | 30 | # Some system vendors have multiple tools, have to iterate sets | 30 | # Some system vendors have multiple tools, have to iterate sets |
2472 | 31 | system_tools = set(chain.from_iterable(SUPPORTED_SYSTEMS.values())) | 31 | system_tools = set(chain.from_iterable(SUPPORTED_SYSTEMS.values())) |
2473 | 32 | driver_tools = set(SUPPORTED_DRIVERS.values()) | 32 | driver_tools = set(SUPPORTED_DRIVERS.values()) |
2474 | @@ -35,7 +35,7 @@ def get_tools(manufacturer='auto'): | |||
2475 | 35 | tool for tool in all_tools if tool.is_series_supported() | 35 | tool for tool in all_tools if tool.is_series_supported() |
2476 | 36 | ) | 36 | ) |
2477 | 37 | return series_filtered_tools | 37 | return series_filtered_tools |
2479 | 38 | elif manufacturer == 'auto': | 38 | elif manufacturer == "auto": |
2480 | 39 | return _get_tools() | 39 | return _get_tools() |
2481 | 40 | else: | 40 | else: |
2482 | 41 | raise NotImplementedError | 41 | raise NotImplementedError |
2483 | @@ -46,41 +46,43 @@ def _get_tools(): | |||
2484 | 46 | hwinfo = Hardware() | 46 | hwinfo = Hardware() |
2485 | 47 | toolset = set() | 47 | toolset = set() |
2486 | 48 | for storage in hwinfo.get_storage_class_info: | 48 | for storage in hwinfo.get_storage_class_info: |
2489 | 49 | vendor = storage.get('vendor') | 49 | vendor = storage.get("vendor") |
2490 | 50 | product = storage.get('product') | 50 | product = storage.get("product") |
2491 | 51 | tool = SUPPORTED_STORAGE.get(vendor, {}).get(product) | 51 | tool = SUPPORTED_STORAGE.get(vendor, {}).get(product) |
2492 | 52 | if isinstance(tool, list) or isinstance(tool, set): | 52 | if isinstance(tool, list) or isinstance(tool, set): |
2493 | 53 | toolset.update(tool) | 53 | toolset.update(tool) |
2494 | 54 | elif tool: | 54 | elif tool: |
2495 | 55 | toolset.add(tool) | 55 | toolset.add(tool) |
2496 | 56 | else: | 56 | else: |
2499 | 57 | hookenv.log('Product not supported: [{}][{}]' | 57 | hookenv.log( |
2500 | 58 | ''.format(vendor, product), hookenv.DEBUG) | 58 | "Product not supported: [{}][{}]".format(vendor, product), |
2501 | 59 | hookenv.DEBUG, | ||
2502 | 60 | ) | ||
2503 | 59 | 61 | ||
2505 | 60 | driver = storage.get('driver') | 62 | driver = storage.get("driver") |
2506 | 61 | if driver: | 63 | if driver: |
2507 | 62 | if driver in SUPPORTED_DRIVERS: | 64 | if driver in SUPPORTED_DRIVERS: |
2508 | 63 | toolset.add(SUPPORTED_DRIVERS[driver]) | 65 | toolset.add(SUPPORTED_DRIVERS[driver]) |
2509 | 64 | continue | 66 | continue |
2512 | 65 | hookenv.log('Driver not supported: {}'.format(driver), | 67 | hookenv.log("Driver not supported: {}".format(driver), hookenv.DEBUG) |
2511 | 66 | hookenv.DEBUG) | ||
2513 | 67 | 68 | ||
2514 | 68 | # SW RAID? | 69 | # SW RAID? |
2515 | 69 | if _supports_mdadm(): | 70 | if _supports_mdadm(): |
2516 | 70 | toolset.add(tools.Mdadm) | 71 | toolset.add(tools.Mdadm) |
2517 | 71 | 72 | ||
2519 | 72 | if hookenv.config('enable_ipmi'): | 73 | if hookenv.config("enable_ipmi"): |
2520 | 73 | toolset.add(tools.Ipmi) | 74 | toolset.add(tools.Ipmi) |
2521 | 74 | 75 | ||
2523 | 75 | system_vendor = hwinfo.get_system.get('vendor') | 76 | system_vendor = hwinfo.get_system.get("vendor") |
2524 | 76 | tool = SUPPORTED_SYSTEMS.get(system_vendor) | 77 | tool = SUPPORTED_SYSTEMS.get(system_vendor) |
2525 | 77 | if isinstance(tool, list) or isinstance(tool, set): | 78 | if isinstance(tool, list) or isinstance(tool, set): |
2526 | 78 | toolset.update(tool) | 79 | toolset.update(tool) |
2527 | 79 | elif tool: | 80 | elif tool: |
2528 | 80 | toolset.add(tool) | 81 | toolset.add(tool) |
2529 | 81 | else: | 82 | else: |
2532 | 82 | hookenv.log('System vendor not supported: {}'.format(system_vendor), | 83 | hookenv.log( |
2533 | 83 | hookenv.DEBUG) | 84 | "System vendor not supported: {}".format(system_vendor), hookenv.DEBUG |
2534 | 85 | ) | ||
2535 | 84 | 86 | ||
2536 | 85 | executed_toolset = set([tool() for tool in toolset if tool.is_series_supported]) | 87 | executed_toolset = set([tool() for tool in toolset if tool.is_series_supported]) |
2537 | 86 | return executed_toolset | 88 | return executed_toolset |
2538 | @@ -91,27 +93,24 @@ def _supports_mdadm(): | |||
2539 | 91 | 93 | ||
2540 | 92 | Returns True when the first one is found; otherwise, it returns False) | 94 | Returns True when the first one is found; otherwise, it returns False) |
2541 | 93 | """ | 95 | """ |
2543 | 94 | if os.path.exists('/sbin/mdadm'): | 96 | if os.path.exists("/sbin/mdadm"): |
2544 | 95 | try: | 97 | try: |
2549 | 96 | devices_raw = subprocess.check_output( | 98 | devices_raw = subprocess.check_output(["/sbin/mdadm", "--detail", "--scan"]) |
2550 | 97 | ['/sbin/mdadm', '--detail', '--scan'] | 99 | devices_re = re.compile(r"^ARRAY\s+(\S+) ") |
2547 | 98 | ) | ||
2548 | 99 | devices_re = re.compile(r'^ARRAY\s+(\S+) ') | ||
2551 | 100 | for line in devices_raw.splitlines(): | 100 | for line in devices_raw.splitlines(): |
2552 | 101 | line = line.decode().strip() | 101 | line = line.decode().strip() |
2553 | 102 | raid_dev = devices_re.search(line) | 102 | raid_dev = devices_re.search(line) |
2554 | 103 | if raid_dev: | 103 | if raid_dev: |
2557 | 104 | hookenv.log("Found md raid array {}" | 104 | hookenv.log("Found md raid array {}".format(raid_dev.group(1))) |
2556 | 105 | "".format(raid_dev.group(1))) | ||
2558 | 106 | return True | 105 | return True |
2559 | 107 | except Exception as e: | 106 | except Exception as e: |
2560 | 108 | hookenv.log("mdadm scan failed with {}".format(e)) | 107 | hookenv.log("mdadm scan failed with {}".format(e)) |
2561 | 109 | return False | 108 | return False |
2562 | 110 | 109 | ||
2563 | 111 | 110 | ||
2565 | 112 | if __name__ == '__main__': | 111 | if __name__ == "__main__": |
2566 | 113 | toolset = get_tools() | 112 | toolset = get_tools() |
2567 | 114 | if not toolset: | 113 | if not toolset: |
2569 | 115 | print('No RAID') | 114 | print("No RAID") |
2570 | 116 | else: | 115 | else: |
2571 | 117 | print(toolset) | 116 | print(toolset) |
2572 | diff --git a/src/lib/hwhealth/tools.py b/src/lib/hwhealth/tools.py | |||
2573 | index e90cb6b..da3615d 100644 | |||
2574 | --- a/src/lib/hwhealth/tools.py | |||
2575 | +++ b/src/lib/hwhealth/tools.py | |||
2576 | @@ -24,12 +24,14 @@ from charms import apt | |||
2577 | 24 | class JujuResourceNotFound(Exception): | 24 | class JujuResourceNotFound(Exception): |
2578 | 25 | """Resource needed but not attached | 25 | """Resource needed but not attached |
2579 | 26 | """ | 26 | """ |
2580 | 27 | |||
2581 | 27 | pass | 28 | pass |
2582 | 28 | 29 | ||
2583 | 29 | 30 | ||
2584 | 30 | class ToolError(Exception): | 31 | class ToolError(Exception): |
2585 | 31 | """Allows a dict to be shared with try-except | 32 | """Allows a dict to be shared with try-except |
2586 | 32 | """ | 33 | """ |
2587 | 34 | |||
2588 | 33 | def __init__(self, keymap): | 35 | def __init__(self, keymap): |
2589 | 34 | self._keymap = keymap | 36 | self._keymap = keymap |
2590 | 35 | 37 | ||
2591 | @@ -41,16 +43,18 @@ class ToolError(Exception): | |||
2592 | 41 | class ToolChecksumError(ToolError): | 43 | class ToolChecksumError(ToolError): |
2593 | 42 | """Resource does not match a whitelisted checksum | 44 | """Resource does not match a whitelisted checksum |
2594 | 43 | """ | 45 | """ |
2595 | 46 | |||
2596 | 44 | pass | 47 | pass |
2597 | 45 | 48 | ||
2598 | 46 | 49 | ||
2599 | 47 | class ToolNotFound(ToolError): | 50 | class ToolNotFound(ToolError): |
2600 | 48 | """Resource found (zipfile) but does not contain a needed binary | 51 | """Resource found (zipfile) but does not contain a needed binary |
2601 | 49 | """ | 52 | """ |
2602 | 53 | |||
2603 | 50 | pass | 54 | pass |
2604 | 51 | 55 | ||
2605 | 52 | 56 | ||
2607 | 53 | class Tool(): | 57 | class Tool: |
2608 | 54 | """An abstract class representing a "tool". | 58 | """An abstract class representing a "tool". |
2609 | 55 | 59 | ||
2610 | 56 | The idea is to delegate install/configure duties to specific per-tool | 60 | The idea is to delegate install/configure duties to specific per-tool |
2611 | @@ -59,21 +63,22 @@ class Tool(): | |||
2612 | 59 | Every tool should implement its own internal logic regarding how to be | 63 | Every tool should implement its own internal logic regarding how to be |
2613 | 60 | installed, configured, and removed. | 64 | installed, configured, and removed. |
2614 | 61 | """ | 65 | """ |
2616 | 62 | CROND_DIR = '/etc/cron.d' | 66 | |
2617 | 67 | CROND_DIR = "/etc/cron.d" | ||
2618 | 63 | CRONJOB_SCRIPT_MODE = 0o100755 | 68 | CRONJOB_SCRIPT_MODE = 0o100755 |
2619 | 64 | CRONJOB_SCRIPT_UID = 0 | 69 | CRONJOB_SCRIPT_UID = 0 |
2620 | 65 | CRONJOB_SCRIPT_GID = 0 | 70 | CRONJOB_SCRIPT_GID = 0 |
2623 | 66 | CRONJOB_OUTPUT_DIR = '/var/lib/nagios' | 71 | CRONJOB_OUTPUT_DIR = "/var/lib/nagios" |
2624 | 67 | NRPE_PLUGINS_DIR = '/usr/local/lib/nagios/plugins' | 72 | NRPE_PLUGINS_DIR = "/usr/local/lib/nagios/plugins" |
2625 | 68 | NRPE_PLUGINS_MODE = 0o100755 | 73 | NRPE_PLUGINS_MODE = 0o100755 |
2626 | 69 | NRPE_PLUGINS_UID = 0 | 74 | NRPE_PLUGINS_UID = 0 |
2627 | 70 | NRPE_PLUGINS_GID = 0 | 75 | NRPE_PLUGINS_GID = 0 |
2629 | 71 | SUDOERS_DIR = '/etc/sudoers.d' | 76 | SUDOERS_DIR = "/etc/sudoers.d" |
2630 | 72 | SUDOERS_MODE = 0o100440 | 77 | SUDOERS_MODE = 0o100440 |
2631 | 73 | SUDOERS_UID = 0 | 78 | SUDOERS_UID = 0 |
2632 | 74 | SUDOERS_GID = 0 | 79 | SUDOERS_GID = 0 |
2635 | 75 | SUPPORTED_SERIES = ['xenial', 'bionic', 'focal'] | 80 | SUPPORTED_SERIES = ["xenial", "bionic", "focal"] |
2636 | 76 | TOOLS_DIR = '/usr/local/bin' | 81 | TOOLS_DIR = "/usr/local/bin" |
2637 | 77 | TOOLS_MODE = 0o100755 | 82 | TOOLS_MODE = 0o100755 |
2638 | 78 | TOOLS_UID = 0 | 83 | TOOLS_UID = 0 |
2639 | 79 | TOOLS_GID = 0 | 84 | TOOLS_GID = 0 |
2640 | @@ -81,7 +86,7 @@ class Tool(): | |||
2641 | 81 | def __init__( | 86 | def __init__( |
2642 | 82 | self, | 87 | self, |
2643 | 83 | shortname=None, | 88 | shortname=None, |
2645 | 84 | nrpe_opts='', | 89 | nrpe_opts="", |
2646 | 85 | nrpe_script=None, | 90 | nrpe_script=None, |
2647 | 86 | nrpe_script_dir=None, | 91 | nrpe_script_dir=None, |
2648 | 87 | cron_script=None, | 92 | cron_script=None, |
2649 | @@ -90,25 +95,21 @@ class Tool(): | |||
2650 | 90 | ): | 95 | ): |
2651 | 91 | self._nagios_hostname = nrpe.get_nagios_hostname() | 96 | self._nagios_hostname = nrpe.get_nagios_hostname() |
2652 | 92 | self._nrpe_opts = nrpe_opts | 97 | self._nrpe_opts = nrpe_opts |
2672 | 93 | self._shortname = (shortname if shortname | 98 | self._shortname = shortname if shortname else self.__class__.__name__.lower() |
2673 | 94 | else self.__class__.__name__.lower()) | 99 | self._files_dir = os.path.join(hookenv.charm_dir(), "files", self._shortname) |
2674 | 95 | self._files_dir = os.path.join(hookenv.charm_dir(), | 100 | self._nrpe_script = ( |
2675 | 96 | 'files', | 101 | nrpe_script if nrpe_script else "check_{}.py".format(self._shortname) |
2676 | 97 | self._shortname) | 102 | ) |
2677 | 98 | self._nrpe_script = (nrpe_script if nrpe_script | 103 | self._nrpe_script_dir = nrpe_script_dir if nrpe_script_dir else self._files_dir |
2678 | 99 | else 'check_{}.py'.format(self._shortname)) | 104 | self._cron_script = ( |
2679 | 100 | self._nrpe_script_dir = (nrpe_script_dir if nrpe_script_dir | 105 | cron_script if cron_script else "cron_{}.py".format(self._shortname) |
2680 | 101 | else self._files_dir) | 106 | ) |
2681 | 102 | self._cron_script = (cron_script if cron_script | 107 | self._cron_script_dir = cron_script_dir if cron_script_dir else self._files_dir |
2682 | 103 | else 'cron_{}.py'.format(self._shortname)) | 108 | self._templates_dir = os.path.join( |
2683 | 104 | self._cron_script_dir = (cron_script_dir if cron_script_dir | 109 | hookenv.charm_dir(), "templates", self._shortname |
2684 | 105 | else self._files_dir) | 110 | ) |
2685 | 106 | self._templates_dir = os.path.join(hookenv.charm_dir(), | 111 | self._common_libs_dir = os.path.join(hookenv.charm_dir(), "files/common") |
2686 | 107 | 'templates', | 112 | self._common_libs = ["hw_health_lib.py"] |
2668 | 108 | self._shortname) | ||
2669 | 109 | self._common_libs_dir = os.path.join(hookenv.charm_dir(), | ||
2670 | 110 | 'files/common') | ||
2671 | 111 | self._common_libs = ['hw_health_lib.py'] | ||
2687 | 112 | self._cron_script_args = cron_script_args | 113 | self._cron_script_args = cron_script_args |
2688 | 113 | 114 | ||
2689 | 114 | def _install_nrpe_plugin(self): | 115 | def _install_nrpe_plugin(self): |
2690 | @@ -117,9 +118,9 @@ class Tool(): | |||
2691 | 117 | os.chmod(dst, self.NRPE_PLUGINS_MODE) | 118 | os.chmod(dst, self.NRPE_PLUGINS_MODE) |
2692 | 118 | os.chown(dst, uid=self.NRPE_PLUGINS_UID, gid=self.NRPE_PLUGINS_GID) | 119 | os.chown(dst, uid=self.NRPE_PLUGINS_UID, gid=self.NRPE_PLUGINS_GID) |
2693 | 119 | hookenv.log( | 120 | hookenv.log( |
2697 | 120 | 'NRPE script for tool [{}] installed at as {}' | 121 | "NRPE script for tool [{}] installed at as {}" |
2698 | 121 | ''.format(self._shortname, dst), | 122 | "".format(self._shortname, dst), |
2699 | 122 | hookenv.DEBUG | 123 | hookenv.DEBUG, |
2700 | 123 | ) | 124 | ) |
2701 | 124 | return dst | 125 | return dst |
2702 | 125 | 126 | ||
2703 | @@ -131,9 +132,9 @@ class Tool(): | |||
2704 | 131 | os.chmod(dst, self.NRPE_PLUGINS_MODE) | 132 | os.chmod(dst, self.NRPE_PLUGINS_MODE) |
2705 | 132 | os.chown(dst, uid=self.NRPE_PLUGINS_UID, gid=self.NRPE_PLUGINS_GID) | 133 | os.chown(dst, uid=self.NRPE_PLUGINS_UID, gid=self.NRPE_PLUGINS_GID) |
2706 | 133 | hookenv.log( | 134 | hookenv.log( |
2710 | 134 | 'Common Library {} for tool [{}] installed at as {}' | 135 | "Common Library {} for tool [{}] installed at as {}" |
2711 | 135 | ''.format(lib, self._shortname, dst), | 136 | "".format(lib, self._shortname, dst), |
2712 | 136 | hookenv.DEBUG | 137 | hookenv.DEBUG, |
2713 | 137 | ) | 138 | ) |
2714 | 138 | dsts.append(dst) | 139 | dsts.append(dst) |
2715 | 139 | return dsts | 140 | return dsts |
2716 | @@ -144,48 +145,42 @@ class Tool(): | |||
2717 | 144 | return | 145 | return |
2718 | 145 | os.remove(plugin_path) | 146 | os.remove(plugin_path) |
2719 | 146 | hookenv.log( | 147 | hookenv.log( |
2722 | 147 | 'deleted NRPE script for tool [{}]'.format(self._shortname), | 148 | "deleted NRPE script for tool [{}]".format(self._shortname), hookenv.DEBUG |
2721 | 148 | hookenv.DEBUG | ||
2723 | 149 | ) | 149 | ) |
2724 | 150 | 150 | ||
2725 | 151 | def configure_nrpe_check(self, nrpe_setup): | 151 | def configure_nrpe_check(self, nrpe_setup): |
2727 | 152 | cmd = ' '.join([os.path.basename(self._nrpe_script), self._nrpe_opts]) | 152 | cmd = " ".join([os.path.basename(self._nrpe_script), self._nrpe_opts]) |
2728 | 153 | nrpe_setup.add_check( | 153 | nrpe_setup.add_check( |
2729 | 154 | shortname=self._shortname, | 154 | shortname=self._shortname, |
2731 | 155 | description='{} Hardware Health'.format(self._shortname), | 155 | description="{} Hardware Health".format(self._shortname), |
2732 | 156 | check_cmd=cmd, | 156 | check_cmd=cmd, |
2733 | 157 | ) | 157 | ) |
2734 | 158 | hookenv.log( | 158 | hookenv.log( |
2737 | 159 | 'configured NRPE check for tool [{}]'.format(self._shortname), | 159 | "configured NRPE check for tool [{}]".format(self._shortname), hookenv.DEBUG |
2736 | 160 | hookenv.DEBUG | ||
2738 | 161 | ) | 160 | ) |
2739 | 162 | 161 | ||
2740 | 163 | def remove_nrpe_check(self): | 162 | def remove_nrpe_check(self): |
2741 | 164 | nrpe_setup = nrpe.NRPE(hostname=self._nagios_hostname, primary=False) | 163 | nrpe_setup = nrpe.NRPE(hostname=self._nagios_hostname, primary=False) |
2747 | 165 | cmd = ' '.join([self._nrpe_script, self._nrpe_opts]) | 164 | cmd = " ".join([self._nrpe_script, self._nrpe_opts]) |
2748 | 166 | nrpe_setup.remove_check( | 165 | nrpe_setup.remove_check(shortname=self._shortname, check_cmd=cmd) |
2744 | 167 | shortname=self._shortname, | ||
2745 | 168 | check_cmd=cmd | ||
2746 | 169 | ) | ||
2749 | 170 | nrpe_setup.write() | 166 | nrpe_setup.write() |
2750 | 171 | hookenv.log( | 167 | hookenv.log( |
2753 | 172 | 'removed NRPE check for tool [{}]'.format(self._shortname), | 168 | "removed NRPE check for tool [{}]".format(self._shortname), hookenv.DEBUG |
2752 | 173 | hookenv.DEBUG | ||
2754 | 174 | ) | 169 | ) |
2755 | 175 | 170 | ||
2756 | 176 | def install(self): | 171 | def install(self): |
2757 | 177 | self._install_common_libs() | 172 | self._install_common_libs() |
2758 | 178 | self._install_nrpe_plugin() | 173 | self._install_nrpe_plugin() |
2760 | 179 | hookenv.log('Installed tool [{}]'.format(self._shortname)) | 174 | hookenv.log("Installed tool [{}]".format(self._shortname)) |
2761 | 180 | 175 | ||
2762 | 181 | def remove(self): | 176 | def remove(self): |
2763 | 182 | self.remove_nrpe_check() | 177 | self.remove_nrpe_check() |
2764 | 183 | self._remove_nrpe_plugin() | 178 | self._remove_nrpe_plugin() |
2766 | 184 | hookenv.log('Removed tool [{}]'.format(self._shortname)) | 179 | hookenv.log("Removed tool [{}]".format(self._shortname)) |
2767 | 185 | 180 | ||
2768 | 186 | @classmethod | 181 | @classmethod |
2769 | 187 | def is_series_supported(cls): | 182 | def is_series_supported(cls): |
2771 | 188 | series = lsb_release()['DISTRIB_CODENAME'] | 183 | series = lsb_release()["DISTRIB_CODENAME"] |
2772 | 189 | 184 | ||
2773 | 190 | # BUG(lp#1890652) The following works around xenial layer-apt bug during test | 185 | # BUG(lp#1890652) The following works around xenial layer-apt bug during test |
2774 | 191 | if ( | 186 | if ( |
2775 | @@ -197,7 +192,7 @@ class Tool(): | |||
2776 | 197 | 192 | ||
2777 | 198 | return series in cls.SUPPORTED_SERIES | 193 | return series in cls.SUPPORTED_SERIES |
2778 | 199 | 194 | ||
2780 | 200 | def _install_cronjob(self, cron_user='root'): | 195 | def _install_cronjob(self, cron_user="root"): |
2781 | 201 | assert self._cron_script is not None | 196 | assert self._cron_script is not None |
2782 | 202 | 197 | ||
2783 | 203 | # Copy the cronjob script to the nagios plugins directory | 198 | # Copy the cronjob script to the nagios plugins directory |
2784 | @@ -206,17 +201,15 @@ class Tool(): | |||
2785 | 206 | os.chmod(dst, self.CRONJOB_SCRIPT_MODE) | 201 | os.chmod(dst, self.CRONJOB_SCRIPT_MODE) |
2786 | 207 | os.chown(dst, uid=self.CRONJOB_SCRIPT_UID, gid=self.CRONJOB_SCRIPT_GID) | 202 | os.chown(dst, uid=self.CRONJOB_SCRIPT_UID, gid=self.CRONJOB_SCRIPT_GID) |
2787 | 208 | hookenv.log( | 203 | hookenv.log( |
2791 | 209 | 'Cronjob script [{}] copied to {}' | 204 | "Cronjob script [{}] copied to {}" |
2792 | 210 | ''.format(self._cron_script, self.NRPE_PLUGINS_DIR), | 205 | "".format(self._cron_script, self.NRPE_PLUGINS_DIR), |
2793 | 211 | hookenv.DEBUG | 206 | hookenv.DEBUG, |
2794 | 212 | ) | 207 | ) |
2795 | 213 | 208 | ||
2796 | 214 | cmdline = [dst] | 209 | cmdline = [dst] |
2802 | 215 | if self._cron_script_args \ | 210 | if self._cron_script_args and isinstance(self._cron_script_args, str): |
2803 | 216 | and isinstance(self._cron_script_args, str): | 211 | cmdline.extend([shlex.quote(arg) for arg in self._cron_script_args.split()]) |
2804 | 217 | cmdline.extend([shlex.quote(arg) | 212 | elif hookenv.config("manufacturer") != "test": |
2800 | 218 | for arg in self._cron_script_args.split()]) | ||
2801 | 219 | elif hookenv.config('manufacturer') != 'test': | ||
2805 | 220 | # Run it once to generate the temp file unless we're on a test | 213 | # Run it once to generate the temp file unless we're on a test |
2806 | 221 | # container, otherwise the nrpe check # might fail at first. | 214 | # container, otherwise the nrpe check # might fail at first. |
2807 | 222 | # For security reasons, cronjobs that allow parameters shared | 215 | # For security reasons, cronjobs that allow parameters shared |
2808 | @@ -227,39 +220,32 @@ class Tool(): | |||
2809 | 227 | # Generate random cronjob execution (internal in minutes) | 220 | # Generate random cronjob execution (internal in minutes) |
2810 | 228 | cron_interval = 5 | 221 | cron_interval = 5 |
2811 | 229 | minutes_offsets = [] | 222 | minutes_offsets = [] |
2814 | 230 | minute_num = binascii.crc_hqx( | 223 | minute_num = binascii.crc_hqx("".join(cmdline).encode(), 0) % cron_interval |
2813 | 231 | ''.join(cmdline).encode(), 0) % cron_interval | ||
2815 | 232 | while minute_num < 60: | 224 | while minute_num < 60: |
2816 | 233 | minutes_offsets.append(str(minute_num)) | 225 | minutes_offsets.append(str(minute_num)) |
2817 | 234 | minute_num += cron_interval | 226 | minute_num += cron_interval |
2821 | 235 | cronjob_line = '{minutes} * * * * {user} {cmd}\n'.format( | 227 | cronjob_line = "{minutes} * * * * {user} {cmd}\n".format( |
2822 | 236 | minutes=','.join(minutes_offsets), user=cron_user, | 228 | minutes=",".join(minutes_offsets), user=cron_user, cmd=" ".join(cmdline) |
2823 | 237 | cmd=' '.join(cmdline)) | 229 | ) |
2824 | 238 | 230 | ||
2828 | 239 | crond_file = os.path.join(self.CROND_DIR, | 231 | crond_file = os.path.join(self.CROND_DIR, "hwhealth_{}".format(self._shortname)) |
2829 | 240 | 'hwhealth_{}'.format(self._shortname)) | 232 | with open(crond_file, "w") as crond_fd: |
2827 | 241 | with open(crond_file, 'w') as crond_fd: | ||
2830 | 242 | crond_fd.write(cronjob_line) | 233 | crond_fd.write(cronjob_line) |
2835 | 243 | hookenv.log( | 234 | hookenv.log("Cronjob configured at {}".format(crond_file), hookenv.DEBUG) |
2832 | 244 | 'Cronjob configured at {}'.format(crond_file), | ||
2833 | 245 | hookenv.DEBUG | ||
2834 | 246 | ) | ||
2836 | 247 | return dst | 235 | return dst |
2837 | 248 | 236 | ||
2838 | 249 | def _remove_cronjob(self): | 237 | def _remove_cronjob(self): |
2839 | 250 | assert self._cron_script is not None | 238 | assert self._cron_script is not None |
2840 | 251 | 239 | ||
2843 | 252 | crond_file = os.path.join(self.CROND_DIR, | 240 | crond_file = os.path.join(self.CROND_DIR, "hwhealth_{}".format(self._shortname)) |
2842 | 253 | 'hwhealth_{}'.format(self._shortname)) | ||
2844 | 254 | cron_script = os.path.join(self.NRPE_PLUGINS_DIR, self._cron_script) | 241 | cron_script = os.path.join(self.NRPE_PLUGINS_DIR, self._cron_script) |
2845 | 255 | for filename in (crond_file, cron_script): | 242 | for filename in (crond_file, cron_script): |
2846 | 256 | if not os.path.exists(filename): | 243 | if not os.path.exists(filename): |
2847 | 257 | continue | 244 | continue |
2848 | 258 | os.remove(filename) | 245 | os.remove(filename) |
2849 | 259 | hookenv.log( | 246 | hookenv.log( |
2853 | 260 | 'Removed cronjob files [{}, {}]' | 247 | "Removed cronjob files [{}, {}]".format(crond_file, cron_script), |
2854 | 261 | ''.format(crond_file, cron_script), | 248 | hookenv.DEBUG, |
2852 | 262 | hookenv.DEBUG | ||
2855 | 263 | ) | 249 | ) |
2856 | 264 | 250 | ||
2857 | 265 | def _remove_sudoer(self): | 251 | def _remove_sudoer(self): |
2858 | @@ -267,8 +253,7 @@ class Tool(): | |||
2859 | 267 | if not sudoer_path.exists(): | 253 | if not sudoer_path.exists(): |
2860 | 268 | return | 254 | return |
2861 | 269 | sudoer_path.unlink() | 255 | sudoer_path.unlink() |
2864 | 270 | hookenv.log('deleted sudoer file: {}'.format(sudoer_path), | 256 | hookenv.log("deleted sudoer file: {}".format(sudoer_path), hookenv.DEBUG) |
2863 | 271 | hookenv.DEBUG) | ||
2865 | 272 | 257 | ||
2866 | 273 | 258 | ||
2867 | 274 | class VendorTool(Tool): | 259 | class VendorTool(Tool): |
2868 | @@ -279,6 +264,7 @@ class VendorTool(Tool): | |||
2869 | 279 | cronjob that runs as root and saves the tool output in a temporary file | 264 | cronjob that runs as root and saves the tool output in a temporary file |
2870 | 280 | that nrpe can read (as nagios user). | 265 | that nrpe can read (as nagios user). |
2871 | 281 | """ | 266 | """ |
2872 | 267 | |||
2873 | 282 | def __init__(self, *args, **kwargs): | 268 | def __init__(self, *args, **kwargs): |
2874 | 283 | super().__init__(*args, **kwargs) | 269 | super().__init__(*args, **kwargs) |
2875 | 284 | self.checksums = [] | 270 | self.checksums = [] |
2876 | @@ -294,30 +280,32 @@ class VendorTool(Tool): | |||
2877 | 294 | super().remove() | 280 | super().remove() |
2878 | 295 | 281 | ||
2879 | 296 | def _install_from_resource(self): | 282 | def _install_from_resource(self): |
2881 | 297 | resource = hookenv.resource_get('tools') | 283 | resource = hookenv.resource_get("tools") |
2882 | 298 | if not resource: | 284 | if not resource: |
2884 | 299 | raise JujuResourceNotFound('tools') | 285 | raise JujuResourceNotFound("tools") |
2885 | 300 | else: | 286 | else: |
2886 | 301 | hookenv.log( | 287 | hookenv.log( |
2889 | 302 | 'Installing tool [{}] from resource'.format(self._shortname), | 288 | "Installing tool [{}] from resource".format(self._shortname), |
2890 | 303 | hookenv.DEBUG | 289 | hookenv.DEBUG, |
2891 | 304 | ) | 290 | ) |
2892 | 305 | # Move in from a temp directory to be atomic | 291 | # Move in from a temp directory to be atomic |
2893 | 306 | with TemporaryDirectory() as tmpdir: | 292 | with TemporaryDirectory() as tmpdir: |
2894 | 307 | try: | 293 | try: |
2896 | 308 | with ZipFile(resource, 'r') as zipfile: | 294 | with ZipFile(resource, "r") as zipfile: |
2897 | 309 | tmpfile = zipfile.extract(self._shortname, tmpdir) | 295 | tmpfile = zipfile.extract(self._shortname, tmpdir) |
2898 | 310 | # Verify checksum | 296 | # Verify checksum |
2899 | 311 | checksum = hashlib.sha256() | 297 | checksum = hashlib.sha256() |
2901 | 312 | with open(tmpfile, 'rb') as fd: | 298 | with open(tmpfile, "rb") as fd: |
2902 | 313 | checksum.update(fd.read()) | 299 | checksum.update(fd.read()) |
2903 | 314 | if checksum.hexdigest() not in self.checksums: | 300 | if checksum.hexdigest() not in self.checksums: |
2910 | 315 | checksums_string = ', '.join(self.checksums) | 301 | checksums_string = ", ".join(self.checksums) |
2911 | 316 | raise ToolChecksumError({ | 302 | raise ToolChecksumError( |
2912 | 317 | 'shortname': self._shortname, | 303 | { |
2913 | 318 | 'checksum': checksum.hexdigest(), | 304 | "shortname": self._shortname, |
2914 | 319 | 'expected_checksums': checksums_string | 305 | "checksum": checksum.hexdigest(), |
2915 | 320 | }) | 306 | "expected_checksums": checksums_string, |
2916 | 307 | } | ||
2917 | 308 | ) | ||
2918 | 321 | # We could just use self.TOOLS_DIR as a destination | 309 | # We could just use self.TOOLS_DIR as a destination |
2919 | 322 | # here, but shutil.move refuses to overwrite the | 310 | # here, but shutil.move refuses to overwrite the |
2920 | 323 | # destination file unless it receives a full path | 311 | # destination file unless it receives a full path |
2921 | @@ -327,28 +315,24 @@ class VendorTool(Tool): | |||
2922 | 327 | os.chown(dst, uid=self.TOOLS_UID, gid=self.TOOLS_GID) | 315 | os.chown(dst, uid=self.TOOLS_UID, gid=self.TOOLS_GID) |
2923 | 328 | 316 | ||
2924 | 329 | except BadZipFile as error: | 317 | except BadZipFile as error: |
2926 | 330 | hookenv.log('BadZipFile: {}'.format(error), hookenv.ERROR) | 318 | hookenv.log("BadZipFile: {}".format(error), hookenv.ERROR) |
2927 | 331 | 319 | ||
2928 | 332 | except PermissionError as error: | 320 | except PermissionError as error: |
2929 | 333 | hookenv.log( | 321 | hookenv.log( |
2934 | 334 | 'Unable to unzip tool {} ' | 322 | "Unable to unzip tool {} " |
2935 | 335 | 'from the provided resource: {}' | 323 | "from the provided resource: {}" |
2936 | 336 | ''.format(self._shortname, error), | 324 | "".format(self._shortname, error), |
2937 | 337 | hookenv.ERROR | 325 | hookenv.ERROR, |
2938 | 338 | ) | 326 | ) |
2939 | 339 | except KeyError as error: | 327 | except KeyError as error: |
2942 | 340 | raise ToolNotFound({'shortname': self._shortname, | 328 | raise ToolNotFound({"shortname": self._shortname, "error": error}) |
2941 | 341 | 'error': error}) | ||
2943 | 342 | 329 | ||
2944 | 343 | def _remove_binary(self): | 330 | def _remove_binary(self): |
2945 | 344 | binary_path = Path(self.TOOLS_DIR) / self._shortname | 331 | binary_path = Path(self.TOOLS_DIR) / self._shortname |
2946 | 345 | if not binary_path.exists(): | 332 | if not binary_path.exists(): |
2947 | 346 | return | 333 | return |
2948 | 347 | binary_path.unlink() | 334 | binary_path.unlink() |
2953 | 348 | hookenv.log( | 335 | hookenv.log("Removed binary tool {}".format(binary_path), hookenv.DEBUG) |
2950 | 349 | 'Removed binary tool {}'.format(binary_path), | ||
2951 | 350 | hookenv.DEBUG | ||
2952 | 351 | ) | ||
2954 | 352 | 336 | ||
2955 | 353 | 337 | ||
2956 | 354 | class AptVendorTool(Tool): | 338 | class AptVendorTool(Tool): |
2957 | @@ -402,51 +386,42 @@ class AptVendorTool(Tool): | |||
2958 | 402 | -----END PGP PUBLIC KEY BLOCK-----""" | 386 | -----END PGP PUBLIC KEY BLOCK-----""" |
2959 | 403 | 387 | ||
2960 | 404 | HPE_MCP_REPO_TMPL = ( | 388 | HPE_MCP_REPO_TMPL = ( |
2962 | 405 | 'deb http://downloads.linux.hpe.com/SDR/repo/mcp {series}/current-gen9 non-free' | 389 | "deb http://downloads.linux.hpe.com/SDR/repo/mcp {series}/current-gen9 non-free" |
2963 | 406 | ) | 390 | ) |
2964 | 407 | HPE_ILOREST_REPO_TMPL = ( | 391 | HPE_ILOREST_REPO_TMPL = ( |
2966 | 408 | 'deb http://downloads.linux.hpe.com/SDR/repo/ilorest {series}/current non-free' | 392 | "deb http://downloads.linux.hpe.com/SDR/repo/ilorest {series}/current non-free" |
2967 | 409 | ) | 393 | ) |
2968 | 410 | 394 | ||
2969 | 411 | # HP doesn't have focal APT sources as of yet | 395 | # HP doesn't have focal APT sources as of yet |
2970 | 412 | APT_SOURCES = { | 396 | APT_SOURCES = { |
2974 | 413 | 'ssacli': { | 397 | "ssacli": { |
2975 | 414 | 'xenial': HPE_MCP_REPO_TMPL.format(series='xenial'), | 398 | "xenial": HPE_MCP_REPO_TMPL.format(series="xenial"), |
2976 | 415 | 'bionic': HPE_MCP_REPO_TMPL.format(series='bionic'), | 399 | "bionic": HPE_MCP_REPO_TMPL.format(series="bionic"), |
2977 | 416 | }, | 400 | }, |
2981 | 417 | 'ilorest': { | 401 | "ilorest": { |
2982 | 418 | 'xenial': HPE_ILOREST_REPO_TMPL.format(series='xenial'), | 402 | "xenial": HPE_ILOREST_REPO_TMPL.format(series="xenial"), |
2983 | 419 | 'bionic': HPE_ILOREST_REPO_TMPL.format(series='bionic'), | 403 | "bionic": HPE_ILOREST_REPO_TMPL.format(series="bionic"), |
2984 | 420 | }, | 404 | }, |
2988 | 421 | 'hplog': { | 405 | "hplog": { |
2989 | 422 | 'xenial': HPE_MCP_REPO_TMPL.format(series='xenial'), | 406 | "xenial": HPE_MCP_REPO_TMPL.format(series="xenial"), |
2990 | 423 | 'bionic': HPE_MCP_REPO_TMPL.format(series='bionic'), | 407 | "bionic": HPE_MCP_REPO_TMPL.format(series="bionic"), |
2991 | 424 | }, | 408 | }, |
2992 | 425 | } | 409 | } |
2993 | 426 | APT_KEYS = { | 410 | APT_KEYS = { |
3006 | 427 | 'ssacli': { | 411 | "ssacli": {"xenial": HPE_MCP_KEY, "bionic": HPE_MCP_KEY}, |
3007 | 428 | 'xenial': HPE_MCP_KEY, | 412 | "ilorest": {"xenial": HPE_ILOREST_KEY, "bionic": HPE_ILOREST_KEY}, |
3008 | 429 | 'bionic': HPE_MCP_KEY, | 413 | "hplog": {"xenial": HPE_MCP_KEY, "bionic": HPE_MCP_KEY}, |
2997 | 430 | }, | ||
2998 | 431 | 'ilorest': { | ||
2999 | 432 | 'xenial': HPE_ILOREST_KEY, | ||
3000 | 433 | 'bionic': HPE_ILOREST_KEY, | ||
3001 | 434 | }, | ||
3002 | 435 | 'hplog': { | ||
3003 | 436 | 'xenial': HPE_MCP_KEY, | ||
3004 | 437 | 'bionic': HPE_MCP_KEY, | ||
3005 | 438 | }, | ||
3009 | 439 | } | 414 | } |
3010 | 440 | 415 | ||
3011 | 441 | def __init__(self, shortname=None, apt_packages=[]): | 416 | def __init__(self, shortname=None, apt_packages=[]): |
3012 | 442 | super().__init__( | 417 | super().__init__( |
3013 | 443 | shortname=shortname, | 418 | shortname=shortname, |
3017 | 444 | nrpe_script='check_hw_health_cron_output.py', | 419 | nrpe_script="check_hw_health_cron_output.py", |
3018 | 445 | nrpe_opts='--filename {}/{}.out'.format(self.CRONJOB_OUTPUT_DIR, | 420 | nrpe_opts="--filename {}/{}.out".format(self.CRONJOB_OUTPUT_DIR, shortname), |
3019 | 446 | shortname) | 421 | ) |
3020 | 422 | self.apt_packages = ( | ||
3021 | 423 | apt_packages if apt_packages else [self.__class__.__name__.lower()] | ||
3022 | 447 | ) | 424 | ) |
3023 | 448 | self.apt_packages = (apt_packages if apt_packages | ||
3024 | 449 | else [self.__class__.__name__.lower()]) | ||
3025 | 450 | self._nrpe_script_dir = self._common_libs_dir | 425 | self._nrpe_script_dir = self._common_libs_dir |
3026 | 451 | 426 | ||
3027 | 452 | def install(self): | 427 | def install(self): |
3028 | @@ -471,38 +446,45 @@ class AptVendorTool(Tool): | |||
3029 | 471 | hardware present on the system. | 446 | hardware present on the system. |
3030 | 472 | """ | 447 | """ |
3031 | 473 | self._add_apt_source() | 448 | self._add_apt_source() |
3033 | 474 | if hookenv.config('manufacturer') == 'test': | 449 | if hookenv.config("manufacturer") == "test": |
3034 | 475 | # If we are forcing install on a container for functional tests, | 450 | # If we are forcing install on a container for functional tests, |
3035 | 476 | # we should only download and not install the packages, as some | 451 | # we should only download and not install the packages, as some |
3036 | 477 | # vendor tools depend on hardware to be preset to complete postinst | 452 | # vendor tools depend on hardware to be preset to complete postinst |
3037 | 478 | # need one option added per package | 453 | # need one option added per package |
3039 | 479 | apt.queue_install(self.apt_packages, options=["--download-only" for _ in self.apt_packages]) | 454 | apt.queue_install( |
3040 | 455 | self.apt_packages, | ||
3041 | 456 | options=["--download-only" for _ in self.apt_packages], | ||
3042 | 457 | ) | ||
3043 | 480 | else: | 458 | else: |
3044 | 481 | apt.queue_install(self.apt_packages) | 459 | apt.queue_install(self.apt_packages) |
3045 | 482 | 460 | ||
3046 | 483 | def _add_apt_source(self): | 461 | def _add_apt_source(self): |
3049 | 484 | series = lsb_release()['DISTRIB_CODENAME'] | 462 | series = lsb_release()["DISTRIB_CODENAME"] |
3050 | 485 | if self._shortname not in self.APT_SOURCES and self._shortname not in self.APT_KEYS: | 463 | if ( |
3051 | 464 | self._shortname not in self.APT_SOURCES | ||
3052 | 465 | and self._shortname not in self.APT_KEYS | ||
3053 | 466 | ): | ||
3054 | 486 | return | 467 | return |
3055 | 487 | if series in self.APT_SOURCES[self._shortname]: | 468 | if series in self.APT_SOURCES[self._shortname]: |
3058 | 488 | apt.add_source(self.APT_SOURCES[self._shortname][series], | 469 | apt.add_source( |
3059 | 489 | key=self.APT_KEYS[self._shortname][series]) | 470 | self.APT_SOURCES[self._shortname][series], |
3060 | 471 | key=self.APT_KEYS[self._shortname][series], | ||
3061 | 472 | ) | ||
3062 | 490 | 473 | ||
3063 | 491 | def _remove_packages(self): | 474 | def _remove_packages(self): |
3064 | 492 | apt.purge(self.apt_packages) | 475 | apt.purge(self.apt_packages) |
3065 | 493 | 476 | ||
3066 | 494 | def install_cronjob(self): | 477 | def install_cronjob(self): |
3067 | 495 | hookenv.log( | 478 | hookenv.log( |
3071 | 496 | 'Attempting AptVendorTool cronjob script install [{}]' | 479 | "Attempting AptVendorTool cronjob script install [{}]" |
3072 | 497 | ''.format(self._cron_script), | 480 | "".format(self._cron_script), |
3073 | 498 | hookenv.DEBUG | 481 | hookenv.DEBUG, |
3074 | 499 | ) | 482 | ) |
3075 | 500 | # Don't install a cronjob until the tools are installed | 483 | # Don't install a cronjob until the tools are installed |
3076 | 501 | if self.is_apt_installed(): | 484 | if self.is_apt_installed(): |
3077 | 502 | hookenv.log( | 485 | hookenv.log( |
3081 | 503 | 'calling _install_cronjob for {}' | 486 | "calling _install_cronjob for {}".format(self._cron_script), |
3082 | 504 | ''.format(self._cron_script), | 487 | hookenv.DEBUG, |
3080 | 505 | hookenv.DEBUG | ||
3083 | 506 | ) | 488 | ) |
3084 | 507 | self._install_cronjob() | 489 | self._install_cronjob() |
3085 | 508 | 490 | ||
3086 | @@ -521,7 +503,7 @@ class AptVendorTool(Tool): | |||
3087 | 521 | allows the reactive layer to know when the apt packages have finished | 503 | allows the reactive layer to know when the apt packages have finished |
3088 | 522 | installing so that it may go on to configure the nrpe layer of tools | 504 | installing so that it may go on to configure the nrpe layer of tools |
3089 | 523 | """ | 505 | """ |
3091 | 524 | if hookenv.config('manufacturer') == 'test': | 506 | if hookenv.config("manufacturer") == "test": |
3092 | 525 | # it is okay to skip this part in testing as layer-apt will block | 507 | # it is okay to skip this part in testing as layer-apt will block |
3093 | 526 | # if the package is not downloadable from sources | 508 | # if the package is not downloadable from sources |
3094 | 527 | return True | 509 | return True |
3095 | @@ -537,11 +519,12 @@ class Sas3Ircu(VendorTool): | |||
3096 | 537 | 519 | ||
3097 | 538 | This is a tool supporting the LSI SAS 12Gb/s controllers | 520 | This is a tool supporting the LSI SAS 12Gb/s controllers |
3098 | 539 | """ | 521 | """ |
3099 | 522 | |||
3100 | 540 | def __init__(self): | 523 | def __init__(self): |
3102 | 541 | super().__init__(cron_script='cron_sas3ircu.sh') | 524 | super().__init__(cron_script="cron_sas3ircu.sh") |
3103 | 542 | self.checksums = [ | 525 | self.checksums = [ |
3106 | 543 | 'f150eb37bb332668949a3eccf9636e0e03f874aecd17a39d586082c6be1386bd', | 526 | "f150eb37bb332668949a3eccf9636e0e03f874aecd17a39d586082c6be1386bd", |
3107 | 544 | 'd69967057992134df1b136f83bc775a641e32c4efc741def3ef6f6a25a9a14b5', | 527 | "d69967057992134df1b136f83bc775a641e32c4efc741def3ef6f6a25a9a14b5", |
3108 | 545 | ] | 528 | ] |
3109 | 546 | 529 | ||
3110 | 547 | 530 | ||
3111 | @@ -550,9 +533,12 @@ class Sas2Ircu(VendorTool): | |||
3112 | 550 | 533 | ||
3113 | 551 | This is a tool supporting the LSI SAS 6Gb/s controllers | 534 | This is a tool supporting the LSI SAS 6Gb/s controllers |
3114 | 552 | """ | 535 | """ |
3115 | 536 | |||
3116 | 553 | def __init__(self): | 537 | def __init__(self): |
3119 | 554 | super().__init__(cron_script='cron_sas2ircu.sh') | 538 | super().__init__(cron_script="cron_sas2ircu.sh") |
3120 | 555 | self.checksums = ['37467826d0b22aad47287efe70bb34e47f475d70e9b1b64cbd63f57607701e73'] # noqa: E501 | 539 | self.checksums = [ |
3121 | 540 | "37467826d0b22aad47287efe70bb34e47f475d70e9b1b64cbd63f57607701e73" | ||
3122 | 541 | ] # noqa: E501 | ||
3123 | 556 | 542 | ||
3124 | 557 | 543 | ||
3125 | 558 | class MegaCLI(VendorTool): | 544 | class MegaCLI(VendorTool): |
3126 | @@ -560,12 +546,13 @@ class MegaCLI(VendorTool): | |||
3127 | 560 | 546 | ||
3128 | 561 | This is a tool supporting the LSI MegaRAID SAS controllers | 547 | This is a tool supporting the LSI MegaRAID SAS controllers |
3129 | 562 | """ | 548 | """ |
3130 | 549 | |||
3131 | 563 | def __init__(self): | 550 | def __init__(self): |
3133 | 564 | super().__init__(cron_script='cron_megacli.sh') | 551 | super().__init__(cron_script="cron_megacli.sh") |
3134 | 565 | self.checksums = [ | 552 | self.checksums = [ |
3138 | 566 | '34f1a235543662615ee35f458317380b3f89fac0e415dee755e0dbc7c4cf6f92', | 553 | "34f1a235543662615ee35f458317380b3f89fac0e415dee755e0dbc7c4cf6f92", |
3139 | 567 | '1c4effe33ee5db82227e05925dd629771fd49c7d2be2382d48c48a864452cdec', | 554 | "1c4effe33ee5db82227e05925dd629771fd49c7d2be2382d48c48a864452cdec", |
3140 | 568 | '1a68e6646d1e3dfb7039f581be994500d0ed02de2f928e57399e86473d4c8662', | 555 | "1a68e6646d1e3dfb7039f581be994500d0ed02de2f928e57399e86473d4c8662", |
3141 | 569 | ] | 556 | ] |
3142 | 570 | 557 | ||
3143 | 571 | 558 | ||
3144 | @@ -575,10 +562,10 @@ class HpLog(AptVendorTool): | |||
3145 | 575 | This is a tool supporting the LSI MegaRAID SAS controllers | 562 | This is a tool supporting the LSI MegaRAID SAS controllers |
3146 | 576 | """ | 563 | """ |
3147 | 577 | 564 | ||
3149 | 578 | SUPPORTED_SERIES = ['xenial', 'bionic'] | 565 | SUPPORTED_SERIES = ["xenial", "bionic"] |
3150 | 579 | 566 | ||
3151 | 580 | def __init__(self): | 567 | def __init__(self): |
3153 | 581 | super().__init__(apt_packages=['hp-health']) | 568 | super().__init__(apt_packages=["hp-health"]) |
3154 | 582 | 569 | ||
3155 | 583 | 570 | ||
3156 | 584 | class SsaCli(AptVendorTool): | 571 | class SsaCli(AptVendorTool): |
3157 | @@ -587,7 +574,7 @@ class SsaCli(AptVendorTool): | |||
3158 | 587 | This is a tool supporting the HP Smart Array controllers | 574 | This is a tool supporting the HP Smart Array controllers |
3159 | 588 | """ | 575 | """ |
3160 | 589 | 576 | ||
3162 | 590 | SUPPORTED_SERIES = ['xenial', 'bionic'] | 577 | SUPPORTED_SERIES = ["xenial", "bionic"] |
3163 | 591 | 578 | ||
3164 | 592 | def __init__(self): | 579 | def __init__(self): |
3165 | 593 | super().__init__() | 580 | super().__init__() |
3166 | @@ -597,7 +584,7 @@ class ILOrest(AptVendorTool): | |||
3167 | 597 | """A class representing the ILOrest vendor tool (HPE hardware (Gen 10+) | 584 | """A class representing the ILOrest vendor tool (HPE hardware (Gen 10+) |
3168 | 598 | """ | 585 | """ |
3169 | 599 | 586 | ||
3171 | 600 | SUPPORTED_SERIES = ['xenial', 'bionic'] | 587 | SUPPORTED_SERIES = ["xenial", "bionic"] |
3172 | 601 | 588 | ||
3173 | 602 | def __init__(self): | 589 | def __init__(self): |
3174 | 603 | super().__init__() | 590 | super().__init__() |
3175 | @@ -609,12 +596,13 @@ class Mdadm(VendorTool): | |||
3176 | 609 | Our mdadm check kind of behaves like a VendorTool for the purpose of | 596 | Our mdadm check kind of behaves like a VendorTool for the purpose of |
3177 | 610 | installation as it has a cronjob + check script | 597 | installation as it has a cronjob + check script |
3178 | 611 | """ | 598 | """ |
3179 | 599 | |||
3180 | 612 | def __init__(self): | 600 | def __init__(self): |
3181 | 613 | super().__init__() | 601 | super().__init__() |
3182 | 614 | 602 | ||
3183 | 615 | def install(self): | 603 | def install(self): |
3184 | 616 | # mdadm should already be installed, but let's check | 604 | # mdadm should already be installed, but let's check |
3186 | 617 | fetch.apt_install(['mdadm'], fatal=True) | 605 | fetch.apt_install(["mdadm"], fatal=True) |
3187 | 618 | self._install_cronjob() | 606 | self._install_cronjob() |
3188 | 619 | # No vendor binary to install | 607 | # No vendor binary to install |
3189 | 620 | Tool.install(self) | 608 | Tool.install(self) |
3190 | @@ -634,12 +622,13 @@ class Ipmi(Tool): | |||
3191 | 634 | install; the plugin relies only on freeipmi, a few perl modules, and the | 622 | install; the plugin relies only on freeipmi, a few perl modules, and the |
3192 | 635 | actual nrpe check, which is imported as a git submodule | 623 | actual nrpe check, which is imported as a git submodule |
3193 | 636 | """ | 624 | """ |
3195 | 637 | def __init__(self, nrpe_opts=''): | 625 | |
3196 | 626 | def __init__(self, nrpe_opts=""): | ||
3197 | 638 | super().__init__( | 627 | super().__init__( |
3200 | 639 | cron_script='cron_ipmi_sensors.py', | 628 | cron_script="cron_ipmi_sensors.py", |
3201 | 640 | cron_script_args=hookenv.config('ipmi_check_options') | 629 | cron_script_args=hookenv.config("ipmi_check_options"), |
3202 | 641 | ) | 630 | ) |
3204 | 642 | self._sudoer_file = '99-check_ipmi_sensor' | 631 | self._sudoer_file = "99-check_ipmi_sensor" |
3205 | 643 | 632 | ||
3206 | 644 | def configure_nrpe_check(self, nrpe_setup): | 633 | def configure_nrpe_check(self, nrpe_setup): |
3207 | 645 | # extra options for check_ipmi_sensors Perl script are configured in | 634 | # extra options for check_ipmi_sensors Perl script are configured in |
3208 | @@ -652,7 +641,7 @@ class Ipmi(Tool): | |||
3209 | 652 | self._install_sudoer() | 641 | self._install_sudoer() |
3210 | 653 | # Install Perl script called by the (Python) cronjob | 642 | # Install Perl script called by the (Python) cronjob |
3211 | 654 | self._install_nrpe_helper_plugin() | 643 | self._install_nrpe_helper_plugin() |
3213 | 655 | self._install_cronjob(cron_user='nagios') | 644 | self._install_cronjob(cron_user="nagios") |
3214 | 656 | 645 | ||
3215 | 657 | # Install the Python script called by check_nrpe | 646 | # Install the Python script called by check_nrpe |
3216 | 658 | super().install() | 647 | super().install() |
3217 | @@ -665,13 +654,13 @@ class Ipmi(Tool): | |||
3218 | 665 | 654 | ||
3219 | 666 | def _install_nrpe_helper_plugin(self): | 655 | def _install_nrpe_helper_plugin(self): |
3220 | 667 | original_nrpe_script = self._nrpe_script | 656 | original_nrpe_script = self._nrpe_script |
3222 | 668 | self._nrpe_script = 'check_ipmi_sensor' | 657 | self._nrpe_script = "check_ipmi_sensor" |
3223 | 669 | super()._install_nrpe_plugin() | 658 | super()._install_nrpe_plugin() |
3224 | 670 | self._nrpe_script = original_nrpe_script | 659 | self._nrpe_script = original_nrpe_script |
3225 | 671 | 660 | ||
3226 | 672 | def _remove_nrpe_helper_plugin(self): | 661 | def _remove_nrpe_helper_plugin(self): |
3227 | 673 | original_nrpe_script = self._nrpe_script | 662 | original_nrpe_script = self._nrpe_script |
3229 | 674 | self._nrpe_script = 'check_ipmi_sensor' | 663 | self._nrpe_script = "check_ipmi_sensor" |
3230 | 675 | super()._remove_nrpe_plugin() | 664 | super()._remove_nrpe_plugin() |
3231 | 676 | self._nrpe_script = original_nrpe_script | 665 | self._nrpe_script = original_nrpe_script |
3232 | 677 | 666 | ||
3233 | @@ -683,9 +672,8 @@ class Ipmi(Tool): | |||
3234 | 683 | os.chmod(dst, self.SUDOERS_MODE) | 672 | os.chmod(dst, self.SUDOERS_MODE) |
3235 | 684 | os.chown(dst, uid=self.SUDOERS_UID, gid=self.SUDOERS_GID) | 673 | os.chown(dst, uid=self.SUDOERS_UID, gid=self.SUDOERS_GID) |
3236 | 685 | hookenv.log( | 674 | hookenv.log( |
3240 | 686 | 'sudoer file for tool [{}] installed at {}' | 675 | "sudoer file for tool [{}] installed at {}".format(self._shortname, dst), |
3241 | 687 | ''.format(self._shortname, dst), | 676 | hookenv.DEBUG, |
3239 | 688 | hookenv.DEBUG | ||
3242 | 689 | ) | 677 | ) |
3243 | 690 | return dst | 678 | return dst |
3244 | 691 | 679 | ||
3245 | @@ -696,15 +684,16 @@ class Nvme(Tool): | |||
3246 | 696 | This is a direct subclass of Tool because unlike a VendorTool we are not | 684 | This is a direct subclass of Tool because unlike a VendorTool we are not |
3247 | 697 | using a cronjob script | 685 | using a cronjob script |
3248 | 698 | """ | 686 | """ |
3249 | 687 | |||
3250 | 699 | def __init__(self): | 688 | def __init__(self): |
3251 | 700 | super().__init__() | 689 | super().__init__() |
3254 | 701 | self._sudoer_template = '99-check_nvme.tmpl' | 690 | self._sudoer_template = "99-check_nvme.tmpl" |
3255 | 702 | self._sudoer_file = '99-check_nvme' | 691 | self._sudoer_file = "99-check_nvme" |
3256 | 703 | self._cron_script = None | 692 | self._cron_script = None |
3257 | 704 | 693 | ||
3258 | 705 | def install(self): | 694 | def install(self): |
3259 | 706 | # mdadm should already be installed, but let's check | 695 | # mdadm should already be installed, but let's check |
3261 | 707 | fetch.apt_install(['nvme-cli'], fatal=True) | 696 | fetch.apt_install(["nvme-cli"], fatal=True) |
3262 | 708 | self._render_sudoer() | 697 | self._render_sudoer() |
3263 | 709 | super().install() | 698 | super().install() |
3264 | 710 | 699 | ||
3265 | @@ -722,24 +711,29 @@ class Nvme(Tool): | |||
3266 | 722 | if not devices: | 711 | if not devices: |
3267 | 723 | return | 712 | return |
3268 | 724 | 713 | ||
3276 | 725 | devices = dict([('CHECK{}'.format(dev.replace('/', '_').upper()), dev) | 714 | devices = dict( |
3277 | 726 | for dev in devices]) | 715 | [("CHECK{}".format(dev.replace("/", "_").upper()), dev) for dev in devices] |
3278 | 727 | ctxt = {'devices': devices} | 716 | ) |
3279 | 728 | ctxt['devices_cmnd_aliases'] = ', '.join(devices.keys()) | 717 | ctxt = {"devices": devices} |
3280 | 729 | 718 | ctxt["devices_cmnd_aliases"] = ", ".join(devices.keys()) | |
3281 | 730 | render(source=src, target=dst, context=ctxt, perms=self.SUDOERS_MODE, | 719 | |
3282 | 731 | templates_dir=None) | 720 | render( |
3283 | 721 | source=src, | ||
3284 | 722 | target=dst, | ||
3285 | 723 | context=ctxt, | ||
3286 | 724 | perms=self.SUDOERS_MODE, | ||
3287 | 725 | templates_dir=None, | ||
3288 | 726 | ) | ||
3289 | 732 | hookenv.log( | 727 | hookenv.log( |
3293 | 733 | 'sudoer file for tool [{}] installed at {}'.format(self._shortname, | 728 | "sudoer file for tool [{}] installed at {}".format(self._shortname, dst), |
3294 | 734 | dst), | 729 | hookenv.DEBUG, |
3292 | 735 | hookenv.DEBUG | ||
3295 | 736 | ) | 730 | ) |
3296 | 737 | return dst | 731 | return dst |
3297 | 738 | 732 | ||
3298 | 739 | def __get_nvme_devices(self): | 733 | def __get_nvme_devices(self): |
3299 | 740 | devices = [] | 734 | devices = [] |
3302 | 741 | for device in glob.glob('/dev/nvme*'): | 735 | for device in glob.glob("/dev/nvme*"): |
3303 | 742 | nvme_re = re.match(r'^/dev/nvme\d+$', device) | 736 | nvme_re = re.match(r"^/dev/nvme\d+$", device) |
3304 | 743 | if not nvme_re: | 737 | if not nvme_re: |
3305 | 744 | continue | 738 | continue |
3306 | 745 | devices.append(nvme_re.group()) | 739 | devices.append(nvme_re.group()) |
3307 | diff --git a/src/reactive/hw_health.py b/src/reactive/hw_health.py | |||
3308 | index a04f790..ff72e85 100644 | |||
3309 | --- a/src/reactive/hw_health.py | |||
3310 | +++ b/src/reactive/hw_health.py | |||
3311 | @@ -12,167 +12,176 @@ from hwhealth import tools | |||
3312 | 12 | 12 | ||
3313 | 13 | def _set_install_status(tool): | 13 | def _set_install_status(tool): |
3314 | 14 | if isinstance(tool, tools.VendorTool) and not isinstance(tool, tools.Mdadm): | 14 | if isinstance(tool, tools.VendorTool) and not isinstance(tool, tools.Mdadm): |
3316 | 15 | status.maintenance('Installing from attached resource') | 15 | status.maintenance("Installing from attached resource") |
3317 | 16 | elif isinstance(tool, tools.AptVendorTool): | 16 | elif isinstance(tool, tools.AptVendorTool): |
3320 | 17 | status.maintenance('Installing vendor tools via apt') | 17 | status.maintenance("Installing vendor tools via apt") |
3321 | 18 | set_flag('hw-health.wait-for-vendor-apt') | 18 | set_flag("hw-health.wait-for-vendor-apt") |
3322 | 19 | 19 | ||
3323 | 20 | 20 | ||
3327 | 21 | @when_none('hw-health.installed', 'hw-health.unsupported') | 21 | @when_none("hw-health.installed", "hw-health.unsupported") |
3328 | 22 | @when('nrpe-external-master.available') | 22 | @when("nrpe-external-master.available") |
3329 | 23 | @when('general-info.connected') | 23 | @when("general-info.connected") |
3330 | 24 | def install(): | 24 | def install(): |
3335 | 25 | manufacturer = hookenv.config('manufacturer') | 25 | manufacturer = hookenv.config("manufacturer") |
3336 | 26 | if host.is_container() and manufacturer != 'test': | 26 | if host.is_container() and manufacturer != "test": |
3337 | 27 | status.blocked('Containers are not supported') | 27 | status.blocked("Containers are not supported") |
3338 | 28 | set_flag('hw-health.unsupported') | 28 | set_flag("hw-health.unsupported") |
3339 | 29 | return | 29 | return |
3340 | 30 | 30 | ||
3343 | 31 | if manufacturer not in ['auto', 'test']: | 31 | if manufacturer not in ["auto", "test"]: |
3344 | 32 | status.blocked('manufacturer needs to be set to auto') | 32 | status.blocked("manufacturer needs to be set to auto") |
3345 | 33 | return | 33 | return |
3346 | 34 | 34 | ||
3347 | 35 | # Detect hardware and return a list of tools we need to use | 35 | # Detect hardware and return a list of tools we need to use |
3349 | 36 | status.maintenance('Autodiscovering hardware') | 36 | status.maintenance("Autodiscovering hardware") |
3350 | 37 | toolset = get_tools(manufacturer) | 37 | toolset = get_tools(manufacturer) |
3351 | 38 | if not toolset: | 38 | if not toolset: |
3354 | 39 | status.blocked('Hardware not supported') | 39 | status.blocked("Hardware not supported") |
3355 | 40 | set_flag('hw-health.unsupported') | 40 | set_flag("hw-health.unsupported") |
3356 | 41 | else: | 41 | else: |
3357 | 42 | try: | 42 | try: |
3358 | 43 | tool_list = list() | 43 | tool_list = list() |
3359 | 44 | for toolClass in toolset: | 44 | for toolClass in toolset: |
3360 | 45 | tool = toolClass() | 45 | tool = toolClass() |
3361 | 46 | _set_install_status(tool) | 46 | _set_install_status(tool) |
3363 | 47 | status.maintenance('Installing tool {}'.format(type(tool).__name__)) | 47 | status.maintenance("Installing tool {}".format(type(tool).__name__)) |
3364 | 48 | tool.install() | 48 | tool.install() |
3365 | 49 | # Save the class name in the unit kv db. This will be reused when | 49 | # Save the class name in the unit kv db. This will be reused when |
3366 | 50 | # reconfiguring or removing the checks | 50 | # reconfiguring or removing the checks |
3367 | 51 | tool_list.append(type(tool).__name__) | 51 | tool_list.append(type(tool).__name__) |
3368 | 52 | unitdb = unitdata.kv() | 52 | unitdb = unitdata.kv() |
3371 | 53 | unitdb.set('toolset', tool_list) | 53 | unitdb.set("toolset", tool_list) |
3372 | 54 | set_flag('hw-health.installed') | 54 | set_flag("hw-health.installed") |
3373 | 55 | except tools.JujuResourceNotFound as error: | 55 | except tools.JujuResourceNotFound as error: |
3378 | 56 | hookenv.log('Missing Juju resource: {} - alternative method is not ' | 56 | hookenv.log( |
3379 | 57 | ' available yet'.format(error), hookenv.ERROR) | 57 | "Missing Juju resource: {} - alternative method is not " |
3380 | 58 | status.blocked('Missing Juju resource: {}'.format(error)) | 58 | " available yet".format(error), |
3381 | 59 | set_flag('hw-health.unsupported') | 59 | hookenv.ERROR, |
3382 | 60 | ) | ||
3383 | 61 | status.blocked("Missing Juju resource: {}".format(error)) | ||
3384 | 62 | set_flag("hw-health.unsupported") | ||
3385 | 60 | except tools.ToolChecksumError as error: | 63 | except tools.ToolChecksumError as error: |
3386 | 61 | msg = error.message | 64 | msg = error.message |
3391 | 62 | hookenv.log('checksum error: tool [{shortname}], checksum[{checksum}],' | 65 | hookenv.log( |
3392 | 63 | ' expected[{expected_checksums}]'.format(**msg), hookenv.ERROR) | 66 | "checksum error: tool [{shortname}], checksum[{checksum}]," |
3393 | 64 | status.blocked('Tool {shortname} - checksum error'.format(**msg)) | 67 | " expected[{expected_checksums}]".format(**msg), |
3394 | 65 | set_flag('hw-health.unsupported') | 68 | hookenv.ERROR, |
3395 | 69 | ) | ||
3396 | 70 | status.blocked("Tool {shortname} - checksum error".format(**msg)) | ||
3397 | 71 | set_flag("hw-health.unsupported") | ||
3398 | 66 | except tools.ToolNotFound as error: | 72 | except tools.ToolNotFound as error: |
3399 | 67 | msg = error.message | 73 | msg = error.message |
3404 | 68 | hookenv.log('Tool {shortname} not found in the provided resource: ' | 74 | hookenv.log( |
3405 | 69 | '{error}'.format(**msg), hookenv.ERROR) | 75 | "Tool {shortname} not found in the provided resource: " |
3406 | 70 | status.blocked('Tool {shortname} not found'.format(**msg)) | 76 | "{error}".format(**msg), |
3407 | 71 | set_flag('hw-health.unsupported') | 77 | hookenv.ERROR, |
3408 | 78 | ) | ||
3409 | 79 | status.blocked("Tool {shortname} not found".format(**msg)) | ||
3410 | 80 | set_flag("hw-health.unsupported") | ||
3411 | 72 | 81 | ||
3412 | 73 | 82 | ||
3414 | 74 | @hook('upgrade-charm') | 83 | @hook("upgrade-charm") |
3415 | 75 | def upgrade(): | 84 | def upgrade(): |
3420 | 76 | clear_flag('hw-health.installed') | 85 | clear_flag("hw-health.installed") |
3421 | 77 | clear_flag('hw-health.unsupported') | 86 | clear_flag("hw-health.unsupported") |
3422 | 78 | clear_flag('hw-health.configured') | 87 | clear_flag("hw-health.configured") |
3423 | 79 | status.maintenance('Charm upgrade in progress') | 88 | status.maintenance("Charm upgrade in progress") |
3424 | 80 | 89 | ||
3425 | 81 | 90 | ||
3428 | 82 | @when('hw-health.installed') | 91 | @when("hw-health.installed") |
3429 | 83 | @when_not('general-info.available') | 92 | @when_not("general-info.available") |
3430 | 84 | def remove_tools(): | 93 | def remove_tools(): |
3431 | 85 | # If general-info is unavailable, the subordinate relationship towards the | 94 | # If general-info is unavailable, the subordinate relationship towards the |
3432 | 86 | # principal charm has been broken, so we need to remove the installed tools | 95 | # principal charm has been broken, so we need to remove the installed tools |
3433 | 87 | unitdb = unitdata.kv() | 96 | unitdb = unitdata.kv() |
3435 | 88 | for tool_class_name in unitdb.get('toolset', set()): | 97 | for tool_class_name in unitdb.get("toolset", set()): |
3436 | 89 | # Re-instantiate the tool from the saved class name | 98 | # Re-instantiate the tool from the saved class name |
3437 | 90 | tool_class = getattr(tools, tool_class_name) | 99 | tool_class = getattr(tools, tool_class_name) |
3438 | 91 | tool_class().remove() | 100 | tool_class().remove() |
3442 | 92 | clear_flag('hw-health.installed') | 101 | clear_flag("hw-health.installed") |
3443 | 93 | clear_flag('hw-health.unsupported') | 102 | clear_flag("hw-health.unsupported") |
3444 | 94 | clear_flag('hw-health.configured') | 103 | clear_flag("hw-health.configured") |
3445 | 95 | 104 | ||
3446 | 96 | 105 | ||
3448 | 97 | @when('hw-health.wait-for-vendor-apt') | 106 | @when("hw-health.wait-for-vendor-apt") |
3449 | 98 | def wait_for_vendor_apt(): | 107 | def wait_for_vendor_apt(): |
3450 | 99 | # cycle through any vendor tools that are of type AptVendorTool and | 108 | # cycle through any vendor tools that are of type AptVendorTool and |
3451 | 100 | # check if all packages needed are installed. If not, eject and wait | 109 | # check if all packages needed are installed. If not, eject and wait |
3452 | 101 | unitdb = unitdata.kv() | 110 | unitdb = unitdata.kv() |
3454 | 102 | for tool_class_name in unitdb.get('toolset', set()): | 111 | for tool_class_name in unitdb.get("toolset", set()): |
3455 | 103 | # Re-instantiate the tool from the saved class name | 112 | # Re-instantiate the tool from the saved class name |
3456 | 104 | tool_class = getattr(tools, tool_class_name) | 113 | tool_class = getattr(tools, tool_class_name) |
3457 | 105 | if isinstance(tool_class, tools.AptVendorTool): | 114 | if isinstance(tool_class, tools.AptVendorTool): |
3458 | 106 | if tool_class.is_apt_installed(): | 115 | if tool_class.is_apt_installed(): |
3459 | 107 | tool_class.install_cronjob() | 116 | tool_class.install_cronjob() |
3460 | 108 | else: | 117 | else: |
3462 | 109 | status.maintenance('Waiting for vendor tools to install via apt') | 118 | status.maintenance("Waiting for vendor tools to install via apt") |
3463 | 110 | return | 119 | return |
3466 | 111 | clear_flag('hw-health.wait-for-vendor-apt') | 120 | clear_flag("hw-health.wait-for-vendor-apt") |
3467 | 112 | clear_flag('hw-health.configured') | 121 | clear_flag("hw-health.configured") |
3468 | 113 | 122 | ||
3469 | 114 | 123 | ||
3472 | 115 | @when('config.changed') | 124 | @when("config.changed") |
3473 | 116 | @when_not('config.changed.manufacturer') | 125 | @when_not("config.changed.manufacturer") |
3474 | 117 | def config_changed(): | 126 | def config_changed(): |
3476 | 118 | clear_flag('hw-health.configured') | 127 | clear_flag("hw-health.configured") |
3477 | 119 | 128 | ||
3478 | 120 | 129 | ||
3480 | 121 | @when('config.changed.manufacturer') | 130 | @when("config.changed.manufacturer") |
3481 | 122 | def toolset_changed(): | 131 | def toolset_changed(): |
3483 | 123 | if not is_flag_set('hw-health.installed'): | 132 | if not is_flag_set("hw-health.installed"): |
3484 | 124 | # Note(aluria): useful for testing purposes | 133 | # Note(aluria): useful for testing purposes |
3486 | 125 | clear_flag('hw-health.unsupported') | 134 | clear_flag("hw-health.unsupported") |
3487 | 126 | return | 135 | return |
3488 | 127 | 136 | ||
3489 | 128 | # Changing the manufacturer option will trigger a reinstallation of the | 137 | # Changing the manufacturer option will trigger a reinstallation of the |
3490 | 129 | # tools | 138 | # tools |
3491 | 130 | remove_tools() | 139 | remove_tools() |
3493 | 131 | status.maintenance('Reinstallation of tools in progress') | 140 | status.maintenance("Reinstallation of tools in progress") |
3494 | 132 | 141 | ||
3495 | 133 | 142 | ||
3499 | 134 | @when('hw-health.installed') | 143 | @when("hw-health.installed") |
3500 | 135 | @when_not('nrpe-external-master.available') | 144 | @when_not("nrpe-external-master.available") |
3501 | 136 | @when_not('hw-health.configured') | 145 | @when_not("hw-health.configured") |
3502 | 137 | def blocked_on_nrpe(): | 146 | def blocked_on_nrpe(): |
3504 | 138 | status.blocked('Missing relations: nrpe-external-master') | 147 | status.blocked("Missing relations: nrpe-external-master") |
3505 | 139 | 148 | ||
3506 | 140 | 149 | ||
3510 | 141 | @when('hw-health.installed') | 150 | @when("hw-health.installed") |
3511 | 142 | @when('nrpe-external-master.available') | 151 | @when("nrpe-external-master.available") |
3512 | 143 | @when_not('hw-health.configured') | 152 | @when_not("hw-health.configured") |
3513 | 144 | def configure_nrpe(): | 153 | def configure_nrpe(): |
3516 | 145 | if not os.path.exists('/var/lib/nagios'): | 154 | if not os.path.exists("/var/lib/nagios"): |
3517 | 146 | status.waiting('Waiting for nrpe package installation') | 155 | status.waiting("Waiting for nrpe package installation") |
3518 | 147 | return | 156 | return |
3519 | 148 | 157 | ||
3521 | 149 | status.maintenance('Configuring nrpe checks') | 158 | status.maintenance("Configuring nrpe checks") |
3522 | 150 | 159 | ||
3523 | 151 | nrpe_setup = nrpe.NRPE(primary=False) | 160 | nrpe_setup = nrpe.NRPE(primary=False) |
3524 | 152 | unitdb = unitdata.kv() | 161 | unitdb = unitdata.kv() |
3526 | 153 | for tool_class_name in unitdb.get('toolset', set()): | 162 | for tool_class_name in unitdb.get("toolset", set()): |
3527 | 154 | # Re-instantiate the tool from the saved class name | 163 | # Re-instantiate the tool from the saved class name |
3528 | 155 | tool_class = getattr(tools, tool_class_name) | 164 | tool_class = getattr(tools, tool_class_name) |
3529 | 156 | tool_class().configure_nrpe_check(nrpe_setup) | 165 | tool_class().configure_nrpe_check(nrpe_setup) |
3530 | 157 | 166 | ||
3532 | 158 | if unitdb.get('toolset'): | 167 | if unitdb.get("toolset"): |
3533 | 159 | # Note(aluria): This needs to be run outside of | 168 | # Note(aluria): This needs to be run outside of |
3534 | 160 | # tool_class().configure_nrpe_check or shared dictionary with the | 169 | # tool_class().configure_nrpe_check or shared dictionary with the |
3535 | 161 | # nagios unit will list the last added check (LP#1821602) | 170 | # nagios unit will list the last added check (LP#1821602) |
3536 | 162 | nrpe_setup.write() | 171 | nrpe_setup.write() |
3537 | 163 | 172 | ||
3540 | 164 | status.active('ready') | 173 | status.active("ready") |
3541 | 165 | set_flag('hw-health.configured') | 174 | set_flag("hw-health.configured") |
3542 | 166 | 175 | ||
3543 | 167 | 176 | ||
3547 | 168 | @when('hw-health.installed') | 177 | @when("hw-health.installed") |
3548 | 169 | @when_not('nrpe-external-master.available') | 178 | @when_not("nrpe-external-master.available") |
3549 | 170 | @when('hw-health.configured') | 179 | @when("hw-health.configured") |
3550 | 171 | def remove_nrpe_checks(): | 180 | def remove_nrpe_checks(): |
3552 | 172 | status.maintenance('Removing nrpe checks') | 181 | status.maintenance("Removing nrpe checks") |
3553 | 173 | unitdb = unitdata.kv() | 182 | unitdb = unitdata.kv() |
3555 | 174 | for tool_class_name in unitdb.get('toolset', set()): | 183 | for tool_class_name in unitdb.get("toolset", set()): |
3556 | 175 | # Re-instantiate the tool from the saved class name | 184 | # Re-instantiate the tool from the saved class name |
3557 | 176 | tool_class = getattr(tools, tool_class_name) | 185 | tool_class = getattr(tools, tool_class_name) |
3558 | 177 | tool_class().remove_nrpe_check() | 186 | tool_class().remove_nrpe_check() |
3560 | 178 | clear_flag('hw-health.configured') | 187 | clear_flag("hw-health.configured") |
3561 | diff --git a/src/tests/download_nagios_plugin3.py b/src/tests/download_nagios_plugin3.py | |||
3562 | index b454caf..ded2851 100755 | |||
3563 | --- a/src/tests/download_nagios_plugin3.py | |||
3564 | +++ b/src/tests/download_nagios_plugin3.py | |||
3565 | @@ -3,9 +3,10 @@ from glob import glob | |||
3566 | 3 | import os.path | 3 | import os.path |
3567 | 4 | import urllib.request | 4 | import urllib.request |
3568 | 5 | 5 | ||
3572 | 6 | MODULE_NAME = 'nagios_plugin3.py' | 6 | MODULE_NAME = "nagios_plugin3.py" |
3573 | 7 | MODULE_URL = os.path.join('https://git.launchpad.net/nrpe-charm/plain/files', | 7 | MODULE_URL = os.path.join( |
3574 | 8 | MODULE_NAME) | 8 | "https://git.launchpad.net/nrpe-charm/plain/files", MODULE_NAME |
3575 | 9 | ) | ||
3576 | 9 | _cache = None | 10 | _cache = None |
3577 | 10 | 11 | ||
3578 | 11 | 12 | ||
3579 | @@ -18,11 +19,11 @@ def content(): | |||
3580 | 18 | 19 | ||
3581 | 19 | 20 | ||
3582 | 20 | def main(): | 21 | def main(): |
3584 | 21 | for i in glob('.tox/unit/lib/python3*/site-packages'): | 22 | for i in glob(".tox/unit/lib/python3*/site-packages"): |
3585 | 22 | mod_path = os.path.join(i, MODULE_NAME) | 23 | mod_path = os.path.join(i, MODULE_NAME) |
3586 | 23 | if os.path.isdir(i) and not os.path.exists(mod_path): | 24 | if os.path.isdir(i) and not os.path.exists(mod_path): |
3588 | 24 | open(mod_path, 'wb').write(content()) | 25 | open(mod_path, "wb").write(content()) |
3589 | 25 | 26 | ||
3590 | 26 | 27 | ||
3592 | 27 | if __name__ == '__main__': | 28 | if __name__ == "__main__": |
3593 | 28 | main() | 29 | main() |
3594 | diff --git a/src/tests/functional/conftest.py b/src/tests/functional/conftest.py | |||
3595 | index 9a65e83..5734800 100644 | |||
3596 | --- a/src/tests/functional/conftest.py | |||
3597 | +++ b/src/tests/functional/conftest.py | |||
3598 | @@ -1,5 +1,5 @@ | |||
3599 | 1 | #!/usr/bin/python3 | 1 | #!/usr/bin/python3 |
3601 | 2 | ''' | 2 | """ |
3602 | 3 | Reusable pytest fixtures for functional testing | 3 | Reusable pytest fixtures for functional testing |
3603 | 4 | 4 | ||
3604 | 5 | Environment variables | 5 | Environment variables |
3605 | @@ -7,7 +7,7 @@ Environment variables | |||
3606 | 7 | 7 | ||
3607 | 8 | test_preserve_model: | 8 | test_preserve_model: |
3608 | 9 | if set, the testing model won't be torn down at the end of the testing session | 9 | if set, the testing model won't be torn down at the end of the testing session |
3610 | 10 | ''' | 10 | """ |
3611 | 11 | 11 | ||
3612 | 12 | import asyncio | 12 | import asyncio |
3613 | 13 | import json | 13 | import json |
3614 | @@ -18,7 +18,7 @@ import juju | |||
3615 | 18 | from juju.controller import Controller | 18 | from juju.controller import Controller |
3616 | 19 | from juju.errors import JujuError | 19 | from juju.errors import JujuError |
3617 | 20 | 20 | ||
3619 | 21 | STAT_CMD = '''python3 - <<EOF | 21 | STAT_CMD = """python3 - <<EOF |
3620 | 22 | import json | 22 | import json |
3621 | 23 | import os | 23 | import os |
3622 | 24 | 24 | ||
3623 | @@ -33,13 +33,13 @@ stat_json = json.dumps(stat_hash) | |||
3624 | 33 | print(stat_json) | 33 | print(stat_json) |
3625 | 34 | 34 | ||
3626 | 35 | EOF | 35 | EOF |
3628 | 36 | ''' | 36 | """ |
3629 | 37 | 37 | ||
3630 | 38 | 38 | ||
3632 | 39 | @pytest.yield_fixture(scope='module') | 39 | @pytest.yield_fixture(scope="module") |
3633 | 40 | def event_loop(): | 40 | def event_loop(): |
3636 | 41 | '''Override the default pytest event loop to allow for fixtures using a | 41 | """Override the default pytest event loop to allow for fixtures using a |
3637 | 42 | broader scope''' | 42 | broader scope""" |
3638 | 43 | loop = asyncio.get_event_loop_policy().new_event_loop() | 43 | loop = asyncio.get_event_loop_policy().new_event_loop() |
3639 | 44 | asyncio.set_event_loop(loop) | 44 | asyncio.set_event_loop(loop) |
3640 | 45 | loop.set_debug(True) | 45 | loop.set_debug(True) |
3641 | @@ -48,23 +48,23 @@ def event_loop(): | |||
3642 | 48 | asyncio.set_event_loop(None) | 48 | asyncio.set_event_loop(None) |
3643 | 49 | 49 | ||
3644 | 50 | 50 | ||
3646 | 51 | @pytest.fixture(scope='module') | 51 | @pytest.fixture(scope="module") |
3647 | 52 | async def controller(): | 52 | async def controller(): |
3649 | 53 | '''Connect to the current controller''' | 53 | """Connect to the current controller""" |
3650 | 54 | _controller = Controller() | 54 | _controller = Controller() |
3651 | 55 | await _controller.connect_current() | 55 | await _controller.connect_current() |
3652 | 56 | yield _controller | 56 | yield _controller |
3653 | 57 | await _controller.disconnect() | 57 | await _controller.disconnect() |
3654 | 58 | 58 | ||
3655 | 59 | 59 | ||
3657 | 60 | @pytest.fixture(scope='module') | 60 | @pytest.fixture(scope="module") |
3658 | 61 | async def model(controller): # pylint: disable=redefined-outer-name | 61 | async def model(controller): # pylint: disable=redefined-outer-name |
3660 | 62 | '''This model lives only for the duration of the test''' | 62 | """This model lives only for the duration of the test""" |
3661 | 63 | model_name = "functest-{}".format(uuid.uuid4()) | 63 | model_name = "functest-{}".format(uuid.uuid4()) |
3662 | 64 | _model = await controller.add_model(model_name) | 64 | _model = await controller.add_model(model_name) |
3663 | 65 | yield _model | 65 | yield _model |
3664 | 66 | await _model.disconnect() | 66 | await _model.disconnect() |
3666 | 67 | if not os.getenv('test_preserve_model'): | 67 | if not os.getenv("test_preserve_model"): |
3667 | 68 | await controller.destroy_model(model_name) | 68 | await controller.destroy_model(model_name) |
3668 | 69 | while model_name in await controller.list_models(): | 69 | while model_name in await controller.list_models(): |
3669 | 70 | await asyncio.sleep(1) | 70 | await asyncio.sleep(1) |
3670 | @@ -72,30 +72,35 @@ async def model(controller): # pylint: disable=redefined-outer-name | |||
3671 | 72 | 72 | ||
3672 | 73 | @pytest.fixture() | 73 | @pytest.fixture() |
3673 | 74 | async def get_app(model): # pylint: disable=redefined-outer-name | 74 | async def get_app(model): # pylint: disable=redefined-outer-name |
3675 | 75 | '''Returns the application requested''' | 75 | """Returns the application requested""" |
3676 | 76 | |||
3677 | 76 | async def _get_app(name): | 77 | async def _get_app(name): |
3678 | 77 | try: | 78 | try: |
3679 | 78 | return model.applications[name] | 79 | return model.applications[name] |
3680 | 79 | except KeyError: | 80 | except KeyError: |
3681 | 80 | raise JujuError("Cannot find application {}".format(name)) | 81 | raise JujuError("Cannot find application {}".format(name)) |
3682 | 82 | |||
3683 | 81 | return _get_app | 83 | return _get_app |
3684 | 82 | 84 | ||
3685 | 83 | 85 | ||
3686 | 84 | @pytest.fixture() | 86 | @pytest.fixture() |
3687 | 85 | async def get_unit(model): # pylint: disable=redefined-outer-name | 87 | async def get_unit(model): # pylint: disable=redefined-outer-name |
3689 | 86 | '''Returns the requested <app_name>/<unit_number> unit''' | 88 | """Returns the requested <app_name>/<unit_number> unit""" |
3690 | 89 | |||
3691 | 87 | async def _get_unit(name): | 90 | async def _get_unit(name): |
3692 | 88 | try: | 91 | try: |
3694 | 89 | (app_name, unit_number) = name.split('/') | 92 | (app_name, unit_number) = name.split("/") |
3695 | 90 | return model.applications[app_name].units[unit_number] | 93 | return model.applications[app_name].units[unit_number] |
3696 | 91 | except (KeyError, ValueError): | 94 | except (KeyError, ValueError): |
3697 | 92 | raise JujuError("Cannot find unit {}".format(name)) | 95 | raise JujuError("Cannot find unit {}".format(name)) |
3698 | 96 | |||
3699 | 93 | return _get_unit | 97 | return _get_unit |
3700 | 94 | 98 | ||
3701 | 95 | 99 | ||
3702 | 96 | @pytest.fixture() | 100 | @pytest.fixture() |
3703 | 97 | async def get_entity(get_unit, get_app): # pylint: disable=redefined-outer-name | 101 | async def get_entity(get_unit, get_app): # pylint: disable=redefined-outer-name |
3705 | 98 | '''Returns a unit or an application''' | 102 | """Returns a unit or an application""" |
3706 | 103 | |||
3707 | 99 | async def _get_entity(name): | 104 | async def _get_entity(name): |
3708 | 100 | try: | 105 | try: |
3709 | 101 | return await get_unit(name) | 106 | return await get_unit(name) |
3710 | @@ -104,69 +109,72 @@ async def get_entity(get_unit, get_app): # pylint: disable=redefined-outer-name | |||
3711 | 104 | return await get_app(name) | 109 | return await get_app(name) |
3712 | 105 | except JujuError: | 110 | except JujuError: |
3713 | 106 | raise JujuError("Cannot find entity {}".format(name)) | 111 | raise JujuError("Cannot find entity {}".format(name)) |
3714 | 112 | |||
3715 | 107 | return _get_entity | 113 | return _get_entity |
3716 | 108 | 114 | ||
3717 | 109 | 115 | ||
3718 | 110 | @pytest.fixture | 116 | @pytest.fixture |
3719 | 111 | async def run_command(get_unit): # pylint: disable=redefined-outer-name | 117 | async def run_command(get_unit): # pylint: disable=redefined-outer-name |
3721 | 112 | ''' | 118 | """ |
3722 | 113 | Runs a command on a unit. | 119 | Runs a command on a unit. |
3723 | 114 | 120 | ||
3724 | 115 | :param cmd: Command to be run | 121 | :param cmd: Command to be run |
3725 | 116 | :param target: Unit object or unit name string | 122 | :param target: Unit object or unit name string |
3727 | 117 | ''' | 123 | """ |
3728 | 124 | |||
3729 | 118 | async def _run_command(cmd, target): | 125 | async def _run_command(cmd, target): |
3735 | 119 | unit = ( | 126 | unit = target if isinstance(target, juju.unit.Unit) else await get_unit(target) |
3731 | 120 | target | ||
3732 | 121 | if isinstance(target, juju.unit.Unit) | ||
3733 | 122 | else await get_unit(target) | ||
3734 | 123 | ) | ||
3736 | 124 | action = await unit.run(cmd) | 127 | action = await unit.run(cmd) |
3737 | 125 | return action.results | 128 | return action.results |
3738 | 129 | |||
3739 | 126 | return _run_command | 130 | return _run_command |
3740 | 127 | 131 | ||
3741 | 128 | 132 | ||
3742 | 129 | @pytest.fixture | 133 | @pytest.fixture |
3743 | 130 | async def file_stat(run_command): # pylint: disable=redefined-outer-name | 134 | async def file_stat(run_command): # pylint: disable=redefined-outer-name |
3745 | 131 | ''' | 135 | """ |
3746 | 132 | Runs stat on a file | 136 | Runs stat on a file |
3747 | 133 | 137 | ||
3748 | 134 | :param path: File path | 138 | :param path: File path |
3749 | 135 | :param target: Unit object or unit name string | 139 | :param target: Unit object or unit name string |
3751 | 136 | ''' | 140 | """ |
3752 | 141 | |||
3753 | 137 | async def _file_stat(path, target): | 142 | async def _file_stat(path, target): |
3754 | 138 | cmd = STAT_CMD % path | 143 | cmd = STAT_CMD % path |
3755 | 139 | results = await run_command(cmd, target) | 144 | results = await run_command(cmd, target) |
3757 | 140 | if results['Code'] != '0': | 145 | if results["Code"] != "0": |
3758 | 141 | # A common possible error is simply ENOENT, the file ain't there. | 146 | # A common possible error is simply ENOENT, the file ain't there. |
3759 | 142 | # A better solution would be to retrieve the exception that the | 147 | # A better solution would be to retrieve the exception that the |
3760 | 143 | # remote python code raised, but that would probably require a real | 148 | # remote python code raised, but that would probably require a real |
3761 | 144 | # RPC setup | 149 | # RPC setup |
3763 | 145 | raise RuntimeError('Stat failed: {}'.format(results)) | 150 | raise RuntimeError("Stat failed: {}".format(results)) |
3764 | 146 | else: | 151 | else: |
3766 | 147 | return json.loads(results['Stdout']) | 152 | return json.loads(results["Stdout"]) |
3767 | 148 | 153 | ||
3768 | 149 | return _file_stat | 154 | return _file_stat |
3769 | 150 | 155 | ||
3770 | 151 | 156 | ||
3771 | 152 | @pytest.fixture | 157 | @pytest.fixture |
3772 | 153 | async def file_contents(run_command): # pylint: disable=redefined-outer-name | 158 | async def file_contents(run_command): # pylint: disable=redefined-outer-name |
3774 | 154 | ''' | 159 | """ |
3775 | 155 | Returns the contents of a file | 160 | Returns the contents of a file |
3776 | 156 | 161 | ||
3777 | 157 | :param path: File path | 162 | :param path: File path |
3778 | 158 | :param target: Unit object or unit name string | 163 | :param target: Unit object or unit name string |
3780 | 159 | ''' | 164 | """ |
3781 | 165 | |||
3782 | 160 | async def _file_contents(path, target): | 166 | async def _file_contents(path, target): |
3784 | 161 | cmd = 'cat {}'.format(path) | 167 | cmd = "cat {}".format(path) |
3785 | 162 | results = await run_command(cmd, target) | 168 | results = await run_command(cmd, target) |
3787 | 163 | return results['Stdout'] | 169 | return results["Stdout"] |
3788 | 170 | |||
3789 | 164 | return _file_contents | 171 | return _file_contents |
3790 | 165 | 172 | ||
3791 | 166 | 173 | ||
3792 | 167 | @pytest.fixture | 174 | @pytest.fixture |
3793 | 168 | async def reconfigure_app(get_app, model): # pylint: disable=redefined-outer-name | 175 | async def reconfigure_app(get_app, model): # pylint: disable=redefined-outer-name |
3795 | 169 | '''Applies a different config to the requested app''' | 176 | """Applies a different config to the requested app""" |
3796 | 177 | |||
3797 | 170 | async def _reconfigure_app(cfg, target): | 178 | async def _reconfigure_app(cfg, target): |
3798 | 171 | application = ( | 179 | application = ( |
3799 | 172 | target | 180 | target |
3800 | @@ -175,5 +183,6 @@ async def reconfigure_app(get_app, model): # pylint: disable=redefined-outer-na | |||
3801 | 175 | ) | 183 | ) |
3802 | 176 | await application.set_config(cfg) | 184 | await application.set_config(cfg) |
3803 | 177 | await application.get_config() | 185 | await application.get_config() |
3805 | 178 | await model.block_until(lambda: application.status == 'active') | 186 | await model.block_until(lambda: application.status == "active") |
3806 | 187 | |||
3807 | 179 | return _reconfigure_app | 188 | return _reconfigure_app |
3808 | diff --git a/src/tests/functional/test_hwhealth.py b/src/tests/functional/test_hwhealth.py | |||
3809 | index cae8fd4..d17aae7 100644 | |||
3810 | --- a/src/tests/functional/test_hwhealth.py | |||
3811 | +++ b/src/tests/functional/test_hwhealth.py | |||
3812 | @@ -5,22 +5,22 @@ import subprocess | |||
3813 | 5 | import asyncio | 5 | import asyncio |
3814 | 6 | from os.path import abspath, dirname | 6 | from os.path import abspath, dirname |
3815 | 7 | 7 | ||
3817 | 8 | sys.path.append('lib') | 8 | sys.path.append("lib") |
3818 | 9 | 9 | ||
3819 | 10 | from hwhealth import hwdiscovery # noqa: E402 | 10 | from hwhealth import hwdiscovery # noqa: E402 |
3821 | 11 | from hwhealth import tools # noqa: E402 | 11 | from hwhealth import tools # noqa: E402 |
3822 | 12 | 12 | ||
3823 | 13 | 13 | ||
3824 | 14 | # Treat all tests as coroutines | 14 | # Treat all tests as coroutines |
3825 | 15 | pytestmark = pytest.mark.asyncio | 15 | pytestmark = pytest.mark.asyncio |
3826 | 16 | SERIES = [ | 16 | SERIES = [ |
3830 | 17 | 'focal', | 17 | "focal", |
3831 | 18 | 'bionic', | 18 | "bionic", |
3832 | 19 | 'xenial', | 19 | "xenial", |
3833 | 20 | ] | 20 | ] |
3834 | 21 | CHARM_DIR = dirname(dirname(dirname(abspath(__file__)))) | 21 | CHARM_DIR = dirname(dirname(dirname(abspath(__file__)))) |
3835 | 22 | CHARM_BUILD_DIR = dirname(CHARM_DIR) | 22 | CHARM_BUILD_DIR = dirname(CHARM_DIR) |
3837 | 23 | NRPECFG_DIR = '/etc/nagios/nrpe.d' | 23 | NRPECFG_DIR = "/etc/nagios/nrpe.d" |
3838 | 24 | DEF_TIMEOUT = 600 | 24 | DEF_TIMEOUT = 600 |
3839 | 25 | # These go along with the hpe repos for the hp* tools | 25 | # These go along with the hpe repos for the hp* tools |
3840 | 26 | 26 | ||
3841 | @@ -33,46 +33,50 @@ async def deploy_hwhealth_res(model, app_name, res_filename): | |||
3842 | 33 | # Attaching resources is not implemented yet in libjuju | 33 | # Attaching resources is not implemented yet in libjuju |
3843 | 34 | # see https://github.com/juju/python-libjuju/issues/294 | 34 | # see https://github.com/juju/python-libjuju/issues/294 |
3844 | 35 | tools_res_path = os.path.join(CHARM_BUILD_DIR, res_filename) | 35 | tools_res_path = os.path.join(CHARM_BUILD_DIR, res_filename) |
3855 | 36 | subprocess.check_call([ | 36 | subprocess.check_call( |
3856 | 37 | 'juju', | 37 | [ |
3857 | 38 | 'deploy', | 38 | "juju", |
3858 | 39 | '-m', | 39 | "deploy", |
3859 | 40 | model.info.name, | 40 | "-m", |
3860 | 41 | os.path.join(CHARM_BUILD_DIR, 'hw-health'), | 41 | model.info.name, |
3861 | 42 | app_name, | 42 | os.path.join(CHARM_BUILD_DIR, "hw-health"), |
3862 | 43 | '--resource', | 43 | app_name, |
3863 | 44 | 'tools={}'.format(tools_res_path), | 44 | "--resource", |
3864 | 45 | ]) | 45 | "tools={}".format(tools_res_path), |
3865 | 46 | ] | ||
3866 | 47 | ) | ||
3867 | 46 | 48 | ||
3868 | 47 | 49 | ||
3869 | 48 | async def update_hwhealth_res(model, app_name, res_filename): | 50 | async def update_hwhealth_res(model, app_name, res_filename): |
3870 | 49 | tools_res_path = os.path.join(CHARM_BUILD_DIR, res_filename) | 51 | tools_res_path = os.path.join(CHARM_BUILD_DIR, res_filename) |
3879 | 50 | subprocess.check_call([ | 52 | subprocess.check_call( |
3880 | 51 | 'juju', | 53 | [ |
3881 | 52 | 'attach-resource', | 54 | "juju", |
3882 | 53 | '-m', | 55 | "attach-resource", |
3883 | 54 | model.info.name, | 56 | "-m", |
3884 | 55 | app_name, | 57 | model.info.name, |
3885 | 56 | 'tools={}'.format(tools_res_path), | 58 | app_name, |
3886 | 57 | ]) | 59 | "tools={}".format(tools_res_path), |
3887 | 60 | ] | ||
3888 | 61 | ) | ||
3889 | 62 | |||
3890 | 58 | 63 | ||
3891 | 59 | ################### | 64 | ################### |
3892 | 60 | # Custom fixtures # | 65 | # Custom fixtures # |
3893 | 61 | ################### | 66 | ################### |
3894 | 62 | 67 | ||
3895 | 63 | 68 | ||
3898 | 64 | @pytest.fixture(scope='module', | 69 | @pytest.fixture(scope="module", params=SERIES) |
3897 | 65 | params=SERIES) | ||
3899 | 66 | async def deploy_app(request, model): | 70 | async def deploy_app(request, model): |
3901 | 67 | '''Deploys the hw-health charm as a subordinate of ubuntu''' | 71 | """Deploys the hw-health charm as a subordinate of ubuntu""" |
3902 | 68 | # TODO: this might look nicer if we deployed a bundle instead. It could be | 72 | # TODO: this might look nicer if we deployed a bundle instead. It could be |
3903 | 69 | # a jinja template to handle the parametrization | 73 | # a jinja template to handle the parametrization |
3904 | 70 | release = request.param | 74 | release = request.param |
3908 | 71 | channel = 'stable' | 75 | channel = "stable" |
3909 | 72 | hw_health_app_name = 'hw-health-{}'.format(release) | 76 | hw_health_app_name = "hw-health-{}".format(release) |
3910 | 73 | hw_health_checksum_app_name = 'hw-health-checksum-{}'.format(release) | 77 | hw_health_checksum_app_name = "hw-health-checksum-{}".format(release) |
3911 | 74 | 78 | ||
3913 | 75 | for principal_app in ['ubuntu', 'nagios']: | 79 | for principal_app in ["ubuntu", "nagios"]: |
3914 | 76 | relname = series = release | 80 | relname = series = release |
3915 | 77 | if principal_app == "nagios" and release == "focal": | 81 | if principal_app == "nagios" and release == "focal": |
3916 | 78 | # NOTE(aluria): cs:nagios was not available in focal | 82 | # NOTE(aluria): cs:nagios was not available in focal |
3917 | @@ -82,97 +86,90 @@ async def deploy_app(request, model): | |||
3918 | 82 | series = "bionic" | 86 | series = "bionic" |
3919 | 83 | await model.deploy( | 87 | await model.deploy( |
3920 | 84 | principal_app, | 88 | principal_app, |
3922 | 85 | application_name='{}-{}'.format(principal_app, relname), | 89 | application_name="{}-{}".format(principal_app, relname), |
3923 | 86 | series=series, | 90 | series=series, |
3924 | 87 | channel=channel, | 91 | channel=channel, |
3925 | 88 | ) | 92 | ) |
3926 | 89 | await model.deploy( | 93 | await model.deploy( |
3929 | 90 | 'ubuntu', | 94 | "ubuntu", |
3930 | 91 | application_name='ubuntu-checksum-{}'.format(release), | 95 | application_name="ubuntu-checksum-{}".format(release), |
3931 | 92 | series=release, | 96 | series=release, |
3933 | 93 | channel=channel | 97 | channel=channel, |
3934 | 94 | ) | 98 | ) |
3935 | 95 | nrpe_app = await model.deploy( | 99 | nrpe_app = await model.deploy( |
3938 | 96 | 'nrpe', | 100 | "nrpe", |
3939 | 97 | application_name='nrpe-{}'.format(release), | 101 | application_name="nrpe-{}".format(release), |
3940 | 98 | series=release, | 102 | series=release, |
3941 | 99 | num_units=0, | 103 | num_units=0, |
3942 | 100 | channel=channel, | 104 | channel=channel, |
3943 | 101 | ) | 105 | ) |
3945 | 102 | for ubuntu_unit in ['ubuntu', 'ubuntu-checksum']: | 106 | for ubuntu_unit in ["ubuntu", "ubuntu-checksum"]: |
3946 | 103 | await nrpe_app.add_relation( | 107 | await nrpe_app.add_relation( |
3949 | 104 | 'general-info', | 108 | "general-info", "{}-{}:juju-info".format(ubuntu_unit, release) |
3948 | 105 | '{}-{}:juju-info'.format(ubuntu_unit, release) | ||
3950 | 106 | ) | 109 | ) |
3955 | 107 | await nrpe_app.add_relation( | 110 | await nrpe_app.add_relation("monitors", "nagios-{}:monitors".format(relname)) |
3952 | 108 | 'monitors', | ||
3953 | 109 | 'nagios-{}:monitors'.format(relname) | ||
3954 | 110 | ) | ||
3956 | 111 | 111 | ||
3957 | 112 | # Attaching resources is not implemented yet in libjuju | 112 | # Attaching resources is not implemented yet in libjuju |
3958 | 113 | # see https://github.com/juju/python-libjuju/issues/294 | 113 | # see https://github.com/juju/python-libjuju/issues/294 |
3962 | 114 | await deploy_hwhealth_res(model, hw_health_app_name, 'tools.zip') | 114 | await deploy_hwhealth_res(model, hw_health_app_name, "tools.zip") |
3963 | 115 | await deploy_hwhealth_res(model, hw_health_checksum_app_name, | 115 | await deploy_hwhealth_res(model, hw_health_checksum_app_name, "tools-checksum.zip") |
3961 | 116 | 'tools-checksum.zip') | ||
3964 | 117 | 116 | ||
3965 | 118 | # This is pretty horrible, but we can't deploy via libjuju | 117 | # This is pretty horrible, but we can't deploy via libjuju |
3966 | 119 | while True: | 118 | while True: |
3967 | 120 | try: | 119 | try: |
3968 | 121 | hw_health_app = model.applications[hw_health_app_name] | 120 | hw_health_app = model.applications[hw_health_app_name] |
3971 | 122 | hw_health_checksum_app = \ | 121 | hw_health_checksum_app = model.applications[hw_health_checksum_app_name] |
3970 | 123 | model.applications[hw_health_checksum_app_name] | ||
3972 | 124 | break | 122 | break |
3973 | 125 | except KeyError: | 123 | except KeyError: |
3974 | 126 | await asyncio.sleep(5) | 124 | await asyncio.sleep(5) |
3975 | 127 | 125 | ||
3976 | 128 | await hw_health_app.add_relation( | 126 | await hw_health_app.add_relation( |
3979 | 129 | 'general-info', | 127 | "general-info", "ubuntu-{}:juju-info".format(release) |
3978 | 130 | 'ubuntu-{}:juju-info'.format(release) | ||
3980 | 131 | ) | 128 | ) |
3981 | 132 | await hw_health_app.add_relation( | 129 | await hw_health_app.add_relation( |
3984 | 133 | 'nrpe-external-master', | 130 | "nrpe-external-master", "{}:nrpe-external-master".format(nrpe_app.name) |
3983 | 134 | '{}:nrpe-external-master'.format(nrpe_app.name) | ||
3985 | 135 | ) | 131 | ) |
3986 | 136 | 132 | ||
3987 | 137 | await hw_health_checksum_app.add_relation( | 133 | await hw_health_checksum_app.add_relation( |
3990 | 138 | 'general-info', | 134 | "general-info", "ubuntu-checksum-{}:juju-info".format(release) |
3989 | 139 | 'ubuntu-checksum-{}:juju-info'.format(release) | ||
3991 | 140 | ) | 135 | ) |
3992 | 141 | await hw_health_checksum_app.add_relation( | 136 | await hw_health_checksum_app.add_relation( |
3995 | 142 | 'nrpe-external-master', | 137 | "nrpe-external-master", "{}:nrpe-external-master".format(nrpe_app.name) |
3994 | 143 | '{}:nrpe-external-master'.format(nrpe_app.name) | ||
3996 | 144 | ) | 138 | ) |
3997 | 145 | 139 | ||
3998 | 146 | # The app will initially be in blocked state because it's running in a | 140 | # The app will initially be in blocked state because it's running in a |
3999 | 147 | # container | 141 | # container |
4000 | 148 | await model.block_until( | 142 | await model.block_until( |
4004 | 149 | lambda: (hw_health_app.status == 'blocked' and # noqa:W504 | 143 | lambda: ( |
4005 | 150 | hw_health_checksum_app.status == 'blocked'), | 144 | hw_health_app.status == "blocked" |
4006 | 151 | timeout=DEF_TIMEOUT | 145 | and hw_health_checksum_app.status == "blocked" # noqa:W504 |
4007 | 146 | ), | ||
4008 | 147 | timeout=DEF_TIMEOUT, | ||
4009 | 152 | ) | 148 | ) |
4010 | 153 | yield hw_health_app | 149 | yield hw_health_app |
4011 | 154 | 150 | ||
4012 | 155 | 151 | ||
4014 | 156 | @pytest.fixture(scope='module') | 152 | @pytest.fixture(scope="module") |
4015 | 157 | async def deployed_unit(deploy_app): | 153 | async def deployed_unit(deploy_app): |
4017 | 158 | '''Returns the hw-health unit we've deployed''' | 154 | """Returns the hw-health unit we've deployed""" |
4018 | 159 | return deploy_app.units[0] | 155 | return deploy_app.units[0] |
4019 | 160 | 156 | ||
4020 | 161 | 157 | ||
4022 | 162 | @pytest.fixture(scope='function') | 158 | @pytest.fixture(scope="function") |
4023 | 163 | async def toolset(monkeypatch): | 159 | async def toolset(monkeypatch): |
4024 | 164 | # All tool classes know which files should be installed and how, so we can | 160 | # All tool classes know which files should be installed and how, so we can |
4025 | 165 | # use them to read the expected stat results. Monkeypatching is however | 161 | # use them to read the expected stat results. Monkeypatching is however |
4026 | 166 | # required as the classes code is not expected to be run outside of a | 162 | # required as the classes code is not expected to be run outside of a |
4027 | 167 | # deployed charm | 163 | # deployed charm |
4028 | 168 | with monkeypatch.context() as m: | 164 | with monkeypatch.context() as m: |
4036 | 169 | m.setattr('charmhelpers.core.hookenv.charm_dir', | 165 | m.setattr("charmhelpers.core.hookenv.charm_dir", lambda: CHARM_BUILD_DIR) |
4037 | 170 | lambda: CHARM_BUILD_DIR) | 166 | m.setattr("charmhelpers.core.hookenv.config", lambda x=None: dict()) |
4038 | 171 | m.setattr('charmhelpers.core.hookenv.config', | 167 | m.setattr( |
4039 | 172 | lambda x=None: dict()) | 168 | "charmhelpers.contrib.charmsupport.nrpe.get_nagios_hostname", |
4040 | 173 | m.setattr('charmhelpers.contrib.charmsupport.nrpe.get_nagios_hostname', | 169 | lambda: "pytest", |
4041 | 174 | lambda: 'pytest') | 170 | ) |
4042 | 175 | return [tool() for tool in hwdiscovery.get_tools('test')] | 171 | return [tool() for tool in hwdiscovery.get_tools("test")] |
4043 | 172 | |||
4044 | 176 | 173 | ||
4045 | 177 | ######### | 174 | ######### |
4046 | 178 | # Tests # | 175 | # Tests # |
4047 | @@ -180,69 +177,69 @@ async def toolset(monkeypatch): | |||
4048 | 180 | 177 | ||
4049 | 181 | 178 | ||
4050 | 182 | async def test_cannot_run_in_container(deploy_app): | 179 | async def test_cannot_run_in_container(deploy_app): |
4052 | 183 | assert deploy_app.status == 'blocked' | 180 | assert deploy_app.status == "blocked" |
4053 | 184 | 181 | ||
4054 | 185 | 182 | ||
4055 | 186 | async def test_forced_deploy(deploy_app, model, run_command): | 183 | async def test_forced_deploy(deploy_app, model, run_command): |
4056 | 187 | # Create a fake NVMe device for the cronjob to be configured | 184 | # Create a fake NVMe device for the cronjob to be configured |
4057 | 188 | CREATE_FAKE_NVME = "/bin/bash -c 'touch /dev/nvme0'" | 185 | CREATE_FAKE_NVME = "/bin/bash -c 'touch /dev/nvme0'" |
4059 | 189 | series = deploy_app.name.split('-')[-1] | 186 | series = deploy_app.name.split("-")[-1] |
4060 | 190 | for unit in model.units.values(): | 187 | for unit in model.units.values(): |
4062 | 191 | if unit.entity_id.startswith('ubuntu-{}'.format(series)): | 188 | if unit.entity_id.startswith("ubuntu-{}".format(series)): |
4063 | 192 | ubuntu_unit = unit | 189 | ubuntu_unit = unit |
4064 | 193 | await model.block_until( | 190 | await model.block_until( |
4067 | 194 | lambda: ubuntu_unit.workload_status == 'active', | 191 | lambda: ubuntu_unit.workload_status == "active", timeout=DEF_TIMEOUT |
4066 | 195 | timeout=DEF_TIMEOUT | ||
4068 | 196 | ) | 192 | ) |
4069 | 197 | await run_command(CREATE_FAKE_NVME, ubuntu_unit) | 193 | await run_command(CREATE_FAKE_NVME, ubuntu_unit) |
4070 | 198 | break | 194 | break |
4071 | 199 | 195 | ||
4078 | 200 | await deploy_app.set_config({'manufacturer': 'test'}) | 196 | await deploy_app.set_config({"manufacturer": "test"}) |
4079 | 201 | await model.block_until( | 197 | await model.block_until(lambda: deploy_app.status == "active", timeout=DEF_TIMEOUT) |
4080 | 202 | lambda: deploy_app.status == 'active', | 198 | assert deploy_app.status == "active" |
4075 | 203 | timeout=DEF_TIMEOUT | ||
4076 | 204 | ) | ||
4077 | 205 | assert deploy_app.status == 'active' | ||
4081 | 206 | 199 | ||
4082 | 207 | 200 | ||
4083 | 208 | async def test_checksum_forced_deploy(deploy_app, model, run_command): | 201 | async def test_checksum_forced_deploy(deploy_app, model, run_command): |
4084 | 209 | # Create a fake NVMe device for the cronjob to be configured | 202 | # Create a fake NVMe device for the cronjob to be configured |
4085 | 210 | CREATE_FAKE_NVME = "/bin/bash -c 'touch /dev/nvme0'" | 203 | CREATE_FAKE_NVME = "/bin/bash -c 'touch /dev/nvme0'" |
4088 | 211 | series = deploy_app.name.split('-')[-1] | 204 | series = deploy_app.name.split("-")[-1] |
4089 | 212 | checksum_app_name = 'hw-health-checksum-{}'.format(series) | 205 | checksum_app_name = "hw-health-checksum-{}".format(series) |
4090 | 213 | checksum_app = model.applications[checksum_app_name] | 206 | checksum_app = model.applications[checksum_app_name] |
4091 | 214 | for unit in model.units.values(): | 207 | for unit in model.units.values(): |
4093 | 215 | if unit.entity_id.startswith('ubuntu-checksum-{}'.format(series)): | 208 | if unit.entity_id.startswith("ubuntu-checksum-{}".format(series)): |
4094 | 216 | ubuntu_unit = unit | 209 | ubuntu_unit = unit |
4095 | 217 | await model.block_until( | 210 | await model.block_until( |
4098 | 218 | lambda: ubuntu_unit.workload_status == 'active', | 211 | lambda: ubuntu_unit.workload_status == "active", timeout=DEF_TIMEOUT |
4097 | 219 | timeout=DEF_TIMEOUT | ||
4099 | 220 | ) | 212 | ) |
4100 | 221 | await run_command(CREATE_FAKE_NVME, ubuntu_unit) | 213 | await run_command(CREATE_FAKE_NVME, ubuntu_unit) |
4101 | 222 | elif unit.entity_id.startswith(checksum_app_name): | 214 | elif unit.entity_id.startswith(checksum_app_name): |
4102 | 223 | checksum_unit = unit | 215 | checksum_unit = unit |
4103 | 224 | 216 | ||
4105 | 225 | await checksum_app.set_config({'manufacturer': 'test'}) | 217 | await checksum_app.set_config({"manufacturer": "test"}) |
4106 | 226 | try: | 218 | try: |
4107 | 227 | await model.block_until( | 219 | await model.block_until( |
4108 | 228 | lambda: ( | 220 | lambda: ( |
4110 | 229 | checksum_app.status == 'blocked' and checksum_unit.workload_status_message == 'Tool megacli - checksum error' # noqa E501 | 221 | checksum_app.status == "blocked" |
4111 | 222 | and checksum_unit.workload_status_message | ||
4112 | 223 | == "Tool megacli - checksum error" # noqa E501 | ||
4113 | 230 | ), | 224 | ), |
4115 | 231 | timeout=DEF_TIMEOUT) | 225 | timeout=DEF_TIMEOUT, |
4116 | 226 | ) | ||
4117 | 232 | except asyncio.exceptions.TimeoutError: | 227 | except asyncio.exceptions.TimeoutError: |
4118 | 233 | print( | 228 | print( |
4119 | 234 | "failed to get expected state 'blocked:Tool megacli - checksum error', " | 229 | "failed to get expected state 'blocked:Tool megacli - checksum error', " |
4121 | 235 | "witnessed '{}:{}'".format(checksum_app.status, checksum_unit.workload_status_message) | 230 | "witnessed '{}:{}'".format( |
4122 | 231 | checksum_app.status, checksum_unit.workload_status_message | ||
4123 | 232 | ) | ||
4124 | 236 | ) | 233 | ) |
4127 | 237 | assert checksum_app.status == 'blocked' | 234 | assert checksum_app.status == "blocked" |
4128 | 238 | assert checksum_unit.workload_status_message == 'Tool megacli - checksum error' | 235 | assert checksum_unit.workload_status_message == "Tool megacli - checksum error" |
4129 | 239 | 236 | ||
4130 | 240 | 237 | ||
4131 | 241 | async def test_checksum_updated_resource_missing(deploy_app, model): | 238 | async def test_checksum_updated_resource_missing(deploy_app, model): |
4134 | 242 | series = deploy_app.name.split('-')[-1] | 239 | series = deploy_app.name.split("-")[-1] |
4135 | 243 | checksum_app_name = 'hw-health-checksum-{}'.format(series) | 240 | checksum_app_name = "hw-health-checksum-{}".format(series) |
4136 | 244 | checksum_app = model.applications[checksum_app_name] | 241 | checksum_app = model.applications[checksum_app_name] |
4138 | 245 | await update_hwhealth_res(model, checksum_app_name, 'tools-missing.zip') | 242 | await update_hwhealth_res(model, checksum_app_name, "tools-missing.zip") |
4139 | 246 | for unit in model.units.values(): | 243 | for unit in model.units.values(): |
4140 | 247 | if unit.entity_id.startswith(checksum_app_name): | 244 | if unit.entity_id.startswith(checksum_app_name): |
4141 | 248 | checksum_unit = unit | 245 | checksum_unit = unit |
4142 | @@ -250,209 +247,242 @@ async def test_checksum_updated_resource_missing(deploy_app, model): | |||
4143 | 250 | 247 | ||
4144 | 251 | await model.block_until( | 248 | await model.block_until( |
4145 | 252 | lambda: ( | 249 | lambda: ( |
4147 | 253 | checksum_app.status == 'blocked' and checksum_unit.workload_status_message == 'Tool megacli not found' # noqa E501 | 250 | checksum_app.status == "blocked" |
4148 | 251 | and checksum_unit.workload_status_message | ||
4149 | 252 | == "Tool megacli not found" # noqa E501 | ||
4150 | 254 | ), | 253 | ), |
4152 | 255 | timeout=DEF_TIMEOUT | 254 | timeout=DEF_TIMEOUT, |
4153 | 256 | ) | 255 | ) |
4154 | 257 | 256 | ||
4155 | 258 | 257 | ||
4156 | 259 | async def test_checksum_updated_resource_ok(deploy_app, model): | 258 | async def test_checksum_updated_resource_ok(deploy_app, model): |
4159 | 260 | series = deploy_app.name.split('-')[-1] | 259 | series = deploy_app.name.split("-")[-1] |
4160 | 261 | checksum_app_name = 'hw-health-checksum-{}'.format(series) | 260 | checksum_app_name = "hw-health-checksum-{}".format(series) |
4161 | 262 | checksum_app = model.applications[checksum_app_name] | 261 | checksum_app = model.applications[checksum_app_name] |
4163 | 263 | await update_hwhealth_res(model, checksum_app_name, 'tools.zip') | 262 | await update_hwhealth_res(model, checksum_app_name, "tools.zip") |
4164 | 264 | for unit in model.units.values(): | 263 | for unit in model.units.values(): |
4165 | 265 | if unit.entity_id.startswith(checksum_app_name): | 264 | if unit.entity_id.startswith(checksum_app_name): |
4166 | 266 | checksum_unit = unit | 265 | checksum_unit = unit |
4167 | 267 | break | 266 | break |
4168 | 268 | 267 | ||
4169 | 269 | await model.block_until( | 268 | await model.block_until( |
4173 | 270 | lambda: (checksum_app.status == 'active' and # noqa:W504 | 269 | lambda: ( |
4174 | 271 | checksum_unit.workload_status_message == 'ready'), | 270 | checksum_app.status == "active" |
4175 | 272 | timeout=DEF_TIMEOUT | 271 | and checksum_unit.workload_status_message == "ready" # noqa:W504 |
4176 | 272 | ), | ||
4177 | 273 | timeout=DEF_TIMEOUT, | ||
4178 | 273 | ) | 274 | ) |
4179 | 274 | 275 | ||
4180 | 275 | 276 | ||
4182 | 276 | async def test_deployed_file_stats(monkeypatch, toolset, deploy_app, deployed_unit, file_stat): | 277 | async def test_deployed_file_stats( |
4183 | 278 | monkeypatch, toolset, deploy_app, deployed_unit, file_stat | ||
4184 | 279 | ): | ||
4185 | 277 | # This should really be a parametrized test, but fixtures cannot be used as | 280 | # This should really be a parametrized test, but fixtures cannot be used as |
4186 | 278 | # params value as if they were iterators | 281 | # params value as if they were iterators |
4187 | 279 | # It should also check for other installed files and differentiate between | 282 | # It should also check for other installed files and differentiate between |
4188 | 280 | # tool types (e.g. tools.Ipmi does not use a vendor binary) | 283 | # tool types (e.g. tools.Ipmi does not use a vendor binary) |
4190 | 281 | series = deploy_app.name.split('-')[-1] | 284 | series = deploy_app.name.split("-")[-1] |
4191 | 282 | for tool in toolset: | 285 | for tool in toolset: |
4192 | 283 | # Skip tools that are out of series for the currently deployed application | 286 | # Skip tools that are out of series for the currently deployed application |
4193 | 284 | with monkeypatch.context() as m: | 287 | with monkeypatch.context() as m: |
4198 | 285 | m.setattr('hwhealth.tools.lsb_release', | 288 | m.setattr( |
4199 | 286 | lambda x=None: {'DISTRIB_CODENAME': series}) | 289 | "hwhealth.tools.lsb_release", |
4200 | 287 | m.setattr('charmhelpers.core.hookenv.config', | 290 | lambda x=None: {"DISTRIB_CODENAME": series}, |
4201 | 288 | lambda x=None: {'manufacturer': 'test'}) | 291 | ) |
4202 | 292 | m.setattr( | ||
4203 | 293 | "charmhelpers.core.hookenv.config", | ||
4204 | 294 | lambda x=None: {"manufacturer": "test"}, | ||
4205 | 295 | ) | ||
4206 | 289 | if not tool.is_series_supported(): | 296 | if not tool.is_series_supported(): |
4208 | 290 | print('Skipping tool {}. Distribution {} not supported.'.format(tool, series)) | 297 | print( |
4209 | 298 | "Skipping tool {}. Distribution {} not supported.".format( | ||
4210 | 299 | tool, series | ||
4211 | 300 | ) | ||
4212 | 301 | ) | ||
4213 | 291 | continue | 302 | continue |
4214 | 292 | # Have we rendered the nrpe check cfg? | 303 | # Have we rendered the nrpe check cfg? |
4218 | 293 | nrpecfg_path = os.path.join(NRPECFG_DIR, | 304 | nrpecfg_path = os.path.join(NRPECFG_DIR, "check_{}.cfg".format(tool._shortname)) |
4219 | 294 | 'check_{}.cfg'.format(tool._shortname)) | 305 | print("Checking {}".format(nrpecfg_path)) |
4217 | 295 | print('Checking {}'.format(nrpecfg_path)) | ||
4220 | 296 | test_stat = await file_stat(nrpecfg_path, deployed_unit) | 306 | test_stat = await file_stat(nrpecfg_path, deployed_unit) |
4222 | 297 | assert test_stat['size'] > 0 | 307 | assert test_stat["size"] > 0 |
4223 | 298 | 308 | ||
4224 | 299 | # Have we installed the nrpe check script? | 309 | # Have we installed the nrpe check script? |
4228 | 300 | nrpescript_path = os.path.join(tool.NRPE_PLUGINS_DIR, | 310 | nrpescript_path = os.path.join( |
4229 | 301 | os.path.basename(tool._nrpe_script)) | 311 | tool.NRPE_PLUGINS_DIR, os.path.basename(tool._nrpe_script) |
4230 | 302 | print('Checking {}'.format(nrpescript_path)) | 312 | ) |
4231 | 313 | print("Checking {}".format(nrpescript_path)) | ||
4232 | 303 | test_stat = await file_stat(nrpescript_path, deployed_unit) | 314 | test_stat = await file_stat(nrpescript_path, deployed_unit) |
4237 | 304 | assert test_stat['size'] > 0 | 315 | assert test_stat["size"] > 0 |
4238 | 305 | assert test_stat['gid'] == tool.NRPE_PLUGINS_GID | 316 | assert test_stat["gid"] == tool.NRPE_PLUGINS_GID |
4239 | 306 | assert test_stat['uid'] == tool.NRPE_PLUGINS_UID | 317 | assert test_stat["uid"] == tool.NRPE_PLUGINS_UID |
4240 | 307 | assert test_stat['mode'] == oct(tool.NRPE_PLUGINS_MODE) | 318 | assert test_stat["mode"] == oct(tool.NRPE_PLUGINS_MODE) |
4241 | 308 | 319 | ||
4242 | 309 | # Have we installed any common libs? | 320 | # Have we installed any common libs? |
4243 | 310 | for lib in tool._common_libs: | 321 | for lib in tool._common_libs: |
4247 | 311 | lib_path = os.path.join(tool.NRPE_PLUGINS_DIR, | 322 | lib_path = os.path.join(tool.NRPE_PLUGINS_DIR, os.path.basename(lib)) |
4248 | 312 | os.path.basename(lib)) | 323 | print("Checking {}".format(nrpescript_path)) |
4246 | 313 | print('Checking {}'.format(nrpescript_path)) | ||
4249 | 314 | test_stat = await file_stat(lib_path, deployed_unit) | 324 | test_stat = await file_stat(lib_path, deployed_unit) |
4254 | 315 | assert test_stat['size'] > 0 | 325 | assert test_stat["size"] > 0 |
4255 | 316 | assert test_stat['gid'] == tool.NRPE_PLUGINS_GID | 326 | assert test_stat["gid"] == tool.NRPE_PLUGINS_GID |
4256 | 317 | assert test_stat['uid'] == tool.NRPE_PLUGINS_UID | 327 | assert test_stat["uid"] == tool.NRPE_PLUGINS_UID |
4257 | 318 | assert test_stat['mode'] == oct(tool.NRPE_PLUGINS_MODE) | 328 | assert test_stat["mode"] == oct(tool.NRPE_PLUGINS_MODE) |
4258 | 319 | 329 | ||
4259 | 320 | if isinstance(tool, tools.Ipmi) or isinstance(tool, tools.Nvme): | 330 | if isinstance(tool, tools.Ipmi) or isinstance(tool, tools.Nvme): |
4260 | 321 | # Have we added sudo rights for running freeipmi commands? | 331 | # Have we added sudo rights for running freeipmi commands? |
4261 | 322 | sudoer_path = os.path.join(tool.SUDOERS_DIR, tool._sudoer_file) | 332 | sudoer_path = os.path.join(tool.SUDOERS_DIR, tool._sudoer_file) |
4263 | 323 | print('Checking {}'.format(sudoer_path)) | 333 | print("Checking {}".format(sudoer_path)) |
4264 | 324 | test_stat = await file_stat(sudoer_path, deployed_unit) | 334 | test_stat = await file_stat(sudoer_path, deployed_unit) |
4269 | 325 | assert test_stat['size'] > 0 | 335 | assert test_stat["size"] > 0 |
4270 | 326 | assert test_stat['gid'] == tool.SUDOERS_GID | 336 | assert test_stat["gid"] == tool.SUDOERS_GID |
4271 | 327 | assert test_stat['uid'] == tool.SUDOERS_UID | 337 | assert test_stat["uid"] == tool.SUDOERS_UID |
4272 | 328 | assert test_stat['mode'] == oct(tool.SUDOERS_MODE) | 338 | assert test_stat["mode"] == oct(tool.SUDOERS_MODE) |
4273 | 329 | 339 | ||
4274 | 330 | if isinstance(tool, tools.Ipmi) or isinstance(tool, tools.VendorTool): | 340 | if isinstance(tool, tools.Ipmi) or isinstance(tool, tools.VendorTool): |
4275 | 331 | # Have we installed the cronjob script helper? | 341 | # Have we installed the cronjob script helper? |
4279 | 332 | cron_script_path = os.path.join(tool.NRPE_PLUGINS_DIR, | 342 | cron_script_path = os.path.join(tool.NRPE_PLUGINS_DIR, tool._cron_script) |
4280 | 333 | tool._cron_script) | 343 | print("Checking {}".format(cron_script_path)) |
4278 | 334 | print('Checking {}'.format(cron_script_path)) | ||
4281 | 335 | test_stat = await file_stat(cron_script_path, deployed_unit) | 344 | test_stat = await file_stat(cron_script_path, deployed_unit) |
4286 | 336 | assert test_stat['size'] > 0 | 345 | assert test_stat["size"] > 0 |
4287 | 337 | assert test_stat['gid'] == tool.CRONJOB_SCRIPT_GID | 346 | assert test_stat["gid"] == tool.CRONJOB_SCRIPT_GID |
4288 | 338 | assert test_stat['uid'] == tool.CRONJOB_SCRIPT_UID | 347 | assert test_stat["uid"] == tool.CRONJOB_SCRIPT_UID |
4289 | 339 | assert test_stat['mode'] == oct(tool.CRONJOB_SCRIPT_MODE) | 348 | assert test_stat["mode"] == oct(tool.CRONJOB_SCRIPT_MODE) |
4290 | 340 | 349 | ||
4291 | 341 | # Have we installed the cronjob itself? | 350 | # Have we installed the cronjob itself? |
4295 | 342 | cronjob_path = os.path.join(tool.CROND_DIR, | 351 | cronjob_path = os.path.join( |
4296 | 343 | 'hwhealth_{}'.format(tool._shortname)) | 352 | tool.CROND_DIR, "hwhealth_{}".format(tool._shortname) |
4297 | 344 | print('Checking {}'.format(cronjob_path)) | 353 | ) |
4298 | 354 | print("Checking {}".format(cronjob_path)) | ||
4299 | 345 | test_stat = await file_stat(cronjob_path, deployed_unit) | 355 | test_stat = await file_stat(cronjob_path, deployed_unit) |
4301 | 346 | assert test_stat['size'] > 0 | 356 | assert test_stat["size"] > 0 |
4302 | 347 | 357 | ||
4303 | 348 | if isinstance(tool, tools.VendorTool): | 358 | if isinstance(tool, tools.VendorTool): |
4304 | 349 | # Have we installed the vendor binary? | 359 | # Have we installed the vendor binary? |
4305 | 350 | if isinstance(tool, tools.Mdadm): | 360 | if isinstance(tool, tools.Mdadm): |
4307 | 351 | bin_path = os.path.join('/sbin', tool._shortname) | 361 | bin_path = os.path.join("/sbin", tool._shortname) |
4308 | 352 | else: | 362 | else: |
4309 | 353 | bin_path = os.path.join(tool.TOOLS_DIR, tool._shortname) | 363 | bin_path = os.path.join(tool.TOOLS_DIR, tool._shortname) |
4311 | 354 | print('Checking {}'.format(bin_path)) | 364 | print("Checking {}".format(bin_path)) |
4312 | 355 | test_stat = await file_stat(bin_path, deployed_unit) | 365 | test_stat = await file_stat(bin_path, deployed_unit) |
4323 | 356 | assert test_stat['size'] > 0 | 366 | assert test_stat["size"] > 0 |
4324 | 357 | assert test_stat['gid'] == tool.TOOLS_GID | 367 | assert test_stat["gid"] == tool.TOOLS_GID |
4325 | 358 | assert test_stat['uid'] == tool.TOOLS_UID | 368 | assert test_stat["uid"] == tool.TOOLS_UID |
4326 | 359 | assert test_stat['mode'] == oct(tool.TOOLS_MODE) | 369 | assert test_stat["mode"] == oct(tool.TOOLS_MODE) |
4327 | 360 | 370 | ||
4328 | 361 | 371 | ||
4329 | 362 | @pytest.mark.parametrize('script_type', ['_nrpe_script', '_cron_script']) | 372 | @pytest.mark.parametrize("script_type", ["_nrpe_script", "_cron_script"]) |
4330 | 363 | async def test_imports(script_type, monkeypatch, toolset, deploy_app, deployed_unit, run_command): | 373 | async def test_imports( |
4331 | 364 | '''Dry run all auxiliary files to ensure we have all needed dependecies''' | 374 | script_type, monkeypatch, toolset, deploy_app, deployed_unit, run_command |
4332 | 365 | series = deploy_app.name.split('-')[-1] | 375 | ): |
4333 | 376 | """Dry run all auxiliary files to ensure we have all needed dependecies""" | ||
4334 | 377 | series = deploy_app.name.split("-")[-1] | ||
4335 | 366 | for tool in toolset: | 378 | for tool in toolset: |
4336 | 367 | # Skip tools that are out of series for the currently deployed application | 379 | # Skip tools that are out of series for the currently deployed application |
4337 | 368 | with monkeypatch.context() as m: | 380 | with monkeypatch.context() as m: |
4342 | 369 | m.setattr('hwhealth.tools.lsb_release', | 381 | m.setattr( |
4343 | 370 | lambda x=None: {'DISTRIB_CODENAME': series}) | 382 | "hwhealth.tools.lsb_release", |
4344 | 371 | m.setattr('charmhelpers.core.hookenv.config', | 383 | lambda x=None: {"DISTRIB_CODENAME": series}, |
4345 | 372 | lambda x=None: {'manufacturer': 'test'}) | 384 | ) |
4346 | 385 | m.setattr( | ||
4347 | 386 | "charmhelpers.core.hookenv.config", | ||
4348 | 387 | lambda x=None: {"manufacturer": "test"}, | ||
4349 | 388 | ) | ||
4350 | 373 | if not tool.is_series_supported(): | 389 | if not tool.is_series_supported(): |
4352 | 374 | print('Skipping tool {}. Distribution {} not supported.'.format(tool, series)) | 390 | print( |
4353 | 391 | "Skipping tool {}. Distribution {} not supported.".format( | ||
4354 | 392 | tool, series | ||
4355 | 393 | ) | ||
4356 | 394 | ) | ||
4357 | 375 | continue | 395 | continue |
4358 | 376 | script_name = getattr(tool, script_type) | 396 | script_name = getattr(tool, script_type) |
4359 | 377 | tool_name = tool.__class__.__name__ | 397 | tool_name = tool.__class__.__name__ |
4360 | 378 | if not script_name: | 398 | if not script_name: |
4361 | 379 | # Cannot pytest.skip because it would break out of the loop | 399 | # Cannot pytest.skip because it would break out of the loop |
4364 | 380 | print('Skipping test as {} does not have a {}' | 400 | print( |
4365 | 381 | ''.format(tool_name, script_type)) | 401 | "Skipping test as {} does not have a {}" |
4366 | 402 | "".format(tool_name, script_type) | ||
4367 | 403 | ) | ||
4368 | 382 | else: | 404 | else: |
4370 | 383 | print('Checking {}: {}'.format(tool_name, script_name)) | 405 | print("Checking {}: {}".format(tool_name, script_name)) |
4371 | 384 | path = os.path.join(tool.NRPE_PLUGINS_DIR, script_name) | 406 | path = os.path.join(tool.NRPE_PLUGINS_DIR, script_name) |
4372 | 385 | cmd = path + " --help" | 407 | cmd = path + " --help" |
4373 | 386 | results = await run_command(cmd, deployed_unit) | 408 | results = await run_command(cmd, deployed_unit) |
4377 | 387 | rc = results['Code'] | 409 | rc = results["Code"] |
4378 | 388 | assert rc == '0', ('{}, {}. RC is non-zero. results={}' | 410 | assert rc == "0", "{}, {}. RC is non-zero. results={}".format( |
4379 | 389 | ''.format(tool_name, script_name, results)) | 411 | tool_name, script_name, results |
4380 | 412 | ) | ||
4381 | 390 | 413 | ||
4382 | 391 | 414 | ||
4383 | 392 | async def test_removal(monkeypatch, toolset, model, deploy_app, file_stat): | 415 | async def test_removal(monkeypatch, toolset, model, deploy_app, file_stat): |
4385 | 393 | '''Remove the unit, test that all files have been cleaned up''' | 416 | """Remove the unit, test that all files have been cleaned up""" |
4386 | 394 | hw_health_app_name = deploy_app.name | 417 | hw_health_app_name = deploy_app.name |
4388 | 395 | series = deploy_app.name.split('-')[-1] | 418 | series = deploy_app.name.split("-")[-1] |
4389 | 396 | await deploy_app.remove() | 419 | await deploy_app.remove() |
4390 | 397 | await model.block_until( | 420 | await model.block_until( |
4393 | 398 | lambda: hw_health_app_name not in model.applications, | 421 | lambda: hw_health_app_name not in model.applications, timeout=DEF_TIMEOUT |
4392 | 399 | timeout=DEF_TIMEOUT | ||
4394 | 400 | ) | 422 | ) |
4395 | 401 | # Since we've removed the hw-health app, we can't target it anymore, we | 423 | # Since we've removed the hw-health app, we can't target it anymore, we |
4396 | 402 | # need to find the principal unit | 424 | # need to find the principal unit |
4397 | 403 | for unit in model.units.values(): | 425 | for unit in model.units.values(): |
4399 | 404 | if unit.entity_id.startswith('ubuntu-{}'.format(series)): | 426 | if unit.entity_id.startswith("ubuntu-{}".format(series)): |
4400 | 405 | ubuntu_unit = unit | 427 | ubuntu_unit = unit |
4401 | 406 | for tool in toolset: | 428 | for tool in toolset: |
4402 | 407 | # Skip tools that are out of series for the currently deployed application | 429 | # Skip tools that are out of series for the currently deployed application |
4403 | 408 | with monkeypatch.context() as m: | 430 | with monkeypatch.context() as m: |
4408 | 409 | m.setattr('hwhealth.tools.lsb_release', | 431 | m.setattr( |
4409 | 410 | lambda x=None: {'DISTRIB_CODENAME': series}) | 432 | "hwhealth.tools.lsb_release", |
4410 | 411 | m.setattr('charmhelpers.core.hookenv.config', | 433 | lambda x=None: {"DISTRIB_CODENAME": series}, |
4411 | 412 | lambda x=None: {'manufacturer': 'test'}) | 434 | ) |
4412 | 435 | m.setattr( | ||
4413 | 436 | "charmhelpers.core.hookenv.config", | ||
4414 | 437 | lambda x=None: {"manufacturer": "test"}, | ||
4415 | 438 | ) | ||
4416 | 413 | if not tool.is_series_supported(): | 439 | if not tool.is_series_supported(): |
4418 | 414 | print('Skipping tool {}. Distribution {} not supported.'.format(tool, series)) | 440 | print( |
4419 | 441 | "Skipping tool {}. Distribution {} not supported.".format( | ||
4420 | 442 | tool, series | ||
4421 | 443 | ) | ||
4422 | 444 | ) | ||
4423 | 415 | continue | 445 | continue |
4424 | 416 | # Have we removed the nrpe check cfg? | 446 | # Have we removed the nrpe check cfg? |
4428 | 417 | nrpecfg_path = os.path.join(NRPECFG_DIR, | 447 | nrpecfg_path = os.path.join(NRPECFG_DIR, "check_{}.cfg".format(tool._shortname)) |
4429 | 418 | 'check_{}.cfg'.format(tool._shortname)) | 448 | print("Checking {}".format(nrpecfg_path)) |
4427 | 419 | print('Checking {}'.format(nrpecfg_path)) | ||
4430 | 420 | with pytest.raises(RuntimeError): | 449 | with pytest.raises(RuntimeError): |
4431 | 421 | await file_stat(nrpecfg_path, ubuntu_unit) | 450 | await file_stat(nrpecfg_path, ubuntu_unit) |
4432 | 422 | 451 | ||
4433 | 423 | # Have we removed the nrpe check script? | 452 | # Have we removed the nrpe check script? |
4437 | 424 | nrpescript_path = os.path.join(tool.NRPE_PLUGINS_DIR, | 453 | nrpescript_path = os.path.join(tool.NRPE_PLUGINS_DIR, tool._nrpe_script) |
4438 | 425 | tool._nrpe_script) | 454 | print("Checking {}".format(nrpescript_path)) |
4436 | 426 | print('Checking {}'.format(nrpescript_path)) | ||
4439 | 427 | with pytest.raises(RuntimeError): | 455 | with pytest.raises(RuntimeError): |
4440 | 428 | await file_stat(nrpescript_path, ubuntu_unit) | 456 | await file_stat(nrpescript_path, ubuntu_unit) |
4441 | 429 | 457 | ||
4442 | 430 | if isinstance(tool, tools.Ipmi) or isinstance(tool, tools.Nvme): | 458 | if isinstance(tool, tools.Ipmi) or isinstance(tool, tools.Nvme): |
4443 | 431 | # Have we removed sudo rights for running freeipmi commands? | 459 | # Have we removed sudo rights for running freeipmi commands? |
4444 | 432 | sudoer_path = os.path.join(tool.SUDOERS_DIR, tool._sudoer_file) | 460 | sudoer_path = os.path.join(tool.SUDOERS_DIR, tool._sudoer_file) |
4446 | 433 | print('Checking {}'.format(sudoer_path)) | 461 | print("Checking {}".format(sudoer_path)) |
4447 | 434 | with pytest.raises(RuntimeError): | 462 | with pytest.raises(RuntimeError): |
4448 | 435 | await file_stat(sudoer_path, ubuntu_unit) | 463 | await file_stat(sudoer_path, ubuntu_unit) |
4449 | 436 | 464 | ||
4450 | 437 | if isinstance(tool, tools.Ipmi) or isinstance(tool, tools.VendorTool): | 465 | if isinstance(tool, tools.Ipmi) or isinstance(tool, tools.VendorTool): |
4451 | 438 | # Have we removed the cronjob script helper? | 466 | # Have we removed the cronjob script helper? |
4455 | 439 | cronjob_path = os.path.join(tool.NRPE_PLUGINS_DIR, | 467 | cronjob_path = os.path.join(tool.NRPE_PLUGINS_DIR, tool._cron_script) |
4456 | 440 | tool._cron_script) | 468 | print("Checking {}".format(cronjob_path)) |
4454 | 441 | print('Checking {}'.format(cronjob_path)) | ||
4457 | 442 | with pytest.raises(RuntimeError): | 469 | with pytest.raises(RuntimeError): |
4458 | 443 | await file_stat(cronjob_path, ubuntu_unit) | 470 | await file_stat(cronjob_path, ubuntu_unit) |
4459 | 444 | 471 | ||
4460 | 445 | # Have we removed the cronjob itself? | 472 | # Have we removed the cronjob itself? |
4464 | 446 | cronjob_path = os.path.join(tool.CROND_DIR, | 473 | cronjob_path = os.path.join( |
4465 | 447 | 'hwhealth_{}'.format(tool._shortname)) | 474 | tool.CROND_DIR, "hwhealth_{}".format(tool._shortname) |
4466 | 448 | print('Checking {}'.format(cronjob_path)) | 475 | ) |
4467 | 476 | print("Checking {}".format(cronjob_path)) | ||
4468 | 449 | with pytest.raises(RuntimeError): | 477 | with pytest.raises(RuntimeError): |
4469 | 450 | await file_stat(cronjob_path, ubuntu_unit) | 478 | await file_stat(cronjob_path, ubuntu_unit) |
4470 | 451 | 479 | ||
4472 | 452 | if isinstance(tool, tools.VendorTool) and not isinstance(tool, tools.Mdadm): # noqa E501 | 480 | if isinstance(tool, tools.VendorTool) and not isinstance( |
4473 | 481 | tool, tools.Mdadm | ||
4474 | 482 | ): # noqa E501 | ||
4475 | 453 | # /sbin/mdadm will not be removed, but the vendor binaries | 483 | # /sbin/mdadm will not be removed, but the vendor binaries |
4476 | 454 | # should have been | 484 | # should have been |
4477 | 455 | bin_path = os.path.join(tool.TOOLS_DIR, tool._shortname) | 485 | bin_path = os.path.join(tool.TOOLS_DIR, tool._shortname) |
4479 | 456 | print('Checking {}'.format(bin_path)) | 486 | print("Checking {}".format(bin_path)) |
4480 | 457 | with pytest.raises(RuntimeError): | 487 | with pytest.raises(RuntimeError): |
4481 | 458 | await file_stat(bin_path, ubuntu_unit) | 488 | await file_stat(bin_path, ubuntu_unit) |
4482 | diff --git a/src/tests/unit/lib/samples.py b/src/tests/unit/lib/samples.py | |||
4483 | index a0790ee..db34828 100644 | |||
4484 | --- a/src/tests/unit/lib/samples.py | |||
4485 | +++ b/src/tests/unit/lib/samples.py | |||
4486 | @@ -1,12 +1,7 @@ | |||
4487 | 1 | import os | 1 | import os |
4488 | 2 | import glob | 2 | import glob |
4489 | 3 | 3 | ||
4496 | 4 | SAMPLES_DIR = os.path.join( | 4 | SAMPLES_DIR = os.path.join(os.path.dirname(__file__), "..", "..", "hw-health-samples") |
4491 | 5 | os.path.dirname(__file__), | ||
4492 | 6 | '..', | ||
4493 | 7 | '..', | ||
4494 | 8 | 'hw-health-samples' | ||
4495 | 9 | ) | ||
4497 | 10 | 5 | ||
4498 | 11 | 6 | ||
4499 | 12 | def get_sample(name): | 7 | def get_sample(name): |
4500 | diff --git a/src/tests/unit/test_actions.py b/src/tests/unit/test_actions.py | |||
4501 | index dfa7323..3a3f4f2 100644 | |||
4502 | --- a/src/tests/unit/test_actions.py | |||
4503 | +++ b/src/tests/unit/test_actions.py | |||
4504 | @@ -17,110 +17,149 @@ import sys | |||
4505 | 17 | import unittest | 17 | import unittest |
4506 | 18 | import unittest.mock as mock | 18 | import unittest.mock as mock |
4507 | 19 | 19 | ||
4509 | 20 | sys.path.append('.') | 20 | sys.path.append(".") |
4510 | 21 | from actions.actions import clear_sel, show_sel # noqa:E402 | 21 | from actions.actions import clear_sel, show_sel # noqa:E402 |
4511 | 22 | 22 | ||
4512 | 23 | 23 | ||
4513 | 24 | class ClearSelTestCase(unittest.TestCase): | 24 | class ClearSelTestCase(unittest.TestCase): |
4518 | 25 | 25 | @mock.patch("actions.actions.log") | |
4519 | 26 | @mock.patch('actions.actions.log') | 26 | @mock.patch("subprocess.check_output") |
4520 | 27 | @mock.patch('subprocess.check_output') | 27 | @mock.patch("subprocess.check_call") |
4517 | 28 | @mock.patch('subprocess.check_call') | ||
4521 | 29 | def test_clear_sel(self, mock_check_call, mock_subprocess, mock_log): | 28 | def test_clear_sel(self, mock_check_call, mock_subprocess, mock_log): |
4522 | 30 | sel_output = "Unittest system event log output".encode() | 29 | sel_output = "Unittest system event log output".encode() |
4523 | 31 | mock_subprocess.return_value = sel_output | 30 | mock_subprocess.return_value = sel_output |
4524 | 32 | mock_check_call.return_value = None | 31 | mock_check_call.return_value = None |
4525 | 33 | clear_sel() | 32 | clear_sel() |
4527 | 34 | mock_check_call.assert_called_once_with(['action-set', "message={}".format(sel_output.decode())]) | 33 | mock_check_call.assert_called_once_with( |
4528 | 34 | ["action-set", "message={}".format(sel_output.decode())] | ||
4529 | 35 | ) | ||
4530 | 35 | 36 | ||
4531 | 36 | 37 | ||
4532 | 37 | class ShowSelTestCase(unittest.TestCase): | 38 | class ShowSelTestCase(unittest.TestCase): |
4538 | 38 | 39 | @mock.patch("actions.actions.log") | |
4539 | 39 | @mock.patch('actions.actions.log') | 40 | @mock.patch("actions.actions.action_set") |
4540 | 40 | @mock.patch('actions.actions.action_set') | 41 | @mock.patch("actions.actions.action_get") |
4541 | 41 | @mock.patch('actions.actions.action_get') | 42 | @mock.patch("subprocess.check_output") |
4537 | 42 | @mock.patch('subprocess.check_output') | ||
4542 | 43 | def test_empty_output_from_ipmi_sel( | 43 | def test_empty_output_from_ipmi_sel( |
4544 | 44 | self, mock_check_output, mock_action_get, mock_action_set, mock_log): | 44 | self, mock_check_output, mock_action_get, mock_action_set, mock_log |
4545 | 45 | ): | ||
4546 | 45 | show_all_flag = False | 46 | show_all_flag = False |
4547 | 46 | output_body = "" | 47 | output_body = "" |
4548 | 47 | expected_output = "No matching entries found" | 48 | expected_output = "No matching entries found" |
4551 | 48 | self._test_valid_show_sel_call(show_all_flag, output_body, expected_output, | 49 | self._test_valid_show_sel_call( |
4552 | 49 | mock_check_output, mock_action_get, mock_action_set) | 50 | show_all_flag, |
4553 | 51 | output_body, | ||
4554 | 52 | expected_output, | ||
4555 | 53 | mock_check_output, | ||
4556 | 54 | mock_action_get, | ||
4557 | 55 | mock_action_set, | ||
4558 | 56 | ) | ||
4559 | 50 | 57 | ||
4564 | 51 | @mock.patch('actions.actions.log') | 58 | @mock.patch("actions.actions.log") |
4565 | 52 | @mock.patch('actions.actions.action_set') | 59 | @mock.patch("actions.actions.action_set") |
4566 | 53 | @mock.patch('actions.actions.action_get') | 60 | @mock.patch("actions.actions.action_get") |
4567 | 54 | @mock.patch('subprocess.check_output') | 61 | @mock.patch("subprocess.check_output") |
4568 | 55 | def test_only_nominal_entries_with_show_all_false( | 62 | def test_only_nominal_entries_with_show_all_false( |
4570 | 56 | self, mock_check_output, mock_action_get, mock_action_set, mock_log): | 63 | self, mock_check_output, mock_action_get, mock_action_set, mock_log |
4571 | 64 | ): | ||
4572 | 57 | show_all_flag = False | 65 | show_all_flag = False |
4579 | 58 | output_body = "\n".join([ | 66 | output_body = "\n".join( |
4580 | 59 | "Header line", | 67 | [ |
4581 | 60 | "Nominal body line #1", | 68 | "Header line", |
4582 | 61 | "Nominal body line #2", | 69 | "Nominal body line #1", |
4583 | 62 | "Nominal body line #3", | 70 | "Nominal body line #2", |
4584 | 63 | ]) | 71 | "Nominal body line #3", |
4585 | 72 | ] | ||
4586 | 73 | ) | ||
4587 | 64 | expected_output = "No matching entries found" | 74 | expected_output = "No matching entries found" |
4590 | 65 | self._test_valid_show_sel_call(show_all_flag, output_body, expected_output, | 75 | self._test_valid_show_sel_call( |
4591 | 66 | mock_check_output, mock_action_get, mock_action_set) | 76 | show_all_flag, |
4592 | 77 | output_body, | ||
4593 | 78 | expected_output, | ||
4594 | 79 | mock_check_output, | ||
4595 | 80 | mock_action_get, | ||
4596 | 81 | mock_action_set, | ||
4597 | 82 | ) | ||
4598 | 67 | 83 | ||
4603 | 68 | @mock.patch('actions.actions.log') | 84 | @mock.patch("actions.actions.log") |
4604 | 69 | @mock.patch('actions.actions.action_set') | 85 | @mock.patch("actions.actions.action_set") |
4605 | 70 | @mock.patch('actions.actions.action_get') | 86 | @mock.patch("actions.actions.action_get") |
4606 | 71 | @mock.patch('subprocess.check_output') | 87 | @mock.patch("subprocess.check_output") |
4607 | 72 | def test_only_nominal_entries_with_show_all_true( | 88 | def test_only_nominal_entries_with_show_all_true( |
4609 | 73 | self, mock_check_output, mock_action_get, mock_action_set, mock_log): | 89 | self, mock_check_output, mock_action_get, mock_action_set, mock_log |
4610 | 90 | ): | ||
4611 | 74 | show_all_flag = True | 91 | show_all_flag = True |
4618 | 75 | output_body = "\n".join([ | 92 | output_body = "\n".join( |
4619 | 76 | "Header line", | 93 | [ |
4620 | 77 | "Nominal body line #1", | 94 | "Header line", |
4621 | 78 | "Nominal body line #2", | 95 | "Nominal body line #1", |
4622 | 79 | "Nominal body line #3", | 96 | "Nominal body line #2", |
4623 | 80 | ]) | 97 | "Nominal body line #3", |
4624 | 98 | ] | ||
4625 | 99 | ) | ||
4626 | 81 | expected_output = output_body | 100 | expected_output = output_body |
4629 | 82 | self._test_valid_show_sel_call(show_all_flag, output_body, expected_output, | 101 | self._test_valid_show_sel_call( |
4630 | 83 | mock_check_output, mock_action_get, mock_action_set) | 102 | show_all_flag, |
4631 | 103 | output_body, | ||
4632 | 104 | expected_output, | ||
4633 | 105 | mock_check_output, | ||
4634 | 106 | mock_action_get, | ||
4635 | 107 | mock_action_set, | ||
4636 | 108 | ) | ||
4637 | 84 | 109 | ||
4642 | 85 | @mock.patch('actions.actions.log') | 110 | @mock.patch("actions.actions.log") |
4643 | 86 | @mock.patch('actions.actions.action_set') | 111 | @mock.patch("actions.actions.action_set") |
4644 | 87 | @mock.patch('actions.actions.action_get') | 112 | @mock.patch("actions.actions.action_get") |
4645 | 88 | @mock.patch('subprocess.check_output') | 113 | @mock.patch("subprocess.check_output") |
4646 | 89 | def test_non_nominal_entries_present_with_show_all_false( | 114 | def test_non_nominal_entries_present_with_show_all_false( |
4648 | 90 | self, mock_check_output, mock_action_get, mock_action_set, mock_log): | 115 | self, mock_check_output, mock_action_get, mock_action_set, mock_log |
4649 | 116 | ): | ||
4650 | 91 | show_all_flag = False | 117 | show_all_flag = False |
4668 | 92 | output_body = "\n".join([ | 118 | output_body = "\n".join( |
4669 | 93 | "Header line", | 119 | [ |
4670 | 94 | "Nominal body line #1", | 120 | "Header line", |
4671 | 95 | "Warning line #1", | 121 | "Nominal body line #1", |
4672 | 96 | "Critical line #1", | 122 | "Warning line #1", |
4673 | 97 | "Nominal body line #2", | 123 | "Critical line #1", |
4674 | 98 | "Nominal body line #3", | 124 | "Nominal body line #2", |
4675 | 99 | "Warning line #2", | 125 | "Nominal body line #3", |
4676 | 100 | ]) | 126 | "Warning line #2", |
4677 | 101 | expected_output = "\n".join([ | 127 | ] |
4678 | 102 | "Header line", | 128 | ) |
4679 | 103 | "Warning line #1", | 129 | expected_output = "\n".join( |
4680 | 104 | "Critical line #1", | 130 | ["Header line", "Warning line #1", "Critical line #1", "Warning line #2"] |
4681 | 105 | "Warning line #2", | 131 | ) |
4682 | 106 | ]) | 132 | self._test_valid_show_sel_call( |
4683 | 107 | self._test_valid_show_sel_call(show_all_flag, output_body, expected_output, | 133 | show_all_flag, |
4684 | 108 | mock_check_output, mock_action_get, mock_action_set) | 134 | output_body, |
4685 | 135 | expected_output, | ||
4686 | 136 | mock_check_output, | ||
4687 | 137 | mock_action_get, | ||
4688 | 138 | mock_action_set, | ||
4689 | 139 | ) | ||
4690 | 109 | 140 | ||
4693 | 110 | def _test_valid_show_sel_call(self, show_all_flag, output_body, expected_output, | 141 | def _test_valid_show_sel_call( |
4694 | 111 | mock_check_output, mock_action_get, mock_action_set): | 142 | self, |
4695 | 143 | show_all_flag, | ||
4696 | 144 | output_body, | ||
4697 | 145 | expected_output, | ||
4698 | 146 | mock_check_output, | ||
4699 | 147 | mock_action_get, | ||
4700 | 148 | mock_action_set, | ||
4701 | 149 | ): | ||
4702 | 112 | mock_action_get.return_value = show_all_flag | 150 | mock_action_get.return_value = show_all_flag |
4703 | 113 | mock_check_output.return_value = output_body.encode() | 151 | mock_check_output.return_value = output_body.encode() |
4704 | 114 | show_sel() | 152 | show_sel() |
4707 | 115 | self.assertEqual(mock_action_set.call_args[0][0]['message'], | 153 | self.assertEqual(mock_action_set.call_args[0][0]["message"], expected_output) |
4706 | 116 | expected_output) | ||
4708 | 117 | 154 | ||
4713 | 118 | @mock.patch('actions.actions.action_fail') | 155 | @mock.patch("actions.actions.action_fail") |
4714 | 119 | @mock.patch('actions.actions.action_get') | 156 | @mock.patch("actions.actions.action_get") |
4715 | 120 | @mock.patch('subprocess.check_output') | 157 | @mock.patch("subprocess.check_output") |
4716 | 121 | def test_subprocess_error(self, mock_check_output, mock_action_get, mock_action_fail): | 158 | def test_subprocess_error( |
4717 | 159 | self, mock_check_output, mock_action_get, mock_action_fail | ||
4718 | 160 | ): | ||
4719 | 122 | def raise_error(*args, **kwargs): | 161 | def raise_error(*args, **kwargs): |
4721 | 123 | raise subprocess.CalledProcessError(1, ['bogus-cmd']) | 162 | raise subprocess.CalledProcessError(1, ["bogus-cmd"]) |
4722 | 124 | 163 | ||
4723 | 125 | show_all_flag = False | 164 | show_all_flag = False |
4724 | 126 | mock_action_get.return_value = show_all_flag | 165 | mock_action_get.return_value = show_all_flag |
4725 | @@ -128,4 +167,6 @@ class ShowSelTestCase(unittest.TestCase): | |||
4726 | 128 | show_sel() | 167 | show_sel() |
4727 | 129 | self.assertEqual( | 168 | self.assertEqual( |
4728 | 130 | mock_action_fail.call_args[0][0], | 169 | mock_action_fail.call_args[0][0], |
4730 | 131 | "Action failed with Command '['bogus-cmd']' returned non-zero exit status 1.") | 170 | "Action failed with Command '['bogus-cmd']' " |
4731 | 171 | "returned non-zero exit status 1.", | ||
4732 | 172 | ) | ||
4733 | diff --git a/src/tests/unit/test_check_mdadm.py b/src/tests/unit/test_check_mdadm.py | |||
4734 | index ca20633..fe7e357 100644 | |||
4735 | --- a/src/tests/unit/test_check_mdadm.py | |||
4736 | +++ b/src/tests/unit/test_check_mdadm.py | |||
4737 | @@ -6,67 +6,56 @@ import unittest.mock as mock | |||
4738 | 6 | 6 | ||
4739 | 7 | import nagios_plugin3 | 7 | import nagios_plugin3 |
4740 | 8 | 8 | ||
4742 | 9 | sys.path.append(os.path.join(os.path.dirname(__file__), 'lib')) | 9 | sys.path.append(os.path.join(os.path.dirname(__file__), "lib")) |
4743 | 10 | from samples import get_sample # noqa: E402 | 10 | from samples import get_sample # noqa: E402 |
4744 | 11 | 11 | ||
4746 | 12 | sys.path.append(os.path.join(os.path.dirname(__file__), '../../files/mdadm')) | 12 | sys.path.append(os.path.join(os.path.dirname(__file__), "../../files/mdadm")) |
4747 | 13 | import check_mdadm # noqa: E402 | 13 | import check_mdadm # noqa: E402 |
4748 | 14 | 14 | ||
4749 | 15 | 15 | ||
4750 | 16 | class TestCheckMdadm(unittest.TestCase): | 16 | class TestCheckMdadm(unittest.TestCase): |
4751 | 17 | def setUp(self): | 17 | def setUp(self): |
4757 | 18 | self.samples_dir = os.path.join( | 18 | self.samples_dir = os.path.join(os.getcwd(), "tests", "hw-health-samples") |
4753 | 19 | os.getcwd(), | ||
4754 | 20 | 'tests', | ||
4755 | 21 | 'hw-health-samples' | ||
4756 | 22 | ) | ||
4758 | 23 | 19 | ||
4759 | 24 | def test_parse_output_crit(self): | 20 | def test_parse_output_crit(self): |
4762 | 25 | check_mdadm.ARGS.input_file = get_sample('mdadm.output.nrpe.critical') | 21 | check_mdadm.ARGS.input_file = get_sample("mdadm.output.nrpe.critical") |
4763 | 26 | expected = 'CRITICAL: critical msg' | 22 | expected = "CRITICAL: critical msg" |
4764 | 27 | with self.assertRaises(nagios_plugin3.CriticalError) as context: | 23 | with self.assertRaises(nagios_plugin3.CriticalError) as context: |
4765 | 28 | check_mdadm.parse_output() | 24 | check_mdadm.parse_output() |
4766 | 29 | self.assertTrue(expected in str(context.exception)) | 25 | self.assertTrue(expected in str(context.exception)) |
4767 | 30 | 26 | ||
4768 | 31 | def test_parse_output_warn(self): | 27 | def test_parse_output_warn(self): |
4771 | 32 | check_mdadm.ARGS.input_file = get_sample('mdadm.output.nrpe.warning') | 28 | check_mdadm.ARGS.input_file = get_sample("mdadm.output.nrpe.warning") |
4772 | 33 | expected = 'WARNING: warning msg' | 29 | expected = "WARNING: warning msg" |
4773 | 34 | with self.assertRaises(nagios_plugin3.WarnError) as context: | 30 | with self.assertRaises(nagios_plugin3.WarnError) as context: |
4774 | 35 | check_mdadm.parse_output() | 31 | check_mdadm.parse_output() |
4775 | 36 | self.assertTrue(expected in str(context.exception)) | 32 | self.assertTrue(expected in str(context.exception)) |
4776 | 37 | 33 | ||
4778 | 38 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 34 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4779 | 39 | def test_parse_output_ok(self, mock_print): | 35 | def test_parse_output_ok(self, mock_print): |
4781 | 40 | check_mdadm.ARGS.input_file = get_sample('mdadm.output.nrpe.ok') | 36 | check_mdadm.ARGS.input_file = get_sample("mdadm.output.nrpe.ok") |
4782 | 41 | check_mdadm.parse_output() | 37 | check_mdadm.parse_output() |
4783 | 42 | self.assertEqual( | 38 | self.assertEqual( |
4784 | 43 | mock_print.getvalue(), | 39 | mock_print.getvalue(), |
4786 | 44 | 'OK: /dev/md0 ok; /dev/md1 ok; /dev/md3 ok; /dev/md2 ok\n' | 40 | "OK: /dev/md0 ok; /dev/md1 ok; /dev/md3 ok; /dev/md2 ok\n", |
4787 | 45 | ) | 41 | ) |
4788 | 46 | 42 | ||
4790 | 47 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 43 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4791 | 48 | def test_parse_output_unknown_filenotfound(self, mock_print): | 44 | def test_parse_output_unknown_filenotfound(self, mock_print): |
4795 | 49 | check_mdadm.ARGS.input_file = get_sample('thisfiledoesnotexist') | 45 | check_mdadm.ARGS.input_file = get_sample("thisfiledoesnotexist") |
4796 | 50 | expected = 'UNKNOWN: file not found ({})'.format( | 46 | expected = "UNKNOWN: file not found ({})".format(check_mdadm.ARGS.input_file) |
4794 | 51 | check_mdadm.ARGS.input_file) | ||
4797 | 52 | with self.assertRaises(nagios_plugin3.UnknownError) as context: | 47 | with self.assertRaises(nagios_plugin3.UnknownError) as context: |
4798 | 53 | check_mdadm.parse_output() | 48 | check_mdadm.parse_output() |
4799 | 54 | self.assertTrue(expected in str(context.exception)) | 49 | self.assertTrue(expected in str(context.exception)) |
4800 | 55 | 50 | ||
4802 | 56 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 51 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4803 | 57 | def test_parse_output_unknown1(self, mock_print): | 52 | def test_parse_output_unknown1(self, mock_print): |
4805 | 58 | check_mdadm.ARGS.input_file = get_sample('mdadm.output.nrpe.unknown.1') | 53 | check_mdadm.ARGS.input_file = get_sample("mdadm.output.nrpe.unknown.1") |
4806 | 59 | check_mdadm.parse_output() | 54 | check_mdadm.parse_output() |
4811 | 60 | self.assertEqual( | 55 | self.assertEqual(mock_print.getvalue(), "UNKNOWN: unknown msg\n") |
4808 | 61 | mock_print.getvalue(), | ||
4809 | 62 | 'UNKNOWN: unknown msg\n' | ||
4810 | 63 | ) | ||
4812 | 64 | 56 | ||
4814 | 65 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 57 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4815 | 66 | def test_parse_output_unknown2(self, mock_print): | 58 | def test_parse_output_unknown2(self, mock_print): |
4817 | 67 | check_mdadm.ARGS.input_file = get_sample('mdadm.output.nrpe.unknown.2') | 59 | check_mdadm.ARGS.input_file = get_sample("mdadm.output.nrpe.unknown.2") |
4818 | 68 | check_mdadm.parse_output() | 60 | check_mdadm.parse_output() |
4823 | 69 | self.assertEqual( | 61 | self.assertEqual(mock_print.getvalue(), "unknown msg2\n") |
4820 | 70 | mock_print.getvalue(), | ||
4821 | 71 | 'unknown msg2\n' | ||
4822 | 72 | ) | ||
4824 | diff --git a/src/tests/unit/test_check_megacli.py b/src/tests/unit/test_check_megacli.py | |||
4825 | index 00b9f4e..a6e7501 100644 | |||
4826 | --- a/src/tests/unit/test_check_megacli.py | |||
4827 | +++ b/src/tests/unit/test_check_megacli.py | |||
4828 | @@ -6,42 +6,40 @@ import unittest.mock as mock | |||
4829 | 6 | 6 | ||
4830 | 7 | import nagios_plugin3 | 7 | import nagios_plugin3 |
4831 | 8 | 8 | ||
4833 | 9 | sys.path.append(os.path.join(os.path.dirname(__file__), 'lib')) | 9 | sys.path.append(os.path.join(os.path.dirname(__file__), "lib")) |
4834 | 10 | from samples import get_sample # noqa: E402 | 10 | from samples import get_sample # noqa: E402 |
4835 | 11 | 11 | ||
4837 | 12 | sys.path.append(os.path.join(os.path.dirname(__file__), '../../files/megacli')) | 12 | sys.path.append(os.path.join(os.path.dirname(__file__), "../../files/megacli")) |
4838 | 13 | import check_megacli # noqa: E402 | 13 | import check_megacli # noqa: E402 |
4839 | 14 | 14 | ||
4840 | 15 | 15 | ||
4841 | 16 | class TestCheckMegaCLI(unittest.TestCase): | 16 | class TestCheckMegaCLI(unittest.TestCase): |
4842 | 17 | def setUp(self): | 17 | def setUp(self): |
4848 | 18 | self.samples_dir = os.path.join( | 18 | self.samples_dir = os.path.join(os.getcwd(), "tests", "hw-health-samples") |
4844 | 19 | os.getcwd(), | ||
4845 | 20 | 'tests', | ||
4846 | 21 | 'hw-health-samples' | ||
4847 | 22 | ) | ||
4849 | 23 | 19 | ||
4851 | 24 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 20 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4852 | 25 | def test_parse_output(self, mock_print): | 21 | def test_parse_output(self, mock_print): |
4854 | 26 | check_megacli.ARGS.input_file = get_sample('megacli.output.1') | 22 | check_megacli.ARGS.input_file = get_sample("megacli.output.1") |
4855 | 27 | check_megacli.parse_output() | 23 | check_megacli.parse_output() |
4856 | 28 | actual = mock_print.getvalue() | 24 | actual = mock_print.getvalue() |
4858 | 29 | expected = 'OK: Optimal, ldrives[1], pdrives[4]\n' | 25 | expected = "OK: Optimal, ldrives[1], pdrives[4]\n" |
4859 | 30 | self.assertEqual(actual, expected) | 26 | self.assertEqual(actual, expected) |
4860 | 31 | 27 | ||
4862 | 32 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 28 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4863 | 33 | def test_parse_output_critical_singledrive(self, mock_print): | 29 | def test_parse_output_critical_singledrive(self, mock_print): |
4866 | 34 | check_megacli.ARGS.input_file = get_sample('megacli.output.nrpe.critical.1') | 30 | check_megacli.ARGS.input_file = get_sample("megacli.output.nrpe.critical.1") |
4867 | 35 | expected = 'CRITICAL: adapter(0):ld(0):state(Degraded)' | 31 | expected = "CRITICAL: adapter(0):ld(0):state(Degraded)" |
4868 | 36 | with self.assertRaises(nagios_plugin3.CriticalError) as context: | 32 | with self.assertRaises(nagios_plugin3.CriticalError) as context: |
4869 | 37 | check_megacli.parse_output() | 33 | check_megacli.parse_output() |
4870 | 38 | self.assertEqual(expected, str(context.exception)) | 34 | self.assertEqual(expected, str(context.exception)) |
4871 | 39 | 35 | ||
4873 | 40 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 36 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4874 | 41 | def test_parse_output_critical_multiple(self, mock_print): | 37 | def test_parse_output_critical_multiple(self, mock_print): |
4878 | 42 | check_megacli.ARGS.input_file = get_sample('megacli.output.nrpe.critical.2') | 38 | check_megacli.ARGS.input_file = get_sample("megacli.output.nrpe.critical.2") |
4879 | 43 | expected = ('CRITICAL: adapter(0):ld(0):state(Degraded);' | 39 | expected = ( |
4880 | 44 | ' adapter(0):ld(4):state(Degraded)') | 40 | "CRITICAL: adapter(0):ld(0):state(Degraded);" |
4881 | 41 | " adapter(0):ld(4):state(Degraded)" | ||
4882 | 42 | ) | ||
4883 | 45 | with self.assertRaises(nagios_plugin3.CriticalError) as context: | 43 | with self.assertRaises(nagios_plugin3.CriticalError) as context: |
4884 | 46 | check_megacli.parse_output() | 44 | check_megacli.parse_output() |
4885 | 47 | self.assertEqual(expected, str(context.exception)) | 45 | self.assertEqual(expected, str(context.exception)) |
4886 | diff --git a/src/tests/unit/test_check_nvme.py b/src/tests/unit/test_check_nvme.py | |||
4887 | index 097fd76..4218a85 100644 | |||
4888 | --- a/src/tests/unit/test_check_nvme.py | |||
4889 | +++ b/src/tests/unit/test_check_nvme.py | |||
4890 | @@ -4,21 +4,21 @@ import sys | |||
4891 | 4 | import unittest | 4 | import unittest |
4892 | 5 | import unittest.mock as mock | 5 | import unittest.mock as mock |
4893 | 6 | 6 | ||
4895 | 7 | sys.path.append(os.path.join(os.path.dirname(__file__), 'lib')) | 7 | sys.path.append(os.path.join(os.path.dirname(__file__), "lib")) |
4896 | 8 | from samples import get_sample # noqa: E402 | 8 | from samples import get_sample # noqa: E402 |
4897 | 9 | 9 | ||
4899 | 10 | sys.path.append(os.path.join(os.path.dirname(__file__), '../../files/nvme')) | 10 | sys.path.append(os.path.join(os.path.dirname(__file__), "../../files/nvme")) |
4900 | 11 | import check_nvme # noqa: E402 | 11 | import check_nvme # noqa: E402 |
4901 | 12 | 12 | ||
4902 | 13 | 13 | ||
4903 | 14 | class TestCheckNvme(unittest.TestCase): | 14 | class TestCheckNvme(unittest.TestCase): |
4907 | 15 | @mock.patch('check_nvme.glob.glob') | 15 | @mock.patch("check_nvme.glob.glob") |
4908 | 16 | @mock.patch('check_nvme.subprocess.check_output') | 16 | @mock.patch("check_nvme.subprocess.check_output") |
4909 | 17 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 17 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4910 | 18 | def test_parse_output(self, mock_print, mock_subprocess, mock_glob): | 18 | def test_parse_output(self, mock_print, mock_subprocess, mock_glob): |
4914 | 19 | mock_glob.return_value = ['/dev/nvme0'] | 19 | mock_glob.return_value = ["/dev/nvme0"] |
4915 | 20 | input_file = get_sample('nvme.output.1') | 20 | input_file = get_sample("nvme.output.1") |
4916 | 21 | with open(input_file, 'r') as fd: | 21 | with open(input_file, "r") as fd: |
4917 | 22 | mock_subprocess.return_value = fd.read().encode() | 22 | mock_subprocess.return_value = fd.read().encode() |
4918 | 23 | check_nvme.parse_output() | 23 | check_nvme.parse_output() |
4919 | 24 | expected = ( | 24 | expected = ( |
4920 | diff --git a/src/tests/unit/test_check_sas2ircu.py b/src/tests/unit/test_check_sas2ircu.py | |||
4921 | index cd5e854..e2053d7 100644 | |||
4922 | --- a/src/tests/unit/test_check_sas2ircu.py | |||
4923 | +++ b/src/tests/unit/test_check_sas2ircu.py | |||
4924 | @@ -4,18 +4,18 @@ import sys | |||
4925 | 4 | import unittest | 4 | import unittest |
4926 | 5 | import unittest.mock as mock | 5 | import unittest.mock as mock |
4927 | 6 | 6 | ||
4929 | 7 | sys.path.append(os.path.join(os.path.dirname(__file__), 'lib')) | 7 | sys.path.append(os.path.join(os.path.dirname(__file__), "lib")) |
4930 | 8 | from samples import get_sample # noqa: E402 | 8 | from samples import get_sample # noqa: E402 |
4931 | 9 | 9 | ||
4933 | 10 | sys.path.append(os.path.join(os.path.dirname(__file__), '../../files/sas2ircu')) | 10 | sys.path.append(os.path.join(os.path.dirname(__file__), "../../files/sas2ircu")) |
4934 | 11 | import check_sas2ircu # noqa: E402 | 11 | import check_sas2ircu # noqa: E402 |
4935 | 12 | 12 | ||
4936 | 13 | 13 | ||
4937 | 14 | class TestCheckMegaCLI(unittest.TestCase): | 14 | class TestCheckMegaCLI(unittest.TestCase): |
4939 | 15 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 15 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4940 | 16 | def test_parse_output(self, mock_print): | 16 | def test_parse_output(self, mock_print): |
4942 | 17 | check_sas2ircu.ARGS.input_file = get_sample('sas2ircu.huawei.output.1') | 17 | check_sas2ircu.ARGS.input_file = get_sample("sas2ircu.huawei.output.1") |
4943 | 18 | check_sas2ircu.parse_output() | 18 | check_sas2ircu.parse_output() |
4944 | 19 | actual = mock_print.getvalue() | 19 | actual = mock_print.getvalue() |
4946 | 20 | expected = 'OK: Ready[1:0,1:1,1:2,1:3,1:4,1:5,1:6,1:7]\n' | 20 | expected = "OK: Ready[1:0,1:1,1:2,1:3,1:4,1:5,1:6,1:7]\n" |
4947 | 21 | self.assertEqual(actual, expected) | 21 | self.assertEqual(actual, expected) |
4948 | diff --git a/src/tests/unit/test_check_sas3ircu.py b/src/tests/unit/test_check_sas3ircu.py | |||
4949 | index 1379369..bb79688 100644 | |||
4950 | --- a/src/tests/unit/test_check_sas3ircu.py | |||
4951 | +++ b/src/tests/unit/test_check_sas3ircu.py | |||
4952 | @@ -4,19 +4,19 @@ import sys | |||
4953 | 4 | import unittest | 4 | import unittest |
4954 | 5 | import unittest.mock as mock | 5 | import unittest.mock as mock |
4955 | 6 | 6 | ||
4957 | 7 | sys.path.append(os.path.join(os.path.dirname(__file__), 'lib')) | 7 | sys.path.append(os.path.join(os.path.dirname(__file__), "lib")) |
4958 | 8 | from samples import get_sample # noqa: E402 | 8 | from samples import get_sample # noqa: E402 |
4959 | 9 | 9 | ||
4961 | 10 | sys.path.append(os.path.join(os.path.dirname(__file__), '../../files/sas3ircu')) | 10 | sys.path.append(os.path.join(os.path.dirname(__file__), "../../files/sas3ircu")) |
4962 | 11 | import check_sas3ircu # noqa: E402 | 11 | import check_sas3ircu # noqa: E402 |
4963 | 12 | 12 | ||
4964 | 13 | 13 | ||
4965 | 14 | class TestCheckMegaCLI(unittest.TestCase): | 14 | class TestCheckMegaCLI(unittest.TestCase): |
4967 | 15 | @mock.patch('sys.stdout', new_callable=io.StringIO) | 15 | @mock.patch("sys.stdout", new_callable=io.StringIO) |
4968 | 16 | def test_parse_output_ok(self, mock_print): | 16 | def test_parse_output_ok(self, mock_print): |
4970 | 17 | _filepath = get_sample('sas3ircu.supermicro.ok.output.1') | 17 | _filepath = get_sample("sas3ircu.supermicro.ok.output.1") |
4971 | 18 | data = check_sas3ircu.parse_output(_filepath) | 18 | data = check_sas3ircu.parse_output(_filepath) |
4972 | 19 | check_sas3ircu.eval_status(data) | 19 | check_sas3ircu.eval_status(data) |
4973 | 20 | actual = mock_print.getvalue() | 20 | actual = mock_print.getvalue() |
4975 | 21 | expected = 'OK: no errors\n' | 21 | expected = "OK: no errors\n" |
4976 | 22 | self.assertEqual(actual, expected) | 22 | self.assertEqual(actual, expected) |
4977 | diff --git a/src/tests/unit/test_cron_hplog.py b/src/tests/unit/test_cron_hplog.py | |||
4978 | index 87c5034..b17904e 100644 | |||
4979 | --- a/src/tests/unit/test_cron_hplog.py | |||
4980 | +++ b/src/tests/unit/test_cron_hplog.py | |||
4981 | @@ -4,27 +4,30 @@ import unittest | |||
4982 | 4 | from argparse import Namespace | 4 | from argparse import Namespace |
4983 | 5 | from pathlib import Path | 5 | from pathlib import Path |
4984 | 6 | 6 | ||
4986 | 7 | sys.path.append('files/hplog') | 7 | sys.path.append("files/hplog") |
4987 | 8 | import cron_hplog # noqa: E402 | 8 | import cron_hplog # noqa: E402 |
4988 | 9 | 9 | ||
4989 | 10 | 10 | ||
4990 | 11 | class TestCronHPlog(unittest.TestCase): | 11 | class TestCronHPlog(unittest.TestCase): |
4991 | 12 | def setUp(self): | 12 | def setUp(self): |
4992 | 13 | # Skip the v flag, it serves a different purpose | 13 | # Skip the v flag, it serves a different purpose |
4994 | 14 | self.test_flags = {'t', 'f', 'p'} | 14 | self.test_flags = {"t", "f", "p"} |
4995 | 15 | 15 | ||
4996 | 16 | def _get_no_error_sample(self, flag): | 16 | def _get_no_error_sample(self, flag): |
4999 | 17 | _filepath = os.path.join(os.getcwd(), 'tests', 'hw-health-samples', | 17 | _filepath = os.path.join( |
5000 | 18 | 'hplog.{}.ewah.out'.format(flag)) | 18 | os.getcwd(), "tests", "hw-health-samples", "hplog.{}.ewah.out".format(flag) |
The diff has been truncated for viewing.
LGTM