Merge lp:~sseman/juju-chaos-monkey/logging into lp:juju-chaos-monkey
- logging
- Merge into trunk
Proposed by
Seman
Status: | Merged |
---|---|
Merged at revision: | 3 |
Proposed branch: | lp:~sseman/juju-chaos-monkey/logging |
Merge into: | lp:juju-chaos-monkey |
Diff against target: |
289 lines (+115/-23) 5 files modified
chaos/kill.py (+6/-5) chaos/net.py (+14/-13) runner.py (+11/-1) tests/test_utility.py (+67/-1) utility.py (+17/-3) |
To merge this branch: | bzr merge lp:~sseman/juju-chaos-monkey/logging |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
John George (community) | Approve | ||
Review via email: mp+257917@code.launchpad.net |
Commit message
Description of the change
Added logging and timestamp.
To post a comment you must log in.
Revision history for this message
John George (jog) : | # |
Revision history for this message
Seman (sseman) wrote : | # |
Thank you John for the review. I removed log() and log_error(). Also, see inline comments.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'chaos/kill.py' |
2 | --- chaos/kill.py 2015-04-29 19:57:39 +0000 |
3 | +++ chaos/kill.py 2015-04-30 22:20:24 +0000 |
4 | @@ -1,9 +1,10 @@ |
5 | +import logging |
6 | + |
7 | from chaos_monkey_base import ( |
8 | Chaos, |
9 | ChaosMonkeyBase, |
10 | ) |
11 | from utility import ( |
12 | - log, |
13 | NotFound, |
14 | run_shell_command, |
15 | ) |
16 | @@ -29,20 +30,20 @@ |
17 | return pids.split(' ') |
18 | |
19 | def kill_jujud(self, quiet_mode=True): |
20 | - log("Kill.kill_jujud") |
21 | + logging.info("Kill.kill_jujud") |
22 | pids = self.get_pids('jujud') |
23 | if not pids: |
24 | - log("Jujud process ID not found") |
25 | + logging.error("Jujud process ID not found") |
26 | if not quiet_mode: |
27 | raise NotFound('Process id not found') |
28 | return |
29 | run_shell_command('kill -s SIGKILL ' + pids[0]) |
30 | |
31 | def kill_mongodb(self, quiet_mode=True): |
32 | - log("Kill.kill_mongod") |
33 | + logging.info("Kill.kill_mongod") |
34 | pids = self.get_pids('mongod') |
35 | if not pids: |
36 | - log("MongoDB process ID not found") |
37 | + logging.error("MongoDB process ID not found") |
38 | if not quiet_mode: |
39 | raise NotFound('Process id not found') |
40 | return |
41 | |
42 | === modified file 'chaos/net.py' |
43 | --- chaos/net.py 2015-04-29 19:57:39 +0000 |
44 | +++ chaos/net.py 2015-04-30 22:20:24 +0000 |
45 | @@ -1,9 +1,10 @@ |
46 | +import logging |
47 | + |
48 | from chaos_monkey_base import ( |
49 | Chaos, |
50 | ChaosMonkeyBase, |
51 | ) |
52 | from utility import ( |
53 | - log, |
54 | run_shell_command, |
55 | ) |
56 | |
57 | @@ -22,62 +23,62 @@ |
58 | return cls() |
59 | |
60 | def reset(self): |
61 | - log("Net.reset ") |
62 | + logging.info("Net.reset ") |
63 | cmd = 'ufw reset' |
64 | run_shell_command(cmd) |
65 | |
66 | def default_deny(self): |
67 | - log("Net.default_deny") |
68 | + logging.info("Net.default_deny") |
69 | cmd = "ufw default deny" |
70 | run_shell_command(cmd) |
71 | |
72 | def default_allow(self): |
73 | - log("Net.default_allow") |
74 | + logging.info("Net.default_allow") |
75 | cmd = "ufw default allow" |
76 | run_shell_command(cmd) |
77 | |
78 | def allow_ssh(self): |
79 | - log("Net.allow_ssh") |
80 | + logging.info("Net.allow_ssh") |
81 | cmd = 'ufw allow ssh' |
82 | run_shell_command(cmd) |
83 | |
84 | def deny_ssh(self): |
85 | - log("Net.deny_ssh") |
86 | + logging.info("Net.deny_ssh") |
87 | cmd = 'ufw deny ssh' |
88 | run_shell_command(cmd) |
89 | |
90 | def deny_all_incoming_and_outgoing_except_ssh(self): |
91 | - log("Net.deny_all_incoming_and_outgoing_except_ssh") |
92 | + logging.info("Net.deny_all_incoming_and_outgoing_except_ssh") |
93 | self.deny_all_incoming_except_ssh() |
94 | self.deny_all_outgoing_except_ssh() |
95 | |
96 | def allow_all_incoming_and_outgoing(self): |
97 | - log("Net.allow_all_incoming_and_outgoing") |
98 | + logging.info("Net.allow_all_incoming_and_outgoing") |
99 | self.allow_all_incoming() |
100 | self.allow_all_outgoing() |
101 | |
102 | def deny_all_incoming_except_ssh(self): |
103 | - log("Net.deny_all_incoming_except_ssh") |
104 | + logging.info("Net.deny_all_incoming_except_ssh") |
105 | self.allow_ssh() |
106 | self.default_deny() |
107 | |
108 | def allow_all_incoming(self): |
109 | - log("Net.allow_all_incoming") |
110 | + logging.info("Net.allow_all_incoming") |
111 | self.default_allow() |
112 | |
113 | def deny_all_outgoing_except_ssh(self): |
114 | - log("Net.deny_all_outgoing_except_ssh") |
115 | + logging.info("Net.deny_all_outgoing_except_ssh") |
116 | self.allow_ssh() |
117 | cmd = 'ufw deny out to any' |
118 | run_shell_command(cmd) |
119 | |
120 | def allow_all_outgoing(self): |
121 | - log("Net.allow_all_outgoing") |
122 | + logging.info("Net.allow_all_outgoing") |
123 | cmd = 'ufw delete deny out to any' |
124 | run_shell_command(cmd) |
125 | |
126 | def deny_port(self, port=8080): |
127 | - log("Net.deny_port port=%s" % port) |
128 | + logging.info("Net.deny_port port=%s" % port) |
129 | cmd = 'ufw deny ' + str(port) |
130 | run_shell_command(cmd) |
131 | |
132 | |
133 | === modified file 'runner.py' |
134 | --- runner.py 2015-04-29 19:57:39 +0000 |
135 | +++ runner.py 2015-04-30 22:20:24 +0000 |
136 | @@ -1,8 +1,12 @@ |
137 | from argparse import ArgumentParser |
138 | +import logging |
139 | from time import time |
140 | |
141 | from chaos_monkey import ChaosMonkey |
142 | -from utility import BadRequest |
143 | +from utility import ( |
144 | + BadRequest, |
145 | + setup_logging, |
146 | +) |
147 | |
148 | |
149 | def random(run_timeout, enablement_timeout): |
150 | @@ -35,6 +39,12 @@ |
151 | parser.add_argument( |
152 | '-tt', '--total-timeout', default=60, type=int, |
153 | help="Total timeout in seconds") |
154 | + parser.add_argument('-lp', '--log-path', help='Where to write logs.', |
155 | + default='log/results.log') |
156 | + parser.add_argument('-lc', '--log-count', default=2, type=int, |
157 | + help='The number of backups to keep.') |
158 | args = parser.parse_args() |
159 | + setup_logging(log_path=args.log_path, log_count=args.log_count) |
160 | + logging.info('Chaos monkey started') |
161 | random(run_timeout=args.total_timeout, |
162 | enablement_timeout=args.enablement_timeout) |
163 | |
164 | === modified file 'tests/test_utility.py' |
165 | --- tests/test_utility.py 2015-04-29 19:57:39 +0000 |
166 | +++ tests/test_utility.py 2015-04-30 22:20:24 +0000 |
167 | @@ -1,9 +1,15 @@ |
168 | +import logging |
169 | +from logging.handlers import RotatingFileHandler |
170 | from subprocess import CalledProcessError |
171 | +from tempfile import NamedTemporaryFile |
172 | from unittest import TestCase |
173 | |
174 | from mock import patch |
175 | |
176 | -from utility import run_shell_command |
177 | +from utility import ( |
178 | + run_shell_command, |
179 | + setup_logging, |
180 | +) |
181 | |
182 | |
183 | class TestUtility(TestCase): |
184 | @@ -20,3 +26,63 @@ |
185 | def test_run_shell_command_output(self): |
186 | output = run_shell_command('echo "hello"') |
187 | self.assertEqual(output, '"hello"\n') |
188 | + |
189 | + def test_setup_logging(self): |
190 | + with NamedTemporaryFile() as temp_file: |
191 | + setup_logging(temp_file.name, log_count=1) |
192 | + logger = logging.getLogger() |
193 | + self.assertEqual(logger.level, logging.DEBUG) |
194 | + self.assertEqual(logger.name, 'root') |
195 | + handlers = logger.handlers |
196 | + self.assertIn( |
197 | + type(handlers[0]), [RotatingFileHandler, logging.StreamHandler]) |
198 | + self.assertIn( |
199 | + type(handlers[1]), [RotatingFileHandler, logging.StreamHandler]) |
200 | + |
201 | + def test_setup_logging_formatter(self): |
202 | + log_count = 1 |
203 | + with NamedTemporaryFile() as temp_file: |
204 | + with patch('logging.Formatter') as l_mock: |
205 | + setup_logging(temp_file.name, log_count=log_count) |
206 | + logger = logging.getLogger() |
207 | + self.assertEqual(logger.name, 'root') |
208 | + l_mock.assert_called_once_with( |
209 | + '%(asctime)s %(levelname)s %(message)s', '%Y-%m-%d %H:%M:%S') |
210 | + |
211 | + def test_setup_logging_rotating_file_handler(self): |
212 | + log_count = 1 |
213 | + with NamedTemporaryFile() as temp_file: |
214 | + with patch('utility.RotatingFileHandler') as mock: |
215 | + setup_logging(temp_file.name, log_count=log_count) |
216 | + mock.assert_called_once_with( |
217 | + temp_file.name, maxBytes=1024 * 1024 * 512, backupCount=log_count) |
218 | + |
219 | + def test_log(self): |
220 | + with NamedTemporaryFile() as temp_file: |
221 | + setup_logging(temp_file.name, log_count=1) |
222 | + logging.info('testing123') |
223 | + with open(temp_file.name, 'r') as file_reader: |
224 | + content = file_reader.read() |
225 | + # log format: 2015-04-29 14:03:02 INFO testing123 |
226 | + match = content.split(' ', 2)[2] |
227 | + self.assertEqual(match, 'INFO testing123\n') |
228 | + |
229 | + def test_log_debug(self): |
230 | + with NamedTemporaryFile() as temp_file: |
231 | + setup_logging(temp_file.name, log_count=1) |
232 | + logging.debug("testing123") |
233 | + with open(temp_file.name, 'r') as file_reader: |
234 | + content = file_reader.read() |
235 | + # log format: 2015-04-29 14:03:02 INFO testing123 |
236 | + match = content.split(' ', 2)[2] |
237 | + self.assertEqual(match, 'DEBUG testing123\n') |
238 | + |
239 | + def test_log_error(self): |
240 | + with NamedTemporaryFile() as temp_file: |
241 | + setup_logging(temp_file.name, log_count=1) |
242 | + logging.error("testing123") |
243 | + with open(temp_file.name, 'r') as file_reader: |
244 | + content = file_reader.read() |
245 | + # log format: 2015-04-29 14:03:02 INFO testing123 |
246 | + match = content.split(' ', 2)[2] |
247 | + self.assertEqual(match, 'ERROR testing123\n') |
248 | |
249 | === modified file 'utility.py' |
250 | --- utility.py 2015-04-29 19:57:39 +0000 |
251 | +++ utility.py 2015-04-30 22:20:24 +0000 |
252 | @@ -1,5 +1,8 @@ |
253 | from __future__ import print_function |
254 | |
255 | +import logging |
256 | +from logging.handlers import RotatingFileHandler |
257 | + |
258 | from subprocess import ( |
259 | CalledProcessError, |
260 | check_output, |
261 | @@ -12,14 +15,25 @@ |
262 | try: |
263 | output = check_output(shell_cmd) |
264 | except CalledProcessError: |
265 | - log("Command generated error: %s " % cmd) |
266 | + logging.error("Command generated error: %s " % cmd) |
267 | if not quiet_mode: |
268 | raise |
269 | return output |
270 | |
271 | |
272 | -def log(log_str): |
273 | - print(log_str) |
274 | +def setup_logging(log_path, log_count): |
275 | + """Install log handlers to output to file and stream.""" |
276 | + formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', |
277 | + '%Y-%m-%d %H:%M:%S') |
278 | + root_logger = logging.getLogger() |
279 | + rf_handler = RotatingFileHandler( |
280 | + log_path, maxBytes=1024 * 1024 * 512, backupCount=log_count) |
281 | + rf_handler.setFormatter(formatter) |
282 | + root_logger.addHandler(rf_handler) |
283 | + s_handler = logging.StreamHandler() |
284 | + s_handler.setFormatter(formatter) |
285 | + root_logger.addHandler(s_handler) |
286 | + root_logger.setLevel(logging.DEBUG) |
287 | |
288 | |
289 | class NotFound(Exception): |
You would not need to provide and maintain the helper log() and log_error() functions if only the root logger was used. Then to write to the log, each file would just need to import the logging module and call logging.info(), logging.warning(), logging.error(), logging.critical(), logging.debug() or logging.exception().