Merge lp:~abentley/ci-director/remove-ci-director into lp:ci-director
- remove-ci-director
- Merge into trunk
Proposed by
Aaron Bentley
Status: | Merged |
---|---|
Merged at revision: | 191 |
Proposed branch: | lp:~abentley/ci-director/remove-ci-director |
Merge into: | lp:ci-director |
Diff against target: |
430 lines (+0/-347) 2 files modified
cidirector/cidirector.py (+0/-91) cidirector/tests/test_cidirector.py (+0/-256) |
To merge this branch: | bzr merge lp:~abentley/ci-director/remove-ci-director |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Curtis Hovey (community) | code | Approve | |
Review via email: mp+316256@code.launchpad.net |
Commit message
Start removing obsolete ci-director functionality.
Description of the change
This branch begins removing obsolete functionality from CI Director.
Now that cidirector.py is no longer being run, only functionality imported by start_builds.py or update_outcomes.py is still needed.
This branch removes the execute bit, the shebang, and the symlink, since ci-director will no longer be executable.
It removes main(), build_revision(), schedule_builds(), and their associated tests.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === removed symlink 'ci-director' | |||
2 | === target was u'cidirector/cidirector.py' | |||
3 | === modified file 'cidirector/cidirector.py' (properties changed: +x to -x) | |||
4 | --- cidirector/cidirector.py 2017-01-23 15:37:35 +0000 | |||
5 | +++ cidirector/cidirector.py 2017-02-02 17:57:29 +0000 | |||
6 | @@ -1,4 +1,3 @@ | |||
7 | 1 | #!/usr/bin/env python | ||
8 | 2 | from argparse import ArgumentParser | 1 | from argparse import ArgumentParser |
9 | 3 | from collections import namedtuple | 2 | from collections import namedtuple |
10 | 4 | from contextlib import contextmanager | 3 | from contextlib import contextmanager |
11 | @@ -15,8 +14,6 @@ | |||
12 | 15 | import re | 14 | import re |
13 | 16 | import shlex | 15 | import shlex |
14 | 17 | from smtplib import SMTP | 16 | from smtplib import SMTP |
15 | 18 | from socket import setdefaulttimeout | ||
16 | 19 | import sys | ||
17 | 20 | from time import sleep | 17 | from time import sleep |
18 | 21 | import urllib2 | 18 | import urllib2 |
19 | 22 | 19 | ||
20 | @@ -47,11 +44,9 @@ | |||
21 | 47 | PENDING, | 44 | PENDING, |
22 | 48 | SUCCEEDED, | 45 | SUCCEEDED, |
23 | 49 | StateFile, | 46 | StateFile, |
24 | 50 | StateFileInUse, | ||
25 | 51 | TERMINAL_STATUS, | 47 | TERMINAL_STATUS, |
26 | 52 | ) | 48 | ) |
27 | 53 | from utility import ( | 49 | from utility import ( |
28 | 54 | log_exceptions, | ||
29 | 55 | S3Storage, | 50 | S3Storage, |
30 | 56 | ) | 51 | ) |
31 | 57 | 52 | ||
32 | @@ -865,61 +860,6 @@ | |||
33 | 865 | active.extend(item['task']['name'] for item in queued) | 860 | active.extend(item['task']['name'] for item in queued) |
34 | 866 | return active | 861 | return active |
35 | 867 | 862 | ||
36 | 868 | def schedule_builds(self, server_info): | ||
37 | 869 | """Request builds for appropriate jobs. | ||
38 | 870 | |||
39 | 871 | Can schedule BuildRevisionJob, ResourcefulJob, RecordResultJob. | ||
40 | 872 | :param server_info: ServerInfo for the current Jenkins. | ||
41 | 873 | """ | ||
42 | 874 | self.adopt_builds(server_info) | ||
43 | 875 | active_jobs = set(self.get_active_jobs(server_info)) | ||
44 | 876 | self.logger.info('Active jobs: %s', ', '.join(sorted(active_jobs))) | ||
45 | 877 | candidate_jobs = [] | ||
46 | 878 | self.update_builds() | ||
47 | 879 | judge = ResultJudge(self.logger, self.state_file, self.jenkins) | ||
48 | 880 | all_results_ready = judge.finalize(self.mailer) | ||
49 | 881 | job = RecordResultJob( | ||
50 | 882 | self.logger, self.state_file, self.jenkins, self.mailer) | ||
51 | 883 | judged = job.maybe_build() | ||
52 | 884 | if all_results_ready or judged: | ||
53 | 885 | self.state_file.finish_revision(datetime.utcnow()) | ||
54 | 886 | # Disabled in favour of update-outcomes. | ||
55 | 887 | # self.state_file.build_info_to_s3() | ||
56 | 888 | resourcefuljobs = ResourcefulJob.get_available_jobs( | ||
57 | 889 | server_info, self.jenkins, self.state_file, self.logger) | ||
58 | 890 | for r_job in resourcefuljobs: | ||
59 | 891 | candidate_jobs.append(r_job) | ||
60 | 892 | candidate_jobs.append(BuildRevisionJob( | ||
61 | 893 | self.repo_path, self.branches, self.ignore_branches, self.logger, | ||
62 | 894 | self.state_file, self.jenkins)) | ||
63 | 895 | for job in candidate_jobs: | ||
64 | 896 | if job.maybe_build(active_jobs): | ||
65 | 897 | # We create a new set so that tests that access the parameters | ||
66 | 898 | # of maybe_build don't get mutated results. | ||
67 | 899 | active_jobs = active_jobs.union({job.job_id}) | ||
68 | 900 | if should_update_health(self.jenkins, active_jobs): | ||
69 | 901 | jobs = list_cloud_health(server_info, self.jenkins, self.logger) | ||
70 | 902 | self.update_cloud_health(jobs) | ||
71 | 903 | |||
72 | 904 | def adopt_builds(self, server_info): | ||
73 | 905 | """Adopt any builds for the build-revision not already recorded.""" | ||
74 | 906 | # We happen to know that branch will not be used here. | ||
75 | 907 | job_instances = [BuildRevisionJob(self.repo_path, None, None, | ||
76 | 908 | self.logger, self.state_file, self.jenkins)] | ||
77 | 909 | job_instances.extend( | ||
78 | 910 | Job.make_subclass(si_job['name'], self.logger, self.state_file, | ||
79 | 911 | self.jenkins) | ||
80 | 912 | for si_job in server_info.info['jobs']) | ||
81 | 913 | for job in job_instances: | ||
82 | 914 | if job is None: | ||
83 | 915 | continue | ||
84 | 916 | job_info = JobInfo.from_jenkins(self.jenkins, job.job_id) | ||
85 | 917 | if job_info.current_build_number is None: | ||
86 | 918 | continue | ||
87 | 919 | build_info = self.jenkins.get_build_info( | ||
88 | 920 | job.job_id, job_info.current_build_number) | ||
89 | 921 | job.adopt_build(build_info) | ||
90 | 922 | |||
91 | 923 | def update_builds(self): | 863 | def update_builds(self): |
92 | 924 | """Record the status of jobs that are not currently building.""" | 864 | """Record the status of jobs that are not currently building.""" |
93 | 925 | self.logger.info('Updating job outcomes.') | 865 | self.logger.info('Updating job outcomes.') |
94 | @@ -958,19 +898,6 @@ | |||
95 | 958 | version, job, status, info['timestamp'], info['duration']) | 898 | version, job, status, info['timestamp'], info['duration']) |
96 | 959 | 899 | ||
97 | 960 | 900 | ||
98 | 961 | def build_revision(branches, ignore_branches): | ||
99 | 962 | """Top-level logic for building jobs. | ||
100 | 963 | |||
101 | 964 | For a supplied list of branches, it | ||
102 | 965 | - Uses the config to access Jenkins, and loads the current state. | ||
103 | 966 | - Runs jobs using CIDirector. | ||
104 | 967 | - Saves the updated state. | ||
105 | 968 | """ | ||
106 | 969 | with CIDirector.stateful(branches, ignore_branches) as director: | ||
107 | 970 | server_info = ServerInfo(director.jenkins.get_info()) | ||
108 | 971 | director.schedule_builds(server_info) | ||
109 | 972 | |||
110 | 973 | |||
111 | 974 | def list_cloud_health(si, jenkins, logger): | 901 | def list_cloud_health(si, jenkins, logger): |
112 | 975 | """Iterate through CloudHealthJobs for every relevent job on Jenkins.""" | 902 | """Iterate through CloudHealthJobs for every relevent job on Jenkins.""" |
113 | 976 | for job in si.info['jobs']: | 903 | for job in si.info['jobs']: |
114 | @@ -1028,21 +955,3 @@ | |||
115 | 1028 | root_logger.addHandler(s_handler) | 955 | root_logger.addHandler(s_handler) |
116 | 1029 | if verbose: | 956 | if verbose: |
117 | 1030 | root_logger.setLevel(logging.INFO) | 957 | root_logger.setLevel(logging.INFO) |
118 | 1031 | |||
119 | 1032 | |||
120 | 1033 | def main(argv=None): | ||
121 | 1034 | setdefaulttimeout(30) | ||
122 | 1035 | args = get_arg_parser().parse_args(argv) | ||
123 | 1036 | setup_logging(args.log_path, args.log_count, args.verbose) | ||
124 | 1037 | logger = logging.getLogger('cidirector') | ||
125 | 1038 | with log_exceptions(logger) as result: | ||
126 | 1039 | try: | ||
127 | 1040 | build_revision(args.branch, args.ignore) | ||
128 | 1041 | except StateFileInUse: | ||
129 | 1042 | logger.warning('State file already in use.') | ||
130 | 1043 | return 1 | ||
131 | 1044 | return result.exit_status | ||
132 | 1045 | |||
133 | 1046 | |||
134 | 1047 | if __name__ == '__main__': | ||
135 | 1048 | sys.exit(main()) | ||
136 | 1049 | 958 | ||
137 | === modified file 'cidirector/tests/test_cidirector.py' | |||
138 | --- cidirector/tests/test_cidirector.py 2017-01-23 15:37:35 +0000 | |||
139 | +++ cidirector/tests/test_cidirector.py 2017-02-02 17:57:29 +0000 | |||
140 | @@ -6,7 +6,6 @@ | |||
141 | 6 | ) | 6 | ) |
142 | 7 | import logging | 7 | import logging |
143 | 8 | import os | 8 | import os |
144 | 9 | import sys | ||
145 | 10 | from StringIO import StringIO | 9 | from StringIO import StringIO |
146 | 11 | from textwrap import dedent | 10 | from textwrap import dedent |
147 | 12 | from unittest import TestCase | 11 | from unittest import TestCase |
148 | @@ -30,7 +29,6 @@ | |||
149 | 30 | get_buildvars_file, | 29 | get_buildvars_file, |
150 | 31 | Job, | 30 | Job, |
151 | 32 | JobInfo, | 31 | JobInfo, |
152 | 33 | main, | ||
153 | 34 | Mailer, | 32 | Mailer, |
154 | 35 | NoSuchBranch, | 33 | NoSuchBranch, |
155 | 36 | list_cloud_health, | 34 | list_cloud_health, |
156 | @@ -306,154 +304,6 @@ | |||
157 | 306 | self.assertEqual(['foo', 'bar', 'baz', 'qux'], | 304 | self.assertEqual(['foo', 'bar', 'baz', 'qux'], |
158 | 307 | director.get_active_jobs(si)) | 305 | director.get_active_jobs(si)) |
159 | 308 | 306 | ||
160 | 309 | def make_schedule_builds_director(self, branches, active=True, | ||
161 | 310 | published=True, server_info=None): | ||
162 | 311 | state_file = StateFile() | ||
163 | 312 | state_file.start_revision('a', 1, 'b', '1.6', 2) | ||
164 | 313 | if published: | ||
165 | 314 | state_file.publication_job().update_from_build_result('SUCCESS') | ||
166 | 315 | version = 2 if active else 3 | ||
167 | 316 | jenkins_dict = { | ||
168 | 317 | PUBLISH_REVISION: { | ||
169 | 318 | 'build_number': 5, | ||
170 | 319 | 'description': '[ci-director]\n requires: build-revision'}, | ||
171 | 320 | REVISION_RESULTS: {'build_number': 1}, | ||
172 | 321 | 'aws-deploy': { | ||
173 | 322 | 'build_number': 5, | ||
174 | 323 | 'description': '[ci-director]\n requires: build-revision'}, | ||
175 | 324 | } | ||
176 | 325 | if server_info is not None: | ||
177 | 326 | for entry in server_info.info['jobs']: | ||
178 | 327 | jenkins_dict.setdefault(entry['name'], { | ||
179 | 328 | 'build_number': 277, | ||
180 | 329 | }) | ||
181 | 330 | jenkins = FakeJenkins(jenkins_dict, current_version=version) | ||
182 | 331 | if not active: | ||
183 | 332 | jenkins.set_build_result(BUILD_REVISION, 2, 'SUCCESS') | ||
184 | 333 | return CIDirector(jenkins, state_file, branches, | ||
185 | 334 | repo_path='hubgit.com/mumu/mumu') | ||
186 | 335 | |||
187 | 336 | def test_schedule_builds_uses_get_active_jobs(self): | ||
188 | 337 | director = CIDirector(FakeJenkins({ | ||
189 | 338 | REVISION_RESULTS: {'build_number': 2} | ||
190 | 339 | }, current_version=5), StateFile()) | ||
191 | 340 | server_info = make_server_info('idle') | ||
192 | 341 | director.state_file.start_revision('a', 54, 'b', '1.25', 5) | ||
193 | 342 | # get_determine_result calls get_available_jobs, but doesn't count | ||
194 | 343 | # because it doesn't run those jobs. | ||
195 | 344 | with patch.object( | ||
196 | 345 | ResultJudge, 'get_candidate_determine_result_jobs', | ||
197 | 346 | return_value=[]): | ||
198 | 347 | with patch.object( | ||
199 | 348 | director, 'get_active_jobs', | ||
200 | 349 | return_value=[BUILD_REVISION]) as gaj_mock: | ||
201 | 350 | with patch.object(director.state_file, 'build_info_to_s3'): | ||
202 | 351 | director.schedule_builds(server_info) | ||
203 | 352 | gaj_mock.assert_called_with(server_info) | ||
204 | 353 | |||
205 | 354 | def test_schedule_builds_respects_branch_order(self): | ||
206 | 355 | bar_url = 'gitbranch:bar:hubgit.com/mumu/mumu' | ||
207 | 356 | foo_url = 'gitbranch:foo:hubgit.com/mumu/mumu' | ||
208 | 357 | director = self.make_schedule_builds_director([bar_url, foo_url]) | ||
209 | 358 | ls_remote_out = dedent("""\ | ||
210 | 359 | rev-1\trefs/heads/bar | ||
211 | 360 | rev-2\trefs/heads/foo | ||
212 | 361 | """) | ||
213 | 362 | with patch('subprocess.check_output', return_value=ls_remote_out): | ||
214 | 363 | director.schedule_builds(make_server_info('idle')) | ||
215 | 364 | self.assertEqual( | ||
216 | 365 | director.jenkins.calls['build_job'], | ||
217 | 366 | [((BUILD_REVISION, { | ||
218 | 367 | 'branch': bar_url, 'revision': 'rev-1' | ||
219 | 368 | }), {})]) | ||
220 | 369 | director = self.make_schedule_builds_director([foo_url, bar_url]) | ||
221 | 370 | with patch('subprocess.check_output', return_value=ls_remote_out): | ||
222 | 371 | director.schedule_builds(make_server_info('idle')) | ||
223 | 372 | self.assertEqual( | ||
224 | 373 | director.jenkins.calls['build_job'], | ||
225 | 374 | [((BUILD_REVISION, { | ||
226 | 375 | 'branch': foo_url, 'revision': 'rev-2' | ||
227 | 376 | }), {})]) | ||
228 | 377 | |||
229 | 378 | def test_schedule_builds_updates_builds(self): | ||
230 | 379 | director = self.make_schedule_builds_director([]) | ||
231 | 380 | with patch.object(director, 'update_builds') as ub_mock: | ||
232 | 381 | with patch('subprocess.check_output'): | ||
233 | 382 | director.schedule_builds(make_server_info('idle')) | ||
234 | 383 | ub_mock.assert_called_with() | ||
235 | 384 | |||
236 | 385 | def test_schedule_builds_records_results(self): | ||
237 | 386 | director = self.make_schedule_builds_director([]) | ||
238 | 387 | with patch.object(RecordResultJob, 'maybe_build') as mb_mock: | ||
239 | 388 | with patch.object(director.state_file, 'build_info_to_s3'): | ||
240 | 389 | with patch('subprocess.check_output'): | ||
241 | 390 | director.schedule_builds(make_server_info('idle')) | ||
242 | 391 | mb_mock.assert_called_with() | ||
243 | 392 | |||
244 | 393 | def test_schedule_builds_publishes(self): | ||
245 | 394 | director = self.make_schedule_builds_director([], active=True) | ||
246 | 395 | with patch.object(ResourcefulJob, 'maybe_build') as mb_mock: | ||
247 | 396 | si = make_server_info('idle', buildable=[PUBLISH_REVISION]) | ||
248 | 397 | director.schedule_builds(si) | ||
249 | 398 | mb_mock.assert_called_with(set()) | ||
250 | 399 | |||
251 | 400 | def test_schedule_builds_inactive_not_publishes(self): | ||
252 | 401 | director = self.make_schedule_builds_director([], active=False) | ||
253 | 402 | with patch.object(ResourcefulJob, 'maybe_build') as mb_mock: | ||
254 | 403 | with patch('subprocess.check_output'): | ||
255 | 404 | director.schedule_builds(make_server_info('idle')) | ||
256 | 405 | self.assertEqual(mb_mock.call_count, 0) | ||
257 | 406 | |||
258 | 407 | def test_schedule_builds_schedules_resourceful_tests_unique(self): | ||
259 | 408 | server_info = make_server_info('idle', buildable=['package-foo']) | ||
260 | 409 | director = self.make_schedule_builds_director( | ||
261 | 410 | [], server_info=server_info) | ||
262 | 411 | description = "[ci-director]\n requires: build-revision" | ||
263 | 412 | director.jenkins.job_info['package-foo']['description'] = description | ||
264 | 413 | with patch.object(ResourcefulJob, 'maybe_build') as mb_mock: | ||
265 | 414 | director.schedule_builds(server_info) | ||
266 | 415 | mb_mock.assert_called_once_with(set()) | ||
267 | 416 | |||
268 | 417 | def test_schedule_builds_adopts_builds(self): | ||
269 | 418 | director = self.make_schedule_builds_director([]) | ||
270 | 419 | server_info = make_server_info('idle') | ||
271 | 420 | with patch.object(director, 'adopt_builds') as ab_mock: | ||
272 | 421 | with patch('subprocess.check_output'): | ||
273 | 422 | director.schedule_builds(server_info) | ||
274 | 423 | ab_mock.assert_called_with(server_info) | ||
275 | 424 | |||
276 | 425 | def test_schedule_builds_calls_send_final_mail(self): | ||
277 | 426 | director = self.make_schedule_builds_director([]) | ||
278 | 427 | server_info = make_server_info('idle') | ||
279 | 428 | with patch.object(ResultJudge, 'finalize') as finalize_mock: | ||
280 | 429 | with patch.object(director.state_file, 'build_info_to_s3'): | ||
281 | 430 | with patch('subprocess.check_output'): | ||
282 | 431 | director.schedule_builds(server_info) | ||
283 | 432 | finalize_mock.assert_called_with(director.mailer) | ||
284 | 433 | |||
285 | 434 | def test_schedule_builds_no_write_results_to_s3_without_final_data(self): | ||
286 | 435 | director = self.make_schedule_builds_director([]) | ||
287 | 436 | with patch.object(ResultJudge, 'finalize', return_value=False): | ||
288 | 437 | with patch.object(RecordResultJob, 'maybe_build', | ||
289 | 438 | return_value=False): | ||
290 | 439 | with patch.object(director.state_file, | ||
291 | 440 | 'build_info_to_s3') as s3_mock: | ||
292 | 441 | with patch('subprocess.check_output'): | ||
293 | 442 | director.schedule_builds(make_server_info('idle')) | ||
294 | 443 | self.assertEqual(0, s3_mock.call_count) | ||
295 | 444 | |||
296 | 445 | def test_schedule_builds_completes_unfinished_tests_with_new_rev(self): | ||
297 | 446 | # A test suite is finished even when a new revision is available. | ||
298 | 447 | server_info = make_server_info( | ||
299 | 448 | 'idle', buildable=['aws-deploy']) | ||
300 | 449 | director = self.make_schedule_builds_director( | ||
301 | 450 | ['branch-url'], server_info=server_info, published=True) | ||
302 | 451 | director.schedule_builds(server_info) | ||
303 | 452 | # build_job is not called for the build-revision job. | ||
304 | 453 | self.assertEqual( | ||
305 | 454 | [(('aws-deploy', {'revision_build': 2}), {})], | ||
306 | 455 | director.jenkins.calls['build_job']) | ||
307 | 456 | |||
308 | 457 | def test_from_config(self): | 307 | def test_from_config(self): |
309 | 458 | director = CIDirector.from_config({ | 308 | director = CIDirector.from_config({ |
310 | 459 | 'jenkins_url': 'http://192.168.1.1:8080', | 309 | 'jenkins_url': 'http://192.168.1.1:8080', |
311 | @@ -646,57 +496,6 @@ | |||
312 | 646 | director.update_builds() | 496 | director.update_builds() |
313 | 647 | self.assertEqual(sfj.get_status(), SUCCEEDED) | 497 | self.assertEqual(sfj.get_status(), SUCCEEDED) |
314 | 648 | 498 | ||
315 | 649 | def test_adopt_builds(self): | ||
316 | 650 | jenkins = FakeJenkins({ | ||
317 | 651 | 'foo-deploy': { | ||
318 | 652 | 'build_number': 5, | ||
319 | 653 | 'description': '[ci-director]\n requires: build-revision'}, | ||
320 | 654 | BuildRevisionJob.job_id: {'build_number': 2}, | ||
321 | 655 | }) | ||
322 | 656 | sf = self.get_state_file() | ||
323 | 657 | director = CIDirector(jenkins, sf) | ||
324 | 658 | with patch.object(ResourcefulJob, 'adopt_build') as ab_mock: | ||
325 | 659 | director.adopt_builds(make_server_info('idle')) | ||
326 | 660 | self.assertEqual(ab_mock.call_count, 0) | ||
327 | 661 | director.adopt_builds( | ||
328 | 662 | make_server_info( | ||
329 | 663 | 'idle', buildable=['foo-deploy'])) | ||
330 | 664 | ab_mock.assert_called_with({ | ||
331 | 665 | 'result': 'SUCCESS', | ||
332 | 666 | 'number': 5, | ||
333 | 667 | 'building': False, | ||
334 | 668 | 'artifacts': [], | ||
335 | 669 | 'timestamp': 0, | ||
336 | 670 | 'url': 'fakettp://fake.fake/job/foo-deploy/5' | ||
337 | 671 | }) | ||
338 | 672 | |||
339 | 673 | def test_adopt_builds_unbuilt(self): | ||
340 | 674 | info = ServerInfo({'jobs': [{ | ||
341 | 675 | 'name': 'aws-deploy', | ||
342 | 676 | }]}) | ||
343 | 677 | jenkins = FakeJenkins({ | ||
344 | 678 | BuildRevisionJob.job_id: {'build_number': 2}, | ||
345 | 679 | 'aws-deploy': { | ||
346 | 680 | 'build_number': 2, | ||
347 | 681 | 'description': '[ci-director]\n requires: build-revision', | ||
348 | 682 | 'lastBuild': None} | ||
349 | 683 | }) | ||
350 | 684 | jenkins.job_info['aws-deploy'] = {'lastBuild': None} | ||
351 | 685 | sf = StateFile() | ||
352 | 686 | director = CIDirector(jenkins, sf) | ||
353 | 687 | director.adopt_builds(info) | ||
354 | 688 | self.assertEqual( | ||
355 | 689 | [c[0][0] for c in jenkins.calls['get_build_info']], | ||
356 | 690 | ['build-revision']) | ||
357 | 691 | jenkins.job_info['aws-deploy'] = { | ||
358 | 692 | 'lastBuild': {'number': 1}, | ||
359 | 693 | 'description': '[ci-director]\n requires: build-revision'} | ||
360 | 694 | jenkins.build_info['aws-deploy'] = {1: {}} | ||
361 | 695 | director.adopt_builds(info) | ||
362 | 696 | self.assertEqual( | ||
363 | 697 | [c[0][0] for c in jenkins.calls['get_build_info']], | ||
364 | 698 | ['build-revision', 'build-revision', 'aws-deploy']) | ||
365 | 699 | |||
366 | 700 | def update_cloud_health_check(self, result, status): | 499 | def update_cloud_health_check(self, result, status): |
367 | 701 | foo = CloudHealthJob(name='foo', substrate='bar', last_completed=4) | 500 | foo = CloudHealthJob(name='foo', substrate='bar', last_completed=4) |
368 | 702 | jenkins = make_cloud_health_jenkins([foo]) | 501 | jenkins = make_cloud_health_jenkins([foo]) |
369 | @@ -2295,61 +2094,6 @@ | |||
370 | 2295 | }) | 2094 | }) |
371 | 2296 | 2095 | ||
372 | 2297 | 2096 | ||
373 | 2298 | class TestMain(TestCase): | ||
374 | 2299 | |||
375 | 2300 | def setUp(self): | ||
376 | 2301 | self.logger = logging.getLogger() | ||
377 | 2302 | self.orig_handlers = self.logger.handlers | ||
378 | 2303 | self.logger.handlers = [] | ||
379 | 2304 | self.orig_level = self.logger.level | ||
380 | 2305 | |||
381 | 2306 | def tearDown(self): | ||
382 | 2307 | self.logger.handlers = self.orig_handlers | ||
383 | 2308 | self.logger.level = self.orig_level | ||
384 | 2309 | |||
385 | 2310 | def test_main_default(self): | ||
386 | 2311 | with patch('cidirector.cidirector.build_revision', | ||
387 | 2312 | autospec=True) as br_mock: | ||
388 | 2313 | with patch('cidirector.cidirector.RotatingFileHandler' | ||
389 | 2314 | ) as rfh_mock: | ||
390 | 2315 | ret = main(['b1', 'b2']) | ||
391 | 2316 | self.assertEqual(ret, 0) | ||
392 | 2317 | br_mock.assert_called_with(['b1', 'b2'], []) | ||
393 | 2318 | self.assertEqual(1, br_mock.call_count) | ||
394 | 2319 | root_logger = logging.getLogger() | ||
395 | 2320 | self.assertEqual(logging.WARNING, root_logger.level) | ||
396 | 2321 | rfh_mock.assert_called_once_with( | ||
397 | 2322 | 'ci-director.log', backupCount=2, maxBytes=1024 * 1024) | ||
398 | 2323 | self.assertIs(sys.stderr, root_logger.handlers[1].stream) | ||
399 | 2324 | |||
400 | 2325 | def test_main_logging_config(self): | ||
401 | 2326 | with patch('cidirector.cidirector.build_revision'): | ||
402 | 2327 | with temp_dir() as log_dir: | ||
403 | 2328 | log_path = '{}/ci-director.log'.format(log_dir) | ||
404 | 2329 | ret = main(['-v', '--log-path', log_path, '--log-count', | ||
405 | 2330 | '5', 'b1']) | ||
406 | 2331 | self.assertEqual(ret, 0) | ||
407 | 2332 | root_logger = logging.getLogger() | ||
408 | 2333 | self.assertEqual(logging.INFO, root_logger.level) | ||
409 | 2334 | self.assertEqual(5, root_logger.handlers[0].backupCount) | ||
410 | 2335 | self.assertEqual(log_path, root_logger.handlers[0].baseFilename) | ||
411 | 2336 | |||
412 | 2337 | def test_main_logs_exception(self): | ||
413 | 2338 | with test_logger() as (log, log_stream): | ||
414 | 2339 | with patch('cidirector.cidirector.setup_logging', autospec=True): | ||
415 | 2340 | with patch('cidirector.cidirector.build_revision', | ||
416 | 2341 | side_effect=ValueError("mishap")): | ||
417 | 2342 | ret = main(["branch"]) | ||
418 | 2343 | self.assertEqual(ret, 1) | ||
419 | 2344 | log_pattern = ( | ||
420 | 2345 | "(?s)\\Aexception during build revision:\n" | ||
421 | 2346 | "Traceback \\(most recent call last\\):\n" | ||
422 | 2347 | ".*\n" | ||
423 | 2348 | "ValueError: mishap\n\\Z" | ||
424 | 2349 | ) | ||
425 | 2350 | self.assertRegexpMatches(log_stream.getvalue(), log_pattern) | ||
426 | 2351 | |||
427 | 2352 | |||
428 | 2353 | def make_description(tags=None, section='ci-director'): | 2097 | def make_description(tags=None, section='ci-director'): |
429 | 2354 | lines = ['[{}]\n'.format(section)] | 2098 | lines = ['[{}]\n'.format(section)] |
430 | 2355 | if tags is not None: | 2099 | if tags is not None: |
Thank you.