Merge ~lloydwaltersj/maas-ci/+git/system-tests:refactor-image-promotion-workflows into ~lloydwaltersj/maas-ci/+git/system-tests:refactor-packer-testing-workflows

Proposed by Jack Lloyd-Walters
Status: Superseded
Proposed branch: ~lloydwaltersj/maas-ci/+git/system-tests:refactor-image-promotion-workflows
Merge into: ~lloydwaltersj/maas-ci/+git/system-tests:refactor-packer-testing-workflows
Diff against target: 1722 lines (+1281/-75)
13 files modified
temporal/common/argument_parser.py (+32/-3)
temporal/common/common.py (+0/-2)
temporal/common/github_functions.py (+36/-12)
temporal/common/jenkins_functions.py (+5/-3)
temporal/common/jenkins_workflows.py (+9/-6)
temporal/start_packer_cron.py (+5/-3)
temporal/start_promotion_cron.py (+182/-0)
temporal/test_promotion_images.py (+153/-0)
temporal/workflows/matrix_testing_worker.py (+12/-2)
temporal/workflows/packer_image_cron.py (+7/-2)
temporal/workflows/packer_image_tester.py (+53/-42)
temporal/workflows/promotion_image_cron.py (+242/-0)
temporal/workflows/promotion_image_tester.py (+545/-0)
Reviewer Review Type Date Requested Status
MAAS Lander Pending
Jack Lloyd-Walters Pending
Review via email: mp+457929@code.launchpad.net

This proposal has been superseded by a proposal from 2024-01-17.

Commit message

refactors the image promotion workflows

Description of the change

refactor the image promotion workflow to align with https://code.launchpad.net/~lloydwaltersj/maas-ci/+git/system-tests/+merge/457784

Targets that MP, as it requires most of the refactor changes made there.

To post a comment you must log in.
3619cc7... by Jack Lloyd-Walters

remove return all logs left in function

915d20e... by Jack Lloyd-Walters

fetch centos eol

c952750... by Jack Lloyd-Walters

don't promote if nothing changed

a3e7e60... by Jack Lloyd-Walters

move to combined area

0669c9e... by Jack Lloyd-Walters

move to combined area

Revision history for this message
Jack Lloyd-Walters (lloydwaltersj) wrote :

jenkins: !test

f7c92e7... by Jack Lloyd-Walters

convert classes to CamelCase

4f099a1... by Jack Lloyd-Walters

add canonilibraries

8f232f6... by Jack Lloyd-Walters

fixes

7c1cc6b... by Jack Lloyd-Walters

remove http:// from localhost url

5659603... by Jack Lloyd-Walters

fix workflow trigger

75272ef... by Jack Lloyd-Walters

linting

60d5983... by Jack Lloyd-Walters

fix id

Unmerged commits

60d5983... by Jack Lloyd-Walters

fix id

Succeeded
[SUCCEEDED] lint:0 (build)
11 of 1 result
75272ef... by Jack Lloyd-Walters

linting

Succeeded
[SUCCEEDED] lint:0 (build)
11 of 1 result
5659603... by Jack Lloyd-Walters

fix workflow trigger

Failed
[FAILED] lint:0 (build)
11 of 1 result
7c1cc6b... by Jack Lloyd-Walters

remove http:// from localhost url

8f232f6... by Jack Lloyd-Walters

fixes

4f099a1... by Jack Lloyd-Walters

add canonilibraries

f7c92e7... by Jack Lloyd-Walters

convert classes to CamelCase

0669c9e... by Jack Lloyd-Walters

move to combined area

f3b54e4... by Jack Lloyd-Walters

refactor and new workflows

a3e7e60... by Jack Lloyd-Walters

move to combined area

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1diff --git a/temporal/common/argument_parser.py b/temporal/common/argument_parser.py
2index 64a4e3d..cf19172 100644
3--- a/temporal/common/argument_parser.py
4+++ b/temporal/common/argument_parser.py
5@@ -5,7 +5,7 @@ import sys
6 from dataclasses import dataclass
7 from datetime import datetime, timedelta
8 from enum import Enum
9-from typing import TypeVar
10+from typing import Any, TypeVar
11
12 from dateutil.relativedelta import relativedelta
13 from temporalio import workflow
14@@ -22,7 +22,36 @@ def readable_list(*items: str) -> str:
15 return ", ".join(items[:-1]) + f", and {items[-1]}"
16
17
18-def iso8601todelta(iso_string: str) -> relativedelta:
19+class RelativeDelta(relativedelta):
20+ """Make relativedelta serialisable"""
21+
22+ @classmethod
23+ def parse_obj(cls: type[T], obj: Any) -> T:
24+ return cls(**obj)
25+
26+ def dict(self) -> dict[str, Any]:
27+ return {
28+ "years": self.years,
29+ "months": self.months,
30+ "days": self.days,
31+ "leapdays": self.leapdays,
32+ "weeks": self.weeks,
33+ "hours": self.hours,
34+ "minutes": self.minutes,
35+ "seconds": self.seconds,
36+ "microseconds": self.microseconds,
37+ "year": self.year,
38+ "month": self.month,
39+ "day": self.day,
40+ "weekday": self.weekday,
41+ "hour": self.hour,
42+ "minute": self.minute,
43+ "second": self.second,
44+ "microsecond": self.microsecond,
45+ }
46+
47+
48+def iso8601todelta(iso_string: str) -> RelativeDelta:
49 # search for an ISO8601 duration: P[n]Y[n]M[n]W[n]DT[n]H[n]M[n]S
50 if duration := re.search(
51 r"P(\d+Y)?(\d+M)?(\d+W)?(\d+D)?(T)?(\d+H)?(\d+M)?(\d+S)?", iso_string
52@@ -48,7 +77,7 @@ def iso8601todelta(iso_string: str) -> relativedelta:
53 return int(digit.group())
54 return 0
55
56- return relativedelta(
57+ return RelativeDelta(
58 years=num(years),
59 months=num(months),
60 weeks=num(weeks),
61diff --git a/temporal/common/common.py b/temporal/common/common.py
62index 86bde71..859ee00 100644
63--- a/temporal/common/common.py
64+++ b/temporal/common/common.py
65@@ -1,5 +1,3 @@
66-"""Common functionality for temporal."""
67-
68 import asyncio
69 import re
70 import sys
71diff --git a/temporal/common/github_functions.py b/temporal/common/github_functions.py
72index 4d71b9f..0c1acfb 100644
73--- a/temporal/common/github_functions.py
74+++ b/temporal/common/github_functions.py
75@@ -6,23 +6,27 @@ from pathlib import Path
76 from subprocess import CompletedProcess, run
77 from typing import Iterator
78
79+COMMIT_MESSAGE_REGEX = re.compile(r"[0-9a-f]+\s([^\n]+)")
80+
81
82 @dataclass
83 class GitRepo:
84- _repo_url: str
85- _repo_path: str | Path
 86+ # recommended to access these values using the properties instead:
87+ # url() and path(), path_str(), directory() or directory_str()
88+ repo_url_string: str
89+ repo_path_string: str | Path
90
91 # properties
92 @cached_property
93 def url(self) -> str:
94- return self._repo_url
95+ return self.repo_url_string
96
97 def _as_path_(self, file_path: str | Path) -> Path:
98 return file_path if isinstance(file_path, Path) else Path(file_path)
99
100 @cached_property
101 def path(self) -> Path:
102- return self._as_path_(self._repo_path)
103+ return self._as_path_(self.repo_path_string)
104
105 @cached_property
106 def path_str(self) -> str:
107@@ -83,8 +87,11 @@ class GitRepo:
108 return "main" if "main" in self.branches else "master"
109
110 def remote_of(self, branch: str) -> str | None:
111- if remote_branch := re.search(
112- f"remotes/(.*{branch})", self.execute(["git", "branch", "-a"])
113+ # we can't determine remote branches if the repo isn't cloned.
114+ if self.exists and (
115+ remote_branch := re.search(
116+ f"remotes/(.*{branch})", "\n".join(self.branches)
117+ )
118 ):
119 return remote_branch.group()
120 return None
121@@ -171,6 +178,10 @@ class GitRepo:
122 self.checkout(branch)
123 return new_branch
124
125+ def clean_checkout(self, branch: str) -> bool:
126+ self.reset_to_remote()
127+ return self.checkout(branch)
128+
129 def pull(self) -> None:
130 self.execute(["git", "pull"])
131
132@@ -196,8 +207,8 @@ class GitRepo:
133 def all_commits(self) -> list[str]:
134 return [
135 str(msg)
136- for msg in re.findall(
137- r"[0-9a-f]+\s([^\n]+)", self.execute(["git", "log", "--oneline"])
138+ for msg in COMMIT_MESSAGE_REGEX.findall(
139+ self.execute(["git", "log", "--oneline"])
140 )
141 ]
142
143@@ -221,7 +232,13 @@ class GitRepo:
144 and (cmts := self.all_commits)
145 and msg in cmts
146 ):
147- return cmts.index(msg)
148+ idx = 0
149+ for newidx, cmt in enumerate(cmts):
150+ if idx == newidx and cmt == msg:
151+ idx += 1
152+ else:
153+ break
154+ return idx
155 return None
156
157 # staging
158@@ -231,8 +248,8 @@ class GitRepo:
159
160 def staged_file(self) -> dict[str, list[str]]:
161 staged: dict[str, list[str]] = {"changed": [], "staged": []}
162- for _file in self.files:
163- staged["changed" if _file[0] == " " else "staged"].append(_file)
164+ for file_ in self.files:
165+ staged["changed" if file_[0] == " " else "staged"].append(file_)
166 return staged
167
168 def _file_path_(self, filename: str | Path) -> Path:
169@@ -258,10 +275,17 @@ class GitRepo:
170 base_branch: str | None = None,
171 add_files: str | list[str] | None = None,
172 orphan: bool = False,
173+ clean_tree: bool = True,
174 ) -> Iterator[None]:
175+ if clean_tree:
176+ self.reset_to_remote()
177 # checkout the branch
178 if self.branch_exists(branch):
179- new_branch = self.checkout_from(branch, base_branch or self.main)
180+ from_branch = base_branch or self.main
181+ if branch == from_branch:
182+ new_branch = self.checkout(branch)
183+ else:
184+ new_branch = self.checkout_from(branch, base_branch or self.main)
185 elif orphan:
186 new_branch = self.checkout_orphan(branch)
187 force_push: bool = self.diverged
188diff --git a/temporal/common/jenkins_functions.py b/temporal/common/jenkins_functions.py
189index 1964793..f88f4da 100644
190--- a/temporal/common/jenkins_functions.py
191+++ b/temporal/common/jenkins_functions.py
192@@ -95,8 +95,8 @@ class JenkinsBuild(Build, Serializable): # type:ignore [misc]
193 return int(self.get_number())
194
195 def _artifact_(self, name: str) -> Artifact | None:
196- for a_name, artifact in self._artifacts_.items():
197- if a_name == name:
198+ for artifact_name, artifact in self._artifacts_.items():
199+ if artifact_name == name:
200 return artifact
201 return None
202
203@@ -127,7 +127,9 @@ class JenkinsBuild(Build, Serializable): # type:ignore [misc]
204
205 @cached_property
206 def results(self) -> dict[str, Any]:
207- return {k: v.__dict__ for k, v in self.get_resultset().items()}
208+ if self.has_resultset():
209+ return {k: v.__dict__ for k, v in self.get_resultset().items()}
210+ return {}
211
212 @cached_property
213 def result_statuses(self) -> dict[str, bool]:
214diff --git a/temporal/common/jenkins_workflows.py b/temporal/common/jenkins_workflows.py
215index e1b97ed..cfd8534 100644
216--- a/temporal/common/jenkins_workflows.py
217+++ b/temporal/common/jenkins_workflows.py
218@@ -22,7 +22,7 @@ class JenkinsTesterWorkflowParams(TestWorkflowParams):
219
220 # job details
221 job_name: str = ""
222- job_parameters: dict[str, str] = field(default_factory=dict)
223+ job_parameters: dict[str, str | int | bool] = field(default_factory=dict)
224 job_build: int | None = None
225
226 # Any jobs that will be triggered by the primary job
227@@ -163,7 +163,7 @@ async def fetch_downstream_build(params: FetchDownstreamBuildParams) -> int:
228 ds_job_name = params.downstream_job
229
230 for test_num in range(
231- max(1, params.upstream_build), server.next_build_number(ds_job_name)
232+ max(1, params.downstream_build), server.next_build_number(ds_job_name)
233 )[::-1]:
234 build: JenkinsBuild = server.build(ds_job_name, test_num)
235 if (us_job_name, us_build_num) in build.upstream_jobs:
236@@ -189,7 +189,7 @@ async def fetch_job_details(params: FetchJobDetailsParams) -> dict[str, Any]:
237 return "-".join(groups)
238 return default or to_search
239
240- # if we're searching for specific log sections
241+ # only return logs we've specifically asked for
242 output_logs: dict[str, str] = {}
243 if log_parts := params.log_parts:
244 for log_name, log_content in build.logs.items():
245@@ -227,9 +227,6 @@ async def fetch_job_details(params: FetchJobDetailsParams) -> dict[str, Any]:
246 # otherwise return the whole log
247 else:
248 output_logs[name] = log_content
249- # otherwise, return all the logs
250- else:
251- output_logs = build.logs
252 # return the job details
253 return {
254 "logs": output_logs,
255@@ -327,6 +324,12 @@ class JenkinsTesterWorkflow:
256 start_to_close_timeout=params.to_delta(params.fetch_results_timeout),
257 )
258
259+ # if we're testing child jobs, the parent must have passed also
260+ if sub_jobs:
261+ assert job_details[
262+ "passed"
263+ ], "Cannot search for children of a failed parent!"
264+
265 # iterate over any sub jobs
266 for downstream_name, downstream_number in sub_job_min_numbers.items():
267 # fetch an old build or request new
268diff --git a/temporal/start_packer_cron.py b/temporal/start_packer_cron.py
269index 354a9f7..1564867 100644
270--- a/temporal/start_packer_cron.py
271+++ b/temporal/start_packer_cron.py
272@@ -32,7 +32,7 @@ async def main(
273 await client.start_workflow(
274 SchedulePackerImageTests.run,
275 params,
276- id="Determine packer tests",
277+ id="Determine-packer-tests",
278 task_queue=task_queue,
279 start_delay=schedule.start - now
280 if schedule.start and (now := datetime.utcnow()) and schedule.start > now
281@@ -45,7 +45,7 @@ async def main(
282 action=ScheduleActionStartWorkflow(
283 SchedulePackerImageTests.run,
284 params,
285- id="Determine packer tests",
286+ id="Determine-packer-tests",
287 task_queue=task_queue,
288 ),
289 spec=ScheduleSpec(
290@@ -59,7 +59,9 @@ async def main(
291
292 if __name__ == "__main__":
293 # parser
294- parser = WorkflowParser(description="Starts an image test for the supplied images.")
295+ parser = WorkflowParser(
296+ description="Starts a cron job to select packer images for testing."
297+ )
298 parser.add_cron()
299
300 # test specific
301diff --git a/temporal/start_promotion_cron.py b/temporal/start_promotion_cron.py
302new file mode 100644
303index 0000000..89878e9
304--- /dev/null
305+++ b/temporal/start_promotion_cron.py
306@@ -0,0 +1,182 @@
307+import asyncio
308+from datetime import datetime
309+
310+from temporalio import workflow
311+from temporalio.client import (
312+ Schedule,
313+ ScheduleActionStartWorkflow,
314+ ScheduleIntervalSpec,
315+ ScheduleSpec,
316+)
317+
318+with workflow.unsafe.imports_passed_through():
319+ from .common.argument_parser import WorkflowParser, iso8601todelta
320+ from .common.common import get_authenticated_client, task_queue, validateurl
321+ from .workflows.promotion_image_cron import (
322+ ScheduleImagePromotionTests,
323+ ScheduleImagePromotionTestsParams,
324+ )
325+
326+
327+async def main(
328+ parser: WorkflowParser,
329+) -> None:
330+ params, conn, schedule = parser.parse_argv(ScheduleImagePromotionTestsParams)
331+ client = await get_authenticated_client(
332+ connection=conn,
333+ root_cas_key=parser.lets_encrypt_root_cas,
334+ task_queue=task_queue,
335+ )
336+
337+ if not schedule.every:
338+ await client.start_workflow(
339+ ScheduleImagePromotionTests.run,
340+ params,
341+ id="Determine-promotion-targets",
342+ task_queue=task_queue,
343+ start_delay=schedule.start - now
344+ if schedule.start and (now := datetime.utcnow()) and schedule.start > now
345+ else None,
346+ )
347+ else:
348+ await client.create_schedule(
349+ "Promotion-test-scheduler",
350+ schedule=Schedule(
351+ action=ScheduleActionStartWorkflow(
352+ ScheduleImagePromotionTests.run,
353+ params,
354+ id="Determine-promotion-targets",
355+ task_queue=task_queue,
356+ ),
357+ spec=ScheduleSpec(
358+ start_at=schedule.start,
359+ end_at=schedule.end,
360+ intervals=[ScheduleIntervalSpec(every=schedule.every)],
361+ ),
362+ ),
363+ )
364+
365+
366+if __name__ == "__main__":
367+ # parser
368+ parser = WorkflowParser(
369+ description="Starts a cron job to select promotion images for testing."
370+ )
371+ parser.add_cron()
372+
373+ # for this parser
374+ parser.add_argument(
375+ "--image-stream",
376+ "--stream",
377+ type=validateurl,
378+ default="https://images.maas.io/ephemeral-v3/candidate/",
379+ help="The image stream to pull images from.",
380+ )
381+ parser.add_argument(
382+ "--maas-snap-channel",
383+ "--snap-channel",
384+ "--sc",
385+ type=str,
386+ default="latest/edge",
387+ help="The snap channel to install MAAS for testing with.",
388+ )
389+ parser.add_argument(
390+ "--silent-messages",
391+ "--sm",
392+ action="store_true",
393+ help="""Flag to prevent Mattermost messages being sent.""",
394+ )
395+ parser.add_argument(
396+ "--include-unreleased",
397+ "--iu",
398+ action="store_true",
399+ help="""Flag to include unreleased images in the tests.""",
400+ )
401+
402+ # test specific
403+ testing_group = parser.add_argument_group(
404+ "Testing and building configurations", "Modify the testing and building jobs."
405+ )
406+ testing_group.add_argument(
407+ "--test",
408+ action="store_true",
409+ help="""Flag to test the results of the input arguments
410+ without progressing further.""",
411+ )
412+ testing_group.add_argument(
413+ "--patch-file",
414+ "--pf",
415+ type=str,
416+ default="conf/cpc-patch.yaml",
417+ help="The file containing the arches for the stream.",
418+ )
419+ testing_group.add_argument(
420+ "--packer-file",
421+ "--pkf",
422+ type=str,
423+ default="conf/packer-maas.yaml",
424+ help="The file containing release details about packer-based images.",
425+ )
426+ testing_group.add_argument(
427+ "--ubuntu-file",
428+ "--uf",
429+ type=str,
430+ default="conf/meph-v3.yaml",
431+ help="The file containing release details about ubuntu images.",
432+ )
433+ testing_group.add_argument(
434+ "--support-standard",
435+ "--sstand",
436+ type=iso8601todelta,
437+ default="P9M",
438+ help="""ISO Duration - P[n]Y[n]M[n]W[n]DT[n]H[n]M[n]S
439+ Length of standard support window for ubuntu images.""",
440+ )
441+ testing_group.add_argument(
442+ "--support-eol",
443+ "--seol",
444+ type=iso8601todelta,
445+ default="P5Y",
446+ help="""ISO Duration - P[n]Y[n]M[n]W[n]DT[n]H[n]M[n]S
447+ Length of end of life window for ubuntu images.""",
448+ )
449+ testing_group.add_argument(
450+ "--support-esm",
451+ "--sesm",
452+ type=iso8601todelta,
453+ default="P10Y",
454+ help="""ISO Duration - P[n]Y[n]M[n]W[n]DT[n]H[n]M[n]S
455+ Length of extended security maintenance window for ubuntu images.""",
456+ )
457+
458+ # jenkins
459+ parser.add_jenkins(jenkins_url="http://maas-integration-ci.internal:8080")
460+
461+ # git repos
462+ parser.add_repository(
463+ repo_name="image repo",
464+ repo_usage="pushing successful candidate images",
465+ repo_url="https://git.launchpad.net/maas-images",
466+ repo_location="~/maas-images",
467+ )
468+ parser.add_repository(
469+ repo_name="maas git",
470+ repo_usage="pulling maas source",
471+ repo_url="https://git.launchpad.net/maas",
472+ repo_branch="master",
473+ )
474+ parser.add_repository(
475+ repo_name="system test",
476+ repo_usage="testing images",
477+ repo_url="https://git.launchpad.net/~maas-committers/maas-ci/+git/system-tests",
478+ repo_branch="master",
479+ )
480+
481+ # timeouts
482+ parser.add_timeouts(parameter_class=ScheduleImagePromotionTestsParams)
483+
484+ # connectivity
485+ parser.add_connections(url="localhost:7233", namespace="image-testing")
486+
487+ # execute
488+ asyncio.run(main(parser))
489diff --git a/temporal/test_promotion_images.py b/temporal/test_promotion_images.py
490new file mode 100644
491index 0000000..f57f112
492--- /dev/null
493+++ b/temporal/test_promotion_images.py
494@@ -0,0 +1,153 @@
495+import asyncio
496+from datetime import datetime
497+
498+from temporalio import workflow
499+
500+with workflow.unsafe.imports_passed_through():
501+ from .common.argument_parser import WorkflowParser
502+ from .common.common import get_authenticated_client, task_queue, validateurl
503+ from .common.github_functions import GitRepo
504+ from .workflows.promotion_image_tester import (
505+ TestPromotionImages,
506+ TestPromotionImagesParams,
507+ image_names,
508+ )
509+
510+
511+async def main(
512+ parser: WorkflowParser,
513+) -> None:
514+ params, conn, _ = parser.parse_argv(TestPromotionImagesParams)
515+ client = await get_authenticated_client(
516+ connection=conn,
517+ root_cas_key=parser.lets_encrypt_root_cas,
518+ task_queue=task_queue,
519+ )
520+ dt = datetime.utcnow().strftime("%Y%m%d-%H%M%S%f")[:-3]
521+
522+ await client.start_workflow(
523+ TestPromotionImages.run,
524+ params,
525+ id="-".join(["image", "promotion"] + params.images + [dt]),
526+ task_queue=task_queue,
527+ )
528+
529+
530+def fetch_promotion_images(
531+ repo_url: str = "https://git.launchpad.net/maas-images",
532+ packer_file: str = "conf/packer-maas.yaml",
533+ ubuntu_file: str = "conf/meph-v3.yaml",
534+) -> list[str]:
535+ image_repo = GitRepo(repo_url, "~/maas-images")
536+ image_repo.clone_clean()
537+ images = image_names(
538+ image_repo=image_repo, packer_file=packer_file, ubuntu_file=ubuntu_file
539+ )
540+ return list(dict(sorted(images.items())).values())
541+
542+
543+if __name__ == "__main__":
544+ images = fetch_promotion_images()
545+
546+ # parser
547+ parser = WorkflowParser(description="Tests and promotes the supplied images.")
548+
549+ # base args
550+ parser.add_argument(
551+ "images",
552+ type=str,
553+ help="Name of an image to be promoted.",
554+ choices=images,
555+ nargs="+",
556+ )
557+ parser.add_argument(
558+ "--image-stream",
559+ "--stream",
560+ type=validateurl,
561+ default="https://images.maas.io/ephemeral-v3/candidate/",
562+ help="The image stream to pull images from.",
563+ )
564+ parser.add_argument(
565+ "--maas-snap-channel",
566+ "--snap-channel",
567+ "--sc",
568+ type=str,
569+ default="latest/edge",
570+ help="The snap channel to install MAAS for testing with.",
571+ )
572+ parser.add_argument(
573+ "--silent-messages",
574+ "--sm",
575+ action="store_true",
576+ help="""Flag to prevent Mattermost messages being sent.""",
577+ )
578+ parser.add_argument(
579+ "--include-unreleased",
580+ "--iu",
581+ action="store_true",
582+ help="""Flag to include unreleased images in the tests.""",
583+ )
584+
585+ # test specific
586+ testing_group = parser.add_argument_group(
587+ "Testing and building configurations", "Modify the testing and building jobs."
588+ )
589+ testing_group.add_argument(
590+ "--test",
591+ action="store_true",
592+ help="""Flag to test the results of the input arguments
593+ without progressing further.""",
594+ )
595+ testing_group.add_argument(
596+ "--patch-file",
597+ "--pf",
598+ type=str,
599+ default="conf/cpc-patch.yaml",
600+ help="The file containing the arches for the stream.",
601+ )
602+ testing_group.add_argument(
603+ "--packer-file",
604+ "--pkf",
605+ type=str,
606+ default="conf/packer-maas.yaml",
607+ help="The file containing release details about packer-based images.",
608+ )
609+ testing_group.add_argument(
610+ "--ubuntu-file",
611+ "--uf",
612+ type=str,
613+ default="conf/meph-v3.yaml",
614+ help="The file containing release details about ubuntu images.",
615+ )
616+
617+ # jenkins
618+ parser.add_jenkins(jenkins_url="http://maas-integration-ci.internal:8080")
619+
620+ # git repos
621+ parser.add_repository(
622+ repo_name="image repo",
623+ repo_usage="pushing successful candidate images",
624+ repo_url="https://git.launchpad.net/maas-images",
625+ repo_location="~/maas-images",
626+ )
627+ parser.add_repository(
628+ repo_name="maas git",
629+ repo_usage="pulling maas source",
630+ repo_url="https://git.launchpad.net/maas",
631+ repo_branch="master",
632+ )
633+ parser.add_repository(
634+ repo_name="system test",
635+ repo_usage="testing images",
636+ repo_url="https://git.launchpad.net/~maas-committers/maas-ci/+git/system-tests",
637+ repo_branch="master",
638+ )
639+
640+ # timeouts
641+ parser.add_timeouts(parameter_class=TestPromotionImagesParams)
642+
643+ # connectivity
644+ parser.add_connections(url="localhost:7233", namespace="image-testing")
645+
646+ # execute
647+ asyncio.run(main(parser))
648diff --git a/temporal/workflows/matrix_testing_worker.py b/temporal/workflows/matrix_testing_worker.py
649index a5da67a..31ada0d 100644
650--- a/temporal/workflows/matrix_testing_worker.py
651+++ b/temporal/workflows/matrix_testing_worker.py
652@@ -6,10 +6,20 @@ with workflow.unsafe.imports_passed_through():
653 from .packer_image_cron import workflows as packer_cron_workflows
654 from .packer_image_tester import activities as packer_image_activities
655 from .packer_image_tester import workflows as packer_image_workflows
656+ from .promotion_image_cron import activities as promotion_cron_activities
657+ from .promotion_image_cron import workflows as promotion_cron_workflows
658+ from .promotion_image_tester import activities as promotion_image_activities
659+ from .promotion_image_tester import workflows as promotion_image_workflows
660
661 if __name__ == "__main__":
662 start_worker(
663 task_queue=task_queue,
664- workflows=packer_image_workflows + packer_cron_workflows,
665- activities=packer_image_activities + packer_cron_activies,
666+ workflows=packer_image_workflows
667+ + packer_cron_workflows
668+ + promotion_image_workflows
669+ + promotion_cron_workflows,
670+ activities=packer_image_activities
671+ + packer_cron_activies
672+ + promotion_image_activities
673+ + promotion_cron_activities,
674 )
675diff --git a/temporal/workflows/packer_image_cron.py b/temporal/workflows/packer_image_cron.py
676index c3a1ed2..2b0b16c 100644
677--- a/temporal/workflows/packer_image_cron.py
678+++ b/temporal/workflows/packer_image_cron.py
679@@ -12,6 +12,7 @@ with workflow.unsafe.imports_passed_through():
680 nested_dict,
681 read_yaml,
682 snap_channels,
683+ task_queue,
684 todict,
685 )
686 from ..common.github_workflows import (
687@@ -328,6 +329,10 @@ class SchedulePackerImageTests:
688 results_file=params.results_file,
689 results_repo=params.results_repo,
690 results_location=params.results_location,
691+ packer_maas_repo=params.packer_maas_repo,
692+ packer_maas_branch=params.packer_maas_branch,
693+ system_test_repo=params.system_test_repo,
694+ system_test_branch=params.system_test_branch,
695 # jenkins details
696 jenkins_url=params.jenkins_url,
697 jenkins_user=params.jenkins_user,
698@@ -343,8 +348,8 @@ class SchedulePackerImageTests:
699 fetch_results_timeout=params.fetch_results_timeout,
700 default_timeout=params.default_timeout,
701 ),
702- task_queue="mono_queue",
703- id=f"Starting {test_image}-{test_channel} Test",
704+ task_queue=task_queue,
705+ id=f"Starting-{test_image}-{test_channel}-Test",
706 )
707
708
709diff --git a/temporal/workflows/packer_image_tester.py b/temporal/workflows/packer_image_tester.py
710index f8fded6..98c74bc 100644
711--- a/temporal/workflows/packer_image_tester.py
712+++ b/temporal/workflows/packer_image_tester.py
713@@ -13,6 +13,7 @@ with workflow.unsafe.imports_passed_through():
714 matches_image,
715 nested_dict,
716 read_yaml,
717+ task_queue,
718 todict,
719 )
720 from ..common.github_workflows import ReportToMatrix, ReportToMatrixParams
721@@ -38,6 +39,16 @@ STATUS_MAPPING = {
722 }
723 POSITIVE_STATUSES = ["passed", "fixed"]
724
725+RESULT_NAME_SEARCH = re.compile(
726+ r"\[(?P<machine>.*)\.(?P<arch>.*)\-(?P<image>.*)\-(?P<step>.*)\]"
727+)
728+FEATURE_NAME_SEARCH = re.compile(
729+ r"(?P<machine>.*)\-(?P<step>.*)\-(?P<feature_group>.*)\-(?P<feature_config>.*)"
730+)
731+MAAS_VERSIONS = re.compile(
732+ r"maas\s+(?P<long_version>(?P<short_version>\d+\.\d+)[^\s]+)"
733+)
734+
735
736 @dataclass
737 class TestPackerImagesParams(TestWorkflowParams):
738@@ -181,13 +192,6 @@ async def parse_test_results(params: ParseTestParams) -> dict[str, dict[str, Any
739 image_results = params.image_results
740 name_from_mapping = params.name_from_mapping
741
742- result_name_search = re.compile(
743- r"\[(?P<machine>.*)\.(?P<arch>.*)\-(?P<image>.*)\-(?P<step>.*)\]"
744- )
745- feature_name_search = re.compile(
746- r"(?P<machine>.*)\-(?P<step>.*)\-(?P<feature_group>.*)\-(?P<feature_config>.*)"
747- )
748-
749 output_results = nested_dict()
750 for image in params.images:
751 image_oseries_name = name_from_mapping.get(image, image)
752@@ -199,7 +203,7 @@ async def parse_test_results(params: ParseTestParams) -> dict[str, dict[str, Any
753 for result_name, result_status in sorted(
754 this_test_results.get("result_statuses", {}).items()
755 ):
756- if search := result_name_search.search(result_name):
757+ if search := RESULT_NAME_SEARCH.search(result_name):
758 if not matches_image(
759 search.group("image"), [image, image_oseries_name]
760 ):
761@@ -234,7 +238,7 @@ async def parse_test_results(params: ParseTestParams) -> dict[str, dict[str, Any
762 this_test_results.get("logs", {}).items()
763 ):
764 if (
765- feature_search := feature_name_search.search(feature_name)
766+ feature_search := FEATURE_NAME_SEARCH.search(feature_name)
767 ) and (feature_search.group("machine") == this_machine):
768 feature, config = feature_search.group(
769 "feature_group", "feature_config"
770@@ -315,12 +319,7 @@ async def parse_test_results(params: ParseTestParams) -> dict[str, dict[str, Any
771 maas_version := image_results.get(image, {})
772 .get("logs", {})
773 .get("env_builder-maas")
774- ) and (
775- search := re.search(
776- r"maas\s+(?P<long_version>(?P<short_version>\d+\.\d+)[^\s]+)",
777- maas_version,
778- )
779- ):
780+ ) and (search := MAAS_VERSIONS.search(maas_version)):
781 this_image_details["maas_long_version"] = search.group("long_version")
782 this_image_details["maas_short_version"] = search.group("short_version")
783
784@@ -339,6 +338,9 @@ async def parse_failed_results(params: ParseTestParams) -> dict[str, dict[str, A
785 image_results = params.image_results
786 name_from_mapping = params.name_from_mapping
787
788+ build_status = str(build_details["status"])
789+ test_status = str(image_results["status"])
790+
791 output_results: dict[str, dict[str, Any]] = {}
792 for image in params.images:
793 build_logs: dict[str, str] = build_details["logs"]
794@@ -356,6 +358,7 @@ async def parse_failed_results(params: ParseTestParams) -> dict[str, dict[str, A
795 ]
796 + ["failed"]
797 )[0]
798+ tests_run = image_results.get(image, {}).get("results", {})
799 features: list[str] = [
800 feat
801 for feat_name, feat in image_results.get(image, {}).get("logs", {}).items()
802@@ -363,16 +366,26 @@ async def parse_failed_results(params: ParseTestParams) -> dict[str, dict[str, A
803 ]
804
805 summary: str = ""
806- if build_details["status"] == "aborted":
807- summary = "Build job aborted."
808+ if build_status == "aborted":
809+ summary = "Build job was aborted."
810+ elif test_status == "aborted":
811+ summary = "Test job was aborted."
812 elif not build_details["passed"]:
813 summary = "Build job failed."
814 elif image_build_status not in POSITIVE_STATUSES:
815 summary = "Could not build image."
816 elif not image_results.get(image, {}):
817- summary = "Could not test image."
818+ summary = "Could not run image test."
819+ elif not ["deploy" in run for run in tests_run.keys()]:
820+ summary = "Test failed before image deployment."
821+ elif [
822+ run["status"] not in POSITIVE_STATUSES
823+ for name, run in tests_run.items()
824+ if "deploy" in name
825+ ]:
826+ summary = "Could not deploy image."
827 elif not features:
828- summary = "Could not test any features."
829+ summary = "Could not test image features."
830
831 image_details: dict[str, Any] = {
832 "architectures": [],
833@@ -391,12 +404,7 @@ async def parse_failed_results(params: ParseTestParams) -> dict[str, dict[str, A
834 maas_version := image_results.get(image, {})
835 .get("logs", {})
836 .get("env_builder-maas")
837- ) and (
838- search := re.search(
839- r"maas\s+(?P<long_version>(?P<short_version>\d+\.\d+)[^\s]+)",
840- maas_version,
841- )
842- ):
843+ ) and (search := MAAS_VERSIONS.search(maas_version)):
844 image_details["maas_long_version"] = search.group("long_version")
845 image_details["maas_short_version"] = search.group("short_version")
846
847@@ -455,15 +463,16 @@ class TestPackerImages:
848 max_retry_attempts=params.max_retry_attempts,
849 heartbeat_delay=params.heartbeat_delay,
850 # timeouts
851+ default_timeout=params.default_timeout,
852 login_timeout=params.login_timeout,
853- request_build_timeout=params.request_build_timeout,
854+ read_file_timeout=params.read_file_timeout,
855 start_build_timeout=params.start_build_timeout,
856- complete_build_timeout=params.complete_build_timeout,
857 fetch_results_timeout=params.fetch_results_timeout,
858- default_timeout=params.default_timeout,
859+ request_build_timeout=params.request_build_timeout,
860+ complete_build_timeout=params.complete_build_timeout,
861 ),
862- task_queue="mono_queue",
863- id=f"Determining parameters: {', '.join(job_search)}",
864+ task_queue=task_queue,
865+ id=f"Determining-Parameters:{'-'.join(job_search)}",
866 )
867
868 # build the images
869@@ -500,15 +509,16 @@ class TestPackerImages:
870 max_retry_attempts=params.max_retry_attempts,
871 heartbeat_delay=params.heartbeat_delay,
872 # timeouts
873+ default_timeout=params.default_timeout,
874 login_timeout=params.login_timeout,
875- request_build_timeout=params.request_build_timeout,
876+ read_file_timeout=params.read_file_timeout,
877 start_build_timeout=params.start_build_timeout,
878- complete_build_timeout=params.complete_build_timeout,
879 fetch_results_timeout=params.fetch_results_timeout,
880- default_timeout=params.default_timeout,
881+ request_build_timeout=params.request_build_timeout,
882+ complete_build_timeout=params.complete_build_timeout,
883 ),
884- task_queue="mono_queue",
885- id=f"Building: {', '.join(params.images)}",
886+ task_queue=task_queue,
887+ id=f"Building:{'-'.join(params.images)}",
888 )
889
890 # determine the images that built
891@@ -549,15 +559,16 @@ class TestPackerImages:
892 max_retry_attempts=params.max_retry_attempts,
893 heartbeat_delay=params.heartbeat_delay,
894 # timeouts
895+ default_timeout=params.default_timeout,
896 login_timeout=params.login_timeout,
897- request_build_timeout=params.request_build_timeout,
898+ read_file_timeout=params.read_file_timeout,
899 start_build_timeout=params.start_build_timeout,
900- complete_build_timeout=params.complete_build_timeout,
901 fetch_results_timeout=params.fetch_results_timeout,
902- default_timeout=params.default_timeout,
903+ request_build_timeout=params.request_build_timeout,
904+ complete_build_timeout=params.complete_build_timeout,
905 ),
906- task_queue="mono_queue",
907- id=f"Testing: {image}",
908+ task_queue=task_queue,
909+ id=f"Testing:{image}",
910 )
911 image_test_results |= {image: image_details}
912
913@@ -617,8 +628,8 @@ class TestPackerImages:
914 fetch_results_timeout=params.fetch_results_timeout,
915 default_timeout=params.default_timeout,
916 ),
917- task_queue="mono_queue",
918- id=f"Reporting: {', '.join(output_results.keys())}",
919+ task_queue=task_queue,
920+ id=f"Reporting:{'-'.join(output_results.keys())}",
921 )
922 return output_results
923
924diff --git a/temporal/workflows/promotion_image_cron.py b/temporal/workflows/promotion_image_cron.py
925new file mode 100644
926index 0000000..4e5b92e
927--- /dev/null
928+++ b/temporal/workflows/promotion_image_cron.py
929@@ -0,0 +1,242 @@
930+from dataclasses import dataclass
931+from datetime import datetime
932+
933+import yaml
934+from temporalio import activity, workflow
935+from temporalio.common import RetryPolicy
936+
937+with workflow.unsafe.imports_passed_through():
938+ from ..common.argument_parser import RelativeDelta
939+ from ..common.common import TestWorkflowParams, task_queue
940+ from ..common.github_functions import GitRepo
941+ from ..common.github_workflows import FetchGitRepoParams, fetch_git_repo
942+ from .promotion_image_tester import (
943+ PATCH_DICT_TYPE,
944+ DetermineReleaseNamesParams,
945+ FetchMephChangesParams,
946+ TestPromotionImages,
947+ TestPromotionImagesParams,
948+ )
949+ from .promotion_image_tester import activities as test_promotion_activities
950+ from .promotion_image_tester import (
951+ determine_release_names,
952+ fetch_meph_changes,
953+ image_details,
954+ )
955+ from .promotion_image_tester import workflows as test_promotion_workflows
956+
957+
@dataclass
class ScheduleImagePromotionTestsParams(TestWorkflowParams):
    """Parameters for the image-promotion cron workflow.

    Inherits retry/timeout knobs from TestWorkflowParams and adds the
    stream, config-file, Jenkins and repository settings needed to decide
    which candidate images to test and promote.
    """

    # simplestream holding the candidate images to evaluate
    image_stream: str = "https://images.maas.io/ephemeral-v3/candidate/"

    # snap channel MAAS is installed from during testing
    maas_snap_channel: str = "latest/edge"

    # files to be accessed (paths relative to the image repo checkout)
    patch_file: str = "conf/cpc-patch.yaml"
    packer_file: str = "conf/packer-maas.yaml"
    ubuntu_file: str = "conf/meph-v3.yaml"

    # jenkins details
    jenkins_url: str = ""
    jenkins_user: str = ""
    jenkins_pass: str = ""

    # repo
    system_test_repo: str = (
        "https://git.launchpad.net/~maas-committers/maas-ci/+git/system-tests"
    )
    system_test_branch: str = "master"

    maas_git_repo: str = "https://git.launchpad.net/maas"
    maas_git_branch: str = "master"

    image_repo_url: str = "https://git.launchpad.net/maas-images"
    image_repo_location: str = "~/maas-images"

    # what we report to mattermost
    silent_messages: bool = False
    # also consider images whose release date is in the future
    include_unreleased: bool = False

    # support windows (offsets added to an Ubuntu release date)
    support_standard: RelativeDelta = RelativeDelta(months=9)
    support_eol: RelativeDelta = RelativeDelta(years=5)
    support_esm: RelativeDelta = RelativeDelta(years=10)
994+
995+
@dataclass
class DetermineTestableImagesParams:
    """Inputs for the determine_testable_images activity."""

    # checked-out maas-images repository
    image_repo: GitRepo
    # packer config path inside the repo
    packer_file: str

    # release id -> image name mapping (see image_names)
    release_names: dict[str, str]
    # patch file content before/after the meph2-util diff
    old_patch: PATCH_DICT_TYPE
    new_patch: PATCH_DICT_TYPE

    # treat not-yet-released images as testable
    include_unreleased: bool
    # support window offsets relative to an Ubuntu release date
    support_standard: RelativeDelta
    support_eol: RelativeDelta
    support_esm: RelativeDelta
1009+
1010+
def ymd(time: datetime) -> str:
    """Format *time* as a YYYY-MM-DD date string."""
    return f"{time:%Y-%m-%d}"
1013+
1014+
@activity.defn
async def determine_testable_images(
    params: DetermineTestableImagesParams,
) -> dict[str, bool]:
    """Determine which images are testable, that is:
    1. The image has new versions in the stream.
    2. The image is currently supported, and released"""
    today = ymd(datetime.now())

    with params.image_repo.directory.joinpath(params.packer_file).open() as f:
        packer_releases: dict[str, dict[str, datetime]] = yaml.safe_load(f).get(
            "packer-maas", {}
        )

    testable: dict[str, bool] = {}
    for image in image_details(
        release_names=params.release_names,
        patch=params.new_patch,
        old_patch=params.old_patch,
    ):
        # unchanged images don't need to be worried about
        if not image.changed:
            continue
        # defaults: an unknown end-of-support date sorts lexicographically
        # after any ISO date, so "Unknown" counts as still-supported below
        lts = False
        released = False
        end = eol = esm = "Unknown"
        # ubuntu image: the release id is a "%y.%m" version string.
        # strptime raises for non-Ubuntu names (e.g. "centos7.0"), so it
        # must be guarded or the packer branch below is never reached.
        if (release_date := _parse_ubuntu_release(image.release)) is not None:
            lts = release_date.year % 2 == 0 and release_date.month == 4
            released = ymd(release_date) < today
            end = ymd(release_date + params.support_standard)
            eol = ymd(release_date + params.support_eol)
            esm = ymd(release_date + params.support_esm)
        # a packer image we know about
        elif packer_eol := packer_releases.get(image.name, {}).get("support_eol"):
            released = True
            end = packer_eol.strftime("%Y-%m-%d")
        # determine end of life dates
        usable = image.detail.get("support_esm_eol", {}).get(
            "stable", esm if lts else end
        )
        support = image.detail.get("support_eol", {}).get("stable", eol if lts else end)
        # determine supported status: testable once any arch of the image
        # is (released or allowed-unreleased) and inside a support window
        testable[image.name] = (
            testable.get(image.name, False)
            or (released or params.include_unreleased)
            and ((usable > today) or (support > today))
        )
    return testable


def _parse_ubuntu_release(release: str) -> datetime | None:
    """Parse a "%y.%m" Ubuntu version string; None for non-Ubuntu names."""
    try:
        return datetime.strptime(release, "%y.%m")
    except ValueError:
        return None
1065+
1066+
@workflow.defn
class ScheduleImagePromotionTests:
    """Cron workflow: find candidate images with new versions in the
    stream and hand the supported, changed ones to the TestPromotionImages
    child workflow for testing and promotion."""

    @workflow.run
    async def run(self, params: ScheduleImagePromotionTestsParams) -> str:
        """Return the promotion summary produced by TestPromotionImages."""
        # clone the image repo
        image_repo = await workflow.execute_activity(
            fetch_git_repo,
            FetchGitRepoParams(
                params.image_repo_url,
                params.image_repo_location,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.request_build_timeout),
        )

        # fetch the mapping between image release and release name
        release_names = await workflow.execute_activity(
            determine_release_names,
            DetermineReleaseNamesParams(
                image_repo=image_repo,
                packer_file=params.packer_file,
                ubuntu_file=params.ubuntu_file,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.read_file_timeout),
        )

        # fetch the promotion changes
        meph_changes = await workflow.execute_activity(
            fetch_meph_changes,
            FetchMephChangesParams(
                image_repo=image_repo,
                image_stream=params.image_stream,
                patch_file=params.patch_file,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.read_file_timeout),
        )

        # determine which images have been changed
        testable_images = await workflow.execute_activity(
            determine_testable_images,
            DetermineTestableImagesParams(
                image_repo=image_repo,
                packer_file=params.packer_file,
                release_names=release_names,
                old_patch=meph_changes.get("old", {}),
                new_patch=meph_changes.get("new", {}),
                include_unreleased=params.include_unreleased,
                support_standard=params.support_standard,
                support_eol=params.support_eol,
                support_esm=params.support_esm,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.fetch_results_timeout),
        )

        # test those changed images
        images = [image for image, testable in testable_images.items() if testable]
        return await workflow.execute_child_workflow(
            TestPromotionImages,
            TestPromotionImagesParams(
                images=images,
                image_stream=params.image_stream,
                maas_snap_channel=params.maas_snap_channel,
                silent_messages=params.silent_messages,
                include_unreleased=params.include_unreleased,
                # files
                patch_file=params.patch_file,
                packer_file=params.packer_file,
                ubuntu_file=params.ubuntu_file,
                # repos
                image_repo_url=params.image_repo_url,
                image_repo_location=params.image_repo_location,
                maas_git_repo=params.maas_git_repo,
                maas_git_branch=params.maas_git_branch,
                system_test_repo=params.system_test_repo,
                system_test_branch=params.system_test_branch,
                # jenkins details
                jenkins_url=params.jenkins_url,
                jenkins_user=params.jenkins_user,
                jenkins_pass=params.jenkins_pass,
                # retries
                max_retry_attempts=params.max_retry_attempts,
                heartbeat_delay=params.heartbeat_delay,
                # timeouts
                login_timeout=params.login_timeout,
                request_build_timeout=params.request_build_timeout,
                start_build_timeout=params.start_build_timeout,
                complete_build_timeout=params.complete_build_timeout,
                fetch_results_timeout=params.fetch_results_timeout,
                default_timeout=params.default_timeout,
            ),
            task_queue=task_queue,
            id=f"Promotion-Request:{'-'.join(images)}",
        )
1163+
1164+
# worker registration lists: reuse the tester module's activities and
# workflows, plus the cron-specific ones defined in this module
activities = test_promotion_activities + [
    fetch_git_repo,
    determine_release_names,
    fetch_meph_changes,
    determine_testable_images,
]
workflows = test_promotion_workflows + [ScheduleImagePromotionTests]
1172diff --git a/temporal/workflows/promotion_image_tester.py b/temporal/workflows/promotion_image_tester.py
1173new file mode 100644
1174index 0000000..101f3db
1175--- /dev/null
1176+++ b/temporal/workflows/promotion_image_tester.py
1177@@ -0,0 +1,545 @@
1178+from dataclasses import dataclass, field
1179+from datetime import datetime
1180+from enum import Enum
1181+from typing import Any, Iterator
1182+
1183+import yaml
1184+from temporalio import activity, workflow
1185+from temporalio.common import RetryPolicy
1186+
1187+with workflow.unsafe.imports_passed_through():
1188+ from ..common.common import (
1189+ TestWorkflowParams,
1190+ current_time,
1191+ nested_dict,
1192+ task_queue,
1193+ todict,
1194+ )
1195+ from ..common.github_functions import GitRepo
1196+ from ..common.github_workflows import FetchGitRepoParams
1197+ from ..common.github_workflows import activities as github_activities
1198+ from ..common.github_workflows import fetch_git_repo
1199+ from ..common.github_workflows import workflows as github_workflows
1200+ from ..common.jenkins_workflows import (
1201+ JenkinsTesterWorkflow,
1202+ JenkinsTesterWorkflowParams,
1203+ )
1204+ from ..common.jenkins_workflows import activites as jenkins_activities
1205+ from ..common.jenkins_workflows import workflows as jenkins_workflows
1206+
# stream index keys in the cpc patch that this workflow manages,
# keyed by OS family
PATCH_KEYS = {
    "centos": "com.ubuntu.maas:centos-bases-download.json",
    "ubuntu": "com.ubuntu.maas:v3:download.json",
}
# detail keys that carry end-of-life / ESM dates
EOL_KEYS = {"support_eol", "support_esm_eol"}
# shape of a parsed cpc patch: {stream_key: {image_id: {field: value}}}
PATCH_DICT_TYPE = dict[str, dict[str, dict[str, Any]]]
1213+
1214+
@dataclass
class TestPromotionImagesParams(TestWorkflowParams):
    """Parameters for the TestPromotionImages workflow.

    Mirrors ScheduleImagePromotionTestsParams minus the support windows,
    plus the explicit list of image names to test.
    """

    # image names to test
    images: list[str] = field(default_factory=list)
    # simplestream holding the candidate images
    image_stream: str = "https://images.maas.io/ephemeral-v3/candidate/"

    # snap channel MAAS is installed from during testing
    maas_snap_channel: str = "latest/edge"

    # files to be accessed (paths relative to the image repo checkout)
    patch_file: str = "conf/cpc-patch.yaml"
    packer_file: str = "conf/packer-maas.yaml"
    ubuntu_file: str = "conf/meph-v3.yaml"

    # jenkins details
    jenkins_url: str = ""
    jenkins_user: str = ""
    jenkins_pass: str = ""

    # repositories
    system_test_repo: str = (
        "https://git.launchpad.net/~maas-committers/maas-ci/+git/system-tests"
    )
    system_test_branch: str = "master"

    maas_git_repo: str = "https://git.launchpad.net/maas"
    maas_git_branch: str = "master"

    image_repo_url: str = "https://git.launchpad.net/maas-images"
    image_repo_location: str = "~/maas-images"

    # suppress mattermost reporting
    silent_messages: bool = False
    # also test/promote images whose release date is in the future
    include_unreleased: bool = False
1246+
1247+
@dataclass
class FetchMephChangesParams:
    """Inputs for the fetch_meph_changes activity."""

    # checked-out maas-images repository
    image_repo: GitRepo
    # candidate stream to diff against stable
    image_stream: str
    # patch file (relative to the repo) that meph2-util rewrites
    patch_file: str = "conf/cpc-patch.yaml"
1253+
1254+
@activity.defn
async def fetch_meph_changes(
    params: FetchMephChangesParams,
) -> dict[str, dict[str, Any]]:
    """Regenerate the cpc patch file and return {"old": ..., "new": ...}.

    Checks out a clean main branch, snapshots the committed patch file,
    then runs meph2-util to diff the candidate stream against stable
    (writing the result back over the patch file) and loads the result.
    """
    # start from a pristine main so the diff is against the committed patch
    params.image_repo.clean_checkout(params.image_repo.main)

    with params.image_repo.directory.joinpath(params.patch_file).open() as f:
        old_patch_file = yaml.safe_load(f)

    # meph2-util overwrites params.patch_file in place (-o)
    params.image_repo.execute(
        [
            "./bin/meph2-util",
            "diff",
            params.image_stream,
            "https://images.maas.io/ephemeral-v3/stable",
            "--new-versions-only",
            "--latest-only",
            "--promote",
            "-o",
            params.patch_file,
        ]
    )

    with params.image_repo.directory.joinpath(params.patch_file).open() as f:
        new_patch_file = yaml.safe_load(f)

    return {"old": old_patch_file, "new": new_patch_file}
1282+
1283+
@dataclass
class DetermineReleaseNamesParams:
    """Inputs for the determine_release_names activity."""

    # checked-out maas-images repository
    image_repo: GitRepo
    # packer config listing non-Ubuntu images
    packer_file: str = "conf/packer-maas.yaml"
    # meph config listing Ubuntu releases
    ubuntu_file: str = "conf/meph-v3.yaml"
1289+
1290+
def image_names(
    image_repo: GitRepo, packer_file: str, ubuntu_file: str
) -> dict[str, str]:
    """Map a release id (e.g. "centos7.0", "20.04") to its image name.

    Packer entries are keyed "<os><version>" formatted to one decimal
    place; Ubuntu entries map the numeric version to the release codename.
    """
    with image_repo.directory.joinpath(packer_file).open() as f:
        packer_releases: dict[str, dict[str, int | str]] = yaml.safe_load(f).get(
            "packer-maas", {}
        )
    with image_repo.directory.joinpath(ubuntu_file).open() as f:
        ubuntu_releases: list[dict[str, str]] = yaml.safe_load(f).get("releases", {})

    # NOTE(review): the ":0.1f" format assumes `version` is numeric in the
    # YAML — a string version would raise ValueError here; confirm schema.
    packer_names = {
        f"{details.get('os')}{details.get('version'):0.1f}": image
        for image, details in packer_releases.items()
    }
    ubuntu_names = {
        release["version"]: release["release"] for release in ubuntu_releases
    }

    return packer_names | ubuntu_names
1310+
1311+
@activity.defn
async def determine_release_names(
    params: DetermineReleaseNamesParams,
) -> dict[str, str]:
    """Activity wrapper: build the release-id -> image-name mapping from
    the repo's packer and ubuntu config files."""
    repo = params.image_repo
    return image_names(repo, params.packer_file, params.ubuntu_file)
1321+
1322+
@dataclass
class DetermineArchesForImagesParams:
    """Inputs for the determine_arches_for_images activity."""

    # image names whose arches we want
    images: list[str]
    # release id -> image name mapping (see image_names)
    release_names: dict[str, str]
    # the "new" half of the meph diff to scan for arches
    meph_changes: dict[str, dict[str, Any]]
1328+
1329+
@dataclass
class ImageDetail:
    """One image entry from the cpc patch, resolved to a friendly name."""

    # full image id key from the patch file
    id: str
    # stream index key (a PATCH_KEYS value) the image was found under
    key: str
    # image name mapped via release_names
    name: str
    # release id parsed from the image id (e.g. "centos7.0", "20.04")
    release: str
    # architecture parsed from the image id
    arch: str
    # entry in the new patch ({} if absent)
    detail: dict[str, Any]
    # entry in the old patch ({} if absent)
    old_detail: dict[str, Any]
    # whether detail differs from old_detail
    changed: bool
1340+
1341+
def release_arch(image_id: str) -> tuple[str, str]:
    """Return the release name and arch from the image id of
    the cpc_patch keys"""
    parts = image_id.split(":")
    if "centos" in image_id:
        # centos ids end with ...:<release>:<arch>
        return f"centos{parts[-2]}", parts[-1]
    # ubuntu ids end with ...:<release>:<arch>:<item>
    return parts[-3], parts[-2]
1351+
1352+
def image_details(
    release_names: dict[str, str],
    patch: PATCH_DICT_TYPE,
    old_patch: PATCH_DICT_TYPE | None = None,
    combine_keys: bool = False,
) -> Iterator[ImageDetail]:
    """Yield an ImageDetail for every recognised image id in *patch*
    (optionally also covering ids only present in *old_patch*)."""
    previous = old_patch or {}
    for key in PATCH_KEYS.values():
        image_ids = set(patch.get(key, {}))
        if old_patch and combine_keys:
            image_ids |= set(previous.get(key, {}))
        for image_id in image_ids:
            release, arch = release_arch(image_id=image_id)
            name = release_names.get(release)
            # ids whose release we can't name are not ours to manage
            if not name:
                continue
            new_detail = patch.get(key, {}).get(image_id, {})
            old_detail = previous.get(key, {}).get(image_id, {})
            yield ImageDetail(
                id=image_id,
                key=key,
                name=name,
                release=release,
                arch=arch,
                detail=new_detail,
                old_detail=old_detail,
                changed=new_detail != old_detail,
            )
1378+
1379+
@activity.defn
async def determine_arches_for_images(
    params: DetermineArchesForImagesParams,
) -> dict[str, set[str]]:
    """Return the arches that are supported for an image in the stream"""
    arches: dict[str, set[str]] = {}
    # collect the arch of every patch entry belonging to a requested image
    for image in image_details(
        release_names=params.release_names, patch=params.meph_changes
    ):
        if image.name not in params.images:
            continue
        arches.setdefault(image.name, set()).add(image.arch)
    return arches
1395+
1396+
@dataclass
class GeneratePromotionPatchParams:
    """Inputs for the generate_promotion_patch activity."""

    # image name -> whether its test run passed
    image_test_status: dict[str, bool]
    # release id -> image name mapping (see image_names)
    release_names: dict[str, str]
    # patch file content before/after the meph2-util diff
    old_patch: PATCH_DICT_TYPE
    new_patch: PATCH_DICT_TYPE
    # patch file path (relative to the repo)
    patch_file: str = "conf/cpc-patch.yaml"
    # allow not-yet-released images into the patch (as "candidate")
    include_unreleased: bool = False
1405+
1406+
def image_released(release_name: str) -> bool:
    """Return whether an image release is already out.

    Only Ubuntu releases carry a parseable "%y.%m" date; anything else is
    assumed released, since we only have access to ubuntu images early.
    """
    try:
        release_date = datetime.strptime(release_name, "%y.%m")
    except ValueError:
        # non-Ubuntu release names (e.g. packer images) are always released.
        # strptime raises rather than returning a falsy value, so without
        # this guard the function would crash instead of returning True.
        return True
    return release_date.strftime("%Y-%m-%d") < current_time().split()[0]
1412+
1413+
@activity.defn
async def generate_promotion_patch(
    params: GeneratePromotionPatchParams,
) -> PATCH_DICT_TYPE:
    """Assemble the patch to commit: keep new details for images that
    passed testing, fall back to old details for images that failed, and
    always carry EOL-date updates through.

    Unreleased images are skipped unless include_unreleased is set, in
    which case their labels are forced to "candidate".
    """
    # nested_dict lets us assign patch[key][id] without pre-creating keys
    patch: PATCH_DICT_TYPE = nested_dict()

    for image in image_details(
        release_names=params.release_names,
        patch=params.new_patch,
        old_patch=params.old_patch,
    ):
        # skip unreleased images early
        released = image_released(image.release)
        if not (released or params.include_unreleased):
            continue

        # nothing changed
        if not image.changed:
            patch[image.key][image.id] = image.detail
            continue

        # the image is allowed to change
        if params.image_test_status.get(image.name):
            patch[image.key][image.id] = image.detail
        else:
            patch[image.key][image.id] = image.old_detail

        # always allow esm/eol date changed
        for eol_key in set(image.detail.keys()).intersection(EOL_KEYS):
            patch[image.key][image.id][eol_key] = image.detail[eol_key]

        # only allow unreleased images to be candidate
        if not released:
            # NOTE(review): this indexes patch[key][id][<version>] directly;
            # if per-version data lives under a "versions" sub-dict this
            # should be patch[...]["versions"][version] — confirm schema.
            for version in image.detail.get("versions", {}).keys():
                patch[image.key][image.id][version]["labels"] = ["candidate"]
            if "labels" in image.detail:
                patch[image.key][image.id]["labels"] = ["candidate"]

    # collapse the nested_dict back into plain dicts for serialization
    return todict(patch)
1453+
1454+
@dataclass
class GeneratePromotionMessageParams:
    """Inputs for the generate_promotion_message activity."""

    # patch file content before the promotion
    old_patch: PATCH_DICT_TYPE
    # the promotion patch that will be committed
    new_patch: PATCH_DICT_TYPE
    # release id -> image name mapping (see image_names)
    release_names: dict[str, str]
1460+
1461+
class ImageChanges(Enum):
    """Kinds of difference between the old and new patch for one image."""

    # eol changes
    update_image_eol = 0
    add_image_eol = 1
    remove_image_eol = 2
    # image changes
    update_image = 3
    add_image = 4
    remove_image = 5
1471+
1472+
@activity.defn
async def generate_promotion_message(params: GeneratePromotionMessageParams) -> str:
    """Summarise the differences between the old and new patch.

    Returns one "<change_type>: <image, ...>" line per kind of change, or
    an empty string when nothing changed.

    NOTE(review): change detection compares the *key sets* of each entry,
    so a value-only change (e.g. a new version under the same key) yields
    no line — confirm that is intended.
    """
    # keyed by image *name*: image_details yields the release_names
    # *values*, so seeding this dict with release_names.keys() (as a
    # previous revision did) raises KeyError on the first change.
    changes: dict[str, set[ImageChanges]] = {}

    for image in image_details(
        release_names=params.release_names,
        patch=params.new_patch,
        old_patch=params.old_patch,
        combine_keys=True,
    ):
        if not image.changed:
            continue

        old_keys = set(image.old_detail.keys())
        new_keys = set(image.detail.keys())
        image_changes = changes.setdefault(image.name, set())

        # a new image was added
        if not old_keys and new_keys:
            image_changes.add(ImageChanges.add_image)

        # an old image was removed
        elif not new_keys and old_keys:
            image_changes.add(ImageChanges.remove_image)

        # an image had eol dates added
        if not old_keys.intersection(EOL_KEYS) and new_keys.intersection(EOL_KEYS):
            image_changes.add(ImageChanges.add_image_eol)

        # an image had eol dates removed
        elif not new_keys.intersection(EOL_KEYS) and old_keys.intersection(EOL_KEYS):
            image_changes.add(ImageChanges.remove_image_eol)

        # an image had eol dates changed
        elif new_keys.intersection(EOL_KEYS) != old_keys.intersection(EOL_KEYS):
            image_changes.add(ImageChanges.update_image_eol)

        # some other aspect of the image changed
        elif new_keys.difference(EOL_KEYS) != old_keys.difference(EOL_KEYS):
            image_changes.add(ImageChanges.update_image)

    # convert from image: changes to change: images
    changed: dict[ImageChanges, list[str]] = {}
    for name, change_set in changes.items():
        for c in change_set:
            changed[c] = changed.get(c, []) + [name]

    return "\n".join(
        [f"{c.name}: {', '.join(images)}" for c, images in changed.items()]
    ).strip("\n")
1524+
1525+
@dataclass
class PromoteImagesParams:
    """Inputs for the promote_images activity."""

    # checked-out maas-images repository
    image_repo: GitRepo
    # the patch content to splice into the patch file
    promotion_patch: PATCH_DICT_TYPE
    # human-readable change summary, used in the commit message
    promotion_message: str
    # patch file path (relative to the repo)
    patch_file: str = "conf/cpc-patch.yaml"
1532+
1533+
@activity.defn
async def promote_images(params: PromoteImagesParams) -> None:
    """Splice the promotion patch into the repo's patch file and commit.

    Preserves the leading comment block of the file, replaces the
    managed stream sections (PATCH_KEYS values) with the promotion patch,
    and keeps every other top-level key untouched.
    """
    now = current_time().split()[0]
    patch_file = params.image_repo.directory.joinpath(params.patch_file)

    with params.image_repo.checkout_and_commit(
        message=f"promotion workflow {now}\n{params.promotion_message}",
        branch=params.image_repo.main,
        add_files=str(patch_file),
    ):
        with patch_file.open() as f:
            lines = f.readlines()

        # maintain the leading comments in the patch
        comments = []
        for line in lines:
            if line[0] not in ["#", "\n"]:
                break
            comments.append(line)

        # parse the whole document from the text we already read —
        # calling safe_load on the handle after readlines() would read an
        # exhausted file and return None
        current_patch = yaml.safe_load("".join(lines)) or {}

        # splice the new patch into the old: drop the managed sections
        # (the PATCH_KEYS *values* are the file's top-level keys) and let
        # the promotion patch supply them
        patch = {
            key: value
            for key, value in current_patch.items()
            if key not in PATCH_KEYS.values()
        } | params.promotion_patch

        with patch_file.open("w") as f:
            f.writelines(comments)
            yaml.safe_dump(patch, f)
1561+
1562+
@workflow.defn
class TestPromotionImages:
    """Run the system-test suite against each requested candidate image
    and, when anything changed, commit an updated promotion patch to the
    image repo."""

    @workflow.run
    async def run(self, params: TestPromotionImagesParams) -> str:
        """Return the promotion summary, or a no-op message."""
        # jenkins jobs used to generate the test config and run the tests
        config_job = "maas-system-tests-config-generator"
        tester_job = "maas-system-tests-executor"

        # clone the image repo
        image_repo = await workflow.execute_activity(
            fetch_git_repo,
            FetchGitRepoParams(
                params.image_repo_url,
                params.image_repo_location,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.request_build_timeout),
        )

        # fetch the mapping between image release and release name
        release_names = await workflow.execute_activity(
            determine_release_names,
            DetermineReleaseNamesParams(
                image_repo=image_repo,
                packer_file=params.packer_file,
                ubuntu_file=params.ubuntu_file,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.read_file_timeout),
        )

        # fetch the promotion changes
        meph_changes = await workflow.execute_activity(
            fetch_meph_changes,
            FetchMephChangesParams(
                image_repo=image_repo,
                image_stream=params.image_stream,
                patch_file=params.patch_file,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.read_file_timeout),
        )

        # determine all of the arches to be tested for each image
        image_arch_mapping = await workflow.execute_activity(
            determine_arches_for_images,
            DetermineArchesForImagesParams(
                images=params.images,
                release_names=release_names,
                meph_changes=meph_changes.get("new", {}),
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.read_file_timeout),
        )

        # request a test for each image
        image_test_status: dict[str, bool] = {}
        for image, arches in image_arch_mapping.items():
            image_job_details = await workflow.execute_child_workflow(
                JenkinsTesterWorkflow,
                JenkinsTesterWorkflowParams(
                    job_name=config_job,
                    job_parameters={
                        "CANDIDATE_IMAGE_NAME": image,
                        "IMAGE_STREAM_URL": params.image_stream,
                        "SYSTEMTESTS_GIT_REPO": params.system_test_repo,
                        "SYSTEMTESTS_GIT_BRANCH": params.system_test_branch,
                        "MAAS_GIT_REPO": params.maas_git_repo,
                        "MAAS_GIT_BRANCH": params.maas_git_branch,
                        "MAAS_SNAP_CHANNEL": params.maas_snap_channel,
                        "TEST_DB_SNAP_CHANNEL": params.maas_snap_channel,
                        "ENABLE_VAULT": False,
                        "REPORT_MATTERMOST": not params.silent_messages,
                        "GEN_CONFIG_ARGS": " ".join(
                            f"--architecture {arch}" for arch in arches
                        ),
                    },
                    sub_job_names=[tester_job],
                    # jenkins details
                    jenkins_url=params.jenkins_url,
                    jenkins_user=params.jenkins_user,
                    jenkins_pass=params.jenkins_pass,
                    # retries
                    max_retry_attempts=params.max_retry_attempts,
                    heartbeat_delay=params.heartbeat_delay,
                    # timeouts
                    default_timeout=params.default_timeout,
                    login_timeout=params.login_timeout,
                    read_file_timeout=params.read_file_timeout,
                    start_build_timeout=params.start_build_timeout,
                    fetch_results_timeout=params.fetch_results_timeout,
                    request_build_timeout=params.request_build_timeout,
                    complete_build_timeout=params.complete_build_timeout,
                ),
                task_queue=task_queue,
                id=f"Testing:{image}",
            )

            # store the result of the image test
            image_test_status[image] = image_job_details.get(tester_job, {}).get(
                "passed", False
            )

        # generate the new patch file for the promotion
        promotion_patch = await workflow.execute_activity(
            generate_promotion_patch,
            GeneratePromotionPatchParams(
                image_test_status=image_test_status,
                release_names=release_names,
                old_patch=meph_changes.get("old", {}),
                new_patch=meph_changes.get("new", {}),
                patch_file=params.patch_file,
                include_unreleased=params.include_unreleased,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.fetch_results_timeout),
        )

        # generate the promotion summary
        promotion_message = await workflow.execute_activity(
            generate_promotion_message,
            GeneratePromotionMessageParams(
                old_patch=meph_changes.get("old", {}),
                new_patch=promotion_patch,
                release_names=release_names,
            ),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
            start_to_close_timeout=params.to_delta(params.fetch_results_timeout),
        )

        # promote if something changes
        if promotion_message:
            await workflow.execute_activity(
                promote_images,
                PromoteImagesParams(
                    image_repo=image_repo,
                    promotion_patch=promotion_patch,
                    promotion_message=promotion_message,
                    patch_file=params.patch_file,
                ),
                retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
                start_to_close_timeout=params.to_delta(params.fetch_results_timeout),
            )

        return promotion_message or "No changes made to repo."
1707+
1708+
# worker registration lists: the jenkins/github building blocks plus this
# module's promotion activities and workflow
activities = (
    jenkins_activities
    + github_activities
    + [
        fetch_git_repo,
        determine_release_names,
        fetch_meph_changes,
        determine_arches_for_images,
        generate_promotion_patch,
        generate_promotion_message,
        promote_images,
    ]
)
workflows = jenkins_workflows + github_workflows + [TestPromotionImages]

Subscribers

People subscribed via source and target branches

to all changes: