Merge ~maas-committers/maas-ci/+git/system-tests:MAASENG-1717-Automated-Image-Testing-feature-branch into ~maas-committers/maas-ci/+git/system-tests:master
- Git
- lp:~maas-committers/maas-ci/+git/system-tests
- MAASENG-1717-Automated-Image-Testing-feature-branch
- Merge into master
Status: | Merged |
---|---|
Approved by: | Alexsander de Souza |
Approved revision: | 02b5fbe61ed5bfe5bffa33775938b9af25486261 |
Merge reported by: | MAAS Lander |
Merged at revision: | not available |
Proposed branch: | ~maas-committers/maas-ci/+git/system-tests:MAASENG-1717-Automated-Image-Testing-feature-branch |
Merge into: | ~maas-committers/maas-ci/+git/system-tests:master |
Diff against target: |
2522 lines (+1936/-71) 26 files modified
.gitignore (+7/-7) image_mapping.yaml.sample (+17/-17) setup.py (+2/-0) systemtests/api.py (+36/-0) systemtests/conftest.py (+3/-1) systemtests/fixtures.py (+4/-1) systemtests/git_build.py (+14/-0) systemtests/image_builder/test_packer.py (+7/-4) systemtests/image_config.py (+2/-2) systemtests/packer.py (+23/-6) systemtests/state.py (+2/-3) systemtests/tests_per_machine/test_machine.py (+41/-14) systemtests/utils.py (+26/-6) temporal/README.md (+88/-0) temporal/build_results.py (+395/-0) temporal/common_tasks.py (+293/-0) temporal/e2e_worker.py (+10/-0) temporal/e2e_workflow.py (+206/-0) temporal/image_building_worker.py (+10/-0) temporal/image_building_workflow.py (+165/-0) temporal/image_reporting_worker.py (+10/-0) temporal/image_reporting_workflow.py (+450/-0) temporal/image_testing_worker.py (+10/-0) temporal/image_testing_workflow.py (+100/-0) tox.ini (+6/-5) utils/gen_config.py (+9/-5) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
MAAS Lander | Approve | ||
Jack Lloyd-Walters | Approve | ||
Review via email:
|
Commit message
automated image testing
adds the capabilities of:
- building custom images using packer-maas
- testing the deployment of custom images
includes Temporal workflows to build, test and report the results
Co-authored-by: Jack Lloyd-Walters <email address hidden>
Description of the change
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
MAAS Lander (maas-lander) wrote : | # |
UNIT TESTS
-b MAASENG-
STATUS: SUCCESS
COMMIT: db3f8c2f2a2aff7
- 02b5fbe... by Jack Lloyd-Walters
-
rebase changes and merge again
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
MAAS Lander (maas-lander) wrote : | # |
UNIT TESTS
-b MAASENG-
STATUS: SUCCESS
COMMIT: 02b5fbe61ed5bfe
Preview Diff
1 | diff --git a/.gitignore b/.gitignore | |||
2 | index e71b819..3ed1283 100644 | |||
3 | --- a/.gitignore | |||
4 | +++ b/.gitignore | |||
5 | @@ -1,16 +1,16 @@ | |||
8 | 1 | *.egg-info | 1 | __pycache__ |
7 | 2 | .vscode | ||
9 | 3 | .idea | 2 | .idea |
10 | 4 | .tox | ||
11 | 5 | .mypy_cache | 3 | .mypy_cache |
12 | 4 | .tox | ||
13 | 5 | .vscode | ||
14 | 6 | *.egg-info | ||
15 | 7 | base_config.yaml | ||
16 | 8 | build-*.log | ||
17 | 6 | build/ | 9 | build/ |
18 | 7 | __pycache__ | ||
19 | 8 | config.yaml | 10 | config.yaml |
20 | 9 | credentials.yaml | 11 | credentials.yaml |
21 | 10 | base_config.yaml | ||
22 | 11 | image_mapping.yaml | 12 | image_mapping.yaml |
23 | 13 | images/ | ||
24 | 12 | junit*.xml | 14 | junit*.xml |
25 | 13 | sosreport | 15 | sosreport |
26 | 14 | systemtests*.log | 16 | systemtests*.log |
27 | 15 | images/ | ||
28 | 16 | build/ | ||
29 | diff --git a/image_mapping.yaml.sample b/image_mapping.yaml.sample | |||
30 | index d23a1fc..72b2c35 100644 | |||
31 | --- a/image_mapping.yaml.sample | |||
32 | +++ b/image_mapping.yaml.sample | |||
33 | @@ -5,7 +5,7 @@ | |||
34 | 5 | # An example of a mapping is: | 5 | # An example of a mapping is: |
35 | 6 | # images: | 6 | # images: |
36 | 7 | # $IMAGE_NAME: | 7 | # $IMAGE_NAME: |
38 | 8 | # url: $IMAGE_URL | 8 | # filename: $IMAGE_FILENAME |
39 | 9 | # filetype: $IMAGE_FILETYPE | 9 | # filetype: $IMAGE_FILETYPE |
40 | 10 | # architecture: $IMAGE_ARCH | 10 | # architecture: $IMAGE_ARCH |
41 | 11 | # osystem: $IMAGE_OSYSTEM | 11 | # osystem: $IMAGE_OSYSTEM |
42 | @@ -17,7 +17,7 @@ | |||
43 | 17 | 17 | ||
44 | 18 | images: | 18 | images: |
45 | 19 | centos7: | 19 | centos7: |
47 | 20 | url: centos7.tar.gz | 20 | filename: centos7.tar.gz |
48 | 21 | filetype: tgz | 21 | filetype: tgz |
49 | 22 | architecture: amd64/generic | 22 | architecture: amd64/generic |
50 | 23 | osystem: centos | 23 | osystem: centos |
51 | @@ -25,7 +25,7 @@ images: | |||
52 | 25 | packer_template: centos7 | 25 | packer_template: centos7 |
53 | 26 | ssh_username: centos | 26 | ssh_username: centos |
54 | 27 | centos8: | 27 | centos8: |
56 | 28 | url: centos8.tar.gz | 28 | filename: centos8.tar.gz |
57 | 29 | filetype: tgz | 29 | filetype: tgz |
58 | 30 | architecture: amd64/generic | 30 | architecture: amd64/generic |
59 | 31 | osystem: centos | 31 | osystem: centos |
60 | @@ -33,7 +33,7 @@ images: | |||
61 | 33 | packer_template: centos8 | 33 | packer_template: centos8 |
62 | 34 | ssh_username: centos | 34 | ssh_username: centos |
63 | 35 | centos8-stream: | 35 | centos8-stream: |
65 | 36 | url: centos8-stream.tar.gz | 36 | filename: centos8-stream.tar.gz |
66 | 37 | filetype: tgz | 37 | filetype: tgz |
67 | 38 | architecture: amd64/generic | 38 | architecture: amd64/generic |
68 | 39 | osystem: centos | 39 | osystem: centos |
69 | @@ -41,7 +41,7 @@ images: | |||
70 | 41 | packer_template: centos8-stream | 41 | packer_template: centos8-stream |
71 | 42 | ssh_username: centos | 42 | ssh_username: centos |
72 | 43 | rhel7: | 43 | rhel7: |
74 | 44 | url: rhel7.tar.gz | 44 | filename: rhel7.tar.gz |
75 | 45 | filetype: tgz | 45 | filetype: tgz |
76 | 46 | architecture: amd64/generic | 46 | architecture: amd64/generic |
77 | 47 | osystem: rhel | 47 | osystem: rhel |
78 | @@ -50,7 +50,7 @@ images: | |||
79 | 50 | source_iso: rhel-server-7.9-x86_64-dvd.iso | 50 | source_iso: rhel-server-7.9-x86_64-dvd.iso |
80 | 51 | ssh_username: cloud-user | 51 | ssh_username: cloud-user |
81 | 52 | rhel8: | 52 | rhel8: |
83 | 53 | url: rhel8.tar.gz | 53 | filename: rhel8.tar.gz |
84 | 54 | filetype: tgz | 54 | filetype: tgz |
85 | 55 | architecture: amd64/generic | 55 | architecture: amd64/generic |
86 | 56 | osystem: rhel | 56 | osystem: rhel |
87 | @@ -59,7 +59,7 @@ images: | |||
88 | 59 | source_iso: rhel-8.6-x86_64-dvd.iso | 59 | source_iso: rhel-8.6-x86_64-dvd.iso |
89 | 60 | ssh_username: cloud-user | 60 | ssh_username: cloud-user |
90 | 61 | rhel9: | 61 | rhel9: |
92 | 62 | url: rhel9.tar.gz | 62 | filename: rhel9.tar.gz |
93 | 63 | filetype: tgz | 63 | filetype: tgz |
94 | 64 | architecture: amd64/generic | 64 | architecture: amd64/generic |
95 | 65 | osystem: rhel | 65 | osystem: rhel |
96 | @@ -68,7 +68,7 @@ images: | |||
97 | 68 | source_iso: rhel-baseos-9.1-x86_64-dvd.iso | 68 | source_iso: rhel-baseos-9.1-x86_64-dvd.iso |
98 | 69 | ssh_username: cloud-user | 69 | ssh_username: cloud-user |
99 | 70 | rocky8: | 70 | rocky8: |
101 | 71 | url: rocky8.tar.gz | 71 | filename: rocky8.tar.gz |
102 | 72 | filetype: tgz | 72 | filetype: tgz |
103 | 73 | architecture: amd64/generic | 73 | architecture: amd64/generic |
104 | 74 | osystem: custom | 74 | osystem: custom |
105 | @@ -77,7 +77,7 @@ images: | |||
106 | 77 | base_image: "rhel/8" | 77 | base_image: "rhel/8" |
107 | 78 | ssh_username: cloud-user | 78 | ssh_username: cloud-user |
108 | 79 | rocky9: | 79 | rocky9: |
110 | 80 | url: rocky9.tar.gz | 80 | filename: rocky9.tar.gz |
111 | 81 | filetype: tgz | 81 | filetype: tgz |
112 | 82 | architecture: amd64/generic | 82 | architecture: amd64/generic |
113 | 83 | osystem: custom | 83 | osystem: custom |
114 | @@ -86,7 +86,7 @@ images: | |||
115 | 86 | base_image: "rhel/9" | 86 | base_image: "rhel/9" |
116 | 87 | ssh_username: cloud-user | 87 | ssh_username: cloud-user |
117 | 88 | sles12: | 88 | sles12: |
119 | 89 | url: sles12.tar.gz | 89 | filename: sles12.tar.gz |
120 | 90 | filetype: tgz | 90 | filetype: tgz |
121 | 91 | architecture: amd64/generic | 91 | architecture: amd64/generic |
122 | 92 | osystem: suse | 92 | osystem: suse |
123 | @@ -95,7 +95,7 @@ images: | |||
124 | 95 | source_iso: SLES12-SP5-JeOS.x86_64-12.5-OpenStack-Cloud-GM.qcow2 | 95 | source_iso: SLES12-SP5-JeOS.x86_64-12.5-OpenStack-Cloud-GM.qcow2 |
125 | 96 | ssh_username: sles | 96 | ssh_username: sles |
126 | 97 | sles15: | 97 | sles15: |
128 | 98 | url: sles15.tar.gz | 98 | filename: sles15.tar.gz |
129 | 99 | filetype: tgz | 99 | filetype: tgz |
130 | 100 | architecture: amd64/generic | 100 | architecture: amd64/generic |
131 | 101 | osystem: suse | 101 | osystem: suse |
132 | @@ -104,7 +104,7 @@ images: | |||
133 | 104 | source_iso: SLE-15-SP4-Full-x86_64-GM-Media1.iso | 104 | source_iso: SLE-15-SP4-Full-x86_64-GM-Media1.iso |
134 | 105 | ssh_username: sles | 105 | ssh_username: sles |
135 | 106 | esxi6: | 106 | esxi6: |
137 | 107 | url: vmware-esxi-6.dd.gz | 107 | filename: vmware-esxi-6.dd.gz |
138 | 108 | filetype: ddgz | 108 | filetype: ddgz |
139 | 109 | architecture: amd64/generic | 109 | architecture: amd64/generic |
140 | 110 | osystem: esxi | 110 | osystem: esxi |
141 | @@ -113,7 +113,7 @@ images: | |||
142 | 113 | source_iso: VMware-VMvisor-Installer-6.7.0.update03-14320388.x86_64.iso | 113 | source_iso: VMware-VMvisor-Installer-6.7.0.update03-14320388.x86_64.iso |
143 | 114 | ssh_username: root | 114 | ssh_username: root |
144 | 115 | esxi7: | 115 | esxi7: |
146 | 116 | url: vmware-esxi-7.dd.gz | 116 | filename: vmware-esxi-7.dd.gz |
147 | 117 | filetype: ddgz | 117 | filetype: ddgz |
148 | 118 | architecture: amd64/generic | 118 | architecture: amd64/generic |
149 | 119 | osystem: esxi | 119 | osystem: esxi |
150 | @@ -122,7 +122,7 @@ images: | |||
151 | 122 | source_iso: VMware-VMvisor-Installer-7.0U3g-20328353.x86_64.iso | 122 | source_iso: VMware-VMvisor-Installer-7.0U3g-20328353.x86_64.iso |
152 | 123 | ssh_username: root | 123 | ssh_username: root |
153 | 124 | esxi8: | 124 | esxi8: |
155 | 125 | url: vmware-esxi-8.dd.gz | 125 | filename: vmware-esxi-8.dd.gz |
156 | 126 | filetype: ddgz | 126 | filetype: ddgz |
157 | 127 | architecture: amd64/generic | 127 | architecture: amd64/generic |
158 | 128 | osystem: esxi | 128 | osystem: esxi |
159 | @@ -131,7 +131,7 @@ images: | |||
160 | 131 | source_iso: VMware-VMvisor-Installer-8.0b-21203435.x86_64.iso | 131 | source_iso: VMware-VMvisor-Installer-8.0b-21203435.x86_64.iso |
161 | 132 | ssh_username: root | 132 | ssh_username: root |
162 | 133 | ubuntu: | 133 | ubuntu: |
164 | 134 | url: ubuntu-cloudimg.tar.gz | 134 | filename: ubuntu-cloudimg.tar.gz |
165 | 135 | filetype: tgz | 135 | filetype: tgz |
166 | 136 | architecture: amd64/generic | 136 | architecture: amd64/generic |
167 | 137 | osystem: custom | 137 | osystem: custom |
168 | @@ -139,7 +139,7 @@ images: | |||
169 | 139 | packer_template: ubuntu | 139 | packer_template: ubuntu |
170 | 140 | packer_target: custom-cloudimg.tar.gz | 140 | packer_target: custom-cloudimg.tar.gz |
171 | 141 | ubuntu-flat: | 141 | ubuntu-flat: |
173 | 142 | url: ubuntu-flat.tar.gz | 142 | filename: ubuntu-flat.tar.gz |
174 | 143 | filetype: tgz | 143 | filetype: tgz |
175 | 144 | architecture: amd64/generic | 144 | architecture: amd64/generic |
176 | 145 | osystem: custom | 145 | osystem: custom |
177 | @@ -147,7 +147,7 @@ images: | |||
178 | 147 | packer_template: ubuntu | 147 | packer_template: ubuntu |
179 | 148 | packer_target: custom-ubuntu.tar.gz | 148 | packer_target: custom-ubuntu.tar.gz |
180 | 149 | ubuntu-lvm: | 149 | ubuntu-lvm: |
182 | 150 | url: ubuntu-lvm.tar.gz | 150 | filename: ubuntu-lvm.tar.gz |
183 | 151 | filetype: ddgz | 151 | filetype: ddgz |
184 | 152 | architecture: amd64/generic | 152 | architecture: amd64/generic |
185 | 153 | osystem: custom | 153 | osystem: custom |
186 | diff --git a/setup.py b/setup.py | |||
187 | index f6d6ae4..b6c9b32 100644 | |||
188 | --- a/setup.py | |||
189 | +++ b/setup.py | |||
190 | @@ -1,6 +1,7 @@ | |||
191 | 1 | from setuptools import find_packages, setup | 1 | from setuptools import find_packages, setup |
192 | 2 | 2 | ||
193 | 3 | install_requires = ( | 3 | install_requires = ( |
194 | 4 | 'jenkinsapi', | ||
195 | 4 | 'netaddr', | 5 | 'netaddr', |
196 | 5 | 'paramiko', | 6 | 'paramiko', |
197 | 6 | 'pytest-dependency', | 7 | 'pytest-dependency', |
198 | @@ -12,6 +13,7 @@ install_requires = ( | |||
199 | 12 | 'requests', | 13 | 'requests', |
200 | 13 | 'retry', | 14 | 'retry', |
201 | 14 | 'ruamel.yaml', | 15 | 'ruamel.yaml', |
202 | 16 | 'temporalio' | ||
203 | 15 | ) | 17 | ) |
204 | 16 | 18 | ||
205 | 17 | 19 | ||
206 | diff --git a/systemtests/api.py b/systemtests/api.py | |||
207 | index ec76b0e..dde94dd 100644 | |||
208 | --- a/systemtests/api.py | |||
209 | +++ b/systemtests/api.py | |||
210 | @@ -78,6 +78,7 @@ class BootSource(TypedDict): | |||
211 | 78 | # TODO: Expand these to TypedDict matching API response structure | 78 | # TODO: Expand these to TypedDict matching API response structure |
212 | 79 | 79 | ||
213 | 80 | Subnet = Dict[str, Any] | 80 | Subnet = Dict[str, Any] |
214 | 81 | Interface = Dict[str, Any] | ||
215 | 81 | RackController = Dict[str, Any] | 82 | RackController = Dict[str, Any] |
216 | 82 | RegionController = Dict[str, Any] | 83 | RegionController = Dict[str, Any] |
217 | 83 | IPRange = Dict[str, Any] | 84 | IPRange = Dict[str, Any] |
218 | @@ -256,6 +257,7 @@ class AuthenticatedAPIClient: | |||
219 | 256 | architecture: str, | 257 | architecture: str, |
220 | 257 | filetype: str, | 258 | filetype: str, |
221 | 258 | image_file_path: str, | 259 | image_file_path: str, |
222 | 260 | base_image: str | None = None, | ||
223 | 259 | ) -> None: | 261 | ) -> None: |
224 | 260 | cmd = [ | 262 | cmd = [ |
225 | 261 | "boot-resources", | 263 | "boot-resources", |
226 | @@ -266,6 +268,8 @@ class AuthenticatedAPIClient: | |||
227 | 266 | f"filetype={filetype}", | 268 | f"filetype={filetype}", |
228 | 267 | f"content@={image_file_path}", | 269 | f"content@={image_file_path}", |
229 | 268 | ] | 270 | ] |
230 | 271 | if base_image: | ||
231 | 272 | cmd.append(f"base_image={base_image}") | ||
232 | 269 | self.execute(cmd, json_output=False) | 273 | self.execute(cmd, json_output=False) |
233 | 270 | 274 | ||
234 | 271 | def import_boot_resources(self) -> str: | 275 | def import_boot_resources(self) -> str: |
235 | @@ -716,6 +720,38 @@ class AuthenticatedAPIClient: | |||
236 | 716 | + [f"{k}={v}" for k, v in options.items()] | 720 | + [f"{k}={v}" for k, v in options.items()] |
237 | 717 | ) | 721 | ) |
238 | 718 | 722 | ||
239 | 723 | def create_interface( | ||
240 | 724 | self, machine: Machine, network_type: str, options: dict[str, str] = {} | ||
241 | 725 | ) -> Interface: | ||
242 | 726 | """bond, bridge,""" | ||
243 | 727 | interface: Interface = self.execute( | ||
244 | 728 | ["interfaces", f"create-{network_type}", machine["system_id"]] | ||
245 | 729 | + [f"{k}={v}" for k, v in options.items()] | ||
246 | 730 | ) | ||
247 | 731 | return interface | ||
248 | 732 | |||
249 | 733 | def delete_interface(self, machine: Machine, interface: Interface) -> str: | ||
250 | 734 | result: str = self.execute( | ||
251 | 735 | ["interface", "delete", machine["systed_id"], str(interface["id"])], | ||
252 | 736 | json_output=False, | ||
253 | 737 | ) | ||
254 | 738 | return result | ||
255 | 739 | |||
256 | 740 | def read_interfaces(self, machine: Machine) -> list[Interface]: | ||
257 | 741 | result: list[Interface] = self.execute( | ||
258 | 742 | ["interfaces", "read", machine["system_id"]] | ||
259 | 743 | ) | ||
260 | 744 | return result | ||
261 | 745 | |||
262 | 746 | def update_interface( | ||
263 | 747 | self, machine: Machine, interface: Interface, options: dict[str, str] | ||
264 | 748 | ) -> Interface: | ||
265 | 749 | updated_interface: Interface = self.execute( | ||
266 | 750 | ["interface", "update", machine["system_id"], str(interface["id"])] | ||
267 | 751 | + [f"{k}={v}" for k, v in options.items()] | ||
268 | 752 | ) | ||
269 | 753 | return updated_interface | ||
270 | 754 | |||
271 | 719 | 755 | ||
272 | 720 | class QuietAuthenticatedAPIClient(AuthenticatedAPIClient): | 756 | class QuietAuthenticatedAPIClient(AuthenticatedAPIClient): |
273 | 721 | """An Authenticated API Client that is quiet.""" | 757 | """An Authenticated API Client that is quiet.""" |
274 | diff --git a/systemtests/conftest.py b/systemtests/conftest.py | |||
275 | index a069d84..6acabf7 100644 | |||
276 | --- a/systemtests/conftest.py | |||
277 | +++ b/systemtests/conftest.py | |||
278 | @@ -358,7 +358,9 @@ def pytest_generate_tests(metafunc: Metafunc) -> None: | |||
279 | 358 | metafunc.parametrize("instance_config", instance_config, ids=str, indirect=True) | 358 | metafunc.parametrize("instance_config", instance_config, ids=str, indirect=True) |
280 | 359 | 359 | ||
281 | 360 | if "image_to_test" in metafunc.fixturenames: | 360 | if "image_to_test" in metafunc.fixturenames: |
283 | 361 | if images_to_test := [image for image in generate_images(cfg) if image.url]: | 361 | if images_to_test := [ |
284 | 362 | image for image in generate_images(cfg) if image.filename | ||
285 | 363 | ]: | ||
286 | 362 | metafunc.parametrize( | 364 | metafunc.parametrize( |
287 | 363 | "image_to_test", images_to_test, ids=str, indirect=True | 365 | "image_to_test", images_to_test, ids=str, indirect=True |
288 | 364 | ) | 366 | ) |
289 | diff --git a/systemtests/fixtures.py b/systemtests/fixtures.py | |||
290 | index 7521c7d..e53ba45 100644 | |||
291 | --- a/systemtests/fixtures.py | |||
292 | +++ b/systemtests/fixtures.py | |||
293 | @@ -763,7 +763,9 @@ def dns_tester( | |||
294 | 763 | 763 | ||
295 | 764 | 764 | ||
296 | 765 | @pytest.fixture(scope="session") | 765 | @pytest.fixture(scope="session") |
298 | 766 | def packer_main(config: dict[str, Any]) -> Optional[Iterator[PackerMain]]: | 766 | def packer_main( |
299 | 767 | request: pytest.FixtureRequest, config: dict[str, Any] | ||
300 | 768 | ) -> Optional[Iterator[PackerMain]]: | ||
301 | 767 | """Set up a new LXD container with Packer installed.""" | 769 | """Set up a new LXD container with Packer installed.""" |
302 | 768 | packer_config = config.get("packer-maas", {}) | 770 | packer_config = config.get("packer-maas", {}) |
303 | 769 | repo = packer_config.get("git-repo") | 771 | repo = packer_config.get("git-repo") |
304 | @@ -787,6 +789,7 @@ def packer_main(config: dict[str, Any]) -> Optional[Iterator[PackerMain]]: | |||
305 | 787 | proxy_env=proxy_env, | 789 | proxy_env=proxy_env, |
306 | 788 | file_store=config.get("file-store", {}), | 790 | file_store=config.get("file-store", {}), |
307 | 789 | debug=packer_config.get("verbosity", ""), | 791 | debug=packer_config.get("verbosity", ""), |
308 | 792 | root_path=request.config.rootpath, | ||
309 | 790 | ) | 793 | ) |
310 | 791 | main.setup() | 794 | main.setup() |
311 | 792 | yield main | 795 | yield main |
312 | diff --git a/systemtests/git_build.py b/systemtests/git_build.py | |||
313 | index 342fa0c..3803322 100644 | |||
314 | --- a/systemtests/git_build.py | |||
315 | +++ b/systemtests/git_build.py | |||
316 | @@ -5,6 +5,7 @@ from contextlib import closing | |||
317 | 5 | from functools import partial | 5 | from functools import partial |
318 | 6 | from pathlib import Path | 6 | from pathlib import Path |
319 | 7 | from subprocess import CalledProcessError | 7 | from subprocess import CalledProcessError |
320 | 8 | from textwrap import dedent | ||
321 | 8 | from timeit import Timer | 9 | from timeit import Timer |
322 | 9 | from typing import TYPE_CHECKING, Any, Callable | 10 | from typing import TYPE_CHECKING, Any, Callable |
323 | 10 | from urllib.request import urlopen | 11 | from urllib.request import urlopen |
324 | @@ -33,6 +34,7 @@ class GitBuild: | |||
325 | 33 | self._repos = repo | 34 | self._repos = repo |
326 | 34 | self._branch = branch | 35 | self._branch = branch |
327 | 35 | self._clone_path = clone_path | 36 | self._clone_path = clone_path |
328 | 37 | self._set_apt_proxy() | ||
329 | 36 | 38 | ||
330 | 37 | @property | 39 | @property |
331 | 38 | def clone_path(self) -> str: | 40 | def clone_path(self) -> str: |
332 | @@ -46,6 +48,18 @@ class GitBuild: | |||
333 | 46 | def logger(self, logger: Logger) -> None: | 48 | def logger(self, logger: Logger) -> None: |
334 | 47 | self._instance.logger = logger | 49 | self._instance.logger = logger |
335 | 48 | 50 | ||
336 | 51 | def _set_apt_proxy(self) -> None: | ||
337 | 52 | if proxy := self._env.get("http_proxy"): | ||
338 | 53 | conf = self._instance.files["/etc/apt/apt.conf.d/99-proxy.conf"] | ||
339 | 54 | conf.write( | ||
340 | 55 | dedent( | ||
341 | 56 | f"""\ | ||
342 | 57 | Acquire::http::Proxy "{proxy}"; | ||
343 | 58 | Acquire::https::Proxy "{proxy}"; | ||
344 | 59 | """ | ||
345 | 60 | ) | ||
346 | 61 | ) | ||
347 | 62 | |||
348 | 49 | def apt_update(self) -> None: | 63 | def apt_update(self) -> None: |
349 | 50 | """Update APT indices, fix broken dpkg.""" | 64 | """Update APT indices, fix broken dpkg.""" |
350 | 51 | self._instance.quietly_execute( | 65 | self._instance.quietly_execute( |
351 | diff --git a/systemtests/image_builder/test_packer.py b/systemtests/image_builder/test_packer.py | |||
352 | index 3bf5836..3619ff2 100644 | |||
353 | --- a/systemtests/image_builder/test_packer.py | |||
354 | +++ b/systemtests/image_builder/test_packer.py | |||
355 | @@ -19,7 +19,10 @@ class TestPackerMAASConfig: | |||
356 | 19 | assert readme.exists(), f"README.md not found in {packer_main.clone_path}" | 19 | assert readme.exists(), f"README.md not found in {packer_main.clone_path}" |
357 | 20 | 20 | ||
358 | 21 | def test_build_image( | 21 | def test_build_image( |
360 | 22 | self, testlog: Logger, packer_main: PackerMain, image_to_build: TestableImage | 22 | self, |
361 | 23 | testlog: Logger, | ||
362 | 24 | packer_main: PackerMain, | ||
363 | 25 | image_to_build: TestableImage, | ||
364 | 23 | ) -> None: | 26 | ) -> None: |
365 | 24 | # tell mypy we have this under control | 27 | # tell mypy we have this under control |
366 | 25 | assert image_to_build.packer_template is not None | 28 | assert image_to_build.packer_template is not None |
367 | @@ -28,12 +31,12 @@ class TestPackerMAASConfig: | |||
368 | 28 | image = packer_main.build_image( | 31 | image = packer_main.build_image( |
369 | 29 | image_to_build.packer_template, | 32 | image_to_build.packer_template, |
370 | 30 | image_to_build.packer_target, | 33 | image_to_build.packer_target, |
372 | 31 | image_to_build.filename, | 34 | image_to_build.packer_filename, |
373 | 32 | image_to_build.source_iso, | 35 | image_to_build.source_iso, |
374 | 33 | ) | 36 | ) |
375 | 34 | assert image is not None | 37 | assert image is not None |
376 | 35 | img_file = packer_main._instance.files[image] | 38 | img_file = packer_main._instance.files[image] |
377 | 36 | assert img_file.exists(), f"failed to produce the expected image ({img_file})" | 39 | assert img_file.exists(), f"failed to produce the expected image ({img_file})" |
378 | 37 | 40 | ||
381 | 38 | if image_to_build.url is not None: | 41 | if image_to_build.filename: |
382 | 39 | packer_main.upload_image(img_file, image_to_build.url) | 42 | packer_main.upload_image(img_file, image_to_build.filename) |
383 | diff --git a/systemtests/image_config.py b/systemtests/image_config.py | |||
384 | index 4f7a0e0..d92bff2 100644 | |||
385 | --- a/systemtests/image_config.py | |||
386 | +++ b/systemtests/image_config.py | |||
387 | @@ -21,7 +21,7 @@ EXTENSION_MAP = { | |||
388 | 21 | @dataclass(frozen=True) | 21 | @dataclass(frozen=True) |
389 | 22 | class TestableImage: | 22 | class TestableImage: |
390 | 23 | name: str | 23 | name: str |
392 | 24 | url: str | None | 24 | filename: str |
393 | 25 | filetype: str = "targz" | 25 | filetype: str = "targz" |
394 | 26 | architecture: str = "amd64/generic" | 26 | architecture: str = "amd64/generic" |
395 | 27 | osystem: str = "ubuntu" | 27 | osystem: str = "ubuntu" |
396 | @@ -48,7 +48,7 @@ class TestableImage: | |||
397 | 48 | ) | 48 | ) |
398 | 49 | 49 | ||
399 | 50 | @property | 50 | @property |
401 | 51 | def filename(self) -> str: | 51 | def packer_filename(self) -> str: |
402 | 52 | ext = EXTENSION_MAP[self.filetype] | 52 | ext = EXTENSION_MAP[self.filetype] |
403 | 53 | if self.packer_template is None: | 53 | if self.packer_template is None: |
404 | 54 | return f"{self.name}.{ext}" | 54 | return f"{self.name}.{ext}" |
405 | diff --git a/systemtests/packer.py b/systemtests/packer.py | |||
406 | index 693d6a4..03beb5e 100644 | |||
407 | --- a/systemtests/packer.py | |||
408 | +++ b/systemtests/packer.py | |||
409 | @@ -29,6 +29,7 @@ class PackerMain(GitBuild): | |||
410 | 29 | file_store: dict[str, Any], | 29 | file_store: dict[str, Any], |
411 | 30 | proxy_env: dict[str, str] | None, | 30 | proxy_env: dict[str, str] | None, |
412 | 31 | debug: str | None, | 31 | debug: str | None, |
413 | 32 | root_path: Path, | ||
414 | 32 | ) -> None: | 33 | ) -> None: |
415 | 33 | super().__init__( | 34 | super().__init__( |
416 | 34 | packer_repo, | 35 | packer_repo, |
417 | @@ -40,8 +41,14 @@ class PackerMain(GitBuild): | |||
418 | 40 | ) | 41 | ) |
419 | 41 | self.default_debug = debug or "" | 42 | self.default_debug = debug or "" |
420 | 42 | self.file_store = file_store | 43 | self.file_store = file_store |
421 | 44 | self.root_path = root_path | ||
422 | 43 | 45 | ||
423 | 44 | def setup(self) -> None: | 46 | def setup(self) -> None: |
424 | 47 | if "http_proxy" in self._env: | ||
425 | 48 | sudoers = self._instance.files["/etc/sudoers.d/50-preserve-proxy"] | ||
426 | 49 | sudoers.write( | ||
427 | 50 | 'Defaults env_keep += "ftp_proxy http_proxy https_proxy no_proxy"' | ||
428 | 51 | ) | ||
429 | 45 | self.apt_source_add( | 52 | self.apt_source_add( |
430 | 46 | "packer", | 53 | "packer", |
431 | 47 | "https://apt.releases.hashicorp.com", | 54 | "https://apt.releases.hashicorp.com", |
432 | @@ -101,8 +108,14 @@ class PackerMain(GitBuild): | |||
433 | 101 | source_iso: str | None, | 108 | source_iso: str | None, |
434 | 102 | ) -> str | None: | 109 | ) -> str | None: |
435 | 103 | env = self._env.copy() | 110 | env = self._env.copy() |
436 | 111 | env["SUDO"] = "sudo -E" | ||
437 | 112 | log_file = f"build-{packer_template}-{packer_target or 'all'}.log" | ||
438 | 113 | env["PACKER_LOG"] = "on" | ||
439 | 114 | env["PACKER_LOG_PATH"] = f"{self.clone_path}/{log_file}" | ||
440 | 104 | if source_iso: | 115 | if source_iso: |
441 | 105 | env["ISO"] = self.download_image(source_iso) | 116 | env["ISO"] = self.download_image(source_iso) |
442 | 117 | if proxy := env.get("https_proxy"): | ||
443 | 118 | env["KS_PROXY"] = f'--proxy="{proxy}"' | ||
444 | 106 | cmd: list[str] = [ | 119 | cmd: list[str] = [ |
445 | 107 | "eatmydata", | 120 | "eatmydata", |
446 | 108 | "make", | 121 | "make", |
447 | @@ -110,12 +123,16 @@ class PackerMain(GitBuild): | |||
448 | 110 | f"{self.clone_path}/{packer_template}", | 123 | f"{self.clone_path}/{packer_template}", |
449 | 111 | f"{packer_target or 'all'}", | 124 | f"{packer_target or 'all'}", |
450 | 112 | ] | 125 | ] |
457 | 113 | runtime = self.timed( | 126 | try: |
458 | 114 | self._instance.execute, | 127 | runtime = self.timed( |
459 | 115 | command=cmd, | 128 | self._instance.execute, |
460 | 116 | environment=env, | 129 | command=cmd, |
461 | 117 | ) | 130 | environment=env, |
462 | 118 | self.logger.info(f"Image built in {runtime:.2f}s") | 131 | ) |
463 | 132 | self.logger.info(f"Image built in {runtime:.2f}s") | ||
464 | 133 | finally: | ||
465 | 134 | build_log = self._instance.files[env["PACKER_LOG_PATH"]] | ||
466 | 135 | build_log.pull(str(self.root_path / log_file)) | ||
467 | 119 | return f"{self.clone_path}/{packer_template}/{img_filename}" | 136 | return f"{self.clone_path}/{packer_template}/{img_filename}" |
468 | 120 | 137 | ||
469 | 121 | def __repr__(self) -> str: | 138 | def __repr__(self) -> str: |
470 | diff --git a/systemtests/state.py b/systemtests/state.py | |||
471 | index 36b89ba..7ca5be8 100644 | |||
472 | --- a/systemtests/state.py | |||
473 | +++ b/systemtests/state.py | |||
474 | @@ -10,9 +10,8 @@ from urllib.parse import urljoin, urlparse | |||
475 | 10 | import pytest | 10 | import pytest |
476 | 11 | from retry import retry | 11 | from retry import retry |
477 | 12 | 12 | ||
481 | 13 | from systemtests.image_config import TestableImage | 13 | from .image_config import TestableImage |
482 | 14 | from systemtests.packer import UnknowStorageBackendError | 14 | from .packer import UnknowStorageBackendError |
480 | 15 | |||
483 | 16 | from .region import get_rack_controllers | 15 | from .region import get_rack_controllers |
484 | 17 | from .utils import waits_for_event_after | 16 | from .utils import waits_for_event_after |
485 | 18 | 17 | ||
486 | diff --git a/systemtests/tests_per_machine/test_machine.py b/systemtests/tests_per_machine/test_machine.py | |||
487 | index c4995b3..7c11328 100644 | |||
488 | --- a/systemtests/tests_per_machine/test_machine.py | |||
489 | +++ b/systemtests/tests_per_machine/test_machine.py | |||
490 | @@ -11,6 +11,7 @@ from ..utils import ( | |||
491 | 11 | assert_machine_in_machines, | 11 | assert_machine_in_machines, |
492 | 12 | assert_machine_not_in_machines, | 12 | assert_machine_not_in_machines, |
493 | 13 | release_and_redeploy_machine, | 13 | release_and_redeploy_machine, |
494 | 14 | report_feature_tests, | ||
495 | 14 | ssh_execute_command, | 15 | ssh_execute_command, |
496 | 15 | wait_for_machine, | 16 | wait_for_machine, |
497 | 16 | wait_for_machine_to_power_off, | 17 | wait_for_machine_to_power_off, |
498 | @@ -27,7 +28,7 @@ if TYPE_CHECKING: | |||
499 | 27 | from ..machine_config import MachineConfig | 28 | from ..machine_config import MachineConfig |
500 | 28 | 29 | ||
501 | 29 | 30 | ||
503 | 30 | @test_steps("enlist", "metadata", "commission", "deploy", "rescue") | 31 | @test_steps("enlist", "metadata", "commission", "deploy", "test_image", "rescue") |
504 | 31 | def test_full_circle( | 32 | def test_full_circle( |
505 | 32 | maas_api_client: AuthenticatedAPIClient, | 33 | maas_api_client: AuthenticatedAPIClient, |
506 | 33 | machine_config: MachineConfig, | 34 | machine_config: MachineConfig, |
507 | @@ -147,21 +148,47 @@ def test_full_circle( | |||
508 | 147 | yield | 148 | yield |
509 | 148 | 149 | ||
510 | 149 | if image_to_test: | 150 | if image_to_test: |
516 | 150 | testable_layouts = ["flat", "lvm", "bcache"] | 151 | testable_configs: dict[str, dict[str, str]] = { |
517 | 151 | for storage_layout in testable_layouts: | 152 | "bond": {"parents": "1"}, |
518 | 152 | testlog.info(f"Testing storage layout: {storage_layout}") | 153 | "bridge": {}, |
519 | 153 | passed = False | 154 | } |
520 | 154 | try: | 155 | for network_config, network_options in testable_configs.items(): |
521 | 156 | with report_feature_tests(testlog, f"network layout {network_config}"): | ||
522 | 155 | with release_and_redeploy_machine( | 157 | with release_and_redeploy_machine( |
527 | 156 | maas_api_client, machine, timeout=TIMEOUT | 158 | maas_api_client, |
528 | 157 | ) as redeployed: | 159 | machine, |
529 | 158 | maas_api_client.create_storage_layout( | 160 | osystem=deploy_osystem, |
530 | 159 | redeployed, storage_layout, {} | 161 | oseries=deploy_oseries, |
531 | 162 | timeout=TIMEOUT, | ||
532 | 163 | ): | ||
533 | 164 | interface = maas_api_client.create_interface( | ||
534 | 165 | machine, network_config, network_options | ||
535 | 160 | ) | 166 | ) |
540 | 161 | passed = True | 167 | assert interface in maas_api_client.read_interfaces(machine) |
541 | 162 | finally: | 168 | with release_and_redeploy_machine( |
542 | 163 | status = "PASSED" if passed else "FAILED" | 169 | maas_api_client, |
543 | 164 | testlog.info(f"Storage layout: {storage_layout} {status}") | 170 | machine, |
544 | 171 | osystem=deploy_osystem, | ||
545 | 172 | oseries=deploy_oseries, | ||
546 | 173 | timeout=TIMEOUT, | ||
547 | 174 | ): | ||
548 | 175 | maas_api_client.delete_interface(machine, interface) | ||
549 | 176 | assert interface not in maas_api_client.read_interfaces(machine) | ||
550 | 177 | testable_layouts = ["flat", "lvm", "bcache"] | ||
551 | 178 | for storage_layout in testable_layouts: | ||
552 | 179 | with report_feature_tests( | ||
553 | 180 | testlog, f"storage layout {storage_layout}" | ||
554 | 181 | ), release_and_redeploy_machine( | ||
555 | 182 | maas_api_client, | ||
556 | 183 | machine, | ||
557 | 184 | osystem=deploy_osystem, | ||
558 | 185 | oseries=deploy_oseries, | ||
559 | 186 | timeout=TIMEOUT, | ||
560 | 187 | ): | ||
561 | 188 | # release the machine, add a new storage layout, | ||
562 | 189 | # assert the machine can redeploy | ||
563 | 190 | maas_api_client.create_storage_layout(machine, storage_layout, {}) | ||
564 | 191 | yield | ||
565 | 165 | 192 | ||
566 | 166 | if deploy_osystem == "windows" or ( | 193 | if deploy_osystem == "windows" or ( |
567 | 167 | deploy_osystem == "custom" and deploy_oseries.startswith("esxi") | 194 | deploy_osystem == "custom" and deploy_oseries.startswith("esxi") |
568 | diff --git a/systemtests/utils.py b/systemtests/utils.py | |||
569 | index 66ebc8b..b412813 100644 | |||
570 | --- a/systemtests/utils.py | |||
571 | +++ b/systemtests/utils.py | |||
572 | @@ -9,6 +9,7 @@ import time | |||
573 | 9 | from contextlib import contextmanager | 9 | from contextlib import contextmanager |
574 | 10 | from dataclasses import dataclass | 10 | from dataclasses import dataclass |
575 | 11 | from logging import Logger | 11 | from logging import Logger |
576 | 12 | from subprocess import CalledProcessError | ||
577 | 12 | from typing import Iterator, Optional, TypedDict, Union | 13 | from typing import Iterator, Optional, TypedDict, Union |
578 | 13 | 14 | ||
579 | 14 | import paramiko | 15 | import paramiko |
580 | @@ -300,32 +301,51 @@ def assert_machine_not_in_machines( | |||
581 | 300 | def release_and_redeploy_machine( | 301 | def release_and_redeploy_machine( |
582 | 301 | maas_api_client: api.AuthenticatedAPIClient, | 302 | maas_api_client: api.AuthenticatedAPIClient, |
583 | 302 | machine: api.Machine, | 303 | machine: api.Machine, |
584 | 304 | osystem: str, | ||
585 | 305 | oseries: str | None = None, | ||
586 | 303 | timeout: int = 60 * 40, | 306 | timeout: int = 60 * 40, |
587 | 304 | ) -> Iterator[api.Machine]: | 307 | ) -> Iterator[api.Machine]: |
588 | 305 | name, osystem = machine["name"], machine["osystem"] | ||
589 | 306 | try: | 308 | try: |
590 | 307 | maas_api_client.release_machine(machine) | 309 | maas_api_client.release_machine(machine) |
592 | 308 | wait_for_machine( | 310 | yield wait_for_machine( |
593 | 309 | maas_api_client, | 311 | maas_api_client, |
594 | 310 | machine, | 312 | machine, |
595 | 311 | status="Ready", | 313 | status="Ready", |
596 | 312 | abort_status="Releasing failed", | 314 | abort_status="Releasing failed", |
597 | 313 | machine_id=name, | ||
598 | 314 | timeout=timeout, | 315 | timeout=timeout, |
599 | 315 | ) | 316 | ) |
600 | 316 | yield machine | ||
601 | 317 | finally: | 317 | finally: |
603 | 318 | maas_api_client.deploy_machine(machine, osystem=osystem) | 318 | maas_api_client.deploy_machine( |
604 | 319 | machine, osystem=osystem, distro_series=oseries or osystem | ||
605 | 320 | ) | ||
606 | 319 | wait_for_machine( | 321 | wait_for_machine( |
607 | 320 | maas_api_client, | 322 | maas_api_client, |
608 | 321 | machine, | 323 | machine, |
609 | 322 | status="Deployed", | 324 | status="Deployed", |
610 | 323 | abort_status="Failed deployment", | 325 | abort_status="Failed deployment", |
611 | 324 | machine_id=name, | ||
612 | 325 | timeout=timeout, | 326 | timeout=timeout, |
613 | 326 | ) | 327 | ) |
614 | 327 | 328 | ||
615 | 328 | 329 | ||
@contextmanager
def report_feature_tests(testlog: Logger, feature_name: str) -> Iterator[Logger]:
    """Context manager that reports a feature test's outcome.

    Yields a child logger named after the feature and always logs a final
    pass/fail line, irrespective of errors: exceptions raised in the body
    are logged (CalledProcessError with its stderr) and suppressed, so one
    failing feature does not abort the remaining feature tests.
    """
    feature_status = False
    feature_logger = testlog.getChild(feature_name)
    feature_logger.info("Starting test")
    try:
        yield feature_logger
        feature_status = True
    except CalledProcessError as exc:
        # Include the failed command's stderr in the report.
        feature_logger.exception(exc.stderr)
    except Exception as e:
        feature_logger.exception(e)
    finally:
        feature_logger.info("PASSED" if feature_status else "FAILED")
633 | 347 | |||
634 | 348 | |||
635 | 329 | @dataclass | 349 | @dataclass |
636 | 330 | class IPRange: | 350 | class IPRange: |
637 | 331 | start: ipaddress.IPv4Address | 351 | start: ipaddress.IPv4Address |
638 | diff --git a/temporal/README.md b/temporal/README.md | |||
639 | 332 | new file mode 100644 | 352 | new file mode 100644 |
640 | index 0000000..3817166 | |||
641 | --- /dev/null | |||
642 | +++ b/temporal/README.md | |||
643 | @@ -0,0 +1,88 @@ | |||
644 | 1 | # Temporal workflows for OS Image Testing | ||
645 | 2 | |||
646 | 3 | Here be dragons. | ||
647 | 4 | (Well, maybe not quite) | ||
648 | 5 | |||
649 | 6 | Contained are the set of scripts required to take a supported image in the [PackerMAAS](https://github.com/canonical/packer-maas/tree/main) repository, build and test its capabilities on a given MAAS version, and report the results of those tests to a [results area](https://github.com/maas/MAAS-Image-Results) ready to be consumed by documentation. | ||
650 | 7 | |||
651 | 8 | ## Workflows | ||
652 | 9 | |||
653 | 10 | We distribute four workflows, each with a correspondingly named worker that should be run to execute that workflow. | ||
654 | 11 | |||
655 | 12 | - `image_building_workflow` - Builds an image according to the makefile listed in PackerMAAS. | ||
656 | 13 | - `image_testing_workflow` - Tests an image against `tests_per_machine` in this repo. | ||
657 | 14 | - `image_reporting_workflow` - Compiles the results of the two above workflows into YAML, exporting it to the remote store. | ||
658 | 15 | - `e2e_workflow` - Orchestrates the above as child workflows. Additionally performs some mild pre-processing for the `image_reporting` workflow. | ||
659 | 16 | |||
660 | 17 | ## Execution | ||
661 | 18 | |||
662 | 19 | Connect all four workers to a running temporal server instance. An image test can then be requested with a single call to `e2e_workflow`, such as: | ||
663 | 20 | ```bash | ||
664 | 21 | temporal workflow start -t e2e_tests --type e2e_workflow -w 'centos_tests' -i '{"image_name": ["centos7", "centos8"], "maas_snap_channel": "3.3/stable", "jenkins_url": $jenkins_url, "jenkins_user": $jenkins_user, "jenkins_pass": $jenkins_pass}' | ||
665 | 22 | ``` | ||
666 | 23 | |||
667 | 24 | The `e2e_workflow` will then call its child workflows as required to test the requested images. | ||
668 | 25 | |||
669 | 26 | ### Parameters | ||
670 | 27 | |||
671 | 28 | #### Required | ||
672 | 29 | |||
673 | 30 | - `image_name` - The name, or list of names, of images to test. | ||
674 | 31 | |||
675 | 32 | - Jenkins details | ||
676 | 33 | |||
677 | 34 | - `jenkins_url` - The url of the Jenkins server where image tests are located. | ||
678 | 35 | |||
679 | 36 | - `jenkins_user` - The username to use to login to the Jenkins server. | ||
680 | 37 | |||
681 | 38 | - `jenkins_pass` - The password to use to login to the Jenkins server. | ||
682 | 39 | |||
683 | 40 | #### Optional | ||
684 | 41 | |||
685 | 42 | - Filepaths | ||
686 | 43 | |||
687 | 44 | - `image_mapping` - The filepath of the image mapping YAML distributed as part of MAAS-Integration-CI, defaults as `image_mapping.yaml` in the current working directory. | ||
688 | 45 | |||
689 | 46 | - `repo_location` - The filepath of the location where the image results repo is to be cloned. | ||
690 | 47 | |||
691 | 48 | - Test instances | ||
692 | 49 | |||
693 | 50 | - `maas_snap_channel` - The snap channel to use when installing MAAS in image tests, defaults as `latest/edge`. | ||
694 | 51 | |||
695 | 52 | - `system_test_repo` - The url of the system-tests repo to use for building and testing images, defaults as `https://git.launchpad.net/~maas-committers/maas-ci/+git/system-tests`. | ||
696 | 53 | |||
697 | 54 | - `system_test_branch` - The branch in the system-test repo to use for building and testing images, defaults as `master`. | ||
698 | 55 | |||
699 | 56 | - `packer_maas_repo` - The url of the PackerMAAS repo to use for building images, defaults as `https://github.com/canonical/packer-maas.git`. | ||
700 | 57 | |||
701 | 58 | - `packer_maas_branch` - The branch in the PackerMAAS repo to use for building images, defaults as `main`. | ||
702 | 59 | |||
703 | 60 | - `parallel_tests` - A flag to request a single image test build for all images, rather than a test build per image, defaults as `False`. | ||
704 | 61 | |||
705 | 62 | - `overwite_results` - A flag to request new results overwrite old results rather than combining with them, defaults as `False`. | ||
706 | 63 | |||
707 | 64 | - Retries | ||
708 | 65 | |||
709 | 66 | - `max_retry_attempts` - How many times workflow activities should retry before throwing an exception, defaults as `10` | ||
710 | 67 | |||
711 | 68 | - `heartbeat_delay` - How many seconds between heartbeats for long running workflow activities, defaults as `15` | ||
712 | 69 | |||
713 | 70 | - Timeouts | ||
714 | 71 | |||
715 | 72 | - Timeouts given are in seconds, and are passed to temporal as [`start_to_close`](https://www.temporal.io/blog/activity-timeouts), which defines the maximum execution time of a single invocation. | ||
716 | 73 | |||
717 | 74 | - `default_timeout` - How long a workflow activity can run before being timed out, defaults as `300`. This is used in place of any timeouts below that are not set. | ||
718 | 75 | |||
719 | 76 | - `jenkins_login_timeout` - How long we wait to log into the Jenkins server. | ||
720 | 77 | |||
721 | 78 | - `return_status_timeout` - How long we wait for an activity to fetch the status of a Jenkins build. | ||
722 | 79 | |||
723 | 80 | - `get_results_timeout` - How long we wait for the results of a Jenkins build to be available. | ||
724 | 81 | |||
725 | 82 | - `fetch_results_timeout` - How long we wait for an activity to fetch the results of a Jenkins build, and perform some operation on them. | ||
726 | 83 | |||
727 | 84 | - `log_details_timeout` - How long we wait for an activity to fetch logs from a Jenkins build, and perform some operation on them. | ||
728 | 85 | |||
729 | 86 | - `request_build_timeout` - How long we wait for an activity to request a Jenkins build. | ||
730 | 87 | |||
731 | 88 | - `build_complete_timeout` - How long we wait for a Jenkins build to complete, defaults as `7200`. | ||
732 | diff --git a/temporal/build_results.py b/temporal/build_results.py | |||
733 | 0 | new file mode 100644 | 89 | new file mode 100644 |
734 | index 0000000..f98eed8 | |||
735 | --- /dev/null | |||
736 | +++ b/temporal/build_results.py | |||
737 | @@ -0,0 +1,395 @@ | |||
738 | 1 | from __future__ import annotations | ||
739 | 2 | |||
740 | 3 | import re | ||
741 | 4 | import subprocess | ||
742 | 5 | from collections import defaultdict | ||
743 | 6 | from contextlib import contextmanager | ||
744 | 7 | from dataclasses import dataclass | ||
745 | 8 | from functools import cached_property | ||
746 | 9 | from typing import Any, Iterator | ||
747 | 10 | |||
748 | 11 | from common_tasks import cleanup_files | ||
749 | 12 | |||
750 | 13 | |||
751 | 14 | class TestStatus: | ||
752 | 15 | # failure | ||
753 | 16 | FAILED = 0 | ||
754 | 17 | REGRESSION = 1 | ||
755 | 18 | # successes | ||
756 | 19 | PASSED = 10 | ||
757 | 20 | FIXED = 11 | ||
758 | 21 | # no known state | ||
759 | 22 | UNKNOWN = 100 | ||
760 | 23 | |||
761 | 24 | def __init__(self, state: str | None = None, code: int | None = None) -> None: | ||
762 | 25 | if state is None and code is None: | ||
763 | 26 | s, c = "UNKNOWN", self.UNKNOWN | ||
764 | 27 | elif state is None and code is not None: | ||
765 | 28 | s, c = self._code_to_state_(code), code | ||
766 | 29 | elif state is not None and code is None: | ||
767 | 30 | s, c = state, self._state_to_code_(state) | ||
768 | 31 | elif state is not None and code is not None: | ||
769 | 32 | s, c = state, code | ||
770 | 33 | self._state_, self._code_ = s, c | ||
771 | 34 | |||
772 | 35 | def __str__(self) -> str: | ||
773 | 36 | return f"{self._state_} {self._code_}" | ||
774 | 37 | |||
775 | 38 | def __repr__(self) -> str: | ||
776 | 39 | return str(self) | ||
777 | 40 | |||
778 | 41 | @cached_property | ||
779 | 42 | def _code_state_map_(self) -> dict[int, str]: | ||
780 | 43 | return { | ||
781 | 44 | getattr(self, attr): attr for attr in dir(self) if not attr.startswith("_") | ||
782 | 45 | } | ||
783 | 46 | |||
784 | 47 | @cached_property | ||
785 | 48 | def _state_code_map_(self) -> dict[str, int]: | ||
786 | 49 | return {v: k for k, v in self._code_state_map_.items()} | ||
787 | 50 | |||
788 | 51 | def _code_to_state_(self, code: int) -> str: | ||
789 | 52 | return self._code_state_map_.get(code, "UNKNOWN") | ||
790 | 53 | |||
791 | 54 | def _state_to_code_(self, state: str) -> int: | ||
792 | 55 | return self._state_code_map_.get(state.upper(), self.UNKNOWN) | ||
793 | 56 | |||
794 | 57 | def _is_positive_state_(self, state: str) -> bool: | ||
795 | 58 | return self._is_positive_code_(self._state_to_code_(state)) | ||
796 | 59 | |||
797 | 60 | def _is_positive_code_(self, code: int) -> bool: | ||
798 | 61 | return False if code == self.UNKNOWN else code >= self.PASSED | ||
799 | 62 | |||
800 | 63 | @property | ||
801 | 64 | def _is_positive_(self) -> bool: | ||
802 | 65 | return self._is_positive_code_(self._code_) | ||
803 | 66 | |||
804 | 67 | @property | ||
805 | 68 | def _has_custom_state_(self) -> bool: | ||
806 | 69 | return (self._state_to_code_(self._state_) == self.UNKNOWN) and ( | ||
807 | 70 | self._state_ != "UNKNOWN" | ||
808 | 71 | ) | ||
809 | 72 | |||
810 | 73 | def to_dict(self) -> dict[str, str | int]: | ||
811 | 74 | return {"state": self._state_, "code": self._code_} | ||
812 | 75 | |||
813 | 76 | def __add__(self, other: Any) -> TestStatus: | ||
814 | 77 | if not isinstance(other, TestStatus): | ||
815 | 78 | return self | ||
816 | 79 | newcode = min(self._code_, other._code_) | ||
817 | 80 | custom_states = [self._has_custom_state_, other._has_custom_state_] | ||
818 | 81 | if all(custom_states): | ||
819 | 82 | newstate = self._state_ + "; " + other._state_ | ||
820 | 83 | elif any(custom_states): | ||
821 | 84 | newstate = self._state_ if self._has_custom_state_ else other._state_ | ||
822 | 85 | else: | ||
823 | 86 | newstate = self._code_to_state_(newcode) | ||
824 | 87 | return TestStatus(newstate, newcode) | ||
825 | 88 | |||
826 | 89 | def __radd__(self, other: Any) -> TestStatus: | ||
827 | 90 | if isinstance(other, TestStatus): | ||
828 | 91 | return self + other | ||
829 | 92 | return self | ||
830 | 93 | |||
831 | 94 | def __iadd__(self, other: Any) -> TestStatus: | ||
832 | 95 | if isinstance(other, TestStatus): | ||
833 | 96 | return self + other | ||
834 | 97 | return self | ||
835 | 98 | |||
836 | 99 | |||
837 | 100 | @dataclass | ||
838 | 101 | class FeatureStatus: | ||
839 | 102 | name: str = "" | ||
840 | 103 | state: bool = False | ||
841 | 104 | readable_state: str | dict[str, Any] = "Failed" | ||
842 | 105 | info: str = "Could not complete test" | ||
843 | 106 | |||
844 | 107 | def __str__(self) -> str: | ||
845 | 108 | return "\n - ".join([f"{self.name}: {self.readable_state}", self.info]) | ||
846 | 109 | |||
847 | 110 | def to_dict(self) -> dict[str, Any]: | ||
848 | 111 | return { | ||
849 | 112 | self.name: { | ||
850 | 113 | "state": "passed" if self.state else "failed", | ||
851 | 114 | "summary": self.readable_state, | ||
852 | 115 | "info": self.info, | ||
853 | 116 | } | ||
854 | 117 | } | ||
855 | 118 | |||
856 | 119 | def __add__(self, other: FeatureStatus) -> FeatureStatus: | ||
857 | 120 | if not other.state: | ||
858 | 121 | return self | ||
859 | 122 | elif not self.state: | ||
860 | 123 | return other | ||
861 | 124 | if self.name != other.name: | ||
862 | 125 | raise Exception(f"{other} does not correspond to the same feature!") | ||
863 | 126 | return FeatureStatus( | ||
864 | 127 | name=self.name, | ||
865 | 128 | state=self.state or other.state, | ||
866 | 129 | readable_state=self.readable_state, | ||
867 | 130 | info=self.info, | ||
868 | 131 | ) | ||
869 | 132 | |||
870 | 133 | |||
class ImageTestResults:
    """Aggregate of all test results for a single image.

    Base attributes are set in __init__; every additional instance
    attribute is a dynamically attached FeatureStatus (one per tested
    feature), which is how ``_results_`` / ``_features_`` discover them.
    """

    # Attributes owned by __init__; anything else on the instance is a
    # dynamically attached FeatureStatus.
    _BASE_FIELDS = (
        "image",
        "maas_version",
        "readable_state",
        "tested_arches",
        "packer_version",
        "prerequisites",
    )

    def __init__(
        self,
        image: str = "",
        maas_version: list[str] | None = None,
        packer_version: list[str] | None = None,
        readable_state: str = "",
        tested_arches: list[str] | None = None,
        prerequisites: list[str] | None = None,
    ) -> None:
        # None sentinels replace the original mutable list defaults, which
        # were shared between every instance built with the defaults.
        self.image = image
        self.maas_version = [] if maas_version is None else maas_version
        self.readable_state = readable_state
        self.tested_arches = [] if tested_arches is None else tested_arches
        self.packer_version = [] if packer_version is None else packer_version
        self.prerequisites = [] if prerequisites is None else prerequisites

    @property
    def _feature_dicts_(self) -> dict[str, Any]:
        """All per-feature result dicts merged into one mapping."""
        out: dict[str, Any] = {}
        for feature in self._results_:
            out |= getattr(self, feature).to_dict()
        return out

    @property
    def _features_(self) -> list["FeatureStatus"]:
        """The FeatureStatus objects collected on this instance."""
        return [getattr(self, feature) for feature in self._results_]

    @property
    def _results_(self) -> list[str]:
        """Names of all features whose results have been collected."""
        return [name for name in self.__dict__ if name not in self._BASE_FIELDS]

    def __str__(self) -> str:
        return "\n".join(
            [f"{self.image}: {self.readable_state}"]
            + [str(feature) for feature in self._features_]
        )

    @property
    def state(self) -> str:
        """Image test state: one pass/fail bit per feature, packed into an
        8-bit binary string. Bit i is the i-th feature name in sorted
        order (e.g. deploy/network/storage -> 0b00000{storage}{network}{deploy}).

        Bug fix: the original enumerated the feature *names* (strings) and
        read a ``state`` attribute off them, so every bit was always 0.
        """
        byte = sum(
            (1 << i) * int(getattr(self, name).state)
            for i, name in enumerate(sorted(self._results_))
        )
        return f"{byte:08b}"

    def to_dict(self) -> dict[str, Any]:
        return {
            self.image: {
                "summary": self.readable_state,
                "maas_version": self.maas_version,
                "architectures": list(self.tested_arches),
                "packer_versions": self.packer_version,
                "prerequisites": list(self.prerequisites),
            }
            | self._feature_dicts_
        }

    def from_dict(self, fromdict: dict[str, Any]) -> "ImageTestResults":
        """Rebuild an ImageTestResults from a to_dict()-shaped mapping.

        Acts as an alternate constructor (``self`` is unused). Unlike the
        original, the caller's mapping is no longer mutated.
        """
        image, details = next(iter(fromdict.items()))
        details = dict(details)  # copy: we pop the non-feature keys below
        results = ImageTestResults(
            image=image,
            maas_version=details.get("maas_version", []),
            packer_version=details.get("packer_versions", []),
            readable_state=details.get("summary", ""),
            tested_arches=details.get("architectures", []),
            prerequisites=details.get("prerequisites", []),
        )
        # Everything that is not a base key is a per-feature result dict.
        for key in next(iter(results.to_dict().values())):
            details.pop(key, None)
        for feature, feature_dict in details.items():
            setattr(
                results,
                feature,
                FeatureStatus(
                    name=feature,
                    state=feature_dict["state"] == "passed",
                    readable_state=feature_dict["summary"],
                    info=feature_dict["info"],
                ),
            )
        return results

    def __add__(self, other: "ImageTestResults") -> "ImageTestResults":
        """Merge two runs for the same image; a run whose lowest state bit
        is clear (first sorted feature failed) is dropped entirely."""
        if self.image != other.image:
            raise Exception(f"{other} does not correspond to the same image!")
        # return itself if the other failed
        if not int(other.state, 2) & 1:
            return self
        elif not int(self.state, 2) & 1:
            return other

        def force_set(var: str | list[Any] | set[Any]) -> set[Any]:
            # A bare string must become a one-element set, not a char set.
            return {var} if isinstance(var, str) else set(var)

        def combine_sets(
            var: str | list[Any] | set[Any], var2: str | list[Any] | set[Any]
        ) -> list[Any]:
            return list(force_set(var) | force_set(var2))

        combined_state = TestStatus(state=self.readable_state) + TestStatus(
            state=other.readable_state
        )
        results = ImageTestResults(
            image=self.image,
            maas_version=combine_sets(self.maas_version, other.maas_version),
            packer_version=combine_sets(self.packer_version, other.packer_version),
            readable_state=combined_state._state_,
            tested_arches=combine_sets(self.tested_arches, other.tested_arches),
            prerequisites=combine_sets(self.prerequisites, other.prerequisites),
        )
        for feature in set(self._results_) | set(other._results_):
            # Bug fix: the second operand previously read from *self* again,
            # so other's feature results were silently discarded.
            setattr(
                results,
                feature,
                getattr(self, feature, FeatureStatus())
                + getattr(other, feature, FeatureStatus()),
            )
        return results
996 | 259 | |||
997 | 260 | |||
998 | 261 | def todict(nested: defaultdict[str, Any] | dict[str, Any]) -> dict[str, Any]: | ||
999 | 262 | for k, v in nested.items(): | ||
1000 | 263 | if isinstance(v, dict): | ||
1001 | 264 | nested[k] = todict(v) | ||
1002 | 265 | return dict(nested) | ||
1003 | 266 | |||
1004 | 267 | |||
def nested_dict() -> defaultdict[str, Any]:
    """Return an autovivifying mapping: missing keys yield further nested_dicts."""
    tree: defaultdict[str, Any] = defaultdict(nested_dict)
    return tree
1007 | 270 | |||
1008 | 271 | |||
def feature_dict_summary(
    feature_dict: dict[str, dict[str, list[str]]]
) -> tuple[bool, dict[str, list[str]], str]:
    """Summarise a ``{state: {feature_value: [arch, ...]}}`` mapping.

    Returns:
        state: True only if nothing failed and nothing is in an unknown state.
        summary: feature values bucketed into PASS / FAIL / PARTIAL.
        info: human-readable per-state breakdown, one line per value.
    """
    states = set(feature_dict)
    # Bug fix: use .get() — a run with no failures (or no passes) simply
    # has no such key, which previously raised KeyError on plain dicts.
    failed = set(feature_dict.get("FAILED", {}))
    passed = set(feature_dict.get("PASSED", {}))
    unknown: set[str] = set()
    for unknown_state in states - {"PASSED", "FAILED"}:
        unknown |= set(feature_dict[unknown_state])

    # Overall pass/fail for the entire feature.
    state = not (failed or unknown)
    # Overall pass/fail for each value of the feature.
    summary: dict[str, list[str]] = {}
    if full_pass := passed - (failed | unknown):
        summary["PASS"] = list(full_pass)
    if full_fail := failed - (passed | unknown):
        summary["FAIL"] = list(full_fail)
    if partial_fail := (passed & failed) | unknown:
        summary["PARTIAL"] = list(partial_fail)
    # Specific pass/fail for each value of the feature.
    info = []
    for fstate, fvalue in feature_dict.items():
        info.append(fstate.lower())
        info.extend(f" - {layout}: {', '.join(arch)}" for layout, arch in fvalue.items())
    return state, summary, "\n".join(info)
1038 | 301 | |||
1039 | 302 | |||
def scan_log_for_feature(
    feature_name: str, arches: dict[str, Any]
) -> dict[str, dict[str, list[str]]]:
    """Collect per-arch results for one feature from build logs.

    Matches the two ways test results appear in the logs:
        'storage layout flat: PASSED'
        'Storage layout: bcache - FAILED'
    capturing the feature value (flat, bcache) and result (PASSED, FAILED).
    Returns ``{result: {feature_value: [arch, ...]}}``.

    (The original placed this explanation as a bare string after the first
    statement, so it never became the docstring.)
    """
    tested = nested_dict()
    versioning_match = r":?\s(\w+):?\s(?:\-\s)?([A-Z]{4,})"
    feature_match = re.compile(f"{feature_name}{versioning_match}", flags=re.IGNORECASE)
    for arch_name, arch in arches.items():
        # Each arch entry carries its build log as a list of lines.
        arch_log = "\n".join(arch["log"])
        for feature, state in feature_match.findall(arch_log):
            if feature not in tested[state]:
                tested[state][feature] = []
            tested[state][feature].append(arch_name)
    return todict(tested)
1058 | 321 | |||
1059 | 322 | |||
def determine_feature_state(
    feature_name: str, arches: dict[str, Any]
) -> tuple[bool, dict[str, list[str]], str] | None:
    """Summarise one feature's results, or None when it was never tested."""
    feature_tested = scan_log_for_feature(feature_name, arches)
    if not feature_tested:
        return None
    return feature_dict_summary(feature_tested)
1066 | 329 | |||
1067 | 330 | |||
1068 | 331 | def execute( | ||
1069 | 332 | command: list[str], cwd: str | None = None | ||
1070 | 333 | ) -> subprocess.CompletedProcess[str]: | ||
1071 | 334 | """Execute a command""" | ||
1072 | 335 | __tracebackhide__ = True | ||
1073 | 336 | return subprocess.run( | ||
1074 | 337 | command, | ||
1075 | 338 | capture_output=True, | ||
1076 | 339 | check=True, | ||
1077 | 340 | encoding="utf-8", | ||
1078 | 341 | errors="backslashreplace", | ||
1079 | 342 | cwd=cwd, | ||
1080 | 343 | ) | ||
1081 | 344 | |||
1082 | 345 | |||
@contextmanager
def checkout_and_commit(
    branch: str,
    commit_message: str,
    base_branch: str | None = None,
    add_file: str | list[str] | None = None,
    cwd: str | None = None,
) -> Iterator[None]:
    """Check out *branch* (creating it from the base branch if needed),
    yield so the caller can modify the work tree, then commit and push.

    If the previous commit already carries *commit_message*, it is dropped
    and replaced by the new commit (squash via reset + force push).
    *add_file* names files to preserve when cleaning the tree first.
    """
    branches = execute(["git", "branch", "-a"], cwd=cwd).stdout
    branch_base = base_branch or ("main" if "main" in branches else "master")
    # Bug fix: "--abbrev-ref HEAD" was passed as a single argument, which
    # git rejects; it must be two. Also strip the trailing newline so the
    # branch comparisons below can ever match.
    current_branch = execute(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=cwd
    ).stdout.strip()

    # Ensure we're up to date with the base branch first.
    if current_branch != branch_base:
        execute(["git", "checkout", branch_base], cwd=cwd)
        execute(["git", "pull"], cwd=cwd)
        current_branch = branch_base

    # Navigate to the correct branch.
    if current_branch != branch:
        # NOTE(review): substring test — "main" also matches "maintenance";
        # assumed acceptable for the branch names used here.
        if branch in branches:
            execute(["git", "checkout", branch], cwd=cwd)
            try:
                execute(["git", "pull"], cwd=cwd)
            except Exception as e:
                # Best effort: a local-only branch has no upstream to pull.
                print(e)
        else:
            execute(["git", "checkout", "-b", branch], cwd=cwd)

    yield

    if cwd and add_file:
        cleanup_files(cwd, preserve=add_file)

    # If the previous commit matches the one we want to make, drop it so
    # history keeps one commit per message (requires a force push).
    reset = False
    while (
        execute(["git", "show-branch", "--no-name", "HEAD~1"], cwd=cwd).stdout.strip()
        == commit_message
    ):
        execute(["git", "reset", "--hard", "HEAD~1"], cwd=cwd)
        reset = True

    # Add files and commit. Bug fix: the message was wrapped in literal
    # quotes, so it could never equal the show-branch output above.
    execute(["git", "add", "."], cwd=cwd)
    execute(["git", "commit", "-m", commit_message], cwd=cwd)
    execute(["git", "push", "-f"] if reset else ["git", "push"], cwd=cwd)
1133 | diff --git a/temporal/common_tasks.py b/temporal/common_tasks.py | |||
1134 | 0 | new file mode 100644 | 396 | new file mode 100644 |
1135 | index 0000000..8fc6011 | |||
1136 | --- /dev/null | |||
1137 | +++ b/temporal/common_tasks.py | |||
1138 | @@ -0,0 +1,293 @@ | |||
1139 | 1 | import argparse | ||
1140 | 2 | import asyncio | ||
1141 | 3 | import os | ||
1142 | 4 | import sys | ||
1143 | 5 | from dataclasses import dataclass | ||
1144 | 6 | from datetime import timedelta | ||
1145 | 7 | from time import sleep | ||
1146 | 8 | from typing import Any | ||
1147 | 9 | |||
1148 | 10 | import yaml | ||
1149 | 11 | from temporalio import activity, workflow | ||
1150 | 12 | from temporalio.client import Client | ||
1151 | 13 | from temporalio.worker import Worker | ||
1152 | 14 | |||
1153 | 15 | with workflow.unsafe.imports_passed_through(): | ||
1154 | 16 | from jenkinsapi.build import Artifact, Build # type:ignore[import] | ||
1155 | 17 | from jenkinsapi.jenkins import Jenkins # type:ignore[import] | ||
1156 | 18 | from jenkinsapi.job import Job # type:ignore[import] | ||
1157 | 19 | |||
1158 | 20 | |||
1159 | 21 | # Workflow parameter class | ||
@dataclass
class workflow_parameters:
    """Parameters shared by the image-testing Temporal workflows."""

    jenkins_url: str
    jenkins_user: str
    jenkins_pass: str
    job_name: str = ""
    build_num: int = -1

    # Retry behaviour.
    max_retry_attempts: int = 10
    heartbeat_delay: int = 15

    # Fallback used whenever a specific timeout below is unset (-1).
    default_timeout: int = 300
    # How long we wait to log in to Jenkins.
    jenkins_login_timeout: int = -1
    # How long we wait for the build to complete.
    return_status_timeout: int = -1
    # How long we wait to get build results.
    fetch_results_timeout: int = -1
    # How long we wait for log scanning to occur.
    log_details_timeout: int = -1
    # How long we wait for this build to be requested.
    request_build_timeout: int = -1
    # How long we wait for the build to complete.
    build_complete_timeout: int = 7200
    # How long we wait for the results to be available.
    get_results_timeout: int = -1

    def gettimeout(self, timeout_name: str = "") -> timedelta:
        """Return the named timeout, falling back to default_timeout when
        the name is unknown or its value is unset (<= 0)."""
        seconds = self.__dict__.get(timeout_name, 0)
        return timedelta(seconds=seconds if seconds > 0 else self.default_timeout)
1194 | 56 | |||
1195 | 57 | |||
1196 | 58 | # common functions | ||
1197 | 59 | |||
1198 | 60 | |||
1199 | 61 | def cleanup_files(file_path: str, preserve: str | list[str] | None = None) -> None: | ||
1200 | 62 | if os.path.exists(file_path): | ||
1201 | 63 | files = os.listdir(file_path) | ||
1202 | 64 | files.remove(".git") | ||
1203 | 65 | if preserve: | ||
1204 | 66 | for preserved_file in aslist(preserve): | ||
1205 | 67 | this_file = os.path.basename(preserved_file) | ||
1206 | 68 | if this_file in files: | ||
1207 | 69 | files.remove(this_file) | ||
1208 | 70 | if files: | ||
1209 | 71 | print(f"Removing: {files}") | ||
1210 | 72 | for cleanup in files: | ||
1211 | 73 | os.remove(f"{file_path}/{cleanup}") | ||
1212 | 74 | |||
1213 | 75 | |||
1214 | 76 | def aslist(to_list: str | list[Any]) -> list[Any]: | ||
1215 | 77 | if isinstance(to_list, list): | ||
1216 | 78 | return to_list | ||
1217 | 79 | return [to_list] if to_list else [] | ||
1218 | 80 | |||
1219 | 81 | |||
def get_server(params: workflow_parameters) -> Jenkins:
    """Open an authenticated connection to the Jenkins server from ``params``."""
    login_timeout = params.gettimeout("jenkins_login_timeout").seconds
    return Jenkins(
        params.jenkins_url,
        username=params.jenkins_user,
        password=params.jenkins_pass,
        timeout=login_timeout,
        max_retries=params.max_retry_attempts,
    )
1228 | 90 | |||
1229 | 91 | |||
def get_job(
    params: workflow_parameters,
    job_name: str | None = None,
) -> Job:
    """Fetch a Jenkins job; an explicit ``job_name`` overrides ``params.job_name``."""
    name = job_name or params.job_name
    return get_server(params).get_job(name)
1235 | 97 | |||
1236 | 98 | |||
def get_build(
    params: workflow_parameters,
    job_name: str | None = None,
    build_num: int | None = None,
) -> Build:
    """Fetch a specific build, or the job's latest build when no usable
    number is given (explicit ``build_num`` wins over ``params.build_num``)."""
    job = get_job(params, job_name=job_name)
    num = build_num or params.build_num
    if num >= 0:
        return job.get_build(num)
    return job.get_last_build()
1246 | 108 | |||
1247 | 109 | |||
def get_params(
    params: workflow_parameters,
    job_name: str | None = None,
    build_num: int | None = None,
) -> dict[str, Any]:
    """Return the build parameters the selected Jenkins build was started with."""
    target = get_build(params, job_name=job_name, build_num=build_num)
    return target.get_params()  # type: ignore
1255 | 117 | |||
1256 | 118 | |||
def get_logs(
    params: workflow_parameters,
    job_name: str | None = None,
    build_num: int | None = None,
) -> dict[str, str]:
    """Return the build's ``*.log`` artifacts as {basename-without-ext: text}.

    Fix: the decode fallback previously caught bare ``Exception``, which
    also swallowed download/transport errors; it now only catches
    ``UnicodeDecodeError`` before retrying as UTF-16.
    """

    # attempt utf-8. If that doesn't work, try utf-16
    def decode_artifact_data(artifact: Artifact) -> str:
        data = artifact.get_data()
        try:
            return str(data, encoding="utf-8")
        except UnicodeDecodeError as e:
            # some agents produce UTF-16 logs; fall back rather than fail
            print(e)
            return str(data, encoding="utf-16")

    build = get_build(params, job_name=job_name, build_num=build_num)
    logs = {
        name.split(".")[-2]: decode_artifact_data(artifact)
        for name, artifact in build.get_artifact_dict().items()
        if ".log" in name
    }
    return logs
1278 | 140 | |||
1279 | 141 | |||
def get_config(
    params: workflow_parameters,
    job_name: str | None = None,
    build_num: int | None = None,
) -> dict[str, Any]:
    """Load the ``config.yaml`` artifact attached to the selected build.

    Raises IndexError when the build carries no ``config.yaml`` artifact.
    """
    build = get_build(params, job_name=job_name, build_num=build_num)
    config_blobs = [
        artifact.get_data()
        for name, artifact in build.get_artifact_dict().items()
        if "config.yaml" in name
    ]
    return yaml.safe_load(config_blobs[0])  # type: ignore
1293 | 155 | |||
1294 | 156 | |||
def get_results(
    params: workflow_parameters,
    job_name: str | None = None,
    build_num: int | None = None,
) -> dict[str, Any]:
    """Return the build's resultset with each result flattened to its attribute dict."""
    build = get_build(params, job_name=job_name, build_num=build_num)
    resultset = build.get_resultset()
    return {name: vars(result) for name, result in resultset.items()}
1303 | 165 | |||
1304 | 166 | |||
def request_build(
    params: workflow_parameters, job_params: dict[str, Any], job_name: str | None = None
) -> int:
    """Trigger a Jenkins build of the job with ``job_params``.

    :returns: the expected build number (last build number + 1).

    Fix: the original resolved ``params.job_name or job_name`` — the reverse
    of every other helper here (get_job/get_build let an explicit name win) —
    and then passed ``params.job_name`` to ``build_job`` regardless, so an
    explicit ``job_name`` argument was silently ignored. Both calls now use
    one consistently-resolved name.
    """
    server = get_server(params)
    name = job_name or params.job_name
    last_build = int(server.get_job(name).get_last_buildnumber())
    server.build_job(name, job_params)
    # NOTE(review): assumes no concurrent trigger bumps the number in between
    return last_build + 1
1312 | 174 | |||
1313 | 175 | |||
1314 | 176 | # common activities | ||
1315 | 177 | |||
1316 | 178 | |||
@activity.defn
async def check_jenkins_reachable(params: workflow_parameters) -> bool:
    """True when the Jenkins server answers with a real version string."""
    server = get_server(params)
    if not server:
        return False
    return server.version != "0.0"
1321 | 183 | |||
1322 | 184 | |||
@activity.defn
async def check_build_has_results(params: workflow_parameters) -> bool:
    """True when the configured build already exposes a resultset."""
    return bool(get_build(params).has_resultset())
1327 | 189 | |||
1328 | 190 | |||
@activity.defn
async def fetch_build_status(params: workflow_parameters) -> str:
    """Wait for the configured build to finish, then return its status string.

    Fix: the original used blocking ``sleep()`` inside an async activity,
    which stalls the worker's event loop (and every other coroutine on it);
    ``await asyncio.sleep`` yields control instead.
    """
    build = get_build(params)
    while build.is_running():
        await asyncio.sleep(params.heartbeat_delay)
        activity.heartbeat("Awaiting build finish")
    return str(build.get_status())
1336 | 198 | |||
1337 | 199 | |||
@activity.defn
async def fetch_build_and_result(
    params: workflow_parameters,
) -> dict[str, dict[str, str]]:
    """Wait until the build has a resultset, then return {test: {"status": ...}}.

    Fix: replaced blocking ``sleep()`` with ``await asyncio.sleep`` so the
    polling loop does not freeze the worker's event loop between heartbeats.
    """
    build = get_build(params)
    while not build.has_resultset():
        await asyncio.sleep(params.heartbeat_delay)
        activity.heartbeat("Awaiting build results")
    return {k: {"status": v.status} for k, v in build.get_resultset().items()}
1347 | 209 | |||
1348 | 210 | |||
@activity.defn
async def await_build_exists(params: workflow_parameters) -> None:
    """Block until the requested build is queued and then actually running.

    Fix: replaced blocking ``sleep()`` with ``await asyncio.sleep`` — an
    async Temporal activity must not block the shared event loop while
    polling.
    """
    job = get_job(params)
    while not job.is_queued_or_running():
        await asyncio.sleep(params.heartbeat_delay)
        activity.heartbeat("Awaiting job start")
    build = None
    while True:
        try:
            if build is None:
                build = get_build(params)
            if build.is_running():
                break
        except Exception as e:
            # best-effort: the build record may not be fetchable yet;
            # report the failure via heartbeat and keep polling
            activity.heartbeat(f"Could not fetch build: {e}")
        await asyncio.sleep(params.heartbeat_delay)
        activity.heartbeat("Awaiting build running")
1366 | 228 | |||
1367 | 229 | |||
@activity.defn
async def await_build_complete(params: workflow_parameters) -> None:
    """Block until the configured build stops running.

    Fix: replaced blocking ``sleep()`` with ``await asyncio.sleep`` so the
    worker's event loop stays responsive during the polling wait.
    """
    build = get_build(params)
    while build.is_running():
        await asyncio.sleep(params.heartbeat_delay)
        activity.heartbeat("Awaiting job completion")
1374 | 236 | |||
1375 | 237 | |||
1376 | 238 | # workers | ||
1377 | 239 | |||
1378 | 240 | |||
def worker_url(argv: list[str]) -> str:
    """Parse the Temporal server URL from ``argv``.

    Fix: a positional argparse argument with a ``default`` but without
    ``nargs="?"`` is still required — the default was dead code and calling
    the worker with no arguments crashed. ``nargs="?"`` makes the URL truly
    optional, defaulting to localhost:7233.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "temporal_url",
        type=str,
        nargs="?",
        default="localhost:7233",
        help="url of the temporal server",
    )
    args = parser.parse_args(argv)
    return str(args.temporal_url)
1389 | 251 | |||
1390 | 252 | |||
async def worker_main(
    interrupt_event: asyncio.Event,
    temporal_url: str,
    task_queue: str,
    workflows: list[Any],
    activities: list[Any],
) -> None:
    """Run a Temporal worker on ``task_queue`` until ``interrupt_event`` is set.

    The queue name is normalised to lower_snake_case before registering.
    """
    queue = task_queue.lower().replace(" ", "_")
    client = await Client.connect(temporal_url)
    async with Worker(
        client,
        task_queue=queue,
        workflows=workflows,
        activities=activities,
    ):
        banner = f"{task_queue} worker started, ctrl+c to exit"
        print(banner.capitalize().replace("_", " "))
        await interrupt_event.wait()
1411 | 273 | |||
1412 | 274 | |||
def start_worker(task_queue: str, workflows: list[Any], activities: list[Any]) -> None:
    """Run a Temporal worker for ``task_queue`` until interrupted (Ctrl+C).

    Fix: the original KeyboardInterrupt path did ``set(); clear();
    run_until_complete(interrupt_event.wait())`` — after the ``clear()``
    nothing ever sets the event again, so shutdown deadlocked. Setting the
    event once is enough to release ``worker_main``; cleanup happens in
    ``finally``.
    """
    temporal_url = worker_url(sys.argv[1:])
    interrupt_event = asyncio.Event()

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(
            worker_main(
                interrupt_event, temporal_url, task_queue, workflows, activities
            )
        )
    except KeyboardInterrupt:
        interrupt_event.set()
    finally:
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()
1432 | diff --git a/temporal/e2e_worker.py b/temporal/e2e_worker.py | |||
1433 | 0 | new file mode 100644 | 294 | new file mode 100644 |
1434 | index 0000000..97c5260 | |||
1435 | --- /dev/null | |||
1436 | +++ b/temporal/e2e_worker.py | |||
1437 | @@ -0,0 +1,10 @@ | |||
from common_tasks import start_worker
from e2e_workflow import activities as e2e_activities
from e2e_workflow import workflows as e2e_workflows

if __name__ == "__main__":
    # Serve the end-to-end workflows and activities on the e2e_tests queue.
    start_worker("e2e_tests", e2e_workflows, e2e_activities)
1448 | diff --git a/temporal/e2e_workflow.py b/temporal/e2e_workflow.py | |||
1449 | 0 | new file mode 100644 | 11 | new file mode 100644 |
1450 | index 0000000..82f2ba7 | |||
1451 | --- /dev/null | |||
1452 | +++ b/temporal/e2e_workflow.py | |||
1453 | @@ -0,0 +1,206 @@ | |||
1454 | 1 | import re | ||
1455 | 2 | from dataclasses import dataclass | ||
1456 | 3 | from typing import Any | ||
1457 | 4 | |||
1458 | 5 | from build_results import nested_dict, todict | ||
1459 | 6 | from common_tasks import aslist, get_logs, workflow_parameters | ||
1460 | 7 | from image_building_workflow import image_building_param, image_building_workflow | ||
1461 | 8 | from image_reporting_workflow import image_reporting_param, image_reporting_workflow | ||
1462 | 9 | from image_testing_workflow import image_testing_param, image_testing_workflow | ||
1463 | 10 | from temporalio import activity, workflow | ||
1464 | 11 | from temporalio.common import RetryPolicy | ||
1465 | 12 | |||
1466 | 13 | |||
@dataclass
class e2e_workflow_params(workflow_parameters):
    """Parameters for the end-to-end build / test / report workflow."""

    # image(s) to build and test: a single name or a list of names
    image_name: str | list[str] = ""
    image_mapping: str = (
        "image_mapping.yaml"  # this needs to be accessible to the worker
    )

    # source repositories and branches used by the Jenkins jobs
    system_test_repo: str = (
        "https://git.launchpad.net/~maas-committers/maas-ci/+git/system-tests"
    )
    system_test_branch: str = "master"
    # NOTE(review): "naas" looks like a typo for "maas"; the field name is
    # part of the parameter interface, so it is kept as-is
    packer_naas_repo: str = "https://github.com/canonical/packer-maas.git"
    packer_maas_branch: str = "main"

    # snap channel MAAS is installed from during testing
    maas_snap_channel: str = "latest/edge"

    # local checkout location for the results repository
    repo_location: str = "image_results_repo"

    # overwrite previously recorded results when reporting
    overwrite_results: bool = False
    # recommended to leave this False until the rescue issue at CI is fixed
    parallel_tests: bool = False
1488 | 35 | |||
1489 | 36 | |||
@activity.defn
async def fetch_packer_version_from_logs(
    params: e2e_workflow_params,
) -> dict[str, Any]:
    """Extract, per requested image, the packer version from the builder job logs.

    Returns a plain dict of {image: {"packer_version": str, "prerequisites": list}};
    the version stays "" when no matching log or version line is found.

    Fix: removed the dead ``else`` branch that re-assigned ``""`` — the field
    is already initialised to ``""`` before the search.
    """
    logs = get_logs(params, job_name="maas-automated-image-builder")
    packer_details = nested_dict()
    for image in aslist(params.image_name):
        packer_details[image]["packer_version"] = ""
        packer_details[image]["prerequisites"] = []
        # fetch the build log for this image
        if log := [v for k, v in logs.items() if image in k]:
            # fetch the packer version
            if search := re.search(r"Packer version\: ((\d+\.\d+)\.\d+)", log[0]):
                long_version, _ = search.groups()
                packer_details[image]["packer_version"] = long_version
        # search for prerequisites
    return todict(packer_details)
1509 | 56 | |||
1510 | 57 | |||
@activity.defn
async def fetch_image_details(params: dict[str, Any]) -> dict[str, Any]:
    """Assemble a per-image summary of build and test outcomes.

    ``params`` carries "images", "packer_details", "image_results",
    "failed_images" and "build_num" accumulated by the e2e workflow.
    """
    packer_info = params.get("packer_details", {})
    test_info = params.get("image_results", {})
    failed = params.get("failed_images", [])
    build_num = params.get("build_num", -1)
    summary: dict[str, Any] = {}
    for image in aslist(params["images"]):
        packer = packer_info.get(image, {})
        tests = test_info.get(image, {})
        summary[image] = {
            "built": image not in failed,
            "tested": bool(tests),
            "build_num": build_num,
            "test_num": tests.get("build_num"),
            "packer_version": packer.get("packer_version", "0.0"),
            "prerequisites": packer.get("prerequisites", []),
        }
    return summary
1526 | 73 | |||
1527 | 74 | |||
@workflow.defn
class e2e_workflow:
    """End-to-end workflow: build the images, test the ones that built,
    then report the combined results.

    Fix: the child-workflow ids joined ``params.image_name`` directly —
    joining a bare string splits it into comma-separated characters
    (e.g. "Building: u,b,u,n,t,u"); ``aslist()`` is applied first, matching
    how the rest of this module treats ``image_name``.
    """

    @workflow.run
    async def run(self, params: e2e_workflow_params) -> None:
        # build images via the image_building child workflow
        image_building_results: dict[str, Any] = await workflow.execute_child_workflow(
            image_building_workflow,
            image_building_param(
                # building parameters
                image_name=params.image_name,
                image_mapping=params.image_mapping,
                system_test_repo=params.system_test_repo,
                system_test_branch=params.system_test_branch,
                packer_naas_repo=params.packer_naas_repo,
                packer_maas_branch=params.packer_maas_branch,
                # jenkins stuff
                jenkins_url=params.jenkins_url,
                jenkins_user=params.jenkins_user,
                jenkins_pass=params.jenkins_pass,
                # timeouts and retry
                max_retry_attempts=params.max_retry_attempts,
                heartbeat_delay=params.heartbeat_delay,
                default_timeout=params.default_timeout,
                jenkins_login_timeout=params.jenkins_login_timeout,
                return_status_timeout=params.return_status_timeout,
                fetch_results_timeout=params.fetch_results_timeout,
                log_details_timeout=params.log_details_timeout,
                request_build_timeout=params.request_build_timeout,
                build_complete_timeout=params.build_complete_timeout,
                get_results_timeout=params.get_results_timeout,
            ),
            task_queue="image_building",
            id=f"Building: {','.join(aslist(params.image_name))}",
        )
        # images that failed or succeeded to be built
        params.build_num = image_building_results.get("build_num", -1)
        images_built = image_building_results["image_results"]
        failed_images = [image for image, built in images_built.items() if not built]
        passed_images = [image for image, built in images_built.items() if built]
        # get the packer version and prerequisites
        packer_details = await workflow.execute_activity(
            fetch_packer_version_from_logs,
            params,
            start_to_close_timeout=params.gettimeout("log_details_timeout"),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
        )
        # get all of the images that were built
        image_testing_results: dict[str, Any] = {}
        if images_to_test := passed_images:
            # test images
            # if we are testing images in parallel, this list will have one entry.
            for image_test_group in (
                [images_to_test] if params.parallel_tests else images_to_test
            ):
                try:
                    image_testing_results |= await workflow.execute_child_workflow(
                        image_testing_workflow,
                        image_testing_param(
                            # testing parameters
                            image_name=image_test_group,
                            system_test_repo=params.system_test_repo,
                            system_test_branch=params.system_test_branch,
                            maas_snap_channel=params.maas_snap_channel,
                            parallel_tests=params.parallel_tests,
                            # jenkins stuff
                            jenkins_url=params.jenkins_url,
                            jenkins_user=params.jenkins_user,
                            jenkins_pass=params.jenkins_pass,
                            # timeouts and retry
                            max_retry_attempts=params.max_retry_attempts,
                            heartbeat_delay=params.heartbeat_delay,
                            default_timeout=params.default_timeout,
                            jenkins_login_timeout=params.jenkins_login_timeout,
                            return_status_timeout=params.return_status_timeout,
                            fetch_results_timeout=params.fetch_results_timeout,
                            log_details_timeout=params.log_details_timeout,
                            request_build_timeout=params.request_build_timeout,
                            build_complete_timeout=params.build_complete_timeout,
                            get_results_timeout=params.get_results_timeout,
                        ),
                        task_queue="image_testing",
                        id=f"Testing: {','.join(aslist(image_test_group))}",
                    )
                except Exception as e:
                    # a failed test run must not abort reporting for the rest
                    workflow.logger.exception(f"Could not test {image_test_group}: {e}")

        # populate image details from test results
        image_details = await workflow.execute_activity(
            fetch_image_details,
            {
                "images": params.image_name,
                "packer_details": packer_details,
                "failed_images": failed_images,
                "build_num": params.build_num,
                "image_results": image_testing_results,
            },
            start_to_close_timeout=params.gettimeout("log_details_timeout"),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
        )

        # report image results
        await workflow.execute_child_workflow(
            image_reporting_workflow,
            image_reporting_param(
                # reporting parameters
                image_details=image_details,
                repo_location=params.repo_location,
                overwrite_results=params.overwrite_results,
                maas_snap_channel=params.maas_snap_channel,
                # jenkins stuff
                jenkins_url=params.jenkins_url,
                jenkins_user=params.jenkins_user,
                jenkins_pass=params.jenkins_pass,
                # timeouts and retry
                max_retry_attempts=params.max_retry_attempts,
                heartbeat_delay=params.heartbeat_delay,
                default_timeout=params.default_timeout,
                jenkins_login_timeout=params.jenkins_login_timeout,
                return_status_timeout=params.return_status_timeout,
                fetch_results_timeout=params.fetch_results_timeout,
                log_details_timeout=params.log_details_timeout,
                request_build_timeout=params.request_build_timeout,
                build_complete_timeout=params.build_complete_timeout,
                get_results_timeout=params.get_results_timeout,
            ),
            task_queue="image_reporting",
            id=f"Reporting: {','.join(aslist(params.image_name))}",
        )
1656 | 203 | |||
1657 | 204 | |||
# registration lists consumed by e2e_worker.start_worker
activities = [fetch_packer_version_from_logs, fetch_image_details]
workflows = [e2e_workflow]
1660 | diff --git a/temporal/image_building_worker.py b/temporal/image_building_worker.py | |||
1661 | 0 | new file mode 100644 | 207 | new file mode 100644 |
1662 | index 0000000..885f578 | |||
1663 | --- /dev/null | |||
1664 | +++ b/temporal/image_building_worker.py | |||
1665 | @@ -0,0 +1,10 @@ | |||
from common_tasks import start_worker
from image_building_workflow import activities as image_build_activities
from image_building_workflow import workflows as image_build_workflows

if __name__ == "__main__":
    # Serve the image-building workflows on the image_building queue.
    start_worker("image_building", image_build_workflows, image_build_activities)
1676 | diff --git a/temporal/image_building_workflow.py b/temporal/image_building_workflow.py | |||
1677 | 0 | new file mode 100644 | 11 | new file mode 100644 |
1678 | index 0000000..586f1f4 | |||
1679 | --- /dev/null | |||
1680 | +++ b/temporal/image_building_workflow.py | |||
1681 | @@ -0,0 +1,165 @@ | |||
1682 | 1 | import re | ||
1683 | 2 | from dataclasses import dataclass | ||
1684 | 3 | from typing import Any | ||
1685 | 4 | |||
1686 | 5 | import yaml | ||
1687 | 6 | from common_tasks import ( | ||
1688 | 7 | aslist, | ||
1689 | 8 | await_build_complete, | ||
1690 | 9 | await_build_exists, | ||
1691 | 10 | check_jenkins_reachable, | ||
1692 | 11 | fetch_build_and_result, | ||
1693 | 12 | fetch_build_status, | ||
1694 | 13 | request_build, | ||
1695 | 14 | workflow_parameters, | ||
1696 | 15 | ) | ||
1697 | 16 | from temporalio import activity, workflow | ||
1698 | 17 | from temporalio.common import RetryPolicy | ||
1699 | 18 | |||
1700 | 19 | |||
@dataclass
class image_building_param(workflow_parameters):
    """Parameters for the image-building child workflow."""

    image_name: str | list[str] = ""  # allow bulk image building if desired
    image_mapping: str = (
        "image_mapping.yaml"  # this needs to be accessible to the worker
    )

    # Jenkins job that builds the images; build_num -1 means "latest"
    job_name: str = "maas-automated-image-builder"
    build_num: int = -1

    # job details with default values we may want to change
    system_test_repo: str = (
        "https://git.launchpad.net/~maas-committers/maas-ci/+git/system-tests"
    )
    system_test_branch: str = "master"
    # NOTE(review): "naas" looks like a typo for "maas"; field name kept
    # because it is part of the parameter interface
    packer_naas_repo: str = "https://github.com/canonical/packer-maas.git"
    packer_maas_branch: str = "main"
@activity.defn
async def request_images_built(params: image_building_param) -> int:
    """Trigger the Jenkins image-builder job; return its expected build number."""
    job_params: dict[str, Any] = {
        "IMAGE_NAMES": ",".join(aslist(params.image_name)),
        "SYSTEMTESTS_GIT_REPO": params.system_test_repo,
        "SYSTEMTESTS_GIT_BRANCH": params.system_test_branch,
        "PACKER_MAAS_GIT_REPO": params.packer_naas_repo,
        "PACKER_MAAS_GIT_BRANCH": params.packer_maas_branch,
    }
    return request_build(params, job_params)
1731 | 50 | |||
1732 | 51 | |||
@activity.defn
async def fetch_image_mapping(
    params: image_building_param,
) -> dict[str, dict[str, Any]]:
    """Load the image-mapping YAML from the worker's local filesystem."""
    with open(params.image_mapping, "r") as mapping_file:
        loaded: dict[str, Any] = yaml.safe_load(mapping_file)
    return loaded
1740 | 59 | |||
1741 | 60 | |||
@activity.defn
async def fetch_image_built_status(params: dict[str, Any]) -> dict[str, bool]:
    """Map each requested image to True when a matching build test passed.

    ``params`` carries "results" (test name -> {"status": ...}), "image"
    (list of requested image names) and "mapping" (the image-mapping YAML).
    A test counts as passing when its status is FIXED or PASSED.

    Fix: the mapping entry was looked up twice (once into ``this_image``,
    once again inline for ``osystem``); the cached entry is now reused.
    """
    results: dict[str, dict[str, str]] = params["results"]
    mapping: dict[str, dict[str, Any]] = params["mapping"]
    image_built_results: dict[str, bool] = {}

    for image in params["image"]:
        this_image = mapping["images"].get(image, {})
        oseries = this_image.get("oseries")
        osystem = this_image.get("osystem")
        image_name = f"{osystem}/{oseries}"
        status = False
        for test_name, test_result in results.items():
            if re.search(rf"test_build_image.*{image_name}", test_name):
                if test_result["status"] in ["FIXED", "PASSED"]:
                    status = True
                    break
        image_built_results[image] = status
    return image_built_results
1761 | 80 | |||
1762 | 81 | |||
@workflow.defn
class image_building_workflow:
    @workflow.run
    async def run(
        self, params: image_building_param
    ) -> dict[str, int | dict[str, bool]]:
        """Request a Jenkins image build and report per-image pass/fail.

        Returns {"build_num": int, "image_results": {image: built_ok}}.
        Every requested image defaults to False until its build test is
        confirmed as passing.
        """
        # await an open connection to the server
        await workflow.execute_activity(
            check_jenkins_reachable,
            params,
            start_to_close_timeout=params.gettimeout("jenkins_login_timeout"),
        )
        # only attempt to build the image once
        params.build_num = await workflow.execute_activity(
            request_images_built,
            params,
            start_to_close_timeout=params.gettimeout("request_build_timeout"),
        )
        # try multiple times to get the results or status
        await workflow.execute_activity(
            await_build_exists,
            params,
            start_to_close_timeout=params.gettimeout("request_build_timeout"),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
        )
        await workflow.execute_activity(
            await_build_complete,
            params,
            start_to_close_timeout=params.gettimeout("build_complete_timeout"),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
        )
        # return a default failure state if the build was aborted
        build_status = await workflow.execute_activity(
            fetch_build_status,
            params,
            start_to_close_timeout=params.gettimeout("build_complete_timeout"),
            retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts),
        )
        # every requested image starts as failed until proven otherwise
        image_results: dict[str, bool] = {k: False for k in aslist(params.image_name)}
        if build_status.lower() != "aborted":
            try:
                # return pass/fail status for image/images being built
                results = await workflow.execute_activity(
                    fetch_build_and_result,
                    params,
                    start_to_close_timeout=params.gettimeout("get_results_timeout"),
                    retry_policy=RetryPolicy(
                        maximum_attempts=params.max_retry_attempts
                    ),
                )
                # these should never require a retry
                mapping = await workflow.execute_activity(
                    fetch_image_mapping,
                    params,
                    start_to_close_timeout=params.gettimeout(),
                )
                image_results = await workflow.execute_activity(
                    fetch_image_built_status,
                    {
                        "results": results,
                        "image": aslist(params.image_name),
                        "mapping": mapping,
                    },
                    start_to_close_timeout=params.gettimeout("return_status_timeout"),
                )
            except Exception as e:
                # best-effort: failing to collect results leaves the default
                # all-failed mapping instead of failing the whole workflow
                workflow.logger.exception(e)
        return {
            "build_num": params.build_num,
            "image_results": image_results,
        }
1834 | 153 | |||
1835 | 154 | |||
# registration lists consumed by image_building_worker.start_worker
activities = [
    check_jenkins_reachable,
    await_build_exists,
    await_build_complete,
    request_images_built,
    fetch_build_status,
    fetch_build_and_result,
    fetch_image_mapping,
    fetch_image_built_status,
]
workflows = [image_building_workflow]
1847 | diff --git a/temporal/image_reporting_worker.py b/temporal/image_reporting_worker.py | |||
1848 | 0 | new file mode 100644 | 166 | new file mode 100644 |
1849 | index 0000000..bd1f08b | |||
1850 | --- /dev/null | |||
1851 | +++ b/temporal/image_reporting_worker.py | |||
1852 | @@ -0,0 +1,10 @@ | |||
from common_tasks import start_worker
from image_reporting_workflow import activities as image_reporting_activities
from image_reporting_workflow import workflows as image_reporting_workflows

if __name__ == "__main__":
    # Serve the image-reporting workflows on the image_reporting queue.
    start_worker(
        "image_reporting", image_reporting_workflows, image_reporting_activities
    )
1863 | diff --git a/temporal/image_reporting_workflow.py b/temporal/image_reporting_workflow.py | |||
1864 | 0 | new file mode 100644 | 11 | new file mode 100644 |
1865 | index 0000000..05d5b63 | |||
1866 | --- /dev/null | |||
1867 | +++ b/temporal/image_reporting_workflow.py | |||
1868 | @@ -0,0 +1,450 @@ | |||
import copy
import os
import re
from dataclasses import dataclass, field
from typing import Any

import yaml
from build_results import (
    FeatureStatus,
    ImageTestResults,
    TestStatus,
    checkout_and_commit,
    determine_feature_state,
    execute,
)
from common_tasks import (
    check_jenkins_reachable,
    get_build,
    get_config,
    get_logs,
    get_results,
    workflow_parameters,
)
from temporalio import activity, workflow
from temporalio.common import RetryPolicy
1894 | 26 | |||
# names of the test steps whose results get parsed out of the job artifacts
STEPS_TO_PARSE = ["deploy", "test_image"]
1896 | 28 | |||
1897 | 29 | |||
@dataclass
class image_reporting_param(workflow_parameters):
    """Parameters for the image-reporting child workflow."""

    # per-image summary produced upstream (see e2e fetch_image_details)
    image_details: None | dict[str, Any] = None

    # Jenkins job whose builds/artifacts are read when reporting
    job_name: str = "maas-automated-image-tester"

    # local checkout location of the results repository
    repo_location: str = "image_results_repo"

    # snap channel MAAS was installed from (recorded alongside the results)
    maas_snap_channel: str = "latest/edge"

    # overwrite previously recorded results when set
    overwrite_results: bool = False
1909 | 41 | |||
1910 | 42 | |||
1911 | 43 | @dataclass | ||
1912 | 44 | class Filtered_Results: | ||
1913 | 45 | # image: arch: step: data | ||
1914 | 46 | data: dict[str, Any] = {} | ||
1915 | 47 | |||
1916 | 48 | def _add_image_(self, image: str) -> None: | ||
1917 | 49 | if image not in self.data: | ||
1918 | 50 | self.data[image] = {} | ||
1919 | 51 | if "state" not in self.data[image]: | ||
1920 | 52 | self.data[image]["state"] = TestStatus() | ||
1921 | 53 | |||
1922 | 54 | def add_result( | ||
1923 | 55 | self, image: str, arch: str, step: str, data: dict[str, Any], status: TestStatus | ||
1924 | 56 | ) -> None: | ||
1925 | 57 | self._add_image_(image) | ||
1926 | 58 | if arch not in self.data[image]: | ||
1927 | 59 | self.data[image][arch] = {} | ||
1928 | 60 | self.data[image][arch][step] = data | ||
1929 | 61 | self.data[image]["state"] += status | ||
1930 | 62 | |||
1931 | 63 | def to_dict(self) -> dict[str, Any]: | ||
1932 | 64 | data = copy.deepcopy(self.data) | ||
1933 | 65 | # convert statuses to dicts | ||
1934 | 66 | for image, image_data in data.items(): | ||
1935 | 67 | status: TestStatus = image_data["state"] | ||
1936 | 68 | data[image]["state"] = status.to_dict() | ||
1937 | 69 | # return | ||
1938 | 70 | return data | ||
1939 | 71 | |||
1940 | 72 | |||
1941 | 73 | def image_from_osytem_oseries( | ||
1942 | 74 | params: image_reporting_param, | ||
1943 | 75 | osystem: str, | ||
1944 | 76 | oseries: str, | ||
1945 | 77 | job_name: str | None = None, | ||
1946 | 78 | build_num: str | int | None = None, | ||
1947 | 79 | ) -> str: | ||
1948 | 80 | cfg = get_config( | ||
1949 | 81 | params, job_name=job_name, build_num=int(build_num) if build_num else None | ||
1950 | 82 | ) | ||
1951 | 83 | images = cfg.get("image-tests", {}) | ||
1952 | 84 | return [ | ||
1953 | 85 | str(k) | ||
1954 | 86 | for k, v in images.items() | ||
1955 | 87 | if v["osystem"] == osystem and v["oseries"] == oseries | ||
1956 | 88 | ][0] | ||
1957 | 89 | |||
1958 | 90 | |||
1959 | 91 | @activity.defn | ||
1960 | 92 | async def get_test_numbers(params: dict[str, Any]) -> dict[str, dict[str, Any]]: | ||
1961 | 93 | parameters = image_reporting_param(**params["params"]) | ||
1962 | 94 | image_details: dict[str, Any] = params["image_details"] | ||
1963 | 95 | |||
1964 | 96 | test_details: dict[str, dict[str, str | bool]] = {} | ||
1965 | 97 | test_numbers = list(set(details["test_num"] for details in image_details.values())) | ||
1966 | 98 | for test_num in test_numbers: | ||
1967 | 99 | if test_num: | ||
1968 | 100 | this_test = get_build(parameters, build_num=int(test_num)) | ||
1969 | 101 | test_details[str(test_num)] = { | ||
1970 | 102 | "status": str(this_test.get_status()), | ||
1971 | 103 | "has_results": bool(this_test.has_resultset()), | ||
1972 | 104 | } | ||
1973 | 105 | return test_details | ||
1974 | 106 | |||
1975 | 107 | |||
1976 | 108 | @activity.defn | ||
1977 | 109 | async def fetch_maas_version_from_logs( | ||
1978 | 110 | params: dict[str, Any], | ||
1979 | 111 | ) -> dict[str, dict[str, str]]: | ||
1980 | 112 | """MAAS version from a test log: ie: ["3.5","3.5.0~alpha1-14542-g.6d2c926d8"]""" | ||
1981 | 113 | parameters = image_reporting_param(**params["params"]) | ||
1982 | 114 | tests: list[str] = params["tests"] | ||
1983 | 115 | |||
1984 | 116 | maas_snap_info = str(execute(["snap", "info", "maas"]).stdout) | ||
1985 | 117 | long_version, short_version = ("", "") | ||
1986 | 118 | if search := re.search( | ||
1987 | 119 | rf"{parameters.maas_snap_channel}\:\s+((\d+\.\d+)\.\d+[^\s]+)", maas_snap_info | ||
1988 | 120 | ): | ||
1989 | 121 | long_version, short_version = search.groups() | ||
1990 | 122 | |||
1991 | 123 | versions: dict[str, dict[str, str]] = { | ||
1992 | 124 | "None": {"short": short_version, "long": long_version}, | ||
1993 | 125 | } | ||
1994 | 126 | for test in tests: | ||
1995 | 127 | test_logs = get_logs(parameters, build_num=int(test)) | ||
1996 | 128 | log = [v for k, v in test_logs.items() if k == "env_builder"][0] | ||
1997 | 129 | if search := re.search( | ||
1998 | 130 | r"maas\-client\: \|maas\s+((\d+\.\d+)\.\d+[^\s]+).*canonical\*", log | ||
1999 | 131 | ): | ||
2000 | 132 | long_version, short_version = search.groups() | ||
2001 | 133 | versions[test] = {"short": short_version, "long": long_version} | ||
2002 | 134 | continue | ||
2003 | 135 | raise Exception("Cannot determine MAAS version.") | ||
2004 | 136 | return versions | ||
2005 | 137 | |||
2006 | 138 | |||
2007 | 139 | @activity.defn | ||
2008 | 140 | async def filter_test_results(params: dict[str, Any]) -> dict[str, Any]: | ||
2009 | 141 | parameters = image_reporting_param(**params["params"]) | ||
2010 | 142 | test_num: str = params["test_num"] | ||
2011 | 143 | filtered_result = Filtered_Results() | ||
2012 | 144 | log = ( | ||
2013 | 145 | get_logs(parameters, build_num=int(test_num)) | ||
2014 | 146 | .get("tests_per_machine", "") | ||
2015 | 147 | .split("\n") | ||
2016 | 148 | ) | ||
2017 | 149 | results = get_results(parameters, build_num=int(test_num)) | ||
2018 | 150 | for test_name, test_result in results.items(): | ||
2019 | 151 | if "test_full_circle" not in test_name: | ||
2020 | 152 | continue | ||
2021 | 153 | if search := re.search(r"\[(.*)\.(.*)\-(.*)\/(.*)\-(.*)\]", test_name): | ||
2022 | 154 | machine, arch, osystem, oseries, step = search.groups() | ||
2023 | 155 | if step.lower() not in STEPS_TO_PARSE: | ||
2024 | 156 | continue | ||
2025 | 157 | |||
2026 | 158 | image = image_from_osytem_oseries( | ||
2027 | 159 | parameters, osystem, oseries, build_num=int(test_num) | ||
2028 | 160 | ) | ||
2029 | 161 | |||
2030 | 162 | this_status = TestStatus(test_result["status"]) | ||
2031 | 163 | this_result = { | ||
2032 | 164 | "result": test_result, | ||
2033 | 165 | "state": this_status.to_dict(), | ||
2034 | 166 | "error": test_result["errorDetails"], | ||
2035 | 167 | "error_trace": test_result["errorStackTrace"], | ||
2036 | 168 | "log": [line for line in log if test_result["name"] in line], | ||
2037 | 169 | } | ||
2038 | 170 | filtered_result.add_result(image, arch, step, this_result, this_status) | ||
2039 | 171 | # pack the results status so it is serialisable | ||
2040 | 172 | return filtered_result.to_dict() | ||
2041 | 173 | |||
2042 | 174 | |||
2043 | 175 | @activity.defn | ||
2044 | 176 | async def parse_test_results(params: dict[str, Any]) -> dict[str, Any]: | ||
2045 | 177 | maas_version: str = params["maas_version"] | ||
2046 | 178 | image_details: dict[str, Any] = params["image_details"] | ||
2047 | 179 | filtered_results: dict[str, Any] = params["results"] | ||
2048 | 180 | results: dict[str, Any] = {} | ||
2049 | 181 | |||
2050 | 182 | def get_step_from_results( | ||
2051 | 183 | image_results: dict[str, Any], step: str | ||
2052 | 184 | ) -> dict[str, Any]: | ||
2053 | 185 | arches = set(image_results.keys()) - {"state"} | ||
2054 | 186 | return { | ||
2055 | 187 | arch: image_results[arch].get(step) | ||
2056 | 188 | for arch in arches | ||
2057 | 189 | if step in image_results[arch] | ||
2058 | 190 | } | ||
2059 | 191 | |||
2060 | 192 | for image, this_image_result in filtered_results.items(): | ||
2061 | 193 | this_image_details: dict[str, Any] = image_details[image] | ||
2062 | 194 | packer_version: str = this_image_details["packer_version"] | ||
2063 | 195 | prereq: list[str] = this_image_details["prerequisites"] | ||
2064 | 196 | arches = set(this_image_result.keys()) - {"state"} | ||
2065 | 197 | image_results = ImageTestResults( | ||
2066 | 198 | image=image, | ||
2067 | 199 | maas_version=[maas_version], | ||
2068 | 200 | readable_state=this_image_result["state"]["state"], | ||
2069 | 201 | tested_arches=list(arches), | ||
2070 | 202 | packer_version=[packer_version], | ||
2071 | 203 | prerequisites=prereq, | ||
2072 | 204 | ) | ||
2073 | 205 | |||
2074 | 206 | # check for the deployment state | ||
2075 | 207 | if deployed := get_step_from_results(this_image_result, "deploy"): | ||
2076 | 208 | # Image deployment | ||
2077 | 209 | if deploy_state := sum( | ||
2078 | 210 | TestStatus(**arch["state"]) for arch in deployed.values() | ||
2079 | 211 | ): | ||
2080 | 212 | deployable = FeatureStatus( | ||
2081 | 213 | name="Deployable", | ||
2082 | 214 | state=deploy_state._is_positive_, | ||
2083 | 215 | readable_state=deploy_state._state_, | ||
2084 | 216 | info="All machines deployed" | ||
2085 | 217 | if deploy_state._is_positive_ | ||
2086 | 218 | else "; ".join( | ||
2087 | 219 | f"{name}:{arch['error']}" | ||
2088 | 220 | for name, arch in deployed.items() | ||
2089 | 221 | if arch["error"] | ||
2090 | 222 | ), | ||
2091 | 223 | ) | ||
2092 | 224 | image_results.deployable = deployable # type: ignore[attr-defined] | ||
2093 | 225 | # check to see if we did any tests of the image after it deployed | ||
2094 | 226 | if image_tests := get_step_from_results(this_image_result, "test_image"): | ||
2095 | 227 | # storage configuration | ||
2096 | 228 | if storage_state := determine_feature_state("storage layout", image_tests): | ||
2097 | 229 | state, readable, info = storage_state | ||
2098 | 230 | storage_conf = FeatureStatus( | ||
2099 | 231 | "Storage Configuration", | ||
2100 | 232 | state=state, | ||
2101 | 233 | readable_state=readable, | ||
2102 | 234 | info=info, | ||
2103 | 235 | ) | ||
2104 | 236 | image_results.storage_conf = storage_conf # type:ignore[attr-defined] | ||
2105 | 237 | # network configuration | ||
2106 | 238 | if network_state := determine_feature_state("network layout", image_tests): | ||
2107 | 239 | state, readable, info = network_state | ||
2108 | 240 | net_conf = FeatureStatus( | ||
2109 | 241 | "Network Configuration", | ||
2110 | 242 | state=state, | ||
2111 | 243 | readable_state=readable, | ||
2112 | 244 | info=info, | ||
2113 | 245 | ) | ||
2114 | 246 | image_results.net_conf = net_conf # type:ignore[attr-defined] | ||
2115 | 247 | # add to image results list | ||
2116 | 248 | results |= image_results.to_dict() | ||
2117 | 249 | return results | ||
2118 | 250 | |||
2119 | 251 | |||
2120 | 252 | @activity.defn | ||
2121 | 253 | async def parse_failed_images(params: dict[str, Any]) -> dict[str, Any]: | ||
2122 | 254 | maas_version: dict[str, dict[str, str]] = params["maas_version"] | ||
2123 | 255 | image_details: dict[str, Any] = params["image_details"] | ||
2124 | 256 | passed_images: list[str] = params["passed_images"] | ||
2125 | 257 | results: dict[str, Any] = {} | ||
2126 | 258 | |||
2127 | 259 | default_maas_version = maas_version["None"] | ||
2128 | 260 | |||
2129 | 261 | # report on images that failed one of the steps | ||
2130 | 262 | for image, details in image_details.items(): | ||
2131 | 263 | # don't report on images we've already recovered test statuses for | ||
2132 | 264 | if image in passed_images: | ||
2133 | 265 | continue | ||
2134 | 266 | |||
2135 | 267 | test_num = str(details["test_num"]) | ||
2136 | 268 | |||
2137 | 269 | readable_state = "Unkown Error" | ||
2138 | 270 | if not details["built"]: | ||
2139 | 271 | readable_state = "Could not build image" | ||
2140 | 272 | elif not details["tested"]: | ||
2141 | 273 | readable_state = "Could not test image" | ||
2142 | 274 | results |= ImageTestResults( | ||
2143 | 275 | image=image, | ||
2144 | 276 | maas_version=[maas_version.get(test_num, default_maas_version)["short"]], | ||
2145 | 277 | readable_state=readable_state, | ||
2146 | 278 | packer_version=[details["packer_version"]], | ||
2147 | 279 | prerequisites=details["prerequisites"], | ||
2148 | 280 | ).to_dict() | ||
2149 | 281 | return results | ||
2150 | 282 | |||
2151 | 283 | |||
2152 | 284 | @activity.defn | ||
2153 | 285 | async def post_test_results(params: dict[str, Any]) -> None: | ||
2154 | 286 | image_results: dict[str, Any] = params["image_results"] | ||
2155 | 287 | maas_version: dict[str, dict[str, str]] = params["maas_version"] | ||
2156 | 288 | repo_location: str = params["repo_location"] | ||
2157 | 289 | image_details: dict[str, Any] = params["image_details"] | ||
2158 | 290 | overwrite_results: bool = params["overwrite_results"] | ||
2159 | 291 | # clone the results repo | ||
2160 | 292 | if not os.path.exists(repo_location): | ||
2161 | 293 | execute( | ||
2162 | 294 | [ | ||
2163 | 295 | "git", | ||
2164 | 296 | "clone", | ||
2165 | 297 | "https://github.com/maas/MAAS-Image-Results", | ||
2166 | 298 | repo_location, | ||
2167 | 299 | ] | ||
2168 | 300 | ) | ||
2169 | 301 | |||
2170 | 302 | # read the combined results | ||
2171 | 303 | combined_results: dict[str, dict[str, Any]] = {"images": {}} | ||
2172 | 304 | combined_results_path = f"{repo_location}/image_results.yaml" | ||
2173 | 305 | with open(combined_results_path, "r") as result_file: | ||
2174 | 306 | if old_results := yaml.safe_load(result_file): | ||
2175 | 307 | combined_results = old_results | ||
2176 | 308 | |||
2177 | 309 | test_nums = set() | ||
2178 | 310 | # write the results for each image | ||
2179 | 311 | for image, image_results in params["image_results"].items(): | ||
2180 | 312 | this_result_path = f"{repo_location}/{image}.yaml" | ||
2181 | 313 | results: ImageTestResults = ImageTestResults().from_dict({image: image_results}) | ||
2182 | 314 | details: dict[str, Any] = image_details[image] | ||
2183 | 315 | this_test_num: str = str(details["test_num"]) | ||
2184 | 316 | default_maas_version = maas_version["None"] | ||
2185 | 317 | this_maas_version: str = maas_version.get(this_test_num, default_maas_version)[ | ||
2186 | 318 | "long" | ||
2187 | 319 | ] | ||
2188 | 320 | test_nums.add(int(this_test_num)) | ||
2189 | 321 | |||
2190 | 322 | with checkout_and_commit( | ||
2191 | 323 | branch=image, | ||
2192 | 324 | commit_message=f"{image} results: {this_maas_version} - {this_test_num}", | ||
2193 | 325 | add_file=this_result_path, | ||
2194 | 326 | cwd=repo_location, | ||
2195 | 327 | ): | ||
2196 | 328 | if os.path.exists(this_result_path) and not overwrite_results: | ||
2197 | 329 | with open(this_result_path, "r") as result_file: | ||
2198 | 330 | if old_results := yaml.safe_load(result_file): | ||
2199 | 331 | results += ImageTestResults().from_dict(old_results) | ||
2200 | 332 | |||
2201 | 333 | if combined_results["images"]: | ||
2202 | 334 | combined_results["images"] |= results.to_dict() | ||
2203 | 335 | else: | ||
2204 | 336 | combined_results["images"] = results.to_dict() | ||
2205 | 337 | |||
2206 | 338 | with open(this_result_path, "w") as result_file: | ||
2207 | 339 | yaml.safe_dump(results.to_dict(), result_file) | ||
2208 | 340 | |||
2209 | 341 | tested_builds = ( | ||
2210 | 342 | f"{min(test_nums)} - {max(test_nums)}" if len(test_nums) > 1 else f"{test_nums}" | ||
2211 | 343 | ) | ||
2212 | 344 | |||
2213 | 345 | # write the combined results to main | ||
2214 | 346 | with checkout_and_commit( | ||
2215 | 347 | branch="main", | ||
2216 | 348 | commit_message=f"Combined results: {tested_builds}", | ||
2217 | 349 | add_file=combined_results_path, | ||
2218 | 350 | cwd=repo_location, | ||
2219 | 351 | ), open(combined_results_path, "w") as result_file: | ||
2220 | 352 | yaml.safe_dump(combined_results, result_file) | ||
2221 | 353 | |||
2222 | 354 | |||
2223 | 355 | @workflow.defn | ||
2224 | 356 | class image_reporting_workflow: | ||
2225 | 357 | @workflow.run | ||
2226 | 358 | async def run(self, params: image_reporting_param) -> None: | ||
2227 | 359 | if not params.image_details: | ||
2228 | 360 | raise Exception("No Image details provided") | ||
2229 | 361 | # await an open connection to the server | ||
2230 | 362 | await workflow.execute_activity( | ||
2231 | 363 | check_jenkins_reachable, | ||
2232 | 364 | params, | ||
2233 | 365 | start_to_close_timeout=params.gettimeout("jenkins_login_timeout"), | ||
2234 | 366 | ) | ||
2235 | 367 | test_numbers = await workflow.execute_activity( | ||
2236 | 368 | get_test_numbers, | ||
2237 | 369 | { | ||
2238 | 370 | "image_details": params.image_details, | ||
2239 | 371 | "params": params, | ||
2240 | 372 | }, | ||
2241 | 373 | start_to_close_timeout=params.gettimeout("log_details_timeout"), | ||
2242 | 374 | retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts), | ||
2243 | 375 | ) | ||
2244 | 376 | maas_versions = await workflow.execute_activity( | ||
2245 | 377 | fetch_maas_version_from_logs, | ||
2246 | 378 | { | ||
2247 | 379 | "params": params, | ||
2248 | 380 | "tests": list(test_numbers.keys()), | ||
2249 | 381 | }, | ||
2250 | 382 | start_to_close_timeout=params.gettimeout("log_details_timeout"), | ||
2251 | 383 | retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts), | ||
2252 | 384 | ) | ||
2253 | 385 | results_to_report: dict[str, Any] = {} | ||
2254 | 386 | for test_num, test_details in test_numbers.items(): | ||
2255 | 387 | # if the tests completed and results are available. | ||
2256 | 388 | if ( | ||
2257 | 389 | test_details["status"].lower() != "aborted" | ||
2258 | 390 | and test_details["has_results"] | ||
2259 | 391 | ): | ||
2260 | 392 | results = await workflow.execute_activity( | ||
2261 | 393 | filter_test_results, | ||
2262 | 394 | {"params": params, "test_num": test_num}, | ||
2263 | 395 | start_to_close_timeout=params.gettimeout("fetch_results_timeout"), | ||
2264 | 396 | retry_policy=RetryPolicy( | ||
2265 | 397 | maximum_attempts=params.max_retry_attempts | ||
2266 | 398 | ), | ||
2267 | 399 | ) | ||
2268 | 400 | default_maas_version = maas_versions["None"] | ||
2269 | 401 | results_to_report |= await workflow.execute_activity( | ||
2270 | 402 | parse_test_results, | ||
2271 | 403 | { | ||
2272 | 404 | "maas_version": maas_versions.get( | ||
2273 | 405 | test_num, default_maas_version | ||
2274 | 406 | )["short"], | ||
2275 | 407 | "image_details": params.image_details, | ||
2276 | 408 | "results": results, | ||
2277 | 409 | }, | ||
2278 | 410 | start_to_close_timeout=params.gettimeout("fetch_results_timeout"), | ||
2279 | 411 | retry_policy=RetryPolicy( | ||
2280 | 412 | maximum_attempts=params.max_retry_attempts | ||
2281 | 413 | ), | ||
2282 | 414 | ) | ||
2283 | 415 | # add any images that didn't test | ||
2284 | 416 | results_to_report |= await workflow.execute_activity( | ||
2285 | 417 | parse_failed_images, | ||
2286 | 418 | { | ||
2287 | 419 | "image_details": params.image_details, | ||
2288 | 420 | "maas_version": maas_versions, | ||
2289 | 421 | "passed_images": list(results_to_report.keys()), | ||
2290 | 422 | }, | ||
2291 | 423 | start_to_close_timeout=params.gettimeout("fetch_results_timeout"), | ||
2292 | 424 | retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts), | ||
2293 | 425 | ) | ||
2294 | 426 | # only try to upload once. | ||
2295 | 427 | await workflow.execute_activity( | ||
2296 | 428 | post_test_results, | ||
2297 | 429 | { | ||
2298 | 430 | "image_results": results_to_report, | ||
2299 | 431 | "maas_version": maas_versions, | ||
2300 | 432 | "repo_location": params.repo_location, | ||
2301 | 433 | "image_details": params.image_details, | ||
2302 | 434 | "overwrite_results": params.overwrite_results, | ||
2303 | 435 | }, | ||
2304 | 436 | start_to_close_timeout=params.gettimeout("fetch_results_timeout"), | ||
2305 | 437 | retry_policy=RetryPolicy(maximum_attempts=1), | ||
2306 | 438 | ) | ||
2307 | 439 | |||
2308 | 440 | |||
2309 | 441 | activities = [ | ||
2310 | 442 | check_jenkins_reachable, | ||
2311 | 443 | get_test_numbers, | ||
2312 | 444 | fetch_maas_version_from_logs, | ||
2313 | 445 | filter_test_results, | ||
2314 | 446 | parse_test_results, | ||
2315 | 447 | parse_failed_images, | ||
2316 | 448 | post_test_results, | ||
2317 | 449 | ] | ||
2318 | 450 | workflows = [image_reporting_workflow] | ||
2319 | diff --git a/temporal/image_testing_worker.py b/temporal/image_testing_worker.py | |||
2320 | 0 | new file mode 100644 | 451 | new file mode 100644 |
2321 | index 0000000..f28bb23 | |||
2322 | --- /dev/null | |||
2323 | +++ b/temporal/image_testing_worker.py | |||
2324 | @@ -0,0 +1,10 @@ | |||
2325 | 1 | from common_tasks import start_worker | ||
2326 | 2 | from image_testing_workflow import activities as image_test_activities | ||
2327 | 3 | from image_testing_workflow import workflows as image_test_workflows | ||
2328 | 4 | |||
2329 | 5 | if __name__ == "__main__": | ||
2330 | 6 | start_worker( | ||
2331 | 7 | task_queue="image_testing", | ||
2332 | 8 | workflows=image_test_workflows, | ||
2333 | 9 | activities=image_test_activities, | ||
2334 | 10 | ) | ||
2335 | diff --git a/temporal/image_testing_workflow.py b/temporal/image_testing_workflow.py | |||
2336 | 0 | new file mode 100644 | 11 | new file mode 100644 |
2337 | index 0000000..a587b4b | |||
2338 | --- /dev/null | |||
2339 | +++ b/temporal/image_testing_workflow.py | |||
2340 | @@ -0,0 +1,100 @@ | |||
2341 | 1 | from dataclasses import dataclass | ||
2342 | 2 | from typing import Any | ||
2343 | 3 | |||
2344 | 4 | from common_tasks import ( | ||
2345 | 5 | aslist, | ||
2346 | 6 | await_build_complete, | ||
2347 | 7 | await_build_exists, | ||
2348 | 8 | check_jenkins_reachable, | ||
2349 | 9 | fetch_build_status, | ||
2350 | 10 | request_build, | ||
2351 | 11 | workflow_parameters, | ||
2352 | 12 | ) | ||
2353 | 13 | from temporalio import activity, workflow | ||
2354 | 14 | from temporalio.common import RetryPolicy | ||
2355 | 15 | |||
2356 | 16 | |||
2357 | 17 | @dataclass | ||
2358 | 18 | class image_testing_param(workflow_parameters): | ||
2359 | 19 | image_name: str | list[str] = "" # allow builk image building if desired | ||
2360 | 20 | |||
2361 | 21 | job_name: str = ( | ||
2362 | 22 | "maas-automated-image-tester" # Need to check which job actually does this | ||
2363 | 23 | ) | ||
2364 | 24 | build_num: int = -1 | ||
2365 | 25 | |||
2366 | 26 | # job details with default values we may want to change | ||
2367 | 27 | system_test_repo: str = ( | ||
2368 | 28 | "https://git.launchpad.net/~maas-committers/maas-ci/+git/system-tests" | ||
2369 | 29 | ) | ||
2370 | 30 | system_test_branch: str = "master" | ||
2371 | 31 | |||
2372 | 32 | maas_snap_channel: str = "latest/edge" | ||
2373 | 33 | |||
2374 | 34 | parallel_tests: bool = False | ||
2375 | 35 | |||
2376 | 36 | |||
2377 | 37 | @activity.defn | ||
2378 | 38 | async def request_images_test(params: image_testing_param) -> int: | ||
2379 | 39 | """Start an image testing job, returning the job number.""" | ||
2380 | 40 | job_params: dict[str, Any] = { | ||
2381 | 41 | "IMAGE_NAMES": ",".join(image for image in aslist(params.image_name)), | ||
2382 | 42 | "SYSTEMTESTS_GIT_REPO": params.system_test_repo, | ||
2383 | 43 | "SYSTEMTESTS_GIT_BRANCH": params.system_test_branch, | ||
2384 | 44 | "MAAS_SNAP_CHANNEL": params.maas_snap_channel, | ||
2385 | 45 | } | ||
2386 | 46 | return request_build(params, job_params) | ||
2387 | 47 | |||
2388 | 48 | |||
2389 | 49 | @workflow.defn | ||
2390 | 50 | class image_testing_workflow: | ||
2391 | 51 | @workflow.run | ||
2392 | 52 | async def run(self, params: image_testing_param) -> dict[str, Any]: | ||
2393 | 53 | # await an open connection to the server | ||
2394 | 54 | await workflow.execute_activity( | ||
2395 | 55 | check_jenkins_reachable, | ||
2396 | 56 | params, | ||
2397 | 57 | start_to_close_timeout=params.gettimeout("jenkins_login_timeout"), | ||
2398 | 58 | ) | ||
2399 | 59 | # test the image, only trigger once | ||
2400 | 60 | params.build_num = await workflow.execute_activity( | ||
2401 | 61 | request_images_test, | ||
2402 | 62 | params, | ||
2403 | 63 | start_to_close_timeout=params.gettimeout("request_build_timeout"), | ||
2404 | 64 | ) | ||
2405 | 65 | # try multiple times to get the results or status | ||
2406 | 66 | await workflow.execute_activity( | ||
2407 | 67 | await_build_exists, | ||
2408 | 68 | params, | ||
2409 | 69 | start_to_close_timeout=params.gettimeout("request_build_timeout"), | ||
2410 | 70 | retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts), | ||
2411 | 71 | ) | ||
2412 | 72 | await workflow.execute_activity( | ||
2413 | 73 | await_build_complete, | ||
2414 | 74 | params, | ||
2415 | 75 | start_to_close_timeout=params.gettimeout("build_complete_timeout"), | ||
2416 | 76 | retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts), | ||
2417 | 77 | ) | ||
2418 | 78 | # return a default failure state if the build was aborted | ||
2419 | 79 | build_status = await workflow.execute_activity( | ||
2420 | 80 | fetch_build_status, | ||
2421 | 81 | params, | ||
2422 | 82 | start_to_close_timeout=params.gettimeout("build_complete_timeout"), | ||
2423 | 83 | retry_policy=RetryPolicy(maximum_attempts=params.max_retry_attempts), | ||
2424 | 84 | ) | ||
2425 | 85 | |||
2426 | 86 | # return the image details in the correct format | ||
2427 | 87 | return { | ||
2428 | 88 | image: {"build_num": params.build_num, "build_status": build_status} | ||
2429 | 89 | for image in aslist(params.image_name) | ||
2430 | 90 | } | ||
2431 | 91 | |||
2432 | 92 | |||
2433 | 93 | activities = [ | ||
2434 | 94 | check_jenkins_reachable, | ||
2435 | 95 | request_images_test, | ||
2436 | 96 | await_build_exists, | ||
2437 | 97 | await_build_complete, | ||
2438 | 98 | fetch_build_status, | ||
2439 | 99 | ] | ||
2440 | 100 | workflows = [image_testing_workflow] | ||
2441 | diff --git a/tox.ini b/tox.ini | |||
2442 | index e4efc18..f347a7b 100644 | |||
2443 | --- a/tox.ini | |||
2444 | +++ b/tox.ini | |||
2445 | @@ -66,8 +66,8 @@ description=Reformat Python code and README.md | |||
2446 | 66 | deps= -rrequirements.txt | 66 | deps= -rrequirements.txt |
2447 | 67 | skip_install = true | 67 | skip_install = true |
2448 | 68 | commands= | 68 | commands= |
2451 | 69 | isort --profile black systemtests utils | 69 | isort --profile black systemtests utils temporal |
2452 | 70 | black systemtests utils | 70 | black systemtests utils temporal |
2453 | 71 | cog -r README.md | 71 | cog -r README.md |
2454 | 72 | 72 | ||
2455 | 73 | [testenv:lint] | 73 | [testenv:lint] |
2456 | @@ -76,10 +76,10 @@ deps= -rrequirements.txt | |||
2457 | 76 | allowlist_externals=sh | 76 | allowlist_externals=sh |
2458 | 77 | skip_install = true | 77 | skip_install = true |
2459 | 78 | commands= | 78 | commands= |
2462 | 79 | isort --profile black --check-only systemtests utils | 79 | isort --profile black --check-only systemtests utils temporal |
2463 | 80 | black --check systemtests utils | 80 | black --check systemtests utils temporal |
2464 | 81 | cog --verbosity=0 --check README.md | 81 | cog --verbosity=0 --check README.md |
2466 | 82 | flake8 systemtests utils | 82 | flake8 systemtests utils temporal |
2467 | 83 | sh -c 'git ls-files \*.yaml\* | xargs -r yamllint' | 83 | sh -c 'git ls-files \*.yaml\* | xargs -r yamllint' |
2468 | 84 | 84 | ||
2469 | 85 | [testenv:mypy] | 85 | [testenv:mypy] |
2470 | @@ -95,6 +95,7 @@ deps= | |||
2471 | 95 | types-netaddr | 95 | types-netaddr |
2472 | 96 | commands= | 96 | commands= |
2473 | 97 | mypy -p systemtests -p utils --install-types | 97 | mypy -p systemtests -p utils --install-types |
2474 | 98 | mypy temporal | ||
2475 | 98 | 99 | ||
2476 | 99 | [testenv:generate_config] | 100 | [testenv:generate_config] |
2477 | 100 | description=Generate config.yaml | 101 | description=Generate config.yaml |
2478 | diff --git a/utils/gen_config.py b/utils/gen_config.py | |||
2479 | index 3a1a4cd..4ea3e5e 100755 | |||
2480 | --- a/utils/gen_config.py | |||
2481 | +++ b/utils/gen_config.py | |||
2482 | @@ -144,10 +144,14 @@ def main(argv: list[str]) -> int: | |||
2483 | 144 | packer_group.add_argument( | 144 | packer_group.add_argument( |
2484 | 145 | "--packer-repo", | 145 | "--packer-repo", |
2485 | 146 | type=str, | 146 | type=str, |
2486 | 147 | metavar="REPOS", | ||
2487 | 147 | help="Which git repository to use to get Packer from", | 148 | help="Which git repository to use to get Packer from", |
2488 | 148 | ) | 149 | ) |
2489 | 149 | packer_group.add_argument( | 150 | packer_group.add_argument( |
2491 | 150 | "--packer-branch", type=str, help="Which git branch use to get Packer" | 151 | "--packer-branch", |
2492 | 152 | type=str, | ||
2493 | 153 | metavar="BRANCH", | ||
2494 | 154 | help="Which git branch use to get Packer", | ||
2495 | 151 | ) | 155 | ) |
2496 | 152 | packer_group.add_argument( | 156 | packer_group.add_argument( |
2497 | 153 | "--packer-container-image", | 157 | "--packer-container-image", |
2498 | @@ -318,7 +322,7 @@ def main(argv: list[str]) -> int: | |||
2499 | 318 | # if running custom image tests, only use compatible machines | 322 | # if running custom image tests, only use compatible machines |
2500 | 319 | target_arches = ( | 323 | target_arches = ( |
2501 | 320 | args.architecture | 324 | args.architecture |
2503 | 321 | if not args.image_tests | 325 | if "image-tests" not in config |
2504 | 322 | else [image["architecture"] for image in config["image-tests"].values()] | 326 | else [image["architecture"] for image in config["image-tests"].values()] |
2505 | 323 | ) | 327 | ) |
2506 | 324 | # Filter out machines with architectures not matching specified ones. | 328 | # Filter out machines with architectures not matching specified ones. |
2507 | @@ -333,12 +337,12 @@ def main(argv: list[str]) -> int: | |||
2508 | 333 | machines["hardware"] = { | 337 | machines["hardware"] = { |
2509 | 334 | name: details | 338 | name: details |
2510 | 335 | for name, details in hardware.items() | 339 | for name, details in hardware.items() |
2512 | 336 | if name not in args.machine | 340 | if name in args.machine |
2513 | 337 | } | 341 | } |
2514 | 338 | 342 | ||
2516 | 339 | if args.vm_machine: | 343 | if vms: |
2517 | 340 | # Filter out VMs with name not listed in specified vm_machines | 344 | # Filter out VMs with name not listed in specified vm_machines |
2519 | 341 | if vms: | 345 | if args.vm_machine: |
2520 | 342 | vms["instances"] = { | 346 | vms["instances"] = { |
2521 | 343 | vm_name: vm_config | 347 | vm_name: vm_config |
2522 | 344 | for vm_name, vm_config in vms["instances"].items() | 348 | for vm_name, vm_config in vms["instances"].items() |
+1 on the merge once all branches are in