Merge ~cjdc/ubuntu-docker-images/+git/templates:add-skopeo-authfile into ~ubuntu-docker-images/ubuntu-docker-images/+git/templates:main

Proposed by Cristovao Cordeiro
Status: Merged
Merged at revision: 8e77445de94d5427288239f37e965eb167d71298
Proposed branch: ~cjdc/ubuntu-docker-images/+git/templates:add-skopeo-authfile
Merge into: ~ubuntu-docker-images/ubuntu-docker-images/+git/templates:main
Diff against target: 991 lines (+444/-451)
2 files modified
README.md (+2/-2)
generate_ubuntu_yaml.py (+442/-449)
Reviewer                     Review Type    Date Requested    Status
Samir Akarioh (community)                                     Approve
Ubuntu Docker Images                                          Pending
Review via email: mp+433043@code.launchpad.net

Commit message

fix: Skopeo needs an --authfile for private repos
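
For context, skopeo's --authfile flag points at a containers-auth.json style file that maps registries to base64-encoded credentials. A minimal standalone sketch of what the new run_skopeo_command helper does, with an illustrative registry and placeholder credentials (requires skopeo on PATH):

```python
# Sketch only: illustrative registry and credentials, not the project's values.
import base64
import json
import os
import subprocess
import tempfile

registry = "docker.io/rocksdev4staging/ubuntu"  # staging repo used by the script
auth_token = base64.b64encode(b"my-user:my-password").decode()  # Docker Hub style

with tempfile.TemporaryDirectory() as tmp_dir:
    auth_file = os.path.join(tmp_dir, "auth.json")
    with open(auth_file, "w") as f:
        os.fchmod(f.fileno(), 0o600)  # keep credentials readable only by the owner
        json.dump({"auths": {registry: {"auth": auth_token}}}, f)
    # skopeo picks the matching "auths" entry when talking to the registry
    manifest = subprocess.run(
        ["skopeo", "inspect", "--authfile", auth_file, "--raw",
         f"docker://{registry}:jammy"],
        capture_output=True, check=True, universal_newlines=True,
    ).stdout
```

Without the --authfile, skopeo falls back to anonymous access and fails against private repositories, which is what this merge proposal fixes.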

Samir Akarioh (samiraka) wrote:

You need to change the signatures of some functions.

review: Approve
Cristovao Cordeiro (cjdc) wrote:

> You need to change the signatures of some functions.

good catch. done, ty

Samir Akarioh (samiraka) wrote:

Good for me, you can merge it

Preview Diff

diff --git a/README.md b/README.md
index 29d4e38..6d86182 100644
--- a/README.md
+++ b/README.md
@@ -15,12 +15,12 @@ The DevContainer will provide you with a working environment out of the box. **Y
 ```bash
 git clone https://github.com/misterw97/RenderDown
 sudo apt update && sudo apt install -y python3-mako python3-yaml
-pip install boto3 # if you want to run the generate_ubuntu_yaml file
+pip install boto3 requests pyyaml && apt install -y distro-info # if you want to run the generate_ubuntu_yaml script
 ```
 
 #### Generate_ubuntu_yaml
 
-This script allows to generate the ubuntu.yaml file in order to use it by the RenderDown script. It uses the template ubuntu.yaml located in the template folder.
+This script allows us to generate the ubuntu.yaml file in order to use it by the RenderDown script. It uses the template ubuntu.yaml located in the template folder.
 
 Here are the available arguments and examples of commands: 
 
diff --git a/generate_ubuntu_yaml.py b/generate_ubuntu_yaml.py
index 6c70e7c..dacd6e5 100755
--- a/generate_ubuntu_yaml.py
+++ b/generate_ubuntu_yaml.py
@@ -1,525 +1,518 @@
1#!/usr/bin/env python31#!/usr/bin/env python3
22
3import argparse3import argparse
4import base64
4import datetime5import datetime
5import json6import json
6import logging7import logging
7import os8import os
8import subprocess9import subprocess
10import sys
9from typing import Dict, List11from typing import Dict, List
1012
11import boto313import boto3
12import requests14import requests
13import sys15import tempfile
14import yaml16import yaml
1517
16logging.basicConfig(stream=sys.stdout, level=logging.INFO)18logging.basicConfig(stream=sys.stdout, level=logging.INFO)
17NOW = datetime.datetime.now()
18SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))19SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
1920
2021
21def cli_args() -> argparse.ArgumentParser:22class GenerateUbuntuYaml:
22 """Argument parser"""23 def __init__(self):
23 parser = argparse.ArgumentParser(24 self.now = datetime.datetime.now()
24 description="Generate documentation about Ubuntu for ECR and DockerHub"25 self.validate_args()
25 )26 self.add_yaml_representer()
2627 self.build_image_endpoint()
27 parser.add_argument(28 self._skopeo_auth_token = None
28 "--provider",
29 default="docker",
30 dest="provider",
31 help="aws or docker",
32 required=True,
33 )
34 parser.add_argument(
35 "--username",
36 default="admin",
37 dest="username",
38 help="either the Docker Hub username, or the AWS access key ID",
39 required=True,
40 )
41 parser.add_argument(
42 "--password",
43 default="admin",
44 dest="password",
45 help="either the Docker Hub password/token, or the AWS secret access key",
46 required=True,
47 )
48 parser.add_argument(
49 "--token-docker",
50 dest="dockertoken",
51 default=None,
52 help="JWT token for Docker Hub authentication. \
53 Only useful for the 'docker' provider.",
54 )
55 parser.add_argument(
56 "--repository-basename",
57 dest="repository",
58 default=None,
59 help="repository basename of the ubuntu images. \
60 Used to infer existing information.",
61 )
62 parser.add_argument(
63 "--data-dir",
64 default="data",
65 dest="data_dir",
66 help="""The path of the folder
67 where the data file will be
68 saved ( if not exist, the script
69 will create the folder)""",
70 )
71 parser.add_argument(
72 "--unpublished-suite",
73 dest="unpublished_suite",
74 help="""an Ubuntu Suite (e.g. jammy).
75 if given we will take the
76 tags pass on command lines (required)
77 and the arches for this section
78 of the yaml file.
79 """,
80 )
81 parser.add_argument(
82 "--unpublished-tags",
83 dest="unpublished_tags",
84 help="""list of tags
85 (e.g. 'kinetic 22.10 22.10_edge kinetic)""",
86 )
87 parser.add_argument(
88 "--unpublished-archs",
89 dest="unpublished_archs",
90 help="list of archs (e.g amd64 arm)",
91 )
92
93 return parser
94
95
96def validate_args(
97 parser: argparse.ArgumentParser,
98) -> argparse.ArgumentParser.parse_args:
99 """Parse and validate the CLI arguments"""
100 args = parser.parse_args()
101 if any(
102 [
103 args.unpublished_suite is None,
104 args.unpublished_tags is None,
105 args.unpublished_archs is None,
106 ]
107 ) and not all(
108 [
109 args.unpublished_suite is None,
110 args.unpublished_tags is None,
111 args.unpublished_archs is None,
112 ]
113 ):
114 parser.error(
115 """--unpublished-suite need
116 --unpublished-archs and --unpublished_tags"""
117 )
118
119 return args
12029
30 @staticmethod
31 def cli_args() -> argparse.ArgumentParser:
32 """Argument parser"""
33 parser = argparse.ArgumentParser(
34 description="Generate documentation about Ubuntu for ECR and DockerHub"
35 )
12136
122def build_image_endpoint(provider: str, repo_base: str = None) -> (str, str):37 parser.add_argument(
123 """Define the image's registry URL"""38 "--provider",
124 if provider == "aws":39 default="docker",
125 registry_url = "docker://public.ecr.aws/"40 dest="provider",
126 staging_repo = "rocksdev"41 help="aws or docker",
127 else:42 required=True,
128 registry_url = "docker://docker.io/"43 )
129 staging_repo = "rocksdev4staging"44 parser.add_argument(
45 "--username",
46 default="admin",
47 dest="username",
48 help="either the Docker Hub username, or the AWS access key ID",
49 required=True,
50 )
51 parser.add_argument(
52 "--password",
53 default="admin",
54 dest="password",
55 help="either the Docker Hub password/token, or the AWS secret access key",
56 required=True,
57 )
58 parser.add_argument(
59 "--jwt-token-docker",
60 dest="jwt_token_docker",
61 default=None,
62 help="JWT token for Docker Hub authentication. \
63 Only useful for the 'docker' provider.",
64 )
65 parser.add_argument(
66 "--repository-basename",
67 dest="repository",
68 default=None,
69 help="repository basename of the ubuntu images. \
70 Used to infer existing information.",
71 )
72 parser.add_argument(
73 "--data-dir",
74 default="data",
75 dest="data_dir",
76 help="""The path of the folder
77 where the data file will be
78 saved ( if not exist, the script
79 will create the folder)""",
80 )
81 parser.add_argument(
82 "--unpublished-suite",
83 dest="unpublished_suite",
84 help="""an Ubuntu Suite (e.g. jammy).
85 if given we will take the
86 tags pass on command lines (required)
87 and the arches for this section
88 of the yaml file.
89 """,
90 )
91 parser.add_argument(
92 "--unpublished-tags",
93 dest="unpublished_tags",
94 help="""list of tags
95 (e.g. 'kinetic 22.10 22.10_edge kinetic)""",
96 )
97 parser.add_argument(
98 "--unpublished-archs",
99 dest="unpublished_archs",
100 help="list of archs (e.g amd64 arm)",
101 )
130102
131 if repo_base is None:103 return parser
132 logging.warning("Using staging repository")104
133 url = f"{registry_url}{staging_repo}/ubuntu"105 def validate_args(self) -> None:
134 namespace = staging_repo106 """Parse and validate the CLI arguments"""
135 else:107 parser = self.cli_args()
136 url = f"{registry_url}{repo_base}/ubuntu"108 parser.parse_args(namespace=self)
137 namespace = repo_base109 if any(
110 [
111 self.unpublished_suite is None,
112 self.unpublished_tags is None,
113 self.unpublished_archs is None,
114 ]
115 ) and not all(
116 [
117 self.unpublished_suite is None,
118 self.unpublished_tags is None,
119 self.unpublished_archs is None,
120 ]
121 ):
122 parser.error(
123 """--unpublished-suite need
124 --unpublished-archs and --unpublished_tags"""
125 )
138126
139 logging.info(f"Using {url} to collect information")127 def build_image_endpoint(self) -> None:
128 """Define the image's registry URL"""
129 if self.provider == "aws":
130 registry_url = "public.ecr.aws/"
131 staging_repo = "rocksdev"
132 else:
133 registry_url = "docker.io/"
134 staging_repo = "rocksdev4staging"
140135
141 return url, namespace136 if self.repository is None:
137 logging.warning("Using staging repository")
138 self.url = f"{registry_url}{staging_repo}/ubuntu"
139 self.namespace = staging_repo
140 else:
141 self.url = f"{registry_url}{self.repository}/ubuntu"
142 self.namespace = self.repository
143
144 logging.info(f"Using {self.url} to collect information")
145
146 @staticmethod
147 def add_yaml_representer() -> None:
148 def str_presenter(dumper, data):
149 """
150 Permit to format
151 multiline string into
152 yaml file
153 """
154
155 c = "tag:yaml.org,2002:str"
156 if len(data.splitlines()) > 1: # check for multiline string
157 return dumper.represent_scalar(c, data, style="|")
158 return dumper.represent_scalar(c, data)
159
160 yaml.add_representer(str, str_presenter)
161 yaml.representer.SafeRepresenter.add_representer(str, str_presenter)
162
163 @staticmethod
164 def process_run(command: List[str], **kwargs) -> str:
165 """Run a command and handle its output."""
166 logging.info(f"Execute process: {command!r}, kwargs={kwargs!r}")
167 try:
168 out = subprocess.run(
169 command,
170 **kwargs,
171 capture_output=True,
172 check=True,
173 universal_newlines=True,
174 )
175 except subprocess.CalledProcessError as err:
176 msg = f"Failed to run command: {err!s}"
177 if err.stderr:
178 msg += f" ({err.stderr.strip()!s})"
179 raise Exception(msg) from err
180
181 return out.stdout.strip()
182
183 def run_skopeo_command(self, cmd: str, args: List[str]) -> Dict:
184 """Builds the Skopeo command and runs it"""
185 command = ["skopeo", cmd]
186
187 with tempfile.TemporaryDirectory() as tmp_dir:
188 if self._skopeo_auth_token:
189 auth_config = {
190 "auths": {self.url: {"auth": self._skopeo_auth_token}}
191 }
192 auth_file = os.path.join(tmp_dir, "auth.json")
193 with open(auth_file, "w") as f:
194 os.fchmod(f.fileno(), 0o600)
195 json.dump(auth_config, f)
196 command += ["--authfile", auth_file]
197 command += args
198
199 return json.loads(self.process_run(command))
200
201 def get_arches(self, release: str) -> List[str]:
202 """
203 Permit to get the arches associated to the release
204 """
205 logging.info(f"Getting the arches for {release}")
206 manifest = self.run_skopeo_command(
207 "inspect", [f"docker://{self.url}:{release}", "--raw"]
208 )["manifests"]
209 arches = []
210 for arch in manifest:
211 arches.append(arch["platform"]["architecture"])
212 return arches
213
214 def get_dockerhub_jwt_token(self) -> str:
215 """
216 Permit to get the token associated to the docker account
217 """
218 logging.info("Getting the token form Docker")
142219
220 url_token = "https://hub.docker.com/v2/users/login"
221 data = {"username": self.username, "password": self.password}
222 get_jwt_token = requests.post(url_token, json=data)
223 get_jwt_token.raise_for_status()
224 return get_jwt_token.json()["token"]
143225
144def add_yaml_representer():226 def get_tags_docker(self, release: str, token: str) -> List[str]:
145 def str_presenter(dumper, data):
146 """227 """
147 Permit to format228 Permit to get the tags associated to the release
148 multiline string into
149 yaml file
150 """229 """
151230 logging.info(f"Getting the tags from Docker for {release}")
152 c = "tag:yaml.org,2002:str"231 tags = []
153 if len(data.splitlines()) > 1: # check for multiline string232 result_json = self.run_skopeo_command(
154 return dumper.represent_scalar(c, data, style="|")233 "inspect", [f"docker://{self.url}:{release}", "--raw"]
155 return dumper.represent_scalar(c, data)
156
157 yaml.add_representer(str, str_presenter)
158 yaml.representer.SafeRepresenter.add_representer(str, str_presenter)
159
160
161def _process_run(command: List[str], **kwargs) -> str:
162 """Run a command and handle its output."""
163 logging.info(f"Execute process: {command!r}, kwargs={kwargs!r}")
164 try:
165 out = subprocess.run(
166 command,
167 **kwargs,
168 capture_output=True,
169 check=True,
170 universal_newlines=True,
171 )234 )
172 except subprocess.CalledProcessError as err:235 digest = result_json["manifests"][0]["digest"]
173 msg = f"Failed to run command: {err!s}"
174 if err.stderr:
175 msg += f" ({err.stderr.strip()!s})"
176 raise Exception(msg) from err
177
178 return out.stdout.strip()
179
180
181def get_arches(release: str, image_url: str) -> List[str]:
182 """
183 Permit to get the arches associated to the release
184 """
185 logging.info(f"Getting the arches for {release}")
186 command = ["skopeo", "inspect", f"{image_url}:{release}", "--raw"]
187 manifest = json.loads(_process_run(command))["manifests"]
188 arches = []
189 for arch in manifest:
190 arches.append(arch["platform"]["architecture"])
191 return arches
192
193
194def get_dockerhub_token(username: str, password: str) -> str:
195 """
196 Permit to get the token associated to the docker account
197 """
198 logging.info("Getting the token form Docker")
199
200 url_token = "https://hub.docker.com/v2/users/login"
201 data = {"username": username, "password": password}
202 get_token = requests.post(url_token, json=data)
203 get_token.raise_for_status()
204 return get_token.json()["token"]
205
206
207def get_tags_docker(
208 release: str, token: str, image_url: str, image_namespace: str
209) -> List[str]:
210 """
211 Permit to get the tags associated to the release
212 """
213 logging.info(f"Getting the tags from Docker for {release}")
214 tags = []
215 command = [
216 "skopeo",
217 "inspect",
218 f"{image_url}:{release}",
219 "--raw",
220 ]
221 result_json = _process_run(command)
222 digest = json.loads(result_json)["manifests"][0]["digest"]
223
224 url_dockerhub = "https://hub.docker.com/v2/repositories/"
225 url_dockerhub += f"{image_namespace}/ubuntu/tags/?page_size=999"
226 Headers = {"Authorization": f"JWT {token}"}
227 get_the_tags = requests.get(url_dockerhub, headers=Headers)
228 get_the_tags = get_the_tags.json()["results"]
229 for image in get_the_tags:
230 for info_image in image["images"]:
231 if info_image["digest"] == digest and image["name"] not in tags:
232 tags.append(image["name"])
233
234 return tags
235
236
237def get_tags_aws(release: str, client: boto3.Session, image_url: str) -> List[str]:
238 """
239 Permit to get the tags associated to the release
240 """
241 logging.info(f"Getting the tags from AWS for {release}")
242
243 tags = []
244 command = [
245 "skopeo",
246 "inspect",
247 f"{image_url}:{release}",
248 ]
249 result_json = _process_run(command)
250 digest = json.loads(result_json)["Digest"]
251 response = client.describe_image_tags(repositoryName="ubuntu")
252
253 for image in response["imageTagDetails"]:
254 if (
255 image["imageDetail"]["imageDigest"] == digest
256 and image["imageTag"] not in tags
257 ):
258 tags.append(image["imageTag"])
259 return tags
260236
237 url_dockerhub = "https://hub.docker.com/v2/repositories/"
238 url_dockerhub += f"{self.namespace}/ubuntu/tags/?page_size=999"
239 Headers = {"Authorization": f"JWT {token}"}
240 get_the_tags = requests.get(url_dockerhub, headers=Headers)
241 get_the_tags = get_the_tags.json()["results"]
242 for image in get_the_tags:
243 for info_image in image["images"]:
244 if info_image["digest"] == digest and image["name"] not in tags:
245 tags.append(image["name"])
261246
262def get_fullname(release: str) -> str:247 return tags
263 """
264 Permit to get the full name associated to the release
265 """
266 logging.info(f"Getting full name of {release} ")
267248
268 command = ["ubuntu-distro-info", f"--series={release}", "-f"]249 def get_tags_aws(self, release: str, client: boto3.Session) -> List[str]:
269 result_json = _process_run(command)250 """
270 return result_json.replace("Ubuntu", "").strip()251 Permit to get the tags associated to the release
252 """
253 logging.info(f"Getting the tags from AWS for {release}")
271254
255 tags = []
256 result_json = self.run_skopeo_command(
257 "inspect", [f"docker://{self.url}:{release}"]
258 )
259 digest = result_json["Digest"]
260 response = client.describe_image_tags(repositoryName="ubuntu")
261
262 for image in response["imageTagDetails"]:
263 if (
264 image["imageDetail"]["imageDigest"] == digest
265 and image["imageTag"] not in tags
266 ):
267 tags.append(image["imageTag"])
268 return tags
269
270 def get_fullname(self, release: str) -> str:
271 """
272 Permit to get the full name associated to the release
273 """
274 logging.info(f"Getting full name of {release} ")
272275
273def get_support(series: str, is_lts: bool) -> Dict[str, Dict[str, str]]:276 command = ["ubuntu-distro-info", f"--series={release}", "-f"]
274 """Calculates the end of support dates for a given Ubuntu series"""277 result_json = self.process_run(command)
275 logging.info(f"Getting support information for the {series}")278 return result_json.replace("Ubuntu", "").strip()
276279
277 base_cmd = ["ubuntu-distro-info", "--series", series]280 def get_support(self, series: str, is_lts: bool) -> Dict[str, Dict[str, str]]:
278 eol_cmd = base_cmd + ["--day=eol"]281 """Calculates the end of support dates for a given Ubuntu series"""
282 logging.info(f"Getting support information for the {series}")
279283
280 eol = int(_process_run(eol_cmd))284 base_cmd = ["ubuntu-distro-info", "--series", series]
281 eol_date = NOW + datetime.timedelta(days=eol)285 eol_cmd = base_cmd + ["--day=eol"]
282286
283 support = {"support": {"until": f"{eol_date.month:02d}/{eol_date.year}"}}287 eol = int(self.process_run(eol_cmd))
288 eol_date = self.now + datetime.timedelta(days=eol)
284289
285 if not is_lts:290 support = {"support": {"until": f"{eol_date.month:02d}/{eol_date.year}"}}
286 return support
287291
288 # The it is LTS, and lts_until=until292 if not is_lts:
289 support["support"]["lts_until"] = support["support"]["until"]293 return support
290294
291 eol_esm_cmd = base_cmd + ["--day=eol-esm"]295 # The it is LTS, and lts_until=until
296 support["support"]["lts_until"] = support["support"]["until"]
292297
293 eol_esm = int(_process_run(eol_esm_cmd))298 eol_esm_cmd = base_cmd + ["--day=eol-esm"]
294 eol_esm_date = NOW + datetime.timedelta(days=eol_esm)
295 eol_esm_value = f"{eol_esm_date.month:02d}/{eol_esm_date.year}"
296 support["support"]["esm_until"] = eol_esm_value
297299
298 return support300 eol_esm = int(self.process_run(eol_esm_cmd))
301 eol_esm_date = self.now + datetime.timedelta(days=eol_esm)
302 eol_esm_value = f"{eol_esm_date.month:02d}/{eol_esm_date.year}"
303 support["support"]["esm_until"] = eol_esm_value
299304
305 return support
300306
301def get_deprecated(series: str) -> Dict[str, Dict[str, object]]:307 def get_deprecated(self, series: str) -> Dict[str, Dict[str, object]]:
302 """308 """
303 Calculated the deprecation date309 Calculated the deprecation date
304 and upgrade path for a deprecated release310 and upgrade path for a deprecated release
305 """311 """
306 logging.info(f"Getting support information for the {series}")312 logging.info(f"Getting support information for the {series}")
307313
308 eol_cmd = ["ubuntu-distro-info", "--series", series, "--day=eol"]314 eol_cmd = ["ubuntu-distro-info", "--series", series, "--day=eol"]
309315
310 eol = int(_process_run(eol_cmd))316 eol = int(self.process_run(eol_cmd))
311 eol_date = NOW + datetime.timedelta(days=eol)317 eol_date = self.now + datetime.timedelta(days=eol)
312 # For now, the upgrade path is always the next release318 # For now, the upgrade path is always the next release
313319
314 this_release_cmd = ["ubuntu-distro-info", "--series", series, "--day=release"]320 this_release_cmd = ["ubuntu-distro-info", "--series", series, "--day=release"]
315 this_release = int(_process_run(this_release_cmd))321 this_release = int(self.process_run(this_release_cmd))
316 # add 60 days to the release date, to get the next development version322 # add 60 days to the release date, to get the next development version
317 next_date = NOW + datetime.timedelta(days=this_release + 60)323 next_date = self.now + datetime.timedelta(days=this_release + 60)
318324
319 following_dev_series_cmd = [325 following_dev_series_cmd = [
320 "ubuntu-distro-info",326 "ubuntu-distro-info",
321 "-d",327 "-d",
322 f"--date={next_date.year}-{next_date.month}-{next_date.day}",328 f"--date={next_date.year}-{next_date.month}-{next_date.day}",
323 ]329 ]
324 development_suite_at_eol = _process_run(following_dev_series_cmd)330 development_suite_at_eol = self.process_run(following_dev_series_cmd)
325331
326 upgrade_path_cmd = [332 upgrade_path_cmd = [
327 "ubuntu-distro-info",333 "ubuntu-distro-info",
328 "--series",334 "--series",
329 development_suite_at_eol,335 development_suite_at_eol,
330 "-r",336 "-r",
331 ]337 ]
332 upgrade_path = _process_run(upgrade_path_cmd).strip(" LTS")338 upgrade_path = self.process_run(upgrade_path_cmd).strip(" LTS")
333339
334 return {340 return {
335 "deprecated": {341 "deprecated": {
336 "date": f"{eol_date.month:02d}/{eol_date.year}",342 "date": f"{eol_date.month:02d}/{eol_date.year}",
337 "path": {"track": upgrade_path},343 "path": {"track": upgrade_path},
344 }
338 }345 }
339 }
340
341
342def is_deprecated(series: str) -> bool:
343
344 """Checks whether a series is completely deprecated (both LTS and ESM)"""
345 logging.info(f"Checking is {series} is deprecated")
346 supported_cmd = "ubuntu-distro-info --supported"
347 supported_esm_cmd = supported_cmd + "-esm"
348 all_supported = _process_run(supported_cmd.split(" ")) + _process_run(
349 supported_esm_cmd.split(" ")
350 )
351 return series not in all_supported
352
353346
354def is_lts(series: str) -> bool:347 def is_deprecated(self, series: str) -> bool:
355348 """Checks whether a series is completely deprecated (both LTS and ESM)"""
356 """Checks if a given series is LTS"""349 logging.info(f"Checking is {series} is deprecated")
357 logging.info(f"Checking is {series} is lts")350 supported_cmd = "ubuntu-distro-info --supported"
358351 supported_esm_cmd = supported_cmd + "-esm"
359 cmd = ["ubuntu-distro-info", "--series", series, "-f"]352 all_supported = self.process_run(supported_cmd.split(" ")) + self.process_run(
360353 supported_esm_cmd.split(" ")
361 return "LTS" in _process_run(cmd)354 )
355 return series not in all_supported
362356
357 def is_lts(self, series: str) -> bool:
358 """Checks if a given series is LTS"""
359 logging.info(f"Checking is {series} is lts")
363360
364def get_lowest_risk(tags: List[str]) -> str:361 cmd = ["ubuntu-distro-info", "--series", series, "-f"]
365 """
366 Get the lowest risk associated with the release
367 """
368 risk_sorted = ["stable", "candidate", "beta", "edge"]
369362
370 all_tags_str = " ".join(tags)363 return "LTS" in self.process_run(cmd)
371 for risk in risk_sorted:
372 if risk in all_tags_str:
373 return risk
374364
375 return "edge"365 @staticmethod
366 def get_lowest_risk(tags: List[str]) -> str:
367 """
368 Get the lowest risk associated with the release
369 """
370 risk_sorted = ["stable", "candidate", "beta", "edge"]
376371
372 all_tags_str = " ".join(tags)
373 for risk in risk_sorted:
374 if risk in all_tags_str:
375 return risk
377376
378def get_release(series: str) -> str:377 return "edge"
379 command = ["ubuntu-distro-info", f"--series={series}", "-r"]
380378
381 return _process_run(command)379 def get_release(self, series: str) -> str:
380 command = ["ubuntu-distro-info", f"--series={series}", "-r"]
382381
382 return self.process_run(command)
383383
384def infer_registry_user(384 def infer_registry_user(self) -> object:
385 provider: str, username: str, password: str, dh_token: str = None385 user = None
386) -> object:386 if self.provider == "aws":
387 user = None387 logging.info("Connecting to AWS")
388 if provider == "aws":388 session = boto3.Session(
389 logging.info("Connecting to AWS")389 region_name="us-east-1",
390 session = boto3.Session(390 aws_access_key_id=self.username,
391 region_name="us-east-1",391 aws_secret_access_key=self.password,
392 aws_access_key_id=username,
393 aws_secret_access_key=password,
394 )
395 user = session.client("ecr-public")
396 else:
397 logging.info("Fetching Docker Hub token")
398 if dh_token:
399 user = dh_token
400 else:
401 user = get_dockerhub_token(username, password)
402
403 return user
404
405
406def build_releases_data(
407 list_of_series: List[str],
408 all_tags: List[str],
409 image_url: str,
410 image_ns: str,
411 arguments: argparse.ArgumentParser.parse_args,
412 registry_user: object,
413) -> Dict:
414 """Build the releases info data structure"""
415 releases = []
416 for count, series in enumerate(list_of_series):
417 if series not in all_tags and series != arguments.unpublished_suite:
418 logging.warning(
419 f"Series {series} does not exist in {image_url}. Skipping it..."
420 )392 )
421 continue393 user = session.client("ecr-public")
422394 self._skopeo_auth_token = user.get_authorization_token()[
423 release_data = {}395 "authorizationData"
424396 ]["authorizationToken"]
425 release = get_release(series)
426 if "LTS" in release:
427 release_data["type"] = "LTS"
428
429 release_data["track"] = release.rstrip(" LTS")
430
431 if arguments.unpublished_suite and arguments.unpublished_suite == series:
432 release_data["architectures"] = arguments.unpublished_archs.split()
433 release_data["version"] = get_fullname(arguments.unpublished_suite)
434 release_data["risk"] = get_lowest_risk(arguments.unpublished_tags.split())
435 release_data["tags"] = arguments.unpublished_tags.split()
436 else:397 else:
437 release_data["architectures"] = get_arches(series, image_url)398 logging.info("Fetching Docker Hub token")
438 release_data["version"] = get_fullname(series)399 if self.jwt_token_docker:
439 if arguments.provider == "docker":400 user = self.jwt_token_docker
440 release_data["tags"] = get_tags_docker(
441 series, registry_user, image_url, image_ns
442 )
443 else:401 else:
444 release_data["tags"] = get_tags_aws(series, registry_user, image_url)402 user = self.get_dockerhub_jwt_token()
445 release_data["risk"] = get_lowest_risk(release_data["tags"])403
446404 self._skopeo_auth_token = base64.b64encode(
447 if is_deprecated(series):405 f"{self.username}:{self.password}".encode()
448 release_data["deprecated"] = get_deprecated(series)406 ).decode()
449 else:407
450 release_data["support"] = get_support(series, is_lts(series))408 return user
451409
452 releases.append(release_data)410 def build_releases_data(
453411 self,
454 return releases412 list_of_series: List[str],
455413 all_tags: List[str],
456414 registry_user: object,
457def read_ubuntu_data_template() -> Dict:415 ) -> List[Dict]:
458 """Reads and parses the YAML contents of the data template"""416 """Build the releases info data structure"""
459 template_file = f"{SCRIPT_DIR}/templates/ubuntu.yaml"417 releases = []
460 logging.info(f"Opening the template file {template_file}")418 for count, series in enumerate(list_of_series):
461 with open(template_file) as file:419 if series not in all_tags and series != self.unpublished_suite:
462 try:420 logging.warning(
463 return yaml.safe_load(file)421 f"Series {series} does not exist in {self.url}. Skipping it..."
464 except yaml.YAMLError as exc:422 )
465 logging.error("Error when loading the ubuntu template file")423 continue
466 raise exc
467
468
469def create_data_dir(path: str):
470 """Create data dir if it doesn't exist"""
471 if not os.path.exists(path):
472 logging.info(f"Creating the {path} folder")
473
474 os.makedirs(path)
475
476
477def write_ubuntu_data_file(file_path: str, content: Dict):
478 """Write the YAML content into the ubuntu file path"""
479 with open(file_path, "w") as file:
480 logging.info(f"Create the yaml file {file_path}")
481 yaml.dump(content, file)
482424
425 release_data = {}
483426
484def main():427 release = self.get_release(series)
485 arguments = validate_args(cli_args())428 if "LTS" in release:
486 registry_user = infer_registry_user(429 release_data["type"] = "LTS"
487 arguments.provider,
488 arguments.username,
489 arguments.password,
490 arguments.dockertoken,
491 )
492430
493 add_yaml_representer()431 release_data["track"] = release.rstrip(" LTS")
494 url, ns = build_image_endpoint(arguments.provider, repo_base=arguments.repository)
495432
496 logging.info(f"Getting all tags from {url}")433 if self.unpublished_suite and self.unpublished_suite == series:
497 command_tags = ["skopeo", "list-tags", url]434 release_data["architectures"] = self.unpublished_archs.split()
498 existing_tags = json.loads(_process_run(command_tags))["Tags"]435 release_data["version"] = self.get_fullname(self.unpublished_suite)
436 release_data["risk"] = self.get_lowest_risk(
437 self.unpublished_tags.split()
438 )
439 release_data["tags"] = self.unpublished_tags.split()
440 else:
441 release_data["architectures"] = self.get_arches(series)
442 release_data["version"] = self.get_fullname(series)
443 if self.provider == "docker":
444 release_data["tags"] = self.get_tags_docker(series, registry_user)
445 else:
446 release_data["tags"] = self.get_tags_aws(series, registry_user)
447 release_data["risk"] = self.get_lowest_risk(release_data["tags"])
448
449 if self.is_deprecated(series):
450 release_data["deprecated"] = self.get_deprecated(series)
451 else:
452 release_data["support"] = self.get_support(series, self.is_lts(series))
453
454 releases.append(release_data)
455
456 return releases
457
458 @staticmethod
459 def read_ubuntu_data_template() -> Dict:
460 """Reads and parses the YAML contents of the data template"""
461 template_file = f"{SCRIPT_DIR}/templates/ubuntu.yaml"
462 logging.info(f"Opening the template file {template_file}")
463 with open(template_file) as file:
464 try:
465 return yaml.safe_load(file)
466 except yaml.YAMLError as exc:
467 logging.error("Error when loading the ubuntu template file")
468 raise exc
469
470 @staticmethod
471 def create_data_dir(path: str) -> None:
472 """Create data dir if it doesn't exist"""
473 if not os.path.exists(path):
474 logging.info(f"Creating the {path} folder")
475
476 os.makedirs(path)
477
478 @staticmethod
479 def write_ubuntu_data_file(file_path: str, content: Dict) -> None:
480 """Write the YAML content into the ubuntu file path"""
481 with open(file_path, "w") as file:
482 logging.info(f"Create the yaml file {file_path}")
483 yaml.dump(content, file)
484
485 def main(self) -> None:
486 registry_user = self.infer_registry_user()
487
488 logging.info(f"Getting all tags from {self.url}")
489
490 existing_tags = self.run_skopeo_command("list-tags", [f"docker://{self.url}"])[
491 "Tags"
492 ]
499493
500 logging.info("Getting all the series from ubuntu-distro-info")494 logging.info("Getting all the series from ubuntu-distro-info")
501 command_suites = ["ubuntu-distro-info", "--all"]495 command_suites = ["ubuntu-distro-info", "--all"]
502 series_names = _process_run(command_suites).split("\n")496 series_names = self.process_run(command_suites).split("\n")
503497
504 if arguments.unpublished_suite and arguments.unpublished_suite not in series_names:498 if self.unpublished_suite and self.unpublished_suite not in series_names:
505 logging.error(499 logging.error(
506 f"The provided unpublished suite {arguments.unpublished_suite}"500 f"The provided unpublished suite {self.unpublished_suite}"
507 "is not recognized. Ignoring it"501 "is not recognized. Ignoring it"
508 )502 )
509503
510 logging.info("Building releases info")504 logging.info("Building releases info")
511 releases = build_releases_data(505 releases = self.build_releases_data(series_names, existing_tags, registry_user)
512 series_names, existing_tags, url, ns, arguments, registry_user
513 )
514506
515 dict_file = read_ubuntu_data_template()507 dict_file = self.read_ubuntu_data_template()
516 dict_file["releases"] = releases508 dict_file["releases"] = releases
517509
518 create_data_dir(arguments.data_dir)510 self.create_data_dir(self.data_dir)
519511
520 ubuntu_data_file = f"{arguments.data_dir}/ubuntu.yaml"512 ubuntu_data_file = f"{self.data_dir}/ubuntu.yaml"
521 write_ubuntu_data_file(ubuntu_data_file, dict_file)513 self.write_ubuntu_data_file(ubuntu_data_file, dict_file)
522514
523515
524if __name__ == "__main__":516if __name__ == "__main__":
525 main()517 runner = GenerateUbuntuYaml()
518 runner.main()
