Merge ~cjdc/ubuntu-docker-images/+git/templates:add-skopeo-authfile into ~ubuntu-docker-images/ubuntu-docker-images/+git/templates:main
Proposed by Cristovao Cordeiro
| Status: | Merged |
|---|---|
| Merged at revision: | 8e77445de94d5427288239f37e965eb167d71298 |
| Proposed branch: | ~cjdc/ubuntu-docker-images/+git/templates:add-skopeo-authfile |
| Merge into: | ~ubuntu-docker-images/ubuntu-docker-images/+git/templates:main |
| Diff against target: | 991 lines (+444/-451), 2 files modified: README.md (+2/-2), generate_ubuntu_yaml.py (+442/-449) |
| Related bugs: | |
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Samir Akarioh (community) | Approve | | |
| Ubuntu Docker Images | Pending | | |
Commit message
fix: Skopeo needs an --authfile for private repos
Description of the change
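For context, a minimal sketch of the approach the diff takes: the registry credentials are written to a short-lived `auth.json` and passed to Skopeo via `--authfile`, so manifests in private repositories can be inspected. The function name, image reference, and registry key below are illustrative only, not taken from the script.

```python
import base64
import json
import os
import subprocess
import tempfile


def skopeo_inspect(image_ref: str, username: str, password: str) -> dict:
    """Inspect an image in a private registry via a temporary Skopeo authfile."""
    # Skopeo reads a Docker-style auth.json holding base64-encoded "user:password".
    token = base64.b64encode(f"{username}:{password}".encode()).decode()
    with tempfile.TemporaryDirectory() as tmp_dir:
        auth_file = os.path.join(tmp_dir, "auth.json")
        with open(auth_file, "w") as f:
            os.fchmod(f.fileno(), 0o600)  # keep credentials readable only by the owner
            json.dump({"auths": {"docker.io": {"auth": token}}}, f)
        out = subprocess.run(
            ["skopeo", "inspect", "--authfile", auth_file, image_ref],
            capture_output=True,
            check=True,
            universal_newlines=True,
        )
    return json.loads(out.stdout)


# e.g. skopeo_inspect("docker://docker.io/rocksdev4staging/ubuntu:jammy", "user", "secret")
```

Because the authfile lives in a temporary directory, the credentials are removed as soon as the Skopeo call completes.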
Cristovao Cordeiro (cjdc) wrote:
> You need to change some signatures of some function
good catch. done, ty
Samir Akarioh (samiraka) wrote:
Good for me, you can merge it
Preview Diff
1 | diff --git a/README.md b/README.md |
2 | index 29d4e38..6d86182 100644 |
3 | --- a/README.md |
4 | +++ b/README.md |
5 | @@ -15,12 +15,12 @@ The DevContainer will provide you with a working environment out of the box. **Y |
6 | ```bash |
7 | git clone https://github.com/misterw97/RenderDown |
8 | sudo apt update && sudo apt install -y python3-mako python3-yaml |
9 | -pip install boto3 # if you want to run the generate_ubuntu_yaml file |
10 | +pip install boto3 requests pyyaml && apt install -y distro-info # if you want to run the generate_ubuntu_yaml script |
11 | ``` |
12 | |
13 | #### Generate_ubuntu_yaml |
14 | |
15 | -This script allows to generate the ubuntu.yaml file in order to use it by the RenderDown script. It uses the template ubuntu.yaml located in the template folder. |
16 | +This script allows us to generate the ubuntu.yaml file in order to use it by the RenderDown script. It uses the template ubuntu.yaml located in the template folder. |
17 | |
18 | Here are the available arguments and examples of commands: |
19 | |
20 | diff --git a/generate_ubuntu_yaml.py b/generate_ubuntu_yaml.py |
21 | index 6c70e7c..dacd6e5 100755 |
22 | --- a/generate_ubuntu_yaml.py |
23 | +++ b/generate_ubuntu_yaml.py |
24 | @@ -1,525 +1,518 @@ |
25 | #!/usr/bin/env python3 |
26 | |
27 | import argparse |
28 | +import base64 |
29 | import datetime |
30 | import json |
31 | import logging |
32 | import os |
33 | import subprocess |
34 | +import sys |
35 | from typing import Dict, List |
36 | |
37 | import boto3 |
38 | import requests |
39 | -import sys |
40 | +import tempfile |
41 | import yaml |
42 | |
43 | logging.basicConfig(stream=sys.stdout, level=logging.INFO) |
44 | -NOW = datetime.datetime.now() |
45 | SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) |
46 | |
47 | |
48 | -def cli_args() -> argparse.ArgumentParser: |
49 | - """Argument parser""" |
50 | - parser = argparse.ArgumentParser( |
51 | - description="Generate documentation about Ubuntu for ECR and DockerHub" |
52 | - ) |
53 | - |
54 | - parser.add_argument( |
55 | - "--provider", |
56 | - default="docker", |
57 | - dest="provider", |
58 | - help="aws or docker", |
59 | - required=True, |
60 | - ) |
61 | - parser.add_argument( |
62 | - "--username", |
63 | - default="admin", |
64 | - dest="username", |
65 | - help="either the Docker Hub username, or the AWS access key ID", |
66 | - required=True, |
67 | - ) |
68 | - parser.add_argument( |
69 | - "--password", |
70 | - default="admin", |
71 | - dest="password", |
72 | - help="either the Docker Hub password/token, or the AWS secret access key", |
73 | - required=True, |
74 | - ) |
75 | - parser.add_argument( |
76 | - "--token-docker", |
77 | - dest="dockertoken", |
78 | - default=None, |
79 | - help="JWT token for Docker Hub authentication. \ |
80 | - Only useful for the 'docker' provider.", |
81 | - ) |
82 | - parser.add_argument( |
83 | - "--repository-basename", |
84 | - dest="repository", |
85 | - default=None, |
86 | - help="repository basename of the ubuntu images. \ |
87 | - Used to infer existing information.", |
88 | - ) |
89 | - parser.add_argument( |
90 | - "--data-dir", |
91 | - default="data", |
92 | - dest="data_dir", |
93 | - help="""The path of the folder |
94 | - where the data file will be |
95 | - saved ( if not exist, the script |
96 | - will create the folder)""", |
97 | - ) |
98 | - parser.add_argument( |
99 | - "--unpublished-suite", |
100 | - dest="unpublished_suite", |
101 | - help="""an Ubuntu Suite (e.g. jammy). |
102 | - if given we will take the |
103 | - tags pass on command lines (required) |
104 | - and the arches for this section |
105 | - of the yaml file. |
106 | - """, |
107 | - ) |
108 | - parser.add_argument( |
109 | - "--unpublished-tags", |
110 | - dest="unpublished_tags", |
111 | - help="""list of tags |
112 | - (e.g. 'kinetic 22.10 22.10_edge kinetic)""", |
113 | - ) |
114 | - parser.add_argument( |
115 | - "--unpublished-archs", |
116 | - dest="unpublished_archs", |
117 | - help="list of archs (e.g amd64 arm)", |
118 | - ) |
119 | - |
120 | - return parser |
121 | - |
122 | - |
123 | -def validate_args( |
124 | - parser: argparse.ArgumentParser, |
125 | -) -> argparse.ArgumentParser.parse_args: |
126 | - """Parse and validate the CLI arguments""" |
127 | - args = parser.parse_args() |
128 | - if any( |
129 | - [ |
130 | - args.unpublished_suite is None, |
131 | - args.unpublished_tags is None, |
132 | - args.unpublished_archs is None, |
133 | - ] |
134 | - ) and not all( |
135 | - [ |
136 | - args.unpublished_suite is None, |
137 | - args.unpublished_tags is None, |
138 | - args.unpublished_archs is None, |
139 | - ] |
140 | - ): |
141 | - parser.error( |
142 | - """--unpublished-suite need |
143 | - --unpublished-archs and --unpublished_tags""" |
144 | - ) |
145 | - |
146 | - return args |
147 | +class GenerateUbuntuYaml: |
148 | + def __init__(self): |
149 | + self.now = datetime.datetime.now() |
150 | + self.validate_args() |
151 | + self.add_yaml_representer() |
152 | + self.build_image_endpoint() |
153 | + self._skopeo_auth_token = None |
154 | |
155 | + @staticmethod |
156 | + def cli_args() -> argparse.ArgumentParser: |
157 | + """Argument parser""" |
158 | + parser = argparse.ArgumentParser( |
159 | + description="Generate documentation about Ubuntu for ECR and DockerHub" |
160 | + ) |
161 | |
162 | -def build_image_endpoint(provider: str, repo_base: str = None) -> (str, str): |
163 | - """Define the image's registry URL""" |
164 | - if provider == "aws": |
165 | - registry_url = "docker://public.ecr.aws/" |
166 | - staging_repo = "rocksdev" |
167 | - else: |
168 | - registry_url = "docker://docker.io/" |
169 | - staging_repo = "rocksdev4staging" |
170 | + parser.add_argument( |
171 | + "--provider", |
172 | + default="docker", |
173 | + dest="provider", |
174 | + help="aws or docker", |
175 | + required=True, |
176 | + ) |
177 | + parser.add_argument( |
178 | + "--username", |
179 | + default="admin", |
180 | + dest="username", |
181 | + help="either the Docker Hub username, or the AWS access key ID", |
182 | + required=True, |
183 | + ) |
184 | + parser.add_argument( |
185 | + "--password", |
186 | + default="admin", |
187 | + dest="password", |
188 | + help="either the Docker Hub password/token, or the AWS secret access key", |
189 | + required=True, |
190 | + ) |
191 | + parser.add_argument( |
192 | + "--jwt-token-docker", |
193 | + dest="jwt_token_docker", |
194 | + default=None, |
195 | + help="JWT token for Docker Hub authentication. \ |
196 | + Only useful for the 'docker' provider.", |
197 | + ) |
198 | + parser.add_argument( |
199 | + "--repository-basename", |
200 | + dest="repository", |
201 | + default=None, |
202 | + help="repository basename of the ubuntu images. \ |
203 | + Used to infer existing information.", |
204 | + ) |
205 | + parser.add_argument( |
206 | + "--data-dir", |
207 | + default="data", |
208 | + dest="data_dir", |
209 | + help="""The path of the folder |
210 | + where the data file will be |
211 | + saved ( if not exist, the script |
212 | + will create the folder)""", |
213 | + ) |
214 | + parser.add_argument( |
215 | + "--unpublished-suite", |
216 | + dest="unpublished_suite", |
217 | + help="""an Ubuntu Suite (e.g. jammy). |
218 | + if given we will take the |
219 | + tags pass on command lines (required) |
220 | + and the arches for this section |
221 | + of the yaml file. |
222 | + """, |
223 | + ) |
224 | + parser.add_argument( |
225 | + "--unpublished-tags", |
226 | + dest="unpublished_tags", |
227 | + help="""list of tags |
228 | + (e.g. 'kinetic 22.10 22.10_edge kinetic)""", |
229 | + ) |
230 | + parser.add_argument( |
231 | + "--unpublished-archs", |
232 | + dest="unpublished_archs", |
233 | + help="list of archs (e.g amd64 arm)", |
234 | + ) |
235 | |
236 | - if repo_base is None: |
237 | - logging.warning("Using staging repository") |
238 | - url = f"{registry_url}{staging_repo}/ubuntu" |
239 | - namespace = staging_repo |
240 | - else: |
241 | - url = f"{registry_url}{repo_base}/ubuntu" |
242 | - namespace = repo_base |
243 | + return parser |
244 | + |
245 | + def validate_args(self) -> None: |
246 | + """Parse and validate the CLI arguments""" |
247 | + parser = self.cli_args() |
248 | + parser.parse_args(namespace=self) |
249 | + if any( |
250 | + [ |
251 | + self.unpublished_suite is None, |
252 | + self.unpublished_tags is None, |
253 | + self.unpublished_archs is None, |
254 | + ] |
255 | + ) and not all( |
256 | + [ |
257 | + self.unpublished_suite is None, |
258 | + self.unpublished_tags is None, |
259 | + self.unpublished_archs is None, |
260 | + ] |
261 | + ): |
262 | + parser.error( |
263 | + """--unpublished-suite need |
264 | + --unpublished-archs and --unpublished_tags""" |
265 | + ) |
266 | |
267 | - logging.info(f"Using {url} to collect information") |
268 | + def build_image_endpoint(self) -> None: |
269 | + """Define the image's registry URL""" |
270 | + if self.provider == "aws": |
271 | + registry_url = "public.ecr.aws/" |
272 | + staging_repo = "rocksdev" |
273 | + else: |
274 | + registry_url = "docker.io/" |
275 | + staging_repo = "rocksdev4staging" |
276 | |
277 | - return url, namespace |
278 | + if self.repository is None: |
279 | + logging.warning("Using staging repository") |
280 | + self.url = f"{registry_url}{staging_repo}/ubuntu" |
281 | + self.namespace = staging_repo |
282 | + else: |
283 | + self.url = f"{registry_url}{self.repository}/ubuntu" |
284 | + self.namespace = self.repository |
285 | + |
286 | + logging.info(f"Using {self.url} to collect information") |
287 | + |
288 | + @staticmethod |
289 | + def add_yaml_representer() -> None: |
290 | + def str_presenter(dumper, data): |
291 | + """ |
292 | + Permit to format |
293 | + multiline string into |
294 | + yaml file |
295 | + """ |
296 | + |
297 | + c = "tag:yaml.org,2002:str" |
298 | + if len(data.splitlines()) > 1: # check for multiline string |
299 | + return dumper.represent_scalar(c, data, style="|") |
300 | + return dumper.represent_scalar(c, data) |
301 | + |
302 | + yaml.add_representer(str, str_presenter) |
303 | + yaml.representer.SafeRepresenter.add_representer(str, str_presenter) |
304 | + |
305 | + @staticmethod |
306 | + def process_run(command: List[str], **kwargs) -> str: |
307 | + """Run a command and handle its output.""" |
308 | + logging.info(f"Execute process: {command!r}, kwargs={kwargs!r}") |
309 | + try: |
310 | + out = subprocess.run( |
311 | + command, |
312 | + **kwargs, |
313 | + capture_output=True, |
314 | + check=True, |
315 | + universal_newlines=True, |
316 | + ) |
317 | + except subprocess.CalledProcessError as err: |
318 | + msg = f"Failed to run command: {err!s}" |
319 | + if err.stderr: |
320 | + msg += f" ({err.stderr.strip()!s})" |
321 | + raise Exception(msg) from err |
322 | + |
323 | + return out.stdout.strip() |
324 | + |
325 | + def run_skopeo_command(self, cmd: str, args: List[str]) -> Dict: |
326 | + """Builds the Skopeo command and runs it""" |
327 | + command = ["skopeo", cmd] |
328 | + |
329 | + with tempfile.TemporaryDirectory() as tmp_dir: |
330 | + if self._skopeo_auth_token: |
331 | + auth_config = { |
332 | + "auths": {self.url: {"auth": self._skopeo_auth_token}} |
333 | + } |
334 | + auth_file = os.path.join(tmp_dir, "auth.json") |
335 | + with open(auth_file, "w") as f: |
336 | + os.fchmod(f.fileno(), 0o600) |
337 | + json.dump(auth_config, f) |
338 | + command += ["--authfile", auth_file] |
339 | + command += args |
340 | + |
341 | + return json.loads(self.process_run(command)) |
342 | + |
343 | + def get_arches(self, release: str) -> List[str]: |
344 | + """ |
345 | + Permit to get the arches associated to the release |
346 | + """ |
347 | + logging.info(f"Getting the arches for {release}") |
348 | + manifest = self.run_skopeo_command( |
349 | + "inspect", [f"docker://{self.url}:{release}", "--raw"] |
350 | + )["manifests"] |
351 | + arches = [] |
352 | + for arch in manifest: |
353 | + arches.append(arch["platform"]["architecture"]) |
354 | + return arches |
355 | + |
356 | + def get_dockerhub_jwt_token(self) -> str: |
357 | + """ |
358 | + Permit to get the token associated to the docker account |
359 | + """ |
360 | + logging.info("Getting the token form Docker") |
361 | |
362 | + url_token = "https://hub.docker.com/v2/users/login" |
363 | + data = {"username": self.username, "password": self.password} |
364 | + get_jwt_token = requests.post(url_token, json=data) |
365 | + get_jwt_token.raise_for_status() |
366 | + return get_jwt_token.json()["token"] |
367 | |
368 | -def add_yaml_representer(): |
369 | - def str_presenter(dumper, data): |
370 | + def get_tags_docker(self, release: str, token: str) -> List[str]: |
371 | """ |
372 | - Permit to format |
373 | - multiline string into |
374 | - yaml file |
375 | + Permit to get the tags associated to the release |
376 | """ |
377 | - |
378 | - c = "tag:yaml.org,2002:str" |
379 | - if len(data.splitlines()) > 1: # check for multiline string |
380 | - return dumper.represent_scalar(c, data, style="|") |
381 | - return dumper.represent_scalar(c, data) |
382 | - |
383 | - yaml.add_representer(str, str_presenter) |
384 | - yaml.representer.SafeRepresenter.add_representer(str, str_presenter) |
385 | - |
386 | - |
387 | -def _process_run(command: List[str], **kwargs) -> str: |
388 | - """Run a command and handle its output.""" |
389 | - logging.info(f"Execute process: {command!r}, kwargs={kwargs!r}") |
390 | - try: |
391 | - out = subprocess.run( |
392 | - command, |
393 | - **kwargs, |
394 | - capture_output=True, |
395 | - check=True, |
396 | - universal_newlines=True, |
397 | + logging.info(f"Getting the tags from Docker for {release}") |
398 | + tags = [] |
399 | + result_json = self.run_skopeo_command( |
400 | + "inspect", [f"docker://{self.url}:{release}", "--raw"] |
401 | ) |
402 | - except subprocess.CalledProcessError as err: |
403 | - msg = f"Failed to run command: {err!s}" |
404 | - if err.stderr: |
405 | - msg += f" ({err.stderr.strip()!s})" |
406 | - raise Exception(msg) from err |
407 | - |
408 | - return out.stdout.strip() |
409 | - |
410 | - |
411 | -def get_arches(release: str, image_url: str) -> List[str]: |
412 | - """ |
413 | - Permit to get the arches associated to the release |
414 | - """ |
415 | - logging.info(f"Getting the arches for {release}") |
416 | - command = ["skopeo", "inspect", f"{image_url}:{release}", "--raw"] |
417 | - manifest = json.loads(_process_run(command))["manifests"] |
418 | - arches = [] |
419 | - for arch in manifest: |
420 | - arches.append(arch["platform"]["architecture"]) |
421 | - return arches |
422 | - |
423 | - |
424 | -def get_dockerhub_token(username: str, password: str) -> str: |
425 | - """ |
426 | - Permit to get the token associated to the docker account |
427 | - """ |
428 | - logging.info("Getting the token form Docker") |
429 | - |
430 | - url_token = "https://hub.docker.com/v2/users/login" |
431 | - data = {"username": username, "password": password} |
432 | - get_token = requests.post(url_token, json=data) |
433 | - get_token.raise_for_status() |
434 | - return get_token.json()["token"] |
435 | - |
436 | - |
437 | -def get_tags_docker( |
438 | - release: str, token: str, image_url: str, image_namespace: str |
439 | -) -> List[str]: |
440 | - """ |
441 | - Permit to get the tags associated to the release |
442 | - """ |
443 | - logging.info(f"Getting the tags from Docker for {release}") |
444 | - tags = [] |
445 | - command = [ |
446 | - "skopeo", |
447 | - "inspect", |
448 | - f"{image_url}:{release}", |
449 | - "--raw", |
450 | - ] |
451 | - result_json = _process_run(command) |
452 | - digest = json.loads(result_json)["manifests"][0]["digest"] |
453 | - |
454 | - url_dockerhub = "https://hub.docker.com/v2/repositories/" |
455 | - url_dockerhub += f"{image_namespace}/ubuntu/tags/?page_size=999" |
456 | - Headers = {"Authorization": f"JWT {token}"} |
457 | - get_the_tags = requests.get(url_dockerhub, headers=Headers) |
458 | - get_the_tags = get_the_tags.json()["results"] |
459 | - for image in get_the_tags: |
460 | - for info_image in image["images"]: |
461 | - if info_image["digest"] == digest and image["name"] not in tags: |
462 | - tags.append(image["name"]) |
463 | - |
464 | - return tags |
465 | - |
466 | - |
467 | -def get_tags_aws(release: str, client: boto3.Session, image_url: str) -> List[str]: |
468 | - """ |
469 | - Permit to get the tags associated to the release |
470 | - """ |
471 | - logging.info(f"Getting the tags from AWS for {release}") |
472 | - |
473 | - tags = [] |
474 | - command = [ |
475 | - "skopeo", |
476 | - "inspect", |
477 | - f"{image_url}:{release}", |
478 | - ] |
479 | - result_json = _process_run(command) |
480 | - digest = json.loads(result_json)["Digest"] |
481 | - response = client.describe_image_tags(repositoryName="ubuntu") |
482 | - |
483 | - for image in response["imageTagDetails"]: |
484 | - if ( |
485 | - image["imageDetail"]["imageDigest"] == digest |
486 | - and image["imageTag"] not in tags |
487 | - ): |
488 | - tags.append(image["imageTag"]) |
489 | - return tags |
490 | + digest = result_json["manifests"][0]["digest"] |
491 | |
492 | + url_dockerhub = "https://hub.docker.com/v2/repositories/" |
493 | + url_dockerhub += f"{self.namespace}/ubuntu/tags/?page_size=999" |
494 | + Headers = {"Authorization": f"JWT {token}"} |
495 | + get_the_tags = requests.get(url_dockerhub, headers=Headers) |
496 | + get_the_tags = get_the_tags.json()["results"] |
497 | + for image in get_the_tags: |
498 | + for info_image in image["images"]: |
499 | + if info_image["digest"] == digest and image["name"] not in tags: |
500 | + tags.append(image["name"]) |
501 | |
502 | -def get_fullname(release: str) -> str: |
503 | - """ |
504 | - Permit to get the full name associated to the release |
505 | - """ |
506 | - logging.info(f"Getting full name of {release} ") |
507 | + return tags |
508 | |
509 | - command = ["ubuntu-distro-info", f"--series={release}", "-f"] |
510 | - result_json = _process_run(command) |
511 | - return result_json.replace("Ubuntu", "").strip() |
512 | + def get_tags_aws(self, release: str, client: boto3.Session) -> List[str]: |
513 | + """ |
514 | + Permit to get the tags associated to the release |
515 | + """ |
516 | + logging.info(f"Getting the tags from AWS for {release}") |
517 | |
518 | + tags = [] |
519 | + result_json = self.run_skopeo_command( |
520 | + "inspect", [f"docker://{self.url}:{release}"] |
521 | + ) |
522 | + digest = result_json["Digest"] |
523 | + response = client.describe_image_tags(repositoryName="ubuntu") |
524 | + |
525 | + for image in response["imageTagDetails"]: |
526 | + if ( |
527 | + image["imageDetail"]["imageDigest"] == digest |
528 | + and image["imageTag"] not in tags |
529 | + ): |
530 | + tags.append(image["imageTag"]) |
531 | + return tags |
532 | + |
533 | + def get_fullname(self, release: str) -> str: |
534 | + """ |
535 | + Permit to get the full name associated to the release |
536 | + """ |
537 | + logging.info(f"Getting full name of {release} ") |
538 | |
539 | -def get_support(series: str, is_lts: bool) -> Dict[str, Dict[str, str]]: |
540 | - """Calculates the end of support dates for a given Ubuntu series""" |
541 | - logging.info(f"Getting support information for the {series}") |
542 | + command = ["ubuntu-distro-info", f"--series={release}", "-f"] |
543 | + result_json = self.process_run(command) |
544 | + return result_json.replace("Ubuntu", "").strip() |
545 | |
546 | - base_cmd = ["ubuntu-distro-info", "--series", series] |
547 | - eol_cmd = base_cmd + ["--day=eol"] |
548 | + def get_support(self, series: str, is_lts: bool) -> Dict[str, Dict[str, str]]: |
549 | + """Calculates the end of support dates for a given Ubuntu series""" |
550 | + logging.info(f"Getting support information for the {series}") |
551 | |
552 | - eol = int(_process_run(eol_cmd)) |
553 | - eol_date = NOW + datetime.timedelta(days=eol) |
554 | + base_cmd = ["ubuntu-distro-info", "--series", series] |
555 | + eol_cmd = base_cmd + ["--day=eol"] |
556 | |
557 | - support = {"support": {"until": f"{eol_date.month:02d}/{eol_date.year}"}} |
558 | + eol = int(self.process_run(eol_cmd)) |
559 | + eol_date = self.now + datetime.timedelta(days=eol) |
560 | |
561 | - if not is_lts: |
562 | - return support |
563 | + support = {"support": {"until": f"{eol_date.month:02d}/{eol_date.year}"}} |
564 | |
565 | - # The it is LTS, and lts_until=until |
566 | - support["support"]["lts_until"] = support["support"]["until"] |
567 | + if not is_lts: |
568 | + return support |
569 | |
570 | - eol_esm_cmd = base_cmd + ["--day=eol-esm"] |
571 | + # The it is LTS, and lts_until=until |
572 | + support["support"]["lts_until"] = support["support"]["until"] |
573 | |
574 | - eol_esm = int(_process_run(eol_esm_cmd)) |
575 | - eol_esm_date = NOW + datetime.timedelta(days=eol_esm) |
576 | - eol_esm_value = f"{eol_esm_date.month:02d}/{eol_esm_date.year}" |
577 | - support["support"]["esm_until"] = eol_esm_value |
578 | + eol_esm_cmd = base_cmd + ["--day=eol-esm"] |
579 | |
580 | - return support |
581 | + eol_esm = int(self.process_run(eol_esm_cmd)) |
582 | + eol_esm_date = self.now + datetime.timedelta(days=eol_esm) |
583 | + eol_esm_value = f"{eol_esm_date.month:02d}/{eol_esm_date.year}" |
584 | + support["support"]["esm_until"] = eol_esm_value |
585 | |
586 | + return support |
587 | |
588 | -def get_deprecated(series: str) -> Dict[str, Dict[str, object]]: |
589 | - """ |
590 | - Calculated the deprecation date |
591 | - and upgrade path for a deprecated release |
592 | - """ |
593 | - logging.info(f"Getting support information for the {series}") |
594 | + def get_deprecated(self, series: str) -> Dict[str, Dict[str, object]]: |
595 | + """ |
596 | + Calculated the deprecation date |
597 | + and upgrade path for a deprecated release |
598 | + """ |
599 | + logging.info(f"Getting support information for the {series}") |
600 | |
601 | - eol_cmd = ["ubuntu-distro-info", "--series", series, "--day=eol"] |
602 | + eol_cmd = ["ubuntu-distro-info", "--series", series, "--day=eol"] |
603 | |
604 | - eol = int(_process_run(eol_cmd)) |
605 | - eol_date = NOW + datetime.timedelta(days=eol) |
606 | - # For now, the upgrade path is always the next release |
607 | + eol = int(self.process_run(eol_cmd)) |
608 | + eol_date = self.now + datetime.timedelta(days=eol) |
609 | + # For now, the upgrade path is always the next release |
610 | |
611 | - this_release_cmd = ["ubuntu-distro-info", "--series", series, "--day=release"] |
612 | - this_release = int(_process_run(this_release_cmd)) |
613 | - # add 60 days to the release date, to get the next development version |
614 | - next_date = NOW + datetime.timedelta(days=this_release + 60) |
615 | + this_release_cmd = ["ubuntu-distro-info", "--series", series, "--day=release"] |
616 | + this_release = int(self.process_run(this_release_cmd)) |
617 | + # add 60 days to the release date, to get the next development version |
618 | + next_date = self.now + datetime.timedelta(days=this_release + 60) |
619 | |
620 | - following_dev_series_cmd = [ |
621 | - "ubuntu-distro-info", |
622 | - "-d", |
623 | - f"--date={next_date.year}-{next_date.month}-{next_date.day}", |
624 | - ] |
625 | - development_suite_at_eol = _process_run(following_dev_series_cmd) |
626 | + following_dev_series_cmd = [ |
627 | + "ubuntu-distro-info", |
628 | + "-d", |
629 | + f"--date={next_date.year}-{next_date.month}-{next_date.day}", |
630 | + ] |
631 | + development_suite_at_eol = self.process_run(following_dev_series_cmd) |
632 | |
633 | - upgrade_path_cmd = [ |
634 | - "ubuntu-distro-info", |
635 | - "--series", |
636 | - development_suite_at_eol, |
637 | - "-r", |
638 | - ] |
639 | - upgrade_path = _process_run(upgrade_path_cmd).strip(" LTS") |
640 | + upgrade_path_cmd = [ |
641 | + "ubuntu-distro-info", |
642 | + "--series", |
643 | + development_suite_at_eol, |
644 | + "-r", |
645 | + ] |
646 | + upgrade_path = self.process_run(upgrade_path_cmd).strip(" LTS") |
647 | |
648 | - return { |
649 | - "deprecated": { |
650 | - "date": f"{eol_date.month:02d}/{eol_date.year}", |
651 | - "path": {"track": upgrade_path}, |
652 | + return { |
653 | + "deprecated": { |
654 | + "date": f"{eol_date.month:02d}/{eol_date.year}", |
655 | + "path": {"track": upgrade_path}, |
656 | + } |
657 | } |
658 | - } |
659 | - |
660 | - |
661 | -def is_deprecated(series: str) -> bool: |
662 | - |
663 | - """Checks whether a series is completely deprecated (both LTS and ESM)""" |
664 | - logging.info(f"Checking is {series} is deprecated") |
665 | - supported_cmd = "ubuntu-distro-info --supported" |
666 | - supported_esm_cmd = supported_cmd + "-esm" |
667 | - all_supported = _process_run(supported_cmd.split(" ")) + _process_run( |
668 | - supported_esm_cmd.split(" ") |
669 | - ) |
670 | - return series not in all_supported |
671 | - |
672 | |
673 | -def is_lts(series: str) -> bool: |
674 | - |
675 | - """Checks if a given series is LTS""" |
676 | - logging.info(f"Checking is {series} is lts") |
677 | - |
678 | - cmd = ["ubuntu-distro-info", "--series", series, "-f"] |
679 | - |
680 | - return "LTS" in _process_run(cmd) |
681 | + def is_deprecated(self, series: str) -> bool: |
682 | + """Checks whether a series is completely deprecated (both LTS and ESM)""" |
683 | + logging.info(f"Checking is {series} is deprecated") |
684 | + supported_cmd = "ubuntu-distro-info --supported" |
685 | + supported_esm_cmd = supported_cmd + "-esm" |
686 | + all_supported = self.process_run(supported_cmd.split(" ")) + self.process_run( |
687 | + supported_esm_cmd.split(" ") |
688 | + ) |
689 | + return series not in all_supported |
690 | |
691 | + def is_lts(self, series: str) -> bool: |
692 | + """Checks if a given series is LTS""" |
693 | + logging.info(f"Checking is {series} is lts") |
694 | |
695 | -def get_lowest_risk(tags: List[str]) -> str: |
696 | - """ |
697 | - Get the lowest risk associated with the release |
698 | - """ |
699 | - risk_sorted = ["stable", "candidate", "beta", "edge"] |
700 | + cmd = ["ubuntu-distro-info", "--series", series, "-f"] |
701 | |
702 | - all_tags_str = " ".join(tags) |
703 | - for risk in risk_sorted: |
704 | - if risk in all_tags_str: |
705 | - return risk |
706 | + return "LTS" in self.process_run(cmd) |
707 | |
708 | - return "edge" |
709 | + @staticmethod |
710 | + def get_lowest_risk(tags: List[str]) -> str: |
711 | + """ |
712 | + Get the lowest risk associated with the release |
713 | + """ |
714 | + risk_sorted = ["stable", "candidate", "beta", "edge"] |
715 | |
716 | + all_tags_str = " ".join(tags) |
717 | + for risk in risk_sorted: |
718 | + if risk in all_tags_str: |
719 | + return risk |
720 | |
721 | -def get_release(series: str) -> str: |
722 | - command = ["ubuntu-distro-info", f"--series={series}", "-r"] |
723 | + return "edge" |
724 | |
725 | - return _process_run(command) |
726 | + def get_release(self, series: str) -> str: |
727 | + command = ["ubuntu-distro-info", f"--series={series}", "-r"] |
728 | |
729 | + return self.process_run(command) |
730 | |
731 | -def infer_registry_user( |
732 | - provider: str, username: str, password: str, dh_token: str = None |
733 | -) -> object: |
734 | - user = None |
735 | - if provider == "aws": |
736 | - logging.info("Connecting to AWS") |
737 | - session = boto3.Session( |
738 | - region_name="us-east-1", |
739 | - aws_access_key_id=username, |
740 | - aws_secret_access_key=password, |
741 | - ) |
742 | - user = session.client("ecr-public") |
743 | - else: |
744 | - logging.info("Fetching Docker Hub token") |
745 | - if dh_token: |
746 | - user = dh_token |
747 | - else: |
748 | - user = get_dockerhub_token(username, password) |
749 | - |
750 | - return user |
751 | - |
752 | - |
753 | -def build_releases_data( |
754 | - list_of_series: List[str], |
755 | - all_tags: List[str], |
756 | - image_url: str, |
757 | - image_ns: str, |
758 | - arguments: argparse.ArgumentParser.parse_args, |
759 | - registry_user: object, |
760 | -) -> Dict: |
761 | - """Build the releases info data structure""" |
762 | - releases = [] |
763 | - for count, series in enumerate(list_of_series): |
764 | - if series not in all_tags and series != arguments.unpublished_suite: |
765 | - logging.warning( |
766 | - f"Series {series} does not exist in {image_url}. Skipping it..." |
767 | + def infer_registry_user(self) -> object: |
768 | + user = None |
769 | + if self.provider == "aws": |
770 | + logging.info("Connecting to AWS") |
771 | + session = boto3.Session( |
772 | + region_name="us-east-1", |
773 | + aws_access_key_id=self.username, |
774 | + aws_secret_access_key=self.password, |
775 | ) |
776 | - continue |
777 | - |
778 | - release_data = {} |
779 | - |
780 | - release = get_release(series) |
781 | - if "LTS" in release: |
782 | - release_data["type"] = "LTS" |
783 | - |
784 | - release_data["track"] = release.rstrip(" LTS") |
785 | - |
786 | - if arguments.unpublished_suite and arguments.unpublished_suite == series: |
787 | - release_data["architectures"] = arguments.unpublished_archs.split() |
788 | - release_data["version"] = get_fullname(arguments.unpublished_suite) |
789 | - release_data["risk"] = get_lowest_risk(arguments.unpublished_tags.split()) |
790 | - release_data["tags"] = arguments.unpublished_tags.split() |
791 | + user = session.client("ecr-public") |
792 | + self._skopeo_auth_token = user.get_authorization_token()[ |
793 | + "authorizationData" |
794 | + ]["authorizationToken"] |
795 | else: |
796 | - release_data["architectures"] = get_arches(series, image_url) |
797 | - release_data["version"] = get_fullname(series) |
798 | - if arguments.provider == "docker": |
799 | - release_data["tags"] = get_tags_docker( |
800 | - series, registry_user, image_url, image_ns |
801 | - ) |
802 | + logging.info("Fetching Docker Hub token") |
803 | + if self.jwt_token_docker: |
804 | + user = self.jwt_token_docker |
805 | else: |
806 | - release_data["tags"] = get_tags_aws(series, registry_user, image_url) |
807 | - release_data["risk"] = get_lowest_risk(release_data["tags"]) |
808 | - |
809 | - if is_deprecated(series): |
810 | - release_data["deprecated"] = get_deprecated(series) |
811 | - else: |
812 | - release_data["support"] = get_support(series, is_lts(series)) |
813 | - |
814 | - releases.append(release_data) |
815 | - |
816 | - return releases |
817 | - |
818 | - |
819 | -def read_ubuntu_data_template() -> Dict: |
820 | - """Reads and parses the YAML contents of the data template""" |
821 | - template_file = f"{SCRIPT_DIR}/templates/ubuntu.yaml" |
822 | - logging.info(f"Opening the template file {template_file}") |
823 | - with open(template_file) as file: |
824 | - try: |
825 | - return yaml.safe_load(file) |
826 | - except yaml.YAMLError as exc: |
827 | - logging.error("Error when loading the ubuntu template file") |
828 | - raise exc |
829 | - |
830 | - |
831 | -def create_data_dir(path: str): |
832 | - """Create data dir if it doesn't exist""" |
833 | - if not os.path.exists(path): |
834 | - logging.info(f"Creating the {path} folder") |
835 | - |
836 | - os.makedirs(path) |
837 | - |
838 | - |
839 | -def write_ubuntu_data_file(file_path: str, content: Dict): |
840 | - """Write the YAML content into the ubuntu file path""" |
841 | - with open(file_path, "w") as file: |
842 | - logging.info(f"Create the yaml file {file_path}") |
843 | - yaml.dump(content, file) |
844 | + user = self.get_dockerhub_jwt_token() |
845 | + |
846 | + self._skopeo_auth_token = base64.b64encode( |
847 | + f"{self.username}:{self.password}".encode() |
848 | + ).decode() |
849 | + |
850 | + return user |
851 | + |
852 | + def build_releases_data( |
853 | + self, |
854 | + list_of_series: List[str], |
855 | + all_tags: List[str], |
856 | + registry_user: object, |
857 | + ) -> List[Dict]: |
858 | + """Build the releases info data structure""" |
859 | + releases = [] |
860 | + for count, series in enumerate(list_of_series): |
861 | + if series not in all_tags and series != self.unpublished_suite: |
862 | + logging.warning( |
863 | + f"Series {series} does not exist in {self.url}. Skipping it..." |
864 | + ) |
865 | + continue |
866 | |
867 | + release_data = {} |
868 | |
869 | -def main(): |
870 | - arguments = validate_args(cli_args()) |
871 | - registry_user = infer_registry_user( |
872 | - arguments.provider, |
873 | - arguments.username, |
874 | - arguments.password, |
875 | - arguments.dockertoken, |
876 | - ) |
877 | + release = self.get_release(series) |
878 | + if "LTS" in release: |
879 | + release_data["type"] = "LTS" |
880 | |
881 | - add_yaml_representer() |
882 | - url, ns = build_image_endpoint(arguments.provider, repo_base=arguments.repository) |
883 | + release_data["track"] = release.rstrip(" LTS") |
884 | |
885 | - logging.info(f"Getting all tags from {url}") |
886 | - command_tags = ["skopeo", "list-tags", url] |
887 | - existing_tags = json.loads(_process_run(command_tags))["Tags"] |
888 | + if self.unpublished_suite and self.unpublished_suite == series: |
889 | + release_data["architectures"] = self.unpublished_archs.split() |
890 | + release_data["version"] = self.get_fullname(self.unpublished_suite) |
891 | + release_data["risk"] = self.get_lowest_risk( |
892 | + self.unpublished_tags.split() |
893 | + ) |
894 | + release_data["tags"] = self.unpublished_tags.split() |
895 | + else: |
896 | + release_data["architectures"] = self.get_arches(series) |
897 | + release_data["version"] = self.get_fullname(series) |
898 | + if self.provider == "docker": |
899 | + release_data["tags"] = self.get_tags_docker(series, registry_user) |
900 | + else: |
901 | + release_data["tags"] = self.get_tags_aws(series, registry_user) |
902 | + release_data["risk"] = self.get_lowest_risk(release_data["tags"]) |
903 | + |
904 | + if self.is_deprecated(series): |
905 | + release_data["deprecated"] = self.get_deprecated(series) |
906 | + else: |
907 | + release_data["support"] = self.get_support(series, self.is_lts(series)) |
908 | + |
909 | + releases.append(release_data) |
910 | + |
911 | + return releases |
912 | + |
913 | + @staticmethod |
914 | + def read_ubuntu_data_template() -> Dict: |
915 | + """Reads and parses the YAML contents of the data template""" |
916 | + template_file = f"{SCRIPT_DIR}/templates/ubuntu.yaml" |
917 | + logging.info(f"Opening the template file {template_file}") |
918 | + with open(template_file) as file: |
919 | + try: |
920 | + return yaml.safe_load(file) |
921 | + except yaml.YAMLError as exc: |
922 | + logging.error("Error when loading the ubuntu template file") |
923 | + raise exc |
924 | + |
925 | + @staticmethod |
926 | + def create_data_dir(path: str) -> None: |
927 | + """Create data dir if it doesn't exist""" |
928 | + if not os.path.exists(path): |
929 | + logging.info(f"Creating the {path} folder") |
930 | + |
931 | + os.makedirs(path) |
932 | + |
933 | + @staticmethod |
934 | + def write_ubuntu_data_file(file_path: str, content: Dict) -> None: |
935 | + """Write the YAML content into the ubuntu file path""" |
936 | + with open(file_path, "w") as file: |
937 | + logging.info(f"Create the yaml file {file_path}") |
938 | + yaml.dump(content, file) |
939 | + |
940 | + def main(self) -> None: |
941 | + registry_user = self.infer_registry_user() |
942 | + |
943 | + logging.info(f"Getting all tags from {self.url}") |
944 | + |
945 | + existing_tags = self.run_skopeo_command("list-tags", [f"docker://{self.url}"])[ |
946 | + "Tags" |
947 | + ] |
948 | |
949 | - logging.info("Getting all the series from ubuntu-distro-info") |
950 | - command_suites = ["ubuntu-distro-info", "--all"] |
951 | - series_names = _process_run(command_suites).split("\n") |
952 | + logging.info("Getting all the series from ubuntu-distro-info") |
953 | + command_suites = ["ubuntu-distro-info", "--all"] |
954 | + series_names = self.process_run(command_suites).split("\n") |
955 | |
956 | - if arguments.unpublished_suite and arguments.unpublished_suite not in series_names: |
957 | - logging.error( |
958 | - f"The provided unpublished suite {arguments.unpublished_suite}" |
959 | - "is not recognized. Ignoring it" |
960 | - ) |
961 | + if self.unpublished_suite and self.unpublished_suite not in series_names: |
962 | + logging.error( |
963 | + f"The provided unpublished suite {self.unpublished_suite}" |
964 | + "is not recognized. Ignoring it" |
965 | + ) |
966 | |
967 | - logging.info("Building releases info") |
968 | - releases = build_releases_data( |
969 | - series_names, existing_tags, url, ns, arguments, registry_user |
970 | - ) |
971 | + logging.info("Building releases info") |
972 | + releases = self.build_releases_data(series_names, existing_tags, registry_user) |
973 | |
974 | - dict_file = read_ubuntu_data_template() |
975 | - dict_file["releases"] = releases |
976 | + dict_file = self.read_ubuntu_data_template() |
977 | + dict_file["releases"] = releases |
978 | |
979 | - create_data_dir(arguments.data_dir) |
980 | + self.create_data_dir(self.data_dir) |
981 | |
982 | - ubuntu_data_file = f"{arguments.data_dir}/ubuntu.yaml" |
983 | - write_ubuntu_data_file(ubuntu_data_file, dict_file) |
984 | + ubuntu_data_file = f"{self.data_dir}/ubuntu.yaml" |
985 | + self.write_ubuntu_data_file(ubuntu_data_file, dict_file) |
986 | |
987 | |
988 | if __name__ == "__main__": |
989 | - main() |
990 | + runner = GenerateUbuntuYaml() |
991 | + runner.main() |